[yt-svn] commit/yt: 11 new changesets

commits-noreply at bitbucket.org commits-noreply at bitbucket.org
Tue Feb 16 19:34:26 PST 2016


11 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/28d27a45de8a/
Changeset:   28d27a45de8a
Branch:      yt
User:        atmyers
Date:        2016-01-25 08:28:39+00:00
Summary:     Making off axis projections have the same orientation vectors as before.
Affected #:  1 file

diff -r 62e350b9531193afbbb0a6d97d3b1233ef25b4d2 -r 28d27a45de8ae266102e6e97712bedc129772c2d yt/visualization/volume_rendering/off_axis_projection.py
--- a/yt/visualization/volume_rendering/off_axis_projection.py
+++ b/yt/visualization/volume_rendering/off_axis_projection.py
@@ -19,8 +19,9 @@
 from .utils import data_source_or_all
 from yt.funcs import mylog, iterable
 from yt.utilities.lib.grid_traversal import \
-        PartitionedGrid
+    PartitionedGrid
 from yt.data_objects.api import ImageArray
+from yt.utilities.orientation import Orientation
 import numpy as np
 
 
@@ -162,6 +163,11 @@
         width = data_source.ds.arr([width]*3)
     camera.position = center - width[2]*camera.normal_vector
     camera.focus = center
+    orienter = Orientation(normal_vector=normal_vector, 
+                           north_vector=north_vector)
+    camera.switch_orientation(orienter.normal_vector,
+                              orienter.north_vector)
+    print camera.unit_vectors
     sc.camera = camera
     sc.add_source(vol)
 


https://bitbucket.org/yt_analysis/yt/commits/4e3f61458dca/
Changeset:   4e3f61458dca
Branch:      yt
User:        atmyers
Date:        2016-01-25 08:36:52+00:00
Summary:     remove debug print statement
Affected #:  1 file

diff -r 28d27a45de8ae266102e6e97712bedc129772c2d -r 4e3f61458dca3bc1052355a60f2fafbee5c4ad41 yt/visualization/volume_rendering/off_axis_projection.py
--- a/yt/visualization/volume_rendering/off_axis_projection.py
+++ b/yt/visualization/volume_rendering/off_axis_projection.py
@@ -167,7 +167,6 @@
                            north_vector=north_vector)
     camera.switch_orientation(orienter.normal_vector,
                               orienter.north_vector)
-    print camera.unit_vectors
     sc.camera = camera
     sc.add_source(vol)
 


https://bitbucket.org/yt_analysis/yt/commits/8358c77e898d/
Changeset:   8358c77e898d
Branch:      yt
User:        atmyers
Date:        2016-01-25 23:00:38+00:00
Summary:     Adding a test for the off_axis_projection orientation.
Affected #:  1 file

diff -r 4e3f61458dca3bc1052355a60f2fafbee5c4ad41 -r 8358c77e898df7d4bb70e307c33b41f491d4ed2b yt/visualization/volume_rendering/tests/test_vr_orientation.py
--- a/yt/visualization/volume_rendering/tests/test_vr_orientation.py
+++ b/yt/visualization/volume_rendering/tests/test_vr_orientation.py
@@ -16,12 +16,14 @@
 from yt import load_uniform_grid
 from yt.utilities.answer_testing.framework import \
     requires_answer_testing, \
-    VRImageComparisonTest
+    VRImageComparisonTest, \
+    GenericImageTest
 from yt.visualization.volume_rendering.api import \
     Scene, \
     Camera, \
     VolumeSource, \
-    ColorTransferFunction
+    ColorTransferFunction, \
+    off_axis_projection
 
 
 def setup_ds():
@@ -149,3 +151,12 @@
             sc.camera = cam
             yield VRImageComparisonTest(
                 sc, ds, 'roll_%s_%04d' % (lens_type, frame), decimals)
+
+    image, sc = off_axis_projection(ds,
+                                    [0.5, 0.5, 0.5],
+                                    [0.5, 0.4, 0.7],
+                                    [0.04, 0.04, 0.4],
+                                    512, "density", no_ghost=False)
+    def offaxis_image_func(filename_prefix):
+        return image.write_image(filename_prefix)
+    yield GenericImageTest(ds, offaxis_image_func, decimals)


https://bitbucket.org/yt_analysis/yt/commits/597eb8fbe552/
Changeset:   597eb8fbe552
Branch:      yt
User:        atmyers
Date:        2016-01-25 23:25:43+00:00
Summary:     do multiple normal vectors in the off_axis_projection orientation test.
Affected #:  1 file

diff -r 8358c77e898df7d4bb70e307c33b41f491d4ed2b -r 597eb8fbe552a2adf293eb78f3d3fc88c1a393b9 yt/visualization/volume_rendering/tests/test_vr_orientation.py
--- a/yt/visualization/volume_rendering/tests/test_vr_orientation.py
+++ b/yt/visualization/volume_rendering/tests/test_vr_orientation.py
@@ -152,11 +152,16 @@
             yield VRImageComparisonTest(
                 sc, ds, 'roll_%s_%04d' % (lens_type, frame), decimals)
 
-    image, sc = off_axis_projection(ds,
-                                    [0.5, 0.5, 0.5],
-                                    [0.5, 0.4, 0.7],
-                                    [0.04, 0.04, 0.4],
-                                    512, "density", no_ghost=False)
-    def offaxis_image_func(filename_prefix):
-        return image.write_image(filename_prefix)
-    yield GenericImageTest(ds, offaxis_image_func, decimals)
+    orientations = [ [1.0, 0.0, 0.0],
+                     [0.0, 1.0, 0.0],
+                     [0.0, 0.0, 1.0],
+                     [0.5, 0.4, 0.7],
+                     [-0.3, -0.1, 0.8] ]
+    center = [0.5, 0.5, 0.5]
+    width = [0.04, 0.04, 0.4]
+    for orientation in orientations:
+        image, sc = off_axis_projection(ds, center, orientation, width,
+                                        512, "density", no_ghost=False)
+        def offaxis_image_func(filename_prefix):
+            return image.write_image(filename_prefix)
+        yield GenericImageTest(ds, offaxis_image_func, decimals)


https://bitbucket.org/yt_analysis/yt/commits/bc9d1323a152/
Changeset:   bc9d1323a152
Branch:      yt
User:        atmyers
Date:        2016-01-28 07:45:59+00:00
Summary:     changing default orientation to match on-axis projections
Affected #:  1 file

diff -r 597eb8fbe552a2adf293eb78f3d3fc88c1a393b9 -r bc9d1323a1527465b5aef76b9459f4db797f6f14 yt/utilities/orientation.py
--- a/yt/utilities/orientation.py
+++ b/yt/utilities/orientation.py
@@ -74,7 +74,7 @@
         normal_vector /= np.sqrt(np.dot(normal_vector, normal_vector))
         if north_vector is None:
             vecs = np.identity(3)
-            t = np.cross(normal_vector, vecs).sum(axis=1)
+            t = np.cross(vecs, normal_vector).sum(axis=1)
             ax = t.argmax()
             east_vector = np.cross(vecs[ax, :], normal_vector).ravel()
             # self.north_vector must remain None otherwise rotations about a fixed axis will break. 


https://bitbucket.org/yt_analysis/yt/commits/bc3ca9a8242f/
Changeset:   bc3ca9a8242f
Branch:      yt
User:        atmyers
Date:        2016-01-28 07:46:29+00:00
Summary:     merging with tip
Affected #:  106 files

diff -r bc9d1323a1527465b5aef76b9459f4db797f6f14 -r bc3ca9a8242f4f17a6a8d1a1c2a2e1150f260b03 doc/extensions/notebook_sphinxext.py
--- a/doc/extensions/notebook_sphinxext.py
+++ /dev/null
@@ -1,241 +0,0 @@
-import errno
-import os
-import shutil
-import string
-import re
-import tempfile
-import uuid
-from sphinx.util.compat import Directive
-from docutils import nodes
-from docutils.parsers.rst import directives
-from IPython.config import Config
-from IPython.nbconvert import html, python
-from IPython.nbformat import current as nbformat
-from runipy.notebook_runner import NotebookRunner, NotebookError
-
-class NotebookDirective(Directive):
-    """Insert an evaluated notebook into a document
-
-    This uses runipy and nbconvert to transform a path to an unevaluated notebook
-    into html suitable for embedding in a Sphinx document.
-    """
-    required_arguments = 1
-    optional_arguments = 1
-    option_spec = {'skip_exceptions': directives.flag}
-    final_argument_whitespace = True
-
-    def run(self): # check if there are spaces in the notebook name
-        nb_path = self.arguments[0]
-        if ' ' in nb_path: raise ValueError(
-            "Due to issues with docutils stripping spaces from links, white "
-            "space is not allowed in notebook filenames '{0}'".format(nb_path))
-        # check if raw html is supported
-        if not self.state.document.settings.raw_enabled:
-            raise self.warning('"%s" directive disabled.' % self.name)
-
-        cwd = os.getcwd()
-        tmpdir = tempfile.mkdtemp()
-        os.chdir(tmpdir)
-
-        # get path to notebook
-        nb_filename = self.arguments[0]
-        nb_basename = os.path.basename(nb_filename)
-        rst_file = self.state_machine.document.attributes['source']
-        rst_dir = os.path.abspath(os.path.dirname(rst_file))
-        nb_abs_path = os.path.abspath(os.path.join(rst_dir, nb_filename))
-
-        # Move files around.
-        rel_dir = os.path.relpath(rst_dir, setup.confdir)
-        dest_dir = os.path.join(setup.app.builder.outdir, rel_dir)
-        dest_path = os.path.join(dest_dir, nb_basename)
-
-        image_dir, image_rel_dir = make_image_dir(setup, rst_dir)
-
-        # Ensure desination build directory exists
-        thread_safe_mkdir(os.path.dirname(dest_path))
-
-        # Copy unevaluated notebook
-        shutil.copyfile(nb_abs_path, dest_path)
-
-        # Construct paths to versions getting copied over
-        dest_path_eval = string.replace(dest_path, '.ipynb', '_evaluated.ipynb')
-        dest_path_script = string.replace(dest_path, '.ipynb', '.py')
-        rel_path_eval = string.replace(nb_basename, '.ipynb', '_evaluated.ipynb')
-        rel_path_script = string.replace(nb_basename, '.ipynb', '.py')
-
-        # Create python script vesion
-        script_text = nb_to_python(nb_abs_path)
-        f = open(dest_path_script, 'w')
-        f.write(script_text.encode('utf8'))
-        f.close()
-
-        skip_exceptions = 'skip_exceptions' in self.options
-
-        ret = evaluate_notebook(
-            nb_abs_path, dest_path_eval, skip_exceptions=skip_exceptions)
-
-        try:
-            evaluated_text, resources = ret
-            evaluated_text = write_notebook_output(
-                resources, image_dir, image_rel_dir, evaluated_text)
-        except ValueError:
-            # This happens when a notebook raises an unhandled exception
-            evaluated_text = ret
-
-        # Create link to notebook and script files
-        link_rst = "(" + \
-                   formatted_link(nb_basename) + "; " + \
-                   formatted_link(rel_path_eval) + "; " + \
-                   formatted_link(rel_path_script) + \
-                   ")"
-
-        self.state_machine.insert_input([link_rst], rst_file)
-
-        # create notebook node
-        attributes = {'format': 'html', 'source': 'nb_path'}
-        nb_node = notebook_node('', evaluated_text, **attributes)
-        (nb_node.source, nb_node.line) = \
-            self.state_machine.get_source_and_line(self.lineno)
-
-        # add dependency
-        self.state.document.settings.record_dependencies.add(nb_abs_path)
-
-        # clean up
-        os.chdir(cwd)
-        shutil.rmtree(tmpdir, True)
-
-        return [nb_node]
-
-
-class notebook_node(nodes.raw):
-    pass
-
-def nb_to_python(nb_path):
-    """convert notebook to python script"""
-    exporter = python.PythonExporter()
-    output, resources = exporter.from_filename(nb_path)
-    return output
-
-def nb_to_html(nb_path):
-    """convert notebook to html"""
-    c = Config({'ExtractOutputPreprocessor':{'enabled':True}})
-
-    exporter = html.HTMLExporter(template_file='full', config=c)
-    notebook = nbformat.read(open(nb_path), 'json')
-    output, resources = exporter.from_notebook_node(notebook)
-    header = output.split('<head>', 1)[1].split('</head>',1)[0]
-    body = output.split('<body>', 1)[1].split('</body>',1)[0]
-
-    # http://imgur.com/eR9bMRH
-    header = header.replace('<style', '<style scoped="scoped"')
-    header = header.replace('body {\n  overflow: visible;\n  padding: 8px;\n}\n',
-                            '')
-    header = header.replace("code,pre{", "code{")
-
-    # Filter out styles that conflict with the sphinx theme.
-    filter_strings = [
-        'navbar',
-        'body{',
-        'alert{',
-        'uneditable-input{',
-        'collapse{',
-    ]
-
-    filter_strings.extend(['h%s{' % (i+1) for i in range(6)])
-
-    line_begin = [
-        'pre{',
-        'p{margin'
-    ]
-
-    filterfunc = lambda x: not any([s in x for s in filter_strings])
-    header_lines = filter(filterfunc, header.split('\n'))
-
-    filterfunc = lambda x: not any([x.startswith(s) for s in line_begin])
-    header_lines = filter(filterfunc, header_lines)
-
-    header = '\n'.join(header_lines)
-
-    # concatenate raw html lines
-    lines = ['<div class="ipynotebook">']
-    lines.append(header)
-    lines.append(body)
-    lines.append('</div>')
-    return '\n'.join(lines), resources
-
-def evaluate_notebook(nb_path, dest_path=None, skip_exceptions=False):
-    # Create evaluated version and save it to the dest path.
-    notebook = nbformat.read(open(nb_path), 'json')
-    nb_runner = NotebookRunner(notebook, pylab=False)
-    try:
-        nb_runner.run_notebook(skip_exceptions=skip_exceptions)
-    except NotebookError as e:
-        print('')
-        print(e)
-        # Return the traceback, filtering out ANSI color codes.
-        # http://stackoverflow.com/questions/13506033/filtering-out-ansi-escape-sequences
-        return "Notebook conversion failed with the " \
-               "following traceback: \n%s" % \
-            re.sub(r'\\033[\[\]]([0-9]{1,2}([;@][0-9]{0,2})*)*[mKP]?', '',
-                   str(e))
-
-    if dest_path is None:
-        dest_path = 'temp_evaluated.ipynb'
-    nbformat.write(nb_runner.nb, open(dest_path, 'w'), 'json')
-    ret = nb_to_html(dest_path)
-    if dest_path is 'temp_evaluated.ipynb':
-        os.remove(dest_path)
-    return ret
-
-def formatted_link(path):
-    return "`%s <%s>`__" % (os.path.basename(path), path)
-
-def visit_notebook_node(self, node):
-    self.visit_raw(node)
-
-def depart_notebook_node(self, node):
-    self.depart_raw(node)
-
-def setup(app):
-    setup.app = app
-    setup.config = app.config
-    setup.confdir = app.confdir
-
-    app.add_node(notebook_node,
-                 html=(visit_notebook_node, depart_notebook_node))
-
-    app.add_directive('notebook', NotebookDirective)
-
-    retdict = dict(
-        version='0.1',
-        parallel_read_safe=True,
-        parallel_write_safe=True
-    )
-
-    return retdict
-
-def make_image_dir(setup, rst_dir):
-    image_dir = setup.app.builder.outdir + os.path.sep + '_images'
-    rel_dir = os.path.relpath(setup.confdir, rst_dir)
-    image_rel_dir = rel_dir + os.path.sep + '_images'
-    thread_safe_mkdir(image_dir)
-    return image_dir, image_rel_dir
-
-def write_notebook_output(resources, image_dir, image_rel_dir, evaluated_text):
-    my_uuid = uuid.uuid4().hex
-
-    for output in resources['outputs']:
-        new_name = image_dir + os.path.sep + my_uuid + output
-        new_relative_name = image_rel_dir + os.path.sep + my_uuid + output
-        evaluated_text = evaluated_text.replace(output, new_relative_name)
-        with open(new_name, 'wb') as f:
-            f.write(resources['outputs'][output])
-    return evaluated_text
-
-def thread_safe_mkdir(dirname):
-    try:
-        os.makedirs(dirname)
-    except OSError as e:
-        if e.errno != errno.EEXIST:
-            raise
-        pass

diff -r bc9d1323a1527465b5aef76b9459f4db797f6f14 -r bc3ca9a8242f4f17a6a8d1a1c2a2e1150f260b03 doc/extensions/notebookcell_sphinxext.py
--- a/doc/extensions/notebookcell_sphinxext.py
+++ /dev/null
@@ -1,87 +0,0 @@
-import os
-import shutil
-import io
-import tempfile
-from sphinx.util.compat import Directive
-from docutils.parsers.rst import directives
-from IPython.nbformat import current
-from notebook_sphinxext import \
-    notebook_node, visit_notebook_node, depart_notebook_node, \
-    evaluate_notebook, make_image_dir, write_notebook_output
-
-
-class NotebookCellDirective(Directive):
-    """Insert an evaluated notebook cell into a document
-
-    This uses runipy and nbconvert to transform an inline python
-    script into html suitable for embedding in a Sphinx document.
-    """
-    required_arguments = 0
-    optional_arguments = 1
-    has_content = True
-    option_spec = {'skip_exceptions': directives.flag}
-
-    def run(self):
-        # check if raw html is supported
-        if not self.state.document.settings.raw_enabled:
-            raise self.warning('"%s" directive disabled.' % self.name)
-
-        cwd = os.getcwd()
-        tmpdir = tempfile.mkdtemp()
-        os.chdir(tmpdir)
-
-        rst_file = self.state_machine.document.attributes['source']
-        rst_dir = os.path.abspath(os.path.dirname(rst_file))
-
-        image_dir, image_rel_dir = make_image_dir(setup, rst_dir)
-
-        # Construct notebook from cell content
-        content = "\n".join(self.content)
-        with open("temp.py", "w") as f:
-            f.write(content)
-
-        convert_to_ipynb('temp.py', 'temp.ipynb')
-
-        skip_exceptions = 'skip_exceptions' in self.options
-
-        evaluated_text, resources = evaluate_notebook(
-            'temp.ipynb', skip_exceptions=skip_exceptions)
-
-        evaluated_text = write_notebook_output(
-            resources, image_dir, image_rel_dir, evaluated_text)
-
-        # create notebook node
-        attributes = {'format': 'html', 'source': 'nb_path'}
-        nb_node = notebook_node('', evaluated_text, **attributes)
-        (nb_node.source, nb_node.line) = \
-            self.state_machine.get_source_and_line(self.lineno)
-
-        # clean up
-        os.chdir(cwd)
-        shutil.rmtree(tmpdir, True)
-
-        return [nb_node]
-
-def setup(app):
-    setup.app = app
-    setup.config = app.config
-    setup.confdir = app.confdir
-
-    app.add_node(notebook_node,
-                 html=(visit_notebook_node, depart_notebook_node))
-
-    app.add_directive('notebook-cell', NotebookCellDirective)
-
-    retdict = dict(
-        version='0.1',
-        parallel_read_safe=True,
-        parallel_write_safe=True
-    )
-
-    return retdict
-
-def convert_to_ipynb(py_file, ipynb_file):
-    with io.open(py_file, 'r', encoding='utf-8') as f:
-        notebook = current.reads(f.read(), format='py')
-    with io.open(ipynb_file, 'w', encoding='utf-8') as f:
-        current.write(notebook, f, format='ipynb')

diff -r bc9d1323a1527465b5aef76b9459f4db797f6f14 -r bc3ca9a8242f4f17a6a8d1a1c2a2e1150f260b03 doc/extensions/pythonscript_sphinxext.py
--- a/doc/extensions/pythonscript_sphinxext.py
+++ b/doc/extensions/pythonscript_sphinxext.py
@@ -4,9 +4,9 @@
 import shutil
 import subprocess
 import uuid
+import errno
 from sphinx.util.compat import Directive
 from docutils import nodes
-from notebook_sphinxext import make_image_dir
 
 
 class PythonScriptDirective(Directive):
@@ -82,3 +82,20 @@
     shutil.move(filename, image_dir + os.path.sep + my_uuid + filename)
     relative_filename = image_rel_dir + os.path.sep + my_uuid + filename
     return '<img src="%s" width="600"><br>' % relative_filename
+
+
+def make_image_dir(setup, rst_dir):
+    image_dir = setup.app.builder.outdir + os.path.sep + '_images'
+    rel_dir = os.path.relpath(setup.confdir, rst_dir)
+    image_rel_dir = rel_dir + os.path.sep + '_images'
+    thread_safe_mkdir(image_dir)
+    return image_dir, image_rel_dir
+
+
+def thread_safe_mkdir(dirname):
+    try:
+        os.makedirs(dirname)
+    except OSError as e:
+        if e.errno != errno.EEXIST:
+            raise
+        pass

diff -r bc9d1323a1527465b5aef76b9459f4db797f6f14 -r bc3ca9a8242f4f17a6a8d1a1c2a2e1150f260b03 doc/get_yt.sh
--- a/doc/get_yt.sh
+++ b/doc/get_yt.sh
@@ -23,7 +23,8 @@
 DEST_SUFFIX="yt-conda"
 DEST_DIR="`pwd`/${DEST_SUFFIX/ /}"   # Installation location
 BRANCH="yt" # This is the branch to which we will forcibly update.
-INST_YT_SOURCE=0 # Do we do a source install of yt?
+INST_YT_SOURCE=1 # Do we do a source install of yt?
+INST_UNSTRUCTURED=1 # Do we want to build with unstructured mesh support?
 
 ##################################################################
 #                                                                #
@@ -40,6 +41,25 @@
 MINICONDA_VERSION="latest"
 YT_RECIPE_REPO="https://bitbucket.org/yt_analysis/yt_conda/raw/default"
 
+if [ $INST_UNSTRUCTURED -eq 1 ]
+then
+  if [ $INST_YT_SOURCE -eq 0 ]
+  then
+      echo "yt must be compiled from source to use the unstructured mesh support."
+      echo "Please set INST_YT_SOURCE to 1 and re-run."
+      exit 1
+  fi
+  if [ `uname` = "Darwin" ]
+  then
+      EMBREE="embree-2.8.0.x86_64.macosx"
+      EMBREE_URL="https://github.com/embree/embree/releases/download/v2.8.0/$EMBREE.tar.gz"
+  else
+      EMBREE="embree-2.8.0.x86_64.linux"
+      EMBREE_URL="https://github.com/embree/embree/releases/download/v2.8.0/$EMBREE.tar.gz"
+  fi
+  PYEMBREE_URL="https://github.com/scopatz/pyembree/archive/master.zip"
+fi
+
 function do_exit
 {
     echo "********************************************"
@@ -276,6 +296,11 @@
 YT_DEPS+=('mercurial')
 YT_DEPS+=('sympy')
 
+if [ $INST_UNSTRUCTURED -eq 1 ]
+then
+  YT_DEPS+=('netcdf4')   
+fi
+
 # Here is our dependency list for yt
 log_cmd conda update --yes conda
 
@@ -285,6 +310,32 @@
     log_cmd conda install --yes ${YT_DEP}
 done
 
+if [ $INST_UNSTRUCTURED -eq 1 ]
+then
+
+  echo "Installing embree"
+  mkdir ${DEST_DIR}/src
+  cd ${DEST_DIR}/src
+  ( ${GETFILE} "$EMBREE_URL" 2>&1 ) 1>> ${LOG_FILE} || do_exit
+  log_cmd tar xfz ${EMBREE}.tar.gz
+  log_cmd mv ${DEST_DIR}/src/${EMBREE}/include/embree2 ${DEST_DIR}/include
+  log_cmd mv ${DEST_DIR}/src/${EMBREE}/lib/lib*.* ${DEST_DIR}/lib
+  if [ `uname` = "Darwin" ]
+  then
+    ln -s ${DEST_DIR}/lib/libembree.2.dylib ${DEST_DIR}/lib/libembree.dylib
+    install_name_tool -id ${DEST_DIR}/lib/libembree.2.dylib ${DEST_DIR}/lib/libembree.2.dylib
+  else
+    ln -s ${DEST_DIR}/lib/libembree.so.2 ${DEST_DIR}/lib/libembree.so
+  fi
+
+  echo "Installing pyembree from source"
+  ( ${GETFILE} "$PYEMBREE_URL" 2>&1 ) 1>> ${LOG_FILE} || do_exit
+  log_cmd unzip ${DEST_DIR}/src/master.zip
+  pushd ${DEST_DIR}/src/pyembree-master
+  log_cmd python setup.py install build_ext -I${DEST_DIR}/include -L${DEST_DIR}/lib
+  popd
+fi
+
 if [ $INST_YT_SOURCE -eq 0 ]
 then
   echo "Installing yt"
@@ -294,6 +345,10 @@
     echo "Installing yt from source"
     YT_DIR="${DEST_DIR}/src/yt-hg"
     log_cmd hg clone -r ${BRANCH} https://bitbucket.org/yt_analysis/yt ${YT_DIR}
+if [ $INST_UNSTRUCTURED -eq 1 ]
+then
+    echo $DEST_DIR > ${YT_DIR}/embree.cfg
+fi
     pushd ${YT_DIR}
     log_cmd python setup.py develop
     popd
@@ -310,7 +365,7 @@
 echo "   $DEST_DIR/bin"
 echo
 echo "On Bash-style shells you can copy/paste the following command to "
-echo "temporarily activate the yt installtion:"
+echo "temporarily activate the yt installation:"
 echo
 echo "    export PATH=$DEST_DIR/bin:\$PATH"
 echo

diff -r bc9d1323a1527465b5aef76b9459f4db797f6f14 -r bc3ca9a8242f4f17a6a8d1a1c2a2e1150f260b03 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -440,10 +440,6 @@
 get_willwont ${INST_SCIPY}
 echo "be installing scipy"
 
-printf "%-15s = %s so I " "INST_0MQ" "${INST_0MQ}"
-get_willwont ${INST_0MQ}
-echo "be installing ZeroMQ"
-
 printf "%-15s = %s so I " "INST_ROCKSTAR" "${INST_ROCKSTAR}"
 get_willwont ${INST_ROCKSTAR}
 echo "be installing Rockstar"
@@ -627,7 +623,6 @@
 FREETYPE_VER='freetype-2.4.12' 
 H5PY='h5py-2.5.0'
 HDF5='hdf5-1.8.14' 
-IPYTHON='ipython-2.4.1'
 LAPACK='lapack-3.4.2'
 PNG=libpng-1.6.3
 MATPLOTLIB='matplotlib-1.4.3'
@@ -635,13 +630,10 @@
 NOSE='nose-1.3.6'
 NUMPY='numpy-1.9.2'
 PYTHON_HGLIB='python-hglib-1.6'
-PYZMQ='pyzmq-14.5.0'
 ROCKSTAR='rockstar-0.99.6'
 SCIPY='scipy-0.15.1'
 SQLITE='sqlite-autoconf-3071700'
 SYMPY='sympy-0.7.6'
-TORNADO='tornado-4.0.2'
-ZEROMQ='zeromq-4.0.5'
 ZLIB='zlib-1.2.8'
 SETUPTOOLS='setuptools-18.0.1'
 
@@ -655,7 +647,6 @@
 echo '609a68a3675087e0cc95268574f31e104549daa48efe15a25a33b8e269a93b4bd160f4c3e8178dca9c950ef5ca514b039d6fd1b45db6af57f25342464d0429ce  freetype-2.4.12.tar.gz' > freetype-2.4.12.tar.gz.sha512
 echo '4a83f9ae1855a7fad90133b327d426201c8ccfd2e7fbe9f39b2d61a2eee2f3ebe2ea02cf80f3d4e1ad659f8e790c173df8cc99b87d0b7ce63d34aa88cfdc7939  h5py-2.5.0.tar.gz' > h5py-2.5.0.tar.gz.sha512
 echo '4073fba510ccadaba41db0939f909613c9cb52ba8fb6c1062fc9118edc601394c75e102310be1af4077d07c9b327e6bbb1a6359939a7268dc140382d0c1e0199  hdf5-1.8.14.tar.gz' > hdf5-1.8.14.tar.gz.sha512
-echo 'a9cffc08ba10c47b0371b05664e55eee0562a30ef0d4bbafae79e52e5b9727906c45840c0918122c06c5672ac65e6eb381399f103e1a836aca003eda81b2acde  ipython-2.4.1.tar.gz' > ipython-2.4.1.tar.gz.sha512
 echo '8770214491e31f0a7a3efaade90eee7b0eb20a8a6ab635c5f854d78263f59a1849133c14ef5123d01023f0110cbb9fc6f818da053c01277914ae81473430a952  lapack-3.4.2.tar.gz' > lapack-3.4.2.tar.gz.sha512
 echo '887582e5a22e4cde338aa8fec7a89f6dd31f2f02b8842735f00f970f64582333fa03401cea6d01704083403c7e8b7ebc26655468ce930165673b33efa4bcd586  libpng-1.6.3.tar.gz' > libpng-1.6.3.tar.gz.sha512
 echo '51b0f58b2618b47b653e17e4f6b6a1215d3a3b0f1331ce3555cc7435e365d9c75693f289ce12fe3bf8f69fd57b663e545f0f1c2c94e81eaa661cac0689e125f5  matplotlib-1.4.3.tar.gz' > matplotlib-1.4.3.tar.gz.sha512
@@ -663,12 +654,9 @@
 echo 'd0cede08dc33a8ac0af0f18063e57f31b615f06e911edb5ca264575174d8f4adb4338448968c403811d9dcc60f38ade3164662d6c7b69b499f56f0984bb6283c  nose-1.3.6.tar.gz' > nose-1.3.6.tar.gz.sha512
 echo '70470ebb9afef5dfd0c83ceb7a9d5f1b7a072b1a9b54b04f04f5ed50fbaedd5b4906bd500472268d478f94df9e749a88698b1ff30f2d80258e7f3fec040617d9  numpy-1.9.2.tar.gz' > numpy-1.9.2.tar.gz.sha512
 echo 'bfd10455e74e30df568c4c4827140fb6cc29893b0e062ce1764bd52852ec7487a70a0f5ea53c3fca7886f5d36365c9f4db52b8c93cad35fb67beeb44a2d56f2d  python-hglib-1.6.tar.gz' > python-hglib-1.6.tar.gz.sha512
-echo '20164f7b05c308e0f089c07fc46b1c522094f3ac136f2e0bba84f19cb63dfd36152a2465df723dd4d93c6fbd2de4f0d94c160e2bbc353a92cfd680eb03cbdc87  pyzmq-14.5.0.tar.gz' > pyzmq-14.5.0.tar.gz.sha512
 echo 'fff4412d850c431a1b4e6ee3b17958ee5ab3beb81e6cb8a8e7d56d368751eaa8781d7c3e69d932dc002d718fddc66a72098acfe74cfe29ec80b24e6736317275  scipy-0.15.1.tar.gz' > scipy-0.15.1.tar.gz.sha512
 echo '96f3e51b46741450bc6b63779c10ebb4a7066860fe544385d64d1eda52592e376a589ef282ace2e1df73df61c10eab1a0d793abbdaf770e60289494d4bf3bcb4  sqlite-autoconf-3071700.tar.gz' > sqlite-autoconf-3071700.tar.gz.sha512
 echo 'ce0f1a17ac01eb48aec31fc0ad431d9d7ed9907f0e8584a6d79d0ffe6864fe62e203fe3f2a3c3e4e3d485809750ce07507a6488e776a388a7a9a713110882fcf  sympy-0.7.6.tar.gz' > sympy-0.7.6.tar.gz.sha512
-echo '93591068dc63af8d50a7925d528bc0cccdd705232c529b6162619fe28dddaf115e8a460b1842877d35160bd7ed480c1bd0bdbec57d1f359085bd1814e0c1c242  tornado-4.0.2.tar.gz' > tornado-4.0.2.tar.gz.sha512
-echo '0d928ed688ed940d460fa8f8d574a9819dccc4e030d735a8c7db71b59287ee50fa741a08249e356c78356b03c2174f2f2699f05aa7dc3d380ed47d8d7bab5408  zeromq-4.0.5.tar.gz' > zeromq-4.0.5.tar.gz.sha512
 echo 'ece209d4c7ec0cb58ede791444dc754e0d10811cbbdebe3df61c0fd9f9f9867c1c3ccd5f1827f847c005e24eef34fb5bf87b5d3f894d75da04f1797538290e4a  zlib-1.2.8.tar.gz' > zlib-1.2.8.tar.gz.sha512
 echo '9b318ce2ee2cf787929dcb886d76c492b433e71024fda9452d8b4927652a298d6bd1bdb7a4c73883a98e100024f89b46ea8aa14b250f896e549e6dd7e10a6b41  setuptools-18.0.1.tar.gz' > setuptools-18.0.1.tar.gz.sha512
 # Individual processes
@@ -679,9 +667,6 @@
 [ $INST_FTYPE -eq 1 ] && get_ytproject $FREETYPE_VER.tar.gz
 [ $INST_SQLITE3 -eq 1 ] && get_ytproject $SQLITE.tar.gz
 [ $INST_PYX -eq 1 ] && get_ytproject $PYX.tar.gz
-[ $INST_0MQ -eq 1 ] && get_ytproject $ZEROMQ.tar.gz
-[ $INST_0MQ -eq 1 ] && get_ytproject $PYZMQ.tar.gz
-[ $INST_0MQ -eq 1 ] && get_ytproject $TORNADO.tar.gz
 [ $INST_SCIPY -eq 1 ] && get_ytproject $SCIPY.tar.gz
 [ $INST_SCIPY -eq 1 ] && get_ytproject blas.tar.gz
 [ $INST_SCIPY -eq 1 ] && get_ytproject $LAPACK.tar.gz
@@ -690,7 +675,6 @@
 get_ytproject $PYTHON2.tgz
 get_ytproject $NUMPY.tar.gz
 get_ytproject $MATPLOTLIB.tar.gz
-get_ytproject $IPYTHON.tar.gz
 get_ytproject $H5PY.tar.gz
 get_ytproject $CYTHON.tar.gz
 get_ytproject $NOSE.tar.gz
@@ -976,25 +960,9 @@
 [ -n "${OLD_CXXFLAGS}" ] && export CXXFLAGS=${OLD_CXXFLAGS}
 [ -n "${OLD_CFLAGS}" ] && export CFLAGS=${OLD_CFLAGS}
 
-# Now we do our IPython installation, which has two optional dependencies.
-if [ $INST_0MQ -eq 1 ]
-then
-    if [ ! -e $ZEROMQ/done ]
-    then
-        [ ! -e $ZEROMQ ] && tar xfz $ZEROMQ.tar.gz
-        echo "Installing ZeroMQ"
-        cd $ZEROMQ
-        ( ./configure --prefix=${DEST_DIR}/ 2>&1 ) 1>> ${LOG_FILE} || do_exit
-        ( make install 2>&1 ) 1>> ${LOG_FILE} || do_exit
-        ( make clean 2>&1) 1>> ${LOG_FILE} || do_exit
-        touch done
-        cd ..
-    fi
-    do_setup_py $PYZMQ --zmq=${DEST_DIR}
-    do_setup_py $TORNADO
-fi
+echo "Installing Jupyter"
+( ${DEST_DIR}/bin/pip install "jupyter<2.0.0" 2>&1 ) 1>> ${LOG_FILE}
 
-do_setup_py $IPYTHON
 do_setup_py $CYTHON
 do_setup_py $H5PY
 do_setup_py $NOSE

diff -r bc9d1323a1527465b5aef76b9459f4db797f6f14 -r bc3ca9a8242f4f17a6a8d1a1c2a2e1150f260b03 doc/source/analyzing/_static/axes_calculator.pyx
--- a/doc/source/analyzing/_static/axes_calculator.pyx
+++ b/doc/source/analyzing/_static/axes_calculator.pyx
@@ -1,7 +1,7 @@
 import numpy as np
 cimport numpy as np
 cimport cython
-from stdlib cimport malloc, free
+from libc.stdlib cimport malloc, free
 
 cdef extern from "axes.h":
     ctypedef struct ParticleCollection:
@@ -16,7 +16,9 @@
 def examine_axes(np.ndarray[np.float64_t, ndim=1] xpos,
                  np.ndarray[np.float64_t, ndim=1] ypos,
                  np.ndarray[np.float64_t, ndim=1] zpos):
-    cdef double ax1[3], ax2[3], ax3[3]
+    cdef double ax1[3]
+    cdef double ax2[3]
+    cdef double ax3[3]
     cdef ParticleCollection particles
     cdef int i
 

diff -r bc9d1323a1527465b5aef76b9459f4db797f6f14 -r bc3ca9a8242f4f17a6a8d1a1c2a2e1150f260b03 doc/source/analyzing/analysis_modules/PPVCube.ipynb
--- a/doc/source/analyzing/analysis_modules/PPVCube.ipynb
+++ b/doc/source/analyzing/analysis_modules/PPVCube.ipynb
@@ -1,423 +1,455 @@
 {
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Detailed spectra of astrophysical objects sometimes allow for determinations of how much of the gas is moving with a certain velocity along the line of sight, thanks to Doppler shifting of spectral lines. This enables \"data cubes\" to be created in RA, Dec, and line-of-sight velocity space. In yt, we can use the `PPVCube` analysis module to project fields along a given line of sight traveling at different line-of-sight velocities, to \"mock-up\" what would be seen in observations."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "from yt.config import ytcfg\n",
+    "\n",
+    "import yt\n",
+    "import numpy as np\n",
+    "from yt.analysis_modules.ppv_cube.api import PPVCube\n",
+    "import yt.units as u"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "To demonstrate this functionality, we'll create a simple unigrid dataset from scratch of a rotating disk. We create a thin disk in the x-y midplane of the domain of three cells in height in either direction, and a radius of 10 kpc. The density and azimuthal velocity profiles of the disk as a function of radius will be given by the following functions:"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Density: $\\rho(r) \\propto r^{\\alpha}$"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Velocity: $v_{\\theta}(r) \\propto \\frac{r}{1+(r/r_0)^{\\beta}}$"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "where for simplicity we won't worry about the normalizations of these profiles. "
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "First, we'll set up the grid and the parameters of the profiles:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# increasing the resolution will make the images in this notebook more visually appealing\n",
+    "nx,ny,nz = (64, 64, 64) # domain dimensions\n",
+    "R = 10. # outer radius of disk, kpc\n",
+    "r_0 = 3. # scale radius, kpc\n",
+    "beta = 1.4 # for the tangential velocity profile\n",
+    "alpha = -1. # for the radial density profile\n",
+    "x, y = np.mgrid[-R:R:nx*1j,-R:R:ny*1j] # cartesian coordinates of x-y plane of disk\n",
+    "r = np.sqrt(x*x+y*y) # polar coordinates\n",
+    "theta = np.arctan2(y, x) # polar coordinates"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Second, we'll construct the data arrays for the density, temperature, and velocity of the disk. Since we have the tangential velocity profile, we have to use the polar coordinates we derived earlier to compute `velx` and `vely`. Everywhere outside the disk, all fields are set to zero.  "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "dens = np.zeros((nx,ny,nz))\n",
+    "dens[:,:,nz/2-3:nz/2+3] = (r**alpha).reshape(nx,ny,1) # the density profile of the disk\n",
+    "temp = np.zeros((nx,ny,nz))\n",
+    "temp[:,:,nz/2-3:nz/2+3] = 1.0e5 # Isothermal\n",
+    "vel_theta = 100.*r/(1.+(r/r_0)**beta) # the azimuthal velocity profile of the disk\n",
+    "velx = np.zeros((nx,ny,nz))\n",
+    "vely = np.zeros((nx,ny,nz))\n",
+    "velx[:,:,nz/2-3:nz/2+3] = (-vel_theta*np.sin(theta)).reshape(nx,ny,1) # convert polar to cartesian\n",
+    "vely[:,:,nz/2-3:nz/2+3] = (vel_theta*np.cos(theta)).reshape(nx,ny,1) # convert polar to cartesian\n",
+    "dens[r > R] = 0.0\n",
+    "temp[r > R] = 0.0\n",
+    "velx[r > R] = 0.0\n",
+    "vely[r > R] = 0.0"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Finally, we'll package these data arrays up into a dictionary, which will then be shipped off to `load_uniform_grid`. We'll define the width of the grid to be `2*R` kpc, which will be equal to 1  `code_length`. "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "data = {}\n",
+    "data[\"density\"] = (dens,\"g/cm**3\")\n",
+    "data[\"temperature\"] = (temp, \"K\")\n",
+    "data[\"velocity_x\"] = (velx, \"km/s\")\n",
+    "data[\"velocity_y\"] = (vely, \"km/s\")\n",
+    "data[\"velocity_z\"] = (np.zeros((nx,ny,nz)), \"km/s\") # zero velocity in the z-direction\n",
+    "bbox = np.array([[-0.5,0.5],[-0.5,0.5],[-0.5,0.5]]) # bbox of width 1 on a side with center (0,0,0)\n",
+    "ds = yt.load_uniform_grid(data, (nx,ny,nz), length_unit=(2*R,\"kpc\"), nprocs=1, bbox=bbox)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "To get a sense of what the data looks like, we'll take a slice through the middle of the disk:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "slc = yt.SlicePlot(ds, \"z\", [\"density\",\"velocity_x\",\"velocity_y\",\"velocity_magnitude\"])"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "slc.set_log(\"velocity_x\", False)\n",
+    "slc.set_log(\"velocity_y\", False)\n",
+    "slc.set_log(\"velocity_magnitude\", False)\n",
+    "slc.set_unit(\"velocity_magnitude\", \"km/s\")\n",
+    "slc.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Which shows a rotating disk with a specific density and velocity profile. Now, suppose we wanted to look at this disk galaxy from a certain orientation angle, and simulate a 3D FITS data cube where we can see the gas that is emitting at different velocities along the line of sight. We can do this using the `PPVCube` class. First, let's assume we rotate our viewing angle 60 degrees from face-on, from along the z-axis into the x-axis. We'll create a normal vector:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "i = 60.*np.pi/180.\n",
+    "L = [np.sin(i),0.0,np.cos(i)]"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Next, we need to specify a field that will serve as the \"intensity\" of the emission that we see. For simplicity, we'll simply choose the gas density as this field, though it could be any field (including derived fields) in principle. We also need to choose the bounds in line-of-sight velocity that the data will be binned into, which is a 4-tuple in the shape of `(vmin, vmax, nbins, units)`, which specifies a linear range of `nbins` velocity bins from `vmin` to `vmax` in units of `units`. We may also optionally specify the dimensions of the data cube with the `dims` argument."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false,
+    "scrolled": true
+   },
+   "outputs": [],
+   "source": [
+    "cube = PPVCube(ds, L, \"density\", (-150.,150.,50,\"km/s\"), dims=200, method=\"sum\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Following this, we can now write this cube to a FITS file. The x and y axes of the file can be in length units, which can be optionally specified by `length_unit`:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "cube.write_fits(\"cube.fits\", clobber=True, length_unit=\"kpc\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Or one can use the `sky_scale` and `sky_center` keywords to set up the coordinates in RA and Dec:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "sky_scale = (1.0, \"arcsec/kpc\")\n",
+    "sky_center = (30., 45.) # RA, Dec in degrees\n",
+    "cube.write_fits(\"cube_sky.fits\", clobber=True, sky_scale=sky_scale, sky_center=sky_center)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Now, we'll look at the FITS dataset in yt and look at different slices along the velocity axis, which is the \"z\" axis:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "ds_cube = yt.load(\"cube.fits\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# Specifying no center gives us the center slice\n",
+    "slc = yt.SlicePlot(ds_cube, \"z\", [\"density\"])\n",
+    "slc.show()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# Picking different velocities for the slices\n",
+    "new_center = ds_cube.domain_center\n",
+    "new_center[2] = ds_cube.spec2pixel(-100.*u.km/u.s)\n",
+    "slc = yt.SlicePlot(ds_cube, \"z\", [\"density\"], center=new_center)\n",
+    "slc.show()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "new_center[2] = ds_cube.spec2pixel(70.0*u.km/u.s)\n",
+    "slc = yt.SlicePlot(ds_cube, \"z\", [\"density\"], center=new_center)\n",
+    "slc.show()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "new_center[2] = ds_cube.spec2pixel(-30.0*u.km/u.s)\n",
+    "slc = yt.SlicePlot(ds_cube, \"z\", [\"density\"], center=new_center)\n",
+    "slc.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "If we project all the emission at all the different velocities along the z-axis, we recover the entire disk:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "prj = yt.ProjectionPlot(ds_cube, \"z\", [\"density\"], method=\"sum\")\n",
+    "prj.set_log(\"density\", True)\n",
+    "prj.set_zlim(\"density\", 1.0e-3, 0.2)\n",
+    "prj.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The `thermal_broad` keyword allows one to simulate thermal line broadening based on the temperature, and the `atomic_weight` argument is used to specify the atomic weight of the particle that is doing the emitting."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "cube2 = PPVCube(ds, L, \"density\", (-150.,150.,50,\"km/s\"), dims=200, thermal_broad=True, \n",
+    "                atomic_weight=12.0, method=\"sum\")\n",
+    "cube2.write_fits(\"cube2.fits\", clobber=True, length_unit=\"kpc\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Taking a slice of this cube shows:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "ds_cube2 = yt.load(\"cube2.fits\")\n",
+    "new_center = ds_cube2.domain_center\n",
+    "new_center[2] = ds_cube2.spec2pixel(70.0*u.km/u.s)\n",
+    "slc = yt.SlicePlot(ds_cube2, \"z\", [\"density\"], center=new_center)\n",
+    "slc.show()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "new_center[2] = ds_cube2.spec2pixel(-100.*u.km/u.s)\n",
+    "slc = yt.SlicePlot(ds_cube2, \"z\", [\"density\"], center=new_center)\n",
+    "slc.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "where we can see the emission has been smeared into this velocity slice from neighboring slices due to the thermal broadening. \n",
+    "\n",
+    "Finally, the \"velocity\" or \"spectral\" axis of the cube can be changed to a different unit, such as wavelength, frequency, or energy: "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "print (cube2.vbins[0], cube2.vbins[-1])\n",
+    "cube2.transform_spectral_axis(400.0,\"nm\")\n",
+    "print (cube2.vbins[0], cube2.vbins[-1])"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "If a FITS file is now written from the cube, the spectral axis will be in the new units. To reset the spectral axis back to the original velocity units:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "cube2.reset_spectral_axis()\n",
+    "print (cube2.vbins[0], cube2.vbins[-1])"
+   ]
+  }
+ ],
  "metadata": {
-  "name": "",
-  "signature": "sha256:67e4297cbc32716b2481c71659305687cb5bdadad648a0acf6b48960267bb069"
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.5.1"
+  }
  },
- "nbformat": 3,
- "nbformat_minor": 0,
- "worksheets": [
-  {
-   "cells": [
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Detailed spectra of astrophysical objects sometimes allow for determinations of how much of the gas is moving with a certain velocity along the line of sight, thanks to Doppler shifting of spectral lines. This enables \"data cubes\" to be created in RA, Dec, and line-of-sight velocity space. In yt, we can use the `PPVCube` analysis module to project fields along a given line of sight traveling at different line-of-sight velocities, to \"mock-up\" what would be seen in observations."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "from yt.config import ytcfg\n",
-      "ytcfg[\"yt\",\"loglevel\"] = 30\n",
-      "\n",
-      "import yt\n",
-      "import numpy as np\n",
-      "from yt.analysis_modules.ppv_cube.api import PPVCube\n",
-      "import yt.units as u"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "To demonstrate this functionality, we'll create a simple unigrid dataset from scratch of a rotating disk. We create a thin disk in the x-y midplane of the domain of three cells in height in either direction, and a radius of 10 kpc. The density and azimuthal velocity profiles of the disk as a function of radius will be given by the following functions:"
-     ]
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Density: $\\rho(r) \\propto r^{\\alpha}$"
-     ]
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Velocity: $v_{\\theta}(r) \\propto \\frac{r}{1+(r/r_0)^{\\beta}}$"
-     ]
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "where for simplicity we won't worry about the normalizations of these profiles. "
-     ]
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "First, we'll set up the grid and the parameters of the profiles:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "nx,ny,nz = (256,256,256) # domain dimensions\n",
-      "R = 10. # outer radius of disk, kpc\n",
-      "r_0 = 3. # scale radius, kpc\n",
-      "beta = 1.4 # for the tangential velocity profile\n",
-      "alpha = -1. # for the radial density profile\n",
-      "x, y = np.mgrid[-R:R:nx*1j,-R:R:ny*1j] # cartesian coordinates of x-y plane of disk\n",
-      "r = np.sqrt(x*x+y*y) # polar coordinates\n",
-      "theta = np.arctan2(y, x) # polar coordinates"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Second, we'll construct the data arrays for the density, temperature, and velocity of the disk. Since we have the tangential velocity profile, we have to use the polar coordinates we derived earlier to compute `velx` and `vely`. Everywhere outside the disk, all fields are set to zero.  "
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "dens = np.zeros((nx,ny,nz))\n",
-      "dens[:,:,nz/2-3:nz/2+3] = (r**alpha).reshape(nx,ny,1) # the density profile of the disk\n",
-      "temp = np.zeros((nx,ny,nz))\n",
-      "temp[:,:,nz/2-3:nz/2+3] = 1.0e5 # Isothermal\n",
-      "vel_theta = 100.*r/(1.+(r/r_0)**beta) # the azimuthal velocity profile of the disk\n",
-      "velx = np.zeros((nx,ny,nz))\n",
-      "vely = np.zeros((nx,ny,nz))\n",
-      "velx[:,:,nz/2-3:nz/2+3] = (-vel_theta*np.sin(theta)).reshape(nx,ny,1) # convert polar to cartesian\n",
-      "vely[:,:,nz/2-3:nz/2+3] = (vel_theta*np.cos(theta)).reshape(nx,ny,1) # convert polar to cartesian\n",
-      "dens[r > R] = 0.0\n",
-      "temp[r > R] = 0.0\n",
-      "velx[r > R] = 0.0\n",
-      "vely[r > R] = 0.0"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Finally, we'll package these data arrays up into a dictionary, which will then be shipped off to `load_uniform_grid`. We'll define the width of the grid to be `2*R` kpc, which will be equal to 1  `code_length`. "
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "data = {}\n",
-      "data[\"density\"] = (dens,\"g/cm**3\")\n",
-      "data[\"temperature\"] = (temp, \"K\")\n",
-      "data[\"velocity_x\"] = (velx, \"km/s\")\n",
-      "data[\"velocity_y\"] = (vely, \"km/s\")\n",
-      "data[\"velocity_z\"] = (np.zeros((nx,ny,nz)), \"km/s\") # zero velocity in the z-direction\n",
-      "bbox = np.array([[-0.5,0.5],[-0.5,0.5],[-0.5,0.5]]) # bbox of width 1 on a side with center (0,0,0)\n",
-      "ds = yt.load_uniform_grid(data, (nx,ny,nz), length_unit=(2*R,\"kpc\"), nprocs=1, bbox=bbox)"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "To get a sense of what the data looks like, we'll take a slice through the middle of the disk:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "slc = yt.SlicePlot(ds, \"z\", [\"density\",\"velocity_x\",\"velocity_y\",\"velocity_magnitude\"])"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "slc.set_log(\"velocity_x\", False)\n",
-      "slc.set_log(\"velocity_y\", False)\n",
-      "slc.set_log(\"velocity_magnitude\", False)\n",
-      "slc.set_unit(\"velocity_magnitude\", \"km/s\")\n",
-      "slc.show()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Which shows a rotating disk with a specific density and velocity profile. Now, suppose we wanted to look at this disk galaxy from a certain orientation angle, and simulate a 3D FITS data cube where we can see the gas that is emitting at different velocities along the line of sight. We can do this using the `PPVCube` class. First, let's assume we rotate our viewing angle 60 degrees from face-on, from along the z-axis into the x-axis. We'll create a normal vector:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "i = 60.*np.pi/180.\n",
-      "L = [np.sin(i),0.0,np.cos(i)]"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Next, we need to specify a field that will serve as the \"intensity\" of the emission that we see. For simplicity, we'll simply choose the gas density as this field, though it could be any field (including derived fields) in principle. We also need to choose the bounds in line-of-sight velocity that the data will be binned into, which is a 4-tuple in the shape of `(vmin, vmax, nbins, units)`, which specifies a linear range of `nbins` velocity bins from `vmin` to `vmax` in units of `units`. We may also optionally specify the dimensions of the data cube with the `dims` argument."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "cube = PPVCube(ds, L, \"density\", (-150.,150.,50,\"km/s\"), dims=200, method=\"sum\")"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Following this, we can now write this cube to a FITS file. The x and y axes of the file can be in length units, which can be optionally specified by `length_unit`:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "cube.write_fits(\"cube.fits\", clobber=True, length_unit=\"kpc\")"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Or one can use the `sky_scale` and `sky_center` keywords to set up the coordinates in RA and Dec:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "sky_scale = (1.0, \"arcsec/kpc\")\n",
-      "sky_center = (30., 45.) # RA, Dec in degrees\n",
-      "cube.write_fits(\"cube_sky.fits\", clobber=True, sky_scale=sky_scale, sky_center=sky_center)"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Now, we'll look at the FITS dataset in yt and look at different slices along the velocity axis, which is the \"z\" axis:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "ds_cube = yt.load(\"cube.fits\")"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "# Specifying no center gives us the center slice\n",
-      "slc = yt.SlicePlot(ds_cube, \"z\", [\"density\"])\n",
-      "slc.show()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "# Picking different velocities for the slices\n",
-      "new_center = ds_cube.domain_center\n",
-      "new_center[2] = ds_cube.spec2pixel(-100.*u.km/u.s)\n",
-      "slc = yt.SlicePlot(ds_cube, \"z\", [\"density\"], center=new_center)\n",
-      "slc.show()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "new_center[2] = ds_cube.spec2pixel(70.0*u.km/u.s)\n",
-      "slc = yt.SlicePlot(ds_cube, \"z\", [\"density\"], center=new_center)\n",
-      "slc.show()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "new_center[2] = ds_cube.spec2pixel(-30.0*u.km/u.s)\n",
-      "slc = yt.SlicePlot(ds_cube, \"z\", [\"density\"], center=new_center)\n",
-      "slc.show()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "If we project all the emission at all the different velocities along the z-axis, we recover the entire disk:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "prj = yt.ProjectionPlot(ds_cube, \"z\", [\"density\"], method=\"sum\")\n",
-      "prj.set_log(\"density\", True)\n",
-      "prj.set_zlim(\"density\", 1.0e-3, 0.2)\n",
-      "prj.show()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "The `thermal_broad` keyword allows one to simulate thermal line broadening based on the temperature, and the `atomic_weight` argument is used to specify the atomic weight of the particle that is doing the emitting."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "cube2 = PPVCube(ds, L, \"density\", (-150.,150.,50,\"km/s\"), dims=200, thermal_broad=True, \n",
-      "                atomic_weight=12.0, method=\"sum\")\n",
-      "cube2.write_fits(\"cube2.fits\", clobber=True, length_unit=\"kpc\")"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Taking a slice of this cube shows:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "ds_cube2 = yt.load(\"cube2.fits\")\n",
-      "new_center = ds_cube2.domain_center\n",
-      "new_center[2] = ds_cube2.spec2pixel(70.0*u.km/u.s)\n",
-      "slc = yt.SlicePlot(ds_cube2, \"z\", [\"density\"], center=new_center)\n",
-      "slc.show()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "new_center[2] = ds_cube2.spec2pixel(-100.*u.km/u.s)\n",
-      "slc = yt.SlicePlot(ds_cube2, \"z\", [\"density\"], center=new_center)\n",
-      "slc.show()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "where we can see the emission has been smeared into this velocity slice from neighboring slices due to the thermal broadening. \n",
-      "\n",
-      "Finally, the \"velocity\" or \"spectral\" axis of the cube can be changed to a different unit, such as wavelength, frequency, or energy: "
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print cube2.vbins[0], cube2.vbins[-1]\n",
-      "cube2.transform_spectral_axis(400.0,\"nm\")\n",
-      "print cube2.vbins[0], cube2.vbins[-1]"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "If a FITS file is now written from the cube, the spectral axis will be in the new units. To reset the spectral axis back to the original velocity units:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "cube2.reset_spectral_axis()\n",
-      "print cube2.vbins[0], cube2.vbins[-1]"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    }
-   ],
-   "metadata": {}
-  }
- ]
-}
\ No newline at end of file
+ "nbformat": 4,
+ "nbformat_minor": 0
+}

This diff is so big that we needed to truncate the remainder.

https://bitbucket.org/yt_analysis/yt/commits/a3d767cb1312/
Changeset:   a3d767cb1312
Branch:      yt
User:        atmyers
Date:        2016-01-28 21:07:05+00:00
Summary:     revert changes to Orientation
Affected #:  1 file

diff -r bc3ca9a8242f4f17a6a8d1a1c2a2e1150f260b03 -r a3d767cb1312fe3632dfa02890b1fcb3a134b517 yt/utilities/orientation.py
--- a/yt/utilities/orientation.py
+++ b/yt/utilities/orientation.py
@@ -74,7 +74,7 @@
         normal_vector /= np.sqrt(np.dot(normal_vector, normal_vector))
         if north_vector is None:
             vecs = np.identity(3)
-            t = np.cross(vecs, normal_vector).sum(axis=1)
+            t = np.cross(normal_vector, vecs).sum(axis=1)
             ax = t.argmax()
             east_vector = np.cross(vecs[ax, :], normal_vector).ravel()
             # self.north_vector must remain None otherwise rotations about a fixed axis will break. 


https://bitbucket.org/yt_analysis/yt/commits/ac735ea96113/
Changeset:   ac735ea96113
Branch:      yt
User:        atmyers
Date:        2016-01-28 21:14:33+00:00
Summary:     remove some redundant code from off_axis_projection
Affected #:  2 files

diff -r a3d767cb1312fe3632dfa02890b1fcb3a134b517 -r ac735ea9611369d1d839e8e3ef7b4e1b737d5c7c yt/utilities/orientation.py
--- a/yt/utilities/orientation.py
+++ b/yt/utilities/orientation.py
@@ -74,7 +74,7 @@
         normal_vector /= np.sqrt(np.dot(normal_vector, normal_vector))
         if north_vector is None:
             vecs = np.identity(3)
-            t = np.cross(normal_vector, vecs).sum(axis=1)
+            t = np.cross(vecs, normal_vector).sum(axis=1)
             ax = t.argmax()
             east_vector = np.cross(vecs[ax, :], normal_vector).ravel()
             # self.north_vector must remain None otherwise rotations about a fixed axis will break. 

diff -r a3d767cb1312fe3632dfa02890b1fcb3a134b517 -r ac735ea9611369d1d839e8e3ef7b4e1b737d5c7c yt/visualization/volume_rendering/off_axis_projection.py
--- a/yt/visualization/volume_rendering/off_axis_projection.py
+++ b/yt/visualization/volume_rendering/off_axis_projection.py
@@ -152,8 +152,6 @@
         vol.set_fields(fields)
     camera = Camera(data_source)
     camera.set_width(width)
-    camera.switch_orientation(normal_vector=normal_vector,
-                              north_vector=north_vector)
     if not iterable(resolution):
         resolution = [resolution]*2
     camera.resolution = resolution


https://bitbucket.org/yt_analysis/yt/commits/45a5554572c0/
Changeset:   45a5554572c0
Branch:      yt
User:        atmyers
Date:        2016-01-29 01:18:15+00:00
Summary:     A more defensive way of changing the orientation for off-axis projections
Affected #:  2 files

diff -r ac735ea9611369d1d839e8e3ef7b4e1b737d5c7c -r 45a5554572c0eb614633760609b55eeec90a9fa4 yt/utilities/orientation.py
--- a/yt/utilities/orientation.py
+++ b/yt/utilities/orientation.py
@@ -74,7 +74,7 @@
         normal_vector /= np.sqrt(np.dot(normal_vector, normal_vector))
         if north_vector is None:
             vecs = np.identity(3)
-            t = np.cross(vecs, normal_vector).sum(axis=1)
+            t = np.cross(normal_vector, vecs).sum(axis=1)
             ax = t.argmax()
             east_vector = np.cross(vecs[ax, :], normal_vector).ravel()
             # self.north_vector must remain None otherwise rotations about a fixed axis will break. 

diff -r ac735ea9611369d1d839e8e3ef7b4e1b737d5c7c -r 45a5554572c0eb614633760609b55eeec90a9fa4 yt/visualization/volume_rendering/off_axis_projection.py
--- a/yt/visualization/volume_rendering/off_axis_projection.py
+++ b/yt/visualization/volume_rendering/off_axis_projection.py
@@ -21,7 +21,6 @@
 from yt.utilities.lib.grid_traversal import \
     PartitionedGrid
 from yt.data_objects.api import ImageArray
-from yt.utilities.orientation import Orientation
 import numpy as np
 
 
@@ -159,10 +158,20 @@
         width = data_source.ds.arr([width]*3)
     camera.position = center - width[2]*camera.normal_vector
     camera.focus = center
-    orienter = Orientation(normal_vector=normal_vector, 
-                           north_vector=north_vector)
-    camera.switch_orientation(orienter.normal_vector,
-                              orienter.north_vector)
+    
+    # If north_vector is None, we set the default here.
+    # This is chosen so that if normal_vector is one of the 
+    # cartesian coordinate axes, the projection will match
+    # the corresponding on-axis projection.
+    if north_vector is None:
+        vecs = np.identity(3)
+        t = np.cross(vecs, normal_vector).sum(axis=1)
+        ax = t.argmax()
+        east_vector = np.cross(vecs[ax, :], normal_vector).ravel()
+        north_vector = np.cross(normal_vector, east_vector).ravel()
+    camera.switch_orientation(normal_vector,
+                              north_vector)
+
     sc.camera = camera
     sc.add_source(vol)
 


https://bitbucket.org/yt_analysis/yt/commits/badc10d8dcbd/
Changeset:   badc10d8dcbd
Branch:      yt
User:        atmyers
Date:        2016-01-29 03:42:01+00:00
Summary:     off axis projection no longer returns a scene
Affected #:  1 file

diff -r 45a5554572c0eb614633760609b55eeec90a9fa4 -r badc10d8dcbddd2d579a52d26c3e6a447eb4a245 yt/visualization/volume_rendering/tests/test_vr_orientation.py
--- a/yt/visualization/volume_rendering/tests/test_vr_orientation.py
+++ b/yt/visualization/volume_rendering/tests/test_vr_orientation.py
@@ -160,8 +160,8 @@
     center = [0.5, 0.5, 0.5]
     width = [0.04, 0.04, 0.4]
     for orientation in orientations:
-        image, sc = off_axis_projection(ds, center, orientation, width,
-                                        512, "density", no_ghost=False)
+        image = off_axis_projection(ds, center, orientation, width,
+                                    512, "density", no_ghost=False)
         def offaxis_image_func(filename_prefix):
             return image.write_image(filename_prefix)
         yield GenericImageTest(ds, offaxis_image_func, decimals)


https://bitbucket.org/yt_analysis/yt/commits/af128f1c867d/
Changeset:   af128f1c867d
Branch:      yt
User:        chummels
Date:        2016-02-17 03:34:15+00:00
Summary:     Merged in atmyers/yt (pull request #1958)

Making off axis projections have the same orientation vectors as before.
Affected #:  3 files

diff -r ee29ca6d81c95d55073d4cf9db89e9486c40156d -r af128f1c867d7f72132699e5d243a37583c36570 yt/visualization/volume_rendering/off_axis_projection.py
--- a/yt/visualization/volume_rendering/off_axis_projection.py
+++ b/yt/visualization/volume_rendering/off_axis_projection.py
@@ -19,7 +19,7 @@
 from .utils import data_source_or_all
 from yt.funcs import mylog, iterable
 from yt.utilities.lib.grid_traversal import \
-        PartitionedGrid
+    PartitionedGrid
 from yt.data_objects.api import ImageArray
 import numpy as np
 
@@ -151,8 +151,6 @@
         vol.set_fields(fields)
     camera = Camera(data_source)
     camera.set_width(width)
-    camera.switch_orientation(normal_vector=normal_vector,
-                              north_vector=north_vector)
     if not iterable(resolution):
         resolution = [resolution]*2
     camera.resolution = resolution
@@ -160,6 +158,20 @@
         width = data_source.ds.arr([width]*3)
     camera.position = center - width[2]*camera.normal_vector
     camera.focus = center
+    
+    # If north_vector is None, we set the default here.
+    # This is chosen so that if normal_vector is one of the 
+    # cartesian coordinate axes, the projection will match
+    # the corresponding on-axis projection.
+    if north_vector is None:
+        vecs = np.identity(3)
+        t = np.cross(vecs, normal_vector).sum(axis=1)
+        ax = t.argmax()
+        east_vector = np.cross(vecs[ax, :], normal_vector).ravel()
+        north_vector = np.cross(normal_vector, east_vector).ravel()
+    camera.switch_orientation(normal_vector,
+                              north_vector)
+
     sc.camera = camera
     sc.add_source(vol)
 

diff -r ee29ca6d81c95d55073d4cf9db89e9486c40156d -r af128f1c867d7f72132699e5d243a37583c36570 yt/visualization/volume_rendering/tests/test_vr_orientation.py
--- a/yt/visualization/volume_rendering/tests/test_vr_orientation.py
+++ b/yt/visualization/volume_rendering/tests/test_vr_orientation.py
@@ -16,12 +16,14 @@
 from yt import load_uniform_grid
 from yt.utilities.answer_testing.framework import \
     requires_answer_testing, \
-    VRImageComparisonTest
+    VRImageComparisonTest, \
+    GenericImageTest
 from yt.visualization.volume_rendering.api import \
     Scene, \
     Camera, \
     VolumeSource, \
-    ColorTransferFunction
+    ColorTransferFunction, \
+    off_axis_projection
 
 
 def setup_ds():
@@ -149,3 +151,17 @@
             sc.camera = cam
             yield VRImageComparisonTest(
                 sc, ds, 'roll_%s_%04d' % (lens_type, frame), decimals)
+
+    orientations = [ [1.0, 0.0, 0.0],
+                     [0.0, 1.0, 0.0],
+                     [0.0, 0.0, 1.0],
+                     [0.5, 0.4, 0.7],
+                     [-0.3, -0.1, 0.8] ]
+    center = [0.5, 0.5, 0.5]
+    width = [0.04, 0.04, 0.4]
+    for orientation in orientations:
+        image = off_axis_projection(ds, center, orientation, width,
+                                    512, "density", no_ghost=False)
+        def offaxis_image_func(filename_prefix):
+            return image.write_image(filename_prefix)
+        yield GenericImageTest(ds, offaxis_image_func, decimals)

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this message because you have the notification service enabled and are
the addressed recipient of this email.



More information about the yt-svn mailing list