[yt-svn] commit/yt: xarthisius: Merged in ngoldbaum/yt (pull request #1935)

commits-noreply at bitbucket.org
Wed Jan 27 19:26:22 PST 2016


1 new commit in yt:

https://bitbucket.org/yt_analysis/yt/commits/bc0cc00c150c/
Changeset:   bc0cc00c150c
Branch:      yt
User:        xarthisius
Date:        2016-01-28 03:26:16+00:00
Summary:     Merged in ngoldbaum/yt (pull request #1935)

Update documentation notebooks to nbformat4 and py3 compatibility
Affected #:  37 files
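
For anyone performing the same upgrade by hand, the nbformat package can do the v3-to-v4 conversion directly. A minimal sketch (the path "notebook.ipynb" is a placeholder):

    import nbformat

    # Read the notebook, upgrading it to nbformat 4 in memory
    # regardless of the version it was saved in.
    nb = nbformat.read("notebook.ipynb", as_version=4)

    # Write it back out as an nbformat 4 document.
    nbformat.write(nb, "notebook.ipynb")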

diff -r 481d5a937fb8e369949bae0073a7777d71cc952f -r bc0cc00c150c6e5fca1b85310935dfe9ccd905a2 doc/extensions/notebook_sphinxext.py
--- a/doc/extensions/notebook_sphinxext.py
+++ /dev/null
@@ -1,241 +0,0 @@
-import errno
-import os
-import shutil
-import string
-import re
-import tempfile
-import uuid
-from sphinx.util.compat import Directive
-from docutils import nodes
-from docutils.parsers.rst import directives
-from IPython.config import Config
-from IPython.nbconvert import html, python
-from IPython.nbformat import current as nbformat
-from runipy.notebook_runner import NotebookRunner, NotebookError
-
-class NotebookDirective(Directive):
-    """Insert an evaluated notebook into a document
-
-    This uses runipy and nbconvert to transform a path to an unevaluated notebook
-    into html suitable for embedding in a Sphinx document.
-    """
-    required_arguments = 1
-    optional_arguments = 1
-    option_spec = {'skip_exceptions': directives.flag}
-    final_argument_whitespace = True
-
-    def run(self): # check if there are spaces in the notebook name
-        nb_path = self.arguments[0]
-        if ' ' in nb_path: raise ValueError(
-            "Due to issues with docutils stripping spaces from links, white "
-            "space is not allowed in notebook filenames '{0}'".format(nb_path))
-        # check if raw html is supported
-        if not self.state.document.settings.raw_enabled:
-            raise self.warning('"%s" directive disabled.' % self.name)
-
-        cwd = os.getcwd()
-        tmpdir = tempfile.mkdtemp()
-        os.chdir(tmpdir)
-
-        # get path to notebook
-        nb_filename = self.arguments[0]
-        nb_basename = os.path.basename(nb_filename)
-        rst_file = self.state_machine.document.attributes['source']
-        rst_dir = os.path.abspath(os.path.dirname(rst_file))
-        nb_abs_path = os.path.abspath(os.path.join(rst_dir, nb_filename))
-
-        # Move files around.
-        rel_dir = os.path.relpath(rst_dir, setup.confdir)
-        dest_dir = os.path.join(setup.app.builder.outdir, rel_dir)
-        dest_path = os.path.join(dest_dir, nb_basename)
-
-        image_dir, image_rel_dir = make_image_dir(setup, rst_dir)
-
-        # Ensure destination build directory exists
-        thread_safe_mkdir(os.path.dirname(dest_path))
-
-        # Copy unevaluated notebook
-        shutil.copyfile(nb_abs_path, dest_path)
-
-        # Construct paths to versions getting copied over
-        dest_path_eval = string.replace(dest_path, '.ipynb', '_evaluated.ipynb')
-        dest_path_script = string.replace(dest_path, '.ipynb', '.py')
-        rel_path_eval = string.replace(nb_basename, '.ipynb', '_evaluated.ipynb')
-        rel_path_script = string.replace(nb_basename, '.ipynb', '.py')
-
-        # Create python script version
-        script_text = nb_to_python(nb_abs_path)
-        f = open(dest_path_script, 'w')
-        f.write(script_text.encode('utf8'))
-        f.close()
-
-        skip_exceptions = 'skip_exceptions' in self.options
-
-        ret = evaluate_notebook(
-            nb_abs_path, dest_path_eval, skip_exceptions=skip_exceptions)
-
-        try:
-            evaluated_text, resources = ret
-            evaluated_text = write_notebook_output(
-                resources, image_dir, image_rel_dir, evaluated_text)
-        except ValueError:
-            # This happens when a notebook raises an unhandled exception
-            evaluated_text = ret
-
-        # Create link to notebook and script files
-        link_rst = "(" + \
-                   formatted_link(nb_basename) + "; " + \
-                   formatted_link(rel_path_eval) + "; " + \
-                   formatted_link(rel_path_script) + \
-                   ")"
-
-        self.state_machine.insert_input([link_rst], rst_file)
-
-        # create notebook node
-        attributes = {'format': 'html', 'source': 'nb_path'}
-        nb_node = notebook_node('', evaluated_text, **attributes)
-        (nb_node.source, nb_node.line) = \
-            self.state_machine.get_source_and_line(self.lineno)
-
-        # add dependency
-        self.state.document.settings.record_dependencies.add(nb_abs_path)
-
-        # clean up
-        os.chdir(cwd)
-        shutil.rmtree(tmpdir, True)
-
-        return [nb_node]
-
-
-class notebook_node(nodes.raw):
-    pass
-
-def nb_to_python(nb_path):
-    """convert notebook to python script"""
-    exporter = python.PythonExporter()
-    output, resources = exporter.from_filename(nb_path)
-    return output
-
-def nb_to_html(nb_path):
-    """convert notebook to html"""
-    c = Config({'ExtractOutputPreprocessor':{'enabled':True}})
-
-    exporter = html.HTMLExporter(template_file='full', config=c)
-    notebook = nbformat.read(open(nb_path), 'json')
-    output, resources = exporter.from_notebook_node(notebook)
-    header = output.split('<head>', 1)[1].split('</head>',1)[0]
-    body = output.split('<body>', 1)[1].split('</body>',1)[0]
-
-    # http://imgur.com/eR9bMRH
-    header = header.replace('<style', '<style scoped="scoped"')
-    header = header.replace('body {\n  overflow: visible;\n  padding: 8px;\n}\n',
-                            '')
-    header = header.replace("code,pre{", "code{")
-
-    # Filter out styles that conflict with the sphinx theme.
-    filter_strings = [
-        'navbar',
-        'body{',
-        'alert{',
-        'uneditable-input{',
-        'collapse{',
-    ]
-
-    filter_strings.extend(['h%s{' % (i+1) for i in range(6)])
-
-    line_begin = [
-        'pre{',
-        'p{margin'
-    ]
-
-    filterfunc = lambda x: not any([s in x for s in filter_strings])
-    header_lines = filter(filterfunc, header.split('\n'))
-
-    filterfunc = lambda x: not any([x.startswith(s) for s in line_begin])
-    header_lines = filter(filterfunc, header_lines)
-
-    header = '\n'.join(header_lines)
-
-    # concatenate raw html lines
-    lines = ['<div class="ipynotebook">']
-    lines.append(header)
-    lines.append(body)
-    lines.append('</div>')
-    return '\n'.join(lines), resources
-
-def evaluate_notebook(nb_path, dest_path=None, skip_exceptions=False):
-    # Create evaluated version and save it to the dest path.
-    notebook = nbformat.read(open(nb_path), 'json')
-    nb_runner = NotebookRunner(notebook, pylab=False)
-    try:
-        nb_runner.run_notebook(skip_exceptions=skip_exceptions)
-    except NotebookError as e:
-        print('')
-        print(e)
-        # Return the traceback, filtering out ANSI color codes.
-        # http://stackoverflow.com/questions/13506033/filtering-out-ansi-escape-sequences
-        return "Notebook conversion failed with the " \
-               "following traceback: \n%s" % \
-            re.sub(r'\\033[\[\]]([0-9]{1,2}([;@][0-9]{0,2})*)*[mKP]?', '',
-                   str(e))
-
-    if dest_path is None:
-        dest_path = 'temp_evaluated.ipynb'
-    nbformat.write(nb_runner.nb, open(dest_path, 'w'), 'json')
-    ret = nb_to_html(dest_path)
-    if dest_path == 'temp_evaluated.ipynb':
-        os.remove(dest_path)
-    return ret
-
-def formatted_link(path):
-    return "`%s <%s>`__" % (os.path.basename(path), path)
-
-def visit_notebook_node(self, node):
-    self.visit_raw(node)
-
-def depart_notebook_node(self, node):
-    self.depart_raw(node)
-
-def setup(app):
-    setup.app = app
-    setup.config = app.config
-    setup.confdir = app.confdir
-
-    app.add_node(notebook_node,
-                 html=(visit_notebook_node, depart_notebook_node))
-
-    app.add_directive('notebook', NotebookDirective)
-
-    retdict = dict(
-        version='0.1',
-        parallel_read_safe=True,
-        parallel_write_safe=True
-    )
-
-    return retdict
-
-def make_image_dir(setup, rst_dir):
-    image_dir = setup.app.builder.outdir + os.path.sep + '_images'
-    rel_dir = os.path.relpath(setup.confdir, rst_dir)
-    image_rel_dir = rel_dir + os.path.sep + '_images'
-    thread_safe_mkdir(image_dir)
-    return image_dir, image_rel_dir
-
-def write_notebook_output(resources, image_dir, image_rel_dir, evaluated_text):
-    my_uuid = uuid.uuid4().hex
-
-    for output in resources['outputs']:
-        new_name = image_dir + os.path.sep + my_uuid + output
-        new_relative_name = image_rel_dir + os.path.sep + my_uuid + output
-        evaluated_text = evaluated_text.replace(output, new_relative_name)
-        with open(new_name, 'wb') as f:
-            f.write(resources['outputs'][output])
-    return evaluated_text
-
-def thread_safe_mkdir(dirname):
-    try:
-        os.makedirs(dirname)
-    except OSError as e:
-        if e.errno != errno.EEXIST:
-            raise
-        pass
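
Note: the extension deleted above leaned on the deprecated runipy runner and the old IPython.nbconvert/IPython.nbformat modules. A rough modern equivalent of its evaluate_notebook step, assuming nbformat 4.x and nbconvert 4.x or later are available:

    import nbformat
    from nbconvert.preprocessors import ExecutePreprocessor

    def evaluate_notebook(nb_path, dest_path, skip_exceptions=False):
        # Load the unevaluated notebook as an nbformat 4 document.
        nb = nbformat.read(nb_path, as_version=4)
        # allow_errors plays the role of the old skip_exceptions flag:
        # execution continues past cells that raise.
        ep = ExecutePreprocessor(timeout=600, allow_errors=skip_exceptions)
        ep.preprocess(nb, {"metadata": {"path": "."}})
        nbformat.write(nb, dest_path)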

diff -r 481d5a937fb8e369949bae0073a7777d71cc952f -r bc0cc00c150c6e5fca1b85310935dfe9ccd905a2 doc/extensions/notebookcell_sphinxext.py
--- a/doc/extensions/notebookcell_sphinxext.py
+++ /dev/null
@@ -1,87 +0,0 @@
-import os
-import shutil
-import io
-import tempfile
-from sphinx.util.compat import Directive
-from docutils.parsers.rst import directives
-from IPython.nbformat import current
-from notebook_sphinxext import \
-    notebook_node, visit_notebook_node, depart_notebook_node, \
-    evaluate_notebook, make_image_dir, write_notebook_output
-
-
-class NotebookCellDirective(Directive):
-    """Insert an evaluated notebook cell into a document
-
-    This uses runipy and nbconvert to transform an inline python
-    script into html suitable for embedding in a Sphinx document.
-    """
-    required_arguments = 0
-    optional_arguments = 1
-    has_content = True
-    option_spec = {'skip_exceptions': directives.flag}
-
-    def run(self):
-        # check if raw html is supported
-        if not self.state.document.settings.raw_enabled:
-            raise self.warning('"%s" directive disabled.' % self.name)
-
-        cwd = os.getcwd()
-        tmpdir = tempfile.mkdtemp()
-        os.chdir(tmpdir)
-
-        rst_file = self.state_machine.document.attributes['source']
-        rst_dir = os.path.abspath(os.path.dirname(rst_file))
-
-        image_dir, image_rel_dir = make_image_dir(setup, rst_dir)
-
-        # Construct notebook from cell content
-        content = "\n".join(self.content)
-        with open("temp.py", "w") as f:
-            f.write(content)
-
-        convert_to_ipynb('temp.py', 'temp.ipynb')
-
-        skip_exceptions = 'skip_exceptions' in self.options
-
-        evaluated_text, resources = evaluate_notebook(
-            'temp.ipynb', skip_exceptions=skip_exceptions)
-
-        evaluated_text = write_notebook_output(
-            resources, image_dir, image_rel_dir, evaluated_text)
-
-        # create notebook node
-        attributes = {'format': 'html', 'source': 'nb_path'}
-        nb_node = notebook_node('', evaluated_text, **attributes)
-        (nb_node.source, nb_node.line) = \
-            self.state_machine.get_source_and_line(self.lineno)
-
-        # clean up
-        os.chdir(cwd)
-        shutil.rmtree(tmpdir, True)
-
-        return [nb_node]
-
-def setup(app):
-    setup.app = app
-    setup.config = app.config
-    setup.confdir = app.confdir
-
-    app.add_node(notebook_node,
-                 html=(visit_notebook_node, depart_notebook_node))
-
-    app.add_directive('notebook-cell', NotebookCellDirective)
-
-    retdict = dict(
-        version='0.1',
-        parallel_read_safe=True,
-        parallel_write_safe=True
-    )
-
-    return retdict
-
-def convert_to_ipynb(py_file, ipynb_file):
-    with io.open(py_file, 'r', encoding='utf-8') as f:
-        notebook = current.reads(f.read(), format='py')
-    with io.open(ipynb_file, 'w', encoding='utf-8') as f:
-        current.write(notebook, f, format='ipynb')
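
Note: convert_to_ipynb above used the v3-era IPython.nbformat.current reader, whose 'py' input format was dropped in nbformat 4. A hedged replacement that simply wraps the script in a single code cell:

    import nbformat
    from nbformat.v4 import new_code_cell, new_notebook

    def convert_to_ipynb(py_file, ipynb_file):
        # nbformat 4 has no 'py' reader, so build the notebook
        # explicitly: one code cell holding the whole script.
        with open(py_file, "r", encoding="utf-8") as f:
            source = f.read()
        nb = new_notebook(cells=[new_code_cell(source)])
        nbformat.write(nb, ipynb_file)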

diff -r 481d5a937fb8e369949bae0073a7777d71cc952f -r bc0cc00c150c6e5fca1b85310935dfe9ccd905a2 doc/extensions/pythonscript_sphinxext.py
--- a/doc/extensions/pythonscript_sphinxext.py
+++ b/doc/extensions/pythonscript_sphinxext.py
@@ -4,9 +4,9 @@
 import shutil
 import subprocess
 import uuid
+import errno
 from sphinx.util.compat import Directive
 from docutils import nodes
-from notebook_sphinxext import make_image_dir
 
 
 class PythonScriptDirective(Directive):
@@ -82,3 +82,20 @@
     shutil.move(filename, image_dir + os.path.sep + my_uuid + filename)
     relative_filename = image_rel_dir + os.path.sep + my_uuid + filename
     return '<img src="%s" width="600"><br>' % relative_filename
+
+
+def make_image_dir(setup, rst_dir):
+    image_dir = setup.app.builder.outdir + os.path.sep + '_images'
+    rel_dir = os.path.relpath(setup.confdir, rst_dir)
+    image_rel_dir = rel_dir + os.path.sep + '_images'
+    thread_safe_mkdir(image_dir)
+    return image_dir, image_rel_dir
+
+
+def thread_safe_mkdir(dirname):
+    try:
+        os.makedirs(dirname)
+    except OSError as e:
+        if e.errno != errno.EEXIST:
+            raise
+        pass
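
Note: on Python 3.2+ the EEXIST handling is built into the standard library, so the helper could shrink to the sketch below (behavior-equivalent for the existing-directory case):

    import os

    def thread_safe_mkdir(dirname):
        # exist_ok suppresses the error when another build process
        # has already created the directory, mirroring the
        # errno.EEXIST check above.
        os.makedirs(dirname, exist_ok=True)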

diff -r 481d5a937fb8e369949bae0073a7777d71cc952f -r bc0cc00c150c6e5fca1b85310935dfe9ccd905a2 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -440,10 +440,6 @@
 get_willwont ${INST_SCIPY}
 echo "be installing scipy"
 
-printf "%-15s = %s so I " "INST_0MQ" "${INST_0MQ}"
-get_willwont ${INST_0MQ}
-echo "be installing ZeroMQ"
-
 printf "%-15s = %s so I " "INST_ROCKSTAR" "${INST_ROCKSTAR}"
 get_willwont ${INST_ROCKSTAR}
 echo "be installing Rockstar"
@@ -627,7 +623,6 @@
 FREETYPE_VER='freetype-2.4.12' 
 H5PY='h5py-2.5.0'
 HDF5='hdf5-1.8.14' 
-IPYTHON='ipython-2.4.1'
 LAPACK='lapack-3.4.2'
 PNG=libpng-1.6.3
 MATPLOTLIB='matplotlib-1.4.3'
@@ -635,13 +630,10 @@
 NOSE='nose-1.3.6'
 NUMPY='numpy-1.9.2'
 PYTHON_HGLIB='python-hglib-1.6'
-PYZMQ='pyzmq-14.5.0'
 ROCKSTAR='rockstar-0.99.6'
 SCIPY='scipy-0.15.1'
 SQLITE='sqlite-autoconf-3071700'
 SYMPY='sympy-0.7.6'
-TORNADO='tornado-4.0.2'
-ZEROMQ='zeromq-4.0.5'
 ZLIB='zlib-1.2.8'
 SETUPTOOLS='setuptools-18.0.1'
 
@@ -655,7 +647,6 @@
 echo '609a68a3675087e0cc95268574f31e104549daa48efe15a25a33b8e269a93b4bd160f4c3e8178dca9c950ef5ca514b039d6fd1b45db6af57f25342464d0429ce  freetype-2.4.12.tar.gz' > freetype-2.4.12.tar.gz.sha512
 echo '4a83f9ae1855a7fad90133b327d426201c8ccfd2e7fbe9f39b2d61a2eee2f3ebe2ea02cf80f3d4e1ad659f8e790c173df8cc99b87d0b7ce63d34aa88cfdc7939  h5py-2.5.0.tar.gz' > h5py-2.5.0.tar.gz.sha512
 echo '4073fba510ccadaba41db0939f909613c9cb52ba8fb6c1062fc9118edc601394c75e102310be1af4077d07c9b327e6bbb1a6359939a7268dc140382d0c1e0199  hdf5-1.8.14.tar.gz' > hdf5-1.8.14.tar.gz.sha512
-echo 'a9cffc08ba10c47b0371b05664e55eee0562a30ef0d4bbafae79e52e5b9727906c45840c0918122c06c5672ac65e6eb381399f103e1a836aca003eda81b2acde  ipython-2.4.1.tar.gz' > ipython-2.4.1.tar.gz.sha512
 echo '8770214491e31f0a7a3efaade90eee7b0eb20a8a6ab635c5f854d78263f59a1849133c14ef5123d01023f0110cbb9fc6f818da053c01277914ae81473430a952  lapack-3.4.2.tar.gz' > lapack-3.4.2.tar.gz.sha512
 echo '887582e5a22e4cde338aa8fec7a89f6dd31f2f02b8842735f00f970f64582333fa03401cea6d01704083403c7e8b7ebc26655468ce930165673b33efa4bcd586  libpng-1.6.3.tar.gz' > libpng-1.6.3.tar.gz.sha512
 echo '51b0f58b2618b47b653e17e4f6b6a1215d3a3b0f1331ce3555cc7435e365d9c75693f289ce12fe3bf8f69fd57b663e545f0f1c2c94e81eaa661cac0689e125f5  matplotlib-1.4.3.tar.gz' > matplotlib-1.4.3.tar.gz.sha512
@@ -663,12 +654,9 @@
 echo 'd0cede08dc33a8ac0af0f18063e57f31b615f06e911edb5ca264575174d8f4adb4338448968c403811d9dcc60f38ade3164662d6c7b69b499f56f0984bb6283c  nose-1.3.6.tar.gz' > nose-1.3.6.tar.gz.sha512
 echo '70470ebb9afef5dfd0c83ceb7a9d5f1b7a072b1a9b54b04f04f5ed50fbaedd5b4906bd500472268d478f94df9e749a88698b1ff30f2d80258e7f3fec040617d9  numpy-1.9.2.tar.gz' > numpy-1.9.2.tar.gz.sha512
 echo 'bfd10455e74e30df568c4c4827140fb6cc29893b0e062ce1764bd52852ec7487a70a0f5ea53c3fca7886f5d36365c9f4db52b8c93cad35fb67beeb44a2d56f2d  python-hglib-1.6.tar.gz' > python-hglib-1.6.tar.gz.sha512
-echo '20164f7b05c308e0f089c07fc46b1c522094f3ac136f2e0bba84f19cb63dfd36152a2465df723dd4d93c6fbd2de4f0d94c160e2bbc353a92cfd680eb03cbdc87  pyzmq-14.5.0.tar.gz' > pyzmq-14.5.0.tar.gz.sha512
 echo 'fff4412d850c431a1b4e6ee3b17958ee5ab3beb81e6cb8a8e7d56d368751eaa8781d7c3e69d932dc002d718fddc66a72098acfe74cfe29ec80b24e6736317275  scipy-0.15.1.tar.gz' > scipy-0.15.1.tar.gz.sha512
 echo '96f3e51b46741450bc6b63779c10ebb4a7066860fe544385d64d1eda52592e376a589ef282ace2e1df73df61c10eab1a0d793abbdaf770e60289494d4bf3bcb4  sqlite-autoconf-3071700.tar.gz' > sqlite-autoconf-3071700.tar.gz.sha512
 echo 'ce0f1a17ac01eb48aec31fc0ad431d9d7ed9907f0e8584a6d79d0ffe6864fe62e203fe3f2a3c3e4e3d485809750ce07507a6488e776a388a7a9a713110882fcf  sympy-0.7.6.tar.gz' > sympy-0.7.6.tar.gz.sha512
-echo '93591068dc63af8d50a7925d528bc0cccdd705232c529b6162619fe28dddaf115e8a460b1842877d35160bd7ed480c1bd0bdbec57d1f359085bd1814e0c1c242  tornado-4.0.2.tar.gz' > tornado-4.0.2.tar.gz.sha512
-echo '0d928ed688ed940d460fa8f8d574a9819dccc4e030d735a8c7db71b59287ee50fa741a08249e356c78356b03c2174f2f2699f05aa7dc3d380ed47d8d7bab5408  zeromq-4.0.5.tar.gz' > zeromq-4.0.5.tar.gz.sha512
 echo 'ece209d4c7ec0cb58ede791444dc754e0d10811cbbdebe3df61c0fd9f9f9867c1c3ccd5f1827f847c005e24eef34fb5bf87b5d3f894d75da04f1797538290e4a  zlib-1.2.8.tar.gz' > zlib-1.2.8.tar.gz.sha512
 echo '9b318ce2ee2cf787929dcb886d76c492b433e71024fda9452d8b4927652a298d6bd1bdb7a4c73883a98e100024f89b46ea8aa14b250f896e549e6dd7e10a6b41  setuptools-18.0.1.tar.gz' > setuptools-18.0.1.tar.gz.sha512
 # Individual processes
@@ -679,9 +667,6 @@
 [ $INST_FTYPE -eq 1 ] && get_ytproject $FREETYPE_VER.tar.gz
 [ $INST_SQLITE3 -eq 1 ] && get_ytproject $SQLITE.tar.gz
 [ $INST_PYX -eq 1 ] && get_ytproject $PYX.tar.gz
-[ $INST_0MQ -eq 1 ] && get_ytproject $ZEROMQ.tar.gz
-[ $INST_0MQ -eq 1 ] && get_ytproject $PYZMQ.tar.gz
-[ $INST_0MQ -eq 1 ] && get_ytproject $TORNADO.tar.gz
 [ $INST_SCIPY -eq 1 ] && get_ytproject $SCIPY.tar.gz
 [ $INST_SCIPY -eq 1 ] && get_ytproject blas.tar.gz
 [ $INST_SCIPY -eq 1 ] && get_ytproject $LAPACK.tar.gz
@@ -690,7 +675,6 @@
 get_ytproject $PYTHON2.tgz
 get_ytproject $NUMPY.tar.gz
 get_ytproject $MATPLOTLIB.tar.gz
-get_ytproject $IPYTHON.tar.gz
 get_ytproject $H5PY.tar.gz
 get_ytproject $CYTHON.tar.gz
 get_ytproject $NOSE.tar.gz
@@ -976,25 +960,9 @@
 [ -n "${OLD_CXXFLAGS}" ] && export CXXFLAGS=${OLD_CXXFLAGS}
 [ -n "${OLD_CFLAGS}" ] && export CFLAGS=${OLD_CFLAGS}
 
-# Now we do our IPython installation, which has two optional dependencies.
-if [ $INST_0MQ -eq 1 ]
-then
-    if [ ! -e $ZEROMQ/done ]
-    then
-        [ ! -e $ZEROMQ ] && tar xfz $ZEROMQ.tar.gz
-        echo "Installing ZeroMQ"
-        cd $ZEROMQ
-        ( ./configure --prefix=${DEST_DIR}/ 2>&1 ) 1>> ${LOG_FILE} || do_exit
-        ( make install 2>&1 ) 1>> ${LOG_FILE} || do_exit
-        ( make clean 2>&1) 1>> ${LOG_FILE} || do_exit
-        touch done
-        cd ..
-    fi
-    do_setup_py $PYZMQ --zmq=${DEST_DIR}
-    do_setup_py $TORNADO
-fi
+echo "Installing Jupyter"
+( ${DEST_DIR}/bin/pip install "jupyter<2.0.0" 2>&1 ) 1>> ${LOG_FILE}
 
-do_setup_py $IPYTHON
 do_setup_py $CYTHON
 do_setup_py $H5PY
 do_setup_py $NOSE
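
Note: with the pip-installed Jupyter metapackage replacing the source builds of IPython, ZeroMQ, pyzmq, and tornado, a quick post-install sanity check could look like the following (run with ${DEST_DIR}/bin/python; it assumes the metapackage pulled in IPython and notebook as dependencies):

    # Confirm the pip-based Jupyter stack imports cleanly.
    import IPython
    import notebook

    print(IPython.__version__)
    print(notebook.__version__)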

diff -r 481d5a937fb8e369949bae0073a7777d71cc952f -r bc0cc00c150c6e5fca1b85310935dfe9ccd905a2 doc/source/analyzing/analysis_modules/PPVCube.ipynb
--- a/doc/source/analyzing/analysis_modules/PPVCube.ipynb
+++ b/doc/source/analyzing/analysis_modules/PPVCube.ipynb
@@ -1,423 +1,455 @@
 {
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Detailed spectra of astrophysical objects sometimes allow for determinations of how much of the gas is moving with a certain velocity along the line of sight, thanks to Doppler shifting of spectral lines. This enables \"data cubes\" to be created in RA, Dec, and line-of-sight velocity space. In yt, we can use the `PPVCube` analysis module to project fields along a given line of sight traveling at different line-of-sight velocities, to \"mock-up\" what would be seen in observations."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "from yt.config import ytcfg\n",
+    "\n",
+    "import yt\n",
+    "import numpy as np\n",
+    "from yt.analysis_modules.ppv_cube.api import PPVCube\n",
+    "import yt.units as u"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "To demonstrate this functionality, we'll create a simple unigrid dataset from scratch of a rotating disk. We create a thin disk in the x-y midplane of the domain of three cells in height in either direction, and a radius of 10 kpc. The density and azimuthal velocity profiles of the disk as a function of radius will be given by the following functions:"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Density: $\\rho(r) \\propto r^{\\alpha}$"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Velocity: $v_{\\theta}(r) \\propto \\frac{r}{1+(r/r_0)^{\\beta}}$"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "where for simplicity we won't worry about the normalizations of these profiles. "
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "First, we'll set up the grid and the parameters of the profiles:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# increasing the resolution will make the images in this notebook more visually appealing\n",
+    "nx,ny,nz = (64, 64, 64) # domain dimensions\n",
+    "R = 10. # outer radius of disk, kpc\n",
+    "r_0 = 3. # scale radius, kpc\n",
+    "beta = 1.4 # for the tangential velocity profile\n",
+    "alpha = -1. # for the radial density profile\n",
+    "x, y = np.mgrid[-R:R:nx*1j,-R:R:ny*1j] # cartesian coordinates of x-y plane of disk\n",
+    "r = np.sqrt(x*x+y*y) # polar coordinates\n",
+    "theta = np.arctan2(y, x) # polar coordinates"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Second, we'll construct the data arrays for the density, temperature, and velocity of the disk. Since we have the tangential velocity profile, we have to use the polar coordinates we derived earlier to compute `velx` and `vely`. Everywhere outside the disk, all fields are set to zero.  "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "dens = np.zeros((nx,ny,nz))\n",
+    "dens[:,:,nz/2-3:nz/2+3] = (r**alpha).reshape(nx,ny,1) # the density profile of the disk\n",
+    "temp = np.zeros((nx,ny,nz))\n",
+    "temp[:,:,nz/2-3:nz/2+3] = 1.0e5 # Isothermal\n",
+    "vel_theta = 100.*r/(1.+(r/r_0)**beta) # the azimuthal velocity profile of the disk\n",
+    "velx = np.zeros((nx,ny,nz))\n",
+    "vely = np.zeros((nx,ny,nz))\n",
+    "velx[:,:,nz/2-3:nz/2+3] = (-vel_theta*np.sin(theta)).reshape(nx,ny,1) # convert polar to cartesian\n",
+    "vely[:,:,nz/2-3:nz/2+3] = (vel_theta*np.cos(theta)).reshape(nx,ny,1) # convert polar to cartesian\n",
+    "dens[r > R] = 0.0\n",
+    "temp[r > R] = 0.0\n",
+    "velx[r > R] = 0.0\n",
+    "vely[r > R] = 0.0"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Finally, we'll package these data arrays up into a dictionary, which will then be shipped off to `load_uniform_grid`. We'll define the width of the grid to be `2*R` kpc, which will be equal to 1  `code_length`. "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "data = {}\n",
+    "data[\"density\"] = (dens,\"g/cm**3\")\n",
+    "data[\"temperature\"] = (temp, \"K\")\n",
+    "data[\"velocity_x\"] = (velx, \"km/s\")\n",
+    "data[\"velocity_y\"] = (vely, \"km/s\")\n",
+    "data[\"velocity_z\"] = (np.zeros((nx,ny,nz)), \"km/s\") # zero velocity in the z-direction\n",
+    "bbox = np.array([[-0.5,0.5],[-0.5,0.5],[-0.5,0.5]]) # bbox of width 1 on a side with center (0,0,0)\n",
+    "ds = yt.load_uniform_grid(data, (nx,ny,nz), length_unit=(2*R,\"kpc\"), nprocs=1, bbox=bbox)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "To get a sense of what the data looks like, we'll take a slice through the middle of the disk:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "slc = yt.SlicePlot(ds, \"z\", [\"density\",\"velocity_x\",\"velocity_y\",\"velocity_magnitude\"])"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "slc.set_log(\"velocity_x\", False)\n",
+    "slc.set_log(\"velocity_y\", False)\n",
+    "slc.set_log(\"velocity_magnitude\", False)\n",
+    "slc.set_unit(\"velocity_magnitude\", \"km/s\")\n",
+    "slc.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Which shows a rotating disk with a specific density and velocity profile. Now, suppose we wanted to look at this disk galaxy from a certain orientation angle, and simulate a 3D FITS data cube where we can see the gas that is emitting at different velocities along the line of sight. We can do this using the `PPVCube` class. First, let's assume we rotate our viewing angle 60 degrees from face-on, from along the z-axis into the x-axis. We'll create a normal vector:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "i = 60.*np.pi/180.\n",
+    "L = [np.sin(i),0.0,np.cos(i)]"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Next, we need to specify a field that will serve as the \"intensity\" of the emission that we see. For simplicity, we'll simply choose the gas density as this field, though it could be any field (including derived fields) in principle. We also need to choose the bounds in line-of-sight velocity that the data will be binned into, which is a 4-tuple in the shape of `(vmin, vmax, nbins, units)`, which specifies a linear range of `nbins` velocity bins from `vmin` to `vmax` in units of `units`. We may also optionally specify the dimensions of the data cube with the `dims` argument."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false,
+    "scrolled": true
+   },
+   "outputs": [],
+   "source": [
+    "cube = PPVCube(ds, L, \"density\", (-150.,150.,50,\"km/s\"), dims=200, method=\"sum\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Following this, we can now write this cube to a FITS file. The x and y axes of the file can be in length units, which can be optionally specified by `length_unit`:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "cube.write_fits(\"cube.fits\", clobber=True, length_unit=\"kpc\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Or one can use the `sky_scale` and `sky_center` keywords to set up the coordinates in RA and Dec:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "sky_scale = (1.0, \"arcsec/kpc\")\n",
+    "sky_center = (30., 45.) # RA, Dec in degrees\n",
+    "cube.write_fits(\"cube_sky.fits\", clobber=True, sky_scale=sky_scale, sky_center=sky_center)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Now, we'll look at the FITS dataset in yt and look at different slices along the velocity axis, which is the \"z\" axis:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "ds_cube = yt.load(\"cube.fits\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# Specifying no center gives us the center slice\n",
+    "slc = yt.SlicePlot(ds_cube, \"z\", [\"density\"])\n",
+    "slc.show()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "# Picking different velocities for the slices\n",
+    "new_center = ds_cube.domain_center\n",
+    "new_center[2] = ds_cube.spec2pixel(-100.*u.km/u.s)\n",
+    "slc = yt.SlicePlot(ds_cube, \"z\", [\"density\"], center=new_center)\n",
+    "slc.show()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "new_center[2] = ds_cube.spec2pixel(70.0*u.km/u.s)\n",
+    "slc = yt.SlicePlot(ds_cube, \"z\", [\"density\"], center=new_center)\n",
+    "slc.show()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "new_center[2] = ds_cube.spec2pixel(-30.0*u.km/u.s)\n",
+    "slc = yt.SlicePlot(ds_cube, \"z\", [\"density\"], center=new_center)\n",
+    "slc.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "If we project all the emission at all the different velocities along the z-axis, we recover the entire disk:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "prj = yt.ProjectionPlot(ds_cube, \"z\", [\"density\"], method=\"sum\")\n",
+    "prj.set_log(\"density\", True)\n",
+    "prj.set_zlim(\"density\", 1.0e-3, 0.2)\n",
+    "prj.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The `thermal_broad` keyword allows one to simulate thermal line broadening based on the temperature, and the `atomic_weight` argument is used to specify the atomic weight of the particle that is doing the emitting."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "cube2 = PPVCube(ds, L, \"density\", (-150.,150.,50,\"km/s\"), dims=200, thermal_broad=True, \n",
+    "                atomic_weight=12.0, method=\"sum\")\n",
+    "cube2.write_fits(\"cube2.fits\", clobber=True, length_unit=\"kpc\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Taking a slice of this cube shows:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "ds_cube2 = yt.load(\"cube2.fits\")\n",
+    "new_center = ds_cube2.domain_center\n",
+    "new_center[2] = ds_cube2.spec2pixel(70.0*u.km/u.s)\n",
+    "slc = yt.SlicePlot(ds_cube2, \"z\", [\"density\"], center=new_center)\n",
+    "slc.show()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "new_center[2] = ds_cube2.spec2pixel(-100.*u.km/u.s)\n",
+    "slc = yt.SlicePlot(ds_cube2, \"z\", [\"density\"], center=new_center)\n",
+    "slc.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "where we can see the emission has been smeared into this velocity slice from neighboring slices due to the thermal broadening. \n",
+    "\n",
+    "Finally, the \"velocity\" or \"spectral\" axis of the cube can be changed to a different unit, such as wavelength, frequency, or energy: "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "print (cube2.vbins[0], cube2.vbins[-1])\n",
+    "cube2.transform_spectral_axis(400.0,\"nm\")\n",
+    "print (cube2.vbins[0], cube2.vbins[-1])"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "If a FITS file is now written from the cube, the spectral axis will be in the new units. To reset the spectral axis back to the original velocity units:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "cube2.reset_spectral_axis()\n",
+    "print (cube2.vbins[0], cube2.vbins[-1])"
+   ]
+  }
+ ],
  "metadata": {
-  "name": "",
-  "signature": "sha256:67e4297cbc32716b2481c71659305687cb5bdadad648a0acf6b48960267bb069"
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.5.1"
+  }
  },
- "nbformat": 3,
- "nbformat_minor": 0,
- "worksheets": [
-  {
-   "cells": [
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Detailed spectra of astrophysical objects sometimes allow for determinations of how much of the gas is moving with a certain velocity along the line of sight, thanks to Doppler shifting of spectral lines. This enables \"data cubes\" to be created in RA, Dec, and line-of-sight velocity space. In yt, we can use the `PPVCube` analysis module to project fields along a given line of sight traveling at different line-of-sight velocities, to \"mock-up\" what would be seen in observations."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "from yt.config import ytcfg\n",
-      "ytcfg[\"yt\",\"loglevel\"] = 30\n",
-      "\n",
-      "import yt\n",
-      "import numpy as np\n",
-      "from yt.analysis_modules.ppv_cube.api import PPVCube\n",
-      "import yt.units as u"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "To demonstrate this functionality, we'll create a simple unigrid dataset from scratch of a rotating disk. We create a thin disk in the x-y midplane of the domain of three cells in height in either direction, and a radius of 10 kpc. The density and azimuthal velocity profiles of the disk as a function of radius will be given by the following functions:"
-     ]
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Density: $\\rho(r) \\propto r^{\\alpha}$"
-     ]
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Velocity: $v_{\\theta}(r) \\propto \\frac{r}{1+(r/r_0)^{\\beta}}$"
-     ]
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "where for simplicity we won't worry about the normalizations of these profiles. "
-     ]
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "First, we'll set up the grid and the parameters of the profiles:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "nx,ny,nz = (256,256,256) # domain dimensions\n",
-      "R = 10. # outer radius of disk, kpc\n",
-      "r_0 = 3. # scale radius, kpc\n",
-      "beta = 1.4 # for the tangential velocity profile\n",
-      "alpha = -1. # for the radial density profile\n",
-      "x, y = np.mgrid[-R:R:nx*1j,-R:R:ny*1j] # cartesian coordinates of x-y plane of disk\n",
-      "r = np.sqrt(x*x+y*y) # polar coordinates\n",
-      "theta = np.arctan2(y, x) # polar coordinates"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Second, we'll construct the data arrays for the density, temperature, and velocity of the disk. Since we have the tangential velocity profile, we have to use the polar coordinates we derived earlier to compute `velx` and `vely`. Everywhere outside the disk, all fields are set to zero.  "
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "dens = np.zeros((nx,ny,nz))\n",
-      "dens[:,:,nz/2-3:nz/2+3] = (r**alpha).reshape(nx,ny,1) # the density profile of the disk\n",
-      "temp = np.zeros((nx,ny,nz))\n",
-      "temp[:,:,nz/2-3:nz/2+3] = 1.0e5 # Isothermal\n",
-      "vel_theta = 100.*r/(1.+(r/r_0)**beta) # the azimuthal velocity profile of the disk\n",
-      "velx = np.zeros((nx,ny,nz))\n",
-      "vely = np.zeros((nx,ny,nz))\n",
-      "velx[:,:,nz/2-3:nz/2+3] = (-vel_theta*np.sin(theta)).reshape(nx,ny,1) # convert polar to cartesian\n",
-      "vely[:,:,nz/2-3:nz/2+3] = (vel_theta*np.cos(theta)).reshape(nx,ny,1) # convert polar to cartesian\n",
-      "dens[r > R] = 0.0\n",
-      "temp[r > R] = 0.0\n",
-      "velx[r > R] = 0.0\n",
-      "vely[r > R] = 0.0"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Finally, we'll package these data arrays up into a dictionary, which will then be shipped off to `load_uniform_grid`. We'll define the width of the grid to be `2*R` kpc, which will be equal to 1  `code_length`. "
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "data = {}\n",
-      "data[\"density\"] = (dens,\"g/cm**3\")\n",
-      "data[\"temperature\"] = (temp, \"K\")\n",
-      "data[\"velocity_x\"] = (velx, \"km/s\")\n",
-      "data[\"velocity_y\"] = (vely, \"km/s\")\n",
-      "data[\"velocity_z\"] = (np.zeros((nx,ny,nz)), \"km/s\") # zero velocity in the z-direction\n",
-      "bbox = np.array([[-0.5,0.5],[-0.5,0.5],[-0.5,0.5]]) # bbox of width 1 on a side with center (0,0,0)\n",
-      "ds = yt.load_uniform_grid(data, (nx,ny,nz), length_unit=(2*R,\"kpc\"), nprocs=1, bbox=bbox)"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "To get a sense of what the data looks like, we'll take a slice through the middle of the disk:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "slc = yt.SlicePlot(ds, \"z\", [\"density\",\"velocity_x\",\"velocity_y\",\"velocity_magnitude\"])"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "slc.set_log(\"velocity_x\", False)\n",
-      "slc.set_log(\"velocity_y\", False)\n",
-      "slc.set_log(\"velocity_magnitude\", False)\n",
-      "slc.set_unit(\"velocity_magnitude\", \"km/s\")\n",
-      "slc.show()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Which shows a rotating disk with a specific density and velocity profile. Now, suppose we wanted to look at this disk galaxy from a certain orientation angle, and simulate a 3D FITS data cube where we can see the gas that is emitting at different velocities along the line of sight. We can do this using the `PPVCube` class. First, let's assume we rotate our viewing angle 60 degrees from face-on, from along the z-axis into the x-axis. We'll create a normal vector:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "i = 60.*np.pi/180.\n",
-      "L = [np.sin(i),0.0,np.cos(i)]"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Next, we need to specify a field that will serve as the \"intensity\" of the emission that we see. For simplicity, we'll simply choose the gas density as this field, though it could be any field (including derived fields) in principle. We also need to choose the bounds in line-of-sight velocity that the data will be binned into, which is a 4-tuple in the shape of `(vmin, vmax, nbins, units)`, which specifies a linear range of `nbins` velocity bins from `vmin` to `vmax` in units of `units`. We may also optionally specify the dimensions of the data cube with the `dims` argument."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "cube = PPVCube(ds, L, \"density\", (-150.,150.,50,\"km/s\"), dims=200, method=\"sum\")"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Following this, we can now write this cube to a FITS file. The x and y axes of the file can be in length units, which can be optionally specified by `length_unit`:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "cube.write_fits(\"cube.fits\", clobber=True, length_unit=\"kpc\")"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Or one can use the `sky_scale` and `sky_center` keywords to set up the coordinates in RA and Dec:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "sky_scale = (1.0, \"arcsec/kpc\")\n",
-      "sky_center = (30., 45.) # RA, Dec in degrees\n",
-      "cube.write_fits(\"cube_sky.fits\", clobber=True, sky_scale=sky_scale, sky_center=sky_center)"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Now, we'll look at the FITS dataset in yt and look at different slices along the velocity axis, which is the \"z\" axis:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "ds_cube = yt.load(\"cube.fits\")"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "# Specifying no center gives us the center slice\n",
-      "slc = yt.SlicePlot(ds_cube, \"z\", [\"density\"])\n",
-      "slc.show()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "# Picking different velocities for the slices\n",
-      "new_center = ds_cube.domain_center\n",
-      "new_center[2] = ds_cube.spec2pixel(-100.*u.km/u.s)\n",
-      "slc = yt.SlicePlot(ds_cube, \"z\", [\"density\"], center=new_center)\n",
-      "slc.show()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "new_center[2] = ds_cube.spec2pixel(70.0*u.km/u.s)\n",
-      "slc = yt.SlicePlot(ds_cube, \"z\", [\"density\"], center=new_center)\n",
-      "slc.show()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "new_center[2] = ds_cube.spec2pixel(-30.0*u.km/u.s)\n",
-      "slc = yt.SlicePlot(ds_cube, \"z\", [\"density\"], center=new_center)\n",
-      "slc.show()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "If we project all the emission at all the different velocities along the z-axis, we recover the entire disk:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "prj = yt.ProjectionPlot(ds_cube, \"z\", [\"density\"], method=\"sum\")\n",
-      "prj.set_log(\"density\", True)\n",
-      "prj.set_zlim(\"density\", 1.0e-3, 0.2)\n",
-      "prj.show()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "The `thermal_broad` keyword allows one to simulate thermal line broadening based on the temperature, and the `atomic_weight` argument is used to specify the atomic weight of the particle that is doing the emitting."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "cube2 = PPVCube(ds, L, \"density\", (-150.,150.,50,\"km/s\"), dims=200, thermal_broad=True, \n",
-      "                atomic_weight=12.0, method=\"sum\")\n",
-      "cube2.write_fits(\"cube2.fits\", clobber=True, length_unit=\"kpc\")"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Taking a slice of this cube shows:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "ds_cube2 = yt.load(\"cube2.fits\")\n",
-      "new_center = ds_cube2.domain_center\n",
-      "new_center[2] = ds_cube2.spec2pixel(70.0*u.km/u.s)\n",
-      "slc = yt.SlicePlot(ds_cube2, \"z\", [\"density\"], center=new_center)\n",
-      "slc.show()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "new_center[2] = ds_cube2.spec2pixel(-100.*u.km/u.s)\n",
-      "slc = yt.SlicePlot(ds_cube2, \"z\", [\"density\"], center=new_center)\n",
-      "slc.show()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "where we can see the emission has been smeared into this velocity slice from neighboring slices due to the thermal broadening. \n",
-      "\n",
-      "Finally, the \"velocity\" or \"spectral\" axis of the cube can be changed to a different unit, such as wavelength, frequency, or energy: "
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print cube2.vbins[0], cube2.vbins[-1]\n",
-      "cube2.transform_spectral_axis(400.0,\"nm\")\n",
-      "print cube2.vbins[0], cube2.vbins[-1]"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "If a FITS file is now written from the cube, the spectral axis will be in the new units. To reset the spectral axis back to the original velocity units:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "cube2.reset_spectral_axis()\n",
-      "print cube2.vbins[0], cube2.vbins[-1]"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    }
-   ],
-   "metadata": {}
-  }
- ]
-}
\ No newline at end of file
+ "nbformat": 4,
+ "nbformat_minor": 0
+}

This diff is so big that we needed to truncate the remainder.

Repository URL: https://bitbucket.org/yt_analysis/yt/
