[yt-svn] commit/yt: 7 new changesets

commits-noreply at bitbucket.org
Sat Aug 2 12:52:39 PDT 2014


7 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/01e12257c098/
Changeset:   01e12257c098
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-07-17 15:45:24
Summary:     Attempt to fix dark matter masses.
Affected #:  1 file

diff -r dd9c415ca94dcf54849b3760a290a783f5cd44e4 -r 01e12257c0981a6b2a943d9e9e76cb79a2a21e1e yt/frontends/art/io.py
--- a/yt/frontends/art/io.py
+++ b/yt/frontends/art/io.py
@@ -110,6 +110,12 @@
                 if ptb:
                     data[a:a+size] = m
                     a += size
+            # We now divide by NGrid in order to make this match up.  Note that
+            # this means that even when requested in *code units*, we are
+            # giving them as modified by the ng value.  This only works for
+            # dark_matter -- stars are regular matter.
+            if ftype == "darkmatter":
+                data /= self.pf.domain_dimensions.prod()
             tr[field] = data
         elif fname == "particle_index":
             tr[field] = np.arange(idxa, idxb)
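
For context on the hunk above: the normalization divides each raw particle
mass by NGrid, the total number of root-grid cells.  A minimal standalone
sketch of the arithmetic (NumPy only; the dimensions and mass values are
hypothetical stand-ins for numbers read from the ART headers):

    import numpy as np

    # Hypothetical root-grid dimensions and raw per-species masses.
    domain_dimensions = np.array([128, 128, 128])
    raw_masses = np.array([2097152.0, 262144.0, 32768.0])

    # NGrid is the product of the root-grid dimensions.  Dividing by it
    # rescales the masses to line up with the fluid fields, even when the
    # data is requested in code units.
    ngrid = domain_dimensions.prod()  # 128**3 = 2097152
    code_masses = raw_masses / ngrid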


https://bitbucket.org/yt_analysis/yt/commits/5d0744dc25f2/
Changeset:   5d0744dc25f2
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-07-21 22:13:17
Summary:     Move the division for ART particles to end of function.
Affected #:  1 file

diff -r 01e12257c0981a6b2a943d9e9e76cb79a2a21e1e -r 5d0744dc25f28a5a483ae65743e096cce71e831d yt/frontends/art/io.py
--- a/yt/frontends/art/io.py
+++ b/yt/frontends/art/io.py
@@ -110,12 +110,6 @@
                 if ptb:
                     data[a:a+size] = m
                     a += size
-            # We now divide by NGrid in order to make this match up.  Note that
-            # this means that even when requested in *code units*, we are
-            # giving them as modified by the ng value.  This only works for
-            # dark_matter -- stars are regular matter.
-            if ftype == "darkmatter":
-                data /= self.pf.domain_dimensions.prod()
             tr[field] = data
         elif fname == "particle_index":
             tr[field] = np.arange(idxa, idxb)
@@ -145,6 +139,13 @@
             temp[-nstars:] = data
             tr[field] = temp
             del data
+        # We check again, after it's been filled
+        if fname == "particle_mass":
+            # We now divide by NGrid in order to make this match up.  Note that
+            # this means that even when requested in *code units*, we are
+            # giving them as modified by the ng value.  This only works for
+            # dark_matter -- stars are regular matter.
+            tr[field] /= self.pf.domain_dimensions.prod()
         if tr == {}:
             tr = dict((f, np.array([])) for f in fields)
         if self.caching:
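
One reading of this move: the division now runs after the star data has been
spliced into tr[field], so it applies to the whole particle_mass array rather
than only the darkmatter branch of the previous changeset.  A schematic of
the resulting pattern (hypothetical helper, not the actual yt code path):

    import numpy as np

    def fill_particle_masses(dm_masses, star_masses, ngrid):
        # Build the complete array first, then normalize once at the end,
        # so the NGrid division covers every entry uniformly.
        masses = np.concatenate([dm_masses, star_masses])
        return masses / ngrid  # ngrid = domain_dimensions.prod()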


https://bitbucket.org/yt_analysis/yt/commits/300d494baa61/
Changeset:   300d494baa61
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-07-22 19:02:45
Summary:     First pass at transforming ART into particle unions
Affected #:  2 files

diff -r 5d0744dc25f28a5a483ae65743e096cce71e831d -r 300d494baa61d211f33c516d30b8be1e5ddd8507 yt/frontends/art/data_structures.py
--- a/yt/frontends/art/data_structures.py
+++ b/yt/frontends/art/data_structures.py
@@ -39,6 +39,8 @@
     io_registry
 from yt.utilities.lib.misc_utilities import \
     get_box_grids_level
+from yt.data_objects.particle_unions import \
+    ParticleUnion
 
 from yt.frontends.art.definitions import *
 import yt.utilities.fortran_utils as fpu
@@ -104,17 +106,7 @@
         self.particle_field_list = [f for f in particle_fields]
         self.field_list = [("gas", f) for f in fluid_fields]
         # now generate all of the possible particle fields
-        if "wspecies" in self.parameter_file.parameters.keys():
-            wspecies = self.parameter_file.parameters['wspecies']
-            nspecies = len(wspecies)
-            self.parameter_file.particle_types = ["darkmatter", "stars"]
-            for specie in range(nspecies):
-                self.parameter_file.particle_types.append("specie%i" % specie)
-            self.parameter_file.particle_types_raw = tuple(
-                self.parameter_file.particle_types)
-        else:
-            self.parameter_file.particle_types = []
-        for ptype in self.parameter_file.particle_types:
+        for ptype in self.parameter_file.particle_types_raw:
             for pfield in self.particle_field_list:
                 pfn = (ptype, pfield)
                 self.field_list.append(pfn)
@@ -350,6 +342,8 @@
             self.root_level = root_level
             mylog.info("Using root level of %02i", self.root_level)
         # read the particle header
+        self.particle_types = []
+        self.particle_types_raw = ()
         if not self.skip_particles and self._file_particle_header:
             with open(self._file_particle_header, "rb") as fh:
                 particle_header_vals = fpu.read_attrs(
@@ -360,6 +354,10 @@
                 lspecies = np.fromfile(fh, dtype='>i', count=10)
             self.parameters['wspecies'] = wspecies[:n]
             self.parameters['lspecies'] = lspecies[:n]
+            for specie in range(n):
+                self.particle_types.append("specie%i" % specie)
+            self.particle_types_raw = tuple(
+                self.particle_types)
             ls_nonzero = np.diff(lspecies)[:n-1]
             self.star_type = len(ls_nonzero)
             mylog.info("Discovered %i species of particles", len(ls_nonzero))
@@ -392,6 +390,16 @@
         self.gamma = self.parameters["gamma"]
         mylog.info("Max level is %02i", self.max_level)
 
+    def create_field_info(self):
+        super(ARTDataset, self).create_field_info()
+        if "wspecies" in self.parameters:
+            # We create dark_matter and stars unions.
+            ptr = self.particle_types_raw
+            pu = ParticleUnion("darkmatter", list(ptr[:-1]))
+            self.add_particle_union(pu)
+            pu = ParticleUnion("stars", list(ptr[-1:]))
+            self.add_particle_union(pu)
+
     @classmethod
     def _is_valid(self, *args, **kwargs):
         """

diff -r 5d0744dc25f28a5a483ae65743e096cce71e831d -r 300d494baa61d211f33c516d30b8be1e5ddd8507 yt/frontends/art/io.py
--- a/yt/frontends/art/io.py
+++ b/yt/frontends/art/io.py
@@ -177,12 +177,8 @@
     if "specie" in field:
         index = int(field.replace("specie", ""))
         pbool[index] = True
-    elif field == "stars":
-        pbool[-1] = True
-    elif field == "darkmatter":
-        pbool[0:-1] = True
     else:
-        pbool[:] = True
+        raise RuntimeError
     idxa, idxb = idxas[pbool][0], idxbs[pbool][-1]
     return pbool, idxa, idxb
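
For readers unfamiliar with particle unions: a ParticleUnion groups several
named particle types under a single alias, so a request for a
("darkmatter", ...) field fans out to the underlying species.  A rough
sketch of the unions created in create_field_info above (the species tuple
here is hypothetical; the real registration happens via
ds.add_particle_union):

    from yt.data_objects.particle_unions import ParticleUnion

    # Hypothetical raw species, as discovered from the particle header.
    particle_types_raw = ("specie0", "specie1", "specie2", "specie3")

    # Every species except the last is dark matter; the last is stars.
    dm = ParticleUnion("darkmatter", list(particle_types_raw[:-1]))
    stars = ParticleUnion("stars", list(particle_types_raw[-1:]))

    # After ds.add_particle_union(dm), ("darkmatter", "particle_mass")
    # resolves to the concatenation of specie0 through specie2.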
 


https://bitbucket.org/yt_analysis/yt/commits/746c7ca7d01b/
Changeset:   746c7ca7d01b
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-08-02 05:58:44
Summary:     Continue ART refactoring
Affected #:  1 file

diff -r 300d494baa61d211f33c516d30b8be1e5ddd8507 -r 746c7ca7d01b2f1bdcc983577c5276aa0438809e yt/frontends/art/io.py
--- a/yt/frontends/art/io.py
+++ b/yt/frontends/art/io.py
@@ -39,6 +39,11 @@
         self.cache = {}
         self.masks = {}
         super(IOHandlerART, self).__init__(*args, **kwargs)
+        self.ws = self.pf.parameters["wspecies"]
+        self.ls = self.pf.parameters["lspecies"]
+        self.file_particle = self.pf._file_particle_data
+        self.file_stars = self.pf._file_particle_stars
+        self.Nrow = self.pf.parameters["Nrow"]
 
     def _read_fluid_selection(self, chunks, selector, fields, size):
         # Chunks in this case will have affiliated domain subset objects
@@ -70,8 +75,6 @@
         if key in self.masks.keys() and self.caching:
             return self.masks[key]
         pf = self.pf
-        ptmax = self.ws[-1]
-        pbool, idxa, idxb = _determine_field_size(pf, ftype, self.ls, ptmax)
         pstr = 'particle_position_%s'
         x,y,z = [self._get_field((ftype, pstr % ax)) for ax in 'xyz']
         mask = selector.select_points(x, y, z, 0.0)
@@ -81,6 +84,26 @@
         else:
             return mask
 
+    def _read_particle_coords(self, chunks, ptf):
+        for chunk in chunks:
+            for ptype, field_list in sorted(ptf.items()):
+                x = self._get_field((ptype, "particle_position_x"))
+                y = self._get_field((ptype, "particle_position_y"))
+                z = self._get_field((ptype, "particle_position_z"))
+                yield ptype, (x, y, z)
+
+    def _read_particle_fields(self, chunks, ptf, selector):
+        for chunk in chunks:
+            for ptype, field_list in sorted(ptf.items()):
+                x = self._get_field((ptype, "particle_position_x"))
+                y = self._get_field((ptype, "particle_position_y"))
+                z = self._get_field((ptype, "particle_position_z"))
+                mask = selector.select_points(x, y, z, 0.0)
+                if mask is None: continue
+                for field in field_list:
+                    data = self._get_field((ptype, field))
+                    yield (ptype, field), data[mask]
+
     def _get_field(self,  field):
         if field in self.cache.keys() and self.caching:
             mylog.debug("Cached %s", str(field))
@@ -157,11 +180,6 @@
     def _read_particle_selection(self, chunks, selector, fields):
         chunk = chunks.next()
         self.pf = chunk.objs[0].domain.pf
-        self.ws = self.pf.parameters["wspecies"]
-        self.ls = self.pf.parameters["lspecies"]
-        self.file_particle = self.pf._file_particle_data
-        self.file_stars = self.pf._file_particle_stars
-        self.Nrow = self.pf.parameters["Nrow"]
         data = {f:np.array([]) for f in fields}
         for f in fields:
             ftype, fname = f
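
The two generators added in this changeset follow yt's chunked particle-IO
protocol: _read_particle_coords yields raw (x, y, z) positions per particle
type, and _read_particle_fields applies the selector mask and yields one
((ptype, field), data) pair at a time.  A condensed sketch of how a caller
consumes them (the handler, chunks, and selector objects are hypothetical):

    # ptf maps each particle type to the list of requested field names.
    ptf = {"specie0": ["particle_mass", "particle_index"]}

    # Positions for every particle of each requested type, chunk by chunk.
    for ptype, (x, y, z) in handler._read_particle_coords(chunks, ptf):
        print("%s: %d particles" % (ptype, x.size))

    # Selector-masked field values, one (ptype, field) pair per yield.
    for (ptype, field), data in handler._read_particle_fields(
            chunks, ptf, selector):
        print("%s/%s: %s" % (ptype, field, data.shape))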


https://bitbucket.org/yt_analysis/yt/commits/c27861077a46/
Changeset:   c27861077a46
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-08-02 17:03:10
Summary:     Remove vestigial particle reader.
Affected #:  1 file

diff -r 746c7ca7d01b2f1bdcc983577c5276aa0438809e -r c27861077a46db9622931d1af328f3eaae07263a yt/frontends/art/io.py
--- a/yt/frontends/art/io.py
+++ b/yt/frontends/art/io.py
@@ -177,17 +177,6 @@
         else:
             return tr[field]
 
-    def _read_particle_selection(self, chunks, selector, fields):
-        chunk = chunks.next()
-        self.pf = chunk.objs[0].domain.pf
-        data = {f:np.array([]) for f in fields}
-        for f in fields:
-            ftype, fname = f
-            mask = self._get_mask(selector, ftype)
-            arr = self._get_field(f)[mask].astype('f8')
-            data[f] = np.concatenate((arr, data[f]))
-        return data
-
 def _determine_field_size(pf, field, lspecies, ptmax):
     pbool = np.zeros(len(lspecies), dtype="bool")
     idxas = np.concatenate(([0, ], lspecies[:-1]))


https://bitbucket.org/yt_analysis/yt/commits/7b97a8162373/
Changeset:   7b97a8162373
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-08-02 17:07:43
Summary:     Merging, resolving conflicts.
Affected #:  481 files

diff -r c27861077a46db9622931d1af328f3eaae07263a -r 7b97a8162373c88e0ecc2a7ee67e4e8e2d06033d .hgchurn
--- a/.hgchurn
+++ b/.hgchurn
@@ -13,8 +13,13 @@
 drudd = drudd at uchicago.edu
 awetzel = andrew.wetzel at yale.edu
 David Collins (dcollins4096 at gmail.com) = dcollins4096 at gmail.com
+dcollins4096 = dcollins4096 at gmail.com
 dcollins at physics.ucsd.edu = dcollins4096 at gmail.com
 tabel = tabel at slac.stanford.edu
 sername=kayleanelson = kaylea.nelson at yale.edu
 kayleanelson = kaylea.nelson at yale.edu
 jcforbes at ucsc.edu = jforbes at ucolick.org
+ngoldbau at ucsc.edu = goldbaum at ucolick.org
+biondo at wisc.edu = Biondo at wisc.edu
+samgeen at googlemail.com = samgeen at gmail.com
+fbogert = fbogert at ucsc.edu
\ No newline at end of file

diff -r c27861077a46db9622931d1af328f3eaae07263a -r 7b97a8162373c88e0ecc2a7ee67e4e8e2d06033d CREDITS
--- a/CREDITS
+++ b/CREDITS
@@ -4,7 +4,9 @@
                 Tom Abel (tabel at stanford.edu)
                 Gabriel Altay (gabriel.altay at gmail.com)
                 Kenza Arraki (karraki at gmail.com)
+                Elliott Biondo (biondo at wisc.edu)
                 Alex Bogert (fbogert at ucsc.edu)
+                Pengfei Chen (madcpf at gmail.com)
                 David Collins (dcollins4096 at gmail.com)
                 Brian Crosby (crosby.bd at gmail.com)
                 Andrew Cunningham (ajcunn at gmail.com)

diff -r c27861077a46db9622931d1af328f3eaae07263a -r 7b97a8162373c88e0ecc2a7ee67e4e8e2d06033d doc/cheatsheet.tex
--- a/doc/cheatsheet.tex
+++ b/doc/cheatsheet.tex
@@ -3,7 +3,7 @@
 \usepackage{calc}
 \usepackage{ifthen}
 \usepackage[landscape]{geometry}
-\usepackage[colorlinks = true, linkcolor=blue, citecolor=blue, urlcolor=blue]{hyperref}
+\usepackage[hyphens]{url}
 
 % To make this come out properly in landscape mode, do one of the following
 % 1.
@@ -101,9 +101,13 @@
 Documentation \url{http://yt-project.org/doc/index.html}.
 Need help? Start here \url{http://yt-project.org/doc/help/} and then
 try the IRC chat room \url{http://yt-project.org/irc.html},
-or the mailing list \url{http://lists.spacepope.org/listinfo.cgi/yt-users-spacepope.org}.
-{\bf Installing yt:} The easiest way to install yt is to use the installation script
-found on the yt homepage or the docs linked above.
+or the mailing list \url{http://lists.spacepope.org/listinfo.cgi/yt-users-spacepope.org}. \\
+
+\subsection{Installing yt} The easiest way to install yt is to use the
+installation script found on the yt homepage or the docs linked above.  If you
+already have python set up with \texttt{numpy}, \texttt{scipy},
+\texttt{matplotlib}, \texttt{h5py}, and \texttt{cython}, you can also use
+\texttt{pip install yt}
 
 \subsection{Command Line yt}
 yt, and its convenience functions, are launched from a command line prompt.
@@ -118,9 +122,8 @@
 \texttt{yt stats} {\it dataset} \textemdash\ Print stats of a dataset. \\
 \texttt{yt update} \textemdash\ Update yt to most recent version.\\
 \texttt{yt update --all} \textemdash\ Update yt and dependencies to most recent version. \\
-\texttt{yt instinfo} \textemdash\ yt installation information. \\
+\texttt{yt version} \textemdash\ yt installation information. \\
 \texttt{yt notebook} \textemdash\ Run the IPython notebook server. \\
-\texttt{yt serve} ({\it dataset}) \textemdash\  Run yt-specific web GUI ({\it dataset} is optional).\\
 \texttt{yt upload\_image} {\it image.png} \textemdash\ Upload PNG image to imgur.com. \\
 \texttt{yt upload\_notebook} {\it notebook.nb} \textemdash\ Upload IPython notebook to hub.yt-project.org.\\
 \texttt{yt plot} {\it dataset} \textemdash\ Create a set of images.\\
@@ -132,16 +135,8 @@
  paste.yt-project.org. \\ 
 \texttt{yt pastebin\_grab} {\it identifier} \textemdash\ Print content of pastebin to
  STDOUT. \\
- \texttt{yt hub\_register} \textemdash\ Register with
-hub.yt-project.org. \\
-\texttt{yt hub\_submit} \textemdash\ Submit hg repo to
-hub.yt-project.org. \\
-\texttt{yt bootstrap\_dev} \textemdash\ Bootstrap a yt 
-development environment. \\
 \texttt{yt bugreport} \textemdash\ Report a yt bug. \\
 \texttt{yt hop} {\it dataset} \textemdash\  Run hop on a dataset. \\
-\texttt{yt rpdb} \textemdash\ Connect to running rpd 
- session. 
 
 \subsection{yt Imports}
 In order to use yt, Python must load the relevant yt modules into memory.
@@ -149,37 +144,40 @@
 used as part of a script.
 \newlength{\MyLen}
 \settowidth{\MyLen}{\texttt{letterpaper}/\texttt{a4paper} \ }
-\texttt{from yt.mods import \textasteriskcentered}  \textemdash\ 
-Load base yt  modules. \\
+\texttt{import yt}  \textemdash\ 
+Load yt. \\
 \texttt{from yt.config import ytcfg}  \textemdash\ 
 Used to set yt configuration options.
- If used, must be called before importing any other module.\\
-\texttt{from yt.analysis\_modules.api import \textasteriskcentered}   \textemdash\ 
-Load all yt analysis modules. \\
+If used, must be called before importing any other module.\\
 \texttt{from yt.analysis\_modules.\emph{halo\_finding}.api import \textasteriskcentered}  \textemdash\ 
 Load halo finding modules. Other modules
 are loaded in a similar way by swapping the 
 {\em emphasized} text.
 See the \textbf{Analysis Modules} section for a listing and short descriptions of each.
 
-\subsection{Numpy Arrays}
-Simulation data in yt is returned in Numpy arrays. The Numpy package provides a wealth of built-in
-functions that operate on Numpy arrays. Here is a very brief list of some useful ones.
-Please see \url{http://docs.scipy.org/doc/numpy/reference/} for the full
-numpy documentation.\\
-\settowidth{\MyLen}{\texttt{multicol} }
+\subsection{YTArray}
+Simulation data in yt is returned as a YTArray.  YTArray is a numpy array that
+has unit data attached to it and can automatically handle unit conversions and
+detect unit errors. Just like a numpy array, YTArray provides a wealth of
+built-in functions to calculate properties of the data in the array. Here is a
+very brief list of some useful ones.
+\settowidth{\MyLen}{\texttt{multicol} }\\
+\texttt{v = a.in\_cgs()} \textemdash\ Return the array in CGS units \\
+\texttt{v = a.in\_units('Msun/pc**3')} \textemdash\ Return the array in solar masses per cubic parsec \\ 
 \texttt{v = a.max(), a.min()} \textemdash\ Return maximum, minimum of \texttt{a}. \\
-\texttt{index = a.argmax(), a.argmin()} \textemdash\ Return index of max, 
+\texttt{index = a.argmax(), a.argmin()} \textemdash\ Return index of max,
 min value of \texttt{a}.\\
 \texttt{v = a[}{\it index}\texttt{]} \textemdash\ Select a single value from \texttt{a} at location {\it index}.\\
-\texttt{b = a[}{\it i:j}\texttt{]} \textemdash\ Select the slice of values from \texttt{a} between
+\texttt{b = a[}{\it i:j}\texttt{]} \textemdash\ Select the slice of values from
+\texttt{a} between
 locations {\it i} to {\it j-1} saved to a new Numpy array \texttt{b} with length {\it j-i}. \\
-\texttt{sel = (a > const)}  \textemdash\ Create a new boolean Numpy array \texttt{sel}, of the same shape as \texttt{a},
+\texttt{sel = (a > const)} \textemdash\ Create a new boolean Numpy array
+\texttt{sel}, of the same shape as \texttt{a},
 that marks which values of \texttt{a > const}. Other operators (e.g. \textless, !=, \%) work as well.\\
-\texttt{b = a[sel]} \textemdash\ Create a new Numpy array \texttt{b} made up of elements from \texttt{a} that correspond to elements of \texttt{sel}
+\texttt{b = a[sel]} \textemdash\ Create a new Numpy array \texttt{b} made up of
+elements from \texttt{a} that correspond to elements of \texttt{sel}
 that are {\it True}. In the above example \texttt{b} would be all elements of \texttt{a} that are greater than \texttt{const}.\\
-\texttt{a.dump({\it filename.dat})} \textemdash\ Save \texttt{a} to the binary file {\it filename.dat}.\\
-\texttt{a = np.load({\it filename.dat})} \textemdash\ Load the contents of {\it filename.dat} into \texttt{a}.
+\texttt{a.write\_hdf5({\it filename.h5})} \textemdash\ Save \texttt{a} to the hdf5 file {\it filename.h5}.\\
 
 \subsection{IPython Tips}
 \settowidth{\MyLen}{\texttt{multicol} }
@@ -196,6 +194,7 @@
 \texttt{\%hist} \textemdash\ Print recent command history.\\
 \texttt{\%quickref} \textemdash\ Print IPython quick reference.\\
 \texttt{\%pdb} \textemdash\ Automatically enter the Python debugger at an exception.\\
+\texttt{\%debug} \textemdash\ Drop into a debugger at the location of the last unhandled exception. \\
 \texttt{\%time, \%timeit} \textemdash\ Find running time of expressions for benchmarking.\\
 \texttt{\%lsmagic} \textemdash\ List all available IPython magics. Hint: \texttt{?} works with magics.\\
 
@@ -208,68 +207,52 @@
 After that, simulation data is generally accessed in yt using {\it Data Containers} which are Python objects
 that define a region of simulation space from which data should be selected.
 \settowidth{\MyLen}{\texttt{multicol} }
-\texttt{pf = load(}{\it dataset}\texttt{)} \textemdash\   Reference a single snapshot.\\
-\texttt{dd = pf.h.all\_data()} \textemdash\ Select the entire volume.\\
-\texttt{a = dd[}{\it field\_name}\texttt{]} \textemdash\ Saves the contents of {\it field} into the
-numpy array \texttt{a}. Similarly for other data containers.\\
-\texttt{pf.h.field\_list} \textemdash\ A list of available fields in the snapshot. \\
-\texttt{pf.h.derived\_field\_list} \textemdash\ A list of available derived fields
+\texttt{ds = yt.load(}{\it dataset}\texttt{)} \textemdash\   Reference a single snapshot.\\
+\texttt{dd = ds.all\_data()} \textemdash\ Select the entire volume.\\
+\texttt{a = dd[}{\it field\_name}\texttt{]} \textemdash\ Copies the contents of {\it field} into the
+YTArray \texttt{a}. Similarly for other data containers.\\
+\texttt{ds.field\_list} \textemdash\ A list of available fields in the snapshot. \\
+\texttt{ds.derived\_field\_list} \textemdash\ A list of available derived fields
 in the snapshot. \\
-\texttt{val, loc = pf.h.find\_max("Density")} \textemdash\ Find the \texttt{val}ue of
+\texttt{val, loc = ds.find\_max("Density")} \textemdash\ Find the \texttt{val}ue of
 the maximum of the field \texttt{Density} and its \texttt{loc}ation. \\
-\texttt{sp = pf.sphere(}{\it cen}\texttt{,}{\it radius}\texttt{)} \textemdash\   Create a spherical data 
+\texttt{sp = ds.sphere(}{\it cen}\texttt{,}{\it radius}\texttt{)} \textemdash\   Create a spherical data 
 container. {\it cen} may be a coordinate, or ``max'' which 
 centers on the max density point. {\it radius} may be a float in 
 code units or a tuple of ({\it length, unit}).\\
 
-\texttt{re = pf.region({\it cen}, {\it left edge}, {\it right edge})} \textemdash\ Create a
+\texttt{re = ds.region({\it cen}, {\it left edge}, {\it right edge})} \textemdash\ Create a
 rectilinear data container. {\it cen} is required but not used.
 {\it left} and {\it right edge} are coordinate values that define the region.
 
-\texttt{di = pf.disk({\it cen}, {\it normal}, {\it radius}, {\it height})} \textemdash\ 
+\texttt{di = ds.disk({\it cen}, {\it normal}, {\it radius}, {\it height})} \textemdash\ 
 Create a cylindrical data container centered at {\it cen} along the 
 direction set by {\it normal},with total length
  2$\times${\it height} and with radius {\it radius}. \\
  
- \texttt{bl = pf.boolean({\it constructor})} \textemdash\ Create a boolean data
- container. {\it constructor} is a list of pre-defined non-boolean 
- data containers with nested boolean logic using the
- ``AND'', ``NOT'', or ``OR'' operators. E.g. {\it constructor=}
- {\it [sp, ``NOT'', (di, ``OR'', re)]} gives a volume defined
- by {\it sp} minus the patches covered by {\it di} and {\it re}.\\
- 
-\texttt{pf.h.save\_object(sp, {\it ``sp\_for\_later''})} \textemdash\ Save an object (\texttt{sp}) for later use.\\
-\texttt{sp = pf.h.load\_object({\it ``sp\_for\_later''})} \textemdash\ Recover a saved object.\\
+\texttt{ds.save\_object(sp, {\it ``sp\_for\_later''})} \textemdash\ Save an object (\texttt{sp}) for later use.\\
+\texttt{sp = ds.load\_object({\it ``sp\_for\_later''})} \textemdash\ Recover a saved object.\\
 
 
-\subsection{Defining New Fields \& Quantities}
-\texttt{yt} expects on-disk fields, fields generated on-demand and in-memory. Quantities reduce a field (e.g. "Density") defined over an object (e.g. "sphere") to get a single value (e.g. "Mass"). \\
-\texttt{def \_MetalMassMsun({\it field},{\it data})}\\
-\texttt{\hspace{4 mm} return data["Metallicity"]*data["CellMassMsun"]}\\
-\texttt{add\_field("MetalMassMsun",function=\_MetalMassMsun)}\\
-Define a new quantity; note the first function operates on grids and data objects and the second on the results of the first. \\
-\texttt{def \_TotalMass(data): }\\
-\texttt{\hspace{4 mm} baryon\_mass = data["CellMassMsun"].sum()}\\
-\texttt{\hspace{4 mm} particle\_mass = data["ParticleMassMsun"].sum()}\\
-\texttt{\hspace{4 mm} return baryon\_mass, particle\_mass}\\
-\texttt{def \_combTotalMass(data, baryon\_mass, particle\_mass):}\\
-\texttt{\hspace{4 mm} return baryon\_mass.sum() + particle\_mass.sum()}\\
-\texttt{add\_quantity("TotalMass", function=\_TotalMass,}\\
-\texttt{\hspace{4 mm} combine\_function=\_combTotalMass, n\_ret = 2)}\\
-
-
+\subsection{Defining New Fields}
+\texttt{yt} expects on-disk fields, fields generated on-demand and in-memory. 
+Field can either be created before a dataset is loaded using \texttt{add\_field}:
+\texttt{def \_metal\_mass({\it field},{\it data})}\\
+\texttt{\hspace{4 mm} return data["metallicity"]*data["cell\_mass"]}\\
+\texttt{add\_field("metal\_mass", units='g', function=\_metal\_mass)}\\
+Or added to an existing dataset using \texttt{ds.add\_field}:
+\texttt{ds.add\_field("metal\_mass", units='g', function=\_metal\_mass)}\\
 
 \subsection{Slices and Projections}
 \settowidth{\MyLen}{\texttt{multicol} }
-\texttt{slc = SlicePlot(pf, {\it axis}, {\it field}, {\it center=}, {\it width=}, {\it weight\_field=}, {\it additional parameters})} \textemdash\ Make a slice plot
-perpendicular to {\it axis} of {\it field} weighted by {\it weight\_field} at (code-units) {\it center} with 
-{\it width} in code units or a (value, unit) tuple. Hint: try {\it SlicePlot?} in IPython to see additional parameters.\\
+\texttt{slc = yt.SlicePlot(ds, {\it axis or normal vector}, {\it field}, {\it center=}, {\it width=}, {\it weight\_field=}, {\it additional parameters})} \textemdash\ Make a slice plot
+perpendicular to {\it axis} (specified via 'x', 'y', or 'z') or a normal vector for an off-axis slice of {\it field} weighted by {\it weight\_field} at (code-units) {\it center} with 
+{\it width} in code units or a (value, unit) tuple. Hint: try {\it yt.SlicePlot?} in IPython to see additional parameters.\\
 \texttt{slc.save({\it file\_prefix})} \textemdash\ Save the slice to a png with name prefix {\it file\_prefix}.
 \texttt{.save()} works similarly for the commands below.\\
 
-\texttt{prj = ProjectionPlot(pf, {\it axis}, {\it field}, {\it addit. params})} \textemdash\ Make a projection. \\
-\texttt{prj = OffAxisSlicePlot(pf, {\it normal}, {\it fields}, {\it center=}, {\it width=}, {\it depth=},{\it north\_vector=},{\it weight\_field=})} \textemdash Make an off-axis slice. Note this takes an array of fields. \\
-\texttt{prj = OffAxisProjectionPlot(pf, {\it normal}, {\it fields}, {\it center=}, {\it width=}, {\it depth=},{\it north\_vector=},{\it weight\_field=})} \textemdash Make an off axis projection. Note this takes an array of fields. \\
+\texttt{prj = yt.ProjectionPlot(ds, {\it axis}, {\it field}, {\it addit. params})} \textemdash\ Make a projection. \\
+\texttt{prj = yt.OffAxisProjectionPlot(ds, {\it normal}, {\it fields}, {\it center=}, {\it width=}, {\it depth=},{\it north\_vector=},{\it weight\_field=})} \textemdash Make an off axis projection. Note this takes an array of fields. \\
 
 \subsection{Plot Annotations}
 \settowidth{\MyLen}{\texttt{multicol} }
@@ -299,51 +282,37 @@
 The \texttt{my\_plugins.py} file \textemdash\ Add functions, derived fields, constants, or other commonly-used Python code to yt.
 
 
-
-
 \subsection{Analysis Modules}
 \settowidth{\MyLen}{\texttt{multicol}}
 The import name for each module is listed at the end of each description (see \textbf{yt Imports}).
 
 \texttt{Absorption Spectrum} \textemdash\ (\texttt{absorption\_spectrum}). \\
 \texttt{Clump Finder} \textemdash\ Find clumps defined by density thresholds (\texttt{level\_sets}). \\
-\texttt{Coordinate Transformation} \textemdash\ (\texttt{coordinate\_transformation}). \\
 \texttt{Halo Finding} \textemdash\ Locate halos of dark matter particles (\texttt{halo\_finding}). \\
-\texttt{Halo Mass Function} \textemdash\ Find halo mass functions from data and from theory (\texttt{halo\_mass\_function}). \\
-\texttt{Halo Profiling} \textemdash\ Profile and project multiple halos (\texttt{halo\_profiler}). \\
-\texttt{Halo Merger Tree} \textemdash\ Create a database of halo mergers (\texttt{halo\_merger\_tree}). \\
 \texttt{Light Cone Generator} \textemdash\ Stitch datasets together to perform analysis over cosmological volumes. \\
 \texttt{Light Ray Generator} \textemdash\ Analyze the path of light rays.\\
-\texttt{Radial Column Density} \textemdash\ Calculate column densities around a point (\texttt{radial\_column\_density}). \\
 \texttt{Rockstar Halo Finding} \textemdash\ Locate halos of dark matter using the Rockstar halo finder (\texttt{halo\_finding.rockstar}). \\
 \texttt{Star Particle Analysis} \textemdash\ Analyze star formation history and assemble spectra (\texttt{star\_analysis}). \\
 \texttt{Sunrise Exporter} \textemdash\ Export data to the sunrise visualization format (\texttt{sunrise\_export}). \\
-\texttt{Two Point Functions} \textemdash\ Two point correlations (\texttt{two\_point\_functions}). \\
 
 
 \subsection{Parallel Analysis}
-\settowidth{\MyLen}{\texttt{multicol}}
-Nearly all of yt is parallelized using MPI.
-The {\it mpi4py} package must be installed for parallelism in yt.
-To install {\it pip install mpi4py} on the command line usually works.
+\settowidth{\MyLen}{\texttt{multicol}} 
+Nearly all of yt is parallelized using
+MPI.  The {\it mpi4py} package must be installed for parallelism in yt.  To
+install {\it pip install mpi4py} on the command line usually works.
 Execute python in parallel similar to this:\\
-{\it mpirun -n 12 python script.py --parallel}\\
-This command may differ for each system on which you use yt;
-please consult the system documentation for details on how to run parallel applications.
+{\it mpirun -n 12 python script.py}\\
+The file \texttt{script.py} must call the \texttt{yt.enable\_parallelism()} to
+turn on yt's parallelism.  If this doesn't happen, all cores will execute the
+same serial yt script.  This command may differ for each system on which you use
+yt; please consult the system documentation for details on how to run parallel
+applications.
 
-\texttt{from yt.pmods import *} \textemdash\ Load yt faster when in parallel.
-This replaces the usual \texttt{from yt.mods import *}.\\
 \texttt{parallel\_objects()} \textemdash\ A way to parallelize analysis over objects
 (such as halos or clumps).\\
 
 
-\subsection{Pre-Installed Versions}
-\settowidth{\MyLen}{\texttt{multicol}}
-yt is pre-installed on several supercomputer systems.
-
-\textbf{NICS Kraken} \textemdash\ {\it module load yt} \\
-
-
 \subsection{Mercurial}
 \settowidth{\MyLen}{\texttt{multicol}}
 Please see \url{http://mercurial.selenic.com/} for the full Mercurial documentation.
@@ -365,8 +334,7 @@
 \subsection{FAQ}
 \settowidth{\MyLen}{\texttt{multicol}}
 
-\texttt{pf.field\_info[`field'].take\_log = False} \textemdash\ When plotting \texttt{field}, do not take log.
-Must enter \texttt{pf.h} before this command. \\
+\texttt{slc.set\_log('field', False)} \textemdash\ When plotting \texttt{field}, use linear scaling instead of log scaling.
 
 
 %\rule{0.3\linewidth}{0.25pt}
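
Pulling the updated cheatsheet idioms together into one runnable yt-3.0
snippet (the dataset path is the Enzo_64 sample used elsewhere in this
changeset; substitute your own):

    import yt

    ds = yt.load("Enzo_64/DD0043/data0043")  # reference a single snapshot
    dd = ds.all_data()                       # select the entire volume
    print(sorted(ds.field_list))             # native on-disk fields

    # Spherical container centered on the maximum density point.
    sp = ds.sphere("max", (100.0, "kpc"))

    # Slice plot perpendicular to the z axis; save() writes a png.
    slc = yt.SlicePlot(ds, "z", "density")
    slc.set_log("density", False)            # linear scaling, per the FAQ entry
    slc.save("my_slice")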

diff -r c27861077a46db9622931d1af328f3eaae07263a -r 7b97a8162373c88e0ecc2a7ee67e4e8e2d06033d doc/coding_styleguide.txt
--- a/doc/coding_styleguide.txt
+++ b/doc/coding_styleguide.txt
@@ -49,7 +49,7 @@
  * Don't create a new class to replicate the functionality of an old class --
    replace the old class.  Too many options makes for a confusing user
    experience.
- * Parameter files are a last resort.
+ * Parameter files external to yt are a last resort.
  * The usage of the **kwargs construction should be avoided.  If they cannot
    be avoided, they must be explained, even if they are only to be passed on to
    a nested function.
@@ -61,7 +61,7 @@
    * Hard-coding parameter names that are the same as those in Enzo.  The
      following translation table should be of some help.  Note that the
      parameters are now properties on a Dataset subclass: you access them
-     like pf.refine_by .
+     like ds.refine_by .
      * RefineBy => refine_by
      * TopGridRank => dimensionality
      * TopGridDimensions => domain_dimensions

diff -r c27861077a46db9622931d1af328f3eaae07263a -r 7b97a8162373c88e0ecc2a7ee67e4e8e2d06033d doc/docstring_example.txt
--- a/doc/docstring_example.txt
+++ b/doc/docstring_example.txt
@@ -73,7 +73,7 @@
     Examples
     --------
     These are written in doctest format, and should illustrate how to
-    use the function.  Use the variables 'pf' for the parameter file, 'pc' for
+    use the function.  Use the variables 'ds' for the dataset, 'pc' for
     a plot collection, 'c' for a center, and 'L' for a vector. 
 
     >>> a=[1,2,3]

diff -r c27861077a46db9622931d1af328f3eaae07263a -r 7b97a8162373c88e0ecc2a7ee67e4e8e2d06033d doc/docstring_idioms.txt
--- a/doc/docstring_idioms.txt
+++ b/doc/docstring_idioms.txt
@@ -19,7 +19,7 @@
 useful variable names that correspond to specific instances that the user is
 presupposed to have created.
 
-   * `pf`: a parameter file, loaded successfully
+   * `ds`: a dataset, loaded successfully
    * `sp`: a sphere
    * `c`: a 3-component "center"
    * `L`: a 3-component vector that corresponds to either angular momentum or a

diff -r c27861077a46db9622931d1af328f3eaae07263a -r 7b97a8162373c88e0ecc2a7ee67e4e8e2d06033d doc/helper_scripts/parse_cb_list.py
--- a/doc/helper_scripts/parse_cb_list.py
+++ b/doc/helper_scripts/parse_cb_list.py
@@ -2,7 +2,7 @@
 import inspect
 from textwrap import TextWrapper
 
-pf = load("RD0005-mine/RedshiftOutput0005")
+ds = load("RD0005-mine/RedshiftOutput0005")
 
 output = open("source/visualizing/_cb_docstrings.inc", "w")
 

diff -r c27861077a46db9622931d1af328f3eaae07263a -r 7b97a8162373c88e0ecc2a7ee67e4e8e2d06033d doc/helper_scripts/parse_dq_list.py
--- a/doc/helper_scripts/parse_dq_list.py
+++ b/doc/helper_scripts/parse_dq_list.py
@@ -2,7 +2,7 @@
 import inspect
 from textwrap import TextWrapper
 
-pf = load("RD0005-mine/RedshiftOutput0005")
+ds = load("RD0005-mine/RedshiftOutput0005")
 
 output = open("source/analyzing/_dq_docstrings.inc", "w")
 
@@ -29,7 +29,7 @@
                             docstring = docstring))
                             #docstring = "\n".join(tw.wrap(docstring))))
 
-dd = pf.h.all_data()
+dd = ds.all_data()
 for n,func in sorted(dd.quantities.functions.items()):
     print n, func
     write_docstring(output, n, func[1])

diff -r c27861077a46db9622931d1af328f3eaae07263a -r 7b97a8162373c88e0ecc2a7ee67e4e8e2d06033d doc/helper_scripts/parse_object_list.py
--- a/doc/helper_scripts/parse_object_list.py
+++ b/doc/helper_scripts/parse_object_list.py
@@ -2,7 +2,7 @@
 import inspect
 from textwrap import TextWrapper
 
-pf = load("RD0005-mine/RedshiftOutput0005")
+ds = load("RD0005-mine/RedshiftOutput0005")
 
 output = open("source/analyzing/_obj_docstrings.inc", "w")
 
@@ -27,7 +27,7 @@
     f.write(template % dict(clsname = clsname, sig = sig, clsproxy=clsproxy,
                             docstring = 'physical-object-api'))
 
-for n,c in sorted(pf.h.__dict__.items()):
+for n,c in sorted(ds.__dict__.items()):
     if hasattr(c, '_con_args'):
         print n
         write_docstring(output, n, c)

diff -r c27861077a46db9622931d1af328f3eaae07263a -r 7b97a8162373c88e0ecc2a7ee67e4e8e2d06033d doc/helper_scripts/show_fields.py
--- a/doc/helper_scripts/show_fields.py
+++ b/doc/helper_scripts/show_fields.py
@@ -1,10 +1,55 @@
 import inspect
 from yt.mods import *
+from yt.testing import *
+import numpy as np
+from yt.utilities.cosmology import \
+     Cosmology
+from yt.utilities.definitions import \
+    mpc_conversion, sec_conversion
+from yt.frontends.stream.fields import \
+    StreamFieldInfo
+from yt.fields.derived_field import NullFunc
+from yt.units.yt_array import YTArray, Unit
 
+fields, units = [], []
 
-def islambda(f):
-    return inspect.isfunction(f) and \
-           f.__name__ == (lambda: True).__name__
+for fname, (code_units, aliases, dn) in StreamFieldInfo.known_other_fields:
+    fields.append(("gas", fname))
+    units.append(code_units)
+base_ds = fake_random_ds(4, fields = fields, units = units)
+base_ds.index
+base_ds.cosmological_simulation = 1
+base_ds.cosmology = Cosmology()
+from yt.config import ytcfg
+ytcfg["yt","__withintesting"] = "True"
+np.seterr(all = 'ignore')
+
+def _strip_ftype(field):
+    if not isinstance(field, tuple):
+        return field
+    elif field[0] == "all":
+        return field
+    return field[1]
+
+np.random.seed(int(0x4d3d3d3))
+units = [base_ds._get_field_info(*f).units for f in fields]
+fields = [_strip_ftype(f) for f in fields]
+ds = fake_random_ds(16, fields = fields, units = units)
+ds.parameters["HydroMethod"] = "streaming"
+ds.parameters["EOSType"] = 1.0
+ds.parameters["EOSSoundSpeed"] = 1.0
+ds.conversion_factors["Time"] = 1.0
+ds.conversion_factors.update( dict((f, 1.0) for f in fields) )
+ds.gamma = 5.0/3.0
+ds.current_redshift = 0.0001
+ds.cosmological_simulation = 1
+ds.hubble_constant = 0.7
+ds.omega_matter = 0.27
+ds.omega_lambda = 0.73
+ds.cosmology = Cosmology(hubble_constant=ds.hubble_constant,
+                         omega_matter=ds.omega_matter,
+                         omega_lambda=ds.omega_lambda,
+                         unit_registry=ds.unit_registry)
 
 header = r"""
 .. _field-list:
@@ -12,57 +57,47 @@
 Field List
 ==========
 
-This is a list of all fields available in ``yt``.  It has been organized by the
-type of code that each field is supported by.  "Universal" fields are available
-everywhere, "Enzo" fields in Enzo datasets, "Orion" fields in Orion datasets,
-and so on.
+This is a list of many of the fields available in yt.  We have attempted to
+include most of the fields that are accessible through the plugin system,
+however it is possible to generate many more permutations, particularly through
+vector operations.  For more information about the fields framework,
+see :ref:`fields`.
 
-Try using the ``pf.field_list`` and ``pf.derived_field_list`` to view the
+Try using the ``ds.field_list`` and ``ds.derived_field_list`` to view the
 native and derived fields available for your dataset respectively. For example
 to display the native fields in alphabetical order:
 
 .. notebook-cell::
 
   from yt.mods import *
-  pf = load("Enzo_64/DD0043/data0043")
-  for i in sorted(pf.field_list):
+  ds = load("Enzo_64/DD0043/data0043")
+  for i in sorted(ds.field_list):
     print i
 
-.. note:: Universal fields will be overridden by a code-specific field.
-
-.. rubric:: Table of Contents
-
-.. contents::
-   :depth: 2
-   :local:
-   :backlinks: none
+To figure out out what all of the field types here mean, see 
+:ref:`known-field-types`.
 """
 
 print header
 
 seen = []
 
-
 def print_all_fields(fl):
     for fn in sorted(fl):
         df = fl[fn]
         f = df._function
-        cv = df._convert_function
-        if [f, cv] in seen:
-            continue
-        seen.append([f, cv])
-        print "%s" % (df.name)
-        print "+" * len(df.name)
+        s = "%s" % (df.name,)
+        print s
+        print "+" * len(s)
         print
-        if len(df._units) > 0:
-            print "   * Units: :math:`%s`" % (df._units)
-        if len(df._projected_units) > 0:
-            print "   * Projected Units: :math:`%s`" % (df._projected_units)
+        if len(df.units) > 0:
+            u = Unit(df.units, registry = ds.unit_registry)
+            print "   * Units: :math:`%s`" % (u.latex_representation())
         print "   * Particle Type: %s" % (df.particle_type)
         print
         print "**Field Source**"
         print
-        if islambda(f):
+        if f == NullFunc:
             print "No source available."
             print
             continue
@@ -72,66 +107,6 @@
             for line in inspect.getsource(f).split("\n"):
                 print "  " + line
             print
-        print "**Convert Function Source**"
-        print
-        if islambda(cv):
-            print "No source available."
-            print
-            continue
-        else:
-            print ".. code-block:: python"
-            print
-            for line in inspect.getsource(cv).split("\n"):
-                print "  " + line
-            print
 
-
-print "Universal Field List"
-print "--------------------"
-print
-print_all_fields(FieldInfo)
-
-print "Enzo-Specific Field List"
-print "------------------------"
-print
-print_all_fields(EnzoFieldInfo)
-
-print "Orion-Specific Field List"
-print "-------------------------"
-print
-print_all_fields(OrionFieldInfo)
-
-print "FLASH-Specific Field List"
-print "-------------------------"
-print
-print_all_fields(FLASHFieldInfo)
-
-print "Athena-Specific Field List"
-print "--------------------------"
-print
-print_all_fields(AthenaFieldInfo)
-
-print "Nyx-Specific Field List"
-print "-----------------------"
-print
-print_all_fields(NyxFieldInfo)
-
-print "Chombo-Specific Field List"
-print "--------------------------"
-print
-print_all_fields(ChomboFieldInfo)
-
-print "Pluto-Specific Field List"
-print "--------------------------"
-print
-print_all_fields(PlutoFieldInfo)
-
-print "Grid-Data-Format-Specific Field List"
-print "------------------------------------"
-print
-print_all_fields(GDFFieldInfo)
-
-print "Generic-Format (Stream) Field List"
-print "----------------------------------"
-print
-print_all_fields(StreamFieldInfo)
+ds.index
+print_all_fields(ds.field_info)
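
The rewritten helper leans on yt.testing.fake_random_ds to synthesize an
in-memory dataset whose fields it can enumerate.  A minimal sketch of that
pattern (the field and unit choices here are hypothetical):

    from yt.testing import fake_random_ds

    # Build a small in-memory dataset with one gas field, then walk its
    # field_info the same way show_fields.py does above.
    ds = fake_random_ds(16, fields=[("gas", "density")], units=["g/cm**3"])
    ds.index  # force field detection
    for fn in sorted(ds.field_info):
        df = ds.field_info[fn]
        print("%s  units=%s  particle=%s" % (df.name, df.units,
                                             df.particle_type))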

diff -r c27861077a46db9622931d1af328f3eaae07263a -r 7b97a8162373c88e0ecc2a7ee67e4e8e2d06033d doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -588,7 +588,7 @@
 FREETYPE_VER='freetype-2.4.12'
 H5PY='h5py-2.1.3'
 HDF5='hdf5-1.8.11'
-IPYTHON='ipython-1.1.0'
+IPYTHON='ipython-2.1.0'
 LAPACK='lapack-3.4.2'
 PNG=libpng-1.6.3
 MATPLOTLIB='matplotlib-1.3.0'
@@ -616,7 +616,7 @@
 echo '609a68a3675087e0cc95268574f31e104549daa48efe15a25a33b8e269a93b4bd160f4c3e8178dca9c950ef5ca514b039d6fd1b45db6af57f25342464d0429ce  freetype-2.4.12.tar.gz' > freetype-2.4.12.tar.gz.sha512
 echo '2eb7030f8559ff5cb06333223d98fda5b3a663b6f4a026949d1c423aa9a869d824e612ed5e1851f3bf830d645eea1a768414f73731c23ab4d406da26014fe202  h5py-2.1.3.tar.gz' > h5py-2.1.3.tar.gz.sha512
 echo 'e9db26baa297c8ed10f1ca4a3fcb12d6985c6542e34c18d48b2022db73014f054c8b8434f3df70dcf44631f38b016e8050701d52744953d0fced3272d7b6b3c1  hdf5-1.8.11.tar.gz' > hdf5-1.8.11.tar.gz.sha512
-echo '46b8ae25df2ced674b3b3629070aafac955ba3aa2a5e749f8e63ef1f459126e1c4a9a03661406151622590a90c73b527716ad71bc626f57f52b51abfae0f43ca  ipython-1.1.0.tar.gz' > ipython-1.1.0.tar.gz.sha512
+echo '68c15f6402cacfd623f8e2b70c22d06541de3616fdb2d502ce93cd2fdb4e7507bb5b841a414a4123264221ee5ffb0ebefbb8541f79e647fcb9f73310b4c2d460  ipython-2.1.0.tar.gz' > ipython-2.1.0.tar.gz.sha512
 echo '8770214491e31f0a7a3efaade90eee7b0eb20a8a6ab635c5f854d78263f59a1849133c14ef5123d01023f0110cbb9fc6f818da053c01277914ae81473430a952  lapack-3.4.2.tar.gz' > lapack-3.4.2.tar.gz.sha512
 echo '887582e5a22e4cde338aa8fec7a89f6dd31f2f02b8842735f00f970f64582333fa03401cea6d01704083403c7e8b7ebc26655468ce930165673b33efa4bcd586  libpng-1.6.3.tar.gz' > libpng-1.6.3.tar.gz.sha512
 echo '990e3a155ca7a9d329c41a43b44a9625f717205e81157c668a8f3f2ad5459ed3fed8c9bd85e7f81c509e0628d2192a262d4aa30c8bfc348bb67ed60a0362505a  matplotlib-1.3.0.tar.gz' > matplotlib-1.3.0.tar.gz.sha512

diff -r c27861077a46db9622931d1af328f3eaae07263a -r 7b97a8162373c88e0ecc2a7ee67e4e8e2d06033d doc/source/_static/agogo_yt.css
--- a/doc/source/_static/agogo_yt.css
+++ /dev/null
@@ -1,41 +0,0 @@
-@import url("agogo.css");
-@import url("http://fonts.googleapis.com/css?family=Crimson+Text");
-@import url("http://fonts.googleapis.com/css?family=Droid+Sans");
-
-div.document ul {
-  margin-left: 1.5em;
-  margin-top: 0.0em;
-  margin-bottom: 1.0em;
-}
-
-div.document li.toctree-l1 {
-  margin-bottom: 0.5em;
-}
-
-table.contentstable {
-  width: 100%;
-}
-
-table.contentstable td {
-  padding: 5px 15px 0px 15px;
-}
-
-table.contentstable tr {
-  border-bottom: 1px solid black;
-}
-
-a.biglink {
-  line-height: 1.2em;
-}
-
-a tt.xref {
-  font-weight: bolder;
-}
-
-table.docutils {
-  width: 100%;
-}
-
-table.docutils td {
-  width: 50%;
-}

diff -r c27861077a46db9622931d1af328f3eaae07263a -r 7b97a8162373c88e0ecc2a7ee67e4e8e2d06033d doc/source/_static/custom.css
--- /dev/null
+++ b/doc/source/_static/custom.css
@@ -0,0 +1,94 @@
+blockquote {
+    font-size: 16px;
+    border-left: none;
+}
+
+dd {
+    margin-left: 30px;
+}
+
+/*
+
+Collapse the navbar when its width is less than 1200 pixels.  This may need to
+be adjusted if the navbar menu changes.
+
+*/
+
+@media (max-width: 1200px) {
+    .navbar-header {
+        float: none;
+    }
+    .navbar-toggle {
+        display: block;
+    }
+    .navbar-collapse {
+        border-top: 1px solid transparent;
+        box-shadow: inset 0 1px 0 rgba(255,255,255,0.1);
+    }
+    .navbar-collapse.collapse {
+        display: none!important;
+    }
+    .navbar-nav {
+        float: none!important;
+        margin: 7.5px -15px;
+    }
+    .navbar-nav>li {
+        float: none;
+    }
+    .navbar-nav>li>a {
+        padding-top: 10px;
+        padding-bottom: 10px;
+    }
+}
+
+/* 
+
+Sphinx code literals conflict with the notebook code tag, so we special-case
+literals that are inside text.
+
+*/
+
+p code {
+    color:  #d14;    
+    white-space: nowrap;
+    font-size: 90%;
+    background-color: #f9f2f4;
+    font-family: Menlo, Monaco, Consolas, 'Courier New', monospace;
+}
+
+/*
+
+Nicer, controllable formatting for tables that have multi-line headers.
+
+*/
+
+th.head {
+    white-space: pre;
+}
+
+/*
+
+labels have a crappy default color that is almost invisible in our doc theme so
+we use a darker color.
+
+*/
+
+.label {
+    color: #333333;
+}
+
+/*
+
+Hack to prevent internal link targets being positioned behind the navbar.
+
+See: https://github.com/twbs/bootstrap/issues/1768
+
+*/
+
+*[id]:before { 
+  display: block; 
+  content: " "; 
+  margin-top: -45px; 
+  height: 45px; 
+  visibility: hidden; 
+}

diff -r c27861077a46db9622931d1af328f3eaae07263a -r 7b97a8162373c88e0ecc2a7ee67e4e8e2d06033d doc/source/_templates/layout.html
--- a/doc/source/_templates/layout.html
+++ b/doc/source/_templates/layout.html
@@ -35,3 +35,5 @@
     </div>
 {%- endblock %}
 
+{# Custom CSS overrides #}
+{% set bootswatch_css_custom = ['_static/custom.css'] %}

diff -r c27861077a46db9622931d1af328f3eaae07263a -r 7b97a8162373c88e0ecc2a7ee67e4e8e2d06033d doc/source/analyzing/_dq_docstrings.inc
--- a/doc/source/analyzing/_dq_docstrings.inc
+++ /dev/null
@@ -1,165 +0,0 @@
-
-
-.. function:: Action(action, combine_action, filter=None):
-
-   (This is a proxy for :func:`~yt.data_objects.derived_quantities._Action`.)
-   This function evals the string given by the action arg and uses 
-   the function thrown with the combine_action to combine the values.  
-   A filter can be thrown to be evaled to short-circuit the calculation 
-   if some criterion is not met.
-   :param action: a string containing the desired action to be evaled.
-   :param combine_action: the function used to combine the answers when done lazily.
-   :param filter: a string to be evaled to serve as a data filter.
-
-
-
-.. function:: AngularMomentumVector():
-
-   (This is a proxy for :func:`~yt.data_objects.derived_quantities._AngularMomentumVector`.)
-   This function returns the mass-weighted average angular momentum vector.
-
-
-
-.. function:: BaryonSpinParameter():
-
-   (This is a proxy for :func:`~yt.data_objects.derived_quantities._BaryonSpinParameter`.)
-   This function returns the spin parameter for the baryons, but it uses
-   the particles in calculating enclosed mass.
-
-
-
-.. function:: BulkVelocity():
-
-   (This is a proxy for :func:`~yt.data_objects.derived_quantities._BulkVelocity`.)
-   This function returns the mass-weighted average velocity in the object.
-
-
-
-.. function:: CenterOfMass(use_cells=True, use_particles=False):
-
-   (This is a proxy for :func:`~yt.data_objects.derived_quantities._CenterOfMass`.)
-   This function returns the location of the center
-   of mass. By default, it computes of the *non-particle* data in the object. 
-   
-   Parameters
-   ----------
-   
-   use_cells : bool
-       If True, will include the cell mass (default: True)
-   use_particles : bool
-       if True, will include the particles in the object (default: False)
-
-
-
-.. function:: Extrema(fields, non_zero=False, filter=None):
-
-   (This is a proxy for :func:`~yt.data_objects.derived_quantities._Extrema`.)
-   This function returns the extrema of a set of fields
-   
-   :param fields: A field name, or a list of field names
-   :param filter: a string to be evaled to serve as a data filter.
-
-
-
-.. function:: IsBound(truncate=True, include_thermal_energy=False, treecode=True, opening_angle=1.0, periodic_test=False, include_particles=True):
-
-   (This is a proxy for :func:`~yt.data_objects.derived_quantities._IsBound`.)
-   This returns whether or not the object is gravitationally bound. If this
-   returns a value greater than one, it is bound, and otherwise not.
-   
-   Parameters
-   ----------
-   truncate : Bool
-       Should the calculation stop once the ratio of
-       gravitational:kinetic is 1.0?
-   include_thermal_energy : Bool
-       Should we add the energy from ThermalEnergy
-       on to the kinetic energy to calculate 
-       binding energy?
-   treecode : Bool
-       Whether or not to use the treecode.
-   opening_angle : Float 
-       The maximal angle a remote node may subtend in order
-       for the treecode method of mass conglomeration may be
-       used to calculate the potential between masses.
-   periodic_test : Bool 
-       Used for testing the periodic adjustment machinery
-       of this derived quantity.
-   include_particles : Bool
-       Should we add the mass contribution of particles
-       to calculate binding energy?
-   
-   Examples
-   --------
-   >>> sp.quantities["IsBound"](truncate=False,
-   ... include_thermal_energy=True, treecode=False, opening_angle=2.0)
-   0.32493
-
-
-
-.. function:: MaxLocation(field):
-
-   (This is a proxy for :func:`~yt.data_objects.derived_quantities._MaxLocation`.)
-   This function returns the location of the maximum of a set
-   of fields.
-
-
-
-.. function:: MinLocation(field):
-
-   (This is a proxy for :func:`~yt.data_objects.derived_quantities._MinLocation`.)
-   This function returns the location of the minimum of a set
-   of fields.
-
-
-
-.. function:: ParticleSpinParameter():
-
-   (This is a proxy for :func:`~yt.data_objects.derived_quantities._ParticleSpinParameter`.)
-   This function returns the spin parameter for the baryons, but it uses
-   the particles in calculating enclosed mass.
-
-
-
-.. function:: StarAngularMomentumVector():
-
-   (This is a proxy for :func:`~yt.data_objects.derived_quantities._StarAngularMomentumVector`.)
-   This function returns the mass-weighted average angular momentum vector 
-   for stars.
-
-
-
-.. function:: TotalMass():
-
-   (This is a proxy for :func:`~yt.data_objects.derived_quantities._TotalMass`.)
-   This function takes no arguments and returns the sum of cell masses and
-   particle masses in the object.
-
-
-
-.. function:: TotalQuantity(fields):
-
-   (This is a proxy for :func:`~yt.data_objects.derived_quantities._TotalQuantity`.)
-   This function sums up a given field over the entire region
-   
-   :param fields: The fields to sum up
-
-
-
-.. function:: WeightedAverageQuantity(field, weight):
-
-   (This is a proxy for :func:`~yt.data_objects.derived_quantities._WeightedAverageQuantity`.)
-   This function returns an averaged quantity.
-   
-   :param field: The field to average
-   :param weight: The field to weight by
-
-.. function:: WeightedVariance(field, weight):
-
-   (This is a proxy for :func:`~yt.data_objects.derived_quantities._WeightedVariance`.)
-    This function returns the variance of a field.
-
-    :param field: The target field
-    :param weight: The field to weight by
-
-    Returns the weighted variance and the weighted mean.

diff -r c27861077a46db9622931d1af328f3eaae07263a -r 7b97a8162373c88e0ecc2a7ee67e4e8e2d06033d doc/source/analyzing/_obj_docstrings.inc
--- a/doc/source/analyzing/_obj_docstrings.inc
+++ /dev/null
@@ -1,150 +0,0 @@
-
-
-.. class:: boolean(self, regions, fields=None, pf=None, **field_parameters):
-
-   For more information, see :ref:`physical-object-api`
-   (This is a proxy for :class:`~yt.data_objects.data_containers.AMRBooleanRegionBase`.)
-
-
-.. class:: covering_grid(self, level, left_edge, dims, fields=None, pf=None, num_ghost_zones=0, use_pbar=True, **field_parameters):
-
-   For more information, see :ref:`physical-object-api`
-   (This is a proxy for :class:`~yt.data_objects.data_containers.AMRCoveringGridBase`.)
-
-
-.. class:: cut_region(self, base_region, field_cuts, **field_parameters):
-
-   For more information, see :ref:`physical-object-api`
-   (This is a proxy for :class:`~yt.data_objects.data_containers.InLineExtractedRegionBase`.)
-
-
-.. class:: cutting(self, normal, center, fields=None, node_name=None, north_vector=None, **field_parameters):
-
-   For more information, see :ref:`physical-object-api`
-   (This is a proxy for :class:`~yt.data_objects.data_containers.AMRCuttingPlaneBase`.)
-
-
-.. class:: disk(self, center, normal, radius, height, fields=None, pf=None, **field_parameters):
-
-   For more information, see :ref:`physical-object-api`
-   (This is a proxy for :class:`~yt.data_objects.data_containers.AMRCylinderBase`.)
-
-
-.. class:: ellipsoid(self, center, A, B, C, e0, tilt, fields=None, pf=None, **field_parameters):
-
-   For more information, see :ref:`physical-object-api`
-   (This is a proxy for :class:`~yt.data_objects.data_containers.AMREllipsoidBase`.)
-
-
-.. class:: extracted_region(self, base_region, indices, force_refresh=True, **field_parameters):
-
-   For more information, see :ref:`physical-object-api`
-   (This is a proxy for :class:`~yt.data_objects.data_containers.ExtractedRegionBase`.)
-
-
-.. class:: fixed_res_cutting(self, normal, center, width, dims, fields=None, node_name=None, **field_parameters):
-
-   For more information, see :ref:`physical-object-api`
-   (This is a proxy for :class:`~yt.data_objects.data_containers.AMRFixedResCuttingPlaneBase`.)
-
-
-.. class:: fixed_res_proj(self, axis, level, left_edge, dims, fields=None, pf=None, **field_parameters):
-
-   For more information, see :ref:`physical-object-api`
-   (This is a proxy for :class:`~yt.data_objects.data_containers.AMRFixedResProjectionBase`.)
-
-
-.. class:: grid_collection(self, center, grid_list, fields=None, pf=None, **field_parameters):
-
-   For more information, see :ref:`physical-object-api`
-   (This is a proxy for :class:`~yt.data_objects.data_containers.AMRGridCollectionBase`.)
-
-
-.. class:: grid_collection_max_level(self, center, max_level, fields=None, pf=None, **field_parameters):
-
-   For more information, see :ref:`physical-object-api`
-   (This is a proxy for :class:`~yt.data_objects.data_containers.AMRMaxLevelCollectionBase`.)
-
-
-.. class:: inclined_box(self, origin, box_vectors, fields=None, pf=None, **field_parameters):
-
-   For more information, see :ref:`physical-object-api`
-   (This is a proxy for :class:`~yt.data_objects.data_containers.AMRInclinedBoxBase`.)
-
-
-.. class:: ortho_ray(self, axis, coords, fields=None, pf=None, **field_parameters):
-
-   For more information, see :ref:`physical-object-api`
-   (This is a proxy for :class:`~yt.data_objects.data_containers.AMROrthoRayBase`.)
-
-
-.. class:: overlap_proj(self, axis, field, weight_field=None, max_level=None, center=None, pf=None, source=None, node_name=None, field_cuts=None, preload_style='level', serialize=True, **field_parameters):
-
-   For more information, see :ref:`physical-object-api`
-   (This is a proxy for :class:`~yt.data_objects.data_containers.AMRProjBase`.)
-
-
-.. class:: periodic_region(self, center, left_edge, right_edge, fields=None, pf=None, **field_parameters):
-
-   For more information, see :ref:`physical-object-api`
-   (This is a proxy for :class:`~yt.data_objects.data_containers.AMRPeriodicRegionBase`.)
-
-
-.. class:: periodic_region_strict(self, center, left_edge, right_edge, fields=None, pf=None, **field_parameters):
-
-   For more information, see :ref:`physical-object-api`
-   (This is a proxy for :class:`~yt.data_objects.data_containers.AMRPeriodicRegionStrictBase`.)
-
-
-.. class:: proj(self, axis, field, weight_field=None, max_level=None, center=None, pf=None, source=None, node_name=None, field_cuts=None, preload_style=None, serialize=True, style='integrate', **field_parameters):
-
-   For more information, see :ref:`physical-object-api`
-   (This is a proxy for :class:`~yt.data_objects.data_containers.AMRQuadTreeProjBase`.)
-
-
-.. class:: ray(self, start_point, end_point, fields=None, pf=None, **field_parameters):
-
-   For more information, see :ref:`physical-object-api`
-   (This is a proxy for :class:`~yt.data_objects.data_containers.AMRRayBase`.)
-
-
-.. class:: region(self, center, left_edge, right_edge, fields=None, pf=None, **field_parameters):
-
-   For more information, see :ref:`physical-object-api`
-   (This is a proxy for :class:`~yt.data_objects.data_containers.AMRRegionBase`.)
-
-
-.. class:: region_strict(self, center, left_edge, right_edge, fields=None, pf=None, **field_parameters):
-
-   For more information, see :ref:`physical-object-api`
-   (This is a proxy for :class:`~yt.data_objects.data_containers.AMRRegionStrictBase`.)
-
-
-.. class:: slice(self, axis, coord, fields=None, center=None, pf=None, node_name=False, **field_parameters):
-
-   For more information, see :ref:`physical-object-api`
-   (This is a proxy for :class:`~yt.data_objects.data_containers.AMRSliceBase`.)
-
-
-.. class:: smoothed_covering_grid(self, *args, **field_parameters):
-
-   For more information, see :ref:`physical-object-api`
-   (This is a proxy for :class:`~yt.data_objects.data_containers.AMRSmoothedCoveringGridBase`.)
-
-
-.. class:: sphere(self, center, radius, fields=None, pf=None, **field_parameters):
-
-   For more information, see :ref:`physical-object-api`
-   (This is a proxy for :class:`~yt.data_objects.data_containers.AMRSphereBase`.)
-
-
-.. class:: streamline(self, positions, length=1.0, fields=None, pf=None, **field_parameters):
-
-   For more information, see :ref:`physical-object-api`
-   (This is a proxy for :class:`~yt.data_objects.data_containers.AMRStreamlineBase`.)
-
-
-.. class:: surface(self, data_source, surface_field, field_value):
-
-   For more information, see :ref:`physical-object-api`
-   (This is a proxy for :class:`~yt.data_objects.data_containers.AMRSurfaceBase`.)
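The proxies listed above are the yt-2 interface; in yt-3.0 the same data
containers are created directly from the dataset object. A minimal sketch of a
few equivalents (the dataset path is illustrative, not part of this changeset):

    import yt

    ds = yt.load("Enzo_64/RD0006/RedshiftOutput0006")  # any supported dataset

    sp  = ds.sphere("c", (100.0, "kpc"))           # sphere(center, radius)
    reg = ds.region([0.5]*3, [0.4]*3, [0.6]*3)     # region(center, left_edge, right_edge)
    ray = ds.ortho_ray(0, (0.5, 0.5))              # ortho_ray(axis, coords)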

diff -r c27861077a46db9622931d1af328f3eaae07263a -r 7b97a8162373c88e0ecc2a7ee67e4e8e2d06033d doc/source/analyzing/analysis_modules/Halo_Analysis.ipynb
--- a/doc/source/analyzing/analysis_modules/Halo_Analysis.ipynb
+++ /dev/null
@@ -1,410 +0,0 @@
-{
- "metadata": {
-  "name": ""
- },
- "nbformat": 3,
- "nbformat_minor": 0,
- "worksheets": [
-  {
-   "cells": [
-    {
-     "cell_type": "heading",
-     "level": 1,
-     "metadata": {},
-     "source": [
-      "Full Halo Analysis"
-     ]
-    },
-    {
-     "cell_type": "heading",
-     "level": 3,
-     "metadata": {},
-     "source": [
-      "Creating a Catalog"
-     ]
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Here we put everything together to perform some realistic analysis. First we load a full simulation dataset."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "from yt.mods import *\n",
-      "from yt.analysis_modules.halo_analysis.api import *\n",
-      "import tempfile\n",
-      "import shutil\n",
-      "import os\n",
-      "\n",
-      "# Create temporary directory for storing files\n",
-      "tmpdir = tempfile.mkdtemp()\n",
-      "\n",
-      "# Load the data set with the full simulation information\n",
-      "data_pf = load('Enzo_64/RD0006/RedshiftOutput0006')"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Now we load a rockstar halos binary file. This is the output from running the rockstar halo finder on the dataset loaded above. It is also possible to require the HaloCatalog to find the halos in the full simulation dataset at runtime by specifying a `finder_method` keyword."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "# Load the rockstar data files\n",
-      "halos_pf = load('rockstar_halos/halos_0.0.bin')"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "From these two loaded datasets we create a halo catalog object. No analysis is done at this point, we are simply defining an object we can add analysis tasks to. These analysis tasks will be run in the order they are added to the halo catalog object."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "# Instantiate a catalog using those two paramter files\n",
-      "hc = HaloCatalog(data_pf=data_pf, halos_pf=halos_pf, \n",
-      "                 output_dir=os.path.join(tmpdir, 'halo_catalog'))"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "The first analysis task we add is a filter for the most massive halos; those with masses great than $10^{14}~M_\\odot$. Note that all following analysis will only be performed on these massive halos and we will not waste computational time calculating quantities for halos we are not interested in. This is a result of adding this filter first. If we had called `add_filter` after some other `add_quantity` or `add_callback` to the halo catalog, the quantity and callback calculations would have been performed for all halos, not just those which pass the filter."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": true,
-     "input": [
-      "# Filter out less massive halos\n",
-      "hc.add_filter(\"quantity_value\", \"particle_mass\", \">\", 1e14, \"Msun\")"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "heading",
-     "level": 3,
-     "metadata": {},
-     "source": [
-      "Finding Radial Profiles"
-     ]
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Our first analysis goal is going to be constructing radial profiles for our halos. We would like these profiles to be in terms of the virial radius. Unfortunately we have no guarantee that values of center and virial radius recorded by the halo finder are actually physical. Therefore we should recalculate these quantities ourselves using the values recorded by the halo finder as a starting point."
-     ]
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "The first step is going to be creating a sphere object that we will create radial profiles along. This attaches a sphere data object to every halo left in the catalog."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "# attach a sphere object to each halo whose radius extends to twice the radius of the halo\n",
-      "hc.add_callback(\"sphere\", factor=2.0)"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Next we find the radial profile of the gas overdensity along the sphere object in order to find the virial radius. `radius` is the axis along which we make bins for the radial profiles. `[(\"gas\",\"overdensity\")]` is the quantity that we are profiling. This is a list so we can profile as many quantities as we want. The `weight_field` indicates how the cells should be weighted, but note that this is not a list, so all quantities will be weighted in the same way. The `accumulation` keyword indicates if the profile should be cummulative; this is useful for calculating profiles such as enclosed mass. The `storage` keyword indicates the name of the attribute of a halo where these profiles will be stored. Setting the storage keyword to \"virial_quantities_profiles\" means that the profiles will be stored in a dictionary that can be accessed by `halo.virial_quantities_profiles`."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "# use the sphere to calculate radial profiles of gas density weighted by cell volume in terms of the virial radius\n",
-      "hc.add_callback(\"profile\", x_field=\"radius\",\n",
-      "                y_fields=[(\"gas\", \"overdensity\")],\n",
-      "                weight_field=\"cell_volume\", \n",
-      "                accumulation=False,\n",
-      "                storage=\"virial_quantities_profiles\")"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Now we calculate the virial radius of halo using the sphere object. As this is a callback, not a quantity, the virial radius will not be written out with the rest of the halo properties in the final halo catalog. This also has a `profile_storage` keyword to specify where the radial profiles are stored that will allow the callback to calculate the relevant virial quantities. We supply this keyword with the same string we gave to `storage` in the last `profile` callback."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "# Define a virial radius for the halo.\n",
-      "hc.add_callback(\"virial_quantities\", [\"radius\"], \n",
-      "                profile_storage = \"virial_quantities_profiles\")"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Now that we have calculated the virial radius, we delete the profiles we used to find it."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "hc.add_callback('delete_attribute','virial_quantities_profiles')"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Now that we have calculated virial quantities we can add a new sphere that is aware of the virial radius we calculated above."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "hc.add_callback('sphere', radius_field='radius_200', factor=5,\n",
-      "                field_parameters=dict(virial_radius=('quantity', 'radius_200')))"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Using this new sphere, we calculate a gas temperature profile along the virial radius, weighted by the cell mass."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "hc.add_callback('profile', 'virial_radius', [('gas','temperature')],\n",
-      "                storage='virial_profiles',\n",
-      "                weight_field='cell_mass', \n",
-      "                accumulation=False, output_dir='profiles')\n"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "As profiles are not quantities they will not automatically be written out in the halo catalog; thus in order to be reloadable we must write them out explicitly through a callback of `save_profiles`. This makes sense because they have an extra dimension for each halo along the profile axis. "
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "# Save the profiles\n",
-      "hc.add_callback(\"save_profiles\", storage=\"virial_profiles\", output_dir=\"profiles\")"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "We then create the halo catalog. Remember, no analysis is done before this call to create. By adding callbacks and filters we are simply queuing up the actions we want to take that will all run now."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": true,
-     "input": [
-      "hc.create()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "heading",
-     "level": 3,
-     "metadata": {},
-     "source": [
-      "Reloading HaloCatalogs"
-     ]
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Finally we load these profiles back in and make a pretty plot. It is not strictly necessary to reload the profiles in this notebook, but we show this process here to illustrate that this step may be performed completely separately from the rest of the script. This workflow allows you to create a single script that will allow you to perform all of the analysis that requires the full dataset. The output can then be saved in a compact form where only the necessarily halo quantities are stored. You can then download this smaller dataset to a local computer and run any further non-computationally intense analysis and design the appropriate plots."
-     ]
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "We can load a previously saved halo catalog by using the `load` command. We then create a `HaloCatalog` object from just this dataset."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "halos_pf =  load(os.path.join(tmpdir, 'halo_catalog/halo_catalog.0.h5'))\n",
-      "\n",
-      "hc_reloaded = HaloCatalog(halos_pf=halos_pf,\n",
-      "                          output_dir=os.path.join(tmpdir, 'halo_catalog'))"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      " Just as profiles are saved seperately throught the `save_profiles` callback they also must be loaded separately using the `load_profiles` callback."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "hc_reloaded.add_callback('load_profiles', storage='virial_profiles',\n",
-      "                         output_dir='profiles')"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Calling `load` is the equivalent of calling `create` earlier, but defaults to to not saving new information. This means that the callback to `load_profiles` is not run until we call `load` here."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": true,
-     "input": [
-      "hc_reloaded.load()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "heading",
-     "level": 3,
-     "metadata": {},
-     "source": [
-      "Plotting Radial Profiles"
-     ]
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "In the future ProfilePlot will be able to properly interpret the loaded profiles of `Halo` and `HaloCatalog` objects, but this functionality is not yet implemented. In the meantime, we show a quick method of viewing a profile for a single halo."
-     ]
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "The individual `Halo` objects contained in the `HaloCatalog` can be accessed through the `halo_list` attribute. This gives us access to the dictionary attached to each halo where we stored the radial profiles."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "halo = hc_reloaded.halo_list[0]\n",
-      "\n",
-      "radius = halo.virial_profiles['virial_radius']\n",
-      "temperature = halo.virial_profiles[u\"('gas', 'temperature')\"]\n",
-      "\n",
-      "# Remove output files, that are no longer needed\n",
-      "shutil.rmtree(tmpdir)"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Here we quickly use matplotlib to create a basic plot of the radial profile of this halo. When `ProfilePlot` is properly configured to accept Halos and HaloCatalogs the full range of yt plotting tools will be accessible."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "%matplotlib inline\n",
-      "import matplotlib.pyplot as plt\n",
-      "\n",
-      "plt.plot(radius, temperature)\n",
-      "\n",
-      "plt.semilogy()\n",
-      "plt.xlabel('$\\mathrm{R/R_{vir}}$')\n",
-      "plt.ylabel('$\\mathrm{Temperature~[K]}$')\n",
-      "\n",
-      "plt.show()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    }
-   ],
-   "metadata": {}
-  }
- ]
-}
\ No newline at end of file
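The deleted notebook's catalog workflow condenses to a short script; a sketch
assembled from the cells above, assuming the same Enzo_64 and rockstar sample
data:

    import os
    import tempfile
    import yt
    from yt.analysis_modules.halo_analysis.api import HaloCatalog

    tmpdir = tempfile.mkdtemp()
    data_pf = yt.load('Enzo_64/RD0006/RedshiftOutput0006')
    halos_pf = yt.load('rockstar_halos/halos_0.0.bin')

    hc = HaloCatalog(data_pf=data_pf, halos_pf=halos_pf,
                     output_dir=os.path.join(tmpdir, 'halo_catalog'))
    hc.add_filter("quantity_value", "particle_mass", ">", 1e14, "Msun")
    hc.add_callback("sphere", factor=2.0)
    hc.add_callback("profile", x_field="radius",
                    y_fields=[("gas", "overdensity")],
                    weight_field="cell_volume",
                    storage="virial_quantities_profiles")
    hc.add_callback("virial_quantities", ["radius"],
                    profile_storage="virial_quantities_profiles")
    hc.create()  # nothing above runs until this call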

diff -r c27861077a46db9622931d1af328f3eaae07263a -r 7b97a8162373c88e0ecc2a7ee67e4e8e2d06033d doc/source/analyzing/analysis_modules/PPVCube.ipynb
--- a/doc/source/analyzing/analysis_modules/PPVCube.ipynb
+++ b/doc/source/analyzing/analysis_modules/PPVCube.ipynb
@@ -1,7 +1,7 @@
 {
  "metadata": {
   "name": "",
-  "signature": "sha256:ba8b6a53571695ae1d0c236ad43875823746e979a329a9d35ab0a8b899cebbba"
+  "signature": "sha256:56a8d72735e3cc428ff04b241d4b2ce6f653019818c6fc7a4148840d99030c85"
  },
  "nbformat": 3,
  "nbformat_minor": 0,
@@ -19,8 +19,9 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "%matplotlib inline\n",
-      "from yt.mods import *\n",
+      "import yt\n",
+      "import numpy as np\n",
+      "\n",
       "from yt.analysis_modules.ppv_cube.api import PPVCube"
      ],
      "language": "python",
@@ -122,7 +123,7 @@
       "data[\"velocity_y\"] = (vely, \"km/s\")\n",
       "data[\"velocity_z\"] = (np.zeros((nx,ny,nz)), \"km/s\") # zero velocity in the z-direction\n",
       "bbox = np.array([[-0.5,0.5],[-0.5,0.5],[-0.5,0.5]]) # bbox of width 1 on a side with center (0,0,0)\n",
-      "ds = load_uniform_grid(data, (nx,ny,nz), length_unit=(2*R,\"kpc\"), nprocs=1, bbox=bbox)"
+      "ds = yt.load_uniform_grid(data, (nx,ny,nz), length_unit=(2*R,\"kpc\"), nprocs=1, bbox=bbox)"
      ],
      "language": "python",
      "metadata": {},
@@ -139,7 +140,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "slc = SlicePlot(ds, \"z\", [\"density\",\"velocity_x\",\"velocity_y\",\"velocity_magnitude\"])"
+      "slc = yt.SlicePlot(ds, \"z\", [\"density\",\"velocity_x\",\"velocity_y\",\"velocity_magnitude\"])"
      ],
      "language": "python",
      "metadata": {},
@@ -222,7 +223,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "pf = load(\"cube.fits\")"
+      "ds = yt.load(\"cube.fits\")"
      ],
      "language": "python",
      "metadata": {},
@@ -233,7 +234,7 @@
      "collapsed": false,
      "input": [
       "# Specifying no center gives us the center slice\n",
-      "slc = SlicePlot(pf, \"z\", [\"density\"])\n",
+      "slc = yt.SlicePlot(ds, \"z\", [\"density\"])\n",
       "slc.show()"
      ],
      "language": "python",
@@ -246,9 +247,9 @@
      "input": [
       "import yt.units as u\n",
       "# Picking different velocities for the slices\n",
-      "new_center = pf.domain_center\n",
-      "new_center[2] = pf.spec2pixel(-1.0*u.km/u.s)\n",
-      "slc = SlicePlot(pf, \"z\", [\"density\"], center=new_center)\n",
+      "new_center = ds.domain_center\n",
+      "new_center[2] = ds.spec2pixel(-1.0*u.km/u.s)\n",
+      "slc = yt.SlicePlot(ds, \"z\", [\"density\"], center=new_center)\n",
       "slc.show()"
      ],
      "language": "python",
@@ -259,8 +260,8 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "new_center[2] = pf.spec2pixel(0.7*u.km/u.s)\n",
-      "slc = SlicePlot(pf, \"z\", [\"density\"], center=new_center)\n",
+      "new_center[2] = ds.spec2pixel(0.7*u.km/u.s)\n",
+      "slc = yt.SlicePlot(ds, \"z\", [\"density\"], center=new_center)\n",
       "slc.show()"
      ],
      "language": "python",
@@ -271,8 +272,8 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "new_center[2] = pf.spec2pixel(-0.3*u.km/u.s)\n",
-      "slc = SlicePlot(pf, \"z\", [\"density\"], center=new_center)\n",
+      "new_center[2] = ds.spec2pixel(-0.3*u.km/u.s)\n",
+      "slc = yt.SlicePlot(ds, \"z\", [\"density\"], center=new_center)\n",
       "slc.show()"
      ],
      "language": "python",
@@ -290,7 +291,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "prj = ProjectionPlot(pf, \"z\", [\"density\"], proj_style=\"sum\")\n",
+      "prj = yt.ProjectionPlot(ds, \"z\", [\"density\"], proj_style=\"sum\")\n",
       "prj.set_log(\"density\", True)\n",
       "prj.set_zlim(\"density\", 1.0e-3, 0.2)\n",
       "prj.show()"

diff -r c27861077a46db9622931d1af328f3eaae07263a -r 7b97a8162373c88e0ecc2a7ee67e4e8e2d06033d doc/source/analyzing/analysis_modules/Particle_Trajectories.ipynb
--- a/doc/source/analyzing/analysis_modules/Particle_Trajectories.ipynb
+++ b/doc/source/analyzing/analysis_modules/Particle_Trajectories.ipynb
@@ -1,7 +1,7 @@
 {
  "metadata": {
   "name": "",
-  "signature": "sha256:e4b5ea69687eb79452c16385b3a6f795b4572518dfa7f9d8a8125bd75b5fea85"
+  "signature": "sha256:5ab80c6b33a115cb88c36fde8659434d14a852dd43b0b419f2bb0c04acf66278"
  },
  "nbformat": 3,
  "nbformat_minor": 0,
@@ -20,7 +20,7 @@
      "collapsed": false,
      "input": [
       "%matplotlib inline\n",
-      "from yt.mods import *\n",
+      "import yt\n",
       "import glob\n",
       "from yt.analysis_modules.particle_trajectories.api import ParticleTrajectories\n",
       "from yt.config import ytcfg\n",
@@ -77,7 +77,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "ds = load(my_fns[0])\n",
+      "ds = yt.load(my_fns[0])\n",
       "dd = ds.all_data()\n",
       "indices = dd[\"particle_index\"].astype(\"int\")\n",
       "print indices"
@@ -205,8 +205,8 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "ds = load(\"enzo_tiny_cosmology/DD0046/DD0046\")\n",
-      "slc = SlicePlot(ds, \"x\", [\"density\",\"dark_matter_density\"], center=\"max\", width=(3.0, \"Mpc\"))\n",
+      "ds = yt.load(\"enzo_tiny_cosmology/DD0046/DD0046\")\n",
+      "slc = yt.SlicePlot(ds, \"x\", [\"density\",\"dark_matter_density\"], center=\"max\", width=(3.0, \"Mpc\"))\n",
       "slc.show()"
      ],
      "language": "python",
@@ -299,7 +299,7 @@
      "cell_type": "markdown",
      "metadata": {},
      "source": [
-      "Suppose we wanted to know the gas density along the particle trajectory, but there wasn't a particle field corresponding to that in our dataset. Never fear! If the field exists as a grid field, `yt` will interpolate this field to the particle positions and add the interpolated field to the trajectory. To add such a field (or any field, including additional particle fields) we can call the `add_fields` method:"
+      "Suppose we wanted to know the gas density along the particle trajectory, but there wasn't a particle field corresponding to that in our dataset. Never fear! If the field exists as a grid field, yt will interpolate this field to the particle positions and add the interpolated field to the trajectory. To add such a field (or any field, including additional particle fields) we can call the `add_fields` method:"
      ]
     },
     {
@@ -354,4 +354,4 @@
    "metadata": {}
   }
  ]
-}
\ No newline at end of file
+}
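The `add_fields` method mentioned above interpolates a grid field onto the
stored particle positions. A short sketch, assuming `my_fns` (the list of
dataset filenames) and `indices` are defined as in the notebook:

    from yt.analysis_modules.particle_trajectories.api import ParticleTrajectories

    trajs = ParticleTrajectories(my_fns, indices)
    trajs.add_fields(["density"])   # interpolate the grid field to the particles
    rho = trajs["density"]          # dict-like access, as with other trajectory fields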

diff -r c27861077a46db9622931d1af328f3eaae07263a -r 7b97a8162373c88e0ecc2a7ee67e4e8e2d06033d doc/source/analyzing/analysis_modules/SZ_projections.ipynb
--- a/doc/source/analyzing/analysis_modules/SZ_projections.ipynb
+++ b/doc/source/analyzing/analysis_modules/SZ_projections.ipynb
@@ -1,7 +1,7 @@
 {
  "metadata": {
   "name": "",
-  "signature": "sha256:4745a15abb6512547b50280b92c22567f89255189fd968ca706ef7c39d48024f"
+  "signature": "sha256:e4db171b795d155870280ddbe8986f55f9a94ffb10783abf9d4cc2de3ec24894"
  },
  "nbformat": 3,
  "nbformat_minor": 0,
@@ -21,7 +21,7 @@
       "incorporating all of these effects, there is a library:\n",
       "SZpack ([Chluba et al 2012](http://adsabs.harvard.edu/abs/2012MNRAS.426..510C)). \n",
       "\n",
-      "The `sunyaev_zeldovich` analysis module in `yt` makes it possible\n",
+      "The `sunyaev_zeldovich` analysis module in yt makes it possible\n",
       "to make projections of the full S-Z signal given the properties of the\n",
       "thermal gas in the simulation using SZpack. SZpack has several different options for computing the S-Z signal, from full\n",
       "integrations to very good approximations.  Since a full or even a\n",
@@ -43,7 +43,7 @@
      "cell_type": "markdown",
      "metadata": {},
      "source": [
-      "`yt` makes projections of the various moments needed for the\n",
+      "yt makes projections of the various moments needed for the\n",
       "calculation, and then the resulting projected fields are used to\n",
       "compute the S-Z signal. In our implementation, the expansion is carried out to first-order\n",
       "terms in $T_e$ and zeroth-order terms in $\\beta_{c,\\parallel}$ by default, but terms up to second-order in can be optionally\n",
@@ -81,7 +81,7 @@
      "cell_type": "markdown",
      "metadata": {},
      "source": [
-      "Once you have SZpack installed, making S-Z projections from ``yt``\n",
+      "Once you have SZpack installed, making S-Z projections from yt\n",
       "datasets is fairly straightforward:"
      ]
     },
@@ -89,11 +89,10 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "%matplotlib inline\n",
-      "from yt.mods import *\n",
+      "import yt\n",
       "from yt.analysis_modules.sunyaev_zeldovich.api import SZProjection\n",
       "\n",
-      "ds = load(\"enzo_tiny_cosmology/DD0046/DD0046\")\n",
+      "ds = yt.load(\"enzo_tiny_cosmology/DD0046/DD0046\")\n",
       "\n",
       "freqs = [90.,180.,240.]\n",
       "szprj = SZProjection(ds, freqs)"
@@ -143,7 +142,7 @@
       "create images of the projected S-Z signal at each requested frequency,\n",
       "which can be accessed dict-like from the projection object (e.g.,\n",
       "`szprj[\"90_GHz\"]`). Projections of other quantities may also be\n",
-      "accessed; to see what fields are available call `szprj.keys()`. The methods also accept standard ``yt``\n",
+      "accessed; to see what fields are available call `szprj.keys()`. The methods also accept standard yt\n",
       "keywords for projections such as `center`, `width`, and `source`. The image buffer size can be controlled by setting `nx`.  \n"
      ]
     },
@@ -218,17 +217,9 @@
       "including coordinate information in kpc. The optional keyword\n",
       "`clobber` allows a previous file to be overwritten. \n"
      ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
     }
    ],
    "metadata": {}
   }
  ]
-}
\ No newline at end of file
+}
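Putting the visible pieces of this notebook together: the projection object is
filled by one of its projection methods and then accessed dict-like per
frequency. A sketch, assuming SZpack is installed; the `on_axis` call and its
arguments come from the sunyaev_zeldovich module, not from the truncated cells
above:

    import yt
    from yt.analysis_modules.sunyaev_zeldovich.api import SZProjection

    ds = yt.load("enzo_tiny_cosmology/DD0046/DD0046")
    szprj = SZProjection(ds, [90., 180., 240.])  # frequencies in GHz

    szprj.on_axis("z", width=(1.0, "Mpc"))       # project along z; width is illustrative
    szprj.keys()                                 # the available projected fields
    image = szprj["90_GHz"]                      # image buffer at 90 GHz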

This diff is so big that we needed to truncate the remainder.

https://bitbucket.org/yt_analysis/yt/commits/ef1183617671/
Changeset:   ef1183617671
Branch:      yt-3.0
User:        ngoldbaum
Date:        2014-08-02 21:52:31
Summary:     Merged in MatthewTurk/yt/yt-3.0 (pull request #1025)

NMSU-ART: Attempt to fix dark matter masses
Affected #:  2 files

diff -r 4a8c93735cdf58c42d1f60dbd3fb323e227ab982 -r ef1183617671b95978d736cbbc4402e07683aa1a yt/frontends/art/data_structures.py
--- a/yt/frontends/art/data_structures.py
+++ b/yt/frontends/art/data_structures.py
@@ -39,6 +39,8 @@
     io_registry
 from yt.utilities.lib.misc_utilities import \
     get_box_grids_level
+from yt.data_objects.particle_unions import \
+    ParticleUnion
 
 from yt.frontends.art.definitions import *
 import yt.utilities.fortran_utils as fpu
@@ -104,17 +106,7 @@
         self.particle_field_list = [f for f in particle_fields]
         self.field_list = [("art", f) for f in fluid_fields]
         # now generate all of the possible particle fields
-        if "wspecies" in self.dataset.parameters.keys():
-            wspecies = self.dataset.parameters['wspecies']
-            nspecies = len(wspecies)
-            self.dataset.particle_types = ["darkmatter", "stars"]
-            for specie in range(nspecies):
-                self.dataset.particle_types.append("specie%i" % specie)
-            self.dataset.particle_types_raw = tuple(
-                self.dataset.particle_types)
-        else:
-            self.dataset.particle_types = []
-        for ptype in self.dataset.particle_types:
+        for ptype in self.dataset.particle_types_raw:
             for pfield in self.particle_field_list:
                 pfn = (ptype, pfield)
                 self.field_list.append(pfn)
@@ -313,6 +305,8 @@
             self.root_level = root_level
             mylog.info("Using root level of %02i", self.root_level)
         # read the particle header
+        self.particle_types = []
+        self.particle_types_raw = ()
         if not self.skip_particles and self._file_particle_header:
             with open(self._file_particle_header, "rb") as fh:
                 particle_header_vals = fpu.read_attrs(
@@ -323,6 +317,10 @@
                 lspecies = np.fromfile(fh, dtype='>i', count=10)
             self.parameters['wspecies'] = wspecies[:n]
             self.parameters['lspecies'] = lspecies[:n]
+            for specie in range(n):
+                self.particle_types.append("specie%i" % specie)
+            self.particle_types_raw = tuple(
+                self.particle_types)
             ls_nonzero = np.diff(lspecies)[:n-1]
             ls_nonzero = np.append(lspecies[0], ls_nonzero)
             self.star_type = len(ls_nonzero)
@@ -360,6 +358,16 @@
         self.gamma = self.parameters["gamma"]
         mylog.info("Max level is %02i", self.max_level)
 
+    def create_field_info(self):
+        super(ARTDataset, self).create_field_info()
+        if "wspecies" in self.parameters:
+            # We create dark_matter and stars unions.
+            ptr = self.particle_types_raw
+            pu = ParticleUnion("darkmatter", list(ptr[:-1]))
+            self.add_particle_union(pu)
+            pu = ParticleUnion("stars", list(ptr[-1:]))
+            self.add_particle_union(pu)
+
     @classmethod
     def _is_valid(self, *args, **kwargs):
         """

diff -r 4a8c93735cdf58c42d1f60dbd3fb323e227ab982 -r ef1183617671b95978d736cbbc4402e07683aa1a yt/frontends/art/io.py
--- a/yt/frontends/art/io.py
+++ b/yt/frontends/art/io.py
@@ -39,6 +39,11 @@
         self.cache = {}
         self.masks = {}
         super(IOHandlerART, self).__init__(*args, **kwargs)
+        self.ws = self.ds.parameters["wspecies"]
+        self.ls = self.ds.parameters["lspecies"]
+        self.file_particle = self.ds._file_particle_data
+        self.file_stars = self.ds._file_particle_stars
+        self.Nrow = self.ds.parameters["Nrow"]
 
     def _read_fluid_selection(self, chunks, selector, fields, size):
         # Chunks in this case will have affiliated domain subset objects
@@ -70,8 +75,6 @@
         if key in self.masks.keys() and self.caching:
             return self.masks[key]
         ds = self.ds
-        ptmax = self.ws[-1]
-        pbool, idxa, idxb = _determine_field_size(ds, ftype, self.ls, ptmax)
         pstr = 'particle_position_%s'
         x,y,z = [self._get_field((ftype, pstr % ax)) for ax in 'xyz']
         mask = selector.select_points(x, y, z, 0.0)
@@ -81,6 +84,26 @@
         else:
             return mask
 
+    def _read_particle_coords(self, chunks, ptf):
+        for chunk in chunks:
+            for ptype, field_list in sorted(ptf.items()):
+                x = self._get_field((ptype, "particle_position_x"))
+                y = self._get_field((ptype, "particle_position_y"))
+                z = self._get_field((ptype, "particle_position_z"))
+                yield ptype, (x, y, z)
+
+    def _read_particle_fields(self, chunks, ptf, selector):
+        for chunk in chunks:
+            for ptype, field_list in sorted(ptf.items()):
+                x = self._get_field((ptype, "particle_position_x"))
+                y = self._get_field((ptype, "particle_position_y"))
+                z = self._get_field((ptype, "particle_position_z"))
+                mask = selector.select_points(x, y, z, 0.0)
+                if mask is None: continue
+                for field in field_list:
+                    data = self._get_field((ptype, field))
+                    yield (ptype, field), data[mask]
+
     def _get_field(self,  field):
         if field in self.cache.keys() and self.caching:
             mylog.debug("Cached %s", str(field))
@@ -139,6 +162,13 @@
             temp[-nstars:] = data
             tr[field] = temp
             del data
+        # We check again, after it's been filled
+        if fname == "particle_mass":
+            # We now divide by NGrid in order to make this match up.  Note that
+            # this means that even when requested in *code units*, we are
+            # giving them as modified by the ng value.  This only works for
+            # dark_matter -- stars are regular matter.
+            tr[field] /= self.ds.domain_dimensions.prod()
         if tr == {}:
             tr = dict((f, np.array([])) for f in fields)
         if self.caching:
@@ -147,35 +177,15 @@
         else:
             return tr[field]
 
-    def _read_particle_selection(self, chunks, selector, fields):
-        chunk = chunks.next()
-        self.ds = chunk.objs[0].domain.ds
-        self.ws = self.ds.parameters["wspecies"]
-        self.ls = self.ds.parameters["lspecies"]
-        self.file_particle = self.ds._file_particle_data
-        self.file_stars = self.ds._file_particle_stars
-        self.Nrow = self.ds.parameters["Nrow"]
-        data = {f:np.array([]) for f in fields}
-        for f in fields:
-            ftype, fname = f
-            mask = self._get_mask(selector, ftype)
-            arr = self._get_field(f)[mask].astype('f8')
-            data[f] = np.concatenate((arr, data[f]))
-        return data
-
-def _determine_field_size(ds, field, lspecies, ptmax):
+def _determine_field_size(pf, field, lspecies, ptmax):
     pbool = np.zeros(len(lspecies), dtype="bool")
     idxas = np.concatenate(([0, ], lspecies[:-1]))
     idxbs = lspecies
     if "specie" in field:
         index = int(field.replace("specie", ""))
         pbool[index] = True
-    elif field == "stars":
-        pbool[-1] = True
-    elif field == "darkmatter":
-        pbool[0:-1] = True
     else:
-        pbool[:] = True
+        raise RuntimeError
     idxa, idxb = idxas[pbool][0], idxbs[pbool][-1]
     return pbool, idxa, idxb
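The net effect of the merged change: the ART frontend now registers only the
raw "specie%i" particle types, and the "darkmatter" and "stars" names are
built on top of them as particle unions. A minimal sketch of the mechanism,
assuming a loaded ART dataset ds:

    from yt.data_objects.particle_unions import ParticleUnion

    ptr = ds.particle_types_raw                        # e.g. ("specie0", "specie1", "specie2")
    dm = ParticleUnion("darkmatter", list(ptr[:-1]))   # every specie but the last
    ds.add_particle_union(dm)
    stars = ParticleUnion("stars", list(ptr[-1:]))     # the last specie holds the stars
    ds.add_particle_union(stars)

    # Fields can then be requested for the union type as a whole:
    ad = ds.all_data()
    dm_mass = ad[("darkmatter", "particle_mass")]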

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving it
because you have the commit notification service enabled for this email
address.


