[yt-svn] commit/yt: 5 new changesets

commits-noreply at bitbucket.org commits-noreply at bitbucket.org
Wed Feb 24 11:33:28 PST 2016


5 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/6b93609ba222/
Changeset:   6b93609ba222
Branch:      yt
User:        brittonsmith
Date:        2016-01-30 12:27:16+00:00
Summary:     Adding cython port of FindBindingEnergy.
Affected #:  1 file

diff -r 34ed15d98fe48c75d824fbf6246d8ab89c4e2398 -r 6b93609ba222f817ab6d6b0748ee5960008f7ee9 yt/utilities/lib/misc_utilities.pyx
--- a/yt/utilities/lib/misc_utilities.pyx
+++ b/yt/utilities/lib/misc_utilities.pyx
@@ -13,12 +13,13 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
+from yt.funcs import get_pbar
 import numpy as np
 from yt.units.yt_array import YTArray
 cimport numpy as np
 cimport cython
 cimport libc.math as math
-from libc.math cimport abs
+from libc.math cimport abs, sqrt
 from fp_utils cimport fmin, fmax, i64min, i64max
 from yt.geometry.selection_routines cimport _ensure_code
 
@@ -966,3 +967,48 @@
                                         dest[i,j,k] += dsp * (overlap[0]*overlap[1]*overlap[2])
                                     else:
                                         dest[i,j,k] = dsp
+
+@cython.cdivision(True)
+@cython.boundscheck(False)
+@cython.wraparound(False)
+def gravitational_binding_energy(
+        np.float64_t[:] mass,
+        np.float64_t[:] x,
+        np.float64_t[:] y,
+        np.float64_t[:] z,
+        int truncate,
+        np.float64_t kinetic):
+
+    cdef int q_outer, q_inner, n_q, i
+    cdef np.float64_t mass_o, x_o, y_o, z_o
+    cdef np.float64_t mass_i, x_i, y_i, z_i
+    cdef np.float64_t this_potential, total_potential
+    total_potential = 0.
+
+    i = 0
+    n_q = mass.size
+    pbar = get_pbar("Calculating potential for %d cells" % n_q,
+                    0.5 * (n_q**2 - n_q))
+    for q_outer in range(n_q - 1):
+        this_potential = 0.
+        mass_o = mass[q_outer]
+        x_o = x[q_outer]
+        y_o = y[q_outer]
+        z_o = z[q_outer]
+        for q_inner in range(q_outer + 1, n_q):
+            mass_i = mass[q_inner]
+            x_i = x[q_inner]
+            y_i = y[q_inner]
+            z_i = z[q_inner]
+            this_potential += mass_o * mass_i / \
+              sqrt((x_i - x_o) * (x_i - x_o) +
+                   (y_i - y_o) * (y_i - y_o) +
+                   (z_i - z_o) * (z_i - z_o))
+        i += n_q - q_outer
+        pbar.update(i)
+        total_potential += this_potential
+        if truncate and total_potential / kinetic:
+            break
+    pbar.finish()
+
+    return total_potential

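For reference, a minimal sketch of how the new routine might be invoked directly (toy inputs, not from the changeset; the function expects float64 arrays and returns the unscaled pair sum, which callers multiply by G):

    import numpy as np
    from yt.utilities.lib.misc_utilities import gravitational_binding_energy

    # Three unit masses on a line; units are whatever the caller supplies
    # (cgs in the clump_validators usage below).
    mass = np.ones(3, dtype=np.float64)
    x = np.array([0.0, 1.0, 2.0], dtype=np.float64)
    y = np.zeros(3, dtype=np.float64)
    z = np.zeros(3, dtype=np.float64)

    # truncate=0 sums m_i * m_j / r_ij over all unique pairs; truncate=1 is
    # meant to stop early once the sum exceeds the kinetic argument (see the
    # fix in changeset f45f7851fbd3 below).
    print(gravitational_binding_energy(mass, x, y, z, 0, 1.0))
    # pairs: 1/1 + 1/2 + 1/1 = 2.5
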

https://bitbucket.org/yt_analysis/yt/commits/9701c1c6fd0c/
Changeset:   9701c1c6fd0c
Branch:      yt
User:        brittonsmith
Date:        2016-01-30 12:29:22+00:00
Summary:     Replacing call to c function with cython version.
Affected #:  1 file

diff -r 6b93609ba222f817ab6d6b0748ee5960008f7ee9 -r 9701c1c6fd0c43a54800413b47394b123262fcb9 yt/analysis_modules/level_sets/clump_validators.py
--- a/yt/analysis_modules/level_sets/clump_validators.py
+++ b/yt/analysis_modules/level_sets/clump_validators.py
@@ -13,7 +13,8 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from yt.utilities.data_point_utilities import FindBindingEnergy
+from yt.utilities.lib.misc_utilities import \
+    gravitational_binding_energy
 from yt.utilities.operator_registry import \
     OperatorRegistry
 from yt.utilities.physical_constants import \
@@ -64,11 +65,12 @@
              (bulk_velocity[2] - clump["all", "particle_velocity_z"])**2)).sum()
 
     potential = clump.data.ds.quan(G *
-        FindBindingEnergy(clump["gas", "cell_mass"].in_cgs(),
-                          clump["index", "x"].in_cgs(),
-                          clump["index", "y"].in_cgs(),
-                          clump["index", "z"].in_cgs(),
-                          truncate, (kinetic / G).in_cgs()),
+        gravitational_binding_energy(
+            clump["gas", "cell_mass"].in_cgs(),
+            clump["index", "x"].in_cgs(),
+            clump["index", "y"].in_cgs(),
+            clump["index", "z"].in_cgs(),
+            truncate, (kinetic / G).in_cgs()),
         kinetic.in_cgs().units)
     
     if truncate and potential >= kinetic:
@@ -76,7 +78,7 @@
 
     if use_particles:
         potential += clump.data.ds.quan(G *
-            FindBindingEnergy(
+            gravitational_binding_energy(
                 clump["all", "particle_mass"].in_cgs(),
                 clump["all", "particle_position_x"].in_cgs(),
                 clump["all", "particle_position_y"].in_cgs(),


https://bitbucket.org/yt_analysis/yt/commits/f45f7851fbd3/
Changeset:   f45f7851fbd3
Branch:      yt
User:        brittonsmith
Date:        2016-01-30 17:15:36+00:00
Summary:     Add extra bit to if statement.
Affected #:  1 file

diff -r 9701c1c6fd0c43a54800413b47394b123262fcb9 -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 yt/utilities/lib/misc_utilities.pyx
--- a/yt/utilities/lib/misc_utilities.pyx
+++ b/yt/utilities/lib/misc_utilities.pyx
@@ -1007,7 +1007,7 @@
         i += n_q - q_outer
         pbar.update(i)
         total_potential += this_potential
-        if truncate and total_potential / kinetic:
+        if truncate and total_potential / kinetic > 1.:
             break
     pbar.finish()
 
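The "extra bit" matters because any nonzero float is truthy, so with truncate set the old condition broke out of the outer loop on its first pass for any positive masses. A quick illustration of the difference:

    kinetic = 10.0
    total_potential = 0.1

    if total_potential / kinetic:        # 0.01 is truthy: old test fires at once
        print("old condition breaks the loop")
    if total_potential / kinetic > 1.:   # fires only once potential exceeds kinetic
        print("new condition breaks the loop")
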


https://bitbucket.org/yt_analysis/yt/commits/2ef50ab5517d/
Changeset:   2ef50ab5517d
Branch:      yt
User:        brittonsmith
Date:        2016-02-17 18:01:18+00:00
Summary:     Merging.
Affected #:  122 files

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 .hgignore
--- a/.hgignore
+++ b/.hgignore
@@ -28,40 +28,37 @@
 yt/utilities/spatial/ckdtree.c
 yt/utilities/lib/alt_ray_tracers.c
 yt/utilities/lib/amr_kdtools.c
+yt/utilities/lib/basic_octree.c
 yt/utilities/lib/bitarray.c
-yt/utilities/lib/CICDeposit.c
-yt/utilities/lib/ContourFinding.c
-yt/utilities/lib/DepthFirstOctree.c
+yt/utilities/lib/contour_finding.c
+yt/utilities/lib/depth_first_octree.c
 yt/utilities/lib/element_mappings.c
-yt/utilities/lib/FixedInterpolator.c
 yt/utilities/lib/fortran_reader.c
 yt/utilities/lib/freetype_writer.c
 yt/utilities/lib/geometry_utils.c
 yt/utilities/lib/image_utilities.c
-yt/utilities/lib/Interpolators.c
+yt/utilities/lib/interpolators.c
 yt/utilities/lib/kdtree.c
 yt/utilities/lib/line_integral_convolution.c
+yt/utilities/lib/mesh_construction.cpp
+yt/utilities/lib/mesh_intersection.cpp
+yt/utilities/lib/mesh_samplers.cpp
+yt/utilities/lib/mesh_traversal.cpp
 yt/utilities/lib/mesh_utilities.c
 yt/utilities/lib/misc_utilities.c
-yt/utilities/lib/Octree.c
-yt/utilities/lib/GridTree.c
+yt/utilities/lib/particle_mesh_operations.c
 yt/utilities/lib/origami.c
+yt/utilities/lib/particle_mesh_operations.c
 yt/utilities/lib/pixelization_routines.c
 yt/utilities/lib/png_writer.c
-yt/utilities/lib/PointsInVolume.c
-yt/utilities/lib/QuadTree.c
-yt/utilities/lib/RayIntegrators.c
+yt/utilities/lib/points_in_volume.c
+yt/utilities/lib/quad_tree.c
+yt/utilities/lib/ray_integrators.c
 yt/utilities/lib/ragged_arrays.c
-yt/utilities/lib/VolumeIntegrator.c
 yt/utilities/lib/grid_traversal.c
 yt/utilities/lib/marching_cubes.c
 yt/utilities/lib/png_writer.h
 yt/utilities/lib/write_array.c
-yt/utilities/lib/element_mappings.c
-yt/utilities/lib/mesh_construction.cpp
-yt/utilities/lib/mesh_samplers.cpp
-yt/utilities/lib/mesh_traversal.cpp
-yt/utilities/lib/mesh_intersection.cpp
 syntax: glob
 *.pyc
 .*.swp

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 clean.sh
--- a/clean.sh
+++ b/clean.sh
@@ -1,4 +1,1 @@
-find . -name "*.so" -exec rm -v {} \;
-find . -name "*.pyc" -exec rm -v {} \;
-find . -name "__config__.py" -exec rm -v {} \;
-rm -rvf build dist
+hg --config extensions.purge= purge --all yt

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 doc/source/examining/loading_data.rst
--- a/doc/source/examining/loading_data.rst
+++ b/doc/source/examining/loading_data.rst
@@ -330,12 +330,17 @@
 Exodus II Data
 --------------
 
+.. note::
+   To load Exodus II data, you need to have the `netcdf4 <http://unidata.github.io/
+   netcdf4-python/>`_ python interface installed.
+
 Exodus II is a file format for Finite Element datasets that is used by the MOOSE
 framework for file IO. Support for this format (and for unstructured mesh data in 
-general) is a new feature as of yt 3.3, so while we aim to fully support it, we also expect 
-there to be some buggy features at present. Currently, yt can visualize first-order
-mesh types only (4-node quads, 8-node hexes, 3-node triangles, and 4-node tetrahedra).
-Development of higher-order visualization capability is a work in progress.
+general) is a new feature as of yt 3.3, so while we aim to fully support it, we 
+also expect there to be some buggy features at present. Currently, yt can visualize 
+quads, hexes, triangles, and tetrahedral element types at first order. Additionally,
+there is experimental support for the high-order visualization of 20-node hex elements.
+Development of more high-order visualization capability is a work in progress.
 
 To load an Exodus II dataset, you can use the ``yt.load`` command on the Exodus II
 file:
@@ -348,14 +353,15 @@
 Because Exodus II datasets can have multiple steps (which can correspond to time steps, 
 picard iterations, non-linear solve iterations, etc...), you can also specify a step
 argument when you load an Exodus II data that defines the index at which to look when
-you read data from the file.
+you read data from the file. Omitting this argument is the same as passing in 0, and
+setting ``step=-1`` selects the last time output in the file.
 
 You can access the connectivity information directly by doing:
 
 .. code-block:: python
     
    import yt
-   ds = yt.load("MOOSE_sample_data/out.e-s010", step=0)
+   ds = yt.load("MOOSE_sample_data/out.e-s010", step=-1)
    print(ds.index.meshes[0].connectivity_coords)
    print(ds.index.meshes[0].connectivity_indices)
    print(ds.index.meshes[1].connectivity_coords)
@@ -368,7 +374,7 @@
 .. code-block:: python
     
    import yt
-   ds = yt.load("MOOSE_sample_data/out.e-s010", step=0)
+   ds = yt.load("MOOSE_sample_data/out.e-s010")
    print(ds.field_list)
 
 This will give you a list of field names like ``('connect1', 'diffused')`` and 
@@ -380,7 +386,7 @@
 .. code-block:: python
     
    import yt
-   ds = yt.load("MOOSE_sample_data/out.e-s010", step=0)
+   ds = yt.load("MOOSE_sample_data/out.e-s010")
    ad = ds.all_data()  # geometric selection, this just grabs everything
    print(ad['connect1', 'convected'])
 
@@ -390,7 +396,7 @@
 .. code-block:: python
 
    import yt
-   ds = yt.load("MOOSE_sample_data/out.e-s010", step=0)
+   ds = yt.load("MOOSE_sample_data/out.e-s010")
    ad = ds.all_data()
    print(ad['connect1', 'convected'].shape)
 
@@ -401,7 +407,7 @@
 .. code-block:: python
 
    import yt
-   ds = yt.load("MOOSE_sample_data/out.e-s010", step=0)
+   ds = yt.load("MOOSE_sample_data/out.e-s010")
    ad = ds.all_data()
    print(ad['connect1', 'vertex_x'])
 
@@ -411,7 +417,7 @@
 .. code-block:: python
 
    import yt
-   ds = yt.load("MOOSE_sample_data/out.e-s010", step=0)
+   ds = yt.load("MOOSE_sample_data/out.e-s010")
    ad = ds.all_data()
    print(ad['connect1', 'conv_indicator'].shape)
 
@@ -420,6 +426,61 @@
 For information about visualizing unstructured mesh data, including Exodus II datasets, 
 please see :ref:`unstructured-mesh-slices` and :ref:`unstructured_mesh_rendering`. 
 
+Displacement Fields
+^^^^^^^^^^^^^^^^^^^
+
+Finite element codes often solve for the displacement of each vertex from its 
+original position as a node variable, rather than updating the actual vertex 
+positions with time. For analysis and visualization, it is often useful to turn 
+these displacements on or off, and to be able to scale them arbitrarily to 
+emphasize certain features of the solution. To allow this, if ``yt`` detects 
+displacement fields in an Exodus II dataset (using the convention that they will
+be named ``disp_x``, ``disp_y``, etc...), it will optionally add these to 
+the mesh vertex positions for the purposes of visualization. Displacement fields 
+can be controlled when a dataset is loaded by passing in an optional dictionary 
+to the ``yt.load`` command. This feature is turned off by default, meaning that 
+a dataset loaded as 
+
+.. code-block:: python
+
+   import yt
+   ds = yt.load("MOOSE_sample_data/mps_out.e")
+
+will not include the displacements in the vertex positions. The displacements can
+be turned on separately for each mesh in the file by passing in a tuple of 
+(scale, offset) pairs for the meshes you want to enable displacements for. 
+For example, the following code snippet turns displacements on for the second 
+mesh, but not the first:
+
+.. code-block:: python
+
+    import yt
+    ds = yt.load("MOOSE_sample_data/mps_out.e", step=10,
+                 displacements={'connect2': (1.0, [0.0, 0.0, 0.0])})
+
+The displacements can also be scaled by an arbitrary factor before they are 
+added in to the vertex positions. The following code turns on displacements
+for both ``connect1`` and ``connect2``, scaling the former by a factor of 5.0
+and the latter by a factor of 10.0:
+
+.. code-block:: python
+
+    import yt
+    ds = yt.load("MOOSE_sample_data/mps_out.e", step=10,
+                 displacements={'connect1': (5.0, [0.0, 0.0, 0.0]),
+                                'connect2': (10.0, [0.0, 0.0, 0.0])})
+
+Finally, we can also apply an arbitrary offset to the mesh vertices after 
+the scale factor is applied. For example, the following code scales all
+displacements in the second mesh by a factor of 5.0, and then shifts
+each vertex in the mesh by 1.0 unit in the z-direction:
+
+.. code-block:: python
+
+    import yt
+    ds = yt.load("MOOSE_sample_data/mps_out.e", step=10,
+                  displacements={'connect2': (5.0, [0.0, 0.0, 1.0])})
+
 
 FITS Data
 ---------
@@ -1232,10 +1293,9 @@
 
 yt has support for reading halo catalogs produced by Rockstar and the inline 
 FOF/SUBFIND halo finders of Gadget and OWLS.  The halo catalogs are treated as 
-particle datasets where each particle represents a single halo.  At this time, 
-yt does not have the ability to load the member particles for a given halo.  
-However, once loaded, further halo analysis can be performed using 
-:ref:`halo_catalog`.
+particle datasets where each particle represents a single halo.  Member particles
+for individual halos can be accessed through halo data containers.  Further halo
+analysis can be performed using :ref:`halo_catalog`.
 
 In the case where halo catalogs are written to multiple files, one must only 
 give the path to one of them.
@@ -1269,11 +1329,39 @@
    # x component of the spin
    print(ad["Subhalo", "SubhaloSpin_0"])
 
+Halo member particles are accessed by creating halo data containers with the
+type of halo ("Group" or "Subhalo") and the halo id.  Scalar values for halos
+can be accessed in the same way.  Halos also have mass, position, and velocity
+attributes.
+
+.. code-block:: python
+
+   halo = ds.halo("Group", 0)
+   # member particles for this halo
+   print(halo["member_ids"])
+   # halo virial radius
+   print(halo["Group_R_Crit200"])
+   # halo mass
+   print(halo.mass)
+
+Subhalo containers can be created using either their absolute ids or their
+subhalo ids.
+
+.. code-block:: python
+
+   # first subhalo of the first halo
+   subhalo = ds.halo("Subhalo", (0, 0))
+   # this subhalo's absolute id
+   print(subhalo.group_identifier)
+   # member particles
+   print(subhalo["member_ids"])
+
 OWLS FOF/SUBFIND
 ^^^^^^^^^^^^^^^^
 
 OWLS halo catalogs have a very similar structure to regular Gadget halo catalogs.  
-The two field types are "FOF" and "SUBFIND".
+The two field types are "FOF" and "SUBFIND".  At this time, halo member particles
+cannot be loaded.
 
 .. code-block:: python
 

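The Gadget halo containers added above also expose the mass, position, and velocity attributes mentioned in the new docs; a short sketch (assuming the same FOF/SUBFIND dataset):

    halo = ds.halo("Group", 0)
    print(halo.mass)      # scalar halo mass
    print(halo.position)  # halo position
    print(halo.velocity)  # halo bulk velocity
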
diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 doc/source/visualizing/unstructured_mesh_rendering.rst
--- a/doc/source/visualizing/unstructured_mesh_rendering.rst
+++ b/doc/source/visualizing/unstructured_mesh_rendering.rst
@@ -14,7 +14,7 @@
 
 .. code-block:: bash
 
-    conda install -c http://use.yt/with_conda/ yt
+    conda install -c http://use.yt/with_conda/ yt=3.3_dev
 
 If you want to install from source, you can use the ``get_yt.sh`` script.
 Be sure to set the INST_YT_SOURCE and INST_UNSTRUCTURED flags to 1 at the 
@@ -114,7 +114,7 @@
     cam = sc.camera
     cam.focus = ds.arr([0.0, 0.0, 0.0], 'code_length')
     cam_pos = ds.arr([-3.0, 3.0, -3.0], 'code_length')
-    north_vector = ds.arr([0.0, 1.0, 1.0], 'dimensionless')
+    north_vector = ds.arr([0.0, -1.0, -1.0], 'dimensionless')
     cam.set_position(cam_pos, north_vector)
 
     # increase the default resolution
@@ -142,7 +142,7 @@
     cam = sc.camera
     cam.focus = ds.arr([0.0, 0.0, 0.0], 'code_length')
     cam_pos = ds.arr([-3.0, 3.0, -3.0], 'code_length')
-    north_vector = ds.arr([0.0, 1.0, 1.0], 'dimensionless')
+    north_vector = ds.arr([0.0, -1.0, -1.0], 'dimensionless')
     cam.set_position(cam_pos, north_vector)
 
     # increase the default resolution
@@ -174,7 +174,7 @@
     cam = sc.camera
     cam.focus = ds.arr([0.0, 0.0, 0.0], 'code_length')
     cam_pos = ds.arr([-3.0, 3.0, -3.0], 'code_length')
-    north_vector = ds.arr([0.0, 1.0, 1.0], 'dimensionless')
+    north_vector = ds.arr([0.0, -1.0, -1.0], 'dimensionless')
     cam.set_position(cam_pos, north_vector)
    
     # increase the default resolution
@@ -205,7 +205,7 @@
     cam = sc.camera
     camera_position = ds.arr([3.0, 3.0, 3.0], 'code_length')
     cam.set_width(ds.arr([2.0, 2.0, 2.0], 'code_length'))
-    north_vector = ds.arr([0.0, 1.0, 0.0], 'dimensionless')
+    north_vector = ds.arr([0.0, -1.0, 0.0], 'dimensionless')
     cam.set_position(camera_position, north_vector)
 
     # increase the default resolution
@@ -236,7 +236,7 @@
     # adjust the camera position and orientation
     cam = sc.camera
     camera_position = ds.arr([-1.0, 1.0, -0.5], 'code_length')
-    north_vector = ds.arr([0.0, 1.0, 1.0], 'dimensionless')
+    north_vector = ds.arr([0.0, -1.0, -1.0], 'dimensionless')
     cam.width = ds.arr([0.04, 0.04, 0.04], 'code_length')
     cam.set_position(camera_position, north_vector)
 
@@ -248,6 +248,43 @@
     sc.annotate_mesh_lines()
     sc.save()
 
+The dataset in the above example contains displacement fields, so this is a good
+opportunity to demonstrate their use. The following example is exactly like the
+above, except we scale the displacements by a factor of 10.0, and additionally 
+apply an offset of 0.01 units to the mesh in the x-direction:
+
+.. python-script::
+
+    import yt
+
+    # We load the last time frame
+    ds = yt.load("MOOSE_sample_data/mps_out.e", step=-1,
+                 displacements={'connect2': (10.0, [0.01, 0.0, 0.0])})
+
+    # create a default scene
+    sc = yt.create_scene(ds, ("connect2", "temp"))
+
+    # override the default colormap. This time we also override
+    # the default color bounds
+    ms = sc.get_source(0)
+    ms.cmap = 'hot'
+    ms.color_bounds = (500.0, 1700.0)
+
+    # adjust the camera position and orientation
+    cam = sc.camera
+    camera_position = ds.arr([-1.0, 1.0, -0.5], 'code_length')
+    north_vector = ds.arr([0.0, 1.0, 1.0], 'dimensionless')
+    cam.width = ds.arr([0.05, 0.05, 0.05], 'code_length')
+    cam.set_position(camera_position, north_vector)
+    
+    # increase the default resolution
+    cam.resolution = (800, 800)
+
+    # render, draw the element boundaries, and save
+    sc.render()
+    sc.annotate_mesh_lines()
+    sc.save()
+
 As with other volume renderings in yt, you can swap out different lenses. Here is 
 an example that uses a "perspective" lens, for which the rays diverge from the 
 camera position according to some opening angle:
@@ -270,7 +307,7 @@
     cam = Camera(ds, lens_type='perspective')
     cam.focus = ds.arr([0.0, 0.0, 0.0], 'code_length')
     cam_pos = ds.arr([-4.5, 4.5, -4.5], 'code_length')
-    north_vector = ds.arr([0.0, 1.0, 1.0], 'dimensionless')
+    north_vector = ds.arr([0.0, -1.0, -1.0], 'dimensionless')
     cam.set_position(cam_pos, north_vector)
    
     # tell our scene to use it
@@ -303,7 +340,7 @@
     cam = Camera(ds)
     cam.focus = ds.arr([0.0, 0.0, 0.0], 'code_length')
     cam.set_position(ds.arr([-3.0, 3.0, -3.0], 'code_length'),
-    ds.arr([0.0, 1.0, 0.0], 'dimensionless'))
+                     ds.arr([0.0, -1.0, 0.0], 'dimensionless'))
     cam.set_width = ds.arr([8.0, 8.0, 8.0], 'code_length')
     cam.resolution = (800, 800)
 
@@ -348,7 +385,7 @@
     cam = sc.camera
     cam.focus = ds.arr([0.0, 0.0, 0.0], 'code_length')
     cam_pos = ds.arr([-3.0, 3.0, -3.0], 'code_length')
-    north_vector = ds.arr([0.0, 1.0, 1.0], 'dimensionless')
+    north_vector = ds.arr([0.0, -1.0, -1.0], 'dimensionless')
     cam.set_position(cam_pos, north_vector)
 
     # increase the default resolution
@@ -398,7 +435,7 @@
 	cam = Camera(ds)
 	camera_position = ds.arr([0.1, 0.0, 0.1], 'code_length')
 	cam.focus = ds.domain_center
-	north_vector = ds.arr([0.3032476, 0.71782557, -0.62671153], 'dimensionless')
+	north_vector = ds.arr([-0.3032476, -0.71782557, 0.62671153], 'dimensionless')
 	cam.width = ds.arr([ 0.04,  0.04,  0.04], 'code_length')
 	cam.resolution = (800, 800)
 	cam.set_position(camera_position, north_vector)

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 setup.py
--- a/setup.py
+++ b/setup.py
@@ -1,27 +1,20 @@
 import os
-import os.path
 import glob
 import sys
-import time
-import subprocess
-import shutil
-import glob
+from sys import platform as _platform
+from setuptools import setup, find_packages
+from setuptools.extension import Extension
+from setuptools.command.build_ext import build_ext as _build_ext
+from setuptools.command.build_py import build_py as _build_py
+from setupext import \
+    check_for_openmp, check_for_pyembree, read_embree_location, \
+    get_mercurial_changeset_id
 
 if sys.version_info < (2, 7):
     print("yt currently requires Python version 2.7")
     print("certain features may fail unexpectedly and silently with older versions.")
     sys.exit(1)
 
-import setuptools
-from distutils.command.build_py import build_py
-from numpy.distutils.misc_util import appendpath
-from numpy.distutils.command import install_data as np_install_data
-from numpy.distutils import log
-from distutils import version
-
-from distutils.core import Command
-from distutils.spawn import find_executable
-
 MAPSERVER_FILES = []
 MAPSERVER_DIRS = [
     "",
@@ -36,109 +29,277 @@
         files += glob.glob("%s/*.%s" % (dir_name, ext))
     MAPSERVER_FILES.append((dir_name, files))
 
-# Verify that we have Cython installed
-REQ_CYTHON = '0.22'
-try:
-    import Cython
-    needs_cython = \
-        version.LooseVersion(Cython.__version__) < version.LooseVersion(REQ_CYTHON)
-except ImportError as e:
-    needs_cython = True
-
-if needs_cython:
-    print("Cython is a build-time requirement for the source tree of yt.")
-    print("Please either install yt from a provided, release tarball,")
-    print("or install Cython (version %s or higher)." % REQ_CYTHON)
-    print("You may be able to accomplish this by typing:")
-    print("     pip install -U Cython")
-    sys.exit(1)
-
-######
-# This next bit comes from Matthew Brett, to get Cython working with NumPy
-# distutils.  I added a bit to get C++ Cython working.
-from os.path import join as pjoin, dirname
-from distutils.dep_util import newer_group
-from distutils.errors import DistutilsError
-
-
-def generate_a_pyrex_source(self, base, ext_name, source, extension):
-    ''' Monkey patch for numpy build_src.build_src method
-
-    Uses Cython instead of Pyrex.
-
-    Assumes Cython is present
-    '''
-    if self.inplace:
-        target_dir = dirname(base)
-    else:
-        target_dir = appendpath(self.build_src, dirname(base))
-    if extension.language == "c++":
-        cplus = True
-        file_ext = ".cpp"
-    else:
-        cplus = False
-        file_ext = ".c"
-    target_file = pjoin(target_dir, ext_name + file_ext)
-    depends = [source] + extension.depends
-    if self.force or newer_group(depends, target_file, 'newer'):
-        import Cython.Compiler.Main
-        log.info("cythonc:> %s" % (target_file))
-        self.mkpath(target_dir)
-        options = Cython.Compiler.Main.CompilationOptions(
-            defaults=Cython.Compiler.Main.default_options,
-            include_path=extension.include_dirs,
-            cplus=cplus,
-            output_file=target_file)
-        cython_result = Cython.Compiler.Main.compile(source,
-                                                     options=options)
-        if cython_result.num_errors != 0:
-            raise DistutilsError("%d errors while compiling %r with Cython"
-                                 % (cython_result.num_errors, source))
-    return target_file
-
-
-from numpy.distutils.command import build_src
-build_src.build_src.generate_a_pyrex_source = generate_a_pyrex_source
-# End snippet
-######
-
 VERSION = "3.3.dev0"
 
 if os.path.exists('MANIFEST'):
     os.remove('MANIFEST')
 
 
-def get_mercurial_changeset_id(target_dir):
-    """adapted from a script by Jason F. Harris, published at
+if check_for_openmp() is True:
+    omp_args = ['-fopenmp']
+else:
+    omp_args = None
 
-    http://jasonfharris.com/blog/2010/05/versioning-your-application-with-the-mercurial-changeset-hash/
 
-    """
-    import subprocess
-    import re
-    get_changeset = subprocess.Popen('hg identify -b -i',
-                                     stdout=subprocess.PIPE,
-                                     stderr=subprocess.PIPE,
-                                     shell=True)
+cython_extensions = [
+    Extension("yt.analysis_modules.photon_simulator.utils",
+              ["yt/analysis_modules/photon_simulator/utils.pyx"]),
+    Extension("yt.analysis_modules.ppv_cube.ppv_utils",
+              ["yt/analysis_modules/ppv_cube/ppv_utils.pyx"],
+              libraries=["m"]),
+    Extension("yt.geometry.grid_visitors",
+              ["yt/geometry/grid_visitors.pyx"],
+              include_dirs=["yt/utilities/lib"],
+              libraries=["m"],
+              depends=["yt/utilities/lib/fp_utils.pxd",
+                       "yt/geometry/grid_visitors.pxd"]),
+    Extension("yt.geometry.grid_container",
+              ["yt/geometry/grid_container.pyx"],
+              include_dirs=["yt/utilities/lib/"],
+              libraries=["m"],
+              depends=["yt/utilities/lib/fp_utils.pxd",
+                       "yt/geometry/grid_container.pxd",
+                       "yt/geometry/grid_visitors.pxd"]),
+    Extension("yt.geometry.oct_container",
+              ["yt/geometry/oct_container.pyx"],
+              include_dirs=["yt/utilities/lib"],
+              libraries=["m"],
+              depends=["yt/utilities/lib/fp_utils.pxd",
+                       "yt/geometry/oct_container.pxd",
+                       "yt/geometry/selection_routines.pxd"]),
+    Extension("yt.geometry.oct_visitors",
+              ["yt/geometry/oct_visitors.pyx"],
+              include_dirs=["yt/utilities/lib/"],
+              libraries=["m"],
+              depends=["yt/utilities/lib/fp_utils.pxd",
+                       "yt/geometry/oct_container.pxd",
+                       "yt/geometry/selection_routines.pxd"]),
+    Extension("yt.geometry.particle_oct_container",
+              ["yt/geometry/particle_oct_container.pyx"],
+              include_dirs=["yt/utilities/lib/"],
+              libraries=["m"],
+              depends=["yt/utilities/lib/fp_utils.pxd",
+                       "yt/geometry/oct_container.pxd",
+                       "yt/geometry/selection_routines.pxd"]),
+    Extension("yt.geometry.selection_routines",
+              ["yt/geometry/selection_routines.pyx"],
+              include_dirs=["yt/utilities/lib/"],
+              libraries=["m"],
+              depends=["yt/utilities/lib/fp_utils.pxd",
+                       "yt/utilities/lib/grid_traversal.pxd",
+                       "yt/geometry/oct_container.pxd",
+                       "yt/geometry/oct_visitors.pxd",
+                       "yt/geometry/grid_container.pxd",
+                       "yt/geometry/grid_visitors.pxd",
+                       "yt/geometry/selection_routines.pxd"]),
+    Extension("yt.geometry.particle_deposit",
+              ["yt/geometry/particle_deposit.pyx"],
+              include_dirs=["yt/utilities/lib/"],
+              libraries=["m"],
+              depends=["yt/utilities/lib/fp_utils.pxd",
+                       "yt/geometry/oct_container.pxd",
+                       "yt/geometry/selection_routines.pxd",
+                       "yt/geometry/particle_deposit.pxd"]),
+    Extension("yt.geometry.particle_smooth",
+              ["yt/geometry/particle_smooth.pyx"],
+              include_dirs=["yt/utilities/lib/"],
+              libraries=["m"],
+              depends=["yt/utilities/lib/fp_utils.pxd",
+                       "yt/geometry/oct_container.pxd",
+                       "yt/geometry/selection_routines.pxd",
+                       "yt/geometry/particle_deposit.pxd",
+                       "yt/geometry/particle_smooth.pxd"]),
+    Extension("yt.geometry.fake_octree",
+              ["yt/geometry/fake_octree.pyx"],
+              include_dirs=["yt/utilities/lib/"],
+              libraries=["m"],
+              depends=["yt/utilities/lib/fp_utils.pxd",
+                       "yt/geometry/oct_container.pxd",
+                       "yt/geometry/selection_routines.pxd"]),
+    Extension("yt.utilities.spatial.ckdtree",
+              ["yt/utilities/spatial/ckdtree.pyx"],
+              libraries=["m"]),
+    Extension("yt.utilities.lib.bitarray",
+              ["yt/utilities/lib/bitarray.pyx"],
+              libraries=["m"], depends=["yt/utilities/lib/bitarray.pxd"]),
+    Extension("yt.utilities.lib.contour_finding",
+              ["yt/utilities/lib/contour_finding.pyx"],
+              include_dirs=["yt/utilities/lib/",
+                            "yt/geometry/"],
+              libraries=["m"],
+              depends=["yt/utilities/lib/fp_utils.pxd",
+                       "yt/utilities/lib/amr_kdtools.pxd",
+                       "yt/utilities/lib/grid_traversal.pxd",
+                       "yt/utilities/lib/contour_finding.pxd",
+                       "yt/geometry/oct_container.pxd"]),
+    Extension("yt.utilities.lib.geometry_utils",
+              ["yt/utilities/lib/geometry_utils.pyx"],
+              extra_compile_args=omp_args,
+              extra_link_args=omp_args,
+              libraries=["m"], depends=["yt/utilities/lib/fp_utils.pxd"]),
+    Extension("yt.utilities.lib.marching_cubes",
+              ["yt/utilities/lib/marching_cubes.pyx",
+               "yt/utilities/lib/fixed_interpolator.c"],
+              include_dirs=["yt/utilities/lib/"],
+              libraries=["m"],
+              depends=["yt/utilities/lib/fp_utils.pxd",
+                       "yt/utilities/lib/fixed_interpolator.pxd",
+                       "yt/utilities/lib/fixed_interpolator.h",
+                       ]),
+    Extension("yt.utilities.lib.pixelization_routines",
+              ["yt/utilities/lib/pixelization_routines.pyx",
+               "yt/utilities/lib/pixelization_constants.c"],
+              include_dirs=["yt/utilities/lib/"],
+              language="c++",
+              libraries=["m"], depends=["yt/utilities/lib/fp_utils.pxd",
+                                        "yt/utilities/lib/pixelization_constants.h",
+                                        "yt/utilities/lib/element_mappings.pxd"]),
+    Extension("yt.utilities.lib.origami",
+              ["yt/utilities/lib/origami.pyx",
+               "yt/utilities/lib/origami_tags.c"],
+              include_dirs=["yt/utilities/lib/"],
+              depends=["yt/utilities/lib/origami_tags.h"]),
+    Extension("yt.utilities.lib.grid_traversal",
+              ["yt/utilities/lib/grid_traversal.pyx",
+               "yt/utilities/lib/fixed_interpolator.c",
+               "yt/utilities/lib/kdtree.c"],
+              include_dirs=["yt/utilities/lib/"],
+              libraries=["m"],
+              extra_compile_args=omp_args,
+              extra_link_args=omp_args,
+              depends=["yt/utilities/lib/fp_utils.pxd",
+                       "yt/utilities/lib/kdtree.h",
+                       "yt/utilities/lib/fixed_interpolator.h",
+                       "yt/utilities/lib/fixed_interpolator.pxd",
+                       "yt/utilities/lib/field_interpolation_tables.pxd"]),
+    Extension("yt.utilities.lib.element_mappings",
+              ["yt/utilities/lib/element_mappings.pyx"],
+              libraries=["m"], depends=["yt/utilities/lib/element_mappings.pxd"]),
+    Extension("yt.utilities.lib.alt_ray_tracers",
+              ["yt/utilities/lib/alt_ray_tracers.pyx"],
+              libraries=["m"]),
+]
 
-    if (get_changeset.stderr.read() != ""):
-        print("Error in obtaining current changeset of the Mercurial repository")
-        changeset = None
+lib_exts = [
+    "particle_mesh_operations", "depth_first_octree", "fortran_reader",
+    "interpolators", "misc_utilities", "basic_octree", "image_utilities",
+    "points_in_volume", "quad_tree", "ray_integrators", "mesh_utilities",
+    "amr_kdtools"
+]
+for ext_name in lib_exts:
+    cython_extensions.append(
+        Extension("yt.utilities.lib.{}".format(ext_name),
+                  ["yt/utilities/lib/{}.pyx".format(ext_name)],
+                  libraries=["m"], depends=["yt/utilities/lib/fp_utils.pxd"]))
 
-    changeset = get_changeset.stdout.read().strip().decode("UTF-8")
-    if (not re.search("^[0-9a-f]{12}", changeset)):
-        print("Current changeset of the Mercurial repository is malformed")
-        changeset = None
+lib_exts = ["write_array", "ragged_arrays", "line_integral_convolution"]
+for ext_name in lib_exts:
+    cython_extensions.append(
+        Extension("yt.utilities.lib.{}".format(ext_name),
+                  ["yt/utilities/lib/{}.pyx".format(ext_name)]))
 
-    return changeset
+extensions = [
+    Extension("yt.analysis_modules.halo_finding.fof.EnzoFOF",
+              ["yt/analysis_modules/halo_finding/fof/EnzoFOF.c",
+               "yt/analysis_modules/halo_finding/fof/kd.c"],
+              libraries=["m"]),
+    Extension("yt.analysis_modules.halo_finding.hop.EnzoHop",
+              glob.glob("yt/analysis_modules/halo_finding/hop/*.c")),
+    Extension("yt.frontends.artio._artio_caller",
+              ["yt/frontends/artio/_artio_caller.pyx"] +
+              glob.glob("yt/frontends/artio/artio_headers/*.c"),
+              include_dirs=["yt/frontends/artio/artio_headers/",
+                            "yt/geometry/",
+                            "yt/utilities/lib/"],
+              depends=glob.glob("yt/frontends/artio/artio_headers/*.c") +
+              ["yt/utilities/lib/fp_utils.pxd",
+               "yt/geometry/oct_container.pxd",
+               "yt/geometry/selection_routines.pxd",
+               "yt/geometry/particle_deposit.pxd"]),
+    Extension("yt.utilities.spatial._distance_wrap",
+              glob.glob("yt/utilities/spatial/src/*.c")),
+    Extension("yt.visualization._MPL",
+              ["yt/visualization/_MPL.c"],
+              libraries=["m"]),
+    Extension("yt.utilities.data_point_utilities",
+              ["yt/utilities/data_point_utilities.c"],
+              libraries=["m"]),
+]
 
+# EMBREE
+if check_for_pyembree() is not None:
+    embree_extensions = [
+        Extension("yt.utilities.lib.mesh_construction",
+                  ["yt/utilities/lib/mesh_construction.pyx"],
+                  depends=["yt/utilities/lib/mesh_construction.pxd"]),
+        Extension("yt.utilities.lib.mesh_traversal",
+                  ["yt/utilities/lib/mesh_traversal.pyx"],
+                  depends=["yt/utilities/lib/mesh_traversal.pxd",
+                           "yt/utilities/lib/grid_traversal.pxd"]),
+        Extension("yt.utilities.lib.mesh_samplers",
+                  ["yt/utilities/lib/mesh_samplers.pyx"],
+                  depends=["yt/utilities/lib/mesh_samplers.pxd",
+                           "yt/utilities/lib/element_mappings.pxd",
+                           "yt/utilities/lib/mesh_construction.pxd"]),
+        Extension("yt.utilities.lib.mesh_intersection",
+                  ["yt/utilities/lib/mesh_intersection.pyx"],
+                  depends=["yt/utilities/lib/mesh_intersection.pxd",
+                           "yt/utilities/lib/mesh_construction.pxd"]),
+    ]
 
-class my_build_src(build_src.build_src):
-    def run(self):
-        build_src.build_src.run(self)
+    embree_prefix = os.path.abspath(read_embree_location())
+    if _platform == "darwin":
+        embree_lib_name = "embree.2"
+    else:
+        embree_lib_name = "embree"
 
+    for ext in embree_extensions:
+        ext.include_dirs.append(os.path.join(embree_prefix, 'include'))
+        ext.library_dirs.append(os.path.join(embree_prefix, 'lib'))
+        ext.language = "c++"
+        ext.libraries += ["m", embree_lib_name]
 
-class my_build_py(build_py):
+    cython_extensions += embree_extensions
+
+# ROCKSTAR
+if os.path.exists("rockstar.cfg"):
+    try:
+        rd = open("rockstar.cfg").read().strip()
+    except IOError:
+        print("Reading Rockstar location from rockstar.cfg failed.")
+        print("Please place the base directory of your")
+        print("Rockstar install in rockstar.cfg and restart.")
+        print("(ex: \"echo '/path/to/Rockstar-0.99' > rockstar.cfg\" )")
+        sys.exit(1)
+
+    rockstar_extdir = "yt/analysis_modules/halo_finding/rockstar"
+    rockstar_extensions = [
+        Extension("yt.analysis_modules.halo_finding.rockstar.rockstar_interface",
+                  sources=[os.path.join(rockstar_extdir, "rockstar_interface.pyx")]),
+        Extension("yt.analysis_modules.halo_finding.rockstar.rockstar_groupies",
+                  sources=[os.path.join(rockstar_extdir, "rockstar_groupies.pyx")])
+    ]
+    for ext in rockstar_extensions:
+        ext.library_dirs.append(rd)
+        ext.libraries.append("rockstar")
+        ext.define_macros.append(("THREADSAFE", ""))
+        ext.include_dirs += [rd,
+                             os.path.join(rd, "io"), os.path.join(rd, "util")]
+    extensions += rockstar_extensions
+
+if os.environ.get("GPERFTOOLS", "no").upper() != "NO":
+    gpd = os.environ["GPERFTOOLS"]
+    idir = os.path.join(gpd, "include")
+    ldir = os.path.join(gpd, "lib")
+    print(("INCLUDE AND LIB DIRS", idir, ldir))
+    cython_extensions.append(
+        Extension("yt.utilities.lib.perftools_wrap",
+                  ["yt/utilities/lib/perftools_wrap.pyx"],
+                  libraries=["profiler"],
+                  library_dirs=[ldir],
+                  include_dirs=[idir]))
+
+class build_py(_build_py):
     def run(self):
         # honor the --dry-run flag
         if not self.dry_run:
@@ -148,68 +309,74 @@
             self.mkpath(target_dir)
             with open(os.path.join(target_dir, '__hg_version__.py'), 'w') as fobj:
                 fobj.write("hg_version = '%s'\n" % changeset)
+        _build_py.run(self)
 
-        build_py.run(self)
+class build_ext(_build_ext):
+    # subclass setuptools extension builder to avoid importing numpy
+    # at top level in setup.py. See http://stackoverflow.com/a/21621689/1382869
+    def finalize_options(self):
+        _build_ext.finalize_options(self)
+        # Prevent numpy from thinking it is still in its setup process
+        # see http://stackoverflow.com/a/21621493/1382869
+        __builtins__.__NUMPY_SETUP__ = False
+        import numpy
+        self.include_dirs.append(numpy.get_include())
 
+setup(
+    name="yt",
+    version=VERSION,
+    description="An analysis and visualization toolkit for Astrophysical "
+                + "simulations, focusing on Adaptive Mesh Refinement data "
+                  "from Enzo, Orion, FLASH, and others.",
+    classifiers=["Development Status :: 5 - Production/Stable",
+                 "Environment :: Console",
+                 "Intended Audience :: Science/Research",
+                 "License :: OSI Approved :: BSD License",
+                 "Operating System :: MacOS :: MacOS X",
+                 "Operating System :: POSIX :: AIX",
+                 "Operating System :: POSIX :: Linux",
+                 "Programming Language :: C",
+                 "Programming Language :: Python",
+                 "Topic :: Scientific/Engineering :: Astronomy",
+                 "Topic :: Scientific/Engineering :: Physics",
+                 "Topic :: Scientific/Engineering :: Visualization"],
+    keywords='astronomy astrophysics visualization ' +
+    'amr adaptivemeshrefinement',
+    entry_points={'console_scripts': [
+        'yt = yt.utilities.command_line:run_main',
+    ],
+        'nose.plugins.0.10': [
+            'answer-testing = yt.utilities.answer_testing.framework:AnswerTesting'
+    ]
+    },
+    packages=find_packages(),
+    setup_requires=[
+        'numpy',
+        'cython>=0.22'
+    ],
+    install_requires=[
+        # 'matplotlib',  # messes up nosetests; will be fixed in future PRs
+        'sympy',
+        'numpy',
+        'IPython',
+    ],
+    cmdclass={'build_ext': build_ext, 'build_py': build_py},
+    author="The yt project",
+    author_email="yt-dev@lists.spacepope.org",
+    url="http://yt-project.org/",
+    license="BSD",
+    zip_safe=False,
+    scripts=["scripts/iyt"],
+    data_files=MAPSERVER_FILES,
+    ext_modules=cython_extensions + extensions
+)
 
-def configuration(parent_package='', top_path=None):
-    from numpy.distutils.misc_util import Configuration
-
-    config = Configuration(None, parent_package, top_path)
-    config.set_options(ignore_setup_xxx_py=True,
-                       assume_default_configuration=True,
-                       delegate_options_to_subpackages=True,
-                       quiet=True)
-
-    config.make_config_py()
-    # config.make_svn_version_py()
-    config.add_subpackage('yt', 'yt')
-    config.add_scripts("scripts/iyt")
-
-    return config
-
-
-def setup_package():
-
-    from numpy.distutils.core import setup
-
-    setup(
-        name="yt",
-        version=VERSION,
-        description="An analysis and visualization toolkit for Astrophysical "
-                    + "simulations, focusing on Adaptive Mesh Refinement data "
-                      "from Enzo, Orion, FLASH, and others.",
-        classifiers=["Development Status :: 5 - Production/Stable",
-                     "Environment :: Console",
-                     "Intended Audience :: Science/Research",
-                     "License :: OSI Approved :: BSD License",
-                     "Operating System :: MacOS :: MacOS X",
-                     "Operating System :: POSIX :: AIX",
-                     "Operating System :: POSIX :: Linux",
-                     "Programming Language :: C",
-                     "Programming Language :: Python",
-                     "Topic :: Scientific/Engineering :: Astronomy",
-                     "Topic :: Scientific/Engineering :: Physics",
-                     "Topic :: Scientific/Engineering :: Visualization"],
-        keywords='astronomy astrophysics visualization ' +
-        'amr adaptivemeshrefinement',
-        entry_points={'console_scripts': [
-        'yt = yt.utilities.command_line:run_main',
-        ],
-            'nose.plugins.0.10': [
-                'answer-testing = yt.utilities.answer_testing.framework:AnswerTesting'
-            ]
-        },
-        author="The yt project",
-        author_email="yt-dev@lists.spacepope.org",
-        url="http://yt-project.org/",
-        license="BSD",
-        configuration=configuration,
-        zip_safe=False,
-        data_files=MAPSERVER_FILES,
-        cmdclass={'build_py': my_build_py, 'build_src': my_build_src},
-    )
-    return
-
-if __name__ == '__main__':
-    setup_package()
+# This info about 'ckdtree' should be incorporated somehow...
+#    setup(maintainer="SciPy Developers",
+#          author="Anne Archibald",
#          maintainer_email="scipy-dev@scipy.org",
+#          description="Spatial algorithms and data structures",
+#          url="http://www.scipy.org",
+#          license="SciPy License (BSD Style)",
+#          **configuration(top_path='').todict()
+#   )

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 setupext.py
--- /dev/null
+++ b/setupext.py
@@ -0,0 +1,100 @@
+import os
+from pkg_resources import resource_filename
+import shutil
+import subprocess
+import sys
+import tempfile
+
+def check_for_openmp():
+    """Returns True if local setup supports OpenMP, False otherwise"""
+
+    # See https://bugs.python.org/issue25150
+    if sys.version_info[:3] == (3, 5, 0) or os.name == 'nt':
+        return False
+
+    # Create a temporary directory
+    tmpdir = tempfile.mkdtemp()
+    curdir = os.getcwd()
+    exit_code = 1
+
+    try:
+        os.chdir(tmpdir)
+
+        # Get compiler invocation
+        compiler = os.getenv('CC', 'cc')
+        compiler = compiler.split(' ')
+
+        # Attempt to compile a test script.
+        # See http://openmp.org/wp/openmp-compilers/
+        filename = r'test.c'
+        file = open(filename, 'wt', 1)
+        file.write(
+            "#include <omp.h>\n"
+            "#include <stdio.h>\n"
+            "int main() {\n"
+            "#pragma omp parallel\n"
+            "printf(\"Hello from thread %d, nthreads %d\\n\", omp_get_thread_num(), omp_get_num_threads());\n"
+            "}"
+        )
+        file.flush()
+        with open(os.devnull, 'w') as fnull:
+            exit_code = subprocess.call(compiler + ['-fopenmp', filename],
+                                        stdout=fnull, stderr=fnull)
+
+        # Clean up
+        file.close()
+    except OSError:
+        return False
+    finally:
+        os.chdir(curdir)
+        shutil.rmtree(tmpdir)
+
+    return exit_code == 0
+
+
+def check_for_pyembree():
+    try:
+        fn = resource_filename("pyembree", "rtcore.pxd")
+    except ImportError:
+        return None
+    return os.path.dirname(fn)
+
+
+def read_embree_location():
+    '''
+
+    Attempts to locate the embree installation. First, we check for an
+    EMBREE_DIR environment variable. If one is not defined, we look for
+    an embree.cfg file in the root yt source directory. Finally, if that
+    is not present, we default to /usr/local. If embree is installed in a
+    non-standard location and none of the above are set, the compile will
+    not succeed. This only gets called if check_for_pyembree() returns
+    something other than None.
+
+    '''
+
+    rd = os.environ.get('EMBREE_DIR')
+    if rd is not None:
+        return rd
+    print("EMBREE_DIR not set. Attempting to read embree.cfg")
+    try:
+        rd = open("embree.cfg").read().strip()
+        return rd
+    except IOError:
+        print("Reading Embree location from embree.cfg failed.")
+        print("If compilation fails, please place the base directory")
+        print("of your Embree install in embree.cfg and restart.")
+        return '/usr/local'
+
+
+def get_mercurial_changeset_id(target_dir):
+    '''
+    Returns changeset and branch using hglib
+    '''
+    try:
+        import hglib
+    except ImportError:
+        return None
+    with hglib.open(target_dir) as repo:
+        changeset = repo.identify(id=True, branch=True).strip().decode('utf8')
+    return changeset

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/analysis_modules/cosmological_observation/light_cone/setup.py
--- a/yt/analysis_modules/cosmological_observation/light_cone/setup.py
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/usr/bin/env python
-
-
-def configuration(parent_package='', top_path=None):
-    from numpy.distutils.misc_util import Configuration
-    config = Configuration('light_cone', parent_package, top_path)
-    config.make_config_py()  # installs __config__.py
-    #config.make_svn_version_py()
-    return config

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/analysis_modules/cosmological_observation/light_ray/setup.py
--- a/yt/analysis_modules/cosmological_observation/light_ray/setup.py
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/usr/bin/env python
-
-
-def configuration(parent_package='', top_path=None):
-    from numpy.distutils.misc_util import Configuration
-    config = Configuration('light_ray', parent_package, top_path)
-    config.make_config_py()  # installs __config__.py
-    #config.make_svn_version_py()
-    return config

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/analysis_modules/cosmological_observation/setup.py
--- a/yt/analysis_modules/cosmological_observation/setup.py
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env python
-
-
-def configuration(parent_package='', top_path=None):
-    from numpy.distutils.misc_util import Configuration
-    config = Configuration('cosmological_observation', parent_package, top_path)
-    config.make_config_py()  # installs __config__.py
-    #config.make_svn_version_py()
-    config.add_subpackage("light_cone")
-    config.add_subpackage("light_ray")
-    return config

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/analysis_modules/halo_finding/fof/setup.py
--- a/yt/analysis_modules/halo_finding/fof/setup.py
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/usr/bin/env python
-
-
-def configuration(parent_package='', top_path=None):
-    from numpy.distutils.misc_util import Configuration
-    config = Configuration('fof', parent_package, top_path)
-    config.add_extension("EnzoFOF", sources=["EnzoFOF.c",
-                                     "kd.c"],
-                                    libraries=["m"])
-    config.make_config_py()  # installs __config__.py
-    #config.make_svn_version_py()
-    return config

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/analysis_modules/halo_finding/hop/setup.py
--- a/yt/analysis_modules/halo_finding/hop/setup.py
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/usr/bin/env python
-
-
-def configuration(parent_package='', top_path=None):
-    from numpy.distutils.misc_util import Configuration
-    config = Configuration('hop', parent_package, top_path)
-    config.add_extension("EnzoHop", sources=["EnzoHop.c",
-                                     "hop_hop.c",
-                                     "hop_kd.c",
-                                     "hop_regroup.c",
-                                     "hop_slice.c",
-                                     "hop_smooth.c"])
-    config.make_config_py()  # installs __config__.py
-    #config.make_svn_version_py()
-    return config

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/analysis_modules/halo_finding/rockstar/setup.py
--- a/yt/analysis_modules/halo_finding/rockstar/setup.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python
-from __future__ import print_function
-import os.path
-import sys
-
-def configuration(parent_package='',top_path=None):
-    from numpy.distutils.misc_util import Configuration
-    config = Configuration('rockstar',parent_package,top_path)
-    config.make_config_py() # installs __config__.py
-    #config.make_svn_version_py()
-    try:
-        rd = open("rockstar.cfg").read().strip()
-    except IOError:
-        print("Reading Rockstar location from rockstar.cfg failed.")
-        print("Please place the base directory of your")
-        print("Rockstar install in rockstar.cfg and restart.")
-        print("(ex: \"echo '/path/to/Rockstar-0.99' > rockstar.cfg\" )")
-        sys.exit(1)
-    config.add_extension("rockstar_interface",
-                         "yt/analysis_modules/halo_finding/rockstar/rockstar_interface.pyx",
-                         library_dirs=[rd],
-                         libraries=["rockstar"],
-                         #define_macros = [("THREADSAFE", "__thread")],
-                         define_macros = [("THREADSAFE", "")],
-                         include_dirs=[rd,
-                                       os.path.join(rd, "io"),
-                                       os.path.join(rd, "util")])
-    config.add_extension("rockstar_groupies",
-                         "yt/analysis_modules/halo_finding/rockstar/rockstar_groupies.pyx",
-                         library_dirs=[rd],
-                         libraries=["rockstar"],
-                         #define_macros = [("THREADSAFE", "__thread")],
-                         define_macros = [("THREADSAFE", "")],
-                         include_dirs=[rd,
-                                       os.path.join(rd, "io"),
-                                       os.path.join(rd, "util")])
-    return config
-

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/analysis_modules/halo_finding/setup.py
--- a/yt/analysis_modules/halo_finding/setup.py
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/usr/bin/env python
-import os.path
-
-
-def configuration(parent_package='', top_path=None):
-    from numpy.distutils.misc_util import Configuration
-    config = Configuration('halo_finding', parent_package, top_path)
-    config.add_subpackage("fof")
-    config.add_subpackage("hop")
-    if os.path.exists("rockstar.cfg"):
-        config.add_subpackage("rockstar")
-    config.make_config_py()  # installs __config__.py
-    return config

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/analysis_modules/halo_mass_function/setup.py
--- a/yt/analysis_modules/halo_mass_function/setup.py
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/usr/bin/env python
-
-
-def configuration(parent_package='', top_path=None):
-    from numpy.distutils.misc_util import Configuration
-    config = Configuration('halo_mass_function', parent_package, top_path)
-    config.make_config_py()  # installs __config__.py
-    #config.make_svn_version_py()
-    return config

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/analysis_modules/level_sets/setup.py
--- a/yt/analysis_modules/level_sets/setup.py
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/usr/bin/env python
-
-
-def configuration(parent_package='', top_path=None):
-    from numpy.distutils.misc_util import Configuration
-    config = Configuration('level_sets', parent_package, top_path)
-    config.make_config_py()  # installs __config__.py
-    #config.make_svn_version_py()
-    return config

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/analysis_modules/particle_trajectories/setup.py
--- a/yt/analysis_modules/particle_trajectories/setup.py
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/usr/bin/env python
-
-
-def configuration(parent_package='', top_path=None):
-    from numpy.distutils.misc_util import Configuration
-    config = Configuration('particle_trajectories', parent_package, top_path)
-    #config.add_subpackage("tests")
-    config.make_config_py()  # installs __config__.py
-    #config.make_svn_version_py()
-    return config

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/analysis_modules/photon_simulator/setup.py
--- a/yt/analysis_modules/photon_simulator/setup.py
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/usr/bin/env python
-
-
-def configuration(parent_package='', top_path=None):
-    from numpy.distutils.misc_util import Configuration
-    config = Configuration('photon_simulator', parent_package, top_path)
-    config.add_extension("utils",
-                         ["yt/analysis_modules/photon_simulator/utils.pyx"])
-    config.add_subpackage("tests")
-    config.make_config_py()  # installs __config__.py
-    #config.make_svn_version_py()
-    return config

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/analysis_modules/ppv_cube/setup.py
--- a/yt/analysis_modules/ppv_cube/setup.py
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/usr/bin/env python
-
-
-def configuration(parent_package='', top_path=None):
-    from numpy.distutils.misc_util import Configuration
-    config = Configuration('ppv_cube', parent_package, top_path)
-    config.add_extension("ppv_utils", 
-                         ["yt/analysis_modules/ppv_cube/ppv_utils.pyx"],
-                         libraries=["m"])
-    config.add_subpackage("tests")
-    config.make_config_py()  # installs __config__.py
-    #config.make_svn_version_py()
-    return config

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/analysis_modules/setup.py
--- a/yt/analysis_modules/setup.py
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/env python
-
-
-def configuration(parent_package='', top_path=None):
-    from numpy.distutils.misc_util import Configuration
-    config = Configuration('analysis_modules', parent_package, top_path)
-    config.make_config_py()  # installs __config__.py
-    config.add_subpackage("absorption_spectrum")
-    config.add_subpackage("cosmological_observation")
-    config.add_subpackage("halo_analysis")
-    config.add_subpackage("halo_finding")
-    config.add_subpackage("halo_mass_function")
-    config.add_subpackage("level_sets")
-    config.add_subpackage("particle_trajectories")
-    config.add_subpackage("photon_simulator")
-    config.add_subpackage("spectral_integrator")
-    config.add_subpackage("star_analysis")
-    config.add_subpackage("two_point_functions")
-    config.add_subpackage("radmc3d_export")
-    config.add_subpackage("sunrise_export")
-    config.add_subpackage("sunyaev_zeldovich")
-    config.add_subpackage("particle_trajectories")
-    config.add_subpackage("photon_simulator")
-    config.add_subpackage("ppv_cube")
-    return config

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/analysis_modules/spectral_integrator/setup.py
--- a/yt/analysis_modules/spectral_integrator/setup.py
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/usr/bin/env python
-
-
-def configuration(parent_package='', top_path=None):
-    from numpy.distutils.misc_util import Configuration
-    config = Configuration('spectral_integrator', parent_package, top_path)
-    config.make_config_py()  # installs __config__.py
-    #config.make_svn_version_py()
-    return config

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/analysis_modules/star_analysis/setup.py
--- a/yt/analysis_modules/star_analysis/setup.py
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/usr/bin/env python
-
-
-def configuration(parent_package='', top_path=None):
-    from numpy.distutils.misc_util import Configuration
-    config = Configuration('star_analysis', parent_package, top_path)
-    config.make_config_py()  # installs __config__.py
-    #config.make_svn_version_py()
-    return config

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/analysis_modules/sunyaev_zeldovich/setup.py
--- a/yt/analysis_modules/sunyaev_zeldovich/setup.py
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/usr/bin/env python
-
-
-def configuration(parent_package='', top_path=None):
-    from numpy.distutils.misc_util import Configuration
-    config = Configuration('sunyaev_zeldovich', parent_package, top_path)
-    config.add_subpackage("tests")
-    config.make_config_py()  # installs __config__.py
-    #config.make_svn_version_py()
-    return config

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/analysis_modules/two_point_functions/setup.py
--- a/yt/analysis_modules/two_point_functions/setup.py
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/usr/bin/env python
-
-
-def configuration(parent_package='', top_path=None):
-    from numpy.distutils.misc_util import Configuration
-    config = Configuration('two_point_functions', parent_package, top_path)
-    config.make_config_py()  # installs __config__.py
-    #config.make_svn_version_py()
-    return config

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/config.py
--- a/yt/config.py
+++ b/yt/config.py
@@ -39,7 +39,7 @@
     storeparameterfiles = 'False',
     parameterfilestore = 'parameter_files.csv',
     maximumstoreddatasets = '500',
-    skip_dataset_cache = 'False',
+    skip_dataset_cache = 'True',
     loadfieldplugins = 'True',
     pluginfilename = 'my_plugins.py',
     parallel_traceback = 'False',
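
For reference, the skip_dataset_cache default flipped above can still be
overridden per session through ytcfg, the same interface the test files in
this changeset use for "__withintesting"; a minimal sketch:

    from yt.config import ytcfg
    # Restore the previous behavior and keep using the on-disk dataset cache.
    ytcfg["yt", "skip_dataset_cache"] = "False"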

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/data_objects/construction_data_containers.py
--- a/yt/data_objects/construction_data_containers.py
+++ b/yt/data_objects/construction_data_containers.py
@@ -702,11 +702,11 @@
         if cls is None:
             raise YTParticleDepositionNotImplemented(method)
         # We allocate number of zones, not number of octs
-        op = cls(self.ActiveDimensions.prod(), kernel_name)
+        op = cls(self.ActiveDimensions, kernel_name)
         op.initialize()
         op.process_grid(self, positions, fields)
         vals = op.finalize()
-        return vals.reshape(self.ActiveDimensions, order="C")
+        return vals.copy(order="C")
 
     def write_to_gdf(self, gdf_path, fields, nprocs=1, field_units=None,
                      **kwargs):

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/data_objects/derived_quantities.py
--- a/yt/data_objects/derived_quantities.py
+++ b/yt/data_objects/derived_quantities.py
@@ -436,22 +436,26 @@
     """
     def count_values(self, use_gas=True, use_particles=True):
         num_vals = 0
-        if use_gas: num_vals += 4
-        if use_particles: num_vals += 4
+        # create the index if it doesn't exist yet
+        self.data_source.ds.index
+        self.use_gas = use_gas & \
+            (("gas", "cell_mass") in self.data_source.ds.field_info)
+        self.use_particles = use_particles & \
+            (("all", "particle_mass") in self.data_source.ds.field_info)
+        if self.use_gas:
+            num_vals += 4
+        if self.use_particles:
+            num_vals += 4
         self.num_vals = num_vals
 
-    def process_chunk(self, data, use_gas=True, use_particles=True):
-        use_gas &= \
-          (("gas", "cell_mass") in self.data_source.ds.field_info)
-        use_particles &= \
-          (("all", "particle_mass") in self.data_source.ds.field_info)
+    def process_chunk(self, data, **kwargs):
         rvals = []
-        if use_gas:
+        if self.use_gas:
             rvals.extend([(data["gas", "specific_angular_momentum_%s" % axis] *
                            data["gas", "cell_mass"]).sum(dtype=np.float64) \
                           for axis in "xyz"])
             rvals.append(data["gas", "cell_mass"].sum(dtype=np.float64))
-        if use_particles:
+        if self.use_particles:
             rvals.extend([(data["all", "particle_specific_angular_momentum_%s" % axis] *
                            data["all", "particle_mass"]).sum(dtype=np.float64) \
                           for axis in "xyz"])
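
For context, the refactor above caches the gas/particle availability checks in
count_values so each chunk no longer re-tests the field registry; the public
quantity interface is unchanged. A minimal usage sketch (the dataset path is a
placeholder, and angular_momentum_vector is assumed to be the registered name
of this quantity):

    import yt
    ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")  # placeholder dataset
    ad = ds.all_data()
    # The flags are validated once against the available fields, then reused
    # for every chunk handled by process_chunk.
    L = ad.quantities.angular_momentum_vector(use_gas=True, use_particles=True)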

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/data_objects/grid_patch.py
--- a/yt/data_objects/grid_patch.py
+++ b/yt/data_objects/grid_patch.py
@@ -327,12 +327,13 @@
         if cls is None:
             raise YTParticleDepositionNotImplemented(method)
         # We allocate number of zones, not number of octs
-        op = cls(self.ActiveDimensions.prod(), kernel_name)
+        # Everything inside this is fortran ordered, so we reverse it here.
+        op = cls(tuple(self.ActiveDimensions)[::-1], kernel_name)
         op.initialize()
         op.process_grid(self, positions, fields)
         vals = op.finalize()
         if vals is None: return
-        return vals.reshape(self.ActiveDimensions, order="C")
+        return vals.transpose() # Fortran-ordered, so transpose.
 
     def select_blocks(self, selector):
         mask = self._get_selector_mask(selector)
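
The comment in the hunk above ("fortran ordered, so we reverse it here")
compresses a fact worth spelling out: filling a buffer in C order with the
reversed shape and then transposing is equivalent to a Fortran-ordered fill of
the original shape. A standalone NumPy check:

    import numpy as np
    dims = (4, 5, 6)
    flat = np.arange(np.prod(dims))
    # C-order fill of the reversed shape, then transpose...
    a = flat.reshape(dims[::-1]).transpose()
    # ...matches a Fortran-order fill of the original shape.
    b = flat.reshape(dims, order="F")
    assert (a == b).all()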

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/data_objects/octree_subset.py
--- a/yt/data_objects/octree_subset.py
+++ b/yt/data_objects/octree_subset.py
@@ -90,10 +90,11 @@
         return self._num_zones + 2*self._num_ghost_zones
 
     def _reshape_vals(self, arr):
-        if len(arr.shape) == 4 and arr.flags["F_CONTIGUOUS"]:
-            return arr
         nz = self.nz
-        n_oct = arr.shape[0] / (nz**3.0)
+        if len(arr.shape) <= 2:
+            n_oct = arr.shape[0] / (nz**3)
+        else:
+            n_oct = max(arr.shape)
         if arr.size == nz*nz*nz*n_oct:
             new_shape = (nz, nz, nz, n_oct)
         elif arr.size == nz*nz*nz*n_oct * 3:
@@ -115,10 +116,9 @@
 
     def select_blocks(self, selector):
         mask = self.oct_handler.mask(selector, domain_id = self.domain_id)
-        mask = self._reshape_vals(mask)
         slicer = OctreeSubsetBlockSlice(self)
         for i, sl in slicer:
-            yield sl, mask[:,:,:,i]
+            yield sl, mask[i,...]
 
     def select_tcoords(self, dobj):
         # These will not be pre-allocated, which can be a problem for speed and

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/data_objects/selection_data_containers.py
--- a/yt/data_objects/selection_data_containers.py
+++ b/yt/data_objects/selection_data_containers.py
@@ -69,7 +69,11 @@
     _con_args = ('p',)
     def __init__(self, p, ds=None, field_parameters=None, data_source=None):
         super(YTPoint, self).__init__(ds, field_parameters, data_source)
-        self.p = p
+        if isinstance(p, YTArray):
+            # we pass p through ds.arr to ensure code units are attached
+            self.p = self.ds.arr(p)
+        else:
+            self.p = self.ds.arr(p, 'code_length')
 
 class YTOrthoRay(YTSelectionContainer1D):
     """

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/data_objects/setup.py
--- a/yt/data_objects/setup.py
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/usr/bin/env python
-
-
-def configuration(parent_package='', top_path=None):
-    from numpy.distutils.misc_util import Configuration
-    config = Configuration('data_objects', parent_package, top_path)
-    config.add_subpackage("tests")
-    config.make_config_py()  # installs __config__.py
-    #config.make_svn_version_py()
-    return config

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -988,7 +988,8 @@
         deps, _ = self.field_info.check_derived_fields([name])
         self.field_dependencies.update(deps)
 
-    def add_deposited_particle_field(self, deposit_field, method, kernel_name='cubic'):
+    def add_deposited_particle_field(self, deposit_field, method, kernel_name='cubic',
+                                     weight_field='particle_mass'):
         """Add a new deposited particle field
 
         Creates a new deposited field based on the particle *deposit_field*.
@@ -1003,13 +1004,15 @@
         method : string
            This is the "method name" which will be looked up in the
            `particle_deposit` namespace as `methodname_deposit`.  Current
-           methods include `count`, `simple_smooth`, `sum`, `std`, `cic`,
-           `weighted_mean`, `mesh_id`, and `nearest`.
+           methods include `simple_smooth`, `sum`, `std`, `cic`, `weighted_mean`,
+           `mesh_id`, and `nearest`.
         kernel_name : string, default 'cubic'
            This is the name of the smoothing kernel to use. It is only used for
            the `simple_smooth` method and is otherwise ignored. Current
            supported kernel names include `cubic`, `quartic`, `quintic`,
            `wendland2`, `wendland4`, and `wendland6`.
+        weight_field : string, default 'particle_mass'
+           Weighting field name for deposition method `weighted_mean`.
 
         Returns
         -------
@@ -1021,38 +1024,45 @@
             ptype, deposit_field = deposit_field[0], deposit_field[1]
         else:
             raise RuntimeError
+
         units = self.field_info[ptype, deposit_field].units
+        take_log = self.field_info[ptype, deposit_field].take_log
+        name_map = {"sum": "sum", "std":"std", "cic": "cic", "weighted_mean": "avg",
+                    "nearest": "nn", "simple_smooth": "ss", "count": "count"}
+        field_name = "%s_" + name_map[method] + "_%s"
+        field_name = field_name % (ptype, deposit_field.replace('particle_', ''))
+
+        if method == "count":
+            field_name = "%s_count" % ptype
+            if ("deposit", field_name) in self.field_info:
+                mylog.warning("The deposited field %s already exists" % field_name)
+                return ("deposit", field_name)
+            else:
+                units = "dimensionless"
+                take_log = False
 
         def _deposit_field(field, data):
             """
-            Create a grid field for particle wuantities weighted by particle
-            mass, using cloud-in-cell deposition.
+            Create a grid field for particle quantities using given method.
             """
             pos = data[ptype, "particle_position"]
-            # get back into density
-            if method != 'count':
-                pden = data[ptype, "particle_mass"]
-                top = data.deposit(pos, [data[(ptype, deposit_field)]*pden],
-                                   method=method, kernel_name=kernel_name)
-                bottom = data.deposit(pos, [pden], method=method,
-                                      kernel_name=kernel_name)
-                top[bottom == 0] = 0.0
-                bnz = bottom.nonzero()
-                top[bnz] /= bottom[bnz]
-                d = data.ds.arr(top, input_units=units)
+            if method == 'weighted_mean':
+                d = data.ds.arr(data.deposit(pos, [data[ptype, deposit_field],
+                                                   data[ptype, weight_field]],
+                                             method=method, kernel_name=kernel_name),
+                                             input_units=units)
+                d[np.isnan(d)] = 0.0
             else:
                 d = data.ds.arr(data.deposit(pos, [data[ptype, deposit_field]],
-                                             method=method,
-                                             kernel_name=kernel_name))
+                                             method=method, kernel_name=kernel_name),
+                                             input_units=units)
             return d
-        name_map = {"cic": "cic", "sum": "nn", "count": "count"}
-        field_name = "%s_" + name_map[method] + "_%s"
-        field_name = field_name % (ptype, deposit_field.replace('particle_', ''))
+
         self.add_field(
             ("deposit", field_name),
             function=_deposit_field,
             units=units,
-            take_log=False,
+            take_log=take_log,
             validators=[ValidateSpatial()])
         return ("deposit", field_name)
 

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/data_objects/tests/test_points.py
--- a/yt/data_objects/tests/test_points.py
+++ b/yt/data_objects/tests/test_points.py
@@ -10,6 +10,17 @@
     from yt.config import ytcfg
     ytcfg["yt","__withintesting"] = "True"
 
+def test_point_creation():
+    ds = fake_random_ds(16)
+    p1 = ds.point(ds.domain_center)
+    p2 = ds.point([0.5, 0.5, 0.5])
+    p3 = ds.point([0.5, 0.5, 0.5]*yt.units.cm)
+
+    # ensure all three points are really at the same position
+    for fname in 'xyz':
+        assert_equal(p1[fname], p2[fname])
+        assert_equal(p1[fname], p3[fname])
+
 def test_domain_point():
     nparticles = 3
     ds = fake_random_ds(16, particles=nparticles)

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/extern/setup.py
--- a/yt/extern/setup.py
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/usr/bin/env python
-#----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-
-def configuration(parent_package='', top_path=None):
-    from numpy.distutils.misc_util import Configuration
-    config = Configuration('extern', parent_package, top_path)
-    config.add_subpackage("tqdm")
-    config.make_config_py()
-    return config

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/fields/setup.py
--- a/yt/fields/setup.py
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/usr/bin/env python
-
-
-def configuration(parent_package='', top_path=None):
-    from numpy.distutils.misc_util import Configuration
-    config = Configuration('fields', parent_package, top_path)
-    config.add_subpackage("tests")
-    config.make_config_py()  # installs __config__.py
-    #config.make_svn_version_py()
-    return config

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/fields/tests/test_fields.py
--- a/yt/fields/tests/test_fields.py
+++ b/yt/fields/tests/test_fields.py
@@ -33,7 +33,7 @@
         fields.append(("gas", fname))
         units.append(code_units)
 
-    base_ds = fake_random_ds(4, fields=fields, units=units, particles=10)
+    base_ds = fake_random_ds(4, fields=fields, units=units, particles=20)
 
     base_ds.index
     base_ds.cosmological_simulation = 1
@@ -195,12 +195,28 @@
             yield TestFieldAccess(field, nproc)
 
 def test_add_deposited_particle_field():
+    # NOT tested: "std", "mesh_id", "nearest" and "simple_smooth"
     global base_ds
-    fn = base_ds.add_deposited_particle_field(('io', 'particle_ones'), 'count')
-    assert_equal(fn, ('deposit', 'io_count_ones'))
     ad = base_ds.all_data()
+
+    # Test "count", "sum" and "cic" method
+    for method in ["count", "sum", "cic"]:
+        fn = base_ds.add_deposited_particle_field(('io', 'particle_mass'), method)
+        expected_fn = 'io_%s' if method == "count" else 'io_%s_mass'
+        assert_equal(fn, ('deposit', expected_fn % method))
+        ret = ad[fn]
+        if method == "count":
+            assert_equal(ret.sum(), ad['particle_ones'].sum())
+        else:
+            assert_almost_equal(ret.sum(), ad['particle_mass'].sum())
+
+    # Test "weighted_mean" method
+    fn = base_ds.add_deposited_particle_field(('io', 'particle_ones'), 'weighted_mean',
+                                              weight_field='particle_ones')
+    assert_equal(fn, ('deposit', 'io_avg_ones'))
     ret = ad[fn]
-    assert_equal(ret.sum(), ad['particle_ones'].sum())
+    # The sum should equal the number of cells that have particles
+    assert_equal(ret.sum(), np.count_nonzero(ad[("deposit", "io_count")]))
 
 @requires_file('GadgetDiskGalaxy/snapshot_200.hdf5')
 def test_add_smoothed_particle_field():

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/frontends/_skeleton/setup.py
--- a/yt/frontends/_skeleton/setup.py
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/usr/bin/env python
-
-
-def configuration(parent_package='', top_path=None):
-    from numpy.distutils.misc_util import Configuration
-    config = Configuration('skeleton', parent_package, top_path)
-    config.make_config_py()  # installs __config__.py
-    #config.make_svn_version_py()
-    return config

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/frontends/art/setup.py
--- a/yt/frontends/art/setup.py
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/usr/bin/env python
-
-
-def configuration(parent_package='',top_path=None):
-    from numpy.distutils.misc_util import Configuration
-    config = Configuration('art',parent_package,top_path)
-    config.make_config_py() # installs __config__.py
-    #config.make_svn_version_py()
-    return config

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/frontends/artio/_artio_caller.pyx
--- a/yt/frontends/artio/_artio_caller.pyx
+++ b/yt/frontends/artio/_artio_caller.pyx
@@ -8,15 +8,14 @@
 from yt.utilities.lib.fp_utils cimport imax
 from yt.geometry.oct_container cimport \
     SparseOctreeContainer
-from yt.geometry.oct_visitors cimport \
-    OctVisitorData, oct_visitor_function, Oct, \
-    fill_file_indices_oind, fill_file_indices_rind
+from yt.geometry.oct_visitors cimport Oct
 from yt.geometry.particle_deposit cimport \
     ParticleDepositOperation
 from libc.stdint cimport int32_t, int64_t
 from libc.stdlib cimport malloc, free
 from libc.string cimport memcpy
 import data_structures
+from yt.utilities.lib.misc_utilities import OnceIndirect
 
 cdef extern from "platform_dep.h":
     void *alloca(int)
@@ -1556,6 +1555,9 @@
         if fields is None:
             fields = []
         nf = len(fields)
+        cdef np.float64_t[::cython.view.indirect, ::1] field_pointers 
+        if nf > 0: field_pointers = OnceIndirect(fields)
+        cdef np.float64_t[:] field_vals = np.empty(nf, dtype="float64")
         cdef np.ndarray[np.uint8_t, ndim=1, cast=True] mask
         mask = self.mask(selector, -1)
         cdef np.ndarray[np.int64_t, ndim=1] domain_ind
@@ -1570,17 +1572,9 @@
                 continue
             domain_ind[sfc - self.sfc_start] = j
             j += 1
-        cdef np.float64_t **field_pointers
-        cdef np.float64_t *field_vals
         cdef np.float64_t pos[3]
         cdef np.float64_t left_edge[3]
         cdef int coords[3]
-        cdef np.ndarray[np.float64_t, ndim=1] tarr
-        field_pointers = <np.float64_t**> alloca(sizeof(np.float64_t *) * nf)
-        field_vals = <np.float64_t*>alloca(sizeof(np.float64_t) * nf)
-        for i in range(nf):
-            tarr = fields[i]
-            field_pointers[i] = <np.float64_t *> tarr.data
         cdef int dims[3]
         dims[0] = dims[1] = dims[2] = 1
         cdef np.int64_t offset, moff

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/frontends/artio/artio_headers/cosmology.c
--- a/yt/frontends/artio/artio_headers/cosmology.c
+++ b/yt/frontends/artio/artio_headers/cosmology.c
@@ -443,9 +443,18 @@
 
 double cosmology_get_value_from_table(CosmologyParameters *c, double a, double table[])
 {
-  int idx = (int)(c->ndex*(log10(a)-c->la[0]));
+  // This is special case code for boundary conditions
+  double la = log10(a);
+  if (fabs(la - c->la[c->size-1]) < 1.0e-14) {
+    return table[c->size-1];
+  } else if (fabs(la - c->la[0]) < 1.0e-14) {
+    return table[0];
+  }
 
-  ASSERT(idx>=0 && idx<c->size);
+  int idx = (int)(c->ndex*(la-c->la[0]));
+
+  // Note that because we do idx+1 below, we need -1 here.
+  ASSERT(idx>=0 && (idx<c->size-1));
 
   /*
   //  Do it as a function of aUni rather than la to ensure exact inversion

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/frontends/artio/setup.py
--- a/yt/frontends/artio/setup.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env python
-import glob
-
-
-def configuration(parent_package='', top_path=None):
-    from numpy.distutils.misc_util import Configuration
-    artio_sources = glob.glob("yt/frontends/artio/artio_headers/*.c")
-    config = Configuration('artio', parent_package, top_path)
-    config.add_extension("_artio_caller",
-                         ["yt/frontends/artio/_artio_caller.pyx"] +
-                         artio_sources,
-                         include_dirs=["yt/frontends/artio/artio_headers/",
-                                       "yt/geometry/",
-                                       "yt/utilities/lib/"],
-                         depends=artio_sources + 
-                                 ["yt/utilities/lib/fp_utils.pxd",
-                                  "yt/geometry/oct_container.pxd",
-                                  "yt/geometry/selection_routines.pxd",
-                                  "yt/geometry/particle_deposit.pxd"])
-    config.make_config_py()  # installs __config__.py
-    #config.make_svn_version_py()
-    return config

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/frontends/athena/setup.py
--- a/yt/frontends/athena/setup.py
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/usr/bin/env python
-
-
-def configuration(parent_package='', top_path=None):
-    from numpy.distutils.misc_util import Configuration
-    config = Configuration('athena', parent_package, top_path)
-    config.make_config_py()  # installs __config__.py
-    #config.make_svn_version_py()
-    return config

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/frontends/boxlib/data_structures.py
--- a/yt/frontends/boxlib/data_structures.py
+++ b/yt/frontends/boxlib/data_structures.py
@@ -178,8 +178,15 @@
                 raise RuntimeError("yt needs cylindrical to be 2D")
             self.level_dds[:,2] = 2*np.pi
             default_zbounds = (0.0, 2*np.pi)
+        elif self.ds.geometry == "spherical":
+            # BoxLib only supports 1D spherical, so ensure
+            # the other dimensions have the right extent.
+            self.level_dds[:,1] = np.pi
+            self.level_dds[:,2] = 2*np.pi
+            default_ybounds = (0.0, np.pi)
+            default_zbounds = (0.0, 2*np.pi)
         else:
-            raise RuntimeError("yt only supports cartesian and cylindrical coordinates.")
+            raise RuntimeError("Unknown BoxLib coordinate system.")
         if int(next(header_file)) != 0:
             raise RuntimeError("INTERNAL ERROR! This should be a zero.")
 
@@ -586,8 +593,10 @@
             self.geometry = "cartesian"
         elif coordinate_type == 1:
             self.geometry = "cylindrical"
+        elif coordinate_type == 2:
+            self.geometry = "spherical"
         else:
-            raise RuntimeError("yt does not yet support spherical geometry")
+            raise RuntimeError("Unknown BoxLib coord_type")
 
         # overrides for 1/2-dimensional data
         if self.dimensionality == 1:

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/frontends/boxlib/setup.py
--- a/yt/frontends/boxlib/setup.py
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/usr/bin/env python
-
-
-def configuration(parent_package='', top_path=None):
-    from numpy.distutils.misc_util import Configuration
-    config = Configuration('boxlib', parent_package, top_path)
-    config.make_config_py()  # installs __config__.py
-    #config.make_svn_version_py()
-    return config

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/frontends/chombo/setup.py
--- a/yt/frontends/chombo/setup.py
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/usr/bin/env python
-
-
-def configuration(parent_package='', top_path=None):
-    from numpy.distutils.misc_util import Configuration
-    config = Configuration('chombo', parent_package, top_path)
-    config.make_config_py()  # installs __config__.py
-    #config.make_svn_version_py()
-    return config

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/frontends/eagle/setup.py
--- a/yt/frontends/eagle/setup.py
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/usr/bin/env python
-
-
-def configuration(parent_package='', top_path=None):
-    from numpy.distutils.misc_util import Configuration
-    config = Configuration('eagle', parent_package, top_path)
-    config.make_config_py()  # installs __config__.py
-    #config.make_svn_version_py()
-    return config

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/frontends/enzo/data_structures.py
--- a/yt/frontends/enzo/data_structures.py
+++ b/yt/frontends/enzo/data_structures.py
@@ -277,14 +277,14 @@
             active_particles = True
             nap = dict((ap_type, []) for ap_type in 
                 params["Physics"]["ActiveParticles"]["ActiveParticlesEnabled"])
-        elif version == 2.2:
-            active_particles = True
+        else:
             nap = {}
-            for type in self.parameters.get("AppendActiveParticleType", []):
-                nap[type] = []
-        else:
-            active_particles = False
-            nap = None
+            if "AppendActiveParticleType" in self.parameters:
+                active_particles = True
+                for type in self.parameters.get("AppendActiveParticleType", []):
+                    nap[type] = []
+            else:
+                active_particles = False
         for grid_id in range(self.num_grids):
             pbar.update(grid_id)
             # We will unroll this list
@@ -394,7 +394,7 @@
         fields = []
         for ptype in self.dataset["AppendActiveParticleType"]:
             select_grids = self.grid_active_particle_count[ptype].flat
-            if np.any(select_grids) is False:
+            if not np.any(select_grids):
                 current_ptypes = self.dataset.particle_types
                 new_ptypes = [p for p in current_ptypes if p != ptype]
                 self.dataset.particle_types = new_ptypes

diff -r f45f7851fbd329f397e4f865ab5f249ba16d77a8 -r 2ef50ab5517d0d772ff1484fa559596cca7546b6 yt/frontends/enzo/setup.py
--- a/yt/frontends/enzo/setup.py
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/usr/bin/env python
-
-
-def configuration(parent_package='', top_path=None):
-    from numpy.distutils.misc_util import Configuration
-    config = Configuration('enzo', parent_package, top_path)
-    config.make_config_py()  # installs __config__.py
-    #config.make_svn_version_py()
-    return config

This diff is so big that we needed to truncate the remainder.

https://bitbucket.org/yt_analysis/yt/commits/dda193582d6c/
Changeset:   dda193582d6c
Branch:      yt
User:        ngoldbaum
Date:        2016-02-24 19:33:14+00:00
Summary:     Merged in brittonsmith/yt (pull request #1972)

Porting C function, FindBindingEnergy, to Cython
Affected #:  2 files

diff -r cef8aa8a3bb23ecdc6cd8f91d3bdf1701fb1608e -r dda193582d6c210518df5d127cef1a7294dca692 yt/analysis_modules/level_sets/clump_validators.py
--- a/yt/analysis_modules/level_sets/clump_validators.py
+++ b/yt/analysis_modules/level_sets/clump_validators.py
@@ -13,7 +13,8 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from yt.utilities.data_point_utilities import FindBindingEnergy
+from yt.utilities.lib.misc_utilities import \
+    gravitational_binding_energy
 from yt.utilities.operator_registry import \
     OperatorRegistry
 from yt.utilities.physical_constants import \
@@ -64,11 +65,12 @@
              (bulk_velocity[2] - clump["all", "particle_velocity_z"])**2)).sum()
 
     potential = clump.data.ds.quan(G *
-        FindBindingEnergy(clump["gas", "cell_mass"].in_cgs(),
-                          clump["index", "x"].in_cgs(),
-                          clump["index", "y"].in_cgs(),
-                          clump["index", "z"].in_cgs(),
-                          truncate, (kinetic / G).in_cgs()),
+        gravitational_binding_energy(
+            clump["gas", "cell_mass"].in_cgs(),
+            clump["index", "x"].in_cgs(),
+            clump["index", "y"].in_cgs(),
+            clump["index", "z"].in_cgs(),
+            truncate, (kinetic / G).in_cgs()),
         kinetic.in_cgs().units)
     
     if truncate and potential >= kinetic:
@@ -76,7 +78,7 @@
 
     if use_particles:
         potential += clump.data.ds.quan(G *
-            FindBindingEnergy(
+            gravitational_binding_energy(
                 clump["all", "particle_mass"].in_cgs(),
                 clump["all", "particle_position_x"].in_cgs(),
                 clump["all", "particle_position_y"].in_cgs(),
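
The validator built on top of this function is registered as
"gravitationally_bound". A minimal sketch of attaching it to a clump hierarchy
(names follow the yt 3.x level_sets interface; data_source and the contouring
field are placeholders):

    from yt.analysis_modules.level_sets.api import Clump
    master_clump = Clump(data_source, ("gas", "density"))
    # use_particles=True adds the particle term shown in the hunk above;
    # truncate lets the Cython loop stop early once the accumulated
    # potential exceeds the kinetic energy.
    master_clump.add_validator("gravitationally_bound", use_particles=True)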

diff -r cef8aa8a3bb23ecdc6cd8f91d3bdf1701fb1608e -r dda193582d6c210518df5d127cef1a7294dca692 yt/utilities/lib/misc_utilities.pyx
--- a/yt/utilities/lib/misc_utilities.pyx
+++ b/yt/utilities/lib/misc_utilities.pyx
@@ -13,12 +13,13 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
+from yt.funcs import get_pbar
 import numpy as np
 from yt.units.yt_array import YTArray
 cimport numpy as np
 cimport cython
 cimport libc.math as math
-from libc.math cimport abs
+from libc.math cimport abs, sqrt
 from yt.utilities.lib.fp_utils cimport fmin, fmax, i64min, i64max
 from yt.geometry.selection_routines cimport _ensure_code
 
@@ -974,6 +975,51 @@
                                     else:
                                         dest[i,j,k] = dsp
 
+@cython.cdivision(True)
+@cython.boundscheck(False)
+@cython.wraparound(False)
+def gravitational_binding_energy(
+        np.float64_t[:] mass,
+        np.float64_t[:] x,
+        np.float64_t[:] y,
+        np.float64_t[:] z,
+        int truncate,
+        np.float64_t kinetic):
+
+    cdef int q_outer, q_inner, n_q, i
+    cdef np.float64_t mass_o, x_o, y_o, z_o
+    cdef np.float64_t mass_i, x_i, y_i, z_i
+    cdef np.float64_t this_potential, total_potential
+    total_potential = 0.
+
+    i = 0
+    n_q = mass.size
+    pbar = get_pbar("Calculating potential for %d cells" % n_q,
+                    0.5 * (n_q**2 - n_q))
+    for q_outer in range(n_q - 1):
+        this_potential = 0.
+        mass_o = mass[q_outer]
+        x_o = x[q_outer]
+        y_o = y[q_outer]
+        z_o = z[q_outer]
+        for q_inner in range(q_outer + 1, n_q):
+            mass_i = mass[q_inner]
+            x_i = x[q_inner]
+            y_i = y[q_inner]
+            z_i = z[q_inner]
+            this_potential += mass_o * mass_i / \
+              sqrt((x_i - x_o) * (x_i - x_o) +
+                   (y_i - y_o) * (y_i - y_o) +
+                   (z_i - z_o) * (z_i - z_o))
+        i += n_q - q_outer
+        pbar.update(i)
+        total_potential += this_potential
+        if truncate and total_potential / kinetic > 1.:
+            break
+    pbar.finish()
+
+    return total_potential
+
 # The OnceIndirect code is from:
 # http://stackoverflow.com/questions/10465091/assembling-a-cython-memoryview-from-numpy-arrays/12991519#12991519
 # This is under the CC-BY-SA license.
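
In equation form, the quantity accumulated by gravitational_binding_energy is
the pairwise potential sum (the factor of G is applied by the caller in
clump_validators.py, which is also why the kinetic energy is passed in as
K/G):

    U = \sum_{i<j} \frac{m_i m_j}{\sqrt{(x_j - x_i)^2 + (y_j - y_i)^2 + (z_j - z_i)^2}}

An O(N^2) NumPy cross-check of the double loop (without the truncation
branch):

    import numpy as np

    def binding_energy_check(m, x, y, z):
        # Pairwise separations; keep only the upper triangle (i < j).
        dx = x[:, None] - x[None, :]
        dy = y[:, None] - y[None, :]
        dz = z[:, None] - z[None, :]
        r = np.sqrt(dx**2 + dy**2 + dz**2)
        iu = np.triu_indices(len(m), k=1)
        return (np.outer(m, m)[iu] / r[iu]).sum()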

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.


