[yt-svn] commit/yt: 31 new changesets

commits-noreply at bitbucket.org
Fri Jul 18 08:09:16 PDT 2014


31 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/f2585c5f7bff/
Changeset:   f2585c5f7bff
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-07 22:38:32
Summary:     Add YTGDFAlreadyExists exception.
Affected #:  1 file

diff -r 599797ab880b78ea678c7ce63bb4c70b41394620 -r f2585c5f7bffd9a8338d977646312ab7f5d20410 yt/utilities/exceptions.py
--- a/yt/utilities/exceptions.py
+++ b/yt/utilities/exceptions.py
@@ -409,3 +409,10 @@
             """ % (self.field,)
         r += "\n".join([c for c in self.conditions])
         return r
+
+class YTGDFAlreadyExists(Exception):
+    def __init__(self, filename):
+        self.filename = filename
+
+    def __str__(self):
+        return "A file already exists at %s and clobber=False." % self.filename
\ No newline at end of file

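For illustration, a minimal sketch of how the new exception is meant to be caught (the class and message come from the diff above; the filename and the try/except are hypothetical):

    from yt.utilities.exceptions import YTGDFAlreadyExists

    try:
        # the GDF writer raises this when the target file exists
        # and clobber=False (see the next changeset)
        raise YTGDFAlreadyExists("/tmp/clumps.h5")
    except YTGDFAlreadyExists as e:
        print "Refusing to overwrite:", e.filename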

https://bitbucket.org/yt_analysis/yt/commits/f8f4e24ed781/
Changeset:   f8f4e24ed781
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-07 22:38:56
Summary:     Allow the GDF file to be clobbered if it already exists.
Affected #:  1 file

diff -r f2585c5f7bffd9a8338d977646312ab7f5d20410 -r f8f4e24ed78171c4c95e44d05fabcf042a2f95a5 yt/utilities/grid_data_format/writer.py
--- a/yt/utilities/grid_data_format/writer.py
+++ b/yt/utilities/grid_data_format/writer.py
@@ -18,10 +18,10 @@
 import numpy as np
 
 from yt import __version__ as yt_version
-
+from yt.utilities.exceptions import YTGDFAlreadyExists
 
 def write_to_gdf(pf, gdf_path, data_author=None, data_comment=None,
-                 particle_type_name="dark_matter"):
+                 particle_type_name="dark_matter", clobber=False):
     """
     Write a parameter file to the given path in the Grid Data Format.
 
@@ -35,7 +35,7 @@
     """
 
     f = _create_new_gdf(pf, gdf_path, data_author, data_comment,
-                        particle_type_name)
+                        particle_type_name, clobber=clobber)
 
     # now add the fields one-by-one
     for field_name in pf.field_list:
@@ -140,15 +140,14 @@
 
 
 def _create_new_gdf(pf, gdf_path, data_author=None, data_comment=None,
-                    particle_type_name="dark_matter"):
+                    particle_type_name="dark_matter", clobber=False):
     # Make sure we have the absolute path to the file first
     gdf_path = os.path.abspath(gdf_path)
 
     # Stupid check -- is the file already there?
     # @todo: make this a specific exception/error.
-    if os.path.exists(gdf_path):
-        raise IOError("A file already exists in the location: %s. Please \
-                      provide a new one or remove that file." % gdf_path)
+    if os.path.exists(gdf_path) and not clobber:
+        raise YTGDFAlreadyExists(gdf_path)
 
     ###
     # Create and open the file with h5py

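A minimal usage sketch of the new keyword, assuming the yt-3.0 loading interface (the dataset path is a placeholder):

    import yt
    from yt.utilities.grid_data_format.writer import write_to_gdf
    from yt.utilities.exceptions import YTGDFAlreadyExists

    ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")

    write_to_gdf(ds, "galaxy.gdf")          # first write succeeds
    try:
        write_to_gdf(ds, "galaxy.gdf")      # now raises YTGDFAlreadyExists
    except YTGDFAlreadyExists:
        write_to_gdf(ds, "galaxy.gdf", clobber=True)  # overwrite explicitly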

https://bitbucket.org/yt_analysis/yt/commits/098075c1eff5/
Changeset:   098075c1eff5
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-08 00:09:15
Summary:     This array should be flattened before writing it
Affected #:  1 file

diff -r f8f4e24ed78171c4c95e44d05fabcf042a2f95a5 -r 098075c1eff5e1e19173d125a35f3f9dd67b2447 yt/utilities/grid_data_format/writer.py
--- a/yt/utilities/grid_data_format/writer.py
+++ b/yt/utilities/grid_data_format/writer.py
@@ -211,7 +211,7 @@
     f["grid_left_index"] = np.array(
         [grid.get_global_startindex() for grid in pf.index.grids]
     ).reshape(pf.index.grid_dimensions.shape[0], 3)
-    f["grid_level"] = pf.index.grid_levels
+    f["grid_level"] = pf.index.grid_levels.flat
     # @todo: Fill with proper values
     f["grid_parent_id"] = -np.ones(pf.index.grid_dimensions.shape[0])
     f["grid_particle_count"] = pf.index.grid_particle_count

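For context, pf.index.grid_levels is an (N, 1) column vector; assigning it directly gives the HDF5 dataset that same 2D shape, while GDF expects a flat array of N levels. A small plain-h5py illustration (file name and values are made up):

    import numpy as np
    import h5py

    grid_levels = np.zeros((4, 1), dtype="int64")   # shape (N, 1)
    with h5py.File("sketch.h5", "w") as f:
        f["grid_level"] = grid_levels.flat          # stored as shape (4,)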

https://bitbucket.org/yt_analysis/yt/commits/d1d0bc4ea8a9/
Changeset:   d1d0bc4ea8a9
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-08 00:10:53
Summary:     Write GDF files from covering grids.
Affected #:  1 file

diff -r 098075c1eff5e1e19173d125a35f3f9dd67b2447 -r d1d0bc4ea8a916dd2401fa021cca5627f26522b1 yt/data_objects/construction_data_containers.py
--- a/yt/data_objects/construction_data_containers.py
+++ b/yt/data_objects/construction_data_containers.py
@@ -45,6 +45,8 @@
     parallel_objects, parallel_root_only, ParallelAnalysisInterface
 from yt.units.unit_object import Unit
 import yt.geometry.particle_deposit as particle_deposit
+from yt.utilities.grid_data_format.writer import write_to_gdf
+from yt.frontends.stream.api import load_uniform_grid
 
 from yt.fields.field_exceptions import \
     NeedsGridType,\
@@ -581,6 +583,17 @@
         vals = op.finalize()
         return vals.reshape(self.ActiveDimensions, order="C")
 
+    def write_to_gdf(self, gdf_path, fields, nprocs=1, **kwargs):
+        data = {}
+        for field in fields:
+            data[field] = (self[field].v, str(self[field].units))
+        le = self.left_edge.in_cgs().v
+        re = self.right_edge.in_cgs().v
+        bbox = np.array([[l,r] for l,r in zip(le, re)])
+        ds = load_uniform_grid(data, self.ActiveDimensions, bbox=bbox,
+                               nprocs=nprocs, sim_time=self.pf.current_time.in_cgs())
+        write_to_gdf(ds, gdf_path, **kwargs)
+
 class YTArbitraryGridBase(YTCoveringGridBase):
     """A 3D region with arbitrary bounds and dimensions.
 

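A sketch of the new method in use, assuming the yt-3.0 covering_grid interface (dataset path and field name are placeholders):

    import yt

    ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")

    # extract a 64^3 covering grid at level 2 and dump it to GDF,
    # split into 4 subgrids; extra keywords are forwarded to write_to_gdf
    cg = ds.covering_grid(2, ds.domain_left_edge, [64, 64, 64])
    cg.write_to_gdf("cube.gdf", ["density"], nprocs=4, clobber=True)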

https://bitbucket.org/yt_analysis/yt/commits/566e7e089056/
Changeset:   566e7e089056
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-08 00:42:02
Summary:     Merge
Affected #:  18 files

diff -r d1d0bc4ea8a916dd2401fa021cca5627f26522b1 -r 566e7e089056ea4c2ca8cc0a707985c068e2b7d3 doc/source/cookbook/aligned_cutting_plane.py
--- a/doc/source/cookbook/aligned_cutting_plane.py
+++ /dev/null
@@ -1,20 +0,0 @@
-### THIS RECIPE IS CURRENTLY BROKEN IN YT-3.0
-### DO NOT TRUST THIS RECIPE UNTIL THIS LINE IS REMOVED
-
-import yt
-
-# Load the dataset.
-ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
-
-# Create a 15 kpc radius sphere, centered on the center of the sim volume
-sp = ds.sphere("center", (15.0, "kpc"))
-
-# Get the angular momentum vector for the sphere.
-L = sp.quantities.angular_momentum_vector()
-
-print "Angular momentum vector: {0}".format(L)
-
-# Create an OffAxisSlicePlot of density centered on the object with the L 
-# vector as its normal and a width of 25 kpc on a side
-p = yt.OffAxisSlicePlot(ds, L, "density", sp.center, (25, "kpc"))
-p.save()

diff -r d1d0bc4ea8a916dd2401fa021cca5627f26522b1 -r 566e7e089056ea4c2ca8cc0a707985c068e2b7d3 doc/source/cookbook/camera_movement.py
--- a/doc/source/cookbook/camera_movement.py
+++ b/doc/source/cookbook/camera_movement.py
@@ -1,6 +1,3 @@
-### THIS RECIPE IS CURRENTLY BROKEN IN YT-3.0
-### DO NOT TRUST THIS RECIPE UNTIL THIS LINE IS REMOVED
-
 import yt
 import numpy as np
 

diff -r d1d0bc4ea8a916dd2401fa021cca5627f26522b1 -r 566e7e089056ea4c2ca8cc0a707985c068e2b7d3 doc/source/cookbook/halo_plotting.py
--- a/doc/source/cookbook/halo_plotting.py
+++ b/doc/source/cookbook/halo_plotting.py
@@ -1,6 +1,3 @@
-### THIS RECIPE IS CURRENTLY BROKEN IN YT-3.0
-### DO NOT TRUST THIS RECIPE UNTIL THIS LINE IS REMOVED
-
 import yt
 from yt.analysis_modules.halo_analysis.halo_catalog import HaloCatalog
 

diff -r d1d0bc4ea8a916dd2401fa021cca5627f26522b1 -r 566e7e089056ea4c2ca8cc0a707985c068e2b7d3 doc/source/cookbook/rad_velocity.py
--- a/doc/source/cookbook/rad_velocity.py
+++ b/doc/source/cookbook/rad_velocity.py
@@ -1,6 +1,3 @@
-### THIS RECIPE IS CURRENTLY BROKEN IN YT-3.0
-### DO NOT TRUST THIS RECIPE UNTIL THIS LINE IS REMOVED
-
 import yt
 import matplotlib.pyplot as plt
 

diff -r d1d0bc4ea8a916dd2401fa021cca5627f26522b1 -r 566e7e089056ea4c2ca8cc0a707985c068e2b7d3 doc/source/cookbook/radial_profile_styles.py
--- a/doc/source/cookbook/radial_profile_styles.py
+++ b/doc/source/cookbook/radial_profile_styles.py
@@ -1,6 +1,3 @@
-### THIS RECIPE IS CURRENTLY BROKEN IN YT-3.0
-### DO NOT TRUST THIS RECIPE UNTIL THIS LINE IS REMOVED
-
 import yt
 import matplotlib.pyplot as plt
 

diff -r d1d0bc4ea8a916dd2401fa021cca5627f26522b1 -r 566e7e089056ea4c2ca8cc0a707985c068e2b7d3 doc/source/cookbook/simple_off_axis_projection.py
--- a/doc/source/cookbook/simple_off_axis_projection.py
+++ b/doc/source/cookbook/simple_off_axis_projection.py
@@ -3,9 +3,7 @@
 # Load the dataset.
 ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
 
-# Create a 1 kpc radius sphere, centered on the max density.  Note that this
-# sphere is very small compared to the size of our final plot, and it has a
-# non-axially aligned L vector.
+# Create a 15 kpc radius sphere, centered on the center of the sim volume
 sp = ds.sphere("center", (15.0, "kpc"))
 
 # Get the angular momentum vector for the sphere.
@@ -13,6 +11,7 @@
 
 print "Angular momentum vector: {0}".format(L)
 
-# Create an OffAxisSlicePlot on the object with the L vector as its normal
+# Create an OffAxisProjectionPlot of density centered on the object with the L 
+# vector as its normal and a width of 25 kpc on a side
 p = yt.OffAxisProjectionPlot(ds, L, "density", sp.center, (25, "kpc"))
 p.save()

diff -r d1d0bc4ea8a916dd2401fa021cca5627f26522b1 -r 566e7e089056ea4c2ca8cc0a707985c068e2b7d3 doc/source/cookbook/simple_off_axis_slice.py
--- /dev/null
+++ b/doc/source/cookbook/simple_off_axis_slice.py
@@ -0,0 +1,17 @@
+import yt
+
+# Load the dataset.
+ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
+
+# Create a 15 kpc radius sphere, centered on the center of the sim volume
+sp = ds.sphere("center", (15.0, "kpc"))
+
+# Get the angular momentum vector for the sphere.
+L = sp.quantities.angular_momentum_vector()
+
+print "Angular momentum vector: {0}".format(L)
+
+# Create an OffAxisSlicePlot of density centered on the object with the L 
+# vector as its normal and a width of 25 kpc on a side
+p = yt.OffAxisSlicePlot(ds, L, "density", sp.center, (25, "kpc"))
+p.save()

diff -r d1d0bc4ea8a916dd2401fa021cca5627f26522b1 -r 566e7e089056ea4c2ca8cc0a707985c068e2b7d3 doc/source/cookbook/simple_plots.rst
--- a/doc/source/cookbook/simple_plots.rst
+++ b/doc/source/cookbook/simple_plots.rst
@@ -49,8 +49,7 @@
 Simple Radial Profiles
 ~~~~~~~~~~~~~~~~~~~~~~
 
-This shows how to make a profile of a quantity with respect to the radius, in
-this case the radius in Mpc.
+This shows how to make a profile of a quantity with respect to the radius.
 
 .. yt_cookbook:: simple_radial_profile.py
 
@@ -87,17 +86,17 @@
 Off-Axis Slicing
 ~~~~~~~~~~~~~~~~
 
-A cutting plane allows you to slice at some angle that isn't aligned with the
-axes.
+One can create slices from any arbitrary angle, not just those aligned with
+the x,y,z axes.
 
-.. yt_cookbook:: aligned_cutting_plane.py
+.. yt_cookbook:: simple_off_axis_slice.py
 
 .. _cookbook-simple-off-axis-projection:
 
 Off-Axis Projection
 ~~~~~~~~~~~~~~~~~~~
 
-Like cutting planes, off-axis projections can be created from any arbitrary 
+Like off-axis slices, off-axis projections can be created from any arbitrary 
 viewing angle.
 
 .. yt_cookbook:: simple_off_axis_projection.py

diff -r d1d0bc4ea8a916dd2401fa021cca5627f26522b1 -r 566e7e089056ea4c2ca8cc0a707985c068e2b7d3 doc/source/cookbook/simple_slice_with_multiple_fields.py
--- a/doc/source/cookbook/simple_slice_with_multiple_fields.py
+++ b/doc/source/cookbook/simple_slice_with_multiple_fields.py
@@ -1,6 +1,3 @@
-### THIS RECIPE IS CURRENTLY BROKEN IN YT-3.0
-### DO NOT TRUST THIS RECIPE UNTIL THIS LINE IS REMOVED
-
 import yt
 
 # Load the dataset

diff -r d1d0bc4ea8a916dd2401fa021cca5627f26522b1 -r 566e7e089056ea4c2ca8cc0a707985c068e2b7d3 doc/source/cookbook/time_series_profiles.py
--- a/doc/source/cookbook/time_series_profiles.py
+++ b/doc/source/cookbook/time_series_profiles.py
@@ -1,6 +1,3 @@
-### THIS RECIPE IS CURRENTLY BROKEN IN YT-3.0
-### DO NOT TRUST THIS RECIPE UNTIL THIS LINE IS REMOVED
-
 import yt
 
 # Create a time-series object.

diff -r d1d0bc4ea8a916dd2401fa021cca5627f26522b1 -r 566e7e089056ea4c2ca8cc0a707985c068e2b7d3 yt/data_objects/construction_data_containers.py
--- a/yt/data_objects/construction_data_containers.py
+++ b/yt/data_objects/construction_data_containers.py
@@ -639,7 +639,7 @@
         self.ActiveDimensions = np.array(dims, dtype='int32')
         if self.ActiveDimensions.size == 1:
             self.ActiveDimensions = np.array([dims, dims, dims], dtype="int32")
-        self.dds = (self.right_edge - self.left_edge)/self.ActiveDimensions
+        self.dds = self.base_dds = (self.right_edge - self.left_edge)/self.ActiveDimensions
         self.level = 99
         self._setup_data_source()
 

diff -r d1d0bc4ea8a916dd2401fa021cca5627f26522b1 -r 566e7e089056ea4c2ca8cc0a707985c068e2b7d3 yt/data_objects/octree_subset.py
--- a/yt/data_objects/octree_subset.py
+++ b/yt/data_objects/octree_subset.py
@@ -199,7 +199,7 @@
         op.process_octree(self.oct_handler, mdom_ind, positions, 
             self.fcoords, fields,
             self.domain_id, self._domain_offset, self.pf.periodicity,
-            index_fields, particle_octree, pdom_ind)
+            index_fields, particle_octree, pdom_ind, self.pf.geometry)
         vals = op.finalize()
         if vals is None: return
         if isinstance(vals, list):

diff -r d1d0bc4ea8a916dd2401fa021cca5627f26522b1 -r 566e7e089056ea4c2ca8cc0a707985c068e2b7d3 yt/frontends/stream/data_structures.py
--- a/yt/frontends/stream/data_structures.py
+++ b/yt/frontends/stream/data_structures.py
@@ -970,7 +970,7 @@
 def load_particles(data, length_unit = None, bbox=None,
                    sim_time=0.0, mass_unit = None, time_unit = None,
                    velocity_unit=None, periodicity=(True, True, True),
-                   n_ref = 64, over_refine_factor = 1):
+                   n_ref = 64, over_refine_factor = 1, geometry = "cartesian"):
     r"""Load a set of particles into yt as a
     :class:`~yt.frontends.stream.data_structures.StreamParticleHandler`.
 
@@ -1083,7 +1083,7 @@
     handler.simulation_time = sim_time
     handler.cosmology_simulation = 0
 
-    spf = StreamParticlesDataset(handler)
+    spf = StreamParticlesDataset(handler, geometry = geometry)
     spf.n_ref = n_ref
     spf.over_refine_factor = over_refine_factor
 

diff -r d1d0bc4ea8a916dd2401fa021cca5627f26522b1 -r 566e7e089056ea4c2ca8cc0a707985c068e2b7d3 yt/geometry/particle_deposit.pyx
--- a/yt/geometry/particle_deposit.pyx
+++ b/yt/geometry/particle_deposit.pyx
@@ -95,7 +95,7 @@
             if self.update_values == 1:
                 for j in range(nf):
                     field_pointers[j][i] = field_vals[j] 
-        
+
     @cython.boundscheck(False)
     @cython.wraparound(False)
     def process_grid(self, gobj,
@@ -424,3 +424,56 @@
         return
 
 deposit_mesh_id = MeshIdentifier
+
+cdef class NNParticleField(ParticleDepositOperation):
+    cdef np.float64_t *nnfield
+    cdef np.float64_t *distfield
+    cdef public object onnfield
+    cdef public object odistfield
+    def initialize(self):
+        self.onnfield = np.zeros(self.nvals, dtype="float64", order='F')
+        cdef np.ndarray arr = self.onnfield
+        self.nnfield = <np.float64_t*> arr.data
+
+        self.odistfield = np.zeros(self.nvals, dtype="float64", order='F')
+        self.odistfield[:] = np.inf
+        arr = self.odistfield
+        self.distfield = <np.float64_t*> arr.data
+
+    @cython.cdivision(True)
+    cdef void process(self, int dim[3],
+                      np.float64_t left_edge[3], 
+                      np.float64_t dds[3],
+                      np.int64_t offset, 
+                      np.float64_t ppos[3],
+                      np.float64_t *fields,
+                      np.int64_t domain_ind
+                      ):
+        # This one is a bit slow.  Every grid cell is going to be iterated
+        # over, and we're going to deposit particles in it.
+        cdef int ii[3], i, j, k
+        cdef np.int64_t ggind
+        cdef np.float64_t r2, gpos[3]
+        gpos[0] = left_edge[0] + 0.5 * dds[0]
+        for i in range(dim[0]):
+            gpos[1] = left_edge[1] + 0.5 * dds[1]
+            for j in range(dim[1]):
+                gpos[2] = left_edge[2] + 0.5 * dds[2]
+                for k in range(dim[2]):
+                    ggind = gind(i, j, k, dim) + offset
+                    r2 = ((ppos[0] - gpos[0])*(ppos[0] - gpos[0]) +
+                          (ppos[1] - gpos[1])*(ppos[1] - gpos[1]) +
+                          (ppos[2] - gpos[2])*(ppos[2] - gpos[2]))
+                    if r2 < self.distfield[ggind]:
+                        self.distfield[ggind] = r2
+                        self.nnfield[ggind] = fields[0]
+                    gpos[2] += dds[2]
+                gpos[1] += dds[1]
+            gpos[0] += dds[0]
+        return
+        
+    def finalize(self):
+        return self.onnfield
+
+deposit_nearest = NNParticleField
+

diff -r d1d0bc4ea8a916dd2401fa021cca5627f26522b1 -r 566e7e089056ea4c2ca8cc0a707985c068e2b7d3 yt/geometry/particle_smooth.pxd
--- a/yt/geometry/particle_smooth.pxd
+++ b/yt/geometry/particle_smooth.pxd
@@ -36,27 +36,6 @@
     np.int64_t pn       # Particle number
     np.float64_t r2     # radius**2
 
-@cython.cdivision(True)
-@cython.boundscheck(False)
-@cython.wraparound(False)
-cdef inline np.float64_t r2dist(np.float64_t ppos[3],
-                                np.float64_t cpos[3],
-                                np.float64_t DW[3],
-                                bint periodicity[3]):
-    cdef int i
-    cdef np.float64_t r2, DR
-    r2 = 0.0
-    for i in range(3):
-        DR = (ppos[i] - cpos[i])
-        if not periodicity[i]:
-            pass
-        elif (DR > DW[i]/2.0):
-            DR -= DW[i]
-        elif (DR < -DW[i]/2.0):
-            DR += DW[i]
-        r2 += DR * DR
-    return r2
-
 cdef class ParticleSmoothOperation:
     # We assume each will allocate and define their own temporary storage
     cdef public object nvals
@@ -71,6 +50,7 @@
     cdef np.float64_t *ppos
     # Note that we are preallocating here, so this is *not* threadsafe.
     cdef NeighborList *neighbors
+    cdef void (*pos_setup)(np.float64_t ipos[3], np.float64_t opos[3])
     cdef void neighbor_process(self, int dim[3], np.float64_t left_edge[3],
                                np.float64_t dds[3], np.float64_t *ppos,
                                np.float64_t **fields, np.int64_t nneighbors,

diff -r d1d0bc4ea8a916dd2401fa021cca5627f26522b1 -r 566e7e089056ea4c2ca8cc0a707985c068e2b7d3 yt/geometry/particle_smooth.pyx
--- a/yt/geometry/particle_smooth.pyx
+++ b/yt/geometry/particle_smooth.pyx
@@ -18,7 +18,7 @@
 import numpy as np
 from libc.stdlib cimport malloc, free, realloc
 cimport cython
-from libc.math cimport sqrt, fabs
+from libc.math cimport sqrt, fabs, sin, cos
 
 from fp_utils cimport *
 from oct_container cimport Oct, OctAllocationContainer, \
@@ -37,6 +37,40 @@
     else:
         return 1
 
+@cython.cdivision(True)
+@cython.boundscheck(False)
+@cython.wraparound(False)
+cdef np.float64_t r2dist(np.float64_t ppos[3],
+                         np.float64_t cpos[3],
+                         np.float64_t DW[3],
+                         bint periodicity[3],
+                         np.float64_t max_dist2):
+    cdef int i
+    cdef np.float64_t r2, DR
+    r2 = 0.0
+    for i in range(3):
+        DR = (ppos[i] - cpos[i])
+        if not periodicity[i]:
+            pass
+        elif (DR > DW[i]/2.0):
+            DR -= DW[i]
+        elif (DR < -DW[i]/2.0):
+            DR += DW[i]
+        r2 += DR * DR
+        if max_dist2 >= 0.0 and r2 > max_dist2:
+            return -1.0
+    return r2
+
+cdef void spherical_coord_setup(np.float64_t ipos[3], np.float64_t opos[3]):
+    opos[0] = ipos[0] * sin(ipos[1]) * cos(ipos[2])
+    opos[1] = ipos[0] * sin(ipos[1]) * sin(ipos[2])
+    opos[2] = ipos[0] * cos(ipos[1])
+
+cdef void cart_coord_setup(np.float64_t ipos[3], np.float64_t opos[3]):
+    opos[0] = ipos[0]
+    opos[1] = ipos[1]
+    opos[2] = ipos[2]
+
 cdef class ParticleSmoothOperation:
     def __init__(self, nvals, nfields, max_neighbors):
         # This is the set of cells, in grids, blocks or octs, we are handling.
@@ -66,7 +100,8 @@
                      periodicity = (True, True, True),
                      index_fields = None,
                      OctreeContainer particle_octree = None,
-                     np.ndarray[np.int64_t, ndim=1] pdom_ind = None):
+                     np.ndarray[np.int64_t, ndim=1] pdom_ind = None,
+                     geometry = "cartesian"):
         # This will be a several-step operation.
         #
         # We first take all of our particles and assign them to Octs.  If they
@@ -108,6 +143,25 @@
         cdef np.ndarray[np.int64_t, ndim=2] doff_m
         cdef np.ndarray[np.float64_t, ndim=1] tarr
         cdef np.ndarray[np.float64_t, ndim=4] iarr
+        cdef np.ndarray[np.float64_t, ndim=2] cart_positions
+        if geometry == "cartesian":
+            self.pos_setup = cart_coord_setup
+            cart_positions = positions
+        elif geometry == "spherical":
+            self.pos_setup = spherical_coord_setup
+            cart_positions = np.empty((positions.shape[0], 3), dtype="float64")
+
+            cart_positions[:,0] = positions[:,0] * \
+                                  np.sin(positions[:,1]) * \
+                                  np.cos(positions[:,2])
+            cart_positions[:,1] = positions[:,0] * \
+                                  np.sin(positions[:,1]) * \
+                                  np.sin(positions[:,2])
+            cart_positions[:,2] = positions[:,0] * \
+                                  np.cos(positions[:,1])
+            periodicity = (False, False, False)
+        else:
+            raise NotImplementedError
         dims[0] = dims[1] = dims[2] = (1 << mesh_octree.oref)
         cdef int nz = dims[0] * dims[1] * dims[2]
         numpart = positions.shape[0]
@@ -174,6 +228,7 @@
         # Now doff is full of offsets to the first entry in the pind that
         # refers to that oct's particles.
         ppos = <np.float64_t *> positions.data
+        cart_pos = <np.float64_t *> cart_positions.data
         doffs = <np.int64_t*> doff.data
         pinds = <np.int64_t*> pind.data
         pcounts = <np.int64_t*> pcount.data
@@ -213,7 +268,7 @@
             free(neighbors)
             nproc += 1
             self.neighbor_process(dims, moi.left_edge, moi.dds,
-                         ppos, field_pointers, nneighbors, nind, doffs,
+                         cart_pos, field_pointers, nneighbors, nind, doffs,
                          pinds, pcounts, offset, index_field_pointers)
         #print "VISITED", visited.sum(), visited.size,
         #print 100.0*float(visited.sum())/visited.size
@@ -253,7 +308,7 @@
         if self.curn < self.maxn:
             cur = &self.neighbors[self.curn]
             cur.pn = pn
-            cur.r2 = r2dist(ppos, cpos, self.DW, self.periodicity)
+            cur.r2 = r2dist(ppos, cpos, self.DW, self.periodicity, -1)
             self.curn += 1
             if self.curn == self.maxn:
                 # This time we sort it, so that future insertions will be able
@@ -262,7 +317,10 @@
                       Neighbor_compare)
             return
         # This will go (curn - 1) through 0.
-        r2_c = r2dist(ppos, cpos, self.DW, self.periodicity)
+        r2_o = self.neighbors[self.curn - 1].r2
+        r2_c = r2dist(ppos, cpos, self.DW, self.periodicity, r2_o)
+        # Early terminate
+        if r2_c < 0: return
         pn_c = pn
         for i in range((self.curn - 1), -1, -1):
             # First we evaluate against i.  If our candidate radius is greater
@@ -320,15 +378,16 @@
         # units supplied.  We can now iterate over every cell in the block and
         # every particle to find the nearest.  We will use a priority heap.
         cdef int i, j, k, ntot, nntot, m
-        cdef np.float64_t cpos[3]
+        cdef np.float64_t cpos[3], opos[3]
         cpos[0] = left_edge[0] + 0.5*dds[0]
         for i in range(dim[0]):
             cpos[1] = left_edge[1] + 0.5*dds[1]
             for j in range(dim[1]):
                 cpos[2] = left_edge[2] + 0.5*dds[2]
                 for k in range(dim[2]):
+                    self.pos_setup(cpos, opos)
                     self.neighbor_find(nneighbors, nind, doffs, pcounts,
-                        pinds, ppos, cpos)
+                        pinds, ppos, opos)
                     # Now we have all our neighbors in our neighbor list.
                     if self.curn <-1*self.maxn:
                         ntot = nntot = 0
@@ -337,7 +396,7 @@
                             nntot += 1
                             ntot += pcounts[nind[m]]
                         print "SOMETHING WRONG", self.curn, nneighbors, ntot, nntot
-                    self.process(offset, i, j, k, dim, cpos, fields,
+                    self.process(offset, i, j, k, dim, opos, fields,
                                  index_fields)
                     cpos[2] += dds[2]
                 cpos[1] += dds[1]
@@ -407,3 +466,76 @@
         return
 
 volume_weighted_smooth = VolumeWeightedSmooth
+
+cdef class NearestNeighborSmooth(ParticleSmoothOperation):
+    cdef np.float64_t *fp
+    cdef public object vals
+    def initialize(self):
+        cdef np.ndarray tarr
+        assert(self.nfields == 1)
+        tarr = np.zeros(self.nvals, dtype="float64", order="F")
+        self.vals = tarr
+        self.fp = <np.float64_t *> tarr.data
+
+    def finalize(self):
+        return self.vals
+
+    @cython.cdivision(True)
+    @cython.boundscheck(False)
+    @cython.wraparound(False)
+    cdef void process(self, np.int64_t offset, int i, int j, int k,
+                      int dim[3], np.float64_t cpos[3], np.float64_t **fields,
+                      np.float64_t **index_fields):
+        # We have our i, j, k for our cell, as well as the cell position.
+        # We also have a list of neighboring particles with particle numbers.
+        cdef np.int64_t pn
+        # We get back our mass 
+        # rho_i = sum(j = 1 .. n) m_j * W_ij
+        pn = self.neighbors[0].pn
+        self.fp[gind(i,j,k,dim) + offset] = fields[0][pn]
+        #self.fp[gind(i,j,k,dim) + offset] = self.neighbors[0].r2
+        return
+
+nearest_smooth = NearestNeighborSmooth
+
+cdef class IDWInterpolationSmooth(ParticleSmoothOperation):
+    cdef np.float64_t *fp
+    cdef public int p2
+    cdef public object vals
+    def initialize(self):
+        cdef np.ndarray tarr
+        assert(self.nfields == 1)
+        tarr = np.zeros(self.nvals, dtype="float64", order="F")
+        self.vals = tarr
+        self.fp = <np.float64_t *> tarr.data
+        self.p2 = 2 # Power, for IDW, in units of 2.  So we only do even p's.
+
+    def finalize(self):
+        return self.vals
+
+    @cython.cdivision(True)
+    @cython.boundscheck(False)
+    @cython.wraparound(False)
+    cdef void process(self, np.int64_t offset, int i, int j, int k,
+                      int dim[3], np.float64_t cpos[3], np.float64_t **fields,
+                      np.float64_t **index_fields):
+        # We have our i, j, k for our cell, as well as the cell position.
+        # We also have a list of neighboring particles with particle numbers.
+        cdef np.int64_t pn, ni, di
+        cdef np.float64_t total_weight = 0.0, total_value = 0.0, r2, val, w
+        # We're going to do a very simple IDW average
+        if self.neighbors[0].r2 == 0.0:
+            pn = self.neighbors[0].pn
+            self.fp[gind(i,j,k,dim) + offset] = fields[0][pn]
+        for ni in range(self.curn):
+            r2 = self.neighbors[ni].r2
+            val = fields[0][self.neighbors[ni].pn]
+            w = r2
+            for di in range(self.p2 - 1):
+                w *= r2
+            total_value += w * val
+            total_weight += w
+        self.fp[gind(i,j,k,dim) + offset] = total_value / total_weight
+        return
+
+idw_smooth = IDWInterpolationSmooth

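The smoothing changes above convert (r, theta, phi) particle positions to Cartesian before doing neighbor lookups; a NumPy restatement of the same transform, for reference (the Cython code in the diff is the authoritative version):

    import numpy as np

    def spherical_to_cartesian(pos):
        # pos[:, 0] = r, pos[:, 1] = theta, pos[:, 2] = phi
        r, theta, phi = pos[:, 0], pos[:, 1], pos[:, 2]
        cart = np.empty_like(pos)
        cart[:, 0] = r * np.sin(theta) * np.cos(phi)
        cart[:, 1] = r * np.sin(theta) * np.sin(phi)
        cart[:, 2] = r * np.cos(theta)
        return cart
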
diff -r d1d0bc4ea8a916dd2401fa021cca5627f26522b1 -r 566e7e089056ea4c2ca8cc0a707985c068e2b7d3 yt/utilities/lib/ContourFinding.pyx
--- a/yt/utilities/lib/ContourFinding.pyx
+++ b/yt/utilities/lib/ContourFinding.pyx
@@ -24,7 +24,6 @@
     OctreeContainer, OctInfo
 from yt.geometry.oct_visitors cimport \
     Oct
-from yt.geometry.particle_smooth cimport r2dist
 from .amr_kdtools cimport _find_node, Node
 from .grid_traversal cimport VolumeContainer, PartitionedGrid, \
     vc_index, vc_pos_index

diff -r d1d0bc4ea8a916dd2401fa021cca5627f26522b1 -r 566e7e089056ea4c2ca8cc0a707985c068e2b7d3 yt/visualization/plot_modifications.py
--- a/yt/visualization/plot_modifications.py
+++ b/yt/visualization/plot_modifications.py
@@ -16,6 +16,7 @@
 
 import numpy as np
 import h5py
+from matplotlib.patches import Circle
 
 from yt.funcs import *
 from yt.extern.six import add_metaclass
@@ -865,21 +866,49 @@
         plot._axes.text(x, y, self.text, **kwargs)
 
 class HaloCatalogCallback(PlotCallback):
+    """
+    annotate_halos(halo_catalog, circle_kwargs=None,
+        width = None, annotate_field=False,
+        font_kwargs = None, factor = 1.0)
+
+    Plots circles at the locations of all the halos
+    in a halo catalog with radii corresponding to the
+    virial radius of each halo. 
+
+    circle_kwargs: Contains the arguments controlling the
+        appearance of the circles, supplied to the 
+        Matplotlib patch Circle.
+    width: the width over which to select halos to plot,
+        useful when overplotting to a slice plot. Accepts
+        a tuple in the form (1.0, 'Mpc').
+    annotate_field: Accepts a field contained in the 
+        halo catalog to add text to the plot near the halo.
+        Example: annotate_field = 'particle_mass' will
+        write the halo mass next to each halo.
+    font_kwargs: Contains the arguments controlling the text
+        appearance of the annotated field.
+    factor: A number the virial radius is multiplied by for
+        plotting the circles. Ex: factor = 2.0 will plot
+        circles with twice the radius of each halo virial radius.
+    """
 
     _type_name = 'halos'
     region = None
     _descriptor = None
 
-    def __init__(self, halo_catalog, col='white', alpha =1, 
-            width = None, annotate_field = False, font_kwargs = None):
+    def __init__(self, halo_catalog, circle_kwargs = None, 
+            width = None, annotate_field = False,
+            font_kwargs = None, factor = 1.0):
 
         PlotCallback.__init__(self)
         self.halo_catalog = halo_catalog
-        self.color = col
-        self.alpha = alpha
         self.width = width
         self.annotate_field = annotate_field
         self.font_kwargs = font_kwargs
+        self.factor = factor
+        if circle_kwargs is None:
+            circle_kwargs = {'edgecolor':'white', 'facecolor':'None'}
+        self.circle_kwargs = circle_kwargs
 
     def __call__(self, plot):
         data = plot.data
@@ -898,20 +927,20 @@
         plot._axes.hold(True)
 
         # Set up scales for pixel size and original data
-        units = 'Mpccm'
         pixel_scale = self.pixel_scale(plot)[0]
         data_scale = data.pf.length_unit
+        units = data_scale.units
 
         # Convert halo positions to code units of the plotted data
         # and then to units of the plotted window
         px = halo_data[field_x][:].in_units(units) / data_scale
         py = halo_data[field_y][:].in_units(units) / data_scale
         px, py = self.convert_to_plot(plot,[px,py])
+
+        # Convert halo radii to a radius in pixels
+        radius = halo_data['virial_radius'][:].in_units(units)
+        radius = np.array(radius*pixel_scale*self.factor/data_scale)
         
-        # Convert halo radii to a radius in pixels
-        radius = halo_data['radius'][:].in_units(units)
-        radius = radius*pixel_scale/data_scale
-
         if self.width:
             pz = halo_data[field_z][:].in_units(units)/data_scale
             pz = data.pf.arr(pz, 'code_length')
@@ -927,8 +956,10 @@
             py = py[indices]
             radius = radius[indices]
 
-        plot._axes.scatter(px, py, edgecolors='None', marker='o',
-                           s=radius, c=self.color,alpha=self.alpha)
+        for x,y,r in zip(px, py, radius):
+            plot._axes.add_artist(Circle(xy=(x,y), 
+                radius = r, **self.circle_kwargs)) 
+
         plot._axes.set_xlim(xx0,xx1)
         plot._axes.set_ylim(yy0,yy1)
         plot._axes.hold(False)

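A usage sketch of the reworked callback; the keyword names come from the new docstring above, while the dataset path, catalog path, and HaloCatalog construction are placeholders:

    import yt
    from yt.analysis_modules.halo_analysis.halo_catalog import HaloCatalog

    ds = yt.load("Enzo_64/RD0006/RedshiftOutput0006")
    halos_ds = yt.load("rockstar_halos/halos_0.0.bin")
    hc = HaloCatalog(data_ds=ds, halos_ds=halos_ds)

    p = yt.ProjectionPlot(ds, "z", "density")
    p.annotate_halos(hc, factor=2.0,
                     circle_kwargs={"edgecolor": "red", "facecolor": "None"})
    p.save()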

https://bitbucket.org/yt_analysis/yt/commits/9c703b50fef8/
Changeset:   9c703b50fef8
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-08 02:20:42
Summary:     Not sure why this isn't here
Affected #:  1 file

diff -r 566e7e089056ea4c2ca8cc0a707985c068e2b7d3 -r 9c703b50fef8b872cbbba96cabc446581c4947f7 yt/data_objects/construction_data_containers.py
--- a/yt/data_objects/construction_data_containers.py
+++ b/yt/data_objects/construction_data_containers.py
@@ -573,6 +573,10 @@
     def LeftEdge(self):
         return self.left_edge
 
+    @property
+    def RightEdge(self):
+        return self.right_edge
+
     def deposit(self, positions, fields = None, method = None):
         cls = getattr(particle_deposit, "deposit_%s" % method, None)
         if cls is None:


https://bitbucket.org/yt_analysis/yt/commits/6c0410e90e8a/
Changeset:   6c0410e90e8a
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-09 01:09:44
Summary:     Docstrings
Affected #:  2 files

diff -r 9c703b50fef8b872cbbba96cabc446581c4947f7 -r 6c0410e90e8a11adf40aed0ef786ef0432e7e610 yt/data_objects/construction_data_containers.py
--- a/yt/data_objects/construction_data_containers.py
+++ b/yt/data_objects/construction_data_containers.py
@@ -588,6 +588,27 @@
         return vals.reshape(self.ActiveDimensions, order="C")
 
     def write_to_gdf(self, gdf_path, fields, nprocs=1, **kwargs):
+        r"""
+        Write the covering grid data to a GDF file.
+
+        Parameters
+        ----------
+        gdf_path : string
+            Pathname of the GDF file to write.
+        fields : list of strings
+            Fields to write to the GDF file.
+        nprocs : integer, optional
+            Split the covering grid into *nprocs* subgrids before
+            writing to the GDF file. Default: 1
+
+        All remaining keyword arguments are passed to
+        yt.utilities.grid_data_format.writer.
+
+        Examples
+        --------
+        >>> cube.write_to_gdf("clumps.h5", ["density","temperature"], nprocs=16,
+        ...                   clobber=True)
+        """
         data = {}
         for field in fields:
             data[field] = (self[field].v, str(self[field].units))

diff -r 9c703b50fef8b872cbbba96cabc446581c4947f7 -r 6c0410e90e8a11adf40aed0ef786ef0432e7e610 yt/utilities/grid_data_format/writer.py
--- a/yt/utilities/grid_data_format/writer.py
+++ b/yt/utilities/grid_data_format/writer.py
@@ -22,7 +22,7 @@
 
 def write_to_gdf(pf, gdf_path, data_author=None, data_comment=None,
                  particle_type_name="dark_matter", clobber=False):
-    """
+    r"""
     Write a parameter file to the given path in the Grid Data Format.
 
     Parameters
@@ -31,7 +31,20 @@
         The yt data to write out.
     gdf_path : string
         The path of the file to output.
+    data_author : string, optional
+        The name of the author who wrote the data. Default: None.
+    data_comment : string, optional
+        A descriptive comment. Default: None.
+    particle_type_name : string, optional
+        The particle type of the particles in the dataset. Default: "dark_matter"
+    clobber : boolean, optional
+        Whether or not to clobber an already existing file. If False, attempting
+        to overwrite an existing file will result in an exception.
 
+    Examples
+    --------
+    >>> write_to_gdf(ds, "clumps.h5", data_author="Your Mom",
+    ...              data_comment="All Your Base Are Belong To Us", clobber=True)
     """
 
     f = _create_new_gdf(pf, gdf_path, data_author, data_comment,


https://bitbucket.org/yt_analysis/yt/commits/c68c5f1d67c2/
Changeset:   c68c5f1d67c2
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-09 17:06:22
Summary:     Updated comment
Affected #:  1 file

diff -r 6c0410e90e8a11adf40aed0ef786ef0432e7e610 -r c68c5f1d67c2ec0a8ae7ea2ccb8f377f78bff092 yt/utilities/grid_data_format/writer.py
--- a/yt/utilities/grid_data_format/writer.py
+++ b/yt/utilities/grid_data_format/writer.py
@@ -157,8 +157,8 @@
     # Make sure we have the absolute path to the file first
     gdf_path = os.path.abspath(gdf_path)
 
-    # Stupid check -- is the file already there?
-    # @todo: make this a specific exception/error.
+    # Is the file already there? If so, are we allowing
+    # clobbering?
     if os.path.exists(gdf_path) and not clobber:
         raise YTGDFAlreadyExists(gdf_path)
 


https://bitbucket.org/yt_analysis/yt/commits/32feadd4835c/
Changeset:   32feadd4835c
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-09 17:06:36
Summary:     Updated field definitions
Affected #:  1 file

diff -r c68c5f1d67c2ec0a8ae7ea2ccb8f377f78bff092 -r 32feadd4835c85e18e56700b40582debec793ea3 yt/frontends/gdf/fields.py
--- a/yt/frontends/gdf/fields.py
+++ b/yt/frontends/gdf/fields.py
@@ -25,13 +25,14 @@
 class GDFFieldInfo(FieldInfoContainer):
     known_other_fields = (
         ("density", ("g/cm**3", ["density"], None)),
-        ("specific_energy", ("erg / g", ["thermal_energy"], None)),
-        ("pressure", ("", ["pressure"], None)),
+        ("specific_energy", ("erg/g", ["thermal_energy"], None)),
+        ("pressure", ("erg/cm**3", ["pressure"], None)),
+        ("temperature", ("K", ["temperature"], None)),
         ("velocity_x", ("cm/s", ["velocity_x"], None)),
         ("velocity_y", ("cm/s", ["velocity_y"], None)),
         ("velocity_z", ("cm/s", ["velocity_z"], None)),
-        ("mag_field_x", ("gauss", ["magnetic_field_x"], None)),
-        ("mag_field_y", ("gauss", ["magnetic_field_y"], None)),
-        ("mag_field_z", ("gauss", ["magnetic_field_z"], None)),
+        ("magnetic_field_x", ("gauss", ["magnetic_field_x"], None)),
+        ("magnetic_field_y", ("gauss", ["magnetic_field_y"], None)),
+        ("magnetic_field_z", ("gauss", ["magnetic_field_z"], None)),
     )
     known_particle_fields = ()

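For readers unfamiliar with the tuple format: each entry maps an on-disk field name to (units, list of yt aliases, display name). A minimal sketch of a frontend field container following the same pattern (the class name is invented; the import path is assumed from yt-3.0's layout):

    from yt.fields.field_info_container import FieldInfoContainer

    class MyFieldInfo(FieldInfoContainer):
        # ("on-disk name", ("units", [yt aliases], display name or None))
        known_other_fields = (
            ("density", ("g/cm**3", ["density"], None)),
        )
        known_particle_fields = ()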

https://bitbucket.org/yt_analysis/yt/commits/339d52f5c270/
Changeset:   339d52f5c270
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-09 17:06:49
Summary:     dds wasn't unitified
Affected #:  1 file

diff -r 32feadd4835c85e18e56700b40582debec793ea3 -r 339d52f5c2702f4d2a0da13fcfeed7db3b0aad0a yt/frontends/gdf/data_structures.py
--- a/yt/frontends/gdf/data_structures.py
+++ b/yt/frontends/gdf/data_structures.py
@@ -63,7 +63,7 @@
                 self.dds[2] = 1.0
         self.field_data['dx'], self.field_data['dy'], self.field_data['dz'] = \
             self.dds
-
+        self.dds = self.pf.arr(self.dds, "code_length")
 
 class GDFHierarchy(GridIndex):
 

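For context, pf.arr wraps a plain ndarray in a unit-carrying YTArray; before this change dds stayed a bare ndarray. A small sketch with the public interface (dataset path and values are placeholders):

    import numpy as np
    import yt

    ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
    dds = ds.arr(np.array([0.5, 0.5, 0.5]), "code_length")
    print dds.in_units("cm")    # now convertible, since it carries units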

https://bitbucket.org/yt_analysis/yt/commits/65ca9e6160df/
Changeset:   65ca9e6160df
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-10 01:54:01
Summary:     Merge
Affected #:  3 files

diff -r 339d52f5c2702f4d2a0da13fcfeed7db3b0aad0a -r 65ca9e6160df5fd1b2e361d4428c25881c0566b3 doc/source/cookbook/fits_radio_cubes.ipynb
--- a/doc/source/cookbook/fits_radio_cubes.ipynb
+++ b/doc/source/cookbook/fits_radio_cubes.ipynb
@@ -81,8 +81,7 @@
      "collapsed": false,
      "input": [
       "from yt.frontends.fits.misc import PlotWindowWCS\n",
-      "wcs_slc = PlotWindowWCS(slc)\n",
-      "wcs_slc[\"intensity\"]"
+      "PlotWindowWCS(slc)\n"
      ],
      "language": "python",
      "metadata": {},

diff -r 339d52f5c2702f4d2a0da13fcfeed7db3b0aad0a -r 65ca9e6160df5fd1b2e361d4428c25881c0566b3 yt/frontends/fits/misc.py
--- a/yt/frontends/fits/misc.py
+++ b/yt/frontends/fits/misc.py
@@ -11,6 +11,8 @@
 #-----------------------------------------------------------------------------
 
 import numpy as np
+import base64
+from yt.extern.six.moves import StringIO
 from yt.fields.derived_field import ValidateSpatial
 from yt.utilities.on_demand_imports import _astropy
 from yt.funcs import mylog, get_image_suffix
@@ -177,13 +179,21 @@
                 return self.plots[k]
 
     def show(self):
-        from IPython.core.display import display
-        for k, v in sorted(self.plots.iteritems()):
-            canvas = FigureCanvasAgg(v)
-            display(v)
+        return self
 
     def save(self, name=None, mpl_kwargs=None):
         if mpl_kwargs is None:
             mpl_kwargs = {}
         mpl_kwargs["bbox_inches"] = "tight"
         self.pw.save(name=name, mpl_kwargs=mpl_kwargs)
+
+    def _repr_html_(self):
+        ret = ''
+        for k, v in self.plots.iteritems():
+            canvas = FigureCanvasAgg(v)
+            f = StringIO()
+            canvas.print_figure(f)
+            f.seek(0)
+            img = base64.b64encode(f.read())
+            ret += '<img src="data:image/png;base64,%s"><br>' % img
+        return ret

diff -r 339d52f5c2702f4d2a0da13fcfeed7db3b0aad0a -r 65ca9e6160df5fd1b2e361d4428c25881c0566b3 yt/visualization/plot_window.py
--- a/yt/visualization/plot_window.py
+++ b/yt/visualization/plot_window.py
@@ -1697,7 +1697,7 @@
         else:
             fsize = figure_size
         self._cb_size = 0.0375*fsize
-        self._ax_text_size = [0.9*fontscale, 0.7*fontscale]
+        self._ax_text_size = [1.2*fontscale, 0.9*fontscale]
         self._top_buff_size = 0.30*fontscale
         self._aspect = ((extent[1] - extent[0])/(extent[3] - extent[2]))
 


https://bitbucket.org/yt_analysis/yt/commits/ef39b8212804/
Changeset:   ef39b8212804
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-10 16:39:29
Summary:     Merge
Affected #:  1 file

diff -r 65ca9e6160df5fd1b2e361d4428c25881c0566b3 -r ef39b82128040b91a089a1b948a41f4c619f62ed yt/frontends/flash/data_structures.py
--- a/yt/frontends/flash/data_structures.py
+++ b/yt/frontends/flash/data_structures.py
@@ -210,16 +210,19 @@
         self.parameters["Time"] = 1. # default unit is 1...
         
     def _set_code_unit_attributes(self):
-        if "cgs" in (self.parameters.get('pc_unitsbase', "").lower(),
-                     self.parameters.get('unitsystem', "").lower()):
-             b_factor = 1
-        elif self['unitsystem'].lower() == "si":
-             b_factor = np.sqrt(4*np.pi/1e7)
-        elif self['unitsystem'].lower() == "none":
-             b_factor = np.sqrt(4*np.pi)
+
+        if 'unitsystem' in self.parameters:
+            if self['unitsystem'].lower() == "cgs":
+                b_factor = 1.0
+            elif self['unitsystem'].lower() == "si":
+                b_factor = np.sqrt(4*np.pi/1e7)
+            elif self['unitsystem'].lower() == "none":
+                b_factor = np.sqrt(4*np.pi)
+            else:
+                raise RuntimeError("Runtime parameter unitsystem with "
+                                   "value %s is unrecognized" % self['unitsystem'])
         else:
-            raise RuntimeError("Runtime parameter unitsystem with "
-                               "value %s is unrecognized" % self['unitsystem'])
+            b_factor = 1.
         if self.cosmological_simulation == 1:
             length_factor = 1.0 / (1.0 + self.current_redshift)
             temperature_factor = 1.0 / (1.0 + self.current_redshift)**2
@@ -227,6 +230,7 @@
             length_factor = 1.0
             temperature_factor = 1.0
         self.magnetic_unit = self.quan(b_factor, "gauss")
+
         self.length_unit = self.quan(length_factor, "cm")
         self.mass_unit = self.quan(1.0, "g")
         self.time_unit = self.quan(1.0, "s")
@@ -234,7 +238,8 @@
         self.temperature_unit = self.quan(temperature_factor, "K")
         # Still need to deal with:
         #self.conversion_factors['temp'] = (1.0 + self.current_redshift)**-2.0
-
+        self.unit_registry.modify("code_magnetic", self.magnetic_unit)
+        
     def set_code_units(self):
         super(FLASHDataset, self).set_code_units()
         self.unit_registry.modify("code_temperature",
@@ -247,9 +252,9 @@
         for tpname, pval in zip(self._handle[nn][:,'name'],
                                 self._handle[nn][:,'value']):
             if tpname.strip() == pname:
-                if ptype == "string" :
+                if ptype == "string":
                     return pval.strip()
-                else :
+                else:
                     return pval
         raise KeyError(pname)
 


https://bitbucket.org/yt_analysis/yt/commits/e39610437383/
Changeset:   e39610437383
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-11 17:06:22
Summary:     These fixes allow the tests to pass on 32-bit Windows while still preserving int64 on 64-bit platforms (including Windows).
Affected #:  5 files

diff -r ef39b82128040b91a089a1b948a41f4c619f62ed -r e39610437383647d8bf483d86eb6ed9f094342b5 yt/frontends/stream/data_structures.py
--- a/yt/frontends/stream/data_structures.py
+++ b/yt/frontends/stream/data_structures.py
@@ -436,7 +436,7 @@
         pts = MatchPointsToGrids(grid_tree, len(x), x, y, z)
         particle_grid_inds = pts.find_points_in_tree()
         idxs = np.argsort(particle_grid_inds)
-        particle_grid_count = np.bincount(particle_grid_inds,
+        particle_grid_count = np.bincount(particle_grid_inds.astype("intp"),
                                           minlength=num_grids)
         particle_indices = np.zeros(num_grids + 1, dtype='int64')
         if num_grids > 1 :

diff -r ef39b82128040b91a089a1b948a41f4c619f62ed -r e39610437383647d8bf483d86eb6ed9f094342b5 yt/geometry/tests/test_particle_octree.py
--- a/yt/geometry/tests/test_particle_octree.py
+++ b/yt/geometry/tests/test_particle_octree.py
@@ -91,7 +91,7 @@
         pf = load_particles(data, 1.0, bbox = bbox, n_ref = n_ref)
         dd = pf.h.all_data()
         bi = dd["io","mesh_id"]
-        v = np.bincount(bi.astype("int64"))
+        v = np.bincount(bi.astype("intp"))
         yield assert_equal, v.max() <= n_ref, True
         bi2 = dd["all","mesh_id"]
         yield assert_equal, bi, bi2

diff -r ef39b82128040b91a089a1b948a41f4c619f62ed -r e39610437383647d8bf483d86eb6ed9f094342b5 yt/utilities/lib/alt_ray_tracers.pyx
--- a/yt/utilities/lib/alt_ray_tracers.pyx
+++ b/yt/utilities/lib/alt_ray_tracers.pyx
@@ -101,7 +101,7 @@
                                           rleft, rright, zleft, zright, \
                                           cleft, cright, thetaleft, thetaright, \
                                           tmleft, tpleft, tmright, tpright, tsect
-    cdef np.ndarray[np.int64_t, ndim=1] inds, tinds, sinds
+    cdef np.ndarray[np.intp_t, ndim=1] inds, tinds, sinds
     cdef np.ndarray[np.float64_t, ndim=2] xyz, rztheta, ptemp, b1, b2, dsect
 
     # set up  points
@@ -152,7 +152,7 @@
                                      np.argwhere(tmmright).flat, 
                                      np.argwhere(tpmright).flat,]))
     if 0 == inds.shape[0]:
-        inds = np.arange(np.int64(I))
+        inds = np.arange(I)
         thetaleft = np.empty(I)
         thetaleft.fill(p1[2])
         thetaright = np.empty(I)

diff -r ef39b82128040b91a089a1b948a41f4c619f62ed -r e39610437383647d8bf483d86eb6ed9f094342b5 yt/utilities/lib/misc_utilities.pyx
--- a/yt/utilities/lib/misc_utilities.pyx
+++ b/yt/utilities/lib/misc_utilities.pyx
@@ -27,7 +27,7 @@
 @cython.boundscheck(False)
 @cython.wraparound(False)
 @cython.cdivision(True)
-def new_bin_profile1d(np.ndarray[np.int64_t, ndim=1] bins_x,
+def new_bin_profile1d(np.ndarray[np.intp_t, ndim=1] bins_x,
                   np.ndarray[np.float64_t, ndim=1] wsource,
                   np.ndarray[np.float64_t, ndim=2] bsource,
                   np.ndarray[np.float64_t, ndim=1] wresult,
@@ -58,8 +58,8 @@
 @cython.boundscheck(False)
 @cython.wraparound(False)
 @cython.cdivision(True)
-def new_bin_profile2d(np.ndarray[np.int64_t, ndim=1] bins_x,
-                  np.ndarray[np.int64_t, ndim=1] bins_y,
+def new_bin_profile2d(np.ndarray[np.intp_t, ndim=1] bins_x,
+                  np.ndarray[np.intp_t, ndim=1] bins_y,
                   np.ndarray[np.float64_t, ndim=1] wsource,
                   np.ndarray[np.float64_t, ndim=2] bsource,
                   np.ndarray[np.float64_t, ndim=2] wresult,
@@ -91,9 +91,9 @@
 @cython.boundscheck(False)
 @cython.wraparound(False)
 @cython.cdivision(True)
-def new_bin_profile3d(np.ndarray[np.int64_t, ndim=1] bins_x,
-                  np.ndarray[np.int64_t, ndim=1] bins_y,
-                  np.ndarray[np.int64_t, ndim=1] bins_z,
+def new_bin_profile3d(np.ndarray[np.intp_t, ndim=1] bins_x,
+                  np.ndarray[np.intp_t, ndim=1] bins_y,
+                  np.ndarray[np.intp_t, ndim=1] bins_z,
                   np.ndarray[np.float64_t, ndim=1] wsource,
                   np.ndarray[np.float64_t, ndim=2] bsource,
                   np.ndarray[np.float64_t, ndim=3] wresult,

diff -r ef39b82128040b91a089a1b948a41f4c619f62ed -r e39610437383647d8bf483d86eb6ed9f094342b5 yt/utilities/particle_generator.py
--- a/yt/utilities/particle_generator.py
+++ b/yt/utilities/particle_generator.py
@@ -104,7 +104,7 @@
         self.particles[:,self.posx_index] = x[idxs]
         self.particles[:,self.posy_index] = y[idxs]
         self.particles[:,self.posz_index] = z[idxs]
-        self.NumberOfParticles = np.bincount(particle_grid_inds,
+        self.NumberOfParticles = np.bincount(particle_grid_inds.astype("intp"),
                                              minlength=self.num_grids)
         if self.num_grids > 1 :
             np.add.accumulate(self.NumberOfParticles.squeeze(),

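The underlying issue: np.bincount wants indices in the platform's native pointer-sized integer (np.intp), which is 32-bit on 32-bit Windows, so int64 input fails there while .astype("intp") works on both word sizes. A hedged illustration:

    import numpy as np

    inds = np.array([0, 1, 1, 2], dtype="int64")
    # np.intp is int32 on 32-bit platforms and int64 on 64-bit ones
    counts = np.bincount(inds.astype(np.intp), minlength=5)
    print counts    # [1 2 1 0 0]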

https://bitbucket.org/yt_analysis/yt/commits/15885ed74b8a/
Changeset:   15885ed74b8a
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-11 21:52:19
Summary:     Backout changeset e39610437383647d8bf483d86eb6ed9f094342b5
Affected #:  5 files

diff -r e39610437383647d8bf483d86eb6ed9f094342b5 -r 15885ed74b8a9b42d3733d31173749674429f8cb yt/frontends/stream/data_structures.py
--- a/yt/frontends/stream/data_structures.py
+++ b/yt/frontends/stream/data_structures.py
@@ -436,7 +436,7 @@
         pts = MatchPointsToGrids(grid_tree, len(x), x, y, z)
         particle_grid_inds = pts.find_points_in_tree()
         idxs = np.argsort(particle_grid_inds)
-        particle_grid_count = np.bincount(particle_grid_inds.astype("intp"),
+        particle_grid_count = np.bincount(particle_grid_inds,
                                           minlength=num_grids)
         particle_indices = np.zeros(num_grids + 1, dtype='int64')
         if num_grids > 1 :

diff -r e39610437383647d8bf483d86eb6ed9f094342b5 -r 15885ed74b8a9b42d3733d31173749674429f8cb yt/geometry/tests/test_particle_octree.py
--- a/yt/geometry/tests/test_particle_octree.py
+++ b/yt/geometry/tests/test_particle_octree.py
@@ -91,7 +91,7 @@
         pf = load_particles(data, 1.0, bbox = bbox, n_ref = n_ref)
         dd = pf.h.all_data()
         bi = dd["io","mesh_id"]
-        v = np.bincount(bi.astype("intp"))
+        v = np.bincount(bi.astype("int64"))
         yield assert_equal, v.max() <= n_ref, True
         bi2 = dd["all","mesh_id"]
         yield assert_equal, bi, bi2

diff -r e39610437383647d8bf483d86eb6ed9f094342b5 -r 15885ed74b8a9b42d3733d31173749674429f8cb yt/utilities/lib/alt_ray_tracers.pyx
--- a/yt/utilities/lib/alt_ray_tracers.pyx
+++ b/yt/utilities/lib/alt_ray_tracers.pyx
@@ -101,7 +101,7 @@
                                           rleft, rright, zleft, zright, \
                                           cleft, cright, thetaleft, thetaright, \
                                           tmleft, tpleft, tmright, tpright, tsect
-    cdef np.ndarray[np.intp_t, ndim=1] inds, tinds, sinds
+    cdef np.ndarray[np.int64_t, ndim=1] inds, tinds, sinds
     cdef np.ndarray[np.float64_t, ndim=2] xyz, rztheta, ptemp, b1, b2, dsect
 
     # set up  points
@@ -152,7 +152,7 @@
                                      np.argwhere(tmmright).flat, 
                                      np.argwhere(tpmright).flat,]))
     if 0 == inds.shape[0]:
-        inds = np.arange(I)
+        inds = np.arange(np.int64(I))
         thetaleft = np.empty(I)
         thetaleft.fill(p1[2])
         thetaright = np.empty(I)

diff -r e39610437383647d8bf483d86eb6ed9f094342b5 -r 15885ed74b8a9b42d3733d31173749674429f8cb yt/utilities/lib/misc_utilities.pyx
--- a/yt/utilities/lib/misc_utilities.pyx
+++ b/yt/utilities/lib/misc_utilities.pyx
@@ -27,7 +27,7 @@
 @cython.boundscheck(False)
 @cython.wraparound(False)
 @cython.cdivision(True)
-def new_bin_profile1d(np.ndarray[np.intp_t, ndim=1] bins_x,
+def new_bin_profile1d(np.ndarray[np.int64_t, ndim=1] bins_x,
                   np.ndarray[np.float64_t, ndim=1] wsource,
                   np.ndarray[np.float64_t, ndim=2] bsource,
                   np.ndarray[np.float64_t, ndim=1] wresult,
@@ -58,8 +58,8 @@
 @cython.boundscheck(False)
 @cython.wraparound(False)
 @cython.cdivision(True)
-def new_bin_profile2d(np.ndarray[np.intp_t, ndim=1] bins_x,
-                  np.ndarray[np.intp_t, ndim=1] bins_y,
+def new_bin_profile2d(np.ndarray[np.int64_t, ndim=1] bins_x,
+                  np.ndarray[np.int64_t, ndim=1] bins_y,
                   np.ndarray[np.float64_t, ndim=1] wsource,
                   np.ndarray[np.float64_t, ndim=2] bsource,
                   np.ndarray[np.float64_t, ndim=2] wresult,
@@ -91,9 +91,9 @@
 @cython.boundscheck(False)
 @cython.wraparound(False)
 @cython.cdivision(True)
-def new_bin_profile3d(np.ndarray[np.intp_t, ndim=1] bins_x,
-                  np.ndarray[np.intp_t, ndim=1] bins_y,
-                  np.ndarray[np.intp_t, ndim=1] bins_z,
+def new_bin_profile3d(np.ndarray[np.int64_t, ndim=1] bins_x,
+                  np.ndarray[np.int64_t, ndim=1] bins_y,
+                  np.ndarray[np.int64_t, ndim=1] bins_z,
                   np.ndarray[np.float64_t, ndim=1] wsource,
                   np.ndarray[np.float64_t, ndim=2] bsource,
                   np.ndarray[np.float64_t, ndim=3] wresult,

diff -r e39610437383647d8bf483d86eb6ed9f094342b5 -r 15885ed74b8a9b42d3733d31173749674429f8cb yt/utilities/particle_generator.py
--- a/yt/utilities/particle_generator.py
+++ b/yt/utilities/particle_generator.py
@@ -104,7 +104,7 @@
         self.particles[:,self.posx_index] = x[idxs]
         self.particles[:,self.posy_index] = y[idxs]
         self.particles[:,self.posz_index] = z[idxs]
-        self.NumberOfParticles = np.bincount(particle_grid_inds.astype("intp"),
+        self.NumberOfParticles = np.bincount(particle_grid_inds,
                                              minlength=self.num_grids)
         if self.num_grids > 1 :
             np.add.accumulate(self.NumberOfParticles.squeeze(),


https://bitbucket.org/yt_analysis/yt/commits/2844b368ab5f/
Changeset:   2844b368ab5f
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-11 16:09:58
Summary:     Reworking the way units are done.
Affected #:  2 files

diff -r ef39b82128040b91a089a1b948a41f4c619f62ed -r 2844b368ab5fbb3a1779e097e26984a99548541a yt/frontends/gdf/data_structures.py
--- a/yt/frontends/gdf/data_structures.py
+++ b/yt/frontends/gdf/data_structures.py
@@ -180,19 +180,16 @@
         h5f = h5py.File(self.parameter_filename, "r")
         for field_name in h5f["/field_types"]:
             current_field = h5f["/field_types/%s" % field_name]
-            if 'field_to_cgs' in current_field.attrs:
-                field_conv = current_field.attrs['field_to_cgs']
-                self.field_units[field_name] = just_one(field_conv)
-            elif 'field_units' in current_field.attrs:
+            if 'field_units' in current_field.attrs:
                 field_units = current_field.attrs['field_units']
                 if isinstance(field_units, types.StringTypes):
-                    current_fields_unit = current_field.attrs['field_units']
+                    current_field_units = current_field.attrs['field_units']
                 else:
-                    current_fields_unit = \
+                    current_field_units = \
                         just_one(current_field.attrs['field_units'])
                 self.field_units[field_name] = current_field_units
             else:
-                current_fields_unit = ""
+                self.field_units[field_name] = ""
         h5f.close()
         self.length_unit = self.quan(1.0, "cm")
         self.mass_unit = self.quan(1.0, "g")

diff -r ef39b82128040b91a089a1b948a41f4c619f62ed -r 2844b368ab5fbb3a1779e097e26984a99548541a yt/utilities/grid_data_format/writer.py
--- a/yt/utilities/grid_data_format/writer.py
+++ b/yt/utilities/grid_data_format/writer.py
@@ -115,7 +115,7 @@
 
     # grab the display name and units from the field info container.
     display_name = fi.display_name
-    units = fi.get_units()
+    units = fi.units
 
     # check that they actually contain something...
     if display_name:
@@ -126,8 +126,6 @@
         sg.attrs["field_units"] = units
     else:
         sg.attrs["field_units"] = "None"
-    # @todo: the values must be in CGS already right?
-    sg.attrs["field_to_cgs"] = 1.0
     # @todo: is this always true?
     sg.attrs["staggering"] = 0
 

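After this rework, each field's units are recorded as a plain string attribute on its entry in the "field_types" group, and the reader above accepts either a string or a one-element array attribute (via just_one). A minimal h5py sketch for inspecting that metadata in a written file ("clumps.h5" is just an illustrative name):

    import h5py

    # List every field and the unit string the writer recorded for it.
    h5f = h5py.File("clumps.h5", "r")
    for field_name in h5f["/field_types"]:
        attrs = h5f["/field_types/%s" % field_name].attrs
        print field_name, attrs["field_units"]   # e.g. density g/cm**3
    h5f.close()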

https://bitbucket.org/yt_analysis/yt/commits/9c272c78ed16/
Changeset:   9c272c78ed16
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-11 22:33:21
Summary:     Set length, mass, time units for the dataset within the GDF file. This is a departure from the standard that will probably have to be discussed.
Affected #:  2 files

diff -r 2844b368ab5fbb3a1779e097e26984a99548541a -r 9c272c78ed16e7ba01ec41d6ef1341df72832856 yt/frontends/gdf/data_structures.py
--- a/yt/frontends/gdf/data_structures.py
+++ b/yt/frontends/gdf/data_structures.py
@@ -190,10 +190,14 @@
                 self.field_units[field_name] = current_field_units
             else:
                 self.field_units[field_name] = ""
+
+        for unit_name in h5f["/dataset_units"]:
+            current_unit = h5f["/dataset_units/%s" % unit_name]
+            value = current_unit.value
+            unit = current_unit.attrs["unit"]
+            setattr(self, unit_name, self.quan(value,unit))
+
         h5f.close()
-        self.length_unit = self.quan(1.0, "cm")
-        self.mass_unit = self.quan(1.0, "g")
-        self.time_unit = self.quan(1.0, "s")
 
     def _parse_parameter_file(self):
         self._handle = h5py.File(self.parameter_filename, "r")

diff -r 2844b368ab5fbb3a1779e097e26984a99548541a -r 9c272c78ed16e7ba01ec41d6ef1341df72832856 yt/utilities/grid_data_format/writer.py
--- a/yt/utilities/grid_data_format/writer.py
+++ b/yt/utilities/grid_data_format/writer.py
@@ -194,13 +194,19 @@
     g.attrs["field_ordering"] = 0
     # @todo: not yet supported by yt.
     g.attrs["boundary_conditions"] = np.array([0, 0, 0, 0, 0, 0], 'int32')
-
     if pf.cosmological_simulation:
         g.attrs["current_redshift"] = pf.current_redshift
         g.attrs["omega_matter"] = pf.omega_matter
         g.attrs["omega_lambda"] = pf.omega_lambda
         g.attrs["hubble_constant"] = pf.hubble_constant
 
+    g = f.create_group("dataset_units")
+    for u in ["length","time","mass"]:
+        unit_name = u+"_unit"
+        attr = getattr(pf, unit_name)
+        d = g.create_dataset(unit_name, data=float(attr))
+        d.attrs["unit"] = str(attr.units)
+
     ###
     # "field_types" group
     ###

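On disk, the new "dataset_units" group holds one scalar dataset per unit (length_unit, time_unit, mass_unit), with the magnitude as the dataset value and the unit string in a "unit" attribute. A sketch of reading it back directly with h5py, mirroring the frontend loop above ("clumps.h5" is an illustrative filename):

    import h5py

    h5f = h5py.File("clumps.h5", "r")
    for unit_name in h5f["/dataset_units"]:      # "length_unit", ...
        current_unit = h5f["/dataset_units/%s" % unit_name]
        # scalar magnitude plus its unit string, e.g. 1.0 "cm"
        print unit_name, current_unit.value, current_unit.attrs["unit"]
    h5f.close()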

https://bitbucket.org/yt_analysis/yt/commits/f34dc27c7b6f/
Changeset:   f34dc27c7b6f
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-12 04:52:30
Summary:     Merge
Affected #:  2 files

diff -r 15885ed74b8a9b42d3733d31173749674429f8cb -r f34dc27c7b6f58782d13b931fadec8a22f18782a yt/frontends/gdf/data_structures.py
--- a/yt/frontends/gdf/data_structures.py
+++ b/yt/frontends/gdf/data_structures.py
@@ -180,23 +180,24 @@
         h5f = h5py.File(self.parameter_filename, "r")
         for field_name in h5f["/field_types"]:
             current_field = h5f["/field_types/%s" % field_name]
-            if 'field_to_cgs' in current_field.attrs:
-                field_conv = current_field.attrs['field_to_cgs']
-                self.field_units[field_name] = just_one(field_conv)
-            elif 'field_units' in current_field.attrs:
+            if 'field_units' in current_field.attrs:
                 field_units = current_field.attrs['field_units']
                 if isinstance(field_units, types.StringTypes):
-                    current_fields_unit = current_field.attrs['field_units']
+                    current_field_units = current_field.attrs['field_units']
                 else:
-                    current_fields_unit = \
+                    current_field_units = \
                         just_one(current_field.attrs['field_units'])
                 self.field_units[field_name] = current_field_units
             else:
-                current_fields_unit = ""
+                self.field_units[field_name] = ""
+
+        for unit_name in h5f["/dataset_units"]:
+            current_unit = h5f["/dataset_units/%s" % unit_name]
+            value = current_unit.value
+            unit = current_unit.attrs["unit"]
+            setattr(self, unit_name, self.quan(value,unit))
+
         h5f.close()
-        self.length_unit = self.quan(1.0, "cm")
-        self.mass_unit = self.quan(1.0, "g")
-        self.time_unit = self.quan(1.0, "s")
 
     def _parse_parameter_file(self):
         self._handle = h5py.File(self.parameter_filename, "r")

diff -r 15885ed74b8a9b42d3733d31173749674429f8cb -r f34dc27c7b6f58782d13b931fadec8a22f18782a yt/utilities/grid_data_format/writer.py
--- a/yt/utilities/grid_data_format/writer.py
+++ b/yt/utilities/grid_data_format/writer.py
@@ -115,7 +115,7 @@
 
     # grab the display name and units from the field info container.
     display_name = fi.display_name
-    units = fi.get_units()
+    units = fi.units
 
     # check that they actually contain something...
     if display_name:
@@ -126,8 +126,6 @@
         sg.attrs["field_units"] = units
     else:
         sg.attrs["field_units"] = "None"
-    # @todo: the values must be in CGS already right?
-    sg.attrs["field_to_cgs"] = 1.0
     # @todo: is this always true?
     sg.attrs["staggering"] = 0
 
@@ -196,13 +194,19 @@
     g.attrs["field_ordering"] = 0
     # @todo: not yet supported by yt.
     g.attrs["boundary_conditions"] = np.array([0, 0, 0, 0, 0, 0], 'int32')
-
     if pf.cosmological_simulation:
         g.attrs["current_redshift"] = pf.current_redshift
         g.attrs["omega_matter"] = pf.omega_matter
         g.attrs["omega_lambda"] = pf.omega_lambda
         g.attrs["hubble_constant"] = pf.hubble_constant
 
+    g = f.create_group("dataset_units")
+    for u in ["length","time","mass"]:
+        unit_name = u+"_unit"
+        attr = getattr(pf, unit_name)
+        d = g.create_dataset(unit_name, data=float(attr))
+        d.attrs["unit"] = str(attr.units)
+
     ###
     # "field_types" group
     ###


https://bitbucket.org/yt_analysis/yt/commits/be574ae2668a/
Changeset:   be574ae2668a
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-14 03:38:48
Summary:     Bringing field_to_cgs back
Affected #:  2 files

diff -r f34dc27c7b6f58782d13b931fadec8a22f18782a -r be574ae2668a5f243ca1f0d55ab25821ed15d0a2 yt/frontends/gdf/data_structures.py
--- a/yt/frontends/gdf/data_structures.py
+++ b/yt/frontends/gdf/data_structures.py
@@ -180,7 +180,10 @@
         h5f = h5py.File(self.parameter_filename, "r")
         for field_name in h5f["/field_types"]:
             current_field = h5f["/field_types/%s" % field_name]
-            if 'field_units' in current_field.attrs:
+            if 'field_to_cgs' in current_field.attrs:
+                field_conv = current_field.attrs['field_to_cgs']
+                self.field_units[field_name] = just_one(field_conv)
+            elif 'field_units' in current_field.attrs:
                 field_units = current_field.attrs['field_units']
                 if isinstance(field_units, types.StringTypes):
                     current_field_units = current_field.attrs['field_units']

diff -r f34dc27c7b6f58782d13b931fadec8a22f18782a -r be574ae2668a5f243ca1f0d55ab25821ed15d0a2 yt/utilities/grid_data_format/writer.py
--- a/yt/utilities/grid_data_format/writer.py
+++ b/yt/utilities/grid_data_format/writer.py
@@ -126,6 +126,7 @@
         sg.attrs["field_units"] = units
     else:
         sg.attrs["field_units"] = "None"
+    sg.attrs["field_to_cgs"] = 1.0
     # @todo: is this always true?
     sg.attrs["staggering"] = 0
 

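For context, "field_to_cgs" is the pre-rework convention: a numeric factor converting a field's stored values to CGS, sometimes saved as a one-element array (hence the just_one call in the reader). A hedged sketch of what such an older file carries ("old_data.h5" is hypothetical):

    import h5py

    # Older GDF files record a numeric CGS conversion factor per field
    # instead of a unit string:
    h5f = h5py.File("old_data.h5", "r")
    print h5f["/field_types/density"].attrs["field_to_cgs"]   # e.g. 1.0
    h5f.close()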

https://bitbucket.org/yt_analysis/yt/commits/5fecbe9b9f3e/
Changeset:   5fecbe9b9f3e
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-14 05:44:05
Summary:     Didn't mean to add it back here--we're only adding it to the frontend for backwards compatibility.
Affected #:  1 file

diff -r be574ae2668a5f243ca1f0d55ab25821ed15d0a2 -r 5fecbe9b9f3e8d26e7e28169a01a448d1084397e yt/utilities/grid_data_format/writer.py
--- a/yt/utilities/grid_data_format/writer.py
+++ b/yt/utilities/grid_data_format/writer.py
@@ -126,7 +126,6 @@
         sg.attrs["field_units"] = units
     else:
         sg.attrs["field_units"] = "None"
-    sg.attrs["field_to_cgs"] = 1.0
     # @todo: is this always true?
     sg.attrs["staggering"] = 0
 


https://bitbucket.org/yt_analysis/yt/commits/8648f2ed9f3e/
Changeset:   8648f2ed9f3e
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-14 06:39:08
Summary:     Replacing these field names
Affected #:  1 file

diff -r 5fecbe9b9f3e8d26e7e28169a01a448d1084397e -r 8648f2ed9f3ea4ee3eec94d4c6ba3f2c42a25040 yt/frontends/gdf/fields.py
--- a/yt/frontends/gdf/fields.py
+++ b/yt/frontends/gdf/fields.py
@@ -31,8 +31,8 @@
         ("velocity_x", ("cm/s", ["velocity_x"], None)),
         ("velocity_y", ("cm/s", ["velocity_y"], None)),
         ("velocity_z", ("cm/s", ["velocity_z"], None)),
-        ("magnetic_field_x", ("gauss", ["magnetic_field_x"], None)),
-        ("magnetic_field_y", ("gauss", ["magnetic_field_y"], None)),
-        ("magnetic_field_z", ("gauss", ["magnetic_field_z"], None)),
+        ("mag_field_x", ("gauss", ["magnetic_field_x"], None)),
+        ("mag_field_y", ("gauss", ["magnetic_field_y"], None)),
+        ("mag_field_z", ("gauss", ["magnetic_field_z"], None)),
     )
     known_particle_fields = ()

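Each known_other_fields entry maps the on-disk field name to a (units, aliases, display_name) tuple, so files that store "mag_field_x" are still exposed under the yt alias "magnetic_field_x". An illustrative lookup, assuming a GDF dataset containing such a field ("sloshing.gdf" is hypothetical):

    >>> ds = yt.load("sloshing.gdf")
    >>> ad = ds.all_data()
    >>> print ad["magnetic_field_x"].units   # resolved via the alias
    gauss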

https://bitbucket.org/yt_analysis/yt/commits/90e4b1d7d24a/
Changeset:   90e4b1d7d24a
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-14 06:39:23
Summary:     Handling old datasets
Affected #:  1 file

diff -r 8648f2ed9f3ea4ee3eec94d4c6ba3f2c42a25040 -r 90e4b1d7d24ade195d6c475dbba2daaba30dcd15 yt/frontends/gdf/data_structures.py
--- a/yt/frontends/gdf/data_structures.py
+++ b/yt/frontends/gdf/data_structures.py
@@ -194,11 +194,16 @@
             else:
                 self.field_units[field_name] = ""
 
-        for unit_name in h5f["/dataset_units"]:
-            current_unit = h5f["/dataset_units/%s" % unit_name]
-            value = current_unit.value
-            unit = current_unit.attrs["unit"]
-            setattr(self, unit_name, self.quan(value,unit))
+        if "dataset_units" in h5f:
+            for unit_name in h5f["/dataset_units"]:
+                current_unit = h5f["/dataset_units/%s" % unit_name]
+                value = current_unit.value
+                unit = current_unit.attrs["unit"]
+                setattr(self, unit_name, self.quan(value,unit))
+        else:
+            self.length_unit = self.quan(1.0, "cm")
+            self.mass_unit = self.quan(1.0, "g")
+            self.time_unit = self.quan(1.0, "s")
 
         h5f.close()
 

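The guard means GDF files written before this series, which lack a "dataset_units" group, still load and fall back to CGS base units. A sketch of the observable behavior ("old_data.h5" is hypothetical; the values shown assume the fallback branch is taken):

    >>> import yt
    >>> ds = yt.load("old_data.h5")   # no "dataset_units" group present
    >>> print ds.length_unit, ds.mass_unit, ds.time_unit
    1.0 cm 1.0 g 1.0 s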

https://bitbucket.org/yt_analysis/yt/commits/09038c0bf1d5/
Changeset:   09038c0bf1d5
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-14 07:29:26
Summary:     Allow the user to change the units of the field.
Affected #:  1 file

diff -r 90e4b1d7d24ade195d6c475dbba2daaba30dcd15 -r 09038c0bf1d5b1057d35a413a97de447e3c7ca87 yt/data_objects/construction_data_containers.py
--- a/yt/data_objects/construction_data_containers.py
+++ b/yt/data_objects/construction_data_containers.py
@@ -587,7 +587,8 @@
         vals = op.finalize()
         return vals.reshape(self.ActiveDimensions, order="C")
 
-    def write_to_gdf(self, gdf_path, fields, nprocs=1, **kwargs):
+    def write_to_gdf(self, gdf_path, fields, nprocs=1, field_units=None,
+                     **kwargs):
         r"""
         Write the covering grid data to a GDF file.
 
@@ -600,7 +601,9 @@
         nprocs : integer, optional
             Split the covering grid into *nprocs* subgrids before
             writing to the GDF file. Default: 1
-
+        field_units : dictionary, optional
+            Dictionary of units to convert fields to. If not set, fields are
+            in their default units.
         All remaining keyword arguments are passed to
         yt.utilities.grid_data_format.writer.
 
@@ -611,12 +614,18 @@
         """
         data = {}
         for field in fields:
-            data[field] = (self[field].v, str(self[field].units))
-        le = self.left_edge.in_cgs().v
-        re = self.right_edge.in_cgs().v
+            if field in field_units:
+                units = field_units[field]
+            else:
+                units = str(self[field].units)
+            data[field] = (self[field].in_units(units).v, units)
+        le = self.left_edge.v
+        re = self.right_edge.v
         bbox = np.array([[l,r] for l,r in zip(le, re)])
         ds = load_uniform_grid(data, self.ActiveDimensions, bbox=bbox,
-                               nprocs=nprocs, sim_time=self.pf.current_time.in_cgs())
+                               length_unit=self.pf.length_unit, time_unit=self.pf.time_unit,
+                               mass_unit=self.pf.mass_unit, nprocs=nprocs,
+                               sim_time=self.pf.current_time.v)
         write_to_gdf(ds, gdf_path, **kwargs)
 
 class YTArbitraryGridBase(YTCoveringGridBase):

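A usage sketch for the new method (the dataset path is hypothetical). One caveat visible in the diff: the loop tests "field in field_units" unconditionally, so even though the keyword defaults to None, a dictionary (possibly empty) must be passed to avoid a TypeError:

    >>> import yt
    >>> ds = yt.load("GasSloshing/sloshing_nomag2_hdf5_plt_cnt_0100")
    >>> cube = ds.covering_grid(2, left_edge=ds.domain_left_edge,
    ...                         dims=[128, 128, 128])
    >>> cube.write_to_gdf("cube.h5", ["density", "temperature"], nprocs=16,
    ...                   field_units={"density": "Msun/kpc**3"},
    ...                   clobber=True)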

https://bitbucket.org/yt_analysis/yt/commits/c2fdeb2a0a4a/
Changeset:   c2fdeb2a0a4a
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-14 21:27:41
Summary:     More units
Affected #:  1 file

diff -r 09038c0bf1d5b1057d35a413a97de447e3c7ca87 -r c2fdeb2a0a4ad14eb7868474168e2473b6fdfc63 yt/utilities/grid_data_format/writer.py
--- a/yt/utilities/grid_data_format/writer.py
+++ b/yt/utilities/grid_data_format/writer.py
@@ -21,7 +21,8 @@
 from yt.utilities.exceptions import YTGDFAlreadyExists
 
 def write_to_gdf(pf, gdf_path, data_author=None, data_comment=None,
-                 particle_type_name="dark_matter", clobber=False):
+                 dataset_units=None, particle_type_name="dark_matter",
+                 clobber=False):
     r"""
     Write a parameter file to the given path in the Grid Data Format.
 
@@ -43,8 +44,8 @@
 
     Examples
     --------
-    >>> write_to_gdf(ds, "clumps.h5", data_author="Your Mom",
-    ...              data_comment="All Your Base Are Belong To Us", clobber=True)
+    >>> write_to_gdf(ds, "clumps.h5", data_author="John ZuHone",
+    ...              data_comment="My Really Cool Dataset", clobber=True)
     """
 
     f = _create_new_gdf(pf, gdf_path, data_author, data_comment,
@@ -201,7 +202,7 @@
         g.attrs["hubble_constant"] = pf.hubble_constant
 
     g = f.create_group("dataset_units")
-    for u in ["length","time","mass"]:
+    for u in ["length","time","mass","velocity","magnetic"]:
         unit_name = u+"_unit"
         attr = getattr(pf, unit_name)
         d = g.create_dataset(unit_name, data=float(attr))


https://bitbucket.org/yt_analysis/yt/commits/48e0c1630832/
Changeset:   48e0c1630832
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-16 15:12:30
Summary:     Optionally set dataset_units by hand
Affected #:  1 file

diff -r c2fdeb2a0a4ad14eb7868474168e2473b6fdfc63 -r 48e0c1630832153e57e713d70a19ecb89dd13acc yt/utilities/grid_data_format/writer.py
--- a/yt/utilities/grid_data_format/writer.py
+++ b/yt/utilities/grid_data_format/writer.py
@@ -36,6 +36,16 @@
         The name of the author who wrote the data. Default: None.
     data_comment : string, optional
         A descriptive comment. Default: None.
+    dataset_units : dictionary, optional
+        A dictionary of (value, unit) tuples to set the default units
+        of the dataset. Keys can be:
+            "length_unit"
+            "time_unit"
+            "mass_unit"
+            "velocity_unit"
+            "magnetic_unit"
+        If not specified, these will carry over from the parent
+        dataset.
     particle_type_name : string, optional
         The particle type of the particles in the dataset. Default: "dark_matter"
     clobber : boolean, optional
@@ -44,12 +54,16 @@
 
     Examples
     --------
+    >>> dataset_units = {"length_unit":(1.0,"Mpc"),
+    ...                  "time_unit":(1.0,"Myr")}
     >>> write_to_gdf(ds, "clumps.h5", data_author="John ZuHone",
+    ...              dataset_units=dataset_units,
     ...              data_comment="My Really Cool Dataset", clobber=True)
     """
 
     f = _create_new_gdf(pf, gdf_path, data_author, data_comment,
-                        particle_type_name, clobber=clobber)
+                        dataset_units=dataset_units,
+                        particle_type_name=particle_type_name, clobber=clobber)
 
     # now add the fields one-by-one
     for field_name in pf.field_list:
@@ -152,7 +166,9 @@
 
 
 def _create_new_gdf(pf, gdf_path, data_author=None, data_comment=None,
-                    particle_type_name="dark_matter", clobber=False):
+                    dataset_units=None, particle_type_name="dark_matter",
+                    clobber=False):
+
     # Make sure we have the absolute path to the file first
     gdf_path = os.path.abspath(gdf_path)
 
@@ -204,9 +220,14 @@
     g = f.create_group("dataset_units")
     for u in ["length","time","mass","velocity","magnetic"]:
         unit_name = u+"_unit"
-        attr = getattr(pf, unit_name)
-        d = g.create_dataset(unit_name, data=float(attr))
-        d.attrs["unit"] = str(attr.units)
+        if unit_name in dataset_units:
+            value, units = dataset_units[unit_name]
+        else:
+            attr = getattr(pf, unit_name)
+            value = float(attr)
+            units = str(attr.units)
+        d = g.create_dataset(unit_name, data=value)
+        d.attrs["unit"] = units
 
     ###
     # "field_types" group


https://bitbucket.org/yt_analysis/yt/commits/ebd5dba8007d/
Changeset:   ebd5dba8007d
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-16 15:53:48
Summary:     Fix units on output
Affected #:  1 file

diff -r 48e0c1630832153e57e713d70a19ecb89dd13acc -r ebd5dba8007d6d2f805816c2ca8eb0d3dbd7eb35 yt/utilities/grid_data_format/writer.py
--- a/yt/utilities/grid_data_format/writer.py
+++ b/yt/utilities/grid_data_format/writer.py
@@ -160,9 +160,9 @@
         # Check if this is a real field or particle data.
         grid.get_data(field_name)
         if fi.particle_type:  # particle data
-            pt_group[field_name] = grid[field_name]
+            pt_group[field_name] = grid[field_name].in_units(units)
         else:  # a field
-            grid_group[field_name] = grid[field_name]
+            grid_group[field_name] = grid[field_name].in_units(units)
 
 
 def _create_new_gdf(pf, gdf_path, data_author=None, data_comment=None,
@@ -217,6 +217,9 @@
         g.attrs["omega_lambda"] = pf.omega_lambda
         g.attrs["hubble_constant"] = pf.hubble_constant
 
+    if dataset_units is None:
+        dataset_units = {}
+
     g = f.create_group("dataset_units")
     for u in ["length","time","mass","velocity","magnetic"]:
         unit_name = u+"_unit"


https://bitbucket.org/yt_analysis/yt/commits/5e6004222abe/
Changeset:   5e6004222abe
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-16 16:33:05
Summary:     Change the name of the backup file
Affected #:  1 file

diff -r ebd5dba8007d6d2f805816c2ca8eb0d3dbd7eb35 -r 5e6004222abe6b256cb7a3ad924152f27b672e58 yt/frontends/athena/data_structures.py
--- a/yt/frontends/athena/data_structures.py
+++ b/yt/frontends/athena/data_structures.py
@@ -362,6 +362,7 @@
         if storage_filename is None:
             storage_filename = '%s.yt' % filename.split('/')[-1]
         self.storage_filename = storage_filename
+        self.backup_filename = self.filename[:-4] + "_backup.gdf"
         # Unfortunately we now have to mandate that the index gets 
         # instantiated so that we can make sure we have the correct left 
         # and right domain edges.


https://bitbucket.org/yt_analysis/yt/commits/8d461bae14c8/
Changeset:   8d461bae14c8
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-16 16:34:51
Summary:     Should be more specific here
Affected #:  1 file

diff -r 5e6004222abe6b256cb7a3ad924152f27b672e58 -r 8d461bae14c8284852cfab28fc5e03a713fed8d4 yt/data_objects/construction_data_containers.py
--- a/yt/data_objects/construction_data_containers.py
+++ b/yt/data_objects/construction_data_containers.py
@@ -605,7 +605,7 @@
             Dictionary of units to convert fields to. If not set, fields are
             in their default units.
         All remaining keyword arguments are passed to
-        yt.utilities.grid_data_format.writer.
+        yt.utilities.grid_data_format.writer.write_to_gdf.
 
         Examples
         --------


https://bitbucket.org/yt_analysis/yt/commits/62a6f73dc567/
Changeset:   62a6f73dc567
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-17 16:38:44
Summary:     Merge
Affected #:  5 files

diff -r 8d461bae14c8284852cfab28fc5e03a713fed8d4 -r 62a6f73dc567b51293987545850f40ab4d245aca doc/source/analyzing/units/2)_Data_Selection_and_fields.ipynb
--- a/doc/source/analyzing/units/2)_Data_Selection_and_fields.ipynb
+++ b/doc/source/analyzing/units/2)_Data_Selection_and_fields.ipynb
@@ -1,7 +1,7 @@
 {
  "metadata": {
   "name": "",
-  "signature": "sha256:b7541e0167001c6dd74306c8490385ace7bdb0533a829286f0505c0b24c67f16"
+  "signature": "sha256:882b31591c60bfe6ad4cb0f8842953d2e94fb8a12ce742be831a65642eea72c9"
  },
  "nbformat": 3,
  "nbformat_minor": 0,
@@ -325,8 +325,7 @@
      "input": [
       "from astropy import units as u\n",
       "x = 42.0 * u.meter\n",
-      "y = YTQuantity(x)\n",
-      "y2 = YTQuantity.from_astropy(x) # Another way to create the quantity"
+      "y = YTQuantity.from_astropy(x) "
      ],
      "language": "python",
      "metadata": {},
@@ -337,8 +336,7 @@
      "collapsed": false,
      "input": [
       "print x, type(x)\n",
-      "print y, type(y)\n",
-      "print y2, type(y2)"
+      "print y, type(y)"
      ],
      "language": "python",
      "metadata": {},
@@ -349,8 +347,7 @@
      "collapsed": false,
      "input": [
       "a = np.random.random(size=10) * u.km/u.s\n",
-      "b = YTArray(a)\n",
-      "b2 = YTArray.from_astropy(a) # Another way to create the quantity"
+      "b = YTArray.from_astropy(a)"
      ],
      "language": "python",
      "metadata": {},
@@ -361,8 +358,7 @@
      "collapsed": false,
      "input": [
       "print a, type(a)\n",
-      "print b, type(b)\n",
-      "print b2, type(b2)"
+      "print b, type(b)"
      ],
      "language": "python",
      "metadata": {},
@@ -438,7 +434,7 @@
      "collapsed": false,
      "input": [
       "k1 = kboltz.to_astropy()\n",
-      "k2 = YTQuantity(kb)\n",
+      "k2 = YTQuantity.from_astropy(kb)\n",
       "print k1 == k2"
      ],
      "language": "python",
@@ -449,7 +445,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "c = YTArray(a)\n",
+      "c = YTArray.from_astropy(a)\n",
       "d = c.to_astropy()\n",
       "print a == d"
      ],

diff -r 8d461bae14c8284852cfab28fc5e03a713fed8d4 -r 62a6f73dc567b51293987545850f40ab4d245aca doc/source/cookbook/fits_radio_cubes.ipynb
--- a/doc/source/cookbook/fits_radio_cubes.ipynb
+++ b/doc/source/cookbook/fits_radio_cubes.ipynb
@@ -98,7 +98,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "wcs_slc.save()"
+      "slc.save()"
      ],
      "language": "python",
      "metadata": {},
@@ -462,4 +462,4 @@
    "metadata": {}
   }
  ]
-}
\ No newline at end of file
+}

diff -r 8d461bae14c8284852cfab28fc5e03a713fed8d4 -r 62a6f73dc567b51293987545850f40ab4d245aca yt/units/tests/test_ytarray.py
--- a/yt/units/tests/test_ytarray.py
+++ b/yt/units/tests/test_ytarray.py
@@ -746,15 +746,15 @@
     yt_quan2 = YTQuantity.from_astropy(ap_quan)
 
     yield assert_array_equal, ap_arr, yt_arr.to_astropy()
-    yield assert_array_equal, yt_arr, YTArray(ap_arr)
+    yield assert_array_equal, yt_arr, YTArray.from_astropy(ap_arr)
     yield assert_array_equal, yt_arr, yt_arr2
 
     yield assert_equal, ap_quan, yt_quan.to_astropy()
-    yield assert_equal, yt_quan, YTQuantity(ap_quan)
+    yield assert_equal, yt_quan, YTQuantity.from_astropy(ap_quan)
     yield assert_equal, yt_quan, yt_quan2
 
-    yield assert_array_equal, yt_arr, YTArray(yt_arr.to_astropy())
-    yield assert_equal, yt_quan, YTQuantity(yt_quan.to_astropy())
+    yield assert_array_equal, yt_arr, YTArray.from_astropy(yt_arr.to_astropy())
+    yield assert_equal, yt_quan, YTQuantity.from_astropy(yt_quan.to_astropy())
 
 def test_subclass():
 

diff -r 8d461bae14c8284852cfab28fc5e03a713fed8d4 -r 62a6f73dc567b51293987545850f40ab4d245aca yt/units/yt_array.py
--- a/yt/units/yt_array.py
+++ b/yt/units/yt_array.py
@@ -323,9 +323,6 @@
                     "Perhaps you meant to do something like this instead: \n"
                     "ds.arr(%s, \"%s\")" % (input_array, input_units)
                     )
-        if _astropy._units is not None:
-            if isinstance(input_array, _astropy.units.quantity.Quantity):
-                return cls.from_astropy(input_array)
         if isinstance(input_array, YTArray):
             if input_units is None:
                 if registry is None:

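The merged notebook and test updates standardize on the explicit from_astropy/to_astropy converters rather than passing AstroPy objects straight to the YTArray constructor (that implicit path is removed from yt_array.py above). A short round-trip sketch following the updated tests:

    import numpy as np
    from astropy import units as u
    from yt.units.yt_array import YTArray, YTQuantity

    x = 42.0 * u.meter
    y = YTQuantity.from_astropy(x)    # AstroPy Quantity -> YTQuantity
    assert x == y.to_astropy()        # and back again

    a = np.random.random(size=10) * u.km / u.s
    b = YTArray.from_astropy(a)       # AstroPy array Quantity -> YTArray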

https://bitbucket.org/yt_analysis/yt/commits/fdd1e01097e9/
Changeset:   fdd1e01097e9
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-17 16:39:25
Summary:     Adding magnetic units to stream datasets.
Affected #:  1 file

diff -r 62a6f73dc567b51293987545850f40ab4d245aca -r fdd1e01097e992c4a255f141b87e14c53bcee9d6 yt/frontends/stream/data_structures.py
--- a/yt/frontends/stream/data_structures.py
+++ b/yt/frontends/stream/data_structures.py
@@ -337,8 +337,8 @@
 
     def _set_code_unit_attributes(self):
         base_units = self.stream_handler.code_units
-        attrs = ('length_unit', 'mass_unit', 'time_unit', 'velocity_unit')
-        cgs_units = ('cm', 'g', 's', 'cm/s')
+        attrs = ('length_unit', 'mass_unit', 'time_unit', 'velocity_unit', 'magnetic_unit')
+        cgs_units = ('cm', 'g', 's', 'cm/s', 'gauss')
         for unit, attr, cgs_unit in zip(base_units, attrs, cgs_units):
             if isinstance(unit, basestring):
                 uq = self.quan(1.0, unit)


https://bitbucket.org/yt_analysis/yt/commits/6b928a2211bd/
Changeset:   6b928a2211bd
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-17 21:21:24
Summary:     Adding magnetic units to the stream dataset loader functions.
Affected #:  1 file

diff -r fdd1e01097e992c4a255f141b87e14c53bcee9d6 -r 6b928a2211bd4051ef3f568e351ce458d15ac21b yt/frontends/stream/data_structures.py
--- a/yt/frontends/stream/data_structures.py
+++ b/yt/frontends/stream/data_structures.py
@@ -512,7 +512,8 @@
 
 def load_uniform_grid(data, domain_dimensions, length_unit=None, bbox=None,
                       nprocs=1, sim_time=0.0, mass_unit=None, time_unit=None,
-                      velocity_unit=None, periodicity=(True, True, True),
+                      velocity_unit=None, magnetic_unit=None,
+                      periodicity=(True, True, True),
                       geometry = "cartesian"):
     r"""Load a uniform grid of data into yt as a
     :class:`~yt.frontends.stream.data_structures.StreamHandler`.
@@ -551,6 +552,8 @@
         Unit to use for times.  Defaults to unitless.
     velocity_unit : string
         Unit to use for velocities.  Defaults to unitless.
+    magnetic_unit : string
+        Unit to use for magnetic fields. Defaults to unitless.
     periodicity : tuple of booleans
         Determines whether the data will be treated as periodic along
         each axis
@@ -640,6 +643,8 @@
         time_unit = 'code_time'
     if velocity_unit is None:
         velocity_unit = 'code_velocity'
+    if magnetic_unit is None:
+        magnetic_unit = 'code_magnetic'
 
     handler = StreamHandler(
         grid_left_edges,
@@ -651,7 +656,7 @@
         np.zeros(nprocs).reshape((nprocs,1)),
         sfh,
         field_units,
-        (length_unit, mass_unit, time_unit, velocity_unit),
+        (length_unit, mass_unit, time_unit, velocity_unit, magnetic_unit),
         particle_types=particle_types,
         periodicity=periodicity
     )
@@ -685,7 +690,8 @@
 def load_amr_grids(grid_data, domain_dimensions,
                    field_units=None, bbox=None, sim_time=0.0, length_unit=None,
                    mass_unit=None, time_unit=None, velocity_unit=None,
-                   periodicity=(True, True, True), geometry = "cartesian"):
+                   magnetic_unit=None, periodicity=(True, True, True),
+                   geometry = "cartesian"):
     r"""Load a set of grids of data into yt as a
     :class:`~yt.frontends.stream.data_structures.StreamHandler`.
     This should allow a sequence of grids of varying resolution of data to be
@@ -723,6 +729,8 @@
         Unit to use for times.  Defaults to unitless.
     velocity_unit : string or float
         Unit to use for velocities.  Defaults to unitless.
+    magnetic_unit : string or float
+        Unit to use for magnetic fields.  Defaults to unitless.
     bbox : array_like (xdim:zdim, LE:RE), optional
         Size of computational domain in units specified by length_unit.
         Defaults to a cubic unit-length domain.
@@ -806,6 +814,8 @@
         time_unit = 'code_time'
     if velocity_unit is None:
         velocity_unit = 'code_velocity'
+    if magnetic_unit is None:
+        magnetic_unit = 'code_magnetic'
 
     handler = StreamHandler(
         grid_left_edges,
@@ -817,7 +827,7 @@
         np.zeros(ngrids).reshape((ngrids,1)),
         sfh,
         field_units,
-        (length_unit, mass_unit, time_unit, velocity_unit),
+        (length_unit, mass_unit, time_unit, velocity_unit, magnetic_unit),
         particle_types=set_particle_types(grid_data[0])
     )
 
@@ -969,7 +979,8 @@
 
 def load_particles(data, length_unit = None, bbox=None,
                    sim_time=0.0, mass_unit = None, time_unit = None,
-                   velocity_unit=None, periodicity=(True, True, True),
+                   velocity_unit=None, magnetic_unit=None,
+                   periodicity=(True, True, True),
                    n_ref = 64, over_refine_factor = 1, geometry = "cartesian"):
     r"""Load a set of particles into yt as a
     :class:`~yt.frontends.stream.data_structures.StreamParticleHandler`.
@@ -997,6 +1008,10 @@
         Conversion factor from simulation mass units to grams
     time_unit : float
         Conversion factor from simulation time units to seconds
+    velocity_unit : float
+        Conversion factor from simulation velocity units to cm/s
+    magnetic_unit : float
+        Conversion factor from simulation magnetic units to gauss
     bbox : array_like (xdim:zdim, LE:RE), optional
         Size of computational domain in units sim_unit_to_cm
     sim_time : float, optional
@@ -1057,6 +1072,8 @@
         time_unit = 'code_time'
     if velocity_unit is None:
         velocity_unit = 'code_velocity'
+    if magnetic_unit is None:
+        magnetic_unit = 'code_magnetic'
 
     # I'm not sure we need any of this.
     handler = StreamHandler(
@@ -1069,7 +1086,7 @@
         np.zeros(nprocs).reshape((nprocs,1)),
         sfh,
         field_units,
-        (length_unit, mass_unit, time_unit, velocity_unit),
+        (length_unit, mass_unit, time_unit, velocity_unit, magnetic_unit),
         particle_types=particle_types,
         periodicity=periodicity
     )
@@ -1141,7 +1158,8 @@
 def load_hexahedral_mesh(data, connectivity, coordinates,
                          length_unit = None, bbox=None, sim_time=0.0,
                          mass_unit = None, time_unit = None,
-                         velocity_unit = None, periodicity=(True, True, True),
+                         velocity_unit = None, magnetic_unit = None,
+                         periodicity=(True, True, True),
                          geometry = "cartesian"):
     r"""Load a hexahedral mesh of data into yt as a
     :class:`~yt.frontends.stream.data_structures.StreamHandler`.
@@ -1210,6 +1228,8 @@
         time_unit = 'code_time'
     if velocity_unit is None:
         velocity_unit = 'code_velocity'
+    if magnetic_unit is None:
+        magnetic_unit = 'code_magnetic'
 
     # I'm not sure we need any of this.
     handler = StreamHandler(
@@ -1222,7 +1242,7 @@
         np.zeros(nprocs).reshape((nprocs,1)),
         sfh,
         field_units,
-        (length_unit, mass_unit, time_unit, velocity_unit),
+        (length_unit, mass_unit, time_unit, velocity_unit, magnetic_unit),
         particle_types=particle_types,
         periodicity=periodicity
     )

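With these changes every stream loader takes magnetic_unit on the same footing as the other base units, defaulting to "code_magnetic" when unset. A minimal load_uniform_grid sketch (array contents are placeholders):

    import numpy as np
    from yt.frontends.stream.api import load_uniform_grid

    shape = (64, 64, 64)
    data = {"density": np.random.random(shape),
            "magnetic_field_x": np.random.random(shape)}
    ds = load_uniform_grid(data, shape, length_unit="Mpc",
                           magnetic_unit="gauss")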

https://bitbucket.org/yt_analysis/yt/commits/0dd4980aa84e/
Changeset:   0dd4980aa84e
Branch:      yt-3.0
User:        xarthisius
Date:        2014-07-18 17:09:07
Summary:     Merged in jzuhone/yt-3.x/yt-3.0 (pull request #1001)

Some GDF writing work
Affected #:  12 files

diff -r d44a659ea62cc86fb53c327e17e9b70d6e5f53ef -r 0dd4980aa84e80807a030f12b77f07ae9902a935 yt/data_objects/construction_data_containers.py
--- a/yt/data_objects/construction_data_containers.py
+++ b/yt/data_objects/construction_data_containers.py
@@ -45,6 +45,8 @@
     parallel_objects, parallel_root_only, ParallelAnalysisInterface
 from yt.units.unit_object import Unit
 import yt.geometry.particle_deposit as particle_deposit
+from yt.utilities.grid_data_format.writer import write_to_gdf
+from yt.frontends.stream.api import load_uniform_grid
 
 from yt.fields.field_exceptions import \
     NeedsGridType,\
@@ -571,6 +573,10 @@
     def LeftEdge(self):
         return self.left_edge
 
+    @property
+    def RightEdge(self):
+        return self.right_edge
+
     def deposit(self, positions, fields = None, method = None):
         cls = getattr(particle_deposit, "deposit_%s" % method, None)
         if cls is None:
@@ -581,6 +587,47 @@
         vals = op.finalize()
         return vals.reshape(self.ActiveDimensions, order="C")
 
+    def write_to_gdf(self, gdf_path, fields, nprocs=1, field_units=None,
+                     **kwargs):
+        r"""
+        Write the covering grid data to a GDF file.
+
+        Parameters
+        ----------
+        gdf_path : string
+            Pathname of the GDF file to write.
+        fields : list of strings
+            Fields to write to the GDF file.
+        nprocs : integer, optional
+            Split the covering grid into *nprocs* subgrids before
+            writing to the GDF file. Default: 1
+        field_units : dictionary, optional
+            Dictionary of units to convert fields to. If not set, fields are
+            in their default units.
+        All remaining keyword arguments are passed to
+        yt.utilities.grid_data_format.writer.write_to_gdf.
+
+        Examples
+        --------
+        >>> cube.write_to_gdf("clumps.h5", ["density","temperature"], nprocs=16,
+        ...                   clobber=True)
+        """
+        data = {}
+        for field in fields:
+            if field in field_units:
+                units = field_units[field]
+            else:
+                units = str(self[field].units)
+            data[field] = (self[field].in_units(units).v, units)
+        le = self.left_edge.v
+        re = self.right_edge.v
+        bbox = np.array([[l,r] for l,r in zip(le, re)])
+        ds = load_uniform_grid(data, self.ActiveDimensions, bbox=bbox,
+                               length_unit=self.pf.length_unit, time_unit=self.pf.time_unit,
+                               mass_unit=self.pf.mass_unit, nprocs=nprocs,
+                               sim_time=self.pf.current_time.v)
+        write_to_gdf(ds, gdf_path, **kwargs)
+
 class YTArbitraryGridBase(YTCoveringGridBase):
     """A 3D region with arbitrary bounds and dimensions.
 

diff -r d44a659ea62cc86fb53c327e17e9b70d6e5f53ef -r 0dd4980aa84e80807a030f12b77f07ae9902a935 yt/frontends/athena/data_structures.py
--- a/yt/frontends/athena/data_structures.py
+++ b/yt/frontends/athena/data_structures.py
@@ -362,6 +362,7 @@
         if storage_filename is None:
             storage_filename = '%s.yt' % filename.split('/')[-1]
         self.storage_filename = storage_filename
+        self.backup_filename = self.filename[:-4] + "_backup.gdf"
         # Unfortunately we now have to mandate that the index gets 
         # instantiated so that we can make sure we have the correct left 
         # and right domain edges.

diff -r d44a659ea62cc86fb53c327e17e9b70d6e5f53ef -r 0dd4980aa84e80807a030f12b77f07ae9902a935 yt/frontends/gdf/data_structures.py
--- a/yt/frontends/gdf/data_structures.py
+++ b/yt/frontends/gdf/data_structures.py
@@ -63,7 +63,7 @@
                 self.dds[2] = 1.0
         self.field_data['dx'], self.field_data['dy'], self.field_data['dz'] = \
             self.dds
-
+        self.dds = self.pf.arr(self.dds, "code_length")
 
 class GDFHierarchy(GridIndex):
 
@@ -186,17 +186,26 @@
             elif 'field_units' in current_field.attrs:
                 field_units = current_field.attrs['field_units']
                 if isinstance(field_units, types.StringTypes):
-                    current_fields_unit = current_field.attrs['field_units']
+                    current_field_units = current_field.attrs['field_units']
                 else:
-                    current_fields_unit = \
+                    current_field_units = \
                         just_one(current_field.attrs['field_units'])
                 self.field_units[field_name] = current_field_units
             else:
-                current_fields_unit = ""
+                self.field_units[field_name] = ""
+
+        if "dataset_units" in h5f:
+            for unit_name in h5f["/dataset_units"]:
+                current_unit = h5f["/dataset_units/%s" % unit_name]
+                value = current_unit.value
+                unit = current_unit.attrs["unit"]
+                setattr(self, unit_name, self.quan(value,unit))
+        else:
+            self.length_unit = self.quan(1.0, "cm")
+            self.mass_unit = self.quan(1.0, "g")
+            self.time_unit = self.quan(1.0, "s")
+
         h5f.close()
-        self.length_unit = self.quan(1.0, "cm")
-        self.mass_unit = self.quan(1.0, "g")
-        self.time_unit = self.quan(1.0, "s")
 
     def _parse_parameter_file(self):
         self._handle = h5py.File(self.parameter_filename, "r")

diff -r d44a659ea62cc86fb53c327e17e9b70d6e5f53ef -r 0dd4980aa84e80807a030f12b77f07ae9902a935 yt/frontends/gdf/fields.py
--- a/yt/frontends/gdf/fields.py
+++ b/yt/frontends/gdf/fields.py
@@ -25,8 +25,9 @@
 class GDFFieldInfo(FieldInfoContainer):
     known_other_fields = (
         ("density", ("g/cm**3", ["density"], None)),
-        ("specific_energy", ("erg / g", ["thermal_energy"], None)),
-        ("pressure", ("", ["pressure"], None)),
+        ("specific_energy", ("erg/g", ["thermal_energy"], None)),
+        ("pressure", ("erg/cm**3", ["pressure"], None)),
+        ("temperature", ("K", ["temperature"], None)),
         ("velocity_x", ("cm/s", ["velocity_x"], None)),
         ("velocity_y", ("cm/s", ["velocity_y"], None)),
         ("velocity_z", ("cm/s", ["velocity_z"], None)),

diff -r d44a659ea62cc86fb53c327e17e9b70d6e5f53ef -r 0dd4980aa84e80807a030f12b77f07ae9902a935 yt/frontends/stream/data_structures.py
--- a/yt/frontends/stream/data_structures.py
+++ b/yt/frontends/stream/data_structures.py
@@ -337,8 +337,8 @@
 
     def _set_code_unit_attributes(self):
         base_units = self.stream_handler.code_units
-        attrs = ('length_unit', 'mass_unit', 'time_unit', 'velocity_unit')
-        cgs_units = ('cm', 'g', 's', 'cm/s')
+        attrs = ('length_unit', 'mass_unit', 'time_unit', 'velocity_unit', 'magnetic_unit')
+        cgs_units = ('cm', 'g', 's', 'cm/s', 'gauss')
         for unit, attr, cgs_unit in zip(base_units, attrs, cgs_units):
             if isinstance(unit, basestring):
                 uq = self.quan(1.0, unit)
@@ -512,7 +512,8 @@
 
 def load_uniform_grid(data, domain_dimensions, length_unit=None, bbox=None,
                       nprocs=1, sim_time=0.0, mass_unit=None, time_unit=None,
-                      velocity_unit=None, periodicity=(True, True, True),
+                      velocity_unit=None, magnetic_unit=None,
+                      periodicity=(True, True, True),
                       geometry = "cartesian"):
     r"""Load a uniform grid of data into yt as a
     :class:`~yt.frontends.stream.data_structures.StreamHandler`.
@@ -551,6 +552,8 @@
         Unit to use for times.  Defaults to unitless.
     velocity_unit : string
         Unit to use for velocities.  Defaults to unitless.
+    magnetic_unit : string
+        Unit to use for magnetic fields. Defaults to unitless.
     periodicity : tuple of booleans
         Determines whether the data will be treated as periodic along
         each axis
@@ -640,6 +643,8 @@
         time_unit = 'code_time'
     if velocity_unit is None:
         velocity_unit = 'code_velocity'
+    if magnetic_unit is None:
+        magnetic_unit = 'code_magnetic'
 
     handler = StreamHandler(
         grid_left_edges,
@@ -651,7 +656,7 @@
         np.zeros(nprocs).reshape((nprocs,1)),
         sfh,
         field_units,
-        (length_unit, mass_unit, time_unit, velocity_unit),
+        (length_unit, mass_unit, time_unit, velocity_unit, magnetic_unit),
         particle_types=particle_types,
         periodicity=periodicity
     )
@@ -685,7 +690,8 @@
 def load_amr_grids(grid_data, domain_dimensions,
                    field_units=None, bbox=None, sim_time=0.0, length_unit=None,
                    mass_unit=None, time_unit=None, velocity_unit=None,
-                   periodicity=(True, True, True), geometry = "cartesian"):
+                   magnetic_unit=None, periodicity=(True, True, True),
+                   geometry = "cartesian"):
     r"""Load a set of grids of data into yt as a
     :class:`~yt.frontends.stream.data_structures.StreamHandler`.
     This should allow a sequence of grids of varying resolution of data to be
@@ -723,6 +729,8 @@
         Unit to use for times.  Defaults to unitless.
     velocity_unit : string or float
         Unit to use for velocities.  Defaults to unitless.
+    magnetic_unit : string or float
+        Unit to use for magnetic fields.  Defaults to unitless.
     bbox : array_like (xdim:zdim, LE:RE), optional
         Size of computational domain in units specified by length_unit.
         Defaults to a cubic unit-length domain.
@@ -806,6 +814,8 @@
         time_unit = 'code_time'
     if velocity_unit is None:
         velocity_unit = 'code_velocity'
+    if magnetic_unit is None:
+        magnetic_unit = 'code_magnetic'
 
     handler = StreamHandler(
         grid_left_edges,
@@ -817,7 +827,7 @@
         np.zeros(ngrids).reshape((ngrids,1)),
         sfh,
         field_units,
-        (length_unit, mass_unit, time_unit, velocity_unit),
+        (length_unit, mass_unit, time_unit, velocity_unit, magnetic_unit),
         particle_types=set_particle_types(grid_data[0])
     )
 
@@ -969,7 +979,8 @@
 
 def load_particles(data, length_unit = None, bbox=None,
                    sim_time=0.0, mass_unit = None, time_unit = None,
-                   velocity_unit=None, periodicity=(True, True, True),
+                   velocity_unit=None, magnetic_unit=None,
+                   periodicity=(True, True, True),
                    n_ref = 64, over_refine_factor = 1, geometry = "cartesian"):
     r"""Load a set of particles into yt as a
     :class:`~yt.frontends.stream.data_structures.StreamParticleHandler`.
@@ -997,6 +1008,10 @@
         Conversion factor from simulation mass units to grams
     time_unit : float
         Conversion factor from simulation time units to seconds
+    velocity_unit : float
+        Conversion factor from simulation velocity units to cm/s
+    magnetic_unit : float
+        Conversion factor from simulation magnetic units to gauss
     bbox : array_like (xdim:zdim, LE:RE), optional
         Size of computational domain in units sim_unit_to_cm
     sim_time : float, optional
@@ -1057,6 +1072,8 @@
         time_unit = 'code_time'
     if velocity_unit is None:
         velocity_unit = 'code_velocity'
+    if magnetic_unit is None:
+        magnetic_unit = 'code_magnetic'
 
     # I'm not sure we need any of this.
     handler = StreamHandler(
@@ -1069,7 +1086,7 @@
         np.zeros(nprocs).reshape((nprocs,1)),
         sfh,
         field_units,
-        (length_unit, mass_unit, time_unit, velocity_unit),
+        (length_unit, mass_unit, time_unit, velocity_unit, magnetic_unit),
         particle_types=particle_types,
         periodicity=periodicity
     )
@@ -1141,7 +1158,8 @@
 def load_hexahedral_mesh(data, connectivity, coordinates,
                          length_unit = None, bbox=None, sim_time=0.0,
                          mass_unit = None, time_unit = None,
-                         velocity_unit = None, periodicity=(True, True, True),
+                         velocity_unit = None, magnetic_unit = None,
+                         periodicity=(True, True, True),
                          geometry = "cartesian"):
     r"""Load a hexahedral mesh of data into yt as a
     :class:`~yt.frontends.stream.data_structures.StreamHandler`.
@@ -1210,6 +1228,8 @@
         time_unit = 'code_time'
     if velocity_unit is None:
         velocity_unit = 'code_velocity'
+    if magnetic_unit is None:
+        magnetic_unit = 'code_magnetic'
 
     # I'm not sure we need any of this.
     handler = StreamHandler(
@@ -1222,7 +1242,7 @@
         np.zeros(nprocs).reshape((nprocs,1)),
         sfh,
         field_units,
-        (length_unit, mass_unit, time_unit, velocity_unit),
+        (length_unit, mass_unit, time_unit, velocity_unit, magnetic_unit),
         particle_types=particle_types,
         periodicity=periodicity
     )

diff -r d44a659ea62cc86fb53c327e17e9b70d6e5f53ef -r 0dd4980aa84e80807a030f12b77f07ae9902a935 yt/utilities/exceptions.py
--- a/yt/utilities/exceptions.py
+++ b/yt/utilities/exceptions.py
@@ -409,3 +409,10 @@
             """ % (self.field,)
         r += "\n".join([c for c in self.conditions])
         return r
+
+class YTGDFAlreadyExists(Exception):
+    def __init__(self, filename):
+        self.filename = filename
+
+    def __str__(self):
+        return "A file already exists at %s and clobber=False." % self.filename
\ No newline at end of file

diff -r d44a659ea62cc86fb53c327e17e9b70d6e5f53ef -r 0dd4980aa84e80807a030f12b77f07ae9902a935 yt/utilities/grid_data_format/writer.py
--- a/yt/utilities/grid_data_format/writer.py
+++ b/yt/utilities/grid_data_format/writer.py
@@ -18,11 +18,12 @@
 import numpy as np
 
 from yt import __version__ as yt_version
-
+from yt.utilities.exceptions import YTGDFAlreadyExists
 
 def write_to_gdf(pf, gdf_path, data_author=None, data_comment=None,
-                 particle_type_name="dark_matter"):
-    """
+                 dataset_units=None, particle_type_name="dark_matter",
+                 clobber=False):
+    r"""
     Write a parameter file to the given path in the Grid Data Format.
 
     Parameters
@@ -31,11 +32,38 @@
         The yt data to write out.
     gdf_path : string
         The path of the file to output.
+    data_author : string, optional
+        The name of the author who wrote the data. Default: None.
+    data_comment : string, optional
+        A descriptive comment. Default: None.
+    dataset_units : dictionary, optional
+        A dictionary of (value, unit) tuples to set the default units
+        of the dataset. Keys can be:
+            "length_unit"
+            "time_unit"
+            "mass_unit"
+            "velocity_unit"
+            "magnetic_unit"
+        If not specified, these will carry over from the parent
+        dataset.
+    particle_type_name : string, optional
+        The particle type of the particles in the dataset. Default: "dark_matter"
+    clobber : boolean, optional
+        Whether or not to clobber an already existing file. If False, attempting
+        to overwrite an existing file will result in an exception.
 
+    Examples
+    --------
+    >>> dataset_units = {"length_unit":(1.0,"Mpc"),
+    ...                  "time_unit":(1.0,"Myr")}
+    >>> write_to_gdf(ds, "clumps.h5", data_author="John ZuHone",
+    ...              dataset_units=dataset_units,
+    ...              data_comment="My Really Cool Dataset", clobber=True)
     """
 
     f = _create_new_gdf(pf, gdf_path, data_author, data_comment,
-                        particle_type_name)
+                        dataset_units=dataset_units,
+                        particle_type_name=particle_type_name, clobber=clobber)
 
     # now add the fields one-by-one
     for field_name in pf.field_list:
@@ -102,7 +130,7 @@
 
     # grab the display name and units from the field info container.
     display_name = fi.display_name
-    units = fi.get_units()
+    units = fi.units
 
     # check that they actually contain something...
     if display_name:
@@ -113,8 +141,6 @@
         sg.attrs["field_units"] = units
     else:
         sg.attrs["field_units"] = "None"
-    # @todo: the values must be in CGS already right?
-    sg.attrs["field_to_cgs"] = 1.0
     # @todo: is this always true?
     sg.attrs["staggering"] = 0
 
@@ -134,21 +160,22 @@
         # Check if this is a real field or particle data.
         grid.get_data(field_name)
         if fi.particle_type:  # particle data
-            pt_group[field_name] = grid[field_name]
+            pt_group[field_name] = grid[field_name].in_units(units)
         else:  # a field
-            grid_group[field_name] = grid[field_name]
+            grid_group[field_name] = grid[field_name].in_units(units)
 
 
 def _create_new_gdf(pf, gdf_path, data_author=None, data_comment=None,
-                    particle_type_name="dark_matter"):
+                    dataset_units=None, particle_type_name="dark_matter",
+                    clobber=False):
+
     # Make sure we have the absolute path to the file first
     gdf_path = os.path.abspath(gdf_path)
 
-    # Stupid check -- is the file already there?
-    # @todo: make this a specific exception/error.
-    if os.path.exists(gdf_path):
-        raise IOError("A file already exists in the location: %s. Please \
-                      provide a new one or remove that file." % gdf_path)
+    # Is the file already there? If so, are we allowing
+    # clobbering?
+    if os.path.exists(gdf_path) and not clobber:
+        raise YTGDFAlreadyExists(gdf_path)
 
     ###
     # Create and open the file with h5py
@@ -184,13 +211,27 @@
     g.attrs["field_ordering"] = 0
     # @todo: not yet supported by yt.
     g.attrs["boundary_conditions"] = np.array([0, 0, 0, 0, 0, 0], 'int32')
-
     if pf.cosmological_simulation:
         g.attrs["current_redshift"] = pf.current_redshift
         g.attrs["omega_matter"] = pf.omega_matter
         g.attrs["omega_lambda"] = pf.omega_lambda
         g.attrs["hubble_constant"] = pf.hubble_constant
 
+    if dataset_units is None:
+        dataset_units = {}
+
+    g = f.create_group("dataset_units")
+    for u in ["length","time","mass","velocity","magnetic"]:
+        unit_name = u+"_unit"
+        if unit_name in dataset_units:
+            value, units = dataset_units[unit_name]
+        else:
+            attr = getattr(pf, unit_name)
+            value = float(attr)
+            units = str(attr.units)
+        d = g.create_dataset(unit_name, data=value)
+        d.attrs["unit"] = units
+
     ###
     # "field_types" group
     ###
@@ -212,7 +253,7 @@
     f["grid_left_index"] = np.array(
         [grid.get_global_startindex() for grid in pf.index.grids]
     ).reshape(pf.index.grid_dimensions.shape[0], 3)
-    f["grid_level"] = pf.index.grid_levels
+    f["grid_level"] = pf.index.grid_levels.flat
     # @todo: Fill with proper values
     f["grid_parent_id"] = -np.ones(pf.index.grid_dimensions.shape[0])
     f["grid_particle_count"] = pf.index.grid_particle_count

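Taken together, the pull request makes a unit-preserving round trip through GDF possible. A closing sketch (paths are hypothetical; the printed value assumes the dataset_units override takes effect as in the frontend code above):

    >>> import yt
    >>> from yt.utilities.grid_data_format.writer import write_to_gdf
    >>> ds = yt.load("GasSloshing/sloshing_nomag2_hdf5_plt_cnt_0100")
    >>> write_to_gdf(ds, "sloshing.gdf", clobber=True,
    ...              dataset_units={"length_unit": (1.0, "Mpc")})
    >>> ds2 = yt.load("sloshing.gdf")
    >>> print ds2.length_unit
    1.0 Mpc
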
Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.


