[yt-svn] commit/yt: 5 new changesets

commits-noreply at bitbucket.org
Mon Nov 9 11:17:52 PST 2015


5 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/9c99f941d4dc/
Changeset:   9c99f941d4dc
Branch:      yt
User:        MatthewTurk
Date:        2015-10-20 21:32:25+00:00
Summary:     Defer to coordinate handlers for width.  Fix geographic.
Affected #:  3 files

diff -r 35741b799d57ac96134c4f869cfa92e8c4f9e2be -r 9c99f941d4dc3a9f718a0ac9eabbd8cb4da6524d yt/geometry/coordinates/geographic_coordinates.py
--- a/yt/geometry/coordinates/geographic_coordinates.py
+++ b/yt/geometry/coordinates/geographic_coordinates.py
@@ -230,19 +230,6 @@
     def convert_from_spherical(self, coord):
         raise NotImplementedError
 
-    # Despite being mutables, we uses these here to be clear about how these
-    # are generated and to ensure that they are not re-generated unnecessarily
-    axis_name = { 0  : 'latitude',  1  : 'longitude',  2  : 'altitude',
-                 'latitude' : 'latitude',
-                 'longitude' : 'longitude', 
-                 'altitude' : 'altitude',
-                 'Latitude' : 'latitude',
-                 'Longitude' : 'longitude', 
-                 'Altitude' : 'altitude',
-                 'lat' : 'latitude',
-                 'lon' : 'longitude', 
-                 'alt' : 'altitude' }
-
     _image_axis_name = None
     @property
     def image_axis_name(self):    
@@ -287,9 +274,31 @@
                               0.0 * display_center[1],
                               0.0 * display_center[2])
         elif name == 'longitude':
-            c = self.ds.domain_right_edge[self.axis_id['altitude']]/2.0
+            ri = self.axis_id['altitude']
+            c = (self.ds.domain_right_edge[ri] +
+                 self.ds.domain_left_edge[ri])/2.0
             display_center = [0.0 * display_center[0], 
                               0.0 * display_center[1],
                               0.0 * display_center[2]]
             display_center[self.axis_id['latitude']] = c
         return center, display_center
+
+    def sanitize_width(self, axis, width, depth):
+        name = self.axis_name[axis]
+        if width is not None:
+            width = super(GeographicCoordinateHandler, self).sanitize_width(
+              axis, width, depth)
+        elif name == 'altitude':
+            width = [self.ds.domain_width[self.y_axis['altitude']],
+                     self.ds.domain_width[self.x_axis['altitude']]]
+        elif name == 'latitude':
+            ri = self.axis_id['altitude']
+            # Remember, in spherical coordinates when we cut in theta,
+            # we create a conic section
+            width = [2.0*self.ds.domain_width[ri],
+                     2.0*self.ds.domain_width[ri]]
+        elif name == 'longitude':
+            ri = self.axis_id['altitude']
+            width = [self.ds.domain_width[ri],
+                     2.0*self.ds.domain_width[ri]]
+        return width

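With sanitize_width now implemented on the geographic handler, default plot widths can be asked for directly from the coordinate handler. A minimal sketch of what the defaults above amount to, assuming `ds` is a dataset that has already been loaded with geographic geometry (names taken from the diff):

    alt = ds.coordinates.axis_id['altitude']
    lat = ds.coordinates.axis_id['latitude']
    lon = ds.coordinates.axis_id['longitude']

    # Latitude slice: cutting in theta gives a conic section, so both plot
    # axes default to twice the altitude extent.
    w_lat = ds.coordinates.sanitize_width(lat, None, None)
    # w_lat == [2.0 * ds.domain_width[alt], 2.0 * ds.domain_width[alt]]

    # Longitude slice: one axis spans the altitude extent, the other twice it.
    w_lon = ds.coordinates.sanitize_width(lon, None, None)
    # w_lon == [ds.domain_width[alt], 2.0 * ds.domain_width[alt]]
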
diff -r 35741b799d57ac96134c4f869cfa92e8c4f9e2be -r 9c99f941d4dc3a9f718a0ac9eabbd8cb4da6524d yt/geometry/coordinates/spherical_coordinates.py
--- a/yt/geometry/coordinates/spherical_coordinates.py
+++ b/yt/geometry/coordinates/spherical_coordinates.py
@@ -235,7 +235,7 @@
                      2.0*self.ds.domain_width[ri]]
         elif name == 'phi':
             ri = self.axis_id['r']
-            width = [self.ds.domain_right_edge[ri] / 2.0,
+            width = [self.ds.domain_right_edge[ri],
                      2.0*self.ds.domain_width[ri]]
         return width
 

diff -r 35741b799d57ac96134c4f869cfa92e8c4f9e2be -r 9c99f941d4dc3a9f718a0ac9eabbd8cb4da6524d yt/visualization/plot_window.py
--- a/yt/visualization/plot_window.py
+++ b/yt/visualization/plot_window.py
@@ -86,46 +86,9 @@
     from pyparsing import ParseFatalException
 
 def get_window_parameters(axis, center, width, ds):
-    if ds.geometry in ("cartesian", "spectral_cube"):
-        width = ds.coordinates.sanitize_width(axis, width, None)
-        center, display_center = ds.coordinates.sanitize_center(center, axis)
-    elif ds.geometry in ("polar", "cylindrical"):
-        # Set our default width to be the full domain
-        axis_name = ds.coordinates.axis_name[axis]
-        center, display_center = ds.coordinates.sanitize_center(center, axis)
-        # Note: regardless of axes, these are set up to give consistent plots
-        # when plotted, which is not strictly a "right hand rule" for axes.
-        r_ax, theta_ax, z_ax = (ds.coordinates.axis_id[ax]
-                                for ax in ('r', 'theta', 'z'))
-        if axis_name == "r": # soup can label
-            width = [2.0*np.pi * ds.domain_width.uq, ds.domain_width[z_ax]]
-        elif axis_name == "theta":
-            width = [ds.domain_right_edge[r_ax], ds.domain_width[z_ax]]
-        elif axis_name == "z":
-            width = [2.0*ds.domain_right_edge[r_ax],
-                     2.0*ds.domain_right_edge[r_ax]]
-    elif ds.geometry == "spherical":
-        center, display_center = ds.coordinates.sanitize_center(center, axis)
-        if axis == 0:
-            # latitude slice
-            width = ds.arr([2*np.pi, np.pi], "code_length")
-        elif axis == 1:
-            width = [2.0*ds.domain_right_edge[0], 2.0*ds.domain_right_edge[0]]
-        elif axis == 2:
-            width = [ds.domain_right_edge[0], 2.0*ds.domain_right_edge[0]]
-    elif ds.geometry == "geographic":
-        center, display_center = ds.coordinates.sanitize_center(center, axis)
-        if axis == 0:
-            width = [2.0*(ds.domain_right_edge[2] + ds.surface_height),
-                     2.0*(ds.domain_right_edge[2] + ds.surface_height)]
-        elif axis == 1:
-            width = [(ds.domain_left_edge[2] + ds.domain_width[2] + ds.surface_height),
-                     2.0*(ds.domain_right_edge[2] + ds.surface_height)]
-        elif axis == 2:
-            # latitude slice
-            width = ds.arr([360, 180], "code_length")
-    else:
-        raise NotImplementedError
+    axis_name = ds.coordinates.axis_name[axis]
+    width = ds.coordinates.sanitize_width(axis, width, None)
+    center, display_center = ds.coordinates.sanitize_center(center, axis)
     xax = ds.coordinates.x_axis[axis]
     yax = ds.coordinates.y_axis[axis]
     bounds = (display_center[xax]-width[0] / 2,

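Taken together, this changeset means get_window_parameters no longer special-cases geometries; every geometry answers through its coordinate handler. A hedged sketch of the resulting call pattern, assuming `ds` is any loaded dataset and `axis` an integer axis index:

    width = ds.coordinates.sanitize_width(axis, None, None)   # full-domain default
    center, display_center = ds.coordinates.sanitize_center("c", axis)
    xax = ds.coordinates.x_axis[axis]
    yax = ds.coordinates.y_axis[axis]
    # The window bounds are then built symmetrically around
    # display_center[xax] / display_center[yax] using width[0] and width[1],
    # as in the diff above.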

https://bitbucket.org/yt_analysis/yt/commits/4b8406ab6443/
Changeset:   4b8406ab6443
Branch:      yt
User:        MatthewTurk
Date:        2015-10-30 19:41:08+00:00
Summary:     Merging from upstream
Affected #:  19 files

diff -r 9c99f941d4dc3a9f718a0ac9eabbd8cb4da6524d -r 4b8406ab644394aa877f52fb3e8ea5d4ec9573ed yt/analysis_modules/cosmological_observation/light_ray/light_ray.py
--- a/yt/analysis_modules/cosmological_observation/light_ray/light_ray.py
+++ b/yt/analysis_modules/cosmological_observation/light_ray/light_ray.py
@@ -441,7 +441,8 @@
                     sub_vel = ds.arr([sub_ray['velocity_x'],
                                       sub_ray['velocity_y'],
                                       sub_ray['velocity_z']])
-                    sub_data['velocity_los'].extend((np.rollaxis(sub_vel, 1) *
+                    # line of sight velocity is reversed relative to ray
+                    sub_data['velocity_los'].extend(-1*(np.rollaxis(sub_vel, 1) *
                                                      line_of_sight).sum(axis=1)[asort])
                     del sub_vel
 

diff -r 9c99f941d4dc3a9f718a0ac9eabbd8cb4da6524d -r 4b8406ab644394aa877f52fb3e8ea5d4ec9573ed yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -37,6 +37,8 @@
     FieldInfoContainer, NullFunc
 from yt.fields.fluid_fields import \
     setup_gradient_fields
+from yt.fields.particle_fields import \
+    add_volume_weighted_smoothed_field
 from yt.data_objects.particle_filters import \
     filter_registry
 from yt.data_objects.particle_unions import \
@@ -968,6 +970,56 @@
             validators=[ValidateSpatial()])
         return ("deposit", field_name)
 
+    def add_smoothed_particle_field(self, smooth_field, method="volume_weighted",
+                                    nneighbors=64, kernel_name="cubic"):
+        """Add a new smoothed particle field
+
+        Creates a new smoothed field based on the particle *smooth_field*.
+
+        Parameters
+        ----------
+
+        smooth_field : tuple
+           The field name tuple of the particle field the smoothed field will
+           be created from.  This must be a field name tuple so yt can
+           appropriately infer the correct particle type.
+        method : string, default 'volume_weighted'
+           The particle smoothing method to use. Can only be 'volume_weighted'
+           for now.
+        nneighbors : int, default 64
+            The number of neighbors to examine during the process.
+        kernel_name : string, default 'cubic'
+            This is the name of the smoothing kernel to use. Current supported
+            kernel names include `cubic`, `quartic`, `quintic`, `wendland2`,
+            `wendland4`, and `wendland6`.
+
+        Returns
+        -------
+
+        The field name tuple for the newly created field.
+        """
+        self.index
+        if isinstance(smooth_field, tuple):
+            ptype, smooth_field = smooth_field[0], smooth_field[1]
+        else:
+            raise RuntimeError("smooth_field must be a tuple, received %s" %
+                               smooth_field)
+        if method != "volume_weighted":
+            raise NotImplementedError("method must be 'volume_weighted'")
+
+        coord_name = "particle_position"
+        mass_name = "particle_mass"
+        smoothing_length_name = "smoothing_length"
+        if (ptype, smoothing_length_name) not in self.derived_field_list:
+            raise ValueError("%s not in derived_field_list" %
+                             ((ptype, smoothing_length_name),))
+        density_name = "density"
+        registry = self.field_info
+
+        return add_volume_weighted_smoothed_field(ptype, coord_name, mass_name,
+                   smoothing_length_name, density_name, smooth_field, registry,
+                   nneighbors=nneighbors, kernel_name=kernel_name)[0]
+
     def add_gradient_fields(self, input_field):
         """Add gradient fields.
 

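For reference, the new method is driven exactly as in the test added to yt/fields/tests/test_fields.py below; the snapshot path is the one that test requires:

    from yt import load

    ds = load('GadgetDiskGalaxy/snapshot_200.hdf5')
    fn = ds.add_smoothed_particle_field(('PartType0', 'particle_ones'))
    # fn == ('deposit', 'PartType0_smoothed_particle_ones')
    ad = ds.all_data()
    smoothed = ad[fn]  # volume-weighted smoothed field, evaluated on the index
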
diff -r 9c99f941d4dc3a9f718a0ac9eabbd8cb4da6524d -r 4b8406ab644394aa877f52fb3e8ea5d4ec9573ed yt/fields/tests/test_fields.py
--- a/yt/fields/tests/test_fields.py
+++ b/yt/fields/tests/test_fields.py
@@ -1,11 +1,15 @@
 import numpy as np
 
+from yt import \
+    load
 from yt.testing import \
     fake_random_ds, \
+    assert_almost_equal, \
     assert_equal, \
     assert_array_almost_equal_nulp, \
     assert_array_equal, \
-    assert_raises
+    assert_raises, \
+    requires_file
 from yt.utilities.cosmology import \
     Cosmology
 from yt.frontends.stream.fields import \
@@ -188,6 +192,15 @@
     ret = ad[fn]
     assert_equal(ret.sum(), ad['particle_ones'].sum())
 
+@requires_file('GadgetDiskGalaxy/snapshot_200.hdf5')
+def test_add_smoothed_particle_field():
+    ds = load('GadgetDiskGalaxy/snapshot_200.hdf5')
+    fn = ds.add_smoothed_particle_field(('PartType0', 'particle_ones'))
+    assert_equal(fn, ('deposit', 'PartType0_smoothed_particle_ones'))
+    ad = ds.all_data()
+    ret = ad[fn]
+    assert_almost_equal(ret.sum(), 3824750.912653606)
+
 def test_add_gradient_fields():
     gfields = base_ds.add_gradient_fields(("gas","density"))
     gfields += base_ds.add_gradient_fields(("index", "ones"))

diff -r 9c99f941d4dc3a9f718a0ac9eabbd8cb4da6524d -r 4b8406ab644394aa877f52fb3e8ea5d4ec9573ed yt/frontends/athena/data_structures.py
--- a/yt/frontends/athena/data_structures.py
+++ b/yt/frontends/athena/data_structures.py
@@ -457,7 +457,7 @@
             units_override = {}
         # This is for backwards-compatibility
         already_warned = False
-        for k,v in self.specified_parameters.items():
+        for k, v in list(self.specified_parameters.items()):
             if k.endswith("_unit") and k not in units_override:
                 if not already_warned:
                     mylog.warning("Supplying unit conversions from the parameters dict is deprecated, "+
@@ -489,12 +489,15 @@
     def set_code_units(self):
         super(AthenaDataset, self).set_code_units()
         mag_unit = getattr(self, "magnetic_unit", None)
+        vel_unit = getattr(self, "velocity_unit", None)
         if mag_unit is None:
             self.magnetic_unit = np.sqrt(4*np.pi * self.mass_unit /
                                          (self.time_unit**2 * self.length_unit))
         self.magnetic_unit.convert_to_units("gauss")
-
         self.unit_registry.modify("code_magnetic", self.magnetic_unit)
+        if vel_unit is None:
+            self.velocity_unit = self.length_unit/self.time_unit
+        self.unit_registry.modify("code_velocity", self.velocity_unit)
 
     def _parse_parameter_file(self):
         self._handle = open(self.parameter_filename, "rb")

diff -r 9c99f941d4dc3a9f718a0ac9eabbd8cb4da6524d -r 4b8406ab644394aa877f52fb3e8ea5d4ec9573ed yt/frontends/athena/tests/test_outputs.py
--- a/yt/frontends/athena/tests/test_outputs.py
+++ b/yt/frontends/athena/tests/test_outputs.py
@@ -25,6 +25,8 @@
 from yt.config import ytcfg
 from yt.convenience import load
 
+import yt.units as u
+
 _fields_cloud = ("scalar[0]", "density", "total_energy")
 
 cloud = "ShockCloud/id0/Cloud.0050.vtk"
@@ -64,9 +66,9 @@
 
 sloshing = "MHDSloshing/virgo_low_res.0054.vtk"
 
-uo_sloshing = {"length_unit":(1.0,"Mpc"),
-               "time_unit":(1.0,"Myr"),
-               "mass_unit":(1.0e14,"Msun")}
+uo_sloshing = {"length_unit": (1.0,"Mpc"),
+               "time_unit": (1.0,"Myr"),
+               "mass_unit": (1.0e14,"Msun")}
 
 @requires_file(sloshing)
 def test_nprocs():
@@ -79,6 +81,11 @@
     sp2 = ds2.sphere("c", (100.,"kpc"))
     prj2 = ds1.proj("density",0)
 
+    ds3 = load(sloshing, parameters=uo_sloshing)
+    assert_equal(ds3.length_unit, u.Mpc)
+    assert_equal(ds3.time_unit, u.Myr)
+    assert_equal(ds3.mass_unit, 1e14*u.Msun)
+
     yield assert_equal, sp1.quantities.extrema("pressure"), sp2.quantities.extrema("pressure")
     yield assert_allclose_units, sp1.quantities.total_quantity("pressure"), sp2.quantities.total_quantity("pressure")
     for ax in "xyz":

diff -r 9c99f941d4dc3a9f718a0ac9eabbd8cb4da6524d -r 4b8406ab644394aa877f52fb3e8ea5d4ec9573ed yt/frontends/gdf/data_structures.py
--- a/yt/frontends/gdf/data_structures.py
+++ b/yt/frontends/gdf/data_structures.py
@@ -200,7 +200,7 @@
                 else:
                     current_field_units = \
                         just_one(current_field.attrs['field_units'])
-                self.field_units[field_name] = current_field_units
+                self.field_units[field_name] = current_field_units.decode("utf8")
             else:
                 self.field_units[field_name] = ""
 

diff -r 9c99f941d4dc3a9f718a0ac9eabbd8cb4da6524d -r 4b8406ab644394aa877f52fb3e8ea5d4ec9573ed yt/frontends/tipsy/data_structures.py
--- a/yt/frontends/tipsy/data_structures.py
+++ b/yt/frontends/tipsy/data_structures.py
@@ -23,6 +23,7 @@
 
 from yt.frontends.sph.data_structures import \
     ParticleDataset
+from yt.funcs import deprecate
 from yt.geometry.particle_geometry_handler import \
     ParticleIndex
 from yt.data_objects.static_output import \
@@ -167,12 +168,14 @@
         periodic = self.parameters.get('bPeriodic', True)
         period = self.parameters.get('dPeriod', None)
         self.periodicity = (periodic, periodic, periodic)
-        self.comoving = self.parameters.get('bComove', False)
-        if self.comoving and period is None:
+        self.cosmological_simulation = float(self.parameters.get(
+            'bComove', self._cosmology_parameters is not None))
+        if self.cosmological_simulation and period is None:
             period = 1.0
         if self.bounding_box is None:
             if periodic and period is not None:
-                # If we are periodic, that sets our domain width to either 1 or dPeriod.
+                # If we are periodic, that sets our domain width to
+                # either 1 or dPeriod.
                 self.domain_left_edge = np.zeros(3, "float64") - 0.5*period
                 self.domain_right_edge = np.zeros(3, "float64") + 0.5*period
             else:
@@ -187,19 +190,22 @@
 
         # If the cosmology parameters dictionary got set when data is
         # loaded, we can assume it's a cosmological data set
-        if self.comoving or self._cosmology_parameters is not None:
+        if self.cosmological_simulation == 1.0:
             cosm = self._cosmology_parameters or {}
-            self.scale_factor = hvals["time"]#In comoving simulations, time stores the scale factor a
-            self.cosmological_simulation = 1
-            dcosm = dict(current_redshift=(1.0/self.scale_factor)-1.0,
-                         omega_lambda=self.parameters.get('dLambda', cosm.get('omega_lambda',0.0)),
-                         omega_matter=self.parameters.get('dOmega0', cosm.get('omega_matter',0.0)),
-                         hubble_constant=self.parameters.get('dHubble0', cosm.get('hubble_constant',1.0)))
+            # In comoving simulations, time stores the scale factor a
+            self.scale_factor = hvals["time"]
+            dcosm = dict(
+                current_redshift=(1.0/self.scale_factor)-1.0,
+                omega_lambda=self.parameters.get(
+                    'dLambda', cosm.get('omega_lambda',0.0)),
+                omega_matter=self.parameters.get(
+                    'dOmega0', cosm.get('omega_matter',0.0)),
+                hubble_constant=self.parameters.get(
+                    'dHubble0', cosm.get('hubble_constant',1.0)))
             for param in dcosm.keys():
                 pval = dcosm[param]
                 setattr(self, param, pval)
         else:
-            self.cosmological_simulation = 0.0
             kpc_unit = self.parameters.get('dKpcUnit', 1.0)
             self._unit_base['cm'] = 1.0 / (kpc_unit * cm_per_kpc)
 
@@ -218,51 +224,57 @@
         super(TipsyDataset, self)._set_derived_attrs()
 
     def _set_code_unit_attributes(self):
+        # First try to set units based on parameter file
         if self.cosmological_simulation:
             mu = self.parameters.get('dMsolUnit', 1.)
+            self.mass_unit = self.quan(mu, 'Msun')
             lu = self.parameters.get('dKpcUnit', 1000.)
             # In cosmological runs, lengths are stored as length*scale_factor
             self.length_unit = self.quan(lu, 'kpc')*self.scale_factor
-            self.mass_unit = self.quan(mu, 'Msun')
-            density_unit = self.mass_unit/ (self.length_unit/self.scale_factor)**3
-
-            # If self.comoving is set, we know this is a gasoline data set,
-            # and we do the conversion on the hubble constant.
-            if self.comoving:
-                # Gasoline's hubble constant, dHubble0, is stored units of
-                # proper code time.
-                self.hubble_constant *= np.sqrt(G.in_units(
-                    'kpc**3*Msun**-1*s**-2') * density_unit).value / (
-                    3.2407793e-18)
-            cosmo = Cosmology(self.hubble_constant,
-                              self.omega_matter, self.omega_lambda)
-            self.current_time = cosmo.hubble_time(self.current_redshift)
+            density_unit = self.mass_unit / (self.length_unit / self.scale_factor)**3
+            if 'dHubble0' in self.parameters:
+                # Gasoline's internal hubble constant, dHubble0, is stored in
+                # units of proper code time
+                self.hubble_constant *= np.sqrt(G * density_unit)
+                # Finally, we scale the hubble constant by 100 km/s/Mpc
+                self.hubble_constant /= self.quan(100, 'km/s/Mpc')
+                # If we leave it as a YTQuantity, the cosmology object
+                # used below will add units back on.
+                self.hubble_constant = self.hubble_constant.in_units("").d
         else:
             mu = self.parameters.get('dMsolUnit', 1.0)
             self.mass_unit = self.quan(mu, 'Msun')
             lu = self.parameters.get('dKpcUnit', 1.0)
             self.length_unit = self.quan(lu, 'kpc')
-            density_unit = self.mass_unit / self.length_unit**3
-        self.time_unit = 1.0 / np.sqrt(G * density_unit)
 
         # If unit base is defined by the user, override all relevant units
         if self._unit_base is not None:
-            length = self._unit_base.get('length', self.length_unit)
-            length = self.quan(*length) if isinstance(length, tuple) else self.quan(length)
-            self.length_unit = length
+            for my_unit in ["length", "mass", "time"]:
+                if my_unit in self._unit_base:
+                    my_val = self._unit_base[my_unit]
+                    my_val = \
+                      self.quan(*my_val) if isinstance(my_val, tuple) \
+                      else self.quan(my_val)
+                    setattr(self, "%s_unit" % my_unit, my_val)
 
-            mass = self._unit_base.get('mass', self.mass_unit)
-            mass = self.quan(*mass) if isinstance(mass, tuple) else self.quan(mass)
-            self.mass_unit = mass
+        # Finally, set the dependent units
+        if self.cosmological_simulation:
+            cosmo = Cosmology(self.hubble_constant,
+                              self.omega_matter, self.omega_lambda)
+            self.current_time = cosmo.hubble_time(self.current_redshift)
+            # mass units are rho_crit(z=0) * domain volume
+            mu = cosmo.critical_density(0.0) * \
+              (1 + self.current_redshift)**3 * self.length_unit**3
+            self.mass_unit = self.quan(mu.in_units("Msun"), "Msun")
+            density_unit = self.mass_unit / (self.length_unit / self.scale_factor)**3
+            # need to do this again because we've modified the hubble constant
+            self.unit_registry.modify("h", self.hubble_constant)
+        else:
+            density_unit = self.mass_unit / self.length_unit**3
 
-            density_unit = self.mass_unit / self.length_unit**3
+        if not hasattr(self, "time_unit"):
             self.time_unit = 1.0 / np.sqrt(G * density_unit)
 
-            time = self._unit_base.get('time', self.time_unit)
-            time = self.quan(*time) if isinstance(time, tuple) else self.quan(time)
-            self.time_unit = time
-
-
     @staticmethod
     def _validate_header(filename):
         '''
@@ -304,3 +316,8 @@
     @classmethod
     def _is_valid(self, *args, **kwargs):
         return TipsyDataset._validate_header(args[0])[0]
+
+    @property
+    @deprecate(replacement='cosmological_simulation')
+    def comoving(self):
+        return self.cosmological_simulation == 1.0

diff -r 9c99f941d4dc3a9f718a0ac9eabbd8cb4da6524d -r 4b8406ab644394aa877f52fb3e8ea5d4ec9573ed yt/frontends/tipsy/tests/test_outputs.py
--- a/yt/frontends/tipsy/tests/test_outputs.py
+++ b/yt/frontends/tipsy/tests/test_outputs.py
@@ -42,7 +42,7 @@
                                 hubble_constant = 0.702)
     kwargs = dict(field_dtypes = {"Coordinates": "d"},
                   cosmology_parameters = cosmology_parameters,
-                  unit_base = {'length': (1.0/60.0, "Mpccm/h")},
+                  unit_base = {'length': (60.0, "Mpccm/h")},
                   n_ref = 64)
     ds = data_dir_load(pkdgrav, TipsyDataset, (), kwargs)
     yield assert_equal, str(ds), "halo1e11_run1.00400"
@@ -73,7 +73,7 @@
                                 omega_matter = 0.272,
                                 hubble_constant = 0.702)
     kwargs = dict(cosmology_parameters = cosmology_parameters,
-                  unit_base = {'length': (1.0/60.0, "Mpccm/h")},
+                  unit_base = {'length': (60.0, "Mpccm/h")},
                   n_ref = 64)
     ds = data_dir_load(gasoline_dmonly, TipsyDataset, (), kwargs)
     yield assert_equal, str(ds), "agora_1e11.00400"

diff -r 9c99f941d4dc3a9f718a0ac9eabbd8cb4da6524d -r 4b8406ab644394aa877f52fb3e8ea5d4ec9573ed yt/funcs.py
--- a/yt/funcs.py
+++ b/yt/funcs.py
@@ -221,24 +221,29 @@
     if ytcfg.getint("yt", "__topcomm_parallel_rank") > 0: return
     mylog.info(*args)
 
-def deprecate(func):
-    """
-    This decorator issues a deprecation warning.
+def deprecate(replacement):
+    def real_deprecate(func):
+        """
+        This decorator issues a deprecation warning.
 
-    This can be used like so:
+        This can be used like so:
 
-    .. code-block:: python
+        .. code-block:: python
 
-       @deprecate
-       def some_really_old_function(...):
+        @deprecate("new_function")
+        def some_really_old_function(...):
 
-    """
-    @wraps(func)
-    def run_func(*args, **kwargs):
-        warnings.warn("%s has been deprecated and may be removed without notice!" \
-                % func.__name__, DeprecationWarning, stacklevel=2)
-        func(*args, **kwargs)
-    return run_func
+        """
+        @wraps(func)
+        def run_func(*args, **kwargs):
+            message = "%s has been deprecated and may be removed without notice!"
+            if replacement is not None:
+                message += " Use %s instead." % replacement
+            warnings.warn(message % func.__name__, DeprecationWarning,
+                          stacklevel=2)
+            func(*args, **kwargs)
+        return run_func
+    return real_deprecate
 
 def pdb_run(func):
     """

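Call sites therefore change from a bare @deprecate to @deprecate(replacement=...), as the tipsy frontend above now does. A small self-contained sketch of the new usage (the helper names here are made up for illustration):

    import warnings
    from yt.funcs import deprecate

    def new_helper(x):
        return 2 * x

    @deprecate(replacement="new_helper")
    def old_helper(x):
        return new_helper(x)

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        old_helper(3)
    # caught[0].message reads:
    # "old_helper has been deprecated and may be removed without notice!
    #  Use new_helper instead."
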
diff -r 9c99f941d4dc3a9f718a0ac9eabbd8cb4da6524d -r 4b8406ab644394aa877f52fb3e8ea5d4ec9573ed yt/geometry/grid_container.pxd
--- a/yt/geometry/grid_container.pxd
+++ b/yt/geometry/grid_container.pxd
@@ -21,7 +21,8 @@
 from libc.math cimport nearbyint, rint
 from yt.geometry.selection_routines cimport SelectorObject, _ensure_code
 from yt.utilities.lib.fp_utils cimport iclip
-from grid_visitors cimport GridTreeNode, GridVisitorData, grid_visitor_function
+from grid_visitors cimport GridTreeNode, GridVisitorData, \
+    grid_visitor_function, GridTreeNodePadded
 cimport grid_visitors 
 from yt.utilities.lib.bitarray cimport bitarray
 

diff -r 9c99f941d4dc3a9f718a0ac9eabbd8cb4da6524d -r 4b8406ab644394aa877f52fb3e8ea5d4ec9573ed yt/geometry/grid_container.pyx
--- a/yt/geometry/grid_container.pyx
+++ b/yt/geometry/grid_container.pyx
@@ -124,6 +124,31 @@
             children.append(childs)
         return indices, levels, nchild, children
 
+    @property
+    def grid_arrays(self):
+        cdef GridTreeNodePadded[:] grids
+        grids = <GridTreeNodePadded[:self.num_grids]> \
+            (<GridTreeNodePadded*> self.grids)
+        grids_basic = np.asarray(grids)
+        # This next bit is necessary because as of 0.23.4, Cython can't make
+        # nested dtypes automatically where you have a property that is
+        # something like float[3].  So we unroll all of those, then re-roll
+        # them in a new dtype.
+        dtn = {}
+        dt = grids_basic.dtype
+        for name in dt.names:
+            d, o = dt.fields[name]
+            n = name
+            if name.endswith("_x"):
+                f = (d.char, 3)
+                n = name[:-2]
+            elif name.endswith("_y") or name.endswith("_z"):
+                continue
+            else:
+                f = (d.char, 1)
+            dtn[n] = (f, o)
+        return grids_basic.view(dtype=np.dtype(dtn))
+
     cdef void setup_data(self, GridVisitorData *data):
         # Being handed a new GVD object, we initialize it to sane defaults.
         data.index = 0

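The grid_arrays property exposes the grid tree as a structured NumPy array; the test added to yt/geometry/tests/test_grid_container.py further down shows the intended field mapping. A minimal sketch, assuming `ds` is a grid-based dataset that has already been loaded:

    tree = ds.index._get_grid_tree()
    grids = tree.grid_arrays
    grids['left_edge']    # matches ds.index.grid_left_edge
    grids['right_edge']   # matches ds.index.grid_right_edge
    grids['dims']         # matches ds.index.grid_dimensions
    grids['level']        # matches ds.index.grid_levels[:, 0]
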
diff -r 9c99f941d4dc3a9f718a0ac9eabbd8cb4da6524d -r 4b8406ab644394aa877f52fb3e8ea5d4ec9573ed yt/geometry/grid_visitors.pxd
--- a/yt/geometry/grid_visitors.pxd
+++ b/yt/geometry/grid_visitors.pxd
@@ -27,6 +27,27 @@
     int dims[3]
     np.float64_t dds[3]
 
+cdef struct GridTreeNodePadded:
+    int num_children
+    int level
+    long int index
+    double left_edge_x
+    double left_edge_y
+    double left_edge_z
+    double right_edge_x
+    double right_edge_y
+    double right_edge_z
+    long int children_pointers
+    long int start_index_x
+    long int start_index_y
+    long int start_index_z
+    int dims_x
+    int dims_y
+    int dims_z
+    double dds_x
+    double dds_y
+    double dds_z
+
 cdef struct GridVisitorData:
     GridTreeNode *grid
     np.uint64_t index

diff -r 9c99f941d4dc3a9f718a0ac9eabbd8cb4da6524d -r 4b8406ab644394aa877f52fb3e8ea5d4ec9573ed yt/geometry/oct_container.pyx
--- a/yt/geometry/oct_container.pyx
+++ b/yt/geometry/oct_container.pyx
@@ -20,6 +20,7 @@
 from selection_routines cimport SelectorObject
 from libc.math cimport floor
 cimport selection_routines
+from yt.geometry.oct_visitors cimport OctPadded
 
 ORDER_MAX = 20
 _ORDER_MAX = ORDER_MAX
@@ -109,6 +110,19 @@
                 for k in range(self.nn[2]):
                     self.root_mesh[i][j][k] = NULL
 
+    @property
+    def oct_arrays(self):
+        cdef OctAllocationContainer *cur = self.cont
+        cdef Oct *this
+        cdef int i
+        cdef OctPadded[:] mm
+        rv = []
+        while cur != NULL:
+            mm = <OctPadded[:cur.n_assigned]> (<OctPadded*> cur.my_octs)
+            rv.append(np.asarray(mm))
+            cur = cur.next
+        return rv
+
     @classmethod
     def load_octree(cls, header):
         cdef np.ndarray[np.uint8_t, ndim=1] ref_mask

diff -r 9c99f941d4dc3a9f718a0ac9eabbd8cb4da6524d -r 4b8406ab644394aa877f52fb3e8ea5d4ec9573ed yt/geometry/oct_visitors.pxd
--- a/yt/geometry/oct_visitors.pxd
+++ b/yt/geometry/oct_visitors.pxd
@@ -24,6 +24,12 @@
     np.int64_t domain       # (opt) addl int index
     Oct **children          # Up to 8 long
 
+cdef struct OctPadded:
+    np.int64_t file_ind
+    np.int64_t domain_ind
+    np.int64_t domain
+    np.int64_t padding
+
 cdef struct OctVisitorData:
     np.uint64_t index
     np.uint64_t last

diff -r 9c99f941d4dc3a9f718a0ac9eabbd8cb4da6524d -r 4b8406ab644394aa877f52fb3e8ea5d4ec9573ed yt/geometry/tests/test_grid_container.py
--- a/yt/geometry/tests/test_grid_container.py
+++ b/yt/geometry/tests/test_grid_container.py
@@ -116,3 +116,12 @@
     # Test if find_points fails properly for non equal indices' array sizes
     yield assert_raises, AssertionError, test_ds.index._find_points, \
         [0], 1.0, [2, 3]
+
+def test_grid_arrays_view():
+    ds = setup_test_ds()
+    tree = ds.index._get_grid_tree()
+    grid_arr = tree.grid_arrays
+    yield assert_equal, grid_arr['left_edge'], ds.index.grid_left_edge
+    yield assert_equal, grid_arr['right_edge'], ds.index.grid_right_edge
+    yield assert_equal, grid_arr['dims'], ds.index.grid_dimensions
+    yield assert_equal, grid_arr['level'], ds.index.grid_levels[:,0]

diff -r 9c99f941d4dc3a9f718a0ac9eabbd8cb4da6524d -r 4b8406ab644394aa877f52fb3e8ea5d4ec9573ed yt/units/tests/test_units.py
--- a/yt/units/tests/test_units.py
+++ b/yt/units/tests/test_units.py
@@ -454,7 +454,7 @@
     assert_raises(InvalidUnitOperation, operator.mul, u1, u2)
     assert_raises(InvalidUnitOperation, operator.truediv, u1, u2)
 
-def test_comoving_and_code_unit_labels():
+def test_latex_repr():
     ds = fake_random_ds(64, nprocs=1)
 
     # create a fake comoving unit
@@ -471,3 +471,6 @@
     test_unit = Unit('code_mass/code_length**3', registry=ds.unit_registry)
     assert_equal(test_unit.latex_repr,
                  '\\frac{\\rm{code\\ mass}}{\\rm{code\\ length}^{3}}')
+
+    test_unit = Unit('cm**-3', base_value=1.0, registry=ds.unit_registry)
+    assert_equal(test_unit.latex_repr, '\\frac{1}{\\rm{cm}^{3}}')

diff -r 9c99f941d4dc3a9f718a0ac9eabbd8cb4da6524d -r 4b8406ab644394aa877f52fb3e8ea5d4ec9573ed yt/units/unit_object.py
--- a/yt/units/unit_object.py
+++ b/yt/units/unit_object.py
@@ -104,7 +104,10 @@
 def get_latex_representation(expr, registry):
     symbol_table = {}
     for ex in expr.free_symbols:
-        symbol_table[ex] = registry.lut[str(ex)][3]
+        try:
+            symbol_table[ex] = registry.lut[str(ex)][3]
+        except:
+            symbol_table[ex] = r"\rm{" + str(ex).replace('_', '\ ') + "}"
     latex_repr = latex(expr, symbol_names=symbol_table, mul_symbol="dot",
                        fold_frac_powers=True, fold_short_frac=True)
     if latex_repr == '1':
@@ -214,7 +217,7 @@
             if dimensions is not None:
                 validate_dimensions(dimensions)
             if latex_repr is None:
-                latex_repr = r"\rm{" + str(unit_expr).replace('_', '\ ') + "}"
+                latex_repr = get_latex_representation(unit_expr, registry)
         else:
             # lookup the unit symbols
             unit_data = _get_unit_data_from_expr(unit_expr, registry.lut)

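With this change, units constructed with an explicit base_value get a proper LaTeX rendering (a fraction for negative powers) rather than a raw \rm{...} wrapper, and unknown symbols in get_latex_representation fall back gracefully. The new assertion in yt/units/tests/test_units.py above shows the expected output; a short sketch:

    from yt.testing import fake_random_ds
    from yt.units.unit_object import Unit

    ds = fake_random_ds(64, nprocs=1)
    u = Unit('cm**-3', base_value=1.0, registry=ds.unit_registry)
    # u.latex_repr == '\\frac{1}{\\rm{cm}^{3}}'
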
diff -r 9c99f941d4dc3a9f718a0ac9eabbd8cb4da6524d -r 4b8406ab644394aa877f52fb3e8ea5d4ec9573ed yt/visualization/profile_plotter.py
--- a/yt/visualization/profile_plotter.py
+++ b/yt/visualization/profile_plotter.py
@@ -709,6 +709,8 @@
     plot_title = None
     _plot_valid = False
     _plot_type = 'Phase'
+    _xlim = (None, None)
+    _ylim = (None, None)
 
     def __init__(self, data_source, x_field, y_field, z_fields,
                  weight_field="cell_mass", x_bins=128, y_bins=128,
@@ -813,6 +815,8 @@
             draw_colorbar = True
             draw_axes = True
             zlim = (None, None)
+            xlim = self._xlim
+            ylim = self._ylim
             if f in self.plots:
                 draw_colorbar = self.plots[f]._draw_colorbar
                 draw_axes = self.plots[f]._draw_axes
@@ -868,6 +872,9 @@
             self.plots[f].axes.yaxis.set_label_text(y_title)
             self.plots[f].cax.yaxis.set_label_text(z_title)
 
+            self.plots[f].axes.set_xlim(xlim)
+            self.plots[f].axes.set_ylim(ylim)
+
             if f in self._plot_text:
                 self.plots[f].axes.text(self._text_xpos[f], self._text_ypos[f],
                                         self._plot_text[f],
@@ -1173,6 +1180,7 @@
             **additional_kwargs)
         for field in zunits:
             self.profile.set_field_unit(field, zunits[field])
+        self._xlim = (xmin, xmax)
         return self
 
     @invalidate_plot
@@ -1237,6 +1245,7 @@
             **additional_kwargs)
         for field in zunits:
             self.profile.set_field_unit(field, zunits[field])
+        self._ylim = (ymin, ymax)
         return self
 
 

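Because _xlim and _ylim are now cached, limits set via set_xlim/set_ylim survive a re-setup of the plots, which is what the check added to the test file below verifies. A minimal sketch; the fake dataset construction here is illustrative (the fields kwarg is an assumption, chosen so 'temperature' exists):

    from yt.testing import fake_random_ds
    from yt.visualization.profile_plotter import PhasePlot

    ds = fake_random_ds(16, fields=("density", "temperature"),
                        units=("g/cm**3", "K"))
    pp = PhasePlot(ds.all_data(), 'density', 'temperature', 'cell_mass')
    pp.set_xlim(0.3, 0.8)
    pp.set_ylim(0.4, 0.6)
    pp._setup_plots()
    # pp.plots['cell_mass'].axes.get_xlim() is now approximately (0.3, 0.8)
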
diff -r 9c99f941d4dc3a9f718a0ac9eabbd8cb4da6524d -r 4b8406ab644394aa877f52fb3e8ea5d4ec9573ed yt/visualization/tests/test_profile_plots.py
--- a/yt/visualization/tests/test_profile_plots.py
+++ b/yt/visualization/tests/test_profile_plots.py
@@ -19,7 +19,9 @@
 from yt.data_objects.profiles import create_profile
 from yt.extern.parameterized import\
     parameterized, param
-from yt.testing import fake_random_ds
+from yt.testing import \
+    fake_random_ds, \
+    assert_array_almost_equal
 from yt.visualization.profile_plotter import \
     ProfilePlot, PhasePlot
 from yt.visualization.tests.test_plotwindow import \
@@ -60,6 +62,15 @@
                 p2d = create_profile(reg, [x_field, y_field], z_field,
                                      n_bins=[16, 16])
                 phases.append(PhasePlot.from_profile(p2d))
+        pp = PhasePlot(test_ds.all_data(), 'density', 'temperature', 'cell_mass')
+        pp.set_xlim(0.3, 0.8)
+        pp.set_ylim(0.4, 0.6)
+        pp._setup_plots()
+        xlim = pp.plots['cell_mass'].axes.get_xlim()
+        ylim = pp.plots['cell_mass'].axes.get_ylim()
+        assert_array_almost_equal(xlim, (0.3, 0.8))
+        assert_array_almost_equal(ylim, (0.4, 0.6))
+        phases.append(pp)
         cls.profiles = profiles
         cls.phases = phases
         cls.ds = test_ds


https://bitbucket.org/yt_analysis/yt/commits/e634ecce82ae/
Changeset:   e634ecce82ae
Branch:      yt
User:        MatthewTurk
Date:        2015-10-30 19:43:30+00:00
Summary:     Use "sane" default for surface_height.
Affected #:  1 file

diff -r 4b8406ab644394aa877f52fb3e8ea5d4ec9573ed -r e634ecce82ae5987fbdf6de76374cd66fca6ff2f yt/geometry/coordinates/geographic_coordinates.py
--- a/yt/geometry/coordinates/geographic_coordinates.py
+++ b/yt/geometry/coordinates/geographic_coordinates.py
@@ -196,11 +196,15 @@
         raise NotImplementedError
 
     def convert_to_cartesian(self, coord):
+        if hasattr(data.ds, "surface_height"):
+            surface_height = data.ds.surface_height
+        else:
+            surface_height = data.ds.quan(0.0, "code_length")
         if isinstance(coord, np.ndarray) and len(coord.shape) > 1:
             alt = self.axis_id['altitude']
             lon = self.axis_id['longitude']
             lat = self.axis_id['latitude']
-            r = coord[:,alt] + self.ds.surface_height
+            r = coord[:,alt] + surface_height
             theta = coord[:,lon] * np.pi/180
             phi = coord[:,lat] * np.pi/180
             nc = np.zeros_like(coord)
@@ -212,7 +216,7 @@
             a, b, c = coord
             theta = b * np.pi/180
             phi = a * np.pi/180
-            r = self.ds.surface_height + c
+            r = surface_height + c
             nc = (np.cos(phi) * np.sin(theta)*r,
                   np.sin(phi) * np.sin(theta)*r,
                   np.cos(theta) * r)


https://bitbucket.org/yt_analysis/yt/commits/46f676ec60e6/
Changeset:   46f676ec60e6
Branch:      yt
User:        MatthewTurk
Date:        2015-10-30 19:49:40+00:00
Summary:     Fixing typo
Affected #:  1 file

diff -r e634ecce82ae5987fbdf6de76374cd66fca6ff2f -r 46f676ec60e668ca1ddb274f2c29a0ef664fbc7e yt/geometry/coordinates/geographic_coordinates.py
--- a/yt/geometry/coordinates/geographic_coordinates.py
+++ b/yt/geometry/coordinates/geographic_coordinates.py
@@ -196,10 +196,10 @@
         raise NotImplementedError
 
     def convert_to_cartesian(self, coord):
-        if hasattr(data.ds, "surface_height"):
-            surface_height = data.ds.surface_height
+        if hasattr(self.ds, "surface_height"):
+            surface_height = self.ds.surface_height
         else:
-            surface_height = data.ds.quan(0.0, "code_length")
+            surface_height = self.ds.quan(0.0, "code_length")
         if isinstance(coord, np.ndarray) and len(coord.shape) > 1:
             alt = self.axis_id['altitude']
             lon = self.axis_id['longitude']


https://bitbucket.org/yt_analysis/yt/commits/ce128db22c39/
Changeset:   ce128db22c39
Branch:      yt
User:        atmyers
Date:        2015-11-09 19:17:44+00:00
Summary:     Merged in MatthewTurk/yt (pull request #1826)

Defer to coordinate handlers for width.
Affected #:  3 files

diff -r 452d6d6c36e658d9c2df5753619b07824ca2f5bc -r ce128db22c39c0e0e7ecaa0d1e1d69a2e91ea315 yt/geometry/coordinates/geographic_coordinates.py
--- a/yt/geometry/coordinates/geographic_coordinates.py
+++ b/yt/geometry/coordinates/geographic_coordinates.py
@@ -196,11 +196,15 @@
         raise NotImplementedError
 
     def convert_to_cartesian(self, coord):
+        if hasattr(self.ds, "surface_height"):
+            surface_height = self.ds.surface_height
+        else:
+            surface_height = self.ds.quan(0.0, "code_length")
         if isinstance(coord, np.ndarray) and len(coord.shape) > 1:
             alt = self.axis_id['altitude']
             lon = self.axis_id['longitude']
             lat = self.axis_id['latitude']
-            r = coord[:,alt] + self.ds.surface_height
+            r = coord[:,alt] + surface_height
             theta = coord[:,lon] * np.pi/180
             phi = coord[:,lat] * np.pi/180
             nc = np.zeros_like(coord)
@@ -212,7 +216,7 @@
             a, b, c = coord
             theta = b * np.pi/180
             phi = a * np.pi/180
-            r = self.ds.surface_height + c
+            r = surface_height + c
             nc = (np.cos(phi) * np.sin(theta)*r,
                   np.sin(phi) * np.sin(theta)*r,
                   np.cos(theta) * r)
@@ -230,19 +234,6 @@
     def convert_from_spherical(self, coord):
         raise NotImplementedError
 
-    # Despite being mutables, we uses these here to be clear about how these
-    # are generated and to ensure that they are not re-generated unnecessarily
-    axis_name = { 0  : 'latitude',  1  : 'longitude',  2  : 'altitude',
-                 'latitude' : 'latitude',
-                 'longitude' : 'longitude', 
-                 'altitude' : 'altitude',
-                 'Latitude' : 'latitude',
-                 'Longitude' : 'longitude', 
-                 'Altitude' : 'altitude',
-                 'lat' : 'latitude',
-                 'lon' : 'longitude', 
-                 'alt' : 'altitude' }
-
     _image_axis_name = None
     @property
     def image_axis_name(self):    
@@ -287,9 +278,31 @@
                               0.0 * display_center[1],
                               0.0 * display_center[2])
         elif name == 'longitude':
-            c = self.ds.domain_right_edge[self.axis_id['altitude']]/2.0
+            ri = self.axis_id['altitude']
+            c = (self.ds.domain_right_edge[ri] +
+                 self.ds.domain_left_edge[ri])/2.0
             display_center = [0.0 * display_center[0], 
                               0.0 * display_center[1],
                               0.0 * display_center[2]]
             display_center[self.axis_id['latitude']] = c
         return center, display_center
+
+    def sanitize_width(self, axis, width, depth):
+        name = self.axis_name[axis]
+        if width is not None:
+            width = super(GeographicCoordinateHandler, self).sanitize_width(
+              axis, width, depth)
+        elif name == 'altitude':
+            width = [self.ds.domain_width[self.y_axis['altitude']],
+                     self.ds.domain_width[self.x_axis['altitude']]]
+        elif name == 'latitude':
+            ri = self.axis_id['altitude']
+            # Remember, in spherical coordinates when we cut in theta,
+            # we create a conic section
+            width = [2.0*self.ds.domain_width[ri],
+                     2.0*self.ds.domain_width[ri]]
+        elif name == 'longitude':
+            ri = self.axis_id['altitude']
+            width = [self.ds.domain_width[ri],
+                     2.0*self.ds.domain_width[ri]]
+        return width

diff -r 452d6d6c36e658d9c2df5753619b07824ca2f5bc -r ce128db22c39c0e0e7ecaa0d1e1d69a2e91ea315 yt/geometry/coordinates/spherical_coordinates.py
--- a/yt/geometry/coordinates/spherical_coordinates.py
+++ b/yt/geometry/coordinates/spherical_coordinates.py
@@ -235,7 +235,7 @@
                      2.0*self.ds.domain_width[ri]]
         elif name == 'phi':
             ri = self.axis_id['r']
-            width = [self.ds.domain_right_edge[ri] / 2.0,
+            width = [self.ds.domain_right_edge[ri],
                      2.0*self.ds.domain_width[ri]]
         return width
 

diff -r 452d6d6c36e658d9c2df5753619b07824ca2f5bc -r ce128db22c39c0e0e7ecaa0d1e1d69a2e91ea315 yt/visualization/plot_window.py
--- a/yt/visualization/plot_window.py
+++ b/yt/visualization/plot_window.py
@@ -88,46 +88,9 @@
     from pyparsing import ParseFatalException
 
 def get_window_parameters(axis, center, width, ds):
-    if ds.geometry in ("cartesian", "spectral_cube"):
-        width = ds.coordinates.sanitize_width(axis, width, None)
-        center, display_center = ds.coordinates.sanitize_center(center, axis)
-    elif ds.geometry in ("polar", "cylindrical"):
-        # Set our default width to be the full domain
-        axis_name = ds.coordinates.axis_name[axis]
-        center, display_center = ds.coordinates.sanitize_center(center, axis)
-        # Note: regardless of axes, these are set up to give consistent plots
-        # when plotted, which is not strictly a "right hand rule" for axes.
-        r_ax, theta_ax, z_ax = (ds.coordinates.axis_id[ax]
-                                for ax in ('r', 'theta', 'z'))
-        if axis_name == "r": # soup can label
-            width = [2.0*np.pi * ds.domain_width.uq, ds.domain_width[z_ax]]
-        elif axis_name == "theta":
-            width = [ds.domain_right_edge[r_ax], ds.domain_width[z_ax]]
-        elif axis_name == "z":
-            width = [2.0*ds.domain_right_edge[r_ax],
-                     2.0*ds.domain_right_edge[r_ax]]
-    elif ds.geometry == "spherical":
-        center, display_center = ds.coordinates.sanitize_center(center, axis)
-        if axis == 0:
-            # latitude slice
-            width = ds.arr([2*np.pi, np.pi], "code_length")
-        elif axis == 1:
-            width = [2.0*ds.domain_right_edge[0], 2.0*ds.domain_right_edge[0]]
-        elif axis == 2:
-            width = [ds.domain_right_edge[0], 2.0*ds.domain_right_edge[0]]
-    elif ds.geometry == "geographic":
-        center, display_center = ds.coordinates.sanitize_center(center, axis)
-        if axis == 0:
-            width = [2.0*(ds.domain_right_edge[2] + ds.surface_height),
-                     2.0*(ds.domain_right_edge[2] + ds.surface_height)]
-        elif axis == 1:
-            width = [(ds.domain_left_edge[2] + ds.domain_width[2] + ds.surface_height),
-                     2.0*(ds.domain_right_edge[2] + ds.surface_height)]
-        elif axis == 2:
-            # latitude slice
-            width = ds.arr([360, 180], "code_length")
-    else:
-        raise NotImplementedError
+    axis_name = ds.coordinates.axis_name[axis]
+    width = ds.coordinates.sanitize_width(axis, width, None)
+    center, display_center = ds.coordinates.sanitize_center(center, axis)
     xax = ds.coordinates.x_axis[axis]
     yax = ds.coordinates.y_axis[axis]
     bounds = (display_center[xax]-width[0] / 2,

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving it
because you have the notification service enabled and this email address
is a recipient.
