[yt-svn] commit/yt: 3 new changesets
commits-noreply at bitbucket.org
commits-noreply at bitbucket.org
Wed Nov 5 13:24:35 PST 2014
3 new commits in yt:
https://bitbucket.org/yt_analysis/yt/commits/ef5fb85587b0/
Changeset: ef5fb85587b0
Branch: yt
User: drudd
Date: 2014-11-03 23:45:43+00:00
Summary: Update ProfilePlot to derive x_log and y_log from .from_profiles or constructor
Affected #: 1 file
diff -r 1e0faa5cb0fc8a94a358a7d90fd672176db42eeb -r ef5fb85587b08d3d79dc42c3686115500afa206c yt/visualization/profile_plotter.py
--- a/yt/visualization/profile_plotter.py
+++ b/yt/visualization/profile_plotter.py
@@ -150,6 +150,15 @@
A dictionary or list of dictionaries containing plot keyword
arguments. For example, dict(color="red", linestyle=":").
Default: None.
+ x_log : bool
+ If not None, whether the x-axis should be plotted with a logarithmic
+ scaling.
+ Default: None
+ y_log : dict
+ A dictionary containing field:boolean pairs, setting the logarithmic
+ property for that field. May be overridden after instantiation using
+ set_log.
+ Default: None
Examples
--------
@@ -189,7 +198,6 @@
"""
x_log = None
y_log = None
- z_log = None
x_title = None
y_title = None
_plot_valid = False
@@ -197,21 +205,28 @@
def __init__(self, data_source, x_field, y_fields,
weight_field="cell_mass", n_bins=64,
accumulation=False, fractional=False,
- label=None, plot_spec=None):
+ label=None, plot_spec=None,
+ x_log=None, y_log=None):
+
+ if x_log is None:
+ logs = None
+ else:
+ logs = {x_field:x_log}
profiles = [create_profile(data_source, [x_field],
n_bins=[n_bins],
fields=ensure_list(y_fields),
weight_field=weight_field,
accumulation=accumulation,
- fractional=fractional)]
+ fractional=fractional,
+ logs=logs)]
if plot_spec is None:
plot_spec = [dict() for p in profiles]
if not isinstance(plot_spec, list):
plot_spec = [plot_spec.copy() for p in profiles]
- ProfilePlot._initialize_instance(self, profiles, label, plot_spec)
+ ProfilePlot._initialize_instance(self, profiles, label, plot_spec, y_log)
def save(self, name=None):
r"""
@@ -323,11 +338,16 @@
self._plot_valid = True
@classmethod
- def _initialize_instance(cls, obj, profiles, labels, plot_specs):
+ def _initialize_instance(cls, obj, profiles, labels, plot_specs, y_log):
+ if not hasattr(obj, "x_log"):
+ obj.x_log = None
+ obj.profiles = ensure_list(profiles)
obj.y_log = {}
+ if y_log is not None:
+ for field, log in y_log.items():
+ field, = obj.profiles[0].data_source._determine_fields([field])
+ obj.y_log[field] = log
obj.y_title = {}
- obj.x_log = None
- obj.profiles = ensure_list(profiles)
obj.label = sanitize_label(labels, len(obj.profiles))
if plot_specs is None:
plot_specs = [dict() for p in obj.profiles]
@@ -338,7 +358,7 @@
return obj
@classmethod
- def from_profiles(cls, profiles, labels=None, plot_specs=None):
+ def from_profiles(cls, profiles, labels=None, plot_specs=None, y_log=None):
r"""
Instantiate a ProfilePlot object from a list of profiles
created with :func:`~yt.data_objects.profiles.create_profile`.
@@ -384,7 +404,7 @@
if plot_specs is not None and len(plot_specs) != len(profiles):
raise RuntimeError("Profiles list and plot_specs list must be the same size.")
obj = cls.__new__(cls)
- return cls._initialize_instance(obj, profiles, labels, plot_specs)
+ return cls._initialize_instance(obj, profiles, labels, plot_specs, y_log)
@invalidate_plot
def set_line_property(self, property, value, index=None):
https://bitbucket.org/yt_analysis/yt/commits/f3278f386f5d/
Changeset: f3278f386f5d
Branch: yt
User: drudd
Date: 2014-11-03 23:51:27+00:00
Summary: Removed hasattr call from _initialize_instance which was unnecessary
Affected #: 1 file
diff -r ef5fb85587b08d3d79dc42c3686115500afa206c -r f3278f386f5d4505caf74ec3288bf883f144c774 yt/visualization/profile_plotter.py
--- a/yt/visualization/profile_plotter.py
+++ b/yt/visualization/profile_plotter.py
@@ -339,9 +339,8 @@
@classmethod
def _initialize_instance(cls, obj, profiles, labels, plot_specs, y_log):
- if not hasattr(obj, "x_log"):
- obj.x_log = None
obj.profiles = ensure_list(profiles)
+ obj.x_log = None
obj.y_log = {}
if y_log is not None:
for field, log in y_log.items():
https://bitbucket.org/yt_analysis/yt/commits/61b8f5675cc2/
Changeset: 61b8f5675cc2
Branch: yt
User: drudd
Date: 2014-11-05 19:04:04+00:00
Summary: Merged with upstream for overlapping tests
Affected #: 8 files
diff -r f3278f386f5d4505caf74ec3288bf883f144c774 -r 61b8f5675cc23c5e0b3635c70766ab5d0cd71bbb doc/source/analyzing/objects.rst
--- a/doc/source/analyzing/objects.rst
+++ b/doc/source/analyzing/objects.rst
@@ -96,7 +96,7 @@
**Point**
| Class :class:`~yt.data_objects.selection_data_containers.YTPointBase`
- | Usage: ``point(coord, ds=None, field_parameters=None)``
+ | Usage: ``point(coord, ds=None, field_parameters=None, data_source=None)``
| A point defined by a single cell at specified coordinates.
1D Objects
@@ -104,14 +104,14 @@
**Ray (Axis-Aligned)**
| Class :class:`~yt.data_objects.selection_data_containers.YTOrthoRayBase`
- | Usage: ``ortho_ray(axis, coord, ds=None, field_parameters=None)``
+ | Usage: ``ortho_ray(axis, coord, ds=None, field_parameters=None, data_source=None)``
| A line (of data cells) stretching through the full domain
aligned with one of the x,y,z axes. Defined by an axis and a point
to be intersected.
**Ray (Arbitrarily-Aligned)**
| Class :class:`~yt.data_objects.selection_data_containers.YTRayBase`
- | Usage: ``ray(start_coord, end_coord, ds=None, field_parameters=None)``
+ | Usage: ``ray(start_coord, end_coord, ds=None, field_parameters=None, data_source=None)``
| A line (of data cells) defined by arbitrary start and end coordinates.
2D Objects
@@ -119,13 +119,13 @@
**Slice (Axis-Aligned)**
| Class :class:`~yt.data_objects.selection_data_containers.YTSliceBase`
- | Usage: ``slice(axis, coord, center=None, ds=None, field_parameters=None)``
+ | Usage: ``slice(axis, coord, center=None, ds=None, field_parameters=None, data_source=None)``
| A plane normal to one of the axes and intersecting a particular
coordinate.
**Slice (Arbitrarily-Aligned)**
| Class :class:`~yt.data_objects.selection_data_containers.YTCuttingPlaneBase`
- | Usage: ``cutting(normal, coord, north_vector=None, ds=None, field_parameters=None)``
+ | Usage: ``cutting(normal, coord, north_vector=None, ds=None, field_parameters=None, data_source=None)``
| A plane normal to a specified vector and intersecting a particular
coordinate.
@@ -141,8 +141,8 @@
**Box Region**
| Class :class:`~yt.data_objects.selection_data_containers.YTRegionBase`
- | Usage: ``region(center, left_edge, right_edge, fields=None, ds=None, field_parameters=None)``
- | Alternatively: ``box(left_edge, right_edge, fields=None, ds=None, field_parameters=None)``
+ | Usage: ``region(center, left_edge, right_edge, fields=None, ds=None, field_parameters=None, data_source=None)``
+ | Alternatively: ``box(left_edge, right_edge, fields=None, ds=None, field_parameters=None, data_source=None)``
| A box-like region aligned with the grid axis orientation. It is
defined by a left_edge, a right_edge, and a center. The left_edge
and right_edge are the minimum and maximum bounds in the three axes
@@ -152,14 +152,14 @@
**Disk/Cylinder**
| Class: :class:`~yt.data_objects.selection_data_containers.YTDiskBase`
- | Usage: ``disk(center, normal, radius, height, fields=None, ds=None, field_parameters=None)``
+ | Usage: ``disk(center, normal, radius, height, fields=None, ds=None, field_parameters=None, data_source=None)``
| A cylinder defined by a point at the center of one of the circular bases,
a normal vector to it defining the orientation of the length of the
cylinder, and radius and height values for the cylinder's dimensions.
**Ellipsoid**
| Class :class:`~yt.data_objects.selection_data_containers.YTEllipsoidBase`
- | Usage: ``ellipsoid(center, semi_major_axis_length, semi_medium_axis_length, semi_minor_axis_length, semi_major_vector, tilt, fields=None, ds=None, field_parameters=None)``
+ | Usage: ``ellipsoid(center, semi_major_axis_length, semi_medium_axis_length, semi_minor_axis_length, semi_major_vector, tilt, fields=None, ds=None, field_parameters=None, data_source=None)``
| An ellipsoid with axis magnitudes set by semi_major_axis_length,
semi_medium_axis_length, and semi_minor_axis_length. semi_major_vector
sets the direction of the semi_major_axis. tilt defines the orientation
@@ -167,7 +167,7 @@
**Sphere**
| Class :class:`~yt.data_objects.selection_data_containers.YTSphereBase`
- | Usage: ``sphere(center, radius, ds=None, field_parameters=None)``
+ | Usage: ``sphere(center, radius, ds=None, field_parameters=None, data_source=None)``
| A sphere defined by a central coordinate and a radius.
@@ -176,6 +176,12 @@
See also the section on :ref:`filtering-data`.
+**Intersecting Regions**
+ | Most Region objects provide a data_source parameter, which allows you to subselect
+ | one region from another (in the coordinate system of the DataSet). Note that this can
+ | easily lead to empty data for non-intersecting regions.
+ | Usage: ``slice(axis, coord, ds, data_source=sph)``
+
**Boolean Regions**
| **Note: not yet implemented in yt 3.0**
| Usage: ``boolean()``
diff -r f3278f386f5d4505caf74ec3288bf883f144c774 -r 61b8f5675cc23c5e0b3635c70766ab5d0cd71bbb yt/data_objects/construction_data_containers.py
--- a/yt/data_objects/construction_data_containers.py
+++ b/yt/data_objects/construction_data_containers.py
@@ -42,7 +42,7 @@
from yt.utilities.minimal_representation import \
MinimalProjectionData
from yt.utilities.parallel_tools.parallel_analysis_interface import \
- parallel_objects, parallel_root_only, ParallelAnalysisInterface
+ parallel_objects, parallel_root_only
from yt.units.unit_object import Unit
import yt.geometry.particle_deposit as particle_deposit
from yt.utilities.grid_data_format.writer import write_to_gdf
@@ -833,7 +833,7 @@
new_fields.append(output_field)
level_state.fields = new_fields
-class YTSurfaceBase(YTSelectionContainer3D, ParallelAnalysisInterface):
+class YTSurfaceBase(YTSelectionContainer3D):
r"""This surface object identifies isocontours on a cell-by-cell basis,
with no consideration of global connectedness, and returns the vertices
of the Triangles in that isocontour.
@@ -886,7 +886,6 @@
("index", "z"))
vertices = None
def __init__(self, data_source, surface_field, field_value):
- ParallelAnalysisInterface.__init__(self)
self.data_source = data_source
self.surface_field = surface_field
self.field_value = field_value
diff -r f3278f386f5d4505caf74ec3288bf883f144c774 -r 61b8f5675cc23c5e0b3635c70766ab5d0cd71bbb yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -41,6 +41,8 @@
from yt.fields.derived_field import \
ValidateSpatial
import yt.geometry.selection_routines
+from yt.geometry.selection_routines import \
+ compose_selector
from yt.extern.six import add_metaclass
def force_array(item, shape):
@@ -101,8 +103,15 @@
sets its initial set of fields, and the remainder of the arguments
are passed as field_parameters.
"""
- if ds != None:
+ # ds is typically set in the new object type created in Dataset._add_object_class
+ # but it can also be passed as a parameter to the constructor, in which case it will
+ # override the default. This code ensures it is always set.
+ if ds is not None:
self.ds = ds
+ else:
+ if not hasattr(self, "ds"):
+ raise RuntimeError("Error: ds must be set either through class type or parameter to the constructor")
+
self._current_particle_type = "all"
self._current_fluid_type = self.ds.default_fluid_type
self.ds.objects.append(weakref.proxy(self))
@@ -542,10 +551,22 @@
_sort_by = None
_selector = None
_current_chunk = None
+ _data_source = None
+ _dimensionality = None
- def __init__(self, *args, **kwargs):
- super(YTSelectionContainer, self).__init__(*args, **kwargs)
-
+ def __init__(self, ds, field_parameters, data_source=None):
+ ParallelAnalysisInterface.__init__(self)
+ super(YTSelectionContainer, self).__init__(ds, field_parameters)
+ self._data_source = data_source
+ if data_source is not None:
+ if data_source.ds is not self.ds:
+ raise RuntimeError("Attempted to construct a DataContainer with a data_source from a different DataSet", ds, data_source.ds)
+ else:
+ print "DataSets: ", self.ds, data_source.ds
+ if data_source._dimensionality < self._dimensionality:
+ raise RuntimeError("Attempted to construct a DataContainer with a data_source of lower dimensionality (%u vs %u)" %
+ (data_source._dimensionality, self._dimensionality))
+
@property
def selector(self):
if self._selector is not None: return self._selector
@@ -555,7 +576,11 @@
"%s_selector" % self._type_name, None)
if sclass is None:
raise YTDataSelectorNotImplemented(self._type_name)
- self._selector = sclass(self)
+
+ if self._data_source is not None:
+ self._selector = compose_selector(self, self._data_source.selector, sclass(self))
+ else:
+ self._selector = sclass(self)
return self._selector
def chunks(self, fields, chunking_style, **kwargs):
@@ -765,30 +790,32 @@
class YTSelectionContainer0D(YTSelectionContainer):
_spatial = False
- def __init__(self, ds, field_parameters):
+ _dimensionality = 0
+ def __init__(self, ds, field_parameters = None, data_source = None):
super(YTSelectionContainer0D, self).__init__(
- ds, field_parameters)
+ ds, field_parameters, data_source)
class YTSelectionContainer1D(YTSelectionContainer):
_spatial = False
- def __init__(self, ds, field_parameters):
+ _dimensionality = 1
+ def __init__(self, ds, field_parameters = None, data_source = None):
super(YTSelectionContainer1D, self).__init__(
- ds, field_parameters)
+ ds, field_parameters, data_source)
self._grids = None
self._sortkey = None
self._sorted = {}
class YTSelectionContainer2D(YTSelectionContainer):
_key_fields = ['px','py','pdx','pdy']
+ _dimensionality = 2
"""
Prepares the YTSelectionContainer2D, normal to *axis*. If *axis* is 4, we are not
aligned with any axis.
"""
_spatial = False
- def __init__(self, axis, ds, field_parameters):
- ParallelAnalysisInterface.__init__(self)
+ def __init__(self, axis, ds, field_parameters = None, data_source = None):
super(YTSelectionContainer2D, self).__init__(
- ds, field_parameters)
+ ds, field_parameters, data_source)
# We need the ds, which will exist by now, for fix_axis.
self.axis = fix_axis(axis, self.ds)
self.set_field_parameter("axis", axis)
@@ -910,9 +937,9 @@
_key_fields = ['x','y','z','dx','dy','dz']
_spatial = False
_num_ghost_zones = 0
- def __init__(self, center, ds = None, field_parameters = None):
- ParallelAnalysisInterface.__init__(self)
- super(YTSelectionContainer3D, self).__init__(ds, field_parameters)
+ _dimensionality = 3
+ def __init__(self, center, ds, field_parameters = None, data_source = None):
+ super(YTSelectionContainer3D, self).__init__(ds, field_parameters, data_source)
self._set_center(center)
self.coords = None
self._grids = None
@@ -1273,9 +1300,9 @@
"""
_type_name = "boolean"
_con_args = ("regions",)
- def __init__(self, regions, fields = None, ds = None, **kwargs):
+ def __init__(self, regions, fields = None, ds = None, field_parameters = None, data_source = None):
# Center is meaningless, but we'll define it all the same.
- YTSelectionContainer3D.__init__(self, [0.5]*3, fields, ds, **kwargs)
+ YTSelectionContainer3D.__init__(self, [0.5]*3, fields, ds, field_parameters, data_source)
self.regions = regions
self._all_regions = []
self._some_overlap = []
diff -r f3278f386f5d4505caf74ec3288bf883f144c774 -r 61b8f5675cc23c5e0b3635c70766ab5d0cd71bbb yt/data_objects/profiles.py
--- a/yt/data_objects/profiles.py
+++ b/yt/data_objects/profiles.py
@@ -761,6 +761,7 @@
self.field_data = YTFieldData()
if weight_field is not None:
self.variance = YTFieldData()
+ weight_field = self.data_source._determine_fields(weight_field)[0]
self.weight_field = weight_field
self.field_units = {}
ParallelAnalysisInterface.__init__(self, comm=data_source.comm)
@@ -774,7 +775,7 @@
A list of fields to create profile histograms for
"""
- fields = ensure_list(fields)
+ fields = self.data_source._determine_fields(fields)
temp_storage = ProfileFieldAccumulator(len(fields), self.size)
cfields = fields + list(self.bin_fields)
citer = self.data_source.chunks(cfields, "io")
@@ -907,9 +908,11 @@
if not np.any(filter): return None
arr = np.zeros((bin_fields[0].size, len(fields)), dtype="float64")
for i, field in enumerate(fields):
- arr[:,i] = chunk[field][filter]
+ units = chunk.ds.field_info[field].units
+ arr[:,i] = chunk[field][filter].in_units(units)
if self.weight_field is not None:
- weight_data = chunk[self.weight_field]
+ units = chunk.ds.field_info[self.weight_field].units
+ weight_data = chunk[self.weight_field].in_units(units)
else:
weight_data = np.ones(filter.size, dtype="float64")
weight_data = weight_data[filter]
@@ -1230,6 +1233,16 @@
self.z_bins.convert_to_units(new_unit)
self.z = 0.5*(self.z_bins[1:]+self.z_bins[:-1])
+
+def sanitize_field_tuple_keys(input_dict, data_source):
+ if input_dict is not None:
+ dummy = {}
+ for item in input_dict:
+ dummy[data_source._determine_fields(item)[0]] = input_dict[item]
+ return dummy
+ else:
+ return input_dict
+
def create_profile(data_source, bin_fields, fields, n_bins=64,
extrema=None, logs=None, units=None,
weight_field="cell_mass",
@@ -1293,7 +1306,7 @@
>>> print profile["gas", "temperature"]
"""
- bin_fields = ensure_list(bin_fields)
+ bin_fields = data_source._determine_fields(bin_fields)
fields = ensure_list(fields)
if len(bin_fields) == 1:
cls = Profile1D
@@ -1305,16 +1318,9 @@
raise NotImplementedError
bin_fields = data_source._determine_fields(bin_fields)
fields = data_source._determine_fields(fields)
- if units is not None:
- dummy = {}
- for item in units:
- dummy[data_source._determine_fields(item)[0]] = units[item]
- units.update(dummy)
- if extrema is not None:
- dummy = {}
- for item in extrema:
- dummy[data_source._determine_fields(item)[0]] = extrema[item]
- extrema.update(dummy)
+ units = sanitize_field_tuple_keys(units, data_source)
+ extrema = sanitize_field_tuple_keys(extrema, data_source)
+ logs = sanitize_field_tuple_keys(logs, data_source)
if weight_field is not None:
weight_field, = data_source._determine_fields([weight_field])
if not iterable(n_bins):
@@ -1322,18 +1328,21 @@
if not iterable(accumulation):
accumulation = [accumulation] * len(bin_fields)
if logs is None:
- logs = [data_source.ds._get_field_info(f[0],f[1]).take_log
- for f in bin_fields]
- else:
- logs = [logs[bin_field[-1]] for bin_field in bin_fields]
+ logs = {}
+ logs_list = []
+ for bin_field in bin_fields:
+ if bin_field in logs:
+ logs_list.append(logs[bin_field])
+ else:
+ logs_list.append(data_source.ds.field_info[bin_field].take_log)
+ logs = logs_list
if extrema is None:
ex = [data_source.quantities["Extrema"](f, non_zero=l)
for f, l in zip(bin_fields, logs)]
else:
ex = []
for bin_field in bin_fields:
- bf_units = data_source.ds._get_field_info(
- bin_field[0], bin_field[1]).units
+ bf_units = data_source.ds.field_info[bin_field].units
try:
field_ex = list(extrema[bin_field[-1]])
except KeyError:
diff -r f3278f386f5d4505caf74ec3288bf883f144c774 -r 61b8f5675cc23c5e0b3635c70766ab5d0cd71bbb yt/data_objects/selection_data_containers.py
--- a/yt/data_objects/selection_data_containers.py
+++ b/yt/data_objects/selection_data_containers.py
@@ -51,8 +51,11 @@
ds: Dataset, optional
An optional dataset to use rather than self.ds
field_parameters : dictionary
- A dictionary of field parameters than can be accessed by derived
- fields.
+ A dictionary of field parameters that can be accessed by derived
+ fields.
+ data_source: optional
+ Draw the selection from the provided data source rather than
+ all data associated with the dataset
Examples
--------
@@ -64,8 +67,8 @@
"""
_type_name = "point"
_con_args = ('p',)
- def __init__(self, p, ds = None, field_parameters = None):
- super(YTPointBase, self).__init__(ds, field_parameters)
+ def __init__(self, p, ds=None, field_parameters=None, data_source=None):
+ super(YTPointBase, self).__init__(ds, field_parameters, data_source)
self.p = p
class YTOrthoRayBase(YTSelectionContainer1D):
@@ -92,6 +95,9 @@
field_parameters : dictionary
A dictionary of field parameters than can be accessed by derived
fields.
+ data_source: optional
+ Draw the selection from the provided data source rather than
+ all data associated with the dataset
Examples
--------
@@ -104,8 +110,9 @@
_key_fields = ['x','y','z','dx','dy','dz']
_type_name = "ortho_ray"
_con_args = ('axis', 'coords')
- def __init__(self, axis, coords, ds=None, field_parameters=None):
- super(YTOrthoRayBase, self).__init__(ds, field_parameters)
+ def __init__(self, axis, coords, ds=None,
+ field_parameters=None, data_source=None):
+ super(YTOrthoRayBase, self).__init__(ds, field_parameters, data_source)
self.axis = axis
xax = self.ds.coordinates.x_axis[self.axis]
yax = self.ds.coordinates.y_axis[self.axis]
@@ -144,6 +151,9 @@
field_parameters : dictionary
A dictionary of field parameters than can be accessed by derived
fields.
+ data_source: optional
+ Draw the selection from the provided data source rather than
+ all data associated with the dataset
Examples
--------
@@ -156,8 +166,9 @@
_type_name = "ray"
_con_args = ('start_point', 'end_point')
_container_fields = ("t", "dts")
- def __init__(self, start_point, end_point, ds=None, field_parameters=None):
- super(YTRayBase, self).__init__(ds, field_parameters)
+ def __init__(self, start_point, end_point, ds=None,
+ field_parameters=None, data_source=None):
+ super(YTRayBase, self).__init__(ds, field_parameters, data_source)
self.start_point = self.ds.arr(start_point,
'code_length', dtype='float64')
self.end_point = self.ds.arr(end_point,
@@ -204,6 +215,9 @@
field_parameters : dictionary
A dictionary of field parameters than can be accessed by derived
fields.
+ data_source: optional
+ Draw the selection from the provided data source rather than
+ all data associated with the dataset
Examples
--------
@@ -217,10 +231,10 @@
_type_name = "slice"
_con_args = ('axis', 'coord')
_container_fields = ("px", "py", "pdx", "pdy")
-
def __init__(self, axis, coord, center=None, ds=None,
- field_parameters = None):
- YTSelectionContainer2D.__init__(self, axis, ds, field_parameters)
+ field_parameters=None, data_source=None):
+ YTSelectionContainer2D.__init__(self, axis, ds,
+ field_parameters, data_source)
self._set_center(center)
self.coord = coord
@@ -285,6 +299,9 @@
field_parameters : dictionary
A dictionary of field parameters than can be accessed by derived
fields.
+ data_source: optional
+ Draw the selection from the provided data source rather than
+ all data associated with the dataset
Notes
-----
@@ -308,10 +325,10 @@
_type_name = "cutting"
_con_args = ('normal', 'center')
_container_fields = ("px", "py", "pz", "pdx", "pdy", "pdz")
-
- def __init__(self, normal, center, north_vector = None,
- ds = None, field_parameters = None):
- YTSelectionContainer2D.__init__(self, 4, ds, field_parameters)
+ def __init__(self, normal, center, north_vector=None,
+ ds=None, field_parameters=None, data_source=None):
+ YTSelectionContainer2D.__init__(self, 4, ds,
+ field_parameters, data_source)
self._set_center(center)
self.set_field_parameter('center',center)
# Let's set up our plane equation
@@ -465,7 +482,7 @@
Parameters
----------
- center : array_like
+ center : array_like
coordinate to which the normal, radius, and height all reference
normal : array_like
the normal vector defining the direction of lengthwise part of the
@@ -482,6 +499,9 @@
field_parameters : dictionary
A dictionary of field parameters than can be accessed by derived
fields.
+ data_source: optional
+ Draw the selection from the provided data source rather than
+ all data associated with the dataset
Examples
--------
@@ -494,8 +514,9 @@
_type_name = "disk"
_con_args = ('center', '_norm_vec', 'radius', 'height')
def __init__(self, center, normal, radius, height, fields=None,
- ds=None, **kwargs):
- YTSelectionContainer3D.__init__(self, center, fields, ds, **kwargs)
+ ds=None, field_parameters=None, data_source=None):
+ YTSelectionContainer3D.__init__(self, center, ds,
+ field_parameters, data_source)
self._norm_vec = np.array(normal)/np.sqrt(np.dot(normal,normal))
self.set_field_parameter("normal", self._norm_vec)
self.set_field_parameter("center", self.center)
@@ -523,9 +544,10 @@
"""
_type_name = "region"
_con_args = ('center', 'left_edge', 'right_edge')
- def __init__(self, center, left_edge, right_edge, fields = None,
- ds = None, **kwargs):
- YTSelectionContainer3D.__init__(self, center, ds, **kwargs)
+ def __init__(self, center, left_edge, right_edge, fields=None,
+ ds=None, field_parameters=None, data_source=None):
+ YTSelectionContainer3D.__init__(self, center, ds,
+ field_parameters, data_source)
if not isinstance(left_edge, YTArray):
self.left_edge = self.ds.arr(left_edge, 'code_length')
else:
@@ -542,8 +564,10 @@
"""
_type_name = "data_collection"
_con_args = ("_obj_list",)
- def __init__(self, obj_list, ds=None, field_parameters=None, center=None):
- YTSelectionContainer3D.__init__(self, center, ds, field_parameters)
+ def __init__(self, obj_list, ds=None, field_parameters=None,
+ data_source=None, center=None):
+ YTSelectionContainer3D.__init__(self, center, ds,
+ field_parameters, data_source)
self._obj_ids = np.array([o.id - o._id_offset for o in obj_list],
dtype="int64")
self._obj_list = obj_list
@@ -569,8 +593,10 @@
"""
_type_name = "sphere"
_con_args = ('center', 'radius')
- def __init__(self, center, radius, ds = None, field_parameters = None):
- super(YTSphereBase, self).__init__(center, ds, field_parameters)
+ def __init__(self, center, radius, ds=None,
+ field_parameters=None, data_source=None):
+ super(YTSphereBase, self).__init__(center, ds,
+ field_parameters, data_source)
# Unpack the radius, if necessary
radius = fix_length(radius, self.ds)
if radius < self.index.get_smallest_dx():
@@ -615,8 +641,9 @@
_type_name = "ellipsoid"
_con_args = ('center', '_A', '_B', '_C', '_e0', '_tilt')
def __init__(self, center, A, B, C, e0, tilt, fields=None,
- ds=None, field_parameters = None):
- YTSelectionContainer3D.__init__(self, center, ds, field_parameters)
+ ds=None, field_parameters=None, data_source=None):
+ YTSelectionContainer3D.__init__(self, center, ds,
+ field_parameters, data_source)
# make sure the magnitudes of semi-major axes are in order
if A<B or B<C:
raise YTEllipsoidOrdering(ds, A, B, C)
@@ -625,10 +652,10 @@
self._B = self.ds.quan(B, 'code_length')
self._C = self.ds.quan(C, 'code_length')
if self._C < self.index.get_smallest_dx():
- raise YTSphereTooSmall(ds, self._C, self.index.get_smallest_dx())
+ raise YTSphereTooSmall(self.ds, self._C, self.index.get_smallest_dx())
self._e0 = e0 = e0 / (e0**2.0).sum()**0.5
self._tilt = tilt
-
+
# find the t1 angle needed to rotate about z axis to align e0 to x
t1 = np.arctan(e0[1] / e0[0])
# rotate e0 by -t1
@@ -684,9 +711,10 @@
"""
_type_name = "cut_region"
_con_args = ("base_object", "conditionals")
- def __init__(self, base_object, conditionals, ds = None,
- field_parameters = None):
- super(YTCutRegionBase, self).__init__(base_object.center, ds, field_parameters)
+ def __init__(self, base_object, conditionals, ds=None,
+ field_parameters=None, data_source=None):
+ super(YTCutRegionBase, self).__init__(base_object.center, ds,
+ field_parameters, data_source)
self.conditionals = ensure_list(conditionals)
self.base_object = base_object
self._selector = None
@@ -762,4 +790,3 @@
@property
def fwidth(self):
return self.base_object.fwidth[self._cond_ind,:]
-
diff -r f3278f386f5d4505caf74ec3288bf883f144c774 -r 61b8f5675cc23c5e0b3635c70766ab5d0cd71bbb yt/data_objects/tests/test_compose.py
--- /dev/null
+++ b/yt/data_objects/tests/test_compose.py
@@ -0,0 +1,146 @@
+from yt.testing import *
+from yt.fields.local_fields import add_field
+from yt.units.yt_array import YTArray, uintersect1d
+
+def setup():
+ from yt.config import ytcfg
+ ytcfg["yt","__withintesting"] = "True"
+
+# Copied from test_boolean for computing a unique identifier for
+# each cell from cell positions
+def _IDFIELD(field, data):
+ width = data.ds.domain_right_edge - data.ds.domain_left_edge
+ min_dx = YTArray(1.0/8192, input_units='code_length',
+ registry=data.ds.unit_registry)
+ delta = width / min_dx
+ x = data['x'] - min_dx / 2.
+ y = data['y'] - min_dx / 2.
+ z = data['z'] - min_dx / 2.
+ xi = x / min_dx
+ yi = y / min_dx
+ zi = z / min_dx
+ index = xi + delta[0] * (yi + delta[1] * zi)
+ index = index.astype('int64')
+ return index
+
+def test_compose_no_overlap():
+ r"""Test to make sure that composed data objects that don't
+ overlap behave the way we expect (return empty collections)
+ """
+ empty = np.array([])
+ for n in [1, 2, 4, 8]:
+ ds = fake_random_ds(64, nprocs=n)
+ ds.add_field("ID", function=_IDFIELD)
+
+ # position parameters for initial region
+ center = [0.25]*3
+ left_edge = [0.1]*3
+ right_edge = [0.4]*3
+ normal = [1, 0, 0]
+ radius = height = 0.15
+
+ # initial 3D regions
+ sources = [ds.sphere(center, radius),
+ ds.region(center, left_edge, right_edge),
+ ds.disk(center, normal, radius, height)]
+
+ # position parameters for non-overlapping regions
+ center = [0.75]*3
+ left_edge = [0.6]*3
+ right_edge = [0.9]*3
+
+ # subselect non-overlapping 0, 1, 2, 3D regions
+ for data1 in sources:
+ data2 = ds.sphere(center, radius, data_source=data1)
+ yield assert_array_equal, data2['ID'], empty
+
+ data2 = ds.region(center, left_edge, right_edge, data_source=data1)
+ yield assert_array_equal, data2['ID'], empty
+
+ data2 = ds.disk(center, normal, radius, height, data_source=data1)
+ yield assert_array_equal, data2['ID'], empty
+
+ for d in range(3):
+ data2 = ds.slice(d, center[d], data_source=data1)
+ yield assert_array_equal, data2['ID'], empty
+
+ for d in range(3):
+ data2 = ds.ortho_ray(d, center[0:d] + center[d+1:], data_source=data1)
+ yield assert_array_equal, data2['ID'], empty
+
+ data2 = ds.point(center, data_source=data1)
+ yield assert_array_equal, data2['ID'], empty
+
+def test_compose_overlap():
+ r"""Test to make sure that composed data objects that do
+ overlap behave the way we expect
+ """
+ empty = np.array([])
+ for n in [1, 2, 4, 8]:
+ ds = fake_random_ds(64, nprocs=n)
+ ds.add_field("ID", function=_IDFIELD)
+
+ # position parameters for initial region
+ center = [0.4, 0.5, 0.5]
+ left_edge = [0.1]*3
+ right_edge = [0.7]*3
+ normal = [1, 0, 0]
+ radius = height = 0.15
+
+ # initial 3D regions
+ sources = [ds.sphere(center, radius),
+ ds.region(center, left_edge, right_edge),
+ ds.disk(center, normal, radius, height)]
+
+ # position parameters for overlapping regions
+ center = [0.6, 0.5, 0.5]
+ left_edge = [0.3]*3
+ right_edge = [0.9]*3
+
+ # subselect non-overlapping 0, 1, 2, 3D regions
+ for data1 in sources:
+ id1 = data1['ID']
+
+ data2 = ds.sphere(center, radius)
+ data3 = ds.sphere(center, radius, data_source=data1)
+ id2 = data2['ID']
+ id3 = data3['ID']
+ id3.sort()
+ yield assert_array_equal, uintersect1d(id1, id2), id3
+
+ data2 = ds.region(center, left_edge, right_edge)
+ data3 = ds.region(center, left_edge, right_edge, data_source=data1)
+ id2 = data2['ID']
+ id3 = data3['ID']
+ id3.sort()
+ yield assert_array_equal, uintersect1d(id1, id2), id3
+
+ data2 = ds.disk(center, normal, radius, height)
+ data3 = ds.disk(center, normal, radius, height, data_source=data1)
+ id2 = data2['ID']
+ id3 = data3['ID']
+ id3.sort()
+ yield assert_array_equal, uintersect1d(id1, id2), id3
+
+ for d in range(3):
+ data2 = ds.slice(d, center[d])
+ data3 = ds.slice(d, center[d], data_source=data1)
+ id2 = data2['ID']
+ id3 = data3['ID']
+ id3.sort()
+ yield assert_array_equal, uintersect1d(id1, id2), id3
+
+ for d in range(3):
+ data2 = ds.ortho_ray(d, center[0:d] + center[d+1:])
+ data3 = ds.ortho_ray(d, center[0:d] + center[d+1:], data_source=data1)
+ id2 = data2['ID']
+ id3 = data3['ID']
+ id3.sort()
+ yield assert_array_equal, uintersect1d(id1, id2), id3
+
+ data2 = ds.point(center)
+ data3 = ds.point(center, data_source=data1)
+ id2 = data2['ID']
+ id3 = data3['ID']
+ id3.sort()
+ yield assert_array_equal, uintersect1d(id1, id2), id3
diff -r f3278f386f5d4505caf74ec3288bf883f144c774 -r 61b8f5675cc23c5e0b3635c70766ab5d0cd71bbb yt/data_objects/tests/test_profiles.py
--- a/yt/data_objects/tests/test_profiles.py
+++ b/yt/data_objects/tests/test_profiles.py
@@ -1,7 +1,7 @@
from yt.testing import *
from yt.data_objects.profiles import \
BinnedProfile1D, BinnedProfile2D, BinnedProfile3D, \
- Profile1D, Profile2D, Profile3D
+ Profile1D, Profile2D, Profile3D, create_profile
_fields = ("density", "temperature", "dinosaurs", "tribbles")
_units = ("g/cm**3", "K", "dyne", "erg")
@@ -87,13 +87,26 @@
for nb in [8, 16, 32, 64]:
# We log all the fields or don't log 'em all. No need to do them
# individually.
- for lf in [True, False]:
- p1d = Profile1D(dd,
- "density", nb, rmi*e1, rma*e2, lf,
- weight_field = None)
- p1d.add_fields(["ones", "temperature"])
- yield assert_equal, p1d["ones"].sum(), nv
- yield assert_rel_equal, tt, p1d["temperature"].sum(), 7
+ for lf in [True, False]:
+ direct_profile = Profile1D(
+ dd, "density", nb, rmi*e1, rma*e2, lf, weight_field = None)
+ direct_profile.add_fields(["ones", "temperature"])
+
+ indirect_profile_s = create_profile(
+ dd, "density", ["ones", "temperature"], n_bins=nb,
+ extrema={'density': (rmi*e1, rma*e2)}, logs={'density': lf},
+ weight_field=None)
+
+ indirect_profile_t = create_profile(
+ dd, ("gas", "density"),
+ [("index", "ones"), ("gas", "temperature")], n_bins=nb,
+ extrema={'density': (rmi*e1, rma*e2)}, logs={'density': lf},
+ weight_field=None)
+
+ for p1d in [direct_profile, indirect_profile_s,
+ indirect_profile_t]:
+ yield assert_equal, p1d["index", "ones"].sum(), nv
+ yield assert_rel_equal, tt, p1d["gas", "temperature"].sum(), 7
p2d = Profile2D(dd,
"density", nb, rmi*e1, rma*e2, lf,
@@ -154,6 +167,12 @@
p3d.add_fields(["ones"])
yield assert_equal, p3d["ones"], np.ones((nb,nb,nb))
+extrema_s = {'particle_position_x': (0, 1)}
+logs_s = {'particle_position_x': False}
+
+extrema_t = {('all', 'particle_position_x'): (0, 1)}
+logs_t = {('all', 'particle_position_x'): False}
+
def test_particle_profiles():
for nproc in [1, 2, 4, 8]:
ds = fake_random_ds(32, nprocs=nproc, particles = 32**3)
@@ -164,6 +183,18 @@
p1d.add_fields(["particle_ones"])
yield assert_equal, p1d["particle_ones"].sum(), 32**3
+ p1d = create_profile(dd, ["particle_position_x"], ["particle_ones"],
+ weight_field=None, n_bins=128, extrema=extrema_s,
+ logs=logs_s)
+ yield assert_equal, p1d["particle_ones"].sum(), 32**3
+
+ p1d = create_profile(dd,
+ [("all", "particle_position_x")],
+ [("all", "particle_ones")],
+ weight_field=None, n_bins=128, extrema=extrema_t,
+ logs=logs_t)
+ yield assert_equal, p1d["particle_ones"].sum(), 32**3
+
p2d = Profile2D(dd, "particle_position_x", 128, 0.0, 1.0, False,
"particle_position_y", 128, 0.0, 1.0, False,
weight_field = None)
diff -r f3278f386f5d4505caf74ec3288bf883f144c774 -r 61b8f5675cc23c5e0b3635c70766ab5d0cd71bbb yt/geometry/selection_routines.pyx
--- a/yt/geometry/selection_routines.pyx
+++ b/yt/geometry/selection_routines.pyx
@@ -112,7 +112,7 @@
cdef class SelectorObject:
- def __cinit__(self, dobj):
+ def __cinit__(self, dobj, *args):
self.min_level = getattr(dobj, "min_level", 0)
self.max_level = getattr(dobj, "max_level", 99)
self.overlap_cells = 0
@@ -1726,6 +1726,65 @@
always_selector = AlwaysSelector
+cdef class ComposeSelector(SelectorObject):
+ cdef SelectorObject selector1
+ cdef SelectorObject selector2
+
+ def __init__(self, dobj, selector1, selector2):
+ self.selector1 = selector1
+ self.selector2 = selector2
+
+ def select_grids(self,
+ np.ndarray[np.float64_t, ndim=2] left_edges,
+ np.ndarray[np.float64_t, ndim=2] right_edges,
+ np.ndarray[np.int32_t, ndim=2] levels):
+ return np.logical_or(
+ self.selector1.select_grids(left_edges, right_edges, levels),
+ self.selector2.select_grids(left_edges, right_edges, levels))
+
+ cdef int select_cell(self, np.float64_t pos[3], np.float64_t dds[3]) nogil:
+ if self.selector1.select_cell(pos, dds) and \
+ self.selector2.select_cell(pos, dds):
+ return 1
+ else:
+ return 0
+
+ cdef int select_grid(self, np.float64_t left_edge[3],
+ np.float64_t right_edge[3], np.int32_t level,
+ Oct *o = NULL) nogil:
+ if self.selector1.select_grid(left_edge, right_edge, level, o) or \
+ self.selector2.select_grid(left_edge, right_edge, level, o):
+ return 1
+ else:
+ return 0
+
+ cdef int select_point(self, np.float64_t pos[3]) nogil:
+ if self.selector1.select_point(pos) and \
+ self.selector2.select_point(pos):
+ return 1
+ else:
+ return 0
+
+ cdef int select_sphere(self, np.float64_t pos[3], np.float64_t radius) nogil:
+ if self.selector1.select_sphere(pos, radius) and \
+ self.selector2.select_sphere(pos, radius):
+ return 1
+ else:
+ return 0
+
+ cdef int select_bbox(self, np.float64_t left_edge[3],
+ np.float64_t right_edge[3]) nogil:
+ if self.selector1.select_bbox(left_edge, right_edge) and \
+ self.selector2.select_bbox(left_edge, right_edge):
+ return 1
+ else:
+ return 0
+
+ def _hash_vals(self):
+ return (hash(self.selector1), hash(self.selector2))
+
+compose_selector = ComposeSelector
+
cdef class HaloParticlesSelector(SelectorObject):
cdef public object base_source
cdef SelectorObject base_selector
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
More information about the yt-svn
mailing list