[yt-svn] commit/yt: 3 new changesets
commits-noreply at bitbucket.org
commits-noreply at bitbucket.org
Thu May 18 08:56:23 PDT 2017
3 new commits in yt:
https://bitbucket.org/yt_analysis/yt/commits/08966c8e067d/
Changeset: 08966c8e067d
User: MatthewTurk
Date: 2017-05-16 19:39:05+00:00
Summary: Adding FRB generation (and tests) to ds.r
Affected #: 3 files
diff -r ce3301f502e43522031100307e5cec8c84b0de7d -r 08966c8e067d11fe2d25bc038043884ec11135e2 yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -495,16 +495,16 @@
def save_as_dataset(self, filename=None, fields=None):
r"""Export a data object to a reloadable yt dataset.
- This function will take a data object and output a dataset
- containing either the fields presently existing or fields
+ This function will take a data object and output a dataset
+ containing either the fields presently existing or fields
given in the ``fields`` list. The resulting dataset can be
reloaded as a yt dataset.
Parameters
----------
filename : str, optional
- The name of the file to be written. If None, the name
- will be a combination of the original dataset and the type
+ The name of the file to be written. If None, the name
+ will be a combination of the original dataset and the type
of data container.
fields : list of string or tuple field names, optional
If this is supplied, it is the list of fields to be saved to
@@ -1628,6 +1628,8 @@
elif iterable(height):
h, u = height
height = self.ds.quan(h, input_units = u)
+ elif not isinstance(height, YTArray):
+ height = self.ds.quan(height, 'code_length')
if not iterable(resolution):
resolution = (resolution, resolution)
from yt.visualization.fixed_resolution import FixedResolutionBuffer
@@ -1857,7 +1859,7 @@
def _calculate_flux_in_grid(self, grid, mask, field, value,
field_x, field_y, field_z, fluxing_field = None):
-
+
vc_fields = [field, field_x, field_y, field_z]
if fluxing_field is not None:
vc_fields.append(fluxing_field)
diff -r ce3301f502e43522031100307e5cec8c84b0de7d -r 08966c8e067d11fe2d25bc038043884ec11135e2 yt/data_objects/region_expression.py
--- a/yt/data_objects/region_expression.py
+++ b/yt/data_objects/region_expression.py
@@ -38,10 +38,16 @@
if isinstance(item, tuple) and isinstance(item[1], string_types):
return self.all_data[item]
if isinstance(item, slice):
+ # This is for the case where we give a slice as an index; one
+ # possible use case of this would be where we supply something
+ # like ds.r[::256j] . This would be expanded, implicitly into
+ # ds.r[::256j, ::256j, ::256j]. Other cases would be if we do
+ # ds.r[0.1:0.9] where it will be expanded along three dimensions.
item = (item, item, item)
if len(item) != self.ds.dimensionality:
# Not the right specification, and we don't want to do anything
- # implicitly.
+ # implicitly. Note that this happens *after* the implicit expansion
+ # of a single slice.
raise YTDimensionalityError(len(item), self.ds.dimensionality)
if self.ds.dimensionality != 3:
# We'll pass on this for the time being.
@@ -49,14 +55,14 @@
# OK, now we need to look at our slices. How many are a specific
# coordinate?
-
+
if not all(isinstance(v, slice) for v in item):
return self._create_slice(item)
else:
if all(s.start is s.stop is s.step is None for s in item):
return self.all_data
return self._create_region(item)
-
+
def _spec_to_value(self, input_tuple):
if not isinstance(input_tuple, tuple):
# We now assume that it's in code_length
@@ -66,6 +72,9 @@
return value
def _create_slice(self, slice_tuple):
+ # This is somewhat more complex because we want to allow for slicing
+ # in one dimension but also *not* using the entire domain; for instance
+ # this means we allow something like ds.r[0.5, 0.1:0.4, 0.1:0.4].
axis = None
new_slice = []
for ax, v in enumerate(slice_tuple):
@@ -79,6 +88,23 @@
# This new slice doesn't need to be a tuple
source = self._create_region(new_slice)
sl = self.ds.slice(axis, coord, data_source = source)
+ # Now, there's the possibility that what we're also seeing here
+ # includes some steps, which would be for getting back a fixed
+ # resolution buffer. We check for that by checking if we have
+ # exactly two imaginary steps.
+ xax = self.ds.coordinates.x_axis[axis]
+ yax = self.ds.coordinates.y_axis[axis]
+ if getattr(new_slice[xax].step, "imag", 0.0) != 0.0 and \
+ getattr(new_slice[yax].step, "imag", 0.0) != 0.0:
+ # We now need to convert to a fixed res buffer.
+ # We'll do this by getting the x/y axes, and then using that.
+ width = source.right_edge[xax] - source.left_edge[xax]
+ height = source.right_edge[yax] - source.left_edge[yax]
+            # Make a resolution tuple from the imaginary parts of the steps.
+ resolution = (int(new_slice[xax].step.imag),
+ int(new_slice[yax].step.imag))
+ sl = sl.to_frb(width = width, resolution = resolution,
+ height = height)
return sl
def _slice_to_edges(self, ax, val):
diff -r ce3301f502e43522031100307e5cec8c84b0de7d -r 08966c8e067d11fe2d25bc038043884ec11135e2 yt/data_objects/tests/test_dataset_access.py
--- a/yt/data_objects/tests/test_dataset_access.py
+++ b/yt/data_objects/tests/test_dataset_access.py
@@ -66,6 +66,16 @@
assert_equal(dd["density"]*2.0, ds.r["density"])
assert_equal(dd["gas", "density"]*2.0, ds.r["gas", "density"])
+def test_slice_from_r():
+ ds = fake_amr_ds(fields = ["density"])
+ sl1 = ds.r[0.5, :, :]
+ sl2 = ds.slice("x", 0.5)
+ assert_equal(sl1["density"], sl2["density"])
+
+ frb1 = sl1.to_frb(width = 1.0, height = 1.0, resolution = (1024, 512))
+ frb2 = ds.r[0.5, ::1024j, ::512j]
+ assert_equal(frb1["density"], frb2["density"])
+
def test_particle_counts():
ds = fake_random_ds(16, particles=100)
assert ds.particle_type_counts == {'io': 100}
https://bitbucket.org/yt_analysis/yt/commits/82c04bf40bfc/
Changeset: 82c04bf40bfc
User: MatthewTurk
Date: 2017-05-16 20:22:45+00:00
Summary: Adding documentation about FRBs from ds.r
Affected #: 1 file
diff -r 08966c8e067d11fe2d25bc038043884ec11135e2 -r 82c04bf40bfca01d8248eb9015eea4af62dd52c8 doc/source/analyzing/objects.rst
--- a/doc/source/analyzing/objects.rst
+++ b/doc/source/analyzing/objects.rst
@@ -199,6 +199,21 @@
.. _available-objects:
+Making Image Buffers
+^^^^^^^^^^^^^^^^^^^^
+
+Using the slicing syntax above for choosing a slice, if you also provide an
+imaginary step value you can obtain a
+:class:`~yt.visualization.api.FixedResolutionBuffer` of the chosen resolution.
+
+For instance, to obtain a 1024 by 1024 buffer covering the entire
+domain but centered at 0.5 in code units, you can do::
+
+ frb = ds.r[0.5, ::1024j, ::1024j]
+
+This ``frb`` object can then be queried like a normal fixed resolution buffer,
+and it will return arrays of shape (1024, 1024).
+
Available Objects
-----------------
@@ -650,7 +665,7 @@
cutout = sp1 - sp3
sp4 = sp1 ^ sp2
sp5 = sp1 & sp2
-
+
Note that the ``+`` operation and the ``|`` operation are identical. For when
multiple objects are to be combined in an intersection or a union, there are
https://bitbucket.org/yt_analysis/yt/commits/2c99a58e7aed/
Changeset: 2c99a58e7aed
User: ngoldbaum
Date: 2017-05-18 15:56:11+00:00
Summary: Merge pull request #1395 from MatthewTurk/dsr_frb
Adding FRB generation to ds.r
Affected #: 4 files
diff -r 3148ea36d1d6095de82e3f4ef4f02e82d8d6266f -r 2c99a58e7aed85ec4948345fefb6cb39e392aaf9 doc/source/analyzing/objects.rst
--- a/doc/source/analyzing/objects.rst
+++ b/doc/source/analyzing/objects.rst
@@ -199,6 +199,21 @@
.. _available-objects:
+Making Image Buffers
+^^^^^^^^^^^^^^^^^^^^
+
+Using the slicing syntax above for choosing a slice, if you also provide an
+imaginary step value you can obtain a
+:class:`~yt.visualization.api.FixedResolutionBuffer` of the chosen resolution.
+
+For instance, to obtain a 1024 by 1024 buffer covering the entire
+domain but centered at 0.5 in code units, you can do::
+
+ frb = ds.r[0.5, ::1024j, ::1024j]
+
+This ``frb`` object can then be queried like a normal fixed resolution buffer,
+and it will return arrays of shape (1024, 1024).
+
Available Objects
-----------------
@@ -650,7 +665,7 @@
cutout = sp1 - sp3
sp4 = sp1 ^ sp2
sp5 = sp1 & sp2
-
+
Note that the ``+`` operation and the ``|`` operation are identical. For when
multiple objects are to be combined in an intersection or a union, there are
diff -r 3148ea36d1d6095de82e3f4ef4f02e82d8d6266f -r 2c99a58e7aed85ec4948345fefb6cb39e392aaf9 yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -495,16 +495,16 @@
def save_as_dataset(self, filename=None, fields=None):
r"""Export a data object to a reloadable yt dataset.
- This function will take a data object and output a dataset
- containing either the fields presently existing or fields
+ This function will take a data object and output a dataset
+ containing either the fields presently existing or fields
given in the ``fields`` list. The resulting dataset can be
reloaded as a yt dataset.
Parameters
----------
filename : str, optional
- The name of the file to be written. If None, the name
- will be a combination of the original dataset and the type
+ The name of the file to be written. If None, the name
+ will be a combination of the original dataset and the type
of data container.
fields : list of string or tuple field names, optional
If this is supplied, it is the list of fields to be saved to
@@ -1628,6 +1628,8 @@
elif iterable(height):
h, u = height
height = self.ds.quan(h, input_units = u)
+ elif not isinstance(height, YTArray):
+ height = self.ds.quan(height, 'code_length')
if not iterable(resolution):
resolution = (resolution, resolution)
from yt.visualization.fixed_resolution import FixedResolutionBuffer
@@ -1857,7 +1859,7 @@
def _calculate_flux_in_grid(self, grid, mask, field, value,
field_x, field_y, field_z, fluxing_field = None):
-
+
vc_fields = [field, field_x, field_y, field_z]
if fluxing_field is not None:
vc_fields.append(fluxing_field)
diff -r 3148ea36d1d6095de82e3f4ef4f02e82d8d6266f -r 2c99a58e7aed85ec4948345fefb6cb39e392aaf9 yt/data_objects/region_expression.py
--- a/yt/data_objects/region_expression.py
+++ b/yt/data_objects/region_expression.py
@@ -38,10 +38,16 @@
if isinstance(item, tuple) and isinstance(item[1], string_types):
return self.all_data[item]
if isinstance(item, slice):
+ # This is for the case where we give a slice as an index; one
+ # possible use case of this would be where we supply something
+ # like ds.r[::256j] . This would be expanded, implicitly into
+ # ds.r[::256j, ::256j, ::256j]. Other cases would be if we do
+ # ds.r[0.1:0.9] where it will be expanded along three dimensions.
item = (item, item, item)
if len(item) != self.ds.dimensionality:
# Not the right specification, and we don't want to do anything
- # implicitly.
+ # implicitly. Note that this happens *after* the implicit expansion
+ # of a single slice.
raise YTDimensionalityError(len(item), self.ds.dimensionality)
if self.ds.dimensionality != 3:
# We'll pass on this for the time being.
@@ -49,14 +55,14 @@
# OK, now we need to look at our slices. How many are a specific
# coordinate?
-
+
if not all(isinstance(v, slice) for v in item):
return self._create_slice(item)
else:
if all(s.start is s.stop is s.step is None for s in item):
return self.all_data
return self._create_region(item)
-
+
def _spec_to_value(self, input_tuple):
if not isinstance(input_tuple, tuple):
# We now assume that it's in code_length
@@ -66,6 +72,9 @@
return value
def _create_slice(self, slice_tuple):
+ # This is somewhat more complex because we want to allow for slicing
+ # in one dimension but also *not* using the entire domain; for instance
+ # this means we allow something like ds.r[0.5, 0.1:0.4, 0.1:0.4].
axis = None
new_slice = []
for ax, v in enumerate(slice_tuple):
@@ -79,6 +88,23 @@
# This new slice doesn't need to be a tuple
source = self._create_region(new_slice)
sl = self.ds.slice(axis, coord, data_source = source)
+ # Now, there's the possibility that what we're also seeing here
+ # includes some steps, which would be for getting back a fixed
+ # resolution buffer. We check for that by checking if we have
+ # exactly two imaginary steps.
+ xax = self.ds.coordinates.x_axis[axis]
+ yax = self.ds.coordinates.y_axis[axis]
+ if getattr(new_slice[xax].step, "imag", 0.0) != 0.0 and \
+ getattr(new_slice[yax].step, "imag", 0.0) != 0.0:
+ # We now need to convert to a fixed res buffer.
+ # We'll do this by getting the x/y axes, and then using that.
+ width = source.right_edge[xax] - source.left_edge[xax]
+ height = source.right_edge[yax] - source.left_edge[yax]
+            # Make a resolution tuple from the imaginary parts of the steps.
+ resolution = (int(new_slice[xax].step.imag),
+ int(new_slice[yax].step.imag))
+ sl = sl.to_frb(width = width, resolution = resolution,
+ height = height)
return sl
def _slice_to_edges(self, ax, val):
diff -r 3148ea36d1d6095de82e3f4ef4f02e82d8d6266f -r 2c99a58e7aed85ec4948345fefb6cb39e392aaf9 yt/data_objects/tests/test_dataset_access.py
--- a/yt/data_objects/tests/test_dataset_access.py
+++ b/yt/data_objects/tests/test_dataset_access.py
@@ -66,6 +66,16 @@
assert_equal(dd["density"]*2.0, ds.r["density"])
assert_equal(dd["gas", "density"]*2.0, ds.r["gas", "density"])
+def test_slice_from_r():
+ ds = fake_amr_ds(fields = ["density"])
+ sl1 = ds.r[0.5, :, :]
+ sl2 = ds.slice("x", 0.5)
+ assert_equal(sl1["density"], sl2["density"])
+
+ frb1 = sl1.to_frb(width = 1.0, height = 1.0, resolution = (1024, 512))
+ frb2 = ds.r[0.5, ::1024j, ::512j]
+ assert_equal(frb1["density"], frb2["density"])
+
def test_particle_counts():
ds = fake_random_ds(16, particles=100)
assert ds.particle_type_counts == {'io': 100}
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
More information about the yt-svn
mailing list