[yt-svn] commit/yt: 3 new changesets
commits-noreply at bitbucket.org
commits-noreply at bitbucket.org
Wed Mar 2 09:26:45 PST 2016
3 new commits in yt:
https://bitbucket.org/yt_analysis/yt/commits/f76d7177e56b/
Changeset: f76d7177e56b
Branch: yt
User: MatthewTurk
Date: 2016-02-26 21:57:48+00:00
Summary: Move quantities to be on the top level of YTSelection.
Affected #: 1 file
diff -r 7130b7cef71f9422cc6191b755e1bd5ca96fbaa0 -r f76d7177e56b525a92ad0d335ad8ad2f0a3dea9e yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -1054,6 +1054,7 @@
"of lower dimensionality (%u vs %u)" %
(data_source._dimensionality, self._dimensionality))
self.field_parameters.update(data_source.field_parameters)
+ self.quantities = DerivedQuantityCollection(self)
@property
def selector(self):
@@ -1457,7 +1458,6 @@
self._set_center(center)
self.coords = None
self._grids = None
- self.quantities = DerivedQuantityCollection(self)
def cut_region(self, field_cuts, field_parameters=None):
"""
https://bitbucket.org/yt_analysis/yt/commits/069de0afd8c5/
Changeset: 069de0afd8c5
Branch: yt
User: MatthewTurk
Date: 2016-02-26 22:08:12+00:00
Summary: Adding slices to the derived quantities tests.
Affected #: 1 file
diff -r f76d7177e56b525a92ad0d335ad8ad2f0a3dea9e -r 069de0afd8c56ab3fc6f3b732db128d3042d72e5 yt/data_objects/tests/test_derived_quantities.py
--- a/yt/data_objects/tests/test_derived_quantities.py
+++ b/yt/data_objects/tests/test_derived_quantities.py
@@ -13,109 +13,109 @@
for nprocs in [1, 2, 4, 8]:
ds = fake_random_ds(16, nprocs = nprocs, fields = ("density",
"velocity_x", "velocity_y", "velocity_z"))
- sp = ds.sphere("c", (0.25, 'unitary'))
- mi, ma = sp.quantities["Extrema"]("density")
- yield assert_equal, mi, np.nanmin(sp["density"])
- yield assert_equal, ma, np.nanmax(sp["density"])
- dd = ds.all_data()
- mi, ma = dd.quantities["Extrema"]("density")
- yield assert_equal, mi, np.nanmin(dd["density"])
- yield assert_equal, ma, np.nanmax(dd["density"])
- sp = ds.sphere("max", (0.25, 'unitary'))
- yield assert_equal, np.any(np.isnan(sp["radial_velocity"])), False
- mi, ma = dd.quantities["Extrema"]("radial_velocity")
- yield assert_equal, mi, np.nanmin(dd["radial_velocity"])
- yield assert_equal, ma, np.nanmax(dd["radial_velocity"])
+ for sp in [ds.sphere("c", (0.25, 'unitary')), ds.r[0.5,:,:]]:
+ mi, ma = sp.quantities["Extrema"]("density")
+ yield assert_equal, mi, np.nanmin(sp["density"])
+ yield assert_equal, ma, np.nanmax(sp["density"])
+ dd = ds.all_data()
+ mi, ma = dd.quantities["Extrema"]("density")
+ yield assert_equal, mi, np.nanmin(dd["density"])
+ yield assert_equal, ma, np.nanmax(dd["density"])
+ sp = ds.sphere("max", (0.25, 'unitary'))
+ yield assert_equal, np.any(np.isnan(sp["radial_velocity"])), False
+ mi, ma = dd.quantities["Extrema"]("radial_velocity")
+ yield assert_equal, mi, np.nanmin(dd["radial_velocity"])
+ yield assert_equal, ma, np.nanmax(dd["radial_velocity"])
def test_average():
for nprocs in [1, 2, 4, 8]:
ds = fake_random_ds(16, nprocs = nprocs, fields = ("density",))
- ad = ds.all_data()
+ for ad in [ds.all_data(), ds.r[0.5, :, :]]:
- my_mean = ad.quantities["WeightedAverageQuantity"]("density", "ones")
- yield assert_rel_equal, my_mean, ad["density"].mean(), 12
+ my_mean = ad.quantities["WeightedAverageQuantity"]("density", "ones")
+ yield assert_rel_equal, my_mean, ad["density"].mean(), 12
- my_mean = ad.quantities["WeightedAverageQuantity"]("density", "cell_mass")
- a_mean = (ad["density"] * ad["cell_mass"]).sum() / ad["cell_mass"].sum()
- yield assert_rel_equal, my_mean, a_mean, 12
+ my_mean = ad.quantities["WeightedAverageQuantity"]("density", "cell_mass")
+ a_mean = (ad["density"] * ad["cell_mass"]).sum() / ad["cell_mass"].sum()
+ yield assert_rel_equal, my_mean, a_mean, 12
def test_variance():
for nprocs in [1, 2, 4, 8]:
ds = fake_random_ds(16, nprocs = nprocs, fields = ("density", ))
- ad = ds.all_data()
+ for ad in [ds.all_data(), ds.r[0.5, :, :]]:
- my_std, my_mean = ad.quantities["WeightedVariance"]("density", "ones")
- yield assert_rel_equal, my_mean, ad["density"].mean(), 12
- yield assert_rel_equal, my_std, ad["density"].std(), 12
+ my_std, my_mean = ad.quantities["WeightedVariance"]("density", "ones")
+ yield assert_rel_equal, my_mean, ad["density"].mean(), 12
+ yield assert_rel_equal, my_std, ad["density"].std(), 12
- my_std, my_mean = ad.quantities["WeightedVariance"]("density", "cell_mass")
- a_mean = (ad["density"] * ad["cell_mass"]).sum() / ad["cell_mass"].sum()
- yield assert_rel_equal, my_mean, a_mean, 12
- a_std = np.sqrt((ad["cell_mass"] * (ad["density"] - a_mean)**2).sum() /
- ad["cell_mass"].sum())
- yield assert_rel_equal, my_std, a_std, 12
+ my_std, my_mean = ad.quantities["WeightedVariance"]("density", "cell_mass")
+ a_mean = (ad["density"] * ad["cell_mass"]).sum() / ad["cell_mass"].sum()
+ yield assert_rel_equal, my_mean, a_mean, 12
+ a_std = np.sqrt((ad["cell_mass"] * (ad["density"] - a_mean)**2).sum() /
+ ad["cell_mass"].sum())
+ yield assert_rel_equal, my_std, a_std, 12
def test_max_location():
for nprocs in [1, 2, 4, 8]:
ds = fake_random_ds(16, nprocs = nprocs, fields = ("density", ))
- ad = ds.all_data()
+ for ad in [ds.all_data(), ds.r[0.5, :, :]]:
- mv, x, y, z = ad.quantities.max_location(("gas", "density"))
+ mv, x, y, z = ad.quantities.max_location(("gas", "density"))
- yield assert_equal, mv, ad["density"].max()
+ yield assert_equal, mv, ad["density"].max()
- mi = np.argmax(ad["density"])
+ mi = np.argmax(ad["density"])
- yield assert_equal, ad["x"][mi], x
- yield assert_equal, ad["y"][mi], y
- yield assert_equal, ad["z"][mi], z
+ yield assert_equal, ad["x"][mi], x
+ yield assert_equal, ad["y"][mi], y
+ yield assert_equal, ad["z"][mi], z
def test_min_location():
for nprocs in [1, 2, 4, 8]:
ds = fake_random_ds(16, nprocs = nprocs, fields = ("density", ))
- ad = ds.all_data()
+ for ad in [ds.all_data(), ds.r[0.5, :, :]]:
- mv, x, y, z = ad.quantities.min_location(("gas", "density"))
+ mv, x, y, z = ad.quantities.min_location(("gas", "density"))
- yield assert_equal, mv, ad["density"].min()
+ yield assert_equal, mv, ad["density"].min()
- mi = np.argmin(ad["density"])
+ mi = np.argmin(ad["density"])
- yield assert_equal, ad["x"][mi], x
- yield assert_equal, ad["y"][mi], y
- yield assert_equal, ad["z"][mi], z
+ yield assert_equal, ad["x"][mi], x
+ yield assert_equal, ad["y"][mi], y
+ yield assert_equal, ad["z"][mi], z
def test_sample_at_min_field_values():
for nprocs in [1, 2, 4, 8]:
ds = fake_random_ds(16, nprocs = nprocs,
fields = ("density", "temperature", "velocity_x"))
- ad = ds.all_data()
+ for ad in [ds.all_data(), ds.r[0.5, :, :]]:
- mv, temp, vm = ad.quantities.sample_at_min_field_values(
- "density", ["temperature", "velocity_x"])
+ mv, temp, vm = ad.quantities.sample_at_min_field_values(
+ "density", ["temperature", "velocity_x"])
- yield assert_equal, mv, ad["density"].min()
+ yield assert_equal, mv, ad["density"].min()
- mi = np.argmin(ad["density"])
+ mi = np.argmin(ad["density"])
- yield assert_equal, ad["temperature"][mi], temp
- yield assert_equal, ad["velocity_x"][mi], vm
+ yield assert_equal, ad["temperature"][mi], temp
+ yield assert_equal, ad["velocity_x"][mi], vm
def test_sample_at_max_field_values():
for nprocs in [1, 2, 4, 8]:
ds = fake_random_ds(16, nprocs = nprocs,
fields = ("density", "temperature", "velocity_x"))
- ad = ds.all_data()
+ for ad in [ds.all_data(), ds.r[0.5, :, :]]:
- mv, temp, vm = ad.quantities.sample_at_max_field_values(
- "density", ["temperature", "velocity_x"])
+ mv, temp, vm = ad.quantities.sample_at_max_field_values(
+ "density", ["temperature", "velocity_x"])
- yield assert_equal, mv, ad["density"].max()
+ yield assert_equal, mv, ad["density"].max()
- mi = np.argmax(ad["density"])
+ mi = np.argmax(ad["density"])
- yield assert_equal, ad["temperature"][mi], temp
- yield assert_equal, ad["velocity_x"][mi], vm
+ yield assert_equal, ad["temperature"][mi], temp
+ yield assert_equal, ad["velocity_x"][mi], vm
if __name__ == "__main__":
for i in test_extrema():
https://bitbucket.org/yt_analysis/yt/commits/bf1389cb5d09/
Changeset: bf1389cb5d09
Branch: yt
User: jzuhone
Date: 2016-03-02 17:26:34+00:00
Summary: Merged in MatthewTurk/yt (pull request #2006)
Move .quantities to top level of YTSelectionContainer
Affected #: 2 files
diff -r 764f5ed10e7ec972b70dbccbfb201c03d2657e58 -r bf1389cb5d0995907a78e24adc00dd6b5788ec03 yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -1054,6 +1054,7 @@
"of lower dimensionality (%u vs %u)" %
(data_source._dimensionality, self._dimensionality))
self.field_parameters.update(data_source.field_parameters)
+ self.quantities = DerivedQuantityCollection(self)
@property
def selector(self):
@@ -1457,7 +1458,6 @@
self._set_center(center)
self.coords = None
self._grids = None
- self.quantities = DerivedQuantityCollection(self)
def cut_region(self, field_cuts, field_parameters=None):
"""
diff -r 764f5ed10e7ec972b70dbccbfb201c03d2657e58 -r bf1389cb5d0995907a78e24adc00dd6b5788ec03 yt/data_objects/tests/test_derived_quantities.py
--- a/yt/data_objects/tests/test_derived_quantities.py
+++ b/yt/data_objects/tests/test_derived_quantities.py
@@ -13,109 +13,109 @@
for nprocs in [1, 2, 4, 8]:
ds = fake_random_ds(16, nprocs = nprocs, fields = ("density",
"velocity_x", "velocity_y", "velocity_z"))
- sp = ds.sphere("c", (0.25, 'unitary'))
- mi, ma = sp.quantities["Extrema"]("density")
- yield assert_equal, mi, np.nanmin(sp["density"])
- yield assert_equal, ma, np.nanmax(sp["density"])
- dd = ds.all_data()
- mi, ma = dd.quantities["Extrema"]("density")
- yield assert_equal, mi, np.nanmin(dd["density"])
- yield assert_equal, ma, np.nanmax(dd["density"])
- sp = ds.sphere("max", (0.25, 'unitary'))
- yield assert_equal, np.any(np.isnan(sp["radial_velocity"])), False
- mi, ma = dd.quantities["Extrema"]("radial_velocity")
- yield assert_equal, mi, np.nanmin(dd["radial_velocity"])
- yield assert_equal, ma, np.nanmax(dd["radial_velocity"])
+ for sp in [ds.sphere("c", (0.25, 'unitary')), ds.r[0.5,:,:]]:
+ mi, ma = sp.quantities["Extrema"]("density")
+ yield assert_equal, mi, np.nanmin(sp["density"])
+ yield assert_equal, ma, np.nanmax(sp["density"])
+ dd = ds.all_data()
+ mi, ma = dd.quantities["Extrema"]("density")
+ yield assert_equal, mi, np.nanmin(dd["density"])
+ yield assert_equal, ma, np.nanmax(dd["density"])
+ sp = ds.sphere("max", (0.25, 'unitary'))
+ yield assert_equal, np.any(np.isnan(sp["radial_velocity"])), False
+ mi, ma = dd.quantities["Extrema"]("radial_velocity")
+ yield assert_equal, mi, np.nanmin(dd["radial_velocity"])
+ yield assert_equal, ma, np.nanmax(dd["radial_velocity"])
def test_average():
for nprocs in [1, 2, 4, 8]:
ds = fake_random_ds(16, nprocs = nprocs, fields = ("density",))
- ad = ds.all_data()
+ for ad in [ds.all_data(), ds.r[0.5, :, :]]:
- my_mean = ad.quantities["WeightedAverageQuantity"]("density", "ones")
- yield assert_rel_equal, my_mean, ad["density"].mean(), 12
+ my_mean = ad.quantities["WeightedAverageQuantity"]("density", "ones")
+ yield assert_rel_equal, my_mean, ad["density"].mean(), 12
- my_mean = ad.quantities["WeightedAverageQuantity"]("density", "cell_mass")
- a_mean = (ad["density"] * ad["cell_mass"]).sum() / ad["cell_mass"].sum()
- yield assert_rel_equal, my_mean, a_mean, 12
+ my_mean = ad.quantities["WeightedAverageQuantity"]("density", "cell_mass")
+ a_mean = (ad["density"] * ad["cell_mass"]).sum() / ad["cell_mass"].sum()
+ yield assert_rel_equal, my_mean, a_mean, 12
def test_variance():
for nprocs in [1, 2, 4, 8]:
ds = fake_random_ds(16, nprocs = nprocs, fields = ("density", ))
- ad = ds.all_data()
+ for ad in [ds.all_data(), ds.r[0.5, :, :]]:
- my_std, my_mean = ad.quantities["WeightedVariance"]("density", "ones")
- yield assert_rel_equal, my_mean, ad["density"].mean(), 12
- yield assert_rel_equal, my_std, ad["density"].std(), 12
+ my_std, my_mean = ad.quantities["WeightedVariance"]("density", "ones")
+ yield assert_rel_equal, my_mean, ad["density"].mean(), 12
+ yield assert_rel_equal, my_std, ad["density"].std(), 12
- my_std, my_mean = ad.quantities["WeightedVariance"]("density", "cell_mass")
- a_mean = (ad["density"] * ad["cell_mass"]).sum() / ad["cell_mass"].sum()
- yield assert_rel_equal, my_mean, a_mean, 12
- a_std = np.sqrt((ad["cell_mass"] * (ad["density"] - a_mean)**2).sum() /
- ad["cell_mass"].sum())
- yield assert_rel_equal, my_std, a_std, 12
+ my_std, my_mean = ad.quantities["WeightedVariance"]("density", "cell_mass")
+ a_mean = (ad["density"] * ad["cell_mass"]).sum() / ad["cell_mass"].sum()
+ yield assert_rel_equal, my_mean, a_mean, 12
+ a_std = np.sqrt((ad["cell_mass"] * (ad["density"] - a_mean)**2).sum() /
+ ad["cell_mass"].sum())
+ yield assert_rel_equal, my_std, a_std, 12
def test_max_location():
for nprocs in [1, 2, 4, 8]:
ds = fake_random_ds(16, nprocs = nprocs, fields = ("density", ))
- ad = ds.all_data()
+ for ad in [ds.all_data(), ds.r[0.5, :, :]]:
- mv, x, y, z = ad.quantities.max_location(("gas", "density"))
+ mv, x, y, z = ad.quantities.max_location(("gas", "density"))
- yield assert_equal, mv, ad["density"].max()
+ yield assert_equal, mv, ad["density"].max()
- mi = np.argmax(ad["density"])
+ mi = np.argmax(ad["density"])
- yield assert_equal, ad["x"][mi], x
- yield assert_equal, ad["y"][mi], y
- yield assert_equal, ad["z"][mi], z
+ yield assert_equal, ad["x"][mi], x
+ yield assert_equal, ad["y"][mi], y
+ yield assert_equal, ad["z"][mi], z
def test_min_location():
for nprocs in [1, 2, 4, 8]:
ds = fake_random_ds(16, nprocs = nprocs, fields = ("density", ))
- ad = ds.all_data()
+ for ad in [ds.all_data(), ds.r[0.5, :, :]]:
- mv, x, y, z = ad.quantities.min_location(("gas", "density"))
+ mv, x, y, z = ad.quantities.min_location(("gas", "density"))
- yield assert_equal, mv, ad["density"].min()
+ yield assert_equal, mv, ad["density"].min()
- mi = np.argmin(ad["density"])
+ mi = np.argmin(ad["density"])
- yield assert_equal, ad["x"][mi], x
- yield assert_equal, ad["y"][mi], y
- yield assert_equal, ad["z"][mi], z
+ yield assert_equal, ad["x"][mi], x
+ yield assert_equal, ad["y"][mi], y
+ yield assert_equal, ad["z"][mi], z
def test_sample_at_min_field_values():
for nprocs in [1, 2, 4, 8]:
ds = fake_random_ds(16, nprocs = nprocs,
fields = ("density", "temperature", "velocity_x"))
- ad = ds.all_data()
+ for ad in [ds.all_data(), ds.r[0.5, :, :]]:
- mv, temp, vm = ad.quantities.sample_at_min_field_values(
- "density", ["temperature", "velocity_x"])
+ mv, temp, vm = ad.quantities.sample_at_min_field_values(
+ "density", ["temperature", "velocity_x"])
- yield assert_equal, mv, ad["density"].min()
+ yield assert_equal, mv, ad["density"].min()
- mi = np.argmin(ad["density"])
+ mi = np.argmin(ad["density"])
- yield assert_equal, ad["temperature"][mi], temp
- yield assert_equal, ad["velocity_x"][mi], vm
+ yield assert_equal, ad["temperature"][mi], temp
+ yield assert_equal, ad["velocity_x"][mi], vm
def test_sample_at_max_field_values():
for nprocs in [1, 2, 4, 8]:
ds = fake_random_ds(16, nprocs = nprocs,
fields = ("density", "temperature", "velocity_x"))
- ad = ds.all_data()
+ for ad in [ds.all_data(), ds.r[0.5, :, :]]:
- mv, temp, vm = ad.quantities.sample_at_max_field_values(
- "density", ["temperature", "velocity_x"])
+ mv, temp, vm = ad.quantities.sample_at_max_field_values(
+ "density", ["temperature", "velocity_x"])
- yield assert_equal, mv, ad["density"].max()
+ yield assert_equal, mv, ad["density"].max()
- mi = np.argmax(ad["density"])
+ mi = np.argmax(ad["density"])
- yield assert_equal, ad["temperature"][mi], temp
- yield assert_equal, ad["velocity_x"][mi], vm
+ yield assert_equal, ad["temperature"][mi], temp
+ yield assert_equal, ad["velocity_x"][mi], vm
if __name__ == "__main__":
for i in test_extrema():
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you, the recipient, have the commit-notification
service enabled for this repository.
More information about the yt-svn
mailing list