[yt-svn] commit/yt: xarthisius: Merged in MatthewTurk/yt (pull request #2418)
commits-noreply at bitbucket.org
Wed Jan 18 07:59:15 PST 2017
1 new commit in yt:
https://bitbucket.org/yt_analysis/yt/commits/771123590278/
Changeset: 771123590278
Branch: yt
User: xarthisius
Date: 2017-01-18 15:58:47+00:00
Summary: Merged in MatthewTurk/yt (pull request #2418)
Enable refine_by to be an array
Affected #: 6 files
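The central pattern in this changeset: refine_by may now be either a scalar or a per-dimension sequence, and the Python-level callers normalize it to a 3-element int64 array before handing it to the Cython fill_region routine. A minimal standalone sketch of that normalization, using numpy's iterable check in place of yt's own iterable helper (an assumption made here for self-containment):

    import numpy as np

    def normalize_refine_by(refine_by):
        # Accept a scalar (e.g. 2) or a per-dimension sequence
        # (e.g. [5, 1, 1]); always return a 3-element int64 array,
        # mirroring the normalization added in this changeset.
        if not np.iterable(refine_by):
            refine_by = [refine_by, refine_by, refine_by]
        return np.array(refine_by, dtype="i8")

    print(normalize_refine_by(2))          # [2 2 2]
    print(normalize_refine_by([5, 1, 1]))  # [5 1 1]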
diff -r fc3d1369fd2821a203b0bae4b94b53915254f468 -r 771123590278f97c40911a6bc318174de90cdd0e yt/data_objects/construction_data_containers.py
--- a/yt/data_objects/construction_data_containers.py
+++ b/yt/data_objects/construction_data_containers.py
@@ -666,13 +666,17 @@
for field in fields]
domain_dims = self.ds.domain_dimensions.astype("int64") \
* self.ds.relative_refinement(0, self.level)
+ refine_by = self.ds.refine_by
+ if not iterable(self.ds.refine_by):
+ refine_by = [refine_by, refine_by, refine_by]
+ refine_by = np.array(refine_by, dtype="i8")
for chunk in self._data_source.chunks(fields, "io"):
input_fields = [chunk[field] for field in fields]
# NOTE: This usage of "refine_by" is actually *okay*, because it's
# being used with respect to iref, which is *already* scaled!
fill_region(input_fields, output_fields, self.level,
self.global_startindex, chunk.icoords, chunk.ires,
- domain_dims, self.ds.refine_by)
+ domain_dims, refine_by)
for name, v in zip(fields, output_fields):
fi = self.ds._get_field_info(*name)
self[name] = self.ds.arr(v, fi.units)
@@ -940,6 +944,12 @@
if len(fields) == 0: return
ls = self._initialize_level_state(fields)
min_level = self._compute_minimum_level()
+ # NOTE: This usage of "refine_by" is actually *okay*, because it's
+ # being used with respect to iref, which is *already* scaled!
+ refine_by = self.ds.refine_by
+ if not iterable(self.ds.refine_by):
+ refine_by = [refine_by, refine_by, refine_by]
+ refine_by = np.array(refine_by, dtype="i8")
for level in range(self.level + 1):
if level < min_level:
self._update_level_state(ls)
@@ -954,11 +964,9 @@
for chunk in ls.data_source.chunks(fields, "io"):
chunk[fields[0]]
input_fields = [chunk[field] for field in fields]
- # NOTE: This usage of "refine_by" is actually *okay*, because it's
- # being used with respect to iref, which is *already* scaled!
tot -= fill_region(input_fields, ls.fields, ls.current_level,
ls.global_startindex, chunk.icoords,
- chunk.ires, domain_dims, self.ds.refine_by)
+ chunk.ires, domain_dims, refine_by)
if level == 0 and tot != 0:
raise RuntimeError
self._update_level_state(ls)
diff -r fc3d1369fd2821a203b0bae4b94b53915254f468 -r 771123590278f97c40911a6bc318174de90cdd0e yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -717,7 +717,7 @@
cname = cls.__name__
if cname.endswith("Base"): cname = cname[:-4]
self._add_object_class(name, cls)
- if self.refine_by != 2 and hasattr(self, 'proj') and \
+ if not np.all(self.refine_by == 2) and hasattr(self, 'proj') and \
hasattr(self, 'overlap_proj'):
mylog.warning("Refine by something other than two: reverting to"
+ " overlap_proj")
diff -r fc3d1369fd2821a203b0bae4b94b53915254f468 -r 771123590278f97c40911a6bc318174de90cdd0e yt/data_objects/tests/test_refinement.py
--- /dev/null
+++ b/yt/data_objects/tests/test_refinement.py
@@ -0,0 +1,49 @@
+from yt.testing import \
+ assert_array_equal, \
+ assert_equal
+import yt
+import numpy as np
+
+def setup_fake_refby():
+ refine_by=np.array([5, 1, 1])
+ top_grid_dim = [100, 10, 2]
+ n1=100
+ n2=10
+ n3=2
+
+ grid_data = [
+ dict(left_edge = [0.0, 0.0, 0.0],
+ right_edge = [1.0, np.pi, np.pi*2.],
+ level = 0,
+ dimensions = np.array([n1, n2, n3])),
+ dict(left_edge = [0., 0., 0.],
+ right_edge = [0.5, np.pi, np.pi*2.],
+ level = 1,
+ dimensions = refine_by*[n1/2.0, n2, n3]),
+ ]
+
+ for g in grid_data:
+ g["density"] = (np.random.random(g["dimensions"].astype("i8")),
+ "g/cm**3")
+ bbox = np.array([[0.0, 1.0], [0.0, np.pi], [0.0, np.pi*2]])
+
+ ds = yt.load_amr_grids(grid_data, top_grid_dim,
+ bbox = bbox, geometry='spherical',
+ refine_by=refine_by, length_unit='kpc')
+ return ds
+
+def test_refine_by():
+ ds = setup_fake_refby()
+ dd = ds.all_data()
+ # This checks that we always refine_by 1 in dimensions 2 and 3
+ dims = ds.domain_dimensions*ds.refine_by**ds.max_level
+ for i in range(1, 3):
+ # Check the refine_by == 1
+ ncoords = np.unique(dd.icoords[:,i]).size
+ assert_equal(ncoords, dims[i])
+ for g in ds.index.grids:
+ dims = ds.domain_dimensions*ds.refine_by**g.Level
+ # Now we can check converting back to the reference space
+ v = ((g.icoords + 1) / dims.astype("f8")).max(axis=0)
+ v *= ds.domain_width
+ assert_array_equal(v, g.RightEdge.d)
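The dims used in test_refine_by rely on elementwise exponentiation: with an anisotropic refine_by, the effective resolution at a level scales independently per axis. A quick check of that arithmetic under the test's own setup:

    import numpy as np

    domain_dimensions = np.array([100, 10, 2])
    refine_by = np.array([5, 1, 1])

    # At level 1 only the first axis is refined; the other two axes
    # keep the root-grid resolution.
    print(domain_dimensions * refine_by**1)  # [500  10   2]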
diff -r fc3d1369fd2821a203b0bae4b94b53915254f468 -r 771123590278f97c40911a6bc318174de90cdd0e yt/frontends/stream/data_structures.py
--- a/yt/frontends/stream/data_structures.py
+++ b/yt/frontends/stream/data_structures.py
@@ -786,8 +786,12 @@
be z, x, y, this would be: ("cartesian", ("z", "x", "y")). The same
can be done for other coordinates, for instance:
("spherical", ("theta", "phi", "r")).
- refine_by : integer
- Specifies the refinement ratio between levels. Defaults to 2.
+ refine_by : integer or list/array of integers
+ Specifies the refinement ratio between levels. Defaults to 2. This
+ can be an array, in which case it specifies the ratio separately for
+ each dimension. For instance, a refinement of 1 in one dimension can
+ be used to indicate that grids span the full range of the domain in
+ that dimension.
Examples
--------
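For a user-facing view of the new parameter, here is a hedged sketch of calling load_amr_grids with an array refine_by, modeled on test_refinement.py above; the Cartesian defaults and the specific grid sizes are illustrative assumptions, not part of the changeset:

    import numpy as np
    import yt

    refine_by = np.array([5, 1, 1])
    # One root grid plus a child covering half of x, refined by 5 along
    # x only; y and z keep their level-0 dimensions.
    grid_data = [
        dict(left_edge=[0.0, 0.0, 0.0], right_edge=[1.0, 1.0, 1.0],
             level=0, dimensions=[32, 8, 8]),
        dict(left_edge=[0.0, 0.0, 0.0], right_edge=[0.5, 1.0, 1.0],
             level=1, dimensions=[80, 8, 8]),
    ]
    for g in grid_data:
        g["density"] = (np.random.random(g["dimensions"]), "g/cm**3")

    ds = yt.load_amr_grids(grid_data, [32, 8, 8], refine_by=refine_by)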
diff -r fc3d1369fd2821a203b0bae4b94b53915254f468 -r 771123590278f97c40911a6bc318174de90cdd0e yt/utilities/lib/misc_utilities.pyx
--- a/yt/utilities/lib/misc_utilities.pyx
+++ b/yt/utilities/lib/misc_utilities.pyx
@@ -813,10 +813,10 @@
np.ndarray[np.int64_t, ndim=2] ipos,
np.ndarray[np.int64_t, ndim=1] ires,
np.ndarray[np.int64_t, ndim=1] level_dims,
- np.int64_t refine_by = 2
+ np.ndarray[np.int64_t, ndim=1] refine_by
):
cdef int i, n
- cdef np.int64_t tot = 0, oi, oj, ok, rf
+ cdef np.int64_t tot = 0, oi, oj, ok, rf[3]
cdef np.int64_t iind[3]
cdef np.int64_t oind[3]
cdef np.int64_t dim[3]
@@ -844,15 +844,16 @@
ofield = output_fields[n]
ifield = input_fields[n]
for i in range(ipos.shape[0]):
- rf = refine_by**(output_level - ires[i])
+ for k in range(3):
+ rf[k] = refine_by[k]**(output_level - ires[i])
for wi in range(3):
if offsets[0][wi] == 0: continue
off = (left_index[0] + level_dims[0]*(wi-1))
- iind[0] = ipos[i, 0] * rf - off
+ iind[0] = ipos[i, 0] * rf[0] - off
# rf here is the "refinement factor", or, the number of zones
# that this zone could potentially contribute to our filled
# grid.
- for oi in range(rf):
+ for oi in range(rf[0]):
# Now we need to apply our offset
oind[0] = oi + iind[0]
if oind[0] < 0:
@@ -862,8 +863,8 @@
for wj in range(3):
if offsets[1][wj] == 0: continue
off = (left_index[1] + level_dims[1]*(wj-1))
- iind[1] = ipos[i, 1] * rf - off
- for oj in range(rf):
+ iind[1] = ipos[i, 1] * rf[1] - off
+ for oj in range(rf[1]):
oind[1] = oj + iind[1]
if oind[1] < 0:
continue
@@ -872,8 +873,8 @@
for wk in range(3):
if offsets[2][wk] == 0: continue
off = (left_index[2] + level_dims[2]*(wk-1))
- iind[2] = ipos[i, 2] * rf - off
- for ok in range(rf):
+ iind[2] = ipos[i, 2] * rf[2] - off
+ for ok in range(rf[2]):
oind[2] = ok + iind[2]
if oind[2] < 0:
continue
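The core of the fill_region change: the former scalar rf = refine_by**(output_level - ires[i]) becomes a per-axis factor, so a single input zone can contribute a different number of filled zones along each dimension. The same computation in pure Python, with illustrative values:

    import numpy as np

    refine_by = np.array([5, 1, 1], dtype="i8")
    output_level = 2
    ires_i = 0  # level of the i-th input zone

    # Per-axis count of output zones each input zone can fill; this
    # replaces the single scalar rf in the Cython loop above.
    rf = refine_by ** (output_level - ires_i)
    print(rf)  # [25  1  1]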
diff -r fc3d1369fd2821a203b0bae4b94b53915254f468 -r 771123590278f97c40911a6bc318174de90cdd0e yt/utilities/lib/tests/test_fill_region.py
--- a/yt/utilities/lib/tests/test_fill_region.py
+++ b/yt/utilities/lib/tests/test_fill_region.py
@@ -25,7 +25,8 @@
ires = np.zeros(NDIM*NDIM*NDIM, "int64")
ddims = np.array([NDIM, NDIM, NDIM], dtype="int64") * rf
fill_region(input_fields, output_fields, level,
- left_index, ipos, ires, ddims, 2)
+ left_index, ipos, ires, ddims,
+ np.array([2, 2, 2], dtype="i8"))
for r in range(level + 1):
for o, i in zip(output_fields, v):
assert_equal( o[r::rf,r::rf,r::rf], i)
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.