[yt-svn] commit/yt: MatthewTurk: Merged in xarthisius/yt (pull request #1990)
commits-noreply at bitbucket.org
commits-noreply at bitbucket.org
Wed Feb 24 09:30:15 PST 2016
1 new commit in yt:
https://bitbucket.org/yt_analysis/yt/commits/ba82b89be0db/
Changeset: ba82b89be0db
Branch: yt
User: MatthewTurk
Date: 2016-02-24 17:30:03+00:00
Summary: Merged in xarthisius/yt (pull request #1990)
Update data in kd-Tree to respect 'log_fields' value during AMRKDTree.set_fields call
Affected #: 2 files
diff -r 658172e6838146e6eb47c1abd069dc82819f3992 -r ba82b89be0dbd9b6535a83589000948579e8ef57 yt/utilities/amr_kdtree/amr_kdtree.py
--- a/yt/utilities/amr_kdtree/amr_kdtree.py
+++ b/yt/utilities/amr_kdtree/amr_kdtree.py
@@ -14,6 +14,7 @@
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
+import operator
import numpy as np
from yt.funcs import mylog
@@ -42,19 +43,28 @@
steps = np.array([[-1, -1, -1], [-1, -1, 0], [-1, -1, 1],
[-1, 0, -1], [-1, 0, 0], [-1, 0, 1],
[-1, 1, -1], [-1, 1, 0], [-1, 1, 1],
-
+
[ 0, -1, -1], [ 0, -1, 0], [ 0, -1, 1],
[ 0, 0, -1],
# [ 0, 0, 0],
[ 0, 0, 1],
[ 0, 1, -1], [ 0, 1, 0], [ 0, 1, 1],
-
+
[ 1, -1, -1], [ 1, -1, 0], [ 1, -1, 1],
[ 1, 0, -1], [ 1, 0, 0], [ 1, 0, 1],
[ 1, 1, -1], [ 1, 1, 0], [ 1, 1, 1] ])
+def _apply_log(data, log_changed, log_new):
+ '''Helper used to set log10/10^ to data in AMRKDTree'''
+ if not log_changed:
+ return
+ if log_new:
+ np.log10(data, data)
+ else:
+ np.power(10.0, data, data)
+
class Tree(object):
- def __init__(self, ds, comm_rank=0, comm_size=1, left=None, right=None,
+ def __init__(self, ds, comm_rank=0, comm_size=1, left=None, right=None,
min_level=None, max_level=None, data_source=None):
self.ds = ds
@@ -155,6 +165,7 @@
self.brick_dimensions = []
self.sdx = ds.index.get_smallest_dx()
+ self.regenerate_data = True
self._initialized = False
try:
self._id_offset = ds.index.grids[0]._id_offset
@@ -171,13 +182,25 @@
data_source=data_source)
def set_fields(self, fields, log_fields, no_ghost):
- self.fields = self.data_source._determine_fields(fields)
+ new_fields = self.data_source._determine_fields(fields)
+ self.regenerate_data = \
+ self.fields is None or \
+ len(self.fields) != len(new_fields) or \
+ self.fields != new_fields
+ self.fields = new_fields
+
+ if self.log_fields is not None:
+ flip_log = map(operator.ne, self.log_fields, log_fields)
+ else:
+ flip_log = [False] * len(log_fields)
self.log_fields = log_fields
+
self.no_ghost = no_ghost
del self.bricks, self.brick_dimensions
self.brick_dimensions = []
bricks = []
for b in self.traverse():
+ map(_apply_log, b.my_data, flip_log, log_fields)
bricks.append(b)
self.bricks = np.array(bricks)
self.brick_dimensions = np.array(self.brick_dimensions)
@@ -261,7 +284,8 @@
return scatter_image(self.comm, owners[1], image)
def get_brick_data(self, node):
- if node.data is not None: return node.data
+ if node.data is not None and not self.regenerate_data:
+ return node.data
grid = self.ds.index.grids[node.grid - self._id_offset]
dds = grid.dds.ndarray_view()
gle = grid.LeftEdge.ndarray_view()
@@ -273,7 +297,7 @@
assert(np.all(grid.LeftEdge <= nle))
assert(np.all(grid.RightEdge >= nre))
- if grid in self.current_saved_grids:
+ if grid in self.current_saved_grids and not self.regenerate_data:
dds = self.current_vcds[self.current_saved_grids.index(grid)]
else:
dds = []
@@ -301,6 +325,7 @@
node.data = brick
if not self._initialized:
self.brick_dimensions.append(dims)
+ self.regenerate_data = False
return brick
def locate_brick(self, position):
@@ -327,16 +352,16 @@
cis: List of neighbor cell index tuples
Both of these are neighbors that, relative to the current cell
- index (i,j,k), are ordered as:
-
- (i-1, j-1, k-1), (i-1, j-1, k ), (i-1, j-1, k+1), ...
- (i-1, j , k-1), (i-1, j , k ), (i-1, j , k+1), ...
+ index (i,j,k), are ordered as:
+
+ (i-1, j-1, k-1), (i-1, j-1, k ), (i-1, j-1, k+1), ...
+ (i-1, j , k-1), (i-1, j , k ), (i-1, j , k+1), ...
(i+1, j+1, k-1), (i-1, j-1, k ), (i+1, j+1, k+1)
That is they start from the lower left and proceed to upper
right varying the third index most frequently. Note that the
center cell (i,j,k) is ommitted.
-
+
"""
ci = np.array(ci)
center_dds = grid.dds
@@ -351,7 +376,7 @@
new_positions = position + steps*offs
new_positions = [periodic_position(p, self.ds) for p in new_positions]
grids[in_grid] = grid
-
+
get_them = np.argwhere(in_grid).ravel()
cis[in_grid] = new_cis[in_grid]
@@ -367,11 +392,11 @@
return grids, cis
def locate_neighbors_from_position(self, position):
- r"""Given a position, finds the 26 neighbor grids
+ r"""Given a position, finds the 26 neighbor grids
and cell indices.
This is a mostly a wrapper for locate_neighbors.
-
+
Parameters
----------
position: array-like
@@ -383,16 +408,16 @@
cis: List of neighbor cell index tuples
Both of these are neighbors that, relative to the current cell
- index (i,j,k), are ordered as:
-
- (i-1, j-1, k-1), (i-1, j-1, k ), (i-1, j-1, k+1), ...
- (i-1, j , k-1), (i-1, j , k ), (i-1, j , k+1), ...
+ index (i,j,k), are ordered as:
+
+ (i-1, j-1, k-1), (i-1, j-1, k ), (i-1, j-1, k+1), ...
+ (i-1, j , k-1), (i-1, j , k ), (i-1, j , k+1), ...
(i+1, j+1, k-1), (i-1, j-1, k ), (i+1, j+1, k+1)
That is they start from the lower left and proceed to upper
right varying the third index most frequently. Note that the
center cell (i,j,k) is ommitted.
-
+
"""
position = np.array(position)
grid = self.ds.index.grids[self.locate_brick(position).grid -
@@ -421,7 +446,7 @@
del f
if self.comm.rank != (self.comm.size-1):
self.comm.send_array([0],self.comm.rank+1, tag=self.comm.rank)
-
+
def load_kd_bricks(self,fn=None):
if fn is None:
fn = '%s_kd_bricks.h5' % self.ds
@@ -435,10 +460,10 @@
data = [f["brick_%s_%s" %
(hex(i), field)][:].astype('float64') for field in self.fields]
node.data = PartitionedGrid(node.grid.id, data,
- node.l_corner.copy(),
- node.r_corner.copy(),
+ node.l_corner.copy(),
+ node.r_corner.copy(),
node.dims.astype('int64'))
-
+
self.bricks.append(node.data)
self.brick_dimensions.append(node.dims)
@@ -457,15 +482,15 @@
if self.comm.size == 0: return
nid, pid, lid, rid, les, res, gid, splitdims, splitposs = \
self.get_node_arrays()
- nid = self.comm.par_combine_object(nid, 'cat', 'list')
- pid = self.comm.par_combine_object(pid, 'cat', 'list')
- lid = self.comm.par_combine_object(lid, 'cat', 'list')
- rid = self.comm.par_combine_object(rid, 'cat', 'list')
- gid = self.comm.par_combine_object(gid, 'cat', 'list')
- les = self.comm.par_combine_object(les, 'cat', 'list')
- res = self.comm.par_combine_object(res, 'cat', 'list')
- splitdims = self.comm.par_combine_object(splitdims, 'cat', 'list')
- splitposs = self.comm.par_combine_object(splitposs, 'cat', 'list')
+ nid = self.comm.par_combine_object(nid, 'cat', 'list')
+ pid = self.comm.par_combine_object(pid, 'cat', 'list')
+ lid = self.comm.par_combine_object(lid, 'cat', 'list')
+ rid = self.comm.par_combine_object(rid, 'cat', 'list')
+ gid = self.comm.par_combine_object(gid, 'cat', 'list')
+ les = self.comm.par_combine_object(les, 'cat', 'list')
+ res = self.comm.par_combine_object(res, 'cat', 'list')
+ splitdims = self.comm.par_combine_object(splitdims, 'cat', 'list')
+ splitposs = self.comm.par_combine_object(splitposs, 'cat', 'list')
nid = np.array(nid)
self.rebuild_tree_from_array(nid, pid, lid,
rid, les, res, gid, splitdims, splitposs)
@@ -481,25 +506,25 @@
splitdims = []
splitposs = []
for node in depth_first_touch(self.tree.trunk):
- nids.append(node.node_id)
- les.append(node.get_left_edge())
- res.append(node.get_right_edge())
+ nids.append(node.node_id)
+ les.append(node.get_left_edge())
+ res.append(node.get_right_edge())
if node.left is None:
- leftids.append(-1)
+ leftids.append(-1)
else:
- leftids.append(node.left.node_id)
+ leftids.append(node.left.node_id)
if node.right is None:
- rightids.append(-1)
+ rightids.append(-1)
else:
- rightids.append(node.right.node_id)
+ rightids.append(node.right.node_id)
if node.parent is None:
- parentids.append(-1)
+ parentids.append(-1)
else:
- parentids.append(node.parent.node_id)
+ parentids.append(node.parent.node_id)
if node.grid is None:
- gridids.append(-1)
+ gridids.append(-1)
else:
- gridids.append(node.grid)
+ gridids.append(node.grid)
splitdims.append(node.get_split_dim())
splitposs.append(node.get_split_pos())
@@ -510,10 +535,10 @@
rids, les, res, gids, splitdims, splitposs):
del self.tree.trunk
- self.tree.trunk = Node(None,
+ self.tree.trunk = Node(None,
None,
None,
- les[0], res[0], gids[0], nids[0])
+ les[0], res[0], gids[0], nids[0])
N = nids.shape[0]
for i in range(N):
@@ -521,14 +546,14 @@
n.set_left_edge(les[i])
n.set_right_edge(res[i])
if lids[i] != -1 and n.left is None:
- n.left = Node(n, None, None,
- np.zeros(3, dtype='float64'),
- np.zeros(3, dtype='float64'),
+ n.left = Node(n, None, None,
+ np.zeros(3, dtype='float64'),
+ np.zeros(3, dtype='float64'),
-1, lids[i])
if rids[i] != -1 and n.right is None:
- n.right = Node(n, None, None,
- np.zeros(3, dtype='float64'),
- np.zeros(3, dtype='float64'),
+ n.right = Node(n, None, None,
+ np.zeros(3, dtype='float64'),
+ np.zeros(3, dtype='float64'),
-1, rids[i])
if gids[i] != -1:
n.grid = gids[i]
@@ -541,9 +566,9 @@
def count_volume(self):
return kd_sum_volume(self.tree.trunk)
-
+
def count_cells(self):
- return self.tree.sum_cells()
+ return self.tree.sum_cells()
if __name__ == "__main__":
import yt
diff -r 658172e6838146e6eb47c1abd069dc82819f3992 -r ba82b89be0dbd9b6535a83589000948579e8ef57 yt/utilities/tests/test_amr_kdtree.py
--- a/yt/utilities/tests/test_amr_kdtree.py
+++ b/yt/utilities/tests/test_amr_kdtree.py
@@ -19,8 +19,9 @@
import yt.utilities.initial_conditions as ic
import yt.utilities.flagging_methods as fm
from yt.frontends.stream.api import load_uniform_grid, refine_amr
-from yt.testing import assert_equal
+from yt.testing import assert_equal, assert_almost_equal, fake_amr_ds
import numpy as np
+import itertools
def test_amr_kdtree_coverage():
@@ -61,3 +62,23 @@
tree_ok *= np.all(dims > 0)
yield assert_equal, True, tree_ok
+
+def test_amr_kdtree_set_fields():
+ ds = fake_amr_ds(fields=["density", "pressure"])
+ dd = ds.all_data()
+
+ fields = ds.field_list
+ dd.tiles.set_fields(fields, [True, True], False)
+ gold = {}
+ for i, block in enumerate(dd.tiles.traverse()):
+ gold[i] = [data.copy() for data in block.my_data]
+
+ for log_fields in itertools.product([True, False], [True, False]):
+ dd.tiles.set_fields(fields, log_fields, False)
+ for iblock, block in enumerate(dd.tiles.traverse()):
+ for i in range(len(fields)):
+ if log_fields[i]:
+ data = block.my_data[i]
+ else:
+ data = np.log10(block.my_data[i])
+ assert_almost_equal(gold[iblock][i], data)
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
this message because the commit-notification service is enabled for
the recipient of this email.
More information about the yt-svn
mailing list