[yt-svn] commit/yt: brittonsmith: Merged in MatthewTurk/yt/yt-3.0 (pull request #1104)
commits-noreply at bitbucket.org
commits-noreply at bitbucket.org
Sat Aug 2 03:46:14 PDT 2014
1 new commit in yt:
https://bitbucket.org/yt_analysis/yt/commits/9701602a49cf/
Changeset: 9701602a49cf
Branch: yt-3.0
User: brittonsmith
Date: 2014-08-02 12:46:07
Summary: Merged in MatthewTurk/yt/yt-3.0 (pull request #1104)
Update usage of level sets
Affected #: 6 files
diff -r f06b454a1c620f658ada1f635cf51ca21d6f7c5f -r 9701602a49cf2aa538b53570bb9038bc1ef441e4 yt/analysis_modules/level_sets/clump_handling.py
--- a/yt/analysis_modules/level_sets/clump_handling.py
+++ b/yt/analysis_modules/level_sets/clump_handling.py
@@ -138,7 +138,7 @@
unique_contours.update(np.unique(ff))
contour_key = uuid.uuid4().hex
base_object = getattr(self.data, 'base_object', self.data)
- add_contour_field(base_object.pf, contour_key)
+ add_contour_field(base_object.ds, contour_key)
for cid in sorted(unique_contours):
if cid == -1: continue
new_clump = base_object.cut_region(
diff -r f06b454a1c620f658ada1f635cf51ca21d6f7c5f -r 9701602a49cf2aa538b53570bb9038bc1ef441e4 yt/analysis_modules/level_sets/contour_finder.py
--- a/yt/analysis_modules/level_sets/contour_finder.py
+++ b/yt/analysis_modules/level_sets/contour_finder.py
@@ -30,24 +30,27 @@
gct = TileContourTree(min_val, max_val)
total_contours = 0
contours = {}
- empty_mask = np.ones((1,1,1), dtype="uint8")
node_ids = []
DLE = data_source.ds.domain_left_edge
+ total_vol = None
+ selector = getattr(data_source, "base_object", data_source).selector
+ masks = dict((g.id, m) for g, m in data_source.blocks)
for (g, node, (sl, dims, gi)) in data_source.tiles.slice_traverse():
node.node_ind = len(node_ids)
nid = node.node_id
node_ids.append(nid)
values = g[field][sl].astype("float64")
contour_ids = np.zeros(dims, "int64") - 1
+ mask = masks[g.id][sl].astype("uint8")
total_contours += gct.identify_contours(values, contour_ids,
- total_contours)
+ mask, total_contours)
new_contours = tree.cull_candidates(contour_ids)
tree.add_contours(new_contours)
# Now we can create a partitioned grid with the contours.
LE = (DLE + g.dds * gi).in_units("code_length").ndarray_view()
RE = LE + (dims * g.dds).in_units("code_length").ndarray_view()
pg = PartitionedGrid(g.id,
- [contour_ids.view("float64")], empty_mask,
+ [contour_ids.view("float64")], mask,
LE, RE, dims.astype("int64"))
contours[nid] = (g.Level, node.node_ind, pg, sl)
node_ids = np.array(node_ids)
diff -r f06b454a1c620f658ada1f635cf51ca21d6f7c5f -r 9701602a49cf2aa538b53570bb9038bc1ef441e4 yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -15,6 +15,7 @@
import itertools
import types
+import uuid
data_object_registry = {}
@@ -1118,11 +1119,22 @@
else:
mv = cons[level+1]
from yt.analysis_modules.level_sets.api import identify_contours
+ from yt.analysis_modules.level_sets.clump_handling import \
+ add_contour_field
nj, cids = identify_contours(self, field, cons[level], mv)
- for cid in range(nj):
- contours[level][cid] = self.cut_region(
- ["obj['contours'] == %s" % (cid + 1)],
- {'contour_slices': cids})
+ unique_contours = set([])
+ for sl_list in cids.values():
+ for sl, ff in sl_list:
+ unique_contours.update(np.unique(ff))
+ contour_key = uuid.uuid4().hex
+ # In case we're a cut region already...
+ base_object = getattr(self, 'base_object', self)
+ add_contour_field(base_object.ds, contour_key)
+ for cid in sorted(unique_contours):
+ if cid == -1: continue
+ contours[level][cid] = base_object.cut_region(
+ ["obj['contours_%s'] == %s" % (contour_key, cid)],
+ {'contour_slices_%s' % contour_key: cids})
return cons, contours
def paint_grids(self, field, value, default_value=None):
diff -r f06b454a1c620f658ada1f635cf51ca21d6f7c5f -r 9701602a49cf2aa538b53570bb9038bc1ef441e4 yt/data_objects/tests/test_connected_sets.py
--- /dev/null
+++ b/yt/data_objects/tests/test_connected_sets.py
@@ -0,0 +1,13 @@
+from yt.utilities.answer_testing.level_sets_tests import \
+ ExtractConnectedSetsTest, \
+ requires_ds, \
+ data_dir_load
+
+g30 = "IsolatedGalaxy/galaxy0030/galaxy0030"
+@requires_ds(g30, big_data=True)
+def test_connected_sets():
+ ds = data_dir_load(g30)
+ data_source = ds.disk([0.5, 0.5, 0.5], [0., 0., 1.],
+ (8, 'kpc'), (1, 'kpc'))
+ yield ExtractConnectedSetsTest(g30, data_source, ("gas", "density"),
+ 5, 1e-24, 8e-24)
diff -r f06b454a1c620f658ada1f635cf51ca21d6f7c5f -r 9701602a49cf2aa538b53570bb9038bc1ef441e4 yt/utilities/answer_testing/level_sets_tests.py
--- /dev/null
+++ b/yt/utilities/answer_testing/level_sets_tests.py
@@ -0,0 +1,50 @@
+"""
+Answer Testing for level sets
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+
+from yt.testing import *
+
+from .framework import \
+ AnswerTestingTest, requires_ds, data_dir_load
+
+class ExtractConnectedSetsTest(AnswerTestingTest):
+ _type_name = "ExtractConnectedSets"
+ _attrs = ()
+
+ def __init__(self, ds_fn, data_source, field, num_levels, min_val, max_val):
+ super(ExtractConnectedSetsTest, self).__init__(ds_fn)
+ self.data_source = data_source
+ self.field = field
+ self.num_levels = num_levels
+ self.min_val = min_val
+ self.max_val = max_val
+
+ def run(self):
+ n, all_sets = self.data_source.extract_connected_sets(
+ self.field, self.num_levels, self.min_val, self.max_val)
+ result = []
+ for level in all_sets:
+ for set_id in all_sets[level]:
+ result.append([all_sets[level][set_id]["cell_mass"].size,
+ all_sets[level][set_id]["cell_mass"].sum()])
+ result = np.array(result)
+ return result
+
+ def compare(self, new_result, old_result):
+ err_msg = "Size and/or mass of connected sets do not agree for %s." % \
+ self.ds_fn
+ assert_equal(new_result, old_result,
+ err_msg=err_msg, verbose=True)
diff -r f06b454a1c620f658ada1f635cf51ca21d6f7c5f -r 9701602a49cf2aa538b53570bb9038bc1ef441e4 yt/utilities/lib/ContourFinding.pyx
--- a/yt/utilities/lib/ContourFinding.pyx
+++ b/yt/utilities/lib/ContourFinding.pyx
@@ -296,6 +296,7 @@
@cython.wraparound(False)
def identify_contours(self, np.ndarray[np.float64_t, ndim=3] values,
np.ndarray[np.int64_t, ndim=3] contour_ids,
+ np.ndarray[np.uint8_t, ndim=3] mask,
np.int64_t start):
# This just looks at neighbor values and tries to identify which zones
# are touching by face within a given brick.
@@ -316,6 +317,7 @@
for j in range(nj):
for k in range(nk):
v = values[i,j,k]
+ if mask[i,j,k] == 0: continue
if v < self.min_val or v > self.max_val: continue
nc += 1
c1 = contour_create(nc + start)
@@ -403,7 +405,7 @@
+ vc0.dims[1]*vc0.dims[2]) * 18
# We allocate an array of fixed (maximum) size
cdef np.ndarray[np.int64_t, ndim=2] joins = np.zeros((s, 2), dtype="int64")
- cdef int ti = 0, side
+ cdef int ti = 0, side, m1, m2
cdef int index, pos[3], my_pos[3]
cdef np.float64_t spos[3]
@@ -440,10 +442,12 @@
if spos_contained(vc1, spos):
index = vc_index(vc0, my_pos[0],
my_pos[1], my_pos[2])
+ m1 = vc0.mask[index]
c1 = (<np.int64_t*>vc0.data[0])[index]
index = vc_pos_index(vc1, spos)
+ m2 = vc1.mask[index]
c2 = (<np.int64_t*>vc1.data[0])[index]
- if c1 > -1 and c2 > -1:
+ if m1 == 1 and m2 == 1 and c1 > -1 and c2 > -1:
if examined[adj_node.node_ind] == 0:
joins[ti,0] = i64max(c1,c2)
joins[ti,1] = i64min(c1,c2)
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
More information about the yt-svn
mailing list