[yt-svn] commit/yt: 10 new changesets

commits-noreply@bitbucket.org
Sat Aug 2 03:46:13 PDT 2014


10 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/b3c1db9604fc/
Changeset:   b3c1db9604fc
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-07-30 15:07:53
Summary:     Update usage of level sets
Affected #:  2 files

diff -r c259c6a5f8ac42834e77c4056bdb635de93620c8 -r b3c1db9604fc1410669a346756122796d12f2cb3 yt/analysis_modules/level_sets/clump_handling.py
--- a/yt/analysis_modules/level_sets/clump_handling.py
+++ b/yt/analysis_modules/level_sets/clump_handling.py
@@ -138,7 +138,7 @@
                 unique_contours.update(np.unique(ff))
         contour_key = uuid.uuid4().hex
         base_object = getattr(self.data, 'base_object', self.data)
-        add_contour_field(base_object.pf, contour_key)
+        add_contour_field(base_object.ds, contour_key)
         for cid in sorted(unique_contours):
             if cid == -1: continue
             new_clump = base_object.cut_region(

diff -r c259c6a5f8ac42834e77c4056bdb635de93620c8 -r b3c1db9604fc1410669a346756122796d12f2cb3 yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -15,6 +15,7 @@
 
 import itertools
 import types
+import uuid
 
 data_object_registry = {}
 
@@ -1118,11 +1119,22 @@
             else:
                 mv = cons[level+1]
             from yt.analysis_modules.level_sets.api import identify_contours
+            from yt.analysis_modules.level_sets.clump_handling import \
+                add_contour_field
             nj, cids = identify_contours(self, field, cons[level], mv)
-            for cid in range(nj):
-                contours[level][cid] = self.cut_region(
-                    ["obj['contours'] == %s" % (cid + 1)],
-                    {'contour_slices': cids})
+            unique_contours = set([])
+            for sl_list in cids.values():
+                for sl, ff in sl_list:
+                    unique_contours.update(np.unique(ff))
+            contour_key = uuid.uuid4().hex
+            # In case we're a cut region already...
+            base_object = getattr(self, 'base_object', self)
+            add_contour_field(base_object.ds, contour_key)
+            for cid in sorted(unique_contours):
+                if cid == -1: continue
+                contours[level][cid] = base_object.cut_region(
+                    ["obj['contours_%s'] == %s" % (contour_key, cid + 1)],
+                    {'contour_slices_%s' % contour_key: cids})
         return cons, contours
 
     def paint_grids(self, field, value, default_value=None):

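For context, a minimal usage sketch of the reworked extract_connected_sets entry point (assuming the IsolatedGalaxy sample dataset used by the tests below is available locally). The returned contours dict now maps level and contour id to cut regions built on the per-call, uuid-keyed contour field:

import yt

ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
sphere = ds.sphere("c", (8, "kpc"))
# cons: the contour boundary values per level; contours: level -> contour id
# -> a cut_region on the uuid-keyed 'contours_<key>' field added above.
cons, contours = sphere.extract_connected_sets(
    ("gas", "density"), 5, 1e-24, 8e-24)
for level in contours:
    for cid in sorted(contours[level]):
        clump = contours[level][cid]
        print("%d %d %d" % (level, cid, clump["gas", "density"].size))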

https://bitbucket.org/yt_analysis/yt/commits/819e024f86d9/
Changeset:   819e024f86d9
Branch:      yt-3.0
User:        brittonsmith
Date:        2014-07-31 13:21:58
Summary:     Adding level sets answer test.
Affected #:  1 file

diff -r b3c1db9604fc1410669a346756122796d12f2cb3 -r 819e024f86d99330fa81d5d709e83d9398444557 yt/utilities/answer_testing/level_sets_tests.py
--- /dev/null
+++ b/yt/utilities/answer_testing/level_sets_tests.py
@@ -0,0 +1,48 @@
+"""
+Answer Testing for level sets
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+
+from .framework import \
+    AnswerTestingTest
+
+class ExtractConnectedSetsTest(AnswerTestingTest):
+    _type_name = "ExtractConnectedSets"
+    _attrs = ()
+
+    def __init__(self, ds_fn, data_source, field, num_levels, min_val, max_val):
+        super(ExtractConnectedSetsTest, self).__init__(ds_fn)
+        self.data_source = data_source
+        self.field = field
+        self.num_levels = num_levels
+        self.min_val = min_val
+        self.max_val = max_val
+    
+    def run(self):
+        n, all_sets = self.data_source.extract_connected_sets(
+            self.field, self.num_levels, self.min_value, self.max_value)
+        result = []
+        for level in all_sets:
+            for set_id in all_sets[level]:
+                result.append([all_sets[level][set_id]["cell_mass"].size,
+                               all_sets[level][set_id]["cell_mass"].sum()])
+        result = np.array(result)
+        return result
+
+    def compare(self, new_result, old_result):
+        err_msg = "Size and/or mass of connected sets do not agree for %s." % \
+          self.ds_fn
+        assert_equal(new_result, old_result,
+                     err_msg=err_msg, verbose=True)

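The summary that run() computes can be seen in isolation with plain numpy standing in for yt data containers; each connected set contributes one [cell count, total cell_mass] row:

import numpy as np

# Toy stand-in for the nested {level: {set_id: data_object}} structure;
# plain dicts and arrays replace yt cut regions here.
all_sets = {0: {1: {"cell_mass": np.array([1.0, 2.0, 3.0])},
                2: {"cell_mass": np.array([4.0])}}}
result = []
for level in all_sets:
    for set_id in all_sets[level]:
        cm = all_sets[level][set_id]["cell_mass"]
        result.append([cm.size, cm.sum()])
result = np.array(result)  # array([[3., 6.], [1., 4.]])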

https://bitbucket.org/yt_analysis/yt/commits/f2b00aee110d/
Changeset:   f2b00aee110d
Branch:      yt-3.0
User:        brittonsmith
Date:        2014-07-31 13:32:52
Summary:     Adding test for connected sets.
Affected #:  2 files

diff -r 819e024f86d99330fa81d5d709e83d9398444557 -r f2b00aee110d0e00034af5351178abfa3a158e56 yt/data_objects/tests/test_connected_sets.py
--- /dev/null
+++ b/yt/data_objects/tests/test_connected_sets.py
@@ -0,0 +1,12 @@
+from yt.utilities.answer_testing.level_sets_tests import \
+     ExtractConnectedSetsTest, \
+     requires_ds
+
+g30 = "IsolatedGalaxy/galaxy0030/galaxy0030"
+@requires_ds(g30, big_data=True)
+def test_connected_sets():
+    ds = data_dir_load(g30)
+    data_source = ds.disk([0.5, 0.5, 0.5], [0., 0., 1.],
+                          (1, 'kpc'), (1, 'kpc'))
+    yield ExtractConnectedSetsTest(data_source, ("gas", "density"),
+                                   5, 1e-24, 8e-24)

diff -r 819e024f86d99330fa81d5d709e83d9398444557 -r f2b00aee110d0e00034af5351178abfa3a158e56 yt/utilities/answer_testing/level_sets_tests.py
--- a/yt/utilities/answer_testing/level_sets_tests.py
+++ b/yt/utilities/answer_testing/level_sets_tests.py
@@ -15,6 +15,8 @@
 
 import numpy as np
 
+from yt.testing import *
+
 from .framework import \
     AnswerTestingTest
 


https://bitbucket.org/yt_analysis/yt/commits/80fb5beed653/
Changeset:   80fb5beed653
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-07-31 16:40:19
Summary:     Adding requires_ds to imports.
Affected #:  2 files

diff -r f2b00aee110d0e00034af5351178abfa3a158e56 -r 80fb5beed65398d81710d35528bebe4806d06188 yt/data_objects/tests/test_connected_sets.py
--- a/yt/data_objects/tests/test_connected_sets.py
+++ b/yt/data_objects/tests/test_connected_sets.py
@@ -1,6 +1,7 @@
 from yt.utilities.answer_testing.level_sets_tests import \
      ExtractConnectedSetsTest, \
-     requires_ds
+     requires_ds, \
+     data_dir_load
 
 g30 = "IsolatedGalaxy/galaxy0030/galaxy0030"
 @requires_ds(g30, big_data=True)

diff -r f2b00aee110d0e00034af5351178abfa3a158e56 -r 80fb5beed65398d81710d35528bebe4806d06188 yt/utilities/answer_testing/level_sets_tests.py
--- a/yt/utilities/answer_testing/level_sets_tests.py
+++ b/yt/utilities/answer_testing/level_sets_tests.py
@@ -18,7 +18,7 @@
 from yt.testing import *
 
 from .framework import \
-    AnswerTestingTest
+    AnswerTestingTest, requires_ds, data_dir_load
 
 class ExtractConnectedSetsTest(AnswerTestingTest):
     _type_name = "ExtractConnectedSets"


https://bitbucket.org/yt_analysis/yt/commits/270590047999/
Changeset:   270590047999
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-07-31 16:44:24
Summary:     Fixing imports and tests.
Affected #:  2 files

diff -r 80fb5beed65398d81710d35528bebe4806d06188 -r 270590047999b18693e67a50d2b8050f9bb9cea9 yt/data_objects/tests/test_connected_sets.py
--- a/yt/data_objects/tests/test_connected_sets.py
+++ b/yt/data_objects/tests/test_connected_sets.py
@@ -9,5 +9,5 @@
     ds = data_dir_load(g30)
     data_source = ds.disk([0.5, 0.5, 0.5], [0., 0., 1.],
                           (1, 'kpc'), (1, 'kpc'))
-    yield ExtractConnectedSetsTest(data_source, ("gas", "density"),
+    yield ExtractConnectedSetsTest(g30, data_source, ("gas", "density"),
                                    5, 1e-24, 8e-24)

diff -r 80fb5beed65398d81710d35528bebe4806d06188 -r 270590047999b18693e67a50d2b8050f9bb9cea9 yt/utilities/answer_testing/level_sets_tests.py
--- a/yt/utilities/answer_testing/level_sets_tests.py
+++ b/yt/utilities/answer_testing/level_sets_tests.py
@@ -34,7 +34,7 @@
     
     def run(self):
         n, all_sets = self.data_source.extract_connected_sets(
-            self.field, self.num_levels, self.min_value, self.max_value)
+            self.field, self.num_levels, self.min_val, self.max_val)
         result = []
         for level in all_sets:
             for set_id in all_sets[level]:

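With the constructor arguments and attribute names now consistent, the yielded test can be exercised end to end. A hedged sketch (the answer-testing framework normally drives this and supplies the stored result; here the fresh result stands in for the stored one):

from yt.utilities.answer_testing.level_sets_tests import \
    ExtractConnectedSetsTest, data_dir_load

g30 = "IsolatedGalaxy/galaxy0030/galaxy0030"
ds = data_dir_load(g30)
data_source = ds.disk([0.5, 0.5, 0.5], [0., 0., 1.], (1, 'kpc'), (1, 'kpc'))
test = ExtractConnectedSetsTest(g30, data_source, ("gas", "density"),
                                5, 1e-24, 8e-24)
new_result = test.run()          # rows of [cell count, total cell_mass]
test.compare(new_result, new_result)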

https://bitbucket.org/yt_analysis/yt/commits/fc288a094300/
Changeset:   fc288a094300
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-08-01 16:00:55
Summary:     Starting mask checks.
Affected #:  2 files

diff -r 270590047999b18693e67a50d2b8050f9bb9cea9 -r fc288a09430070d93776f796a1229b750f47f2de yt/analysis_modules/level_sets/contour_finder.py
--- a/yt/analysis_modules/level_sets/contour_finder.py
+++ b/yt/analysis_modules/level_sets/contour_finder.py
@@ -33,14 +33,20 @@
     empty_mask = np.ones((1,1,1), dtype="uint8")
     node_ids = []
     DLE = data_source.ds.domain_left_edge
+    total_vol = None
     for (g, node, (sl, dims, gi)) in data_source.tiles.slice_traverse():
+        if total_vol is None:
+            total_vol = g["cell_volume"][sl].sum()
+        else:
+            total_vol += g["cell_volume"][sl].sum()
         node.node_ind = len(node_ids)
         nid = node.node_id
         node_ids.append(nid)
         values = g[field][sl].astype("float64")
         contour_ids = np.zeros(dims, "int64") - 1
+        mask = data_source.selector.fill_mask(g)[sl]
         total_contours += gct.identify_contours(values, contour_ids,
-                                                total_contours)
+                                                mask.astype("uint8"), total_contours)
         new_contours = tree.cull_candidates(contour_ids)
         tree.add_contours(new_contours)
         # Now we can create a partitioned grid with the contours.
@@ -50,6 +56,7 @@
             [contour_ids.view("float64")], empty_mask,
             LE, RE, dims.astype("int64"))
         contours[nid] = (g.Level, node.node_ind, pg, sl)
+    print total_vol, data_source["cell_volume"].sum()
     node_ids = np.array(node_ids)
     if node_ids.size == 0:
         return 0, {}

diff -r 270590047999b18693e67a50d2b8050f9bb9cea9 -r fc288a09430070d93776f796a1229b750f47f2de yt/utilities/lib/ContourFinding.pyx
--- a/yt/utilities/lib/ContourFinding.pyx
+++ b/yt/utilities/lib/ContourFinding.pyx
@@ -296,6 +296,7 @@
     @cython.wraparound(False)
     def identify_contours(self, np.ndarray[np.float64_t, ndim=3] values,
                                 np.ndarray[np.int64_t, ndim=3] contour_ids,
+                                np.ndarray[np.uint8_t, ndim=3] mask,
                                 np.int64_t start):
         # This just looks at neighbor values and tries to identify which zones
         # are touching by face within a given brick.
@@ -317,6 +318,7 @@
                 for k in range(nk):
                     v = values[i,j,k]
                     if v < self.min_val or v > self.max_val: continue
+                    if mask[i,j,k] == 0: continue
                     nc += 1
                     c1 = contour_create(nc + start)
                     cur = container[i*nj*nk + j*nk + k] = c1
@@ -403,7 +405,7 @@
                 + vc0.dims[1]*vc0.dims[2]) * 18
     # We allocate an array of fixed (maximum) size
     cdef np.ndarray[np.int64_t, ndim=2] joins = np.zeros((s, 2), dtype="int64")
-    cdef int ti = 0, side
+    cdef int ti = 0, side, m
     cdef int index, pos[3], my_pos[3]
     cdef np.float64_t spos[3]
 
@@ -440,10 +442,12 @@
                             if spos_contained(vc1, spos):
                                 index = vc_index(vc0, my_pos[0], 
                                                  my_pos[1], my_pos[2])
+                                m = vc0.mask[index]
                                 c1 = (<np.int64_t*>vc0.data[0])[index]
                                 index = vc_pos_index(vc1, spos)
+                                m *= vc1.mask[index]
                                 c2 = (<np.int64_t*>vc1.data[0])[index]
-                                if c1 > -1 and c2 > -1:
+                                if m == 1 and c1 > -1 and c2 > -1:
                                     if examined[adj_node.node_ind] == 0:
                                         joins[ti,0] = i64max(c1,c2)
                                         joins[ti,1] = i64min(c1,c2)

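The effect of the new mask argument is that cells outside the data source's selector can neither seed a contour nor join one across brick faces. As an illustrative analogy only (yt's implementation is the Cython union-find above, not scipy), the same gating expressed with numpy/scipy:

import numpy as np
from scipy import ndimage

def labeled_contours(values, mask, min_val, max_val):
    # A cell is eligible only if its value is in range AND it lies inside
    # the selector mask; 6-connectivity matches "touching by face" above.
    eligible = (values >= min_val) & (values <= max_val) & (mask > 0)
    structure = ndimage.generate_binary_structure(3, 1)
    labels, n = ndimage.label(eligible, structure=structure)
    # Match yt's convention of -1 for cells belonging to no contour.
    return np.where(labels == 0, -1, labels), n

rng = np.random.RandomState(0)
values = rng.random_sample((8, 8, 8))
mask = np.zeros((8, 8, 8), dtype="uint8")
mask[2:6, 2:6, 2:6] = 1  # only an interior sub-box is selected
contour_ids, n = labeled_contours(values, mask, 0.4, 0.9)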

https://bitbucket.org/yt_analysis/yt/commits/6459a73b0bc7/
Changeset:   6459a73b0bc7
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-08-01 20:17:02
Summary:     This gets single-source clump finding back where it was.
Affected #:  3 files

diff -r fc288a09430070d93776f796a1229b750f47f2de -r 6459a73b0bc7b9437b5c93ffd49134c743397385 yt/analysis_modules/level_sets/contour_finder.py
--- a/yt/analysis_modules/level_sets/contour_finder.py
+++ b/yt/analysis_modules/level_sets/contour_finder.py
@@ -30,33 +30,28 @@
     gct = TileContourTree(min_val, max_val)
     total_contours = 0
     contours = {}
-    empty_mask = np.ones((1,1,1), dtype="uint8")
     node_ids = []
     DLE = data_source.ds.domain_left_edge
     total_vol = None
+    selector = getattr(data_source, "base_object", data_source).selector
     for (g, node, (sl, dims, gi)) in data_source.tiles.slice_traverse():
-        if total_vol is None:
-            total_vol = g["cell_volume"][sl].sum()
-        else:
-            total_vol += g["cell_volume"][sl].sum()
         node.node_ind = len(node_ids)
         nid = node.node_id
         node_ids.append(nid)
         values = g[field][sl].astype("float64")
         contour_ids = np.zeros(dims, "int64") - 1
-        mask = data_source.selector.fill_mask(g)[sl]
+        mask = selector.fill_mask(g)[sl].astype("uint8")
         total_contours += gct.identify_contours(values, contour_ids,
-                                                mask.astype("uint8"), total_contours)
+                                                mask, total_contours)
         new_contours = tree.cull_candidates(contour_ids)
         tree.add_contours(new_contours)
         # Now we can create a partitioned grid with the contours.
         LE = (DLE + g.dds * gi).in_units("code_length").ndarray_view()
         RE = LE + (dims * g.dds).in_units("code_length").ndarray_view()
         pg = PartitionedGrid(g.id,
-            [contour_ids.view("float64")], empty_mask,
+            [contour_ids.view("float64")], mask,
             LE, RE, dims.astype("int64"))
         contours[nid] = (g.Level, node.node_ind, pg, sl)
-    print total_vol, data_source["cell_volume"].sum()
     node_ids = np.array(node_ids)
     if node_ids.size == 0:
         return 0, {}

diff -r fc288a09430070d93776f796a1229b750f47f2de -r 6459a73b0bc7b9437b5c93ffd49134c743397385 yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -1133,7 +1133,7 @@
             for cid in sorted(unique_contours):
                 if cid == -1: continue
                 contours[level][cid] = base_object.cut_region(
-                    ["obj['contours_%s'] == %s" % (contour_key, cid + 1)],
+                    ["obj['contours_%s'] == %s" % (contour_key, cid)],
                     {'contour_slices_%s' % contour_key: cids})
         return cons, contours
 

diff -r fc288a09430070d93776f796a1229b750f47f2de -r 6459a73b0bc7b9437b5c93ffd49134c743397385 yt/utilities/lib/ContourFinding.pyx
--- a/yt/utilities/lib/ContourFinding.pyx
+++ b/yt/utilities/lib/ContourFinding.pyx
@@ -317,8 +317,8 @@
             for j in range(nj):
                 for k in range(nk):
                     v = values[i,j,k]
+                    if mask[i,j,k] == 0: continue
                     if v < self.min_val or v > self.max_val: continue
-                    if mask[i,j,k] == 0: continue
                     nc += 1
                     c1 = contour_create(nc + start)
                     cur = container[i*nj*nk + j*nk + k] = c1
@@ -405,7 +405,7 @@
                 + vc0.dims[1]*vc0.dims[2]) * 18
     # We allocate an array of fixed (maximum) size
     cdef np.ndarray[np.int64_t, ndim=2] joins = np.zeros((s, 2), dtype="int64")
-    cdef int ti = 0, side, m
+    cdef int ti = 0, side, m1, m2
     cdef int index, pos[3], my_pos[3]
     cdef np.float64_t spos[3]
 
@@ -442,12 +442,12 @@
                             if spos_contained(vc1, spos):
                                 index = vc_index(vc0, my_pos[0], 
                                                  my_pos[1], my_pos[2])
-                                m = vc0.mask[index]
+                                m1 = vc0.mask[index]
                                 c1 = (<np.int64_t*>vc0.data[0])[index]
                                 index = vc_pos_index(vc1, spos)
-                                m *= vc1.mask[index]
+                                m2 = vc1.mask[index]
                                 c2 = (<np.int64_t*>vc1.data[0])[index]
-                                if m == 1 and c1 > -1 and c2 > -1:
+                                if m1 == 1 and m2 == 1 and c1 > -1 and c2 > -1:
                                     if examined[adj_node.node_ind] == 0:
                                         joins[ti,0] = i64max(c1,c2)
                                         joins[ti,1] = i64min(c1,c2)

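The cross-brick join rule is now explicit: two facing cells merge their contours only when both lie inside the mask and both already carry a valid contour id. A plain-Python restatement of the predicate from the diff above:

def should_join(m1, m2, c1, c2):
    # m1, m2: mask values (1 = inside the data source) of the facing cells;
    # c1, c2: their contour ids (-1 = no contour assigned).
    return m1 == 1 and m2 == 1 and c1 > -1 and c2 > -1

assert should_join(1, 1, 3, 7)
assert not should_join(1, 0, 3, 7)   # neighbor lies outside the selector
assert not should_join(1, 1, -1, 7)  # no contour on one side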

https://bitbucket.org/yt_analysis/yt/commits/956ffe273938/
Changeset:   956ffe273938
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-08-01 20:22:30
Summary:     This last little bit fixes the clumps.
Affected #:  1 file

diff -r 6459a73b0bc7b9437b5c93ffd49134c743397385 -r 956ffe273938eea39757c05584abfac76e86901e yt/analysis_modules/level_sets/contour_finder.py
--- a/yt/analysis_modules/level_sets/contour_finder.py
+++ b/yt/analysis_modules/level_sets/contour_finder.py
@@ -34,13 +34,14 @@
     DLE = data_source.ds.domain_left_edge
     total_vol = None
     selector = getattr(data_source, "base_object", data_source).selector
+    masks = dict((g.id, m) for g, m in data_source.blocks)
     for (g, node, (sl, dims, gi)) in data_source.tiles.slice_traverse():
         node.node_ind = len(node_ids)
         nid = node.node_id
         node_ids.append(nid)
         values = g[field][sl].astype("float64")
         contour_ids = np.zeros(dims, "int64") - 1
-        mask = selector.fill_mask(g)[sl].astype("uint8")
+        mask = masks[g.id][sl].astype("uint8")
         total_contours += gct.identify_contours(values, contour_ids,
                                                 mask, total_contours)
         new_contours = tree.cull_candidates(contour_ids)

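Swapping selector.fill_mask(g) for the masks yielded by data_source.blocks is presumably what restores single-source clump finding: for grid frontends the blocks masks fold in child masking on top of the geometric selection (an assumption about yt's grid selection; the diff itself does not say). A hedged sketch of the caching pattern, assuming the IsolatedGalaxy dataset:

import yt

ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
data_source = ds.sphere("c", (8, "kpc"))
# blocks yields (grid, mask) pairs; cache each grid's mask once up front
# instead of re-running the selector per kD-tree tile.
masks = dict((g.id, m) for g, m in data_source.blocks)
for (g, node, (sl, dims, gi)) in data_source.tiles.slice_traverse():
    mask = masks[g.id][sl].astype("uint8")  # per-tile slice of cached mask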

https://bitbucket.org/yt_analysis/yt/commits/6e76ec5b3a3a/
Changeset:   6e76ec5b3a3a
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-08-02 05:11:21
Summary:     Make test_connected_sets consistent with find_clumps.

This change doesn't make much of a difference, since we're just testing
internal consistency.
Affected #:  1 file

diff -r 956ffe273938eea39757c05584abfac76e86901e -r 6e76ec5b3a3a6e1c9745aee79ad44d6b566bac09 yt/data_objects/tests/test_connected_sets.py
--- a/yt/data_objects/tests/test_connected_sets.py
+++ b/yt/data_objects/tests/test_connected_sets.py
@@ -8,6 +8,6 @@
 def test_connected_sets():
     ds = data_dir_load(g30)
     data_source = ds.disk([0.5, 0.5, 0.5], [0., 0., 1.],
-                          (1, 'kpc'), (1, 'kpc'))
+                          (8, 'kpc'), (1, 'kpc'))
     yield ExtractConnectedSetsTest(g30, data_source, ("gas", "density"),
                                    5, 1e-24, 8e-24)


https://bitbucket.org/yt_analysis/yt/commits/9701602a49cf/
Changeset:   9701602a49cf
Branch:      yt-3.0
User:        brittonsmith
Date:        2014-08-02 12:46:07
Summary:     Merged in MatthewTurk/yt/yt-3.0 (pull request #1104)

Update usage of level sets
Affected #:  6 files

diff -r f06b454a1c620f658ada1f635cf51ca21d6f7c5f -r 9701602a49cf2aa538b53570bb9038bc1ef441e4 yt/analysis_modules/level_sets/clump_handling.py
--- a/yt/analysis_modules/level_sets/clump_handling.py
+++ b/yt/analysis_modules/level_sets/clump_handling.py
@@ -138,7 +138,7 @@
                 unique_contours.update(np.unique(ff))
         contour_key = uuid.uuid4().hex
         base_object = getattr(self.data, 'base_object', self.data)
-        add_contour_field(base_object.pf, contour_key)
+        add_contour_field(base_object.ds, contour_key)
         for cid in sorted(unique_contours):
             if cid == -1: continue
             new_clump = base_object.cut_region(

diff -r f06b454a1c620f658ada1f635cf51ca21d6f7c5f -r 9701602a49cf2aa538b53570bb9038bc1ef441e4 yt/analysis_modules/level_sets/contour_finder.py
--- a/yt/analysis_modules/level_sets/contour_finder.py
+++ b/yt/analysis_modules/level_sets/contour_finder.py
@@ -30,24 +30,27 @@
     gct = TileContourTree(min_val, max_val)
     total_contours = 0
     contours = {}
-    empty_mask = np.ones((1,1,1), dtype="uint8")
     node_ids = []
     DLE = data_source.ds.domain_left_edge
+    total_vol = None
+    selector = getattr(data_source, "base_object", data_source).selector
+    masks = dict((g.id, m) for g, m in data_source.blocks)
     for (g, node, (sl, dims, gi)) in data_source.tiles.slice_traverse():
         node.node_ind = len(node_ids)
         nid = node.node_id
         node_ids.append(nid)
         values = g[field][sl].astype("float64")
         contour_ids = np.zeros(dims, "int64") - 1
+        mask = masks[g.id][sl].astype("uint8")
         total_contours += gct.identify_contours(values, contour_ids,
-                                                total_contours)
+                                                mask, total_contours)
         new_contours = tree.cull_candidates(contour_ids)
         tree.add_contours(new_contours)
         # Now we can create a partitioned grid with the contours.
         LE = (DLE + g.dds * gi).in_units("code_length").ndarray_view()
         RE = LE + (dims * g.dds).in_units("code_length").ndarray_view()
         pg = PartitionedGrid(g.id,
-            [contour_ids.view("float64")], empty_mask,
+            [contour_ids.view("float64")], mask,
             LE, RE, dims.astype("int64"))
         contours[nid] = (g.Level, node.node_ind, pg, sl)
     node_ids = np.array(node_ids)

diff -r f06b454a1c620f658ada1f635cf51ca21d6f7c5f -r 9701602a49cf2aa538b53570bb9038bc1ef441e4 yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -15,6 +15,7 @@
 
 import itertools
 import types
+import uuid
 
 data_object_registry = {}
 
@@ -1118,11 +1119,22 @@
             else:
                 mv = cons[level+1]
             from yt.analysis_modules.level_sets.api import identify_contours
+            from yt.analysis_modules.level_sets.clump_handling import \
+                add_contour_field
             nj, cids = identify_contours(self, field, cons[level], mv)
-            for cid in range(nj):
-                contours[level][cid] = self.cut_region(
-                    ["obj['contours'] == %s" % (cid + 1)],
-                    {'contour_slices': cids})
+            unique_contours = set([])
+            for sl_list in cids.values():
+                for sl, ff in sl_list:
+                    unique_contours.update(np.unique(ff))
+            contour_key = uuid.uuid4().hex
+            # In case we're a cut region already...
+            base_object = getattr(self, 'base_object', self)
+            add_contour_field(base_object.ds, contour_key)
+            for cid in sorted(unique_contours):
+                if cid == -1: continue
+                contours[level][cid] = base_object.cut_region(
+                    ["obj['contours_%s'] == %s" % (contour_key, cid)],
+                    {'contour_slices_%s' % contour_key: cids})
         return cons, contours
 
     def paint_grids(self, field, value, default_value=None):

diff -r f06b454a1c620f658ada1f635cf51ca21d6f7c5f -r 9701602a49cf2aa538b53570bb9038bc1ef441e4 yt/data_objects/tests/test_connected_sets.py
--- /dev/null
+++ b/yt/data_objects/tests/test_connected_sets.py
@@ -0,0 +1,13 @@
+from yt.utilities.answer_testing.level_sets_tests import \
+     ExtractConnectedSetsTest, \
+     requires_ds, \
+     data_dir_load
+
+g30 = "IsolatedGalaxy/galaxy0030/galaxy0030"
+@requires_ds(g30, big_data=True)
+def test_connected_sets():
+    ds = data_dir_load(g30)
+    data_source = ds.disk([0.5, 0.5, 0.5], [0., 0., 1.],
+                          (8, 'kpc'), (1, 'kpc'))
+    yield ExtractConnectedSetsTest(g30, data_source, ("gas", "density"),
+                                   5, 1e-24, 8e-24)

diff -r f06b454a1c620f658ada1f635cf51ca21d6f7c5f -r 9701602a49cf2aa538b53570bb9038bc1ef441e4 yt/utilities/answer_testing/level_sets_tests.py
--- /dev/null
+++ b/yt/utilities/answer_testing/level_sets_tests.py
@@ -0,0 +1,50 @@
+"""
+Answer Testing for level sets
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+
+from yt.testing import *
+
+from .framework import \
+    AnswerTestingTest, requires_ds, data_dir_load
+
+class ExtractConnectedSetsTest(AnswerTestingTest):
+    _type_name = "ExtractConnectedSets"
+    _attrs = ()
+
+    def __init__(self, ds_fn, data_source, field, num_levels, min_val, max_val):
+        super(ExtractConnectedSetsTest, self).__init__(ds_fn)
+        self.data_source = data_source
+        self.field = field
+        self.num_levels = num_levels
+        self.min_val = min_val
+        self.max_val = max_val
+    
+    def run(self):
+        n, all_sets = self.data_source.extract_connected_sets(
+            self.field, self.num_levels, self.min_val, self.max_val)
+        result = []
+        for level in all_sets:
+            for set_id in all_sets[level]:
+                result.append([all_sets[level][set_id]["cell_mass"].size,
+                               all_sets[level][set_id]["cell_mass"].sum()])
+        result = np.array(result)
+        return result
+
+    def compare(self, new_result, old_result):
+        err_msg = "Size and/or mass of connected sets do not agree for %s." % \
+          self.ds_fn
+        assert_equal(new_result, old_result,
+                     err_msg=err_msg, verbose=True)

diff -r f06b454a1c620f658ada1f635cf51ca21d6f7c5f -r 9701602a49cf2aa538b53570bb9038bc1ef441e4 yt/utilities/lib/ContourFinding.pyx
--- a/yt/utilities/lib/ContourFinding.pyx
+++ b/yt/utilities/lib/ContourFinding.pyx
@@ -296,6 +296,7 @@
     @cython.wraparound(False)
     def identify_contours(self, np.ndarray[np.float64_t, ndim=3] values,
                                 np.ndarray[np.int64_t, ndim=3] contour_ids,
+                                np.ndarray[np.uint8_t, ndim=3] mask,
                                 np.int64_t start):
         # This just looks at neighbor values and tries to identify which zones
         # are touching by face within a given brick.
@@ -316,6 +317,7 @@
             for j in range(nj):
                 for k in range(nk):
                     v = values[i,j,k]
+                    if mask[i,j,k] == 0: continue
                     if v < self.min_val or v > self.max_val: continue
                     nc += 1
                     c1 = contour_create(nc + start)
@@ -403,7 +405,7 @@
                 + vc0.dims[1]*vc0.dims[2]) * 18
     # We allocate an array of fixed (maximum) size
     cdef np.ndarray[np.int64_t, ndim=2] joins = np.zeros((s, 2), dtype="int64")
-    cdef int ti = 0, side
+    cdef int ti = 0, side, m1, m2
     cdef int index, pos[3], my_pos[3]
     cdef np.float64_t spos[3]
 
@@ -440,10 +442,12 @@
                             if spos_contained(vc1, spos):
                                 index = vc_index(vc0, my_pos[0], 
                                                  my_pos[1], my_pos[2])
+                                m1 = vc0.mask[index]
                                 c1 = (<np.int64_t*>vc0.data[0])[index]
                                 index = vc_pos_index(vc1, spos)
+                                m2 = vc1.mask[index]
                                 c2 = (<np.int64_t*>vc1.data[0])[index]
-                                if c1 > -1 and c2 > -1:
+                                if m1 == 1 and m2 == 1 and c1 > -1 and c2 > -1:
                                     if examined[adj_node.node_ind] == 0:
                                         joins[ti,0] = i64max(c1,c2)
                                         joins[ti,1] = i64min(c1,c2)

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.


