[yt-svn] commit/yt: 9 new changesets
Bitbucket
commits-noreply at bitbucket.org
Wed Oct 24 12:35:11 PDT 2012
9 new commits in yt:
https://bitbucket.org/yt_analysis/yt/changeset/b6ec5694fc75/
changeset: b6ec5694fc75
branch: yt
user: sskory
date: 2012-10-19 22:03:30
summary: Fixed a small error with boolean objects.
affected #: 1 file
diff -r 56c2d60a99c72bb9cf58f1c1f264787999ba7c01 -r b6ec5694fc75f05406c68448adf155b523112f36 yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -71,7 +71,7 @@
def force_array(item, shape):
try:
sh = item.shape
- return item
+ return item.copy()
except AttributeError:
if item:
return np.ones(shape, dtype='bool')
https://bitbucket.org/yt_analysis/yt/changeset/b18f2082144d/
changeset: b18f2082144d
branch: yt
user: sskory
date: 2012-10-20 00:50:02
summary: Merge.
affected #: 1 file
diff -r b6ec5694fc75f05406c68448adf155b523112f36 -r b18f2082144d476ce06a6d03af5f9ea2b9ff2124 yt/data_objects/tests/test_extract_regions.py
--- /dev/null
+++ b/yt/data_objects/tests/test_extract_regions.py
@@ -0,0 +1,53 @@
+from yt.testing import *
+
+def setup():
+ from yt.config import ytcfg
+ ytcfg["yt","__withintesting"] = "True"
+
+def test_cut_region():
+ # We decompose in different ways
+ for nprocs in [1, 2, 4, 8]:
+ pf = fake_random_pf(64, nprocs = nprocs,
+ fields = ("Density", "Temperature", "x-velocity"))
+ # We'll test two objects
+ dd = pf.h.all_data()
+ r = dd.cut_region( [ "grid['Temperature'] > 0.5",
+ "grid['Density'] < 0.75",
+ "grid['x-velocity'] > 0.25" ])
+ t = ( (dd["Temperature"] > 0.5 )
+ & (dd["Density"] < 0.75 )
+ & (dd["x-velocity"] > 0.25 ) )
+ yield assert_equal, np.all(r["Temperature"] > 0.5), True
+ yield assert_equal, np.all(r["Density"] < 0.75), True
+ yield assert_equal, np.all(r["x-velocity"] > 0.25), True
+ yield assert_equal, np.sort(dd["Density"][t]), np.sort(r["Density"])
+ yield assert_equal, np.sort(dd["x"][t]), np.sort(r["x"])
+ r2 = r.cut_region( [ "grid['Temperature'] < 0.75" ] )
+ t2 = (r["Temperature"] < 0.75)
+ yield assert_equal, np.sort(r2["Temperature"]), np.sort(r["Temperature"][t2])
+ yield assert_equal, np.all(r2["Temperature"] < 0.75), True
+
+def test_extract_region():
+ # We decompose in different ways
+ for nprocs in [1, 2, 4, 8]:
+ pf = fake_random_pf(64, nprocs = nprocs,
+ fields = ("Density", "Temperature", "x-velocity"))
+ # We'll test two objects
+ dd = pf.h.all_data()
+ t = ( (dd["Temperature"] > 0.5 )
+ & (dd["Density"] < 0.75 )
+ & (dd["x-velocity"] > 0.25 ) )
+ r = dd.extract_region(t)
+ yield assert_equal, np.all(r["Temperature"] > 0.5), True
+ yield assert_equal, np.all(r["Density"] < 0.75), True
+ yield assert_equal, np.all(r["x-velocity"] > 0.25), True
+ yield assert_equal, np.sort(dd["Density"][t]), np.sort(r["Density"])
+ yield assert_equal, np.sort(dd["x"][t]), np.sort(r["x"])
+ t2 = (r["Temperature"] < 0.75)
+ r2 = r.cut_region( [ "grid['Temperature'] < 0.75" ] )
+ yield assert_equal, np.sort(r2["Temperature"]), np.sort(r["Temperature"][t2])
+ yield assert_equal, np.all(r2["Temperature"] < 0.75), True
+ t3 = (r["Temperature"] < 0.75)
+ r3 = r.extract_region( t3 )
+ yield assert_equal, np.sort(r3["Temperature"]), np.sort(r["Temperature"][t3])
+ yield assert_equal, np.all(r3["Temperature"] < 0.75), True
https://bitbucket.org/yt_analysis/yt/changeset/52637cee578f/
changeset: 52637cee578f
branch: yt
user: sskory
date: 2012-10-23 17:39:01
summary: merge
affected #: 3 files
diff -r b18f2082144d476ce06a6d03af5f9ea2b9ff2124 -r 52637cee578ff0f290c12331eaf1220e74467d83 yt/data_objects/time_series.py
--- a/yt/data_objects/time_series.py
+++ b/yt/data_objects/time_series.py
@@ -258,8 +258,11 @@
"""
if isinstance(filenames, types.StringTypes):
+ pattern = filenames
filenames = glob.glob(filenames)
filenames.sort()
+ if len(filenames) == 0:
+ raise YTNoFilenamesMatchPattern(pattern)
obj = cls(filenames[:], parallel = parallel)
return obj
diff -r b18f2082144d476ce06a6d03af5f9ea2b9ff2124 -r 52637cee578ff0f290c12331eaf1220e74467d83 yt/utilities/exceptions.py
--- a/yt/utilities/exceptions.py
+++ b/yt/utilities/exceptions.py
@@ -146,3 +146,11 @@
def __str__(self):
return "You must create an API key before uploading. See " + \
"https://data.yt-project.org/getting_started.html"
+
+class YTNoFilenamesMatchPattern(YTException):
+ def __init__(self, pattern):
+ self.pattern = pattern
+
+ def __str__(self):
+ return "No filenames were found to match the pattern: " + \
+ "'%s'" % (self.pattern)
diff -r b18f2082144d476ce06a6d03af5f9ea2b9ff2124 -r 52637cee578ff0f290c12331eaf1220e74467d83 yt/visualization/plot_modifications.py
--- a/yt/visualization/plot_modifications.py
+++ b/yt/visualization/plot_modifications.py
@@ -211,7 +211,7 @@
class ContourCallback(PlotCallback):
_type_name = "contour"
def __init__(self, field, ncont=5, factor=4, clim=None,
- plot_args = None):
+ plot_args = None, label = False, label_args = None):
"""
annotate_contour(self, field, ncont=5, factor=4, take_log=False, clim=None,
plot_args = None):
@@ -230,6 +230,10 @@
self.clim = clim
if plot_args is None: plot_args = {'colors':'k'}
self.plot_args = plot_args
+ self.label = label
+ if label_args is None:
+ label_args = {}
+ self.label_args = label_args
def __call__(self, plot):
x0, x1 = plot.xlim
@@ -290,10 +294,14 @@
if self.clim is not None:
self.ncont = np.linspace(self.clim[0], self.clim[1], ncont)
- plot._axes.contour(xi,yi,zi,self.ncont, **self.plot_args)
+ cset = plot._axes.contour(xi,yi,zi,self.ncont, **self.plot_args)
plot._axes.set_xlim(xx0,xx1)
plot._axes.set_ylim(yy0,yy1)
plot._axes.hold(False)
+
+ if self.label:
+ plot._axes.clabel(cset, **self.label_args)
+
class GridBoundaryCallback(PlotCallback):
_type_name = "grids"
https://bitbucket.org/yt_analysis/yt/changeset/97aea7f156df/
changeset: 97aea7f156df
branch: yt
user: sskory
date: 2012-10-23 17:39:19
summary: Adding tests for boolean objects.
affected #: 1 file
diff -r 52637cee578ff0f290c12331eaf1220e74467d83 -r 97aea7f156dfbab8080028fed481f49b8480669c yt/data_objects/tests/test_boolean_regions.py
--- /dev/null
+++ b/yt/data_objects/tests/test_boolean_regions.py
@@ -0,0 +1,348 @@
+from yt.testing import *
+from yt.mods import *
+
+def setup():
+ from yt.config import ytcfg
+ ytcfg["yt","__withintesting"] = "True"
+ def _ID(field, data):
+ width = data.pf.domain_right_edge - data.pf.domain_left_edge
+ delta = width / data.pf.h.get_smallest_dx()
+ x = data['x'] - data.pf.h.get_smallest_dx() / 2.
+ y = data['y'] - data.pf.h.get_smallest_dx() / 2.
+ z = data['z'] - data.pf.h.get_smallest_dx() / 2.
+ xi = x / data.pf.h.get_smallest_dx()
+ yi = y / data.pf.h.get_smallest_dx()
+ zi = z / data.pf.h.get_smallest_dx()
+ index = xi + delta[0] * (yi + delta[1] * zi)
+ index = index.astype('int64')
+ return index
+
+ add_field("ID", function=_ID)
+
+def test_boolean_spheres_no_overlap():
+ r"""Test to make sure that boolean objects (spheres, no overlap)
+ behave the way we expect.
+
+ Test non-overlapping spheres. This also checks that the original spheres
+ don't change as part of constructing the booleans.
+ """
+ pf = fake_random_pf(64)
+ pf.h
+ sp1 = pf.h.sphere([0.25, 0.25, 0.25], 0.15)
+ sp2 = pf.h.sphere([0.75, 0.75, 0.75], 0.15)
+ # Store the original indices
+ i1 = sp1['ID']
+ i1.sort()
+ i2 = sp2['ID']
+ i2.sort()
+ ii = np.concatenate((i1, i2))
+ ii.sort()
+ # Make some booleans
+ bo1 = pf.h.boolean([sp1, "AND", sp2]) # empty
+ bo2 = pf.h.boolean([sp1, "NOT", sp2]) # only sp1
+ bo3 = pf.h.boolean([sp1, "OR", sp2]) # combination
+ # This makes sure the original containers didn't change.
+ new_i1 = sp1['ID']
+ new_i1.sort()
+ new_i2 = sp2['ID']
+ new_i2.sort()
+ assert_equal(new_i1, i1)
+ assert_equal(new_i2, i2)
+ # Now make sure the indices also behave as we expect.
+ empty = np.array([])
+ assert_array_equal(bo1['ID'], empty)
+ b2 = bo2['ID']
+ b2.sort()
+ assert_array_equal(b2, i1)
+ b3 = bo3['ID']
+ b3.sort()
+ assert_array_equal(b3, ii)
+
+def test_boolean_spheres_overlap():
+ r"""Test to make sure that boolean objects (spheres, overlap)
+ behave the way we expect.
+
+ Test overlapping spheres.
+ """
+ pf = fake_random_pf(64)
+ pf.h
+ sp1 = pf.h.sphere([0.45, 0.45, 0.45], 0.15)
+ sp2 = pf.h.sphere([0.55, 0.55, 0.55], 0.15)
+ # Get indices of both.
+ i1 = sp1['ID']
+ i2 = sp2['ID']
+ # Make some booleans
+ bo1 = pf.h.boolean([sp1, "AND", sp2]) # overlap (a lens)
+ bo2 = pf.h.boolean([sp1, "NOT", sp2]) # sp1 - sp2 (sphere with bite)
+ bo3 = pf.h.boolean([sp1, "OR", sp2]) # combination (H2)
+ # Now make sure the indices also behave as we expect.
+ lens = np.intersect1d(i1, i2)
+ apple = np.setdiff1d(i1, i2)
+ both = np.union1d(i1, i2)
+ b1 = bo1['ID']
+ b1.sort()
+ b2 = bo2['ID']
+ b2.sort()
+ b3 = bo3['ID']
+ b3.sort()
+ assert_array_equal(b1, lens)
+ assert_array_equal(b2, apple)
+ assert_array_equal(b3, both)
+
+def test_boolean_regions_no_overlap():
+ r"""Test to make sure that boolean objects (regions, no overlap)
+ behave the way we expect.
+
+ Test non-overlapping regions. This also checks that the original regions
+ don't change as part of constructing the booleans.
+ """
+ pf = fake_random_pf(64)
+ pf.h
+ re1 = pf.h.region([0.25]*3, [0.2]*3, [0.3]*3)
+ re2 = pf.h.region([0.65]*3, [0.6]*3, [0.7]*3)
+ # Store the original indices
+ i1 = re1['ID']
+ i1.sort()
+ i2 = re2['ID']
+ i2.sort()
+ ii = np.concatenate((i1, i2))
+ ii.sort()
+ # Make some booleans
+ bo1 = pf.h.boolean([re1, "AND", re2]) # empty
+ bo2 = pf.h.boolean([re1, "NOT", re2]) # only re1
+ bo3 = pf.h.boolean([re1, "OR", re2]) # combination
+ # This makes sure the original containers didn't change.
+ new_i1 = re1['ID']
+ new_i1.sort()
+ new_i2 = re2['ID']
+ new_i2.sort()
+ assert_equal(new_i1, i1)
+ assert_equal(new_i2, i2)
+ # Now make sure the indices also behave as we expect.
+ empty = np.array([])
+ assert_array_equal(bo1['ID'], empty)
+ b2 = bo2['ID']
+ b2.sort()
+ assert_array_equal(b2, i1)
+ b3 = bo3['ID']
+ b3.sort()
+ assert_array_equal(b3, ii)
+
+def test_boolean_regions_overlap():
+ r"""Test to make sure that boolean objects (regions, overlap)
+ behave the way we expect.
+
+ Test overlapping regions.
+ """
+ pf = fake_random_pf(64)
+ pf.h
+ re1 = pf.h.region([0.55]*3, [0.5]*3, [0.6]*3)
+ re2 = pf.h.region([0.6]*3, [0.55]*3, [0.65]*3)
+ # Get indices of both.
+ i1 = re1['ID']
+ i2 = re2['ID']
+ # Make some booleans
+ bo1 = pf.h.boolean([re1, "AND", re2]) # overlap (small cube)
+ bo2 = pf.h.boolean([re1, "NOT", re2]) # re1 - re2 (large cube with bite)
+ bo3 = pf.h.boolean([re1, "OR", re2]) # combination (merged large cubes)
+ # Now make sure the indices also behave as we expect.
+ cube = np.intersect1d(i1, i2)
+ bite_cube = np.setdiff1d(i1, i2)
+ both = np.union1d(i1, i2)
+ b1 = bo1['ID']
+ b1.sort()
+ b2 = bo2['ID']
+ b2.sort()
+ b3 = bo3['ID']
+ b3.sort()
+ assert_array_equal(b1, cube)
+ assert_array_equal(b2, bite_cube)
+ assert_array_equal(b3, both)
+
+def test_boolean_cylinders_no_overlap():
+ r"""Test to make sure that boolean objects (cylinders, no overlap)
+ behave the way we expect.
+
+ Test non-overlapping cylinders. This also checks that the original cylinders
+ don't change as part of constructing the booleans.
+ """
+ pf = fake_random_pf(64)
+ pf.h
+ cyl1 = pf.h.disk([0.25]*3, [1, 0, 0], 0.1, 0.1)
+ cyl2 = pf.h.disk([0.75]*3, [1, 0, 0], 0.1, 0.1)
+ # Store the original indices
+ i1 = cyl1['ID']
+ i1.sort()
+ i2 = cyl2['ID']
+ i2.sort()
+ ii = np.concatenate((i1, i2))
+ ii.sort()
+ # Make some booleans
+ bo1 = pf.h.boolean([cyl1, "AND", cyl2]) # empty
+ bo2 = pf.h.boolean([cyl1, "NOT", cyl2]) # only cyl1
+ bo3 = pf.h.boolean([cyl1, "OR", cyl2]) # combination
+ # This makes sure the original containers didn't change.
+ new_i1 = cyl1['ID']
+ new_i1.sort()
+ new_i2 = cyl2['ID']
+ new_i2.sort()
+ assert_equal(new_i1, i1)
+ assert_equal(new_i2, i2)
+ # Now make sure the indices also behave as we expect.
+ empty = np.array([])
+ assert_array_equal(bo1['ID'], empty)
+ b2 = bo2['ID']
+ b2.sort()
+ assert_array_equal(b2, i1)
+ b3 = bo3['ID']
+ b3.sort()
+ assert_array_equal(b3, ii)
+
+def test_boolean_cylinders_overlap():
+ r"""Test to make sure that boolean objects (cylinders, overlap)
+ behave the way we expect.
+
+ Test overlapping cylinders.
+ """
+ pf = fake_random_pf(64)
+ pf.h
+ cyl1 = pf.h.disk([0.45]*3, [1, 0, 0], 0.2, 0.2)
+ cyl2 = pf.h.disk([0.55]*3, [1, 0, 0], 0.2, 0.2)
+ # Get indices of both.
+ i1 = cyl1['ID']
+ i2 = cyl2['ID']
+ # Make some booleans
+ bo1 = pf.h.boolean([cyl1, "AND", cyl2]) # overlap (vertically extended lens)
+ bo2 = pf.h.boolean([cyl1, "NOT", cyl2]) # cyl1 - cyl2 (disk minus a bite)
+ bo3 = pf.h.boolean([cyl1, "OR", cyl2]) # combination (merged disks)
+ # Now make sure the indices also behave as we expect.
+ vlens = np.intersect1d(i1, i2)
+ bite_disk = np.setdiff1d(i1, i2)
+ both = np.union1d(i1, i2)
+ b1 = bo1['ID']
+ b1.sort()
+ b2 = bo2['ID']
+ b2.sort()
+ b3 = bo3['ID']
+ b3.sort()
+ assert_array_equal(b1, vlens)
+ assert_array_equal(b2, bite_disk)
+ assert_array_equal(b3, both)
+
+def test_boolean_ellipsoids_no_overlap():
+ r"""Test to make sure that boolean objects (ellipsoids, no overlap)
+ behave the way we expect.
+
+ Test non-overlapping ellipsoids. This also checks that the original
+ ellipsoids don't change as part of constructing the booleans.
+ """
+ pf = fake_random_pf(64)
+ pf.h
+ ell1 = pf.h.ellipsoid([0.25]*3, 0.05, 0.05, 0.05, np.array([0.1]*3),
+ np.array([0.1]*3))
+ ell2 = pf.h.ellipsoid([0.75]*3, 0.05, 0.05, 0.05, np.array([0.1]*3),
+ np.array([0.1]*3))
+ # Store the original indices
+ i1 = ell1['ID']
+ i1.sort()
+ i2 = ell2['ID']
+ i2.sort()
+ ii = np.concatenate((i1, i2))
+ ii.sort()
+ # Make some booleans
+ bo1 = pf.h.boolean([ell1, "AND", ell2]) # empty
+ bo2 = pf.h.boolean([ell1, "NOT", ell2]) # only ell1
+ bo3 = pf.h.boolean([ell1, "OR", ell2]) # combination
+ # This makes sure the original containers didn't change.
+ new_i1 = ell1['ID']
+ new_i1.sort()
+ new_i2 = ell2['ID']
+ new_i2.sort()
+ assert_equal(new_i1, i1)
+ assert_equal(new_i2, i2)
+ # Now make sure the indices also behave as we expect.
+ empty = np.array([])
+ assert_array_equal(bo1['ID'], empty)
+ b2 = bo2['ID']
+ b2.sort()
+ assert_array_equal(b2, i1)
+ b3 = bo3['ID']
+ b3.sort()
+ assert_array_equal(b3, ii)
+
+def test_boolean_ellipsoids_overlap():
+ r"""Test to make sure that boolean objects (ellipsoids, overlap)
+ behave the way we expect.
+
+ Test overlapping ellipsoids.
+ """
+ pf = fake_random_pf(64)
+ pf.h
+ ell1 = pf.h.ellipsoid([0.45]*3, 0.05, 0.05, 0.05, np.array([0.1]*3),
+ np.array([0.1]*3))
+ ell2 = pf.h.ellipsoid([0.55]*3, 0.05, 0.05, 0.05, np.array([0.1]*3),
+ np.array([0.1]*3))
+ # Get indices of both.
+ i1 = ell1['ID']
+ i2 = ell2['ID']
+ # Make some booleans
+ bo1 = pf.h.boolean([ell1, "AND", ell2]) # overlap
+ bo2 = pf.h.boolean([ell1, "NOT", ell2]) # ell1 - ell2
+ bo3 = pf.h.boolean([ell1, "OR", ell2]) # combination
+ # Now make sure the indices also behave as we expect.
+ overlap = np.intersect1d(i1, i2)
+ diff = np.setdiff1d(i1, i2)
+ both = np.union1d(i1, i2)
+ b1 = bo1['ID']
+ b1.sort()
+ b2 = bo2['ID']
+ b2.sort()
+ b3 = bo3['ID']
+ b3.sort()
+ assert_array_equal(b1, overlap)
+ assert_array_equal(b2, diff)
+ assert_array_equal(b3, both)
+
+def test_boolean_mix_periodicity():
+ r"""Test that a hybrid boolean region behaves as we expect.
+
+ This also tests nested logic and that periodicity works.
+ """
+ pf = fake_random_pf(64)
+ pf.h
+ re = pf.h.region([0.5]*3, [0.0]*3, [1]*3) # whole thing
+ sp = pf.h.sphere([0.95]*3, 0.3) # wraps around
+ cyl = pf.h.disk([0.05]*3, [1,1,1], 0.1, 0.4) # wraps around
+ ell = pf.h.ellipsoid([0.35]*3, 0.05, 0.05, 0.05, np.array([0.1]*3),
+ np.array([0.1]*3)) # no wrap
+ # Get original indices
+ rei = re['ID']
+ spi = sp['ID']
+ cyli = cyl['ID']
+ elli = ell['ID']
+ # Make some booleans
+ # whole box minus spherical bites at corners
+ bo1 = pf.h.boolean([re, "NOT", sp])
+ # sphere plus cylinder
+ bo2 = pf.h.boolean([sp, "OR", cyl])
+ # a big jumble, the region minus the ell+cyl (scepter shaped?), plus the
+ # sphere which should add back some of what the ell+cyl took out.
+ bo3 = pf.h.boolean([re, "NOT", "(", ell, "OR", cyl, ")", "OR", sp])
+ # Now make sure the indices also behave as we expect.
+ expect = np.setdiff1d(rei, spi)
+ ii = bo1['ID']
+ ii.sort()
+ assert_array_equal(expect, ii)
+ #
+ expect = np.union1d(spi, cyli)
+ ii = bo2['ID']
+ ii.sort()
+ assert_array_equal(expect, ii)
+ #
+ expect = np.union1d(elli, cyli)
+ expect = np.setdiff1d(rei, expect)
+ expect = np.union1d(expect, spi)
+ ii = bo3['ID']
+ ii.sort()
+ assert_array_equal(expect, ii)
+
https://bitbucket.org/yt_analysis/yt/changeset/88d1492cc7e4/
changeset: 88d1492cc7e4
branch: yt
user: sskory
date: 2012-10-23 17:50:32
summary: Fixing import.
affected #: 1 file
diff -r 97aea7f156dfbab8080028fed481f49b8480669c -r 88d1492cc7e418e26b95a69dab03f9489918b167 yt/data_objects/tests/test_boolean_regions.py
--- a/yt/data_objects/tests/test_boolean_regions.py
+++ b/yt/data_objects/tests/test_boolean_regions.py
@@ -1,5 +1,5 @@
from yt.testing import *
-from yt.mods import *
+from yt.data_objects.api import add_field
def setup():
from yt.config import ytcfg
https://bitbucket.org/yt_analysis/yt/changeset/dfaaede9f097/
changeset: dfaaede9f097
branch: yt
user: sskory
date: 2012-10-23 18:10:59
summary: Added nprocs and yield to boolean tests.
affected #: 1 file
diff -r 88d1492cc7e418e26b95a69dab03f9489918b167 -r dfaaede9f09714cd4a2caa51267fc5cbc946fe7e yt/data_objects/tests/test_boolean_regions.py
--- a/yt/data_objects/tests/test_boolean_regions.py
+++ b/yt/data_objects/tests/test_boolean_regions.py
@@ -26,37 +26,38 @@
Test non-overlapping spheres. This also checks that the original spheres
don't change as part of constructing the booleans.
"""
- pf = fake_random_pf(64)
- pf.h
- sp1 = pf.h.sphere([0.25, 0.25, 0.25], 0.15)
- sp2 = pf.h.sphere([0.75, 0.75, 0.75], 0.15)
- # Store the original indices
- i1 = sp1['ID']
- i1.sort()
- i2 = sp2['ID']
- i2.sort()
- ii = np.concatenate((i1, i2))
- ii.sort()
- # Make some booleans
- bo1 = pf.h.boolean([sp1, "AND", sp2]) # empty
- bo2 = pf.h.boolean([sp1, "NOT", sp2]) # only sp1
- bo3 = pf.h.boolean([sp1, "OR", sp2]) # combination
- # This makes sure the original containers didn't change.
- new_i1 = sp1['ID']
- new_i1.sort()
- new_i2 = sp2['ID']
- new_i2.sort()
- assert_equal(new_i1, i1)
- assert_equal(new_i2, i2)
- # Now make sure the indices also behave as we expect.
- empty = np.array([])
- assert_array_equal(bo1['ID'], empty)
- b2 = bo2['ID']
- b2.sort()
- assert_array_equal(b2, i1)
- b3 = bo3['ID']
- b3.sort()
- assert_array_equal(b3, ii)
+ for n in [1, 2, 4, 8]:
+ pf = fake_random_pf(64, nprocs=n)
+ pf.h
+ sp1 = pf.h.sphere([0.25, 0.25, 0.25], 0.15)
+ sp2 = pf.h.sphere([0.75, 0.75, 0.75], 0.15)
+ # Store the original indices
+ i1 = sp1['ID']
+ i1.sort()
+ i2 = sp2['ID']
+ i2.sort()
+ ii = np.concatenate((i1, i2))
+ ii.sort()
+ # Make some booleans
+ bo1 = pf.h.boolean([sp1, "AND", sp2]) # empty
+ bo2 = pf.h.boolean([sp1, "NOT", sp2]) # only sp1
+ bo3 = pf.h.boolean([sp1, "OR", sp2]) # combination
+ # This makes sure the original containers didn't change.
+ new_i1 = sp1['ID']
+ new_i1.sort()
+ new_i2 = sp2['ID']
+ new_i2.sort()
+ yield assert_array_equal, new_i1, i1
+ yield assert_array_equal, new_i2, i2
+ # Now make sure the indices also behave as we expect.
+ empty = np.array([])
+ yield assert_array_equal, bo1['ID'], empty
+ b2 = bo2['ID']
+ b2.sort()
+ yield assert_array_equal, b2, i1
+ b3 = bo3['ID']
+ b3.sort()
+ yield assert_array_equal, b3, ii
def test_boolean_spheres_overlap():
r"""Test to make sure that boolean objects (spheres, overlap)
@@ -64,30 +65,31 @@
Test overlapping spheres.
"""
- pf = fake_random_pf(64)
- pf.h
- sp1 = pf.h.sphere([0.45, 0.45, 0.45], 0.15)
- sp2 = pf.h.sphere([0.55, 0.55, 0.55], 0.15)
- # Get indices of both.
- i1 = sp1['ID']
- i2 = sp2['ID']
- # Make some booleans
- bo1 = pf.h.boolean([sp1, "AND", sp2]) # overlap (a lens)
- bo2 = pf.h.boolean([sp1, "NOT", sp2]) # sp1 - sp2 (sphere with bite)
- bo3 = pf.h.boolean([sp1, "OR", sp2]) # combination (H2)
- # Now make sure the indices also behave as we expect.
- lens = np.intersect1d(i1, i2)
- apple = np.setdiff1d(i1, i2)
- both = np.union1d(i1, i2)
- b1 = bo1['ID']
- b1.sort()
- b2 = bo2['ID']
- b2.sort()
- b3 = bo3['ID']
- b3.sort()
- assert_array_equal(b1, lens)
- assert_array_equal(b2, apple)
- assert_array_equal(b3, both)
+ for n in [1, 2, 4, 8]:
+ pf = fake_random_pf(64, nprocs=n)
+ pf.h
+ sp1 = pf.h.sphere([0.45, 0.45, 0.45], 0.15)
+ sp2 = pf.h.sphere([0.55, 0.55, 0.55], 0.15)
+ # Get indices of both.
+ i1 = sp1['ID']
+ i2 = sp2['ID']
+ # Make some booleans
+ bo1 = pf.h.boolean([sp1, "AND", sp2]) # overlap (a lens)
+ bo2 = pf.h.boolean([sp1, "NOT", sp2]) # sp1 - sp2 (sphere with bite)
+ bo3 = pf.h.boolean([sp1, "OR", sp2]) # combination (H2)
+ # Now make sure the indices also behave as we expect.
+ lens = np.intersect1d(i1, i2)
+ apple = np.setdiff1d(i1, i2)
+ both = np.union1d(i1, i2)
+ b1 = bo1['ID']
+ b1.sort()
+ b2 = bo2['ID']
+ b2.sort()
+ b3 = bo3['ID']
+ b3.sort()
+ yield assert_array_equal, b1, lens
+ yield assert_array_equal, b2, apple
+ yield assert_array_equal, b3, both
def test_boolean_regions_no_overlap():
r"""Test to make sure that boolean objects (regions, no overlap)
@@ -96,37 +98,38 @@
Test non-overlapping regions. This also checks that the original regions
don't change as part of constructing the booleans.
"""
- pf = fake_random_pf(64)
- pf.h
- re1 = pf.h.region([0.25]*3, [0.2]*3, [0.3]*3)
- re2 = pf.h.region([0.65]*3, [0.6]*3, [0.7]*3)
- # Store the original indices
- i1 = re1['ID']
- i1.sort()
- i2 = re2['ID']
- i2.sort()
- ii = np.concatenate((i1, i2))
- ii.sort()
- # Make some booleans
- bo1 = pf.h.boolean([re1, "AND", re2]) # empty
- bo2 = pf.h.boolean([re1, "NOT", re2]) # only re1
- bo3 = pf.h.boolean([re1, "OR", re2]) # combination
- # This makes sure the original containers didn't change.
- new_i1 = re1['ID']
- new_i1.sort()
- new_i2 = re2['ID']
- new_i2.sort()
- assert_equal(new_i1, i1)
- assert_equal(new_i2, i2)
- # Now make sure the indices also behave as we expect.
- empty = np.array([])
- assert_array_equal(bo1['ID'], empty)
- b2 = bo2['ID']
- b2.sort()
- assert_array_equal(b2, i1)
- b3 = bo3['ID']
- b3.sort()
- assert_array_equal(b3, ii)
+ for n in [1, 2, 4, 8]:
+ pf = fake_random_pf(64, nprocs=n)
+ pf.h
+ re1 = pf.h.region([0.25]*3, [0.2]*3, [0.3]*3)
+ re2 = pf.h.region([0.65]*3, [0.6]*3, [0.7]*3)
+ # Store the original indices
+ i1 = re1['ID']
+ i1.sort()
+ i2 = re2['ID']
+ i2.sort()
+ ii = np.concatenate((i1, i2))
+ ii.sort()
+ # Make some booleans
+ bo1 = pf.h.boolean([re1, "AND", re2]) # empty
+ bo2 = pf.h.boolean([re1, "NOT", re2]) # only re1
+ bo3 = pf.h.boolean([re1, "OR", re2]) # combination
+ # This makes sure the original containers didn't change.
+ new_i1 = re1['ID']
+ new_i1.sort()
+ new_i2 = re2['ID']
+ new_i2.sort()
+ yield assert_array_equal, new_i1, i1
+ yield assert_array_equal, new_i2, i2
+ # Now make sure the indices also behave as we expect.
+ empty = np.array([])
+ yield assert_array_equal, bo1['ID'], empty
+ b2 = bo2['ID']
+ b2.sort()
+ yield assert_array_equal, b2, i1
+ b3 = bo3['ID']
+ b3.sort()
+ yield assert_array_equal, b3, ii
def test_boolean_regions_overlap():
r"""Test to make sure that boolean objects (regions, overlap)
@@ -134,30 +137,31 @@
Test overlapping regions.
"""
- pf = fake_random_pf(64)
- pf.h
- re1 = pf.h.region([0.55]*3, [0.5]*3, [0.6]*3)
- re2 = pf.h.region([0.6]*3, [0.55]*3, [0.65]*3)
- # Get indices of both.
- i1 = re1['ID']
- i2 = re2['ID']
- # Make some booleans
- bo1 = pf.h.boolean([re1, "AND", re2]) # overlap (small cube)
- bo2 = pf.h.boolean([re1, "NOT", re2]) # sp1 - sp2 (large cube with bite)
- bo3 = pf.h.boolean([re1, "OR", re2]) # combination (merged large cubes)
- # Now make sure the indices also behave as we expect.
- cube = np.intersect1d(i1, i2)
- bite_cube = np.setdiff1d(i1, i2)
- both = np.union1d(i1, i2)
- b1 = bo1['ID']
- b1.sort()
- b2 = bo2['ID']
- b2.sort()
- b3 = bo3['ID']
- b3.sort()
- assert_array_equal(b1, cube)
- assert_array_equal(b2, bite_cube)
- assert_array_equal(b3, both)
+ for n in [1, 2, 4, 8]:
+ pf = fake_random_pf(64, nprocs=n)
+ pf.h
+ re1 = pf.h.region([0.55]*3, [0.5]*3, [0.6]*3)
+ re2 = pf.h.region([0.6]*3, [0.55]*3, [0.65]*3)
+ # Get indices of both.
+ i1 = re1['ID']
+ i2 = re2['ID']
+ # Make some booleans
+ bo1 = pf.h.boolean([re1, "AND", re2]) # overlap (small cube)
+ bo2 = pf.h.boolean([re1, "NOT", re2]) # re1 - re2 (large cube with bite)
+ bo3 = pf.h.boolean([re1, "OR", re2]) # combination (merged large cubes)
+ # Now make sure the indices also behave as we expect.
+ cube = np.intersect1d(i1, i2)
+ bite_cube = np.setdiff1d(i1, i2)
+ both = np.union1d(i1, i2)
+ b1 = bo1['ID']
+ b1.sort()
+ b2 = bo2['ID']
+ b2.sort()
+ b3 = bo3['ID']
+ b3.sort()
+ yield assert_array_equal, b1, cube
+ yield assert_array_equal, b2, bite_cube
+ yield assert_array_equal, b3, both
def test_boolean_cylinders_no_overlap():
r"""Test to make sure that boolean objects (cylinders, no overlap)
@@ -166,37 +170,38 @@
Test non-overlapping cylinders. This also checks that the original cylinders
don't change as part of constructing the booleans.
"""
- pf = fake_random_pf(64)
- pf.h
- cyl1 = pf.h.disk([0.25]*3, [1, 0, 0], 0.1, 0.1)
- cyl2 = pf.h.disk([0.75]*3, [1, 0, 0], 0.1, 0.1)
- # Store the original indices
- i1 = cyl1['ID']
- i1.sort()
- i2 = cyl2['ID']
- i2.sort()
- ii = np.concatenate((i1, i2))
- ii.sort()
- # Make some booleans
- bo1 = pf.h.boolean([cyl1, "AND", cyl2]) # empty
- bo2 = pf.h.boolean([cyl1, "NOT", cyl2]) # only cyl1
- bo3 = pf.h.boolean([cyl1, "OR", cyl2]) # combination
- # This makes sure the original containers didn't change.
- new_i1 = cyl1['ID']
- new_i1.sort()
- new_i2 = cyl2['ID']
- new_i2.sort()
- assert_equal(new_i1, i1)
- assert_equal(new_i2, i2)
- # Now make sure the indices also behave as we expect.
- empty = np.array([])
- assert_array_equal(bo1['ID'], empty)
- b2 = bo2['ID']
- b2.sort()
- assert_array_equal(b2, i1)
- b3 = bo3['ID']
- b3.sort()
- assert_array_equal(b3, ii)
+ for n in [1, 2, 4, 8]:
+ pf = fake_random_pf(64, nprocs=n)
+ pf.h
+ cyl1 = pf.h.disk([0.25]*3, [1, 0, 0], 0.1, 0.1)
+ cyl2 = pf.h.disk([0.75]*3, [1, 0, 0], 0.1, 0.1)
+ # Store the original indices
+ i1 = cyl1['ID']
+ i1.sort()
+ i2 = cyl2['ID']
+ i2.sort()
+ ii = np.concatenate((i1, i2))
+ ii.sort()
+ # Make some booleans
+ bo1 = pf.h.boolean([cyl1, "AND", cyl2]) # empty
+ bo2 = pf.h.boolean([cyl1, "NOT", cyl2]) # only cyl1
+ bo3 = pf.h.boolean([cyl1, "OR", cyl2]) # combination
+ # This makes sure the original containers didn't change.
+ new_i1 = cyl1['ID']
+ new_i1.sort()
+ new_i2 = cyl2['ID']
+ new_i2.sort()
+ yield assert_array_equal, new_i1, i1
+ yield assert_array_equal, new_i2, i2
+ # Now make sure the indices also behave as we expect.
+ empty = np.array([])
+ yield assert_array_equal, bo1['ID'], empty
+ b2 = bo2['ID']
+ b2.sort()
+ yield assert_array_equal, b2, i1
+ b3 = bo3['ID']
+ b3.sort()
+ yield assert_array_equal, b3, ii
def test_boolean_cylinders_overlap():
r"""Test to make sure that boolean objects (cylinders, overlap)
@@ -204,30 +209,31 @@
Test overlapping cylinders.
"""
- pf = fake_random_pf(64)
- pf.h
- cyl1 = pf.h.disk([0.45]*3, [1, 0, 0], 0.2, 0.2)
- cyl2 = pf.h.disk([0.55]*3, [1, 0, 0], 0.2, 0.2)
- # Get indices of both.
- i1 = cyl1['ID']
- i2 = cyl2['ID']
- # Make some booleans
- bo1 = pf.h.boolean([cyl1, "AND", cyl2]) # overlap (vertically extened lens)
- bo2 = pf.h.boolean([cyl1, "NOT", cyl2]) # sp1 - sp2 (disk minus a bite)
- bo3 = pf.h.boolean([cyl1, "OR", cyl2]) # combination (merged disks)
- # Now make sure the indices also behave as we expect.
- vlens = np.intersect1d(i1, i2)
- bite_disk = np.setdiff1d(i1, i2)
- both = np.union1d(i1, i2)
- b1 = bo1['ID']
- b1.sort()
- b2 = bo2['ID']
- b2.sort()
- b3 = bo3['ID']
- b3.sort()
- assert_array_equal(b1, vlens)
- assert_array_equal(b2, bite_disk)
- assert_array_equal(b3, both)
+ for n in [1, 2, 4, 8]:
+ pf = fake_random_pf(64, nprocs=n)
+ pf.h
+ cyl1 = pf.h.disk([0.45]*3, [1, 0, 0], 0.2, 0.2)
+ cyl2 = pf.h.disk([0.55]*3, [1, 0, 0], 0.2, 0.2)
+ # Get indices of both.
+ i1 = cyl1['ID']
+ i2 = cyl2['ID']
+ # Make some booleans
+ bo1 = pf.h.boolean([cyl1, "AND", cyl2]) # overlap (vertically extended lens)
+ bo2 = pf.h.boolean([cyl1, "NOT", cyl2]) # cyl1 - cyl2 (disk minus a bite)
+ bo3 = pf.h.boolean([cyl1, "OR", cyl2]) # combination (merged disks)
+ # Now make sure the indices also behave as we expect.
+ vlens = np.intersect1d(i1, i2)
+ bite_disk = np.setdiff1d(i1, i2)
+ both = np.union1d(i1, i2)
+ b1 = bo1['ID']
+ b1.sort()
+ b2 = bo2['ID']
+ b2.sort()
+ b3 = bo3['ID']
+ b3.sort()
+ yield assert_array_equal, b1, vlens
+ yield assert_array_equal, b2, bite_disk
+ yield assert_array_equal, b3, both
def test_boolean_ellipsoids_no_overlap():
r"""Test to make sure that boolean objects (ellipsoids, no overlap)
@@ -236,39 +242,40 @@
Test non-overlapping ellipsoids. This also checks that the original
ellipsoids don't change as part of constructing the booleans.
"""
- pf = fake_random_pf(64)
- pf.h
- ell1 = pf.h.ellipsoid([0.25]*3, 0.05, 0.05, 0.05, np.array([0.1]*3),
- np.array([0.1]*3))
- ell2 = pf.h.ellipsoid([0.75]*3, 0.05, 0.05, 0.05, np.array([0.1]*3),
- np.array([0.1]*3))
- # Store the original indices
- i1 = ell1['ID']
- i1.sort()
- i2 = ell2['ID']
- i2.sort()
- ii = np.concatenate((i1, i2))
- ii.sort()
- # Make some booleans
- bo1 = pf.h.boolean([ell1, "AND", ell2]) # empty
- bo2 = pf.h.boolean([ell1, "NOT", ell2]) # only cyl1
- bo3 = pf.h.boolean([ell1, "OR", ell2]) # combination
- # This makes sure the original containers didn't change.
- new_i1 = ell1['ID']
- new_i1.sort()
- new_i2 = ell2['ID']
- new_i2.sort()
- assert_equal(new_i1, i1)
- assert_equal(new_i2, i2)
- # Now make sure the indices also behave as we expect.
- empty = np.array([])
- assert_array_equal(bo1['ID'], empty)
- b2 = bo2['ID']
- b2.sort()
- assert_array_equal(b2, i1)
- b3 = bo3['ID']
- b3.sort()
- assert_array_equal(b3, ii)
+ for n in [1, 2, 4, 8]:
+ pf = fake_random_pf(64, nprocs=n)
+ pf.h
+ ell1 = pf.h.ellipsoid([0.25]*3, 0.05, 0.05, 0.05, np.array([0.1]*3),
+ np.array([0.1]*3))
+ ell2 = pf.h.ellipsoid([0.75]*3, 0.05, 0.05, 0.05, np.array([0.1]*3),
+ np.array([0.1]*3))
+ # Store the original indices
+ i1 = ell1['ID']
+ i1.sort()
+ i2 = ell2['ID']
+ i2.sort()
+ ii = np.concatenate((i1, i2))
+ ii.sort()
+ # Make some booleans
+ bo1 = pf.h.boolean([ell1, "AND", ell2]) # empty
+ bo2 = pf.h.boolean([ell1, "NOT", ell2]) # only ell1
+ bo3 = pf.h.boolean([ell1, "OR", ell2]) # combination
+ # This makes sure the original containers didn't change.
+ new_i1 = ell1['ID']
+ new_i1.sort()
+ new_i2 = ell2['ID']
+ new_i2.sort()
+ yield assert_array_equal, new_i1, i1
+ yield assert_array_equal, new_i2, i2
+ # Now make sure the indices also behave as we expect.
+ empty = np.array([])
+ yield assert_array_equal, bo1['ID'], empty
+ b2 = bo2['ID']
+ b2.sort()
+ yield assert_array_equal, b2, i1
+ b3 = bo3['ID']
+ b3.sort()
+ yield assert_array_equal, b3, ii
def test_boolean_ellipsoids_overlap():
r"""Test to make sure that boolean objects (ellipsoids, overlap)
@@ -276,73 +283,75 @@
Test overlapping ellipsoids.
"""
- pf = fake_random_pf(64)
- pf.h
- ell1 = pf.h.ellipsoid([0.45]*3, 0.05, 0.05, 0.05, np.array([0.1]*3),
- np.array([0.1]*3))
- ell2 = pf.h.ellipsoid([0.55]*3, 0.05, 0.05, 0.05, np.array([0.1]*3),
- np.array([0.1]*3))
- # Get indices of both.
- i1 = ell1['ID']
- i2 = ell2['ID']
- # Make some booleans
- bo1 = pf.h.boolean([ell1, "AND", ell2]) # overlap
- bo2 = pf.h.boolean([ell1, "NOT", ell2]) # ell1 - ell2
- bo3 = pf.h.boolean([ell1, "OR", ell2]) # combination
- # Now make sure the indices also behave as we expect.
- overlap = np.intersect1d(i1, i2)
- diff = np.setdiff1d(i1, i2)
- both = np.union1d(i1, i2)
- b1 = bo1['ID']
- b1.sort()
- b2 = bo2['ID']
- b2.sort()
- b3 = bo3['ID']
- b3.sort()
- assert_array_equal(b1, overlap)
- assert_array_equal(b2, diff)
- assert_array_equal(b3, both)
+ for n in [1, 2, 4, 8]:
+ pf = fake_random_pf(64, nprocs=n)
+ pf.h
+ ell1 = pf.h.ellipsoid([0.45]*3, 0.05, 0.05, 0.05, np.array([0.1]*3),
+ np.array([0.1]*3))
+ ell2 = pf.h.ellipsoid([0.55]*3, 0.05, 0.05, 0.05, np.array([0.1]*3),
+ np.array([0.1]*3))
+ # Get indices of both.
+ i1 = ell1['ID']
+ i2 = ell2['ID']
+ # Make some booleans
+ bo1 = pf.h.boolean([ell1, "AND", ell2]) # overlap
+ bo2 = pf.h.boolean([ell1, "NOT", ell2]) # ell1 - ell2
+ bo3 = pf.h.boolean([ell1, "OR", ell2]) # combination
+ # Now make sure the indices also behave as we expect.
+ overlap = np.intersect1d(i1, i2)
+ diff = np.setdiff1d(i1, i2)
+ both = np.union1d(i1, i2)
+ b1 = bo1['ID']
+ b1.sort()
+ b2 = bo2['ID']
+ b2.sort()
+ b3 = bo3['ID']
+ b3.sort()
+ yield assert_array_equal, b1, overlap
+ yield assert_array_equal, b2, diff
+ yield assert_array_equal, b3, both
def test_boolean_mix_periodicity():
r"""Test that a hybrid boolean region behaves as we expect.
This also tests nested logic and that periodicity works.
"""
- pf = fake_random_pf(64)
- pf.h
- re = pf.h.region([0.5]*3, [0.0]*3, [1]*3) # whole thing
- sp = pf.h.sphere([0.95]*3, 0.3) # wraps around
- cyl = pf.h.disk([0.05]*3, [1,1,1], 0.1, 0.4) # wraps around
- ell = pf.h.ellipsoid([0.35]*3, 0.05, 0.05, 0.05, np.array([0.1]*3),
- np.array([0.1]*3)) # no wrap
- # Get original indices
- rei = re['ID']
- spi = sp['ID']
- cyli = cyl['ID']
- elli = ell['ID']
- # Make some booleans
+ # whole box minus spherical bites at corners
- bo1 = pf.h.boolean([re, "NOT", sp])
- # sphere plus cylinder
- bo2 = pf.h.boolean([sp, "OR", cyl])
- # a big jumble, the region minus the ell+cyl (scepter shaped?), plus the
- # sphere which should add back some of what the ell+cyl took out.
- bo3 = pf.h.boolean([re, "NOT", "(", ell, "OR", cyl, ")", "OR", sp])
- # Now make sure the indices also behave as we expect.
- expect = np.setdiff1d(rei, spi)
- ii = bo1['ID']
- ii.sort()
- assert_array_equal(expect, ii)
- #
- expect = np.union1d(spi, cyli)
- ii = bo2['ID']
- ii.sort()
- assert_array_equal(expect, ii)
- #
- expect = np.union1d(elli, cyli)
- expect = np.setdiff1d(rei, expect)
- expect = np.union1d(expect, spi)
- ii = bo3['ID']
- ii.sort()
- assert_array_equal(expect, ii)
+ for n in [1, 2, 4, 8]:
+ pf = fake_random_pf(64, nprocs=n)
+ pf.h
+ re = pf.h.region([0.5]*3, [0.0]*3, [1]*3) # whole thing
+ sp = pf.h.sphere([0.95]*3, 0.3) # wraps around
+ cyl = pf.h.disk([0.05]*3, [1,1,1], 0.1, 0.4) # wraps around
+ ell = pf.h.ellipsoid([0.35]*3, 0.05, 0.05, 0.05, np.array([0.1]*3),
+ np.array([0.1]*3)) # no wrap
+ # Get original indices
+ rei = re['ID']
+ spi = sp['ID']
+ cyli = cyl['ID']
+ elli = ell['ID']
+ # Make some booleans
+ # whole box minus spherical bites at corners
+ bo1 = pf.h.boolean([re, "NOT", sp])
+ # sphere plus cylinder
+ bo2 = pf.h.boolean([sp, "OR", cyl])
+ # a big jumble, the region minus the ell+cyl (scepter shaped?), plus the
+ # sphere which should add back some of what the ell+cyl took out.
+ bo3 = pf.h.boolean([re, "NOT", "(", ell, "OR", cyl, ")", "OR", sp])
+ # Now make sure the indices also behave as we expect.
+ expect = np.setdiff1d(rei, spi)
+ ii = bo1['ID']
+ ii.sort()
+ yield assert_array_equal, expect, ii
+ #
+ expect = np.union1d(spi, cyli)
+ ii = bo2['ID']
+ ii.sort()
+ yield assert_array_equal, expect, ii
+ #
+ expect = np.union1d(elli, cyli)
+ expect = np.setdiff1d(rei, expect)
+ expect = np.union1d(expect, spi)
+ ii = bo3['ID']
+ ii.sort()
+ yield assert_array_equal, expect, ii
https://bitbucket.org/yt_analysis/yt/changeset/a4bb699ba3cd/
changeset: a4bb699ba3cd
branch: yt
user: sskory
date: 2012-10-24 01:16:02
summary: Merge.
affected #: 12 files
diff -r dfaaede9f09714cd4a2caa51267fc5cbc946fe7e -r a4bb699ba3cd642a7efe76ecbf06e29a38d2a10a setup.py
--- a/setup.py
+++ b/setup.py
@@ -154,7 +154,11 @@
'amr adaptivemeshrefinement',
entry_points={'console_scripts': [
'yt = yt.utilities.command_line:run_main',
- ]},
+ ],
+ 'nose.plugins.0.10': [
+ 'answer-testing = yt.utilities.answer_testing.framework:AnswerTesting'
+ ]
+ },
author="Matthew J. Turk",
author_email="matthewturk at gmail.com",
url="http://yt-project.org/",
diff -r dfaaede9f09714cd4a2caa51267fc5cbc946fe7e -r a4bb699ba3cd642a7efe76ecbf06e29a38d2a10a yt/config.py
--- a/yt/config.py
+++ b/yt/config.py
@@ -54,6 +54,7 @@
pasteboard_repo = '',
reconstruct_hierarchy = 'False',
test_storage_dir = '/does/not/exist',
+ test_data_dir = '/does/not/exist',
enzo_db = '',
hub_url = 'https://hub.yt-project.org/upload',
hub_api_key = '',
diff -r dfaaede9f09714cd4a2caa51267fc5cbc946fe7e -r a4bb699ba3cd642a7efe76ecbf06e29a38d2a10a yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -3502,10 +3502,7 @@
for gi, g in enumerate(grids): self._grids[gi] = g
def _is_fully_enclosed(self, grid):
- r = np.abs(grid._corners - self.center)
- r = np.minimum(r, np.abs(self.DW[None,:]-r))
- corner_radius = np.sqrt((r**2.0).sum(axis=1))
- return np.all(corner_radius <= self.radius)
+ return False
@restore_grid_state # Pains me not to decorate with cache_mask here
def _get_cut_mask(self, grid, field=None):
diff -r dfaaede9f09714cd4a2caa51267fc5cbc946fe7e -r a4bb699ba3cd642a7efe76ecbf06e29a38d2a10a yt/frontends/enzo/tests/test_outputs.py
--- /dev/null
+++ b/yt/frontends/enzo/tests/test_outputs.py
@@ -0,0 +1,51 @@
+"""
+Enzo frontend tests using moving7
+
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: Columbia University
+Homepage: http://yt-project.org/
+License:
+ Copyright (C) 2012 Matthew Turk. All Rights Reserved.
+
+ This file is part of yt.
+
+ yt is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+from yt.testing import *
+from yt.utilities.answer_testing.framework import \
+ requires_pf, \
+ small_patch_amr, \
+ big_patch_amr, \
+ data_dir_load
+from yt.frontends.enzo.api import EnzoStaticOutput
+
+_fields = ("Temperature", "Density", "VelocityMagnitude", "DivV",
+ "particle_density")
+
+m7 = "DD0010/moving7_0010"
+ at requires_pf(m7)
+def test_moving7():
+ pf = data_dir_load(m7)
+ yield assert_equal, str(pf), "moving7_0010"
+ for test in small_patch_amr(m7, _fields):
+ yield test
+
+g30 = "IsolatedGalaxy/galaxy0030/galaxy0030"
+ at requires_pf(g30, big_data=True)
+def test_galaxy0030():
+ pf = data_dir_load(g30)
+ yield assert_equal, str(pf), "galaxy0030"
+ for test in big_patch_amr(g30, _fields):
+ yield test
diff -r dfaaede9f09714cd4a2caa51267fc5cbc946fe7e -r a4bb699ba3cd642a7efe76ecbf06e29a38d2a10a yt/testing.py
--- a/yt/testing.py
+++ b/yt/testing.py
@@ -29,6 +29,15 @@
assert_array_less, assert_string_equal, assert_array_almost_equal_nulp
def assert_rel_equal(a1, a2, decimals):
+ # We have nan checks in here because occasionally we have fields that get
+ # weighted without non-zero weights. I'm looking at you, particle fields!
+ if isinstance(a1, np.ndarray):
+ assert(a1.size == a2.size)
+ # Mask out NaNs
+ a1[np.isnan(a1)] = 1.0
+ a2[np.isnan(a2)] = 1.0
+ elif np.isnan(a1) and np.isnan(a2):
+ return True
return assert_almost_equal(a1/a2, 1.0, decimals)
def amrspace(extent, levels=7, cells=8):
diff -r dfaaede9f09714cd4a2caa51267fc5cbc946fe7e -r a4bb699ba3cd642a7efe76ecbf06e29a38d2a10a yt/utilities/answer_testing/__init__.py
--- a/yt/utilities/answer_testing/__init__.py
+++ b/yt/utilities/answer_testing/__init__.py
@@ -22,10 +22,3 @@
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
-
-import runner
-import output_tests
-from runner import RegressionTestRunner
-
-from output_tests import RegressionTest, SingleOutputTest, \
- MultipleOutputTest, YTStaticOutputTest, create_test
diff -r dfaaede9f09714cd4a2caa51267fc5cbc946fe7e -r a4bb699ba3cd642a7efe76ecbf06e29a38d2a10a yt/utilities/answer_testing/default_tests.py
--- a/yt/utilities/answer_testing/default_tests.py
+++ b/yt/utilities/answer_testing/default_tests.py
@@ -67,3 +67,4 @@
for field in sorted(self.result):
for p1, p2 in zip(self.result[field], old_result[field]):
self.compare_data_arrays(p1, p2, self.tolerance)
+
diff -r dfaaede9f09714cd4a2caa51267fc5cbc946fe7e -r a4bb699ba3cd642a7efe76ecbf06e29a38d2a10a yt/utilities/answer_testing/framework.py
--- /dev/null
+++ b/yt/utilities/answer_testing/framework.py
@@ -0,0 +1,396 @@
+"""
+Answer Testing using Nose as a starting point
+
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: Columbia University
+Homepage: http://yt-project.org/
+License:
+ Copyright (C) 2012 Matthew Turk. All Rights Reserved.
+
+ This file is part of yt.
+
+ yt is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+import logging
+import os
+import hashlib
+import contextlib
+import urllib2
+import cPickle
+
+from nose.plugins import Plugin
+from yt.testing import *
+from yt.config import ytcfg
+from yt.mods import *
+import cPickle
+
+from yt.utilities.logger import disable_stream_logging
+from yt.utilities.command_line import get_yt_version
+
+mylog = logging.getLogger('nose.plugins.answer-testing')
+run_big_data = False
+
+_latest = "gold001"
+_url_path = "http://yt-answer-tests.s3-website-us-east-1.amazonaws.com/%s_%s"
+
+class AnswerTesting(Plugin):
+ name = "answer-testing"
+
+ def options(self, parser, env=os.environ):
+ super(AnswerTesting, self).options(parser, env=env)
+ parser.add_option("--answer-compare", dest="compare_name",
+ default=_latest, help="The name against which we will compare")
+ parser.add_option("--answer-big-data", dest="big_data",
+ default=False, help="Should we run against big data, too?",
+ action="store_true")
+ parser.add_option("--answer-name", dest="this_name",
+ default=None,
+ help="The name we'll call this set of tests")
+ parser.add_option("--answer-store", dest="store_results",
+ default=False, action="store_true")
+
+ def configure(self, options, conf):
+ super(AnswerTesting, self).configure(options, conf)
+ if not self.enabled:
+ return
+ disable_stream_logging()
+ try:
+ my_hash = get_yt_version()
+ except:
+ my_hash = "UNKNOWN%s" % (time.time())
+ if options.this_name is None: options.this_name = my_hash
+ from yt.config import ytcfg
+ ytcfg["yt","__withintesting"] = "True"
+ AnswerTestingTest.result_storage = \
+ self.result_storage = defaultdict(dict)
+ if options.compare_name is not None:
+ # Now we grab from our S3 store
+ if options.compare_name == "latest":
+ options.compare_name = _latest
+ AnswerTestingTest.reference_storage = \
+ AnswerTestOpener(options.compare_name)
+ self.answer_name = options.this_name
+ self.store_results = options.store_results
+ global run_big_data
+ run_big_data = options.big_data
+
+ def finalize(self, result):
+ # This is where we dump our result storage up to Amazon, if we are able
+ # to.
+ if self.store_results is False: return
+ import boto
+ from boto.s3.key import Key
+ c = boto.connect_s3()
+ bucket = c.get_bucket("yt-answer-tests")
+ for pf_name in self.result_storage:
+ rs = cPickle.dumps(self.result_storage[pf_name])
+ tk = bucket.get_key("%s_%s" % (self.answer_name, pf_name))
+ if tk is not None: tk.delete()
+ k = Key(bucket)
+ k.key = "%s_%s" % (self.answer_name, pf_name)
+ k.set_contents_from_string(rs)
+ k.set_acl("public-read")
+
+class AnswerTestOpener(object):
+ def __init__(self, reference_name):
+ self.reference_name = reference_name
+ self.cache = {}
+
+ def get(self, pf_name, default = None):
+ if pf_name in self.cache: return self.cache[pf_name]
+ url = _url_path % (self.reference_name, pf_name)
+ try:
+ resp = urllib2.urlopen(url)
+ # This is dangerous, but we have a controlled S3 environment
+ data = resp.read()
+ rv = cPickle.loads(data)
+ except urllib2.HTTPError as ex:
+ raise YTNoOldAnswer(url)
+ mylog.warning("Missing %s (%s)", url, ex)
+ rv = default
+ self.cache[pf_name] = rv
+ return rv
+
+ at contextlib.contextmanager
+def temp_cwd(cwd):
+ oldcwd = os.getcwd()
+ os.chdir(cwd)
+ yield
+ os.chdir(oldcwd)
+
+def can_run_pf(pf_fn):
+ path = ytcfg.get("yt", "test_data_dir")
+ with temp_cwd(path):
+ try:
+ load(pf_fn)
+ except:
+ return False
+ return AnswerTestingTest.result_storage is not None
+
+def data_dir_load(pf_fn):
+ path = ytcfg.get("yt", "test_data_dir")
+ with temp_cwd(path):
+ pf = load(pf_fn)
+ pf.h
+ return pf
+
+class AnswerTestingTest(object):
+ reference_storage = None
+ def __init__(self, pf_fn):
+ self.pf = data_dir_load(pf_fn)
+
+ def __call__(self):
+ nv = self.run()
+ if self.reference_storage is not None:
+ dd = self.reference_storage.get(str(self.pf))
+ if dd is None: raise YTNoOldAnswer()
+ ov = dd[self.description]
+ self.compare(nv, ov)
+ else:
+ ov = None
+ self.result_storage[str(self.pf)][self.description] = nv
+
+ def compare(self, new_result, old_result):
+ raise RuntimeError
+
+ def create_obj(self, pf, obj_type):
+ # obj_type should be tuple of
+ # ( obj_name, ( args ) )
+ if obj_type is None:
+ return pf.h.all_data()
+ cls = getattr(pf.h, obj_type[0])
+ obj = cls(*obj_type[1])
+ return obj
+
+ @property
+ def sim_center(self):
+ """
+ This returns the center of the domain.
+ """
+ return 0.5*(self.pf.domain_right_edge + self.pf.domain_left_edge)
+
+ @property
+ def max_dens_location(self):
+ """
+ This is a helper function to return the location of the most dense
+ point.
+ """
+ return self.pf.h.find_max("Density")[1]
+
+ @property
+ def entire_simulation(self):
+ """
+ Return an unsorted array of values that cover the entire domain.
+ """
+ return self.pf.h.all_data()
+
+ @property
+ def description(self):
+ obj_type = getattr(self, "obj_type", None)
+ if obj_type is None:
+ oname = "all"
+ else:
+ oname = "_".join((str(s) for s in obj_type))
+ args = [self._type_name, str(self.pf), oname]
+ args += [str(getattr(self, an)) for an in self._attrs]
+ return "_".join(args)
+
+class FieldValuesTest(AnswerTestingTest):
+ _type_name = "FieldValues"
+ _attrs = ("field", )
+
+ def __init__(self, pf_fn, field, obj_type = None):
+ super(FieldValuesTest, self).__init__(pf_fn)
+ self.obj_type = obj_type
+ self.field = field
+
+ def run(self):
+ obj = self.create_obj(self.pf, self.obj_type)
+ avg = obj.quantities["WeightedAverageQuantity"](self.field,
+ weight="Ones")
+ (mi, ma), = obj.quantities["Extrema"](self.field)
+ return np.array([avg, mi, ma])
+
+ def compare(self, new_result, old_result):
+ assert_equal(new_result, old_result)
+
+class ProjectionValuesTest(AnswerTestingTest):
+ _type_name = "ProjectionValues"
+ _attrs = ("field", "axis", "weight_field")
+
+ def __init__(self, pf_fn, axis, field, weight_field = None,
+ obj_type = None):
+ super(ProjectionValuesTest, self).__init__(pf_fn)
+ self.axis = axis
+ self.field = field
+ self.weight_field = weight_field
+ self.obj_type = obj_type
+
+ def run(self):
+ if self.obj_type is not None:
+ obj = self.create_obj(self.pf, self.obj_type)
+ else:
+ obj = None
+ proj = self.pf.h.proj(self.axis, self.field,
+ weight_field=self.weight_field,
+ data_source = obj)
+ return proj.field_data
+
+ def compare(self, new_result, old_result):
+ assert(len(new_result) == len(old_result))
+ for k in new_result:
+ assert (k in old_result)
+ for k in new_result:
+ assert_equal(new_result[k], old_result[k])
+
+class PixelizedProjectionValuesTest(AnswerTestingTest):
+ _type_name = "PixelizedProjectionValues"
+ _attrs = ("field", "axis", "weight_field")
+
+ def __init__(self, pf_fn, axis, field, weight_field = None,
+ obj_type = None):
+ super(PixelizedProjectionValuesTest, self).__init__(pf_fn)
+ self.axis = axis
+ self.field = field
+ self.weight_field = weight_field
+ self.obj_type = obj_type
+
+ def run(self):
+ if self.obj_type is not None:
+ obj = self.create_obj(self.pf, self.obj_type)
+ else:
+ obj = None
+ proj = self.pf.h.proj(self.axis, self.field,
+ weight_field=self.weight_field,
+ data_source = obj)
+ frb = proj.to_frb((1.0, 'unitary'), 256)
+ frb[self.field]
+ frb[self.weight_field]
+ d = frb.data
+ d.update( dict( (("%s_sum" % f, proj[f].sum(dtype="float64"))
+ for f in proj.field_data.keys()) ) )
+ return d
+
+ def compare(self, new_result, old_result):
+ assert(len(new_result) == len(old_result))
+ for k in new_result:
+ assert (k in old_result)
+ for k in new_result:
+ assert_rel_equal(new_result[k], old_result[k], 10)
+
+class GridValuesTest(AnswerTestingTest):
+ _type_name = "GridValues"
+ _attrs = ("field",)
+
+ def __init__(self, pf_fn, field):
+ super(GridValuesTest, self).__init__(pf_fn)
+ self.field = field
+
+ def run(self):
+ hashes = {}
+ for g in self.pf.h.grids:
+ hashes[g.id] = hashlib.md5(g[self.field].tostring()).hexdigest()
+ g.clear_data()
+ return hashes
+
+ def compare(self, new_result, old_result):
+ assert(len(new_result) == len(old_result))
+ for k in new_result:
+ assert (k in old_result)
+ for k in new_result:
+ assert_equal(new_result[k], old_result[k])
+
+class GridHierarchyTest(AnswerTestingTest):
+ _type_name = "GridHierarchy"
+ _attrs = ()
+
+ def run(self):
+ result = {}
+ result["grid_dimensions"] = self.pf.h.grid_dimensions
+ result["grid_left_edges"] = self.pf.h.grid_left_edge
+ result["grid_right_edges"] = self.pf.h.grid_right_edge
+ result["grid_levels"] = self.pf.h.grid_levels
+ result["grid_particle_count"] = self.pf.h.grid_particle_count
+ return result
+
+ def compare(self, new_result, old_result):
+ for k in new_result:
+ assert_equal(new_result[k], old_result[k])
+
+class ParentageRelationshipsTest(AnswerTestingTest):
+ _type_name = "ParentageRelationships"
+ _attrs = ()
+ def run(self):
+ result = {}
+ result["parents"] = []
+ result["children"] = []
+ for g in self.pf.h.grids:
+ p = g.Parent
+ if p is None:
+ result["parents"].append(None)
+ elif hasattr(p, "id"):
+ result["parents"].append(p.id)
+ else:
+ result["parents"].append([pg.id for pg in p])
+ result["children"].append([c.id for c in g.Children])
+ return result
+
+ def compare(self, new_result, old_result):
+ for newp, oldp in zip(new_result["parents"], old_result["parents"]):
+ assert(newp == oldp)
+ for newc, oldc in zip(new_result["children"], old_result["children"]):
+ assert(newc == oldc)
+
+def requires_pf(pf_fn, big_data = False):
+ def ffalse(func):
+ return lambda: None
+ def ftrue(func):
+ return func
+ if run_big_data == False and big_data == True:
+ return ffalse
+ elif not can_run_pf(pf_fn):
+ return ffalse
+ else:
+ return ftrue
+
+def small_patch_amr(pf_fn, fields):
+ if not can_run_pf(pf_fn): return
+ dso = [ None, ("sphere", ("max", (0.1, 'unitary')))]
+ yield GridHierarchyTest(pf_fn)
+ yield ParentageRelationshipsTest(pf_fn)
+ for field in fields:
+ yield GridValuesTest(pf_fn, field)
+ for axis in [0, 1, 2]:
+ for ds in dso:
+ for weight_field in [None, "Density"]:
+ yield ProjectionValuesTest(
+ pf_fn, axis, field, weight_field,
+ ds)
+ yield FieldValuesTest(
+ pf_fn, field, ds)
+
+def big_patch_amr(pf_fn, fields):
+ if not can_run_pf(pf_fn): return
+ dso = [ None, ("sphere", ("max", (0.1, 'unitary')))]
+ yield GridHierarchyTest(pf_fn)
+ yield ParentageRelationshipsTest(pf_fn)
+ for field in fields:
+ yield GridValuesTest(pf_fn, field)
+ for axis in [0, 1, 2]:
+ for ds in dso:
+ for weight_field in [None, "Density"]:
+ yield PixelizedProjectionValuesTest(
+ pf_fn, axis, field, weight_field,
+ ds)
diff -r dfaaede9f09714cd4a2caa51267fc5cbc946fe7e -r a4bb699ba3cd642a7efe76ecbf06e29a38d2a10a yt/utilities/answer_testing/output_tests.py
--- a/yt/utilities/answer_testing/output_tests.py
+++ b/yt/utilities/answer_testing/output_tests.py
@@ -29,14 +29,12 @@
# We first create our dictionary of tests to run. This starts out empty, and
# as tests are imported it will be filled.
if "TestRegistry" not in locals():
- print "Initializing TestRegistry"
class TestRegistry(dict):
def __new__(cls, *p, **k):
if not '_the_instance' in cls.__dict__:
cls._the_instance = dict.__new__(cls)
return cls._the_instance
if "test_registry" not in locals():
- print "Initializing test_registry"
test_registry = TestRegistry()
# The exceptions we raise, related to the character of the failure.
diff -r dfaaede9f09714cd4a2caa51267fc5cbc946fe7e -r a4bb699ba3cd642a7efe76ecbf06e29a38d2a10a yt/utilities/exceptions.py
--- a/yt/utilities/exceptions.py
+++ b/yt/utilities/exceptions.py
@@ -154,3 +154,11 @@
def __str__(self):
return "No filenames were found to match the pattern: " + \
"'%s'" % (self.pattern)
+
+class YTNoOldAnswer(YTException):
+ def __init__(self, path):
+ self.path = path
+
+ def __str__(self):
+ return "There is no old answer available.\n" + \
+ str(self.path)
https://bitbucket.org/yt_analysis/yt/changeset/7f963c6458bc/
changeset: 7f963c6458bc
branch: yt
user: sskory
date: 2012-10-24 05:04:24
summary: Removed ellipsoid from test_boolean_mix_periodicity test because
it is problematic.
affected #: 1 file
diff -r a4bb699ba3cd642a7efe76ecbf06e29a38d2a10a -r 7f963c6458bced3770cfef7fd0dc4b85f9b2b338 yt/data_objects/tests/test_boolean_regions.py
--- a/yt/data_objects/tests/test_boolean_regions.py
+++ b/yt/data_objects/tests/test_boolean_regions.py
@@ -322,21 +322,17 @@
re = pf.h.region([0.5]*3, [0.0]*3, [1]*3) # whole thing
sp = pf.h.sphere([0.95]*3, 0.3) # wraps around
cyl = pf.h.disk([0.05]*3, [1,1,1], 0.1, 0.4) # wraps around
- ell = pf.h.ellipsoid([0.35]*3, 0.05, 0.05, 0.05, np.array([0.1]*3),
- np.array([0.1]*3)) # no wrap
# Get original indices
rei = re['ID']
spi = sp['ID']
cyli = cyl['ID']
- elli = ell['ID']
# Make some booleans
# whole box minus spherical bites at corners
bo1 = pf.h.boolean([re, "NOT", sp])
# sphere plus cylinder
bo2 = pf.h.boolean([sp, "OR", cyl])
- # a big jumble, the region minus the ell+cyl (scepter shaped?), plus the
- # sphere which should add back some of what the ell+cyl took out.
- bo3 = pf.h.boolean([re, "NOT", "(", ell, "OR", cyl, ")", "OR", sp])
+ # a jumble, the region minus the sp+cyl
+ bo3 = pf.h.boolean([re, "NOT", "(", sp, "OR", cyl, ")"])
# Now make sure the indices also behave as we expect.
expect = np.setdiff1d(rei, spi)
ii = bo1['ID']
@@ -348,9 +344,8 @@
ii.sort()
yield assert_array_equal, expect, ii
#
- expect = np.union1d(elli, cyli)
+ expect = np.union1d(spi, cyli)
expect = np.setdiff1d(rei, expect)
- expect = np.union1d(expect, spi)
ii = bo3['ID']
ii.sort()
yield assert_array_equal, expect, ii
https://bitbucket.org/yt_analysis/yt/changeset/81d3dd529a1d/
changeset: 81d3dd529a1d
branch: yt
user: MatthewTurk
date: 2012-10-24 21:35:09
summary: Merged in sskory/yt (pull request #311)
affected #: 2 files
diff -r 4c9fee8d49acfebfa2756f286d0352e85a4f6de2 -r 81d3dd529a1d278f5f3434c4e15d8336063dd0b6 yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -71,7 +71,7 @@
def force_array(item, shape):
try:
sh = item.shape
- return item
+ return item.copy()
except AttributeError:
if item:
return np.ones(shape, dtype='bool')
diff -r 4c9fee8d49acfebfa2756f286d0352e85a4f6de2 -r 81d3dd529a1d278f5f3434c4e15d8336063dd0b6 yt/data_objects/tests/test_boolean_regions.py
--- /dev/null
+++ b/yt/data_objects/tests/test_boolean_regions.py
@@ -0,0 +1,352 @@
+from yt.testing import *
+from yt.data_objects.api import add_field
+
+def setup():
+ from yt.config import ytcfg
+ ytcfg["yt","__withintesting"] = "True"
+ def _ID(field, data):
+ width = data.pf.domain_right_edge - data.pf.domain_left_edge
+ delta = width / data.pf.h.get_smallest_dx()
+ x = data['x'] - data.pf.h.get_smallest_dx() / 2.
+ y = data['y'] - data.pf.h.get_smallest_dx() / 2.
+ z = data['z'] - data.pf.h.get_smallest_dx() / 2.
+ xi = x / data.pf.h.get_smallest_dx()
+ yi = y / data.pf.h.get_smallest_dx()
+ zi = z / data.pf.h.get_smallest_dx()
+ index = xi + delta[0] * (yi + delta[1] * zi)
+ index = index.astype('int64')
+ return index
+
+ add_field("ID", function=_ID)
+
+def test_boolean_spheres_no_overlap():
+ r"""Test to make sure that boolean objects (spheres, no overlap)
+ behave the way we expect.
+
+ Test non-overlapping spheres. This also checks that the original spheres
+ don't change as part of constructing the booleans.
+ """
+ for n in [1, 2, 4, 8]:
+ pf = fake_random_pf(64, nprocs=n)
+ pf.h
+ sp1 = pf.h.sphere([0.25, 0.25, 0.25], 0.15)
+ sp2 = pf.h.sphere([0.75, 0.75, 0.75], 0.15)
+ # Store the original indices
+ i1 = sp1['ID']
+ i1.sort()
+ i2 = sp2['ID']
+ i2.sort()
+ ii = np.concatenate((i1, i2))
+ ii.sort()
+ # Make some booleans
+ bo1 = pf.h.boolean([sp1, "AND", sp2]) # empty
+ bo2 = pf.h.boolean([sp1, "NOT", sp2]) # only sp1
+ bo3 = pf.h.boolean([sp1, "OR", sp2]) # combination
+ # This makes sure the original containers didn't change.
+ new_i1 = sp1['ID']
+ new_i1.sort()
+ new_i2 = sp2['ID']
+ new_i2.sort()
+ yield assert_array_equal, new_i1, i1
+ yield assert_array_equal, new_i2, i2
+ # Now make sure the indices also behave as we expect.
+ empty = np.array([])
+ yield assert_array_equal, bo1['ID'], empty
+ b2 = bo2['ID']
+ b2.sort()
+ yield assert_array_equal, b2, i1
+ b3 = bo3['ID']
+ b3.sort()
+ yield assert_array_equal, b3, ii
+
+def test_boolean_spheres_overlap():
+ r"""Test to make sure that boolean objects (spheres, overlap)
+ behave the way we expect.
+
+ Test overlapping spheres.
+ """
+ for n in [1, 2, 4, 8]:
+ pf = fake_random_pf(64, nprocs=n)
+ pf.h
+ sp1 = pf.h.sphere([0.45, 0.45, 0.45], 0.15)
+ sp2 = pf.h.sphere([0.55, 0.55, 0.55], 0.15)
+ # Get indices of both.
+ i1 = sp1['ID']
+ i2 = sp2['ID']
+ # Make some booleans
+ bo1 = pf.h.boolean([sp1, "AND", sp2]) # overlap (a lens)
+ bo2 = pf.h.boolean([sp1, "NOT", sp2]) # sp1 - sp2 (sphere with bite)
+ bo3 = pf.h.boolean([sp1, "OR", sp2]) # combination (H2)
+ # Now make sure the indices also behave as we expect.
+ lens = np.intersect1d(i1, i2)
+ apple = np.setdiff1d(i1, i2)
+ both = np.union1d(i1, i2)
+ b1 = bo1['ID']
+ b1.sort()
+ b2 = bo2['ID']
+ b2.sort()
+ b3 = bo3['ID']
+ b3.sort()
+ yield assert_array_equal, b1, lens
+ yield assert_array_equal, b2, apple
+ yield assert_array_equal, b3, both
+
+def test_boolean_regions_no_overlap():
+ r"""Test to make sure that boolean objects (regions, no overlap)
+ behave the way we expect.
+
+ Test non-overlapping regions. This also checks that the original regions
+ don't change as part of constructing the booleans.
+ """
+ for n in [1, 2, 4, 8]:
+ pf = fake_random_pf(64, nprocs=n)
+ pf.h
+ re1 = pf.h.region([0.25]*3, [0.2]*3, [0.3]*3)
+ re2 = pf.h.region([0.65]*3, [0.6]*3, [0.7]*3)
+ # Store the original indices
+ i1 = re1['ID']
+ i1.sort()
+ i2 = re2['ID']
+ i2.sort()
+ ii = np.concatenate((i1, i2))
+ ii.sort()
+ # Make some booleans
+ bo1 = pf.h.boolean([re1, "AND", re2]) # empty
+ bo2 = pf.h.boolean([re1, "NOT", re2]) # only re1
+ bo3 = pf.h.boolean([re1, "OR", re2]) # combination
+ # This makes sure the original containers didn't change.
+ new_i1 = re1['ID']
+ new_i1.sort()
+ new_i2 = re2['ID']
+ new_i2.sort()
+ yield assert_array_equal, new_i1, i1
+ yield assert_array_equal, new_i2, i2
+ # Now make sure the indices also behave as we expect.
+ empty = np.array([])
+ yield assert_array_equal, bo1['ID'], empty
+ b2 = bo2['ID']
+ b2.sort()
+ yield assert_array_equal, b2, i1
+ b3 = bo3['ID']
+ b3.sort()
+ yield assert_array_equal, b3, ii
+
+def test_boolean_regions_overlap():
+ r"""Test to make sure that boolean objects (regions, overlap)
+ behave the way we expect.
+
+ Test overlapping regions.
+ """
+ for n in [1, 2, 4, 8]:
+ pf = fake_random_pf(64, nprocs=n)
+ pf.h
+ re1 = pf.h.region([0.55]*3, [0.5]*3, [0.6]*3)
+ re2 = pf.h.region([0.6]*3, [0.55]*3, [0.65]*3)
+ # Get indices of both.
+ i1 = re1['ID']
+ i2 = re2['ID']
+ # Make some booleans
+ bo1 = pf.h.boolean([re1, "AND", re2]) # overlap (small cube)
+ bo2 = pf.h.boolean([re1, "NOT", re2]) # sp1 - sp2 (large cube with bite)
+ bo3 = pf.h.boolean([re1, "OR", re2]) # combination (merged large cubes)
+ # Now make sure the indices also behave as we expect.
+ cube = np.intersect1d(i1, i2)
+ bite_cube = np.setdiff1d(i1, i2)
+ both = np.union1d(i1, i2)
+ b1 = bo1['ID']
+ b1.sort()
+ b2 = bo2['ID']
+ b2.sort()
+ b3 = bo3['ID']
+ b3.sort()
+ yield assert_array_equal, b1, cube
+ yield assert_array_equal, b2, bite_cube
+ yield assert_array_equal, b3, both
+
+def test_boolean_cylinders_no_overlap():
+ r"""Test to make sure that boolean objects (cylinders, no overlap)
+ behave the way we expect.
+
+ Test non-overlapping cylinders. This also checks that the original cylinders
+ don't change as part of constructing the booleans.
+ """
+ for n in [1, 2, 4, 8]:
+ pf = fake_random_pf(64, nprocs=n)
+ pf.h
+ cyl1 = pf.h.disk([0.25]*3, [1, 0, 0], 0.1, 0.1)
+ cyl2 = pf.h.disk([0.75]*3, [1, 0, 0], 0.1, 0.1)
+ # Store the original indices
+ i1 = cyl1['ID']
+ i1.sort()
+ i2 = cyl2['ID']
+ i2.sort()
+ ii = np.concatenate((i1, i2))
+ ii.sort()
+ # Make some booleans
+ bo1 = pf.h.boolean([cyl1, "AND", cyl2]) # empty
+ bo2 = pf.h.boolean([cyl1, "NOT", cyl2]) # only cyl1
+ bo3 = pf.h.boolean([cyl1, "OR", cyl2]) # combination
+ # This makes sure the original containers didn't change.
+ new_i1 = cyl1['ID']
+ new_i1.sort()
+ new_i2 = cyl2['ID']
+ new_i2.sort()
+ yield assert_array_equal, new_i1, i1
+ yield assert_array_equal, new_i2, i2
+ # Now make sure the indices also behave as we expect.
+ empty = np.array([])
+ yield assert_array_equal, bo1['ID'], empty
+ b2 = bo2['ID']
+ b2.sort()
+ yield assert_array_equal, b2, i1
+ b3 = bo3['ID']
+ b3.sort()
+ yield assert_array_equal, b3, ii
+
+def test_boolean_cylinders_overlap():
+ r"""Test to make sure that boolean objects (cylinders, overlap)
+ behave the way we expect.
+
+ Test overlapping cylinders.
+ """
+ for n in [1, 2, 4, 8]:
+ pf = fake_random_pf(64, nprocs=n)
+ pf.h
+ cyl1 = pf.h.disk([0.45]*3, [1, 0, 0], 0.2, 0.2)
+ cyl2 = pf.h.disk([0.55]*3, [1, 0, 0], 0.2, 0.2)
+ # Get indices of both.
+ i1 = cyl1['ID']
+ i2 = cyl2['ID']
+ # Make some booleans
+ bo1 = pf.h.boolean([cyl1, "AND", cyl2]) # overlap (vertically extended lens)
+ bo2 = pf.h.boolean([cyl1, "NOT", cyl2]) # sp1 - sp2 (disk minus a bite)
+ bo3 = pf.h.boolean([cyl1, "OR", cyl2]) # combination (merged disks)
+ # Now make sure the indices also behave as we expect.
+ vlens = np.intersect1d(i1, i2)
+ bite_disk = np.setdiff1d(i1, i2)
+ both = np.union1d(i1, i2)
+ b1 = bo1['ID']
+ b1.sort()
+ b2 = bo2['ID']
+ b2.sort()
+ b3 = bo3['ID']
+ b3.sort()
+ yield assert_array_equal, b1, vlens
+ yield assert_array_equal, b2, bite_disk
+ yield assert_array_equal, b3, both
+
+def test_boolean_ellipsoids_no_overlap():
+ r"""Test to make sure that boolean objects (ellipsoids, no overlap)
+ behave the way we expect.
+
+ Test non-overlapping ellipsoids. This also checks that the original
+ ellipsoids don't change as part of constructing the booleans.
+ """
+ for n in [1, 2, 4, 8]:
+ pf = fake_random_pf(64, nprocs=n)
+ pf.h
+ ell1 = pf.h.ellipsoid([0.25]*3, 0.05, 0.05, 0.05, np.array([0.1]*3),
+ np.array([0.1]*3))
+ ell2 = pf.h.ellipsoid([0.75]*3, 0.05, 0.05, 0.05, np.array([0.1]*3),
+ np.array([0.1]*3))
+ # Store the original indices
+ i1 = ell1['ID']
+ i1.sort()
+ i2 = ell2['ID']
+ i2.sort()
+ ii = np.concatenate((i1, i2))
+ ii.sort()
+ # Make some booleans
+ bo1 = pf.h.boolean([ell1, "AND", ell2]) # empty
+ bo2 = pf.h.boolean([ell1, "NOT", ell2]) # only ell1
+ bo3 = pf.h.boolean([ell1, "OR", ell2]) # combination
+ # This makes sure the original containers didn't change.
+ new_i1 = ell1['ID']
+ new_i1.sort()
+ new_i2 = ell2['ID']
+ new_i2.sort()
+ yield assert_array_equal, new_i1, i1
+ yield assert_array_equal, new_i2, i2
+ # Now make sure the indices also behave as we expect.
+ empty = np.array([])
+ yield assert_array_equal, bo1['ID'], empty
+ b2 = bo2['ID']
+ b2.sort()
+ yield assert_array_equal, b2, i1
+ b3 = bo3['ID']
+ b3.sort()
+ yield assert_array_equal, b3, ii
+
+def test_boolean_ellipsoids_overlap():
+ r"""Test to make sure that boolean objects (ellipsoids, overlap)
+ behave the way we expect.
+
+ Test overlapping ellipsoids.
+ """
+ for n in [1, 2, 4, 8]:
+ pf = fake_random_pf(64, nprocs=n)
+ pf.h # NOTE(review): side-effect access — presumably builds the hierarchy; confirm
+ ell1 = pf.h.ellipsoid([0.45]*3, 0.05, 0.05, 0.05, np.array([0.1]*3),
+ np.array([0.1]*3))
+ ell2 = pf.h.ellipsoid([0.55]*3, 0.05, 0.05, 0.05, np.array([0.1]*3),
+ np.array([0.1]*3))
+ # Get indices of both.
+ # (No pre-sort needed here: np.intersect1d/setdiff1d/union1d below
+ # return sorted arrays regardless of input order.)
+ i1 = ell1['ID']
+ i2 = ell2['ID']
+ # Make some booleans
+ bo1 = pf.h.boolean([ell1, "AND", ell2]) # overlap
+ bo2 = pf.h.boolean([ell1, "NOT", ell2]) # ell1 - ell2
+ bo3 = pf.h.boolean([ell1, "OR", ell2]) # combination
+ # Now make sure the indices also behave as we expect.
+ overlap = np.intersect1d(i1, i2)
+ diff = np.setdiff1d(i1, i2)
+ both = np.union1d(i1, i2)
+ b1 = bo1['ID']
+ b1.sort()
+ b2 = bo2['ID']
+ b2.sort()
+ b3 = bo3['ID']
+ b3.sort()
+ yield assert_array_equal, b1, overlap
+ yield assert_array_equal, b2, diff
+ yield assert_array_equal, b3, both
+
+def test_boolean_mix_periodicity():
+ r"""Test that a hybrid boolean region behaves as we expect.
+
+ This also tests nested logic and that periodicity works.
+ """
+ for n in [1, 2, 4, 8]:
+ pf = fake_random_pf(64, nprocs=n)
+ pf.h # NOTE(review): side-effect access — presumably builds the hierarchy; confirm
+ re = pf.h.region([0.5]*3, [0.0]*3, [1]*3) # whole thing
+ sp = pf.h.sphere([0.95]*3, 0.3) # wraps around
+ cyl = pf.h.disk([0.05]*3, [1,1,1], 0.1, 0.4) # wraps around
+ # NOTE(review): local name 're' shadows the stdlib 're' module if it
+ # is imported elsewhere in this file.
+ # Get original indices
+ rei = re['ID']
+ spi = sp['ID']
+ cyli = cyl['ID']
+ # Make some booleans
+ # whole box minus spherical bites at corners
+ bo1 = pf.h.boolean([re, "NOT", sp])
+ # sphere plus cylinder
+ bo2 = pf.h.boolean([sp, "OR", cyl])
+ # a jumble, the region minus the sp+cyl
+ bo3 = pf.h.boolean([re, "NOT", "(", sp, "OR", cyl, ")"])
+ # Now make sure the indices also behave as we expect.
+ # (expected arrays come from np set routines, which return sorted output)
+ expect = np.setdiff1d(rei, spi)
+ ii = bo1['ID']
+ ii.sort()
+ yield assert_array_equal, expect, ii
+ #
+ expect = np.union1d(spi, cyli)
+ ii = bo2['ID']
+ ii.sort()
+ yield assert_array_equal, expect, ii
+ #
+ expect = np.union1d(spi, cyli)
+ expect = np.setdiff1d(rei, expect)
+ ii = bo3['ID']
+ ii.sort()
+ yield assert_array_equal, expect, ii
+
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
this message because the commit-notification service is enabled for
the recipient of this email.
More information about the yt-svn
mailing list