[yt-svn] commit/yt: 3 new changesets

Bitbucket commits-noreply at bitbucket.org
Thu Feb 7 14:53:45 PST 2013


3 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/02c774c286d8/
changeset:   02c774c286d8
branch:      yt
user:        samskillman
date:        2013-02-07 22:39:40
summary:     Restoring parallel streamlining capabilities.
affected #:  3 files

diff -r ea4949df805c2fcdb69b46f54343aafce1a36976 -r 02c774c286d8103e4daad58b9d2c2892f5dd44ff yt/data_objects/tests/test_streamlines.py
--- a/yt/data_objects/tests/test_streamlines.py
+++ b/yt/data_objects/tests/test_streamlines.py
@@ -12,7 +12,7 @@
     cs = np.mgrid[0.47:0.53:2j,0.47:0.53:2j,0.47:0.53:2j]
     cs = np.array([a.ravel() for a in cs]).T
     length = (1.0/128) * 16 # 16 half-widths of a cell
-    for nprocs in [1]:
+    for nprocs in [1, 2, 4, 8]:
         pf = fake_random_pf(64, nprocs = nprocs, fields = _fields)
         streams = Streamlines(pf, cs, length=length)
         streams.integrate_through_volume()

diff -r ea4949df805c2fcdb69b46f54343aafce1a36976 -r 02c774c286d8103e4daad58b9d2c2892f5dd44ff yt/utilities/amr_kdtree/amr_kdtools.py
--- a/yt/utilities/amr_kdtree/amr_kdtools.py
+++ b/yt/utilities/amr_kdtree/amr_kdtools.py
@@ -42,6 +42,7 @@
         self.parent = parent
         self.id = node_id
         self.data = None
+        self.split = None
 
 class Split(object):
     def __init__(self, dim, pos):

diff -r ea4949df805c2fcdb69b46f54343aafce1a36976 -r 02c774c286d8103e4daad58b9d2c2892f5dd44ff yt/utilities/amr_kdtree/amr_kdtree.py
--- a/yt/utilities/amr_kdtree/amr_kdtree.py
+++ b/yt/utilities/amr_kdtree/amr_kdtree.py
@@ -26,12 +26,12 @@
 from yt.funcs import *
 import numpy as np
 import h5py
-from amr_kdtools import Node, kd_is_leaf, kd_sum_volume, kd_node_check, \
+from amr_kdtools import Node, Split, kd_is_leaf, kd_sum_volume, kd_node_check, \
         depth_traverse, viewpoint_traverse, add_grids, \
         receive_and_reduce, send_to_parent, scatter_image, find_node, \
         depth_first_touch
 from yt.utilities.parallel_tools.parallel_analysis_interface \
-    import ParallelAnalysisInterface, parallel_root_only
+    import ParallelAnalysisInterface 
 from yt.utilities.lib.grid_traversal import PartitionedGrid
 from yt.utilities.math_utils import periodic_position
 
@@ -445,10 +445,10 @@
         if self.comm.rank != (self.comm.size-1):
             self.comm.send_array([0],self.comm.rank+1, tag=self.comm.rank)
 
-
-    @parallel_root_only
     def join_parallel_trees(self):
-        nid, pid, lid, rid, les, res, gid = self.get_node_arrays()
+        if self.comm.size == 0: return
+        nid, pid, lid, rid, les, res, gid, splitdims, splitposs = \
+                self.get_node_arrays()
         nid = self.comm.par_combine_object(nid, 'cat', 'list') 
         pid = self.comm.par_combine_object(pid, 'cat', 'list') 
         lid = self.comm.par_combine_object(lid, 'cat', 'list') 
@@ -456,9 +456,11 @@
         gid = self.comm.par_combine_object(gid, 'cat', 'list') 
         les = self.comm.par_combine_object(les, 'cat', 'list') 
         res = self.comm.par_combine_object(res, 'cat', 'list') 
+        splitdims = self.comm.par_combine_object(splitdims, 'cat', 'list') 
+        splitposs = self.comm.par_combine_object(splitposs, 'cat', 'list') 
         nid = np.array(nid)
         new_tree = self.rebuild_tree_from_array(nid, pid, lid, 
-            rid, les, res, gid)
+            rid, les, res, gid, splitdims, splitposs)
 
     def get_node_arrays(self):
         nids = []
@@ -468,6 +470,8 @@
         les = []
         res = []
         gridids = []
+        splitdims = []
+        splitposs = []
         for node in depth_first_touch(self.tree):
             nids.append(node.id) 
             les.append(node.left_edge) 
@@ -488,10 +492,18 @@
                 gridids.append(-1) 
             else:
                 gridids.append(node.grid) 
-        return nids, parentids, leftids, rightids, les, res, gridids
+            if node.split is None:
+                splitdims.append(-1)
+                splitposs.append(np.nan)
+            else:
+                splitdims.append(node.split.dim)
+                splitposs.append(node.split.pos)
+
+        return nids, parentids, leftids, rightids, les, res, gridids,\
+                splitdims, splitposs
 
     def rebuild_tree_from_array(self, nids, pids, lids,
-                               rids, les, res, gids):
+                               rids, les, res, gids, splitdims, splitposs):
         del self.tree.trunk
 
         self.tree.trunk = Node(None, 
@@ -512,6 +524,10 @@
                                       None, None, rids[i])
             if gids[i] != -1:
                 n.grid = gids[i]
+
+            if splitdims[i] != -1:
+                n.split = Split(splitdims[i], splitposs[i])
+
         mylog.info('AMRKDTree rebuilt, Final Volume: %e' % kd_sum_volume(self.tree.trunk))
         return self.tree.trunk
 

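For reference, the reduction above follows a flatten-and-rebuild pattern: get_node_arrays() serializes each rank's portion of the kd-tree into flat per-node lists (now including split dimension and position), par_combine_object concatenates those lists across ranks, and rebuild_tree_from_array() reconstructs the nodes from the combined arrays. A minimal sketch of the same idea in plain Python (hypothetical names, not yt code; the real code tracks explicit left/right child ids rather than attaching children by encounter order):

    import numpy as np

    class TreeNode(object):
        def __init__(self, nid, split_dim=-1, split_pos=np.nan):
            self.id = nid
            self.split_dim = split_dim   # -1 means "no split", like the sentinels above
            self.split_pos = split_pos
            self.left = self.right = None

    def flatten(root):
        # One flat entry per node, recording the parent id so the tree
        # can be reassembled after the arrays are concatenated.
        nids, pids, dims, poss = [], [], [], []
        stack = [(root, -1)]
        while stack:
            node, pid = stack.pop()
            nids.append(node.id)
            pids.append(pid)
            dims.append(node.split_dim)
            poss.append(node.split_pos)
            for child in (node.left, node.right):
                if child is not None:
                    stack.append((child, node.id))
        return nids, pids, dims, poss

    def rebuild(nids, pids, dims, poss):
        # Recreate every node, then attach each one to its parent.
        nodes = dict((n, TreeNode(n, d, p))
                     for n, d, p in zip(nids, dims, poss))
        root = None
        for n, p in zip(nids, pids):
            if p == -1:
                root = nodes[n]
            elif nodes[p].left is None:
                nodes[p].left = nodes[n]
            else:
                nodes[p].right = nodes[n]
        return root
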

https://bitbucket.org/yt_analysis/yt/commits/c03286262fcb/
changeset:   c03286262fcb
branch:      yt
user:        samskillman
date:        2013-02-07 22:41:30
summary:     Merging
affected #:  3 files

diff -r 02c774c286d8103e4daad58b9d2c2892f5dd44ff -r c03286262fcb340062faff1a681ef9c774042e77 yt/testing.py
--- a/yt/testing.py
+++ b/yt/testing.py
@@ -22,6 +22,7 @@
   along with this program.  If not, see <http://www.gnu.org/licenses/>.
 """
 
+import itertools as it
 import numpy as np
 from yt.funcs import *
 from numpy.testing import assert_array_equal, assert_almost_equal, \
@@ -163,3 +164,100 @@
                  for field,offset in zip(fields,offsets))
     ug = load_uniform_grid(data, ndims, 1.0, nprocs = nprocs)
     return ug
+
+def expand_keywords(keywords, full=False):
+    """
+    expand_keywords is a means for testing all possible keyword
+    arguments in the nosetests.  Simply pass it a dictionary of all the
+    keyword arguments and all of the values for these arguments that you
+    want to test.
+
+    It will return a list of **kwargs dicts containing combinations of
+    the various kwarg values you passed it.  These can then be passed
+    to the appropriate function in nosetests. 
+
+    If full=True, then every possible combination of keywords is produced,
+    otherwise, every keyword option is included at least once in the output
+    list.  Be careful, by using full=True, you may be in for an exponentially
+    larger number of tests! 
+
+    keywords : dict
+        a dictionary where the keys are the keywords for the function,
+        and the values of each key are the possible values that this key
+        can take in the function
+
+    full : bool
+        if set to True, every possible combination of given keywords is 
+        returned
+
+    Returns
+    -------
+    array of dicts
+        An array of **kwargs dictionaries to be individually passed to
+        the appropriate function matching these kwargs.
+
+    Examples
+    --------
+    >>> keywords = {}
+    >>> keywords['dpi'] = (50, 100, 200)
+    >>> keywords['cmap'] = ('algae', 'jet')
+    >>> list_of_kwargs = expand_keywords(keywords)
+    >>> print list_of_kwargs
+
+    array([{'cmap': 'algae', 'dpi': 50}, 
+           {'cmap': 'jet', 'dpi': 100},
+           {'cmap': 'algae', 'dpi': 200}], dtype=object)
+
+    >>> list_of_kwargs = expand_keywords(keywords, full=True)
+    >>> print list_of_kwargs
+
+    array([{'cmap': 'algae', 'dpi': 50}, 
+           {'cmap': 'algae', 'dpi': 100},
+           {'cmap': 'algae', 'dpi': 200}, 
+           {'cmap': 'jet', 'dpi': 50},
+           {'cmap': 'jet', 'dpi': 100}, 
+           {'cmap': 'jet', 'dpi': 200}], dtype=object)
+
+    >>> for kwargs in list_of_kwargs:
+    ...     write_projection(*args, **kwargs)
+    """
+
+    # if we want every possible combination of keywords, use iter magic
+    if full:
+        keys = sorted(keywords)
+        list_of_kwarg_dicts = np.array([dict(zip(keys, prod)) for prod in \
+                              it.product(*(keywords[key] for key in keys))])
+            
+    # if we just want to probe each keyword, but not necessarily every 
+    # combination
+    else:
+        # Determine the maximum number of values any of the keywords has
+        num_lists = 0
+        for val in keywords.values():
+            if isinstance(val, str):
+                num_lists = max(1.0, num_lists)
+            else:
+                num_lists = max(len(val), num_lists)
+    
+        # Construct array of kwargs dicts, each element of the list is a different
+        # **kwargs dict.  each kwargs dict gives a different combination of
+        # the possible values of the kwargs
+    
+        # initialize array
+        list_of_kwarg_dicts = np.array([dict() for x in range(num_lists)])
+    
+        # fill in array
+        for i in np.arange(num_lists):
+            list_of_kwarg_dicts[i] = {}
+            for key in keywords.keys():
+                # if it's a string, use it (there's only one)
+                if isinstance(keywords[key], str):
+                    list_of_kwarg_dicts[i][key] = keywords[key]
+                # if there are more options, use the i'th val
+                elif i < len(keywords[key]):
+                    list_of_kwarg_dicts[i][key] = keywords[key][i]
+                # if there are not more options, use the 0'th val
+                else:
+                    list_of_kwarg_dicts[i][key] = keywords[key][0]
+
+    return list_of_kwarg_dicts

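For reference, the full=True branch above is the standard itertools.product recipe; a short standalone illustration (not yt code) of what it yields for the docstring's example values:

    import itertools as it

    keywords = {'dpi': (50, 100, 200), 'cmap': ('algae', 'jet')}
    keys = sorted(keywords)
    all_kwargs = [dict(zip(keys, combo))
                  for combo in it.product(*(keywords[k] for k in keys))]
    # len(all_kwargs) == 6, e.g. all_kwargs[0] == {'cmap': 'algae', 'dpi': 50}
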
diff -r 02c774c286d8103e4daad58b9d2c2892f5dd44ff -r c03286262fcb340062faff1a681ef9c774042e77 yt/visualization/image_writer.py
--- a/yt/visualization/image_writer.py
+++ b/yt/visualization/image_writer.py
@@ -335,8 +335,8 @@
     return im
 
 def write_projection(data, filename, colorbar=True, colorbar_label=None, 
-                     title=None, limits=None, take_log=True, var_fig_size=False,
-                     cmap='algae'):
+                     title=None, limits=None, take_log=True, figsize=(8,6),
+                     dpi=100, cmap_name='algae'):
     r"""Write a projection or volume rendering to disk with a variety of 
     pretty parameters such as limits, title, colorbar, etc.  write_projection
     uses the standard matplotlib interface to create the figure.  N.B. This code
@@ -344,7 +344,8 @@
     framework (i.e. the Camera interface or off_axis_projection).
 
     Accepts an NxM sized array representing the projection itself as well
-    as the filename to which you will save this figure.  
+    as the filename to which you will save this figure.  Note that the final
+    resolution of your image in pixels is figsize (in inches) times dpi.
 
     Parameters
     ----------
@@ -363,10 +364,11 @@
         of the data array
     take_log : boolean
         plot the log of the data array (and take the log of the limits if set)?
-    var_fig_size : boolean
-        If we want the resolution (and size) of the output image to scale 
-        with the resolution of the image array.  
-    cmap : string
+    figsize : array_like
+        width, height in inches of final image
+    dpi : int
+        final image resolution in pixels / inch
+    cmap_name : string
         The name of the colormap.
 
     Examples
@@ -375,7 +377,7 @@
     >>> image = off_axis_projection(pf, c, L, W, N, "Density", no_ghost=False)
     >>> write_projection(image, 'test.png', 
                          colorbar_label="Column Density (cm$^{-2}$)", 
-                         title="Offaxis Projection", limits=(1e-3,1e-5), 
+                         title="Offaxis Projection", limits=(1e-5,1e-3), 
                          take_log=True)
     """
     import matplotlib
@@ -391,11 +393,11 @@
         limits = [None, None]
 
     # Create the figure and paint the data on
-    fig = matplotlib.figure.Figure()
+    fig = matplotlib.figure.Figure(figsize=figsize)
     ax = fig.add_subplot(111)
     fig.tight_layout()
 
-    cax = ax.imshow(data, vmin=limits[0], vmax=limits[1], norm=norm, cmap=cmap)
+    cax = ax.imshow(data, vmin=limits[0], vmax=limits[1], norm=norm, cmap=cmap_name)
     
     if title:
         ax.set_title(title)
@@ -410,16 +412,6 @@
         if colorbar_label:
             cbar.ax.set_ylabel(colorbar_label)
 
-    # If we want the resolution of the image to scale with the resolution
-    # of the image array. we increase the dpi value accordingly
-    
-    if var_fig_size:
-        N = data.shape[0]
-        mag_factor = N/480.
-        dpi = 100*mag_factor
-    else:
-        dpi = None
-
     suffix = get_image_suffix(filename)
 
     if suffix == '':
@@ -436,7 +428,7 @@
         mylog.warning("Unknown suffix %s, defaulting to Agg", suffix)
         canvas = FigureCanvasAgg(fig)
 
-    canvas.print_figure(filename)
+    canvas.print_figure(filename, dpi=dpi)
     return filename
 
 

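For reference, with the new keywords the pixel size of the saved image is simply figsize (in inches) times dpi. A small standalone matplotlib sketch (not yt code; the output filename is made up) using the same Figure/FigureCanvasAgg path that write_projection takes:

    import numpy as np
    from matplotlib.figure import Figure
    from matplotlib.backends.backend_agg import FigureCanvasAgg

    fig = Figure(figsize=(8, 6))                 # 8 x 6 inches
    ax = fig.add_subplot(111)
    ax.imshow(np.random.random((64, 64)))
    canvas = FigureCanvasAgg(fig)
    canvas.print_figure('resolution_check.png', dpi=100)   # writes an 800 x 600 pixel PNG
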
diff -r 02c774c286d8103e4daad58b9d2c2892f5dd44ff -r c03286262fcb340062faff1a681ef9c774042e77 yt/visualization/tests/test_offaxisprojection.py
--- /dev/null
+++ b/yt/visualization/tests/test_offaxisprojection.py
@@ -0,0 +1,92 @@
+"""
+Test for off_axis_projection and write_projection
+
+Author: Cameron Hummels <chummels at gmail.com>
+Affiliation: University of Arizona
+Homepage: http://yt-project.org/
+License:
+  Copyright (C) 2013 Cameron Hummels.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+import os
+import os.path
+import tempfile
+import shutil
+from yt.testing import \
+    fake_random_pf, assert_equal, expand_keywords
+from yt.mods import \
+    off_axis_projection, write_projection
+
+
+def setup():
+    """Test specific setup."""
+    from yt.config import ytcfg
+    ytcfg["yt", "__withintesting"] = "True"
+
+
+def test_write_projection():
+    """Tests functionality of off_axis_projection and write_projection."""
+    # Perform I/O in safe place instead of yt main dir
+    tmpdir = tempfile.mkdtemp()
+    curdir = os.getcwd()
+    os.chdir(tmpdir)
+
+    # args for off_axis_projection
+    test_pf = fake_random_pf(64)
+    c = [0.5, 0.5, 0.5]
+    norm = [0.5, 0.5, 0.5]
+    W = [0.5,0.5,1.0]
+    N = 64
+    field = "Density"
+    oap_args = [test_pf, c, norm, W, N, field]
+
+    # kwargs for off_axis_projection
+    oap_kwargs = {}
+    oap_kwargs['weight'] = (None, 'CellMassMsun')
+    oap_kwargs['no_ghost'] = (True, False)
+    oap_kwargs['interpolated'] = (True, False)
+    oap_kwargs['north_vector'] = ((1,0,0), (0,0.5,1.0))
+    oap_kwargs_list = expand_keywords(oap_kwargs)
+
+    # args for write_projection
+    fn = "test.png"
+
+    # kwargs for write_projection
+    wp_kwargs = {}
+    wp_kwargs['colorbar'] = (True, False)
+    wp_kwargs['colorbar_label'] = ('test')
+    wp_kwargs['title'] = ('test')
+    wp_kwargs['limits'] = (None, (1e3, 1e5))
+    wp_kwargs['take_log'] = (True, False)
+    wp_kwargs['figsize'] = ((8,6), [1,1])
+    wp_kwargs['dpi'] = (100, 50)
+    wp_kwargs['cmap_name'] = ('algae', 'jet')
+    wp_kwargs_list = expand_keywords(wp_kwargs)
+
+    # test all off_axis_projection kwargs and write_projection kwargs
+    # make sure they are able to be projected, then remove and try next
+    # iteration
+    for oap_kwargs in oap_kwargs_list:
+        image = off_axis_projection(*oap_args, **oap_kwargs)
+        for wp_kwargs in wp_kwargs_list:
+            write_projection(image, fn, **wp_kwargs)
+            yield assert_equal, os.path.exists(fn), True
+            os.remove(fn)
+
+    os.chdir(curdir)
+    # clean up
+    shutil.rmtree(tmpdir)

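For reference, the test above relies on nose's generator-test convention: each yielded (callable, args...) tuple is collected and reported as its own test case, so every kwargs combination shows up separately. A tiny standalone sketch of the pattern (hypothetical names, not yt code):

    def check_file_written(path):
        assert path.endswith('.png')

    def test_many_outputs():
        # nose runs check_file_written(fn) once per yielded tuple
        for fn in ('a.png', 'b.png', 'c.png'):
            yield check_file_written, fn
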

https://bitbucket.org/yt_analysis/yt/commits/d0e722ba5afe/
changeset:   d0e722ba5afe
branch:      yt
user:        ngoldbaum
date:        2013-02-07 23:53:41
summary:     Merged in samskillman/yt (pull request #418)

Restore parallel streamlines, AMRKDTree parallel reduction
affected #:  3 files

diff -r 023d08daede64ee04aa098c33dbd49ea34189940 -r d0e722ba5afe0b3f7fd54df2b3645e38692015d2 yt/data_objects/tests/test_streamlines.py
--- a/yt/data_objects/tests/test_streamlines.py
+++ b/yt/data_objects/tests/test_streamlines.py
@@ -12,7 +12,7 @@
     cs = np.mgrid[0.47:0.53:2j,0.47:0.53:2j,0.47:0.53:2j]
     cs = np.array([a.ravel() for a in cs]).T
     length = (1.0/128) * 16 # 16 half-widths of a cell
-    for nprocs in [1]:
+    for nprocs in [1, 2, 4, 8]:
         pf = fake_random_pf(64, nprocs = nprocs, fields = _fields)
         streams = Streamlines(pf, cs, length=length)
         streams.integrate_through_volume()

diff -r 023d08daede64ee04aa098c33dbd49ea34189940 -r d0e722ba5afe0b3f7fd54df2b3645e38692015d2 yt/utilities/amr_kdtree/amr_kdtools.py
--- a/yt/utilities/amr_kdtree/amr_kdtools.py
+++ b/yt/utilities/amr_kdtree/amr_kdtools.py
@@ -42,6 +42,7 @@
         self.parent = parent
         self.id = node_id
         self.data = None
+        self.split = None
 
 class Split(object):
     def __init__(self, dim, pos):

diff -r 023d08daede64ee04aa098c33dbd49ea34189940 -r d0e722ba5afe0b3f7fd54df2b3645e38692015d2 yt/utilities/amr_kdtree/amr_kdtree.py
--- a/yt/utilities/amr_kdtree/amr_kdtree.py
+++ b/yt/utilities/amr_kdtree/amr_kdtree.py
@@ -26,12 +26,12 @@
 from yt.funcs import *
 import numpy as np
 import h5py
-from amr_kdtools import Node, kd_is_leaf, kd_sum_volume, kd_node_check, \
+from amr_kdtools import Node, Split, kd_is_leaf, kd_sum_volume, kd_node_check, \
         depth_traverse, viewpoint_traverse, add_grids, \
         receive_and_reduce, send_to_parent, scatter_image, find_node, \
         depth_first_touch
 from yt.utilities.parallel_tools.parallel_analysis_interface \
-    import ParallelAnalysisInterface, parallel_root_only
+    import ParallelAnalysisInterface 
 from yt.utilities.lib.grid_traversal import PartitionedGrid
 from yt.utilities.math_utils import periodic_position
 
@@ -445,10 +445,10 @@
         if self.comm.rank != (self.comm.size-1):
             self.comm.send_array([0],self.comm.rank+1, tag=self.comm.rank)
 
-
-    @parallel_root_only
     def join_parallel_trees(self):
-        nid, pid, lid, rid, les, res, gid = self.get_node_arrays()
+        if self.comm.size == 0: return
+        nid, pid, lid, rid, les, res, gid, splitdims, splitposs = \
+                self.get_node_arrays()
         nid = self.comm.par_combine_object(nid, 'cat', 'list') 
         pid = self.comm.par_combine_object(pid, 'cat', 'list') 
         lid = self.comm.par_combine_object(lid, 'cat', 'list') 
@@ -456,9 +456,11 @@
         gid = self.comm.par_combine_object(gid, 'cat', 'list') 
         les = self.comm.par_combine_object(les, 'cat', 'list') 
         res = self.comm.par_combine_object(res, 'cat', 'list') 
+        splitdims = self.comm.par_combine_object(splitdims, 'cat', 'list') 
+        splitposs = self.comm.par_combine_object(splitposs, 'cat', 'list') 
         nid = np.array(nid)
         new_tree = self.rebuild_tree_from_array(nid, pid, lid, 
-            rid, les, res, gid)
+            rid, les, res, gid, splitdims, splitposs)
 
     def get_node_arrays(self):
         nids = []
@@ -468,6 +470,8 @@
         les = []
         res = []
         gridids = []
+        splitdims = []
+        splitposs = []
         for node in depth_first_touch(self.tree):
             nids.append(node.id) 
             les.append(node.left_edge) 
@@ -488,10 +492,18 @@
                 gridids.append(-1) 
             else:
                 gridids.append(node.grid) 
-        return nids, parentids, leftids, rightids, les, res, gridids
+            if node.split is None:
+                splitdims.append(-1)
+                splitposs.append(np.nan)
+            else:
+                splitdims.append(node.split.dim)
+                splitposs.append(node.split.pos)
+
+        return nids, parentids, leftids, rightids, les, res, gridids,\
+                splitdims, splitposs
 
     def rebuild_tree_from_array(self, nids, pids, lids,
-                               rids, les, res, gids):
+                               rids, les, res, gids, splitdims, splitposs):
         del self.tree.trunk
 
         self.tree.trunk = Node(None, 
@@ -512,6 +524,10 @@
                                       None, None, rids[i])
             if gids[i] != -1:
                 n.grid = gids[i]
+
+            if splitdims[i] != -1:
+                n.split = Split(splitdims[i], splitposs[i])
+
         mylog.info('AMRKDTree rebuilt, Final Volume: %e' % kd_sum_volume(self.tree.trunk))
         return self.tree.trunk

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.


