[yt-svn] commit/yt: 3 new changesets

Bitbucket commits-noreply at bitbucket.org
Thu Feb 14 09:01:20 PST 2013


3 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/f2583db944f0/
changeset:   f2583db944f0
branch:      yt
user:        jzuhone
date:        2013-02-12 18:13:15
summary:     Answer testing for FLASH data. This commit just sets up the basic testing framework with very basic tests. More complex tests will follow.
affected #:  1 file

diff -r 3ba01837bf241d316ad589f6638ec9038e5dbb0e -r f2583db944f07248918701edd7a26aa1b030fccd yt/frontends/flash/tests/test_outputs.py
--- /dev/null
+++ b/yt/frontends/flash/tests/test_outputs.py
@@ -0,0 +1,58 @@
+"""
+FLASH frontend tests
+
+Author: John ZuHone <jzuhone at gmail.com>
+Affiliation: NASA/Goddard Space Flight Center
+Homepage: http://yt-project.org/
+License:
+  Copyright (C) 2012 John ZuHone.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+from yt.testing import *
+from yt.utilities.answer_testing.framework import \
+    requires_pf, \
+    small_patch_amr, \
+    big_patch_amr, \
+    data_dir_load
+from yt.frontends.flash.api import FLASHStaticOutput
+
+_fields = ("Temperature", "Density", "VelocityMagnitude", "DivV")
+
+sloshing = "GasSloshingLowRes/sloshing_low_res_hdf5_plt_cnt_0300"
+@requires_pf(sloshing)
+def test_sloshing():
+    pf = data_dir_load(sloshing)
+    yield assert_equal, str(pf), ""
+    for test in small_patch_amr(m7, _fields):
+        yield test
+
+wt = "WindTunnel/windtunnel_4lev_hdf5_plt_cnt_0030"
+@requires_pf(wt)
+def test_wind_tunnel():
+    pf = data_dir_load(wt)
+    yield assert_equal, str(pf), "windtunnel_4lev_hdf5_plt_cnt_0030"
+    for test in small_patch_amr(wt, _fields):
+        yield test
+
+gcm = "GalaxyClusterMerger/fiducial_1to10_b0.273d_hdf5_plt_cnt_0245.gz"
+@requires_pf(gcm, big_data=True)
+def test_galaxy_cluster_merger():
+    pf = data_dir_load(gcm)
+    for test in big_patch_amr(gcm, _fields):
+        yield test
+


https://bitbucket.org/yt_analysis/yt/commits/3e22311df39a/
changeset:   3e22311df39a
branch:      yt
user:        MatthewTurk
date:        2013-02-14 18:00:48
summary:     A few fixes for FLASH answer testing in 2D
affected #:  1 file

diff -r f2583db944f07248918701edd7a26aa1b030fccd -r 3e22311df39a0f436b5f5bc4f93a7cd74978446b yt/frontends/flash/tests/test_outputs.py
--- a/yt/frontends/flash/tests/test_outputs.py
+++ b/yt/frontends/flash/tests/test_outputs.py
@@ -37,16 +37,18 @@
 @requires_pf(sloshing)
 def test_sloshing():
     pf = data_dir_load(sloshing)
-    yield assert_equal, str(pf), ""
-    for test in small_patch_amr(m7, _fields):
+    yield assert_equal, str(pf), "sloshing_low_res_hdf5_plt_cnt_0300"
+    for test in small_patch_amr(sloshing, _fields):
         yield test
 
+_fields_2d = ("Temperature", "Density")
+
 wt = "WindTunnel/windtunnel_4lev_hdf5_plt_cnt_0030"
 @requires_pf(wt)
 def test_wind_tunnel():
     pf = data_dir_load(wt)
     yield assert_equal, str(pf), "windtunnel_4lev_hdf5_plt_cnt_0030"
-    for test in small_patch_amr(wt, _fields):
+    for test in small_patch_amr(wt, _fields_2d):
         yield test
 
 gcm = "GalaxyClusterMerger/fiducial_1to10_b0.273d_hdf5_plt_cnt_0245.gz"


https://bitbucket.org/yt_analysis/yt/commits/a42fc6396b3b/
changeset:   a42fc6396b3b
branch:      yt
user:        MatthewTurk
date:        2013-02-14 18:01:10
summary:     Merge
affected #:  9 files

diff -r 3e22311df39a0f436b5f5bc4f93a7cd74978446b -r a42fc6396b3b4c9b916615423074577ef23223bc doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -7,8 +7,8 @@
 # There are a few options, but you only need to set *one* of them.  And
 # that's the next one, DEST_DIR.  But, if you want to use an existing HDF5
 # installation you can set HDF5_DIR, or if you want to use some other
-# subversion checkout of YT, you can set YT_DIR, too.  (It'll already
-# check the current directory and one up).
+# subversion checkout of yt, you can set YT_DIR, too.  (It'll already
+# check the current directory and one up.
 #
 # And, feel free to drop me a line: matthewturk at gmail.com
 #
@@ -49,7 +49,7 @@
 INST_ROCKSTAR=0 # Install the Rockstar halo finder?
 INST_SCIPY=0    # Install scipy?
 
-# If you've got YT some other place, set this to point to it.
+# If you've got yt some other place, set this to point to it.
 YT_DIR=""
 
 # If you need to pass anything to matplotlib, do so here.
@@ -230,6 +230,27 @@
             MPL_SUPP_CXXFLAGS="${MPL_SUPP_CXXFLAGS} -mmacosx-version-min=10.7"
         fi
     fi
+    if [ -f /etc/SuSE-release ] && [ `grep --count SUSE /etc/SuSE-release` -gt 0 ]
+    then
+        echo "Looks like you're on an OpenSUSE-compatible machine."
+        echo
+        echo "You need to have these packages installed:"
+        echo
+        echo "  * devel_C_C++"
+        echo "  * libopenssl-devel"
+        echo "  * libuuid-devel"
+        echo "  * zip"
+        echo "  * gcc-c++"
+        echo
+        echo "You can accomplish this by executing:"
+        echo
+        echo "$ sudo zypper install -t pattern devel_C_C++"
+        echo "$ sudo zypper install gcc-c++ libopenssl-devel libuuid-devel zip"
+        echo
+        echo "I am also setting special configure arguments to Python to"
+        echo "specify control lib/lib64 issues."
+        PYCONF_ARGS="--libdir=${DEST_DIR}/lib"
+    fi
     if [ -f /etc/lsb-release ] && [ `grep --count buntu /etc/lsb-release` -gt 0 ]
     then
         echo "Looks like you're on an Ubuntu-compatible machine."
@@ -293,9 +314,9 @@
 echo
 echo "========================================================================"
 echo
-echo "Hi there!  This is the YT installation script.  We're going to download"
+echo "Hi there!  This is the yt installation script.  We're going to download"
 echo "some stuff and install it to create a self-contained, isolated"
-echo "environment for YT to run within."
+echo "environment for yt to run within."
 echo
 echo "Inside the installation script you can set a few variables.  Here's what"
 echo "they're currently set to -- you can hit Ctrl-C and edit the values in "
@@ -476,7 +497,7 @@
 echo 'c68a425bacaa7441037910b9166f25b89e1387776a7749a5350793f89b1690350df5f018060c31d03686e7c3ed2aa848bd2b945c96350dc3b6322e087934783a  hdf5-1.8.9.tar.gz' > hdf5-1.8.9.tar.gz.sha512
 echo 'dbefad00fa34f4f21dca0f1e92e95bd55f1f4478fa0095dcf015b4d06f0c823ff11755cd777e507efaf1c9098b74af18f613ec9000e5c3a5cc1c7554fb5aefb8  libpng-1.5.12.tar.gz' > libpng-1.5.12.tar.gz.sha512
 echo '5b1a0fb52dcb21ca5f0ab71c8a49550e1e8cf633552ec6598dc43f0b32c03422bf5af65b30118c163231ecdddfd40846909336f16da318959106076e80a3fad0  matplotlib-1.2.0.tar.gz' > matplotlib-1.2.0.tar.gz.sha512
-echo '52d1127de2208aaae693d16fef10ffc9b8663081bece83b7597d65706e9568af3b9e56bd211878774e1ebed92e21365ee9c49602a0ff5e48f89f12244d79c161  mercurial-2.4.tar.gz' > mercurial-2.4.tar.gz.sha512
+echo '91693ca5f34934956a7c2c98bb69a5648b2a5660afd2ecf4a05035c5420450d42c194eeef0606d7683e267e4eaaaab414df23f30b34c88219bdd5c1a0f1f66ed  mercurial-2.5.1.tar.gz' > mercurial-2.5.1.tar.gz.sha512
 echo 'de3dd37f753614055dcfed910e9886e03688b8078492df3da94b1ec37be796030be93291cba09e8212fffd3e0a63b086902c3c25a996cf1439e15c5b16e014d9  numpy-1.6.1.tar.gz' > numpy-1.6.1.tar.gz.sha512
 echo '5ad681f99e75849a5ca6f439c7a19bb51abc73d121b50f4f8e4c0da42891950f30407f761a53f0fe51b370b1dbd4c4f5a480557cb2444c8c7c7d5412b328a474  sqlite-autoconf-3070500.tar.gz' > sqlite-autoconf-3070500.tar.gz.sha512
 echo 'edae735960279d92acf58e1f4095c6392a7c2059b8f1d2c46648fc608a0fb06b392db2d073f4973f5762c034ea66596e769b95b3d26ad963a086b9b2d09825f2  zlib-1.2.3.tar.bz2' > zlib-1.2.3.tar.bz2.sha512
@@ -509,7 +530,7 @@
 get_ytproject Python-2.7.3.tgz
 get_ytproject numpy-1.6.1.tar.gz
 get_ytproject matplotlib-1.2.0.tar.gz
-get_ytproject mercurial-2.4.tar.gz
+get_ytproject mercurial-2.5.1.tar.gz
 get_ytproject ipython-0.13.1.tar.gz
 get_ytproject h5py-2.1.0.tar.gz
 get_ytproject Cython-0.17.1.tar.gz
@@ -636,10 +657,10 @@
 
 if [ ! -e Python-2.7.3/done ]
 then
-    echo "Installing Python.  This may take a while, but don't worry.  YT loves you."
+    echo "Installing Python.  This may take a while, but don't worry.  yt loves you."
     [ ! -e Python-2.7.3 ] && tar xfz Python-2.7.3.tgz
     cd Python-2.7.3
-    ( ./configure --prefix=${DEST_DIR}/ 2>&1 ) 1>> ${LOG_FILE} || do_exit
+    ( ./configure --prefix=${DEST_DIR}/ ${PYCONF_ARGS} 2>&1 ) 1>> ${LOG_FILE} || do_exit
 
     ( make ${MAKE_PROCS} 2>&1 ) 1>> ${LOG_FILE} || do_exit
     ( make install 2>&1 ) 1>> ${LOG_FILE} || do_exit
@@ -654,7 +675,7 @@
 if [ $INST_HG -eq 1 ]
 then
     echo "Installing Mercurial."
-    do_setup_py mercurial-2.4
+    do_setup_py mercurial-2.5.1
     export HG_EXEC=${DEST_DIR}/bin/hg
 else
     # We assume that hg can be found in the path.

diff -r 3e22311df39a0f436b5f5bc4f93a7cd74978446b -r a42fc6396b3b4c9b916615423074577ef23223bc yt/analysis_modules/halo_finding/halo_objects.py
--- a/yt/analysis_modules/halo_finding/halo_objects.py
+++ b/yt/analysis_modules/halo_finding/halo_objects.py
@@ -142,18 +142,30 @@
         if self.CoM is not None:
             return self.CoM
         pm = self["ParticleMassMsun"]
-        cx = self["particle_position_x"]
-        cy = self["particle_position_y"]
-        cz = self["particle_position_z"]
-        if isinstance(self, FOFHalo):
-            c_vec = np.array([cx[0], cy[0], cz[0]]) - self.pf.domain_center
-        else:
-            c_vec = self.maximum_density_location() - self.pf.domain_center
-        cx = (cx - c_vec[0])
-        cy = (cy - c_vec[1])
-        cz = (cz - c_vec[2])
-        com = np.array([v - np.floor(v) for v in [cx, cy, cz]])
-        return (com * pm).sum(axis=1) / pm.sum() + c_vec
+        c = {}
+        c[0] = self["particle_position_x"]
+        c[1] = self["particle_position_y"]
+        c[2] = self["particle_position_z"]
+        c_vec = np.zeros(3)
+        com = []
+        for i in range(3):
+            # A halo is likely periodic around a boundary if the distance 
+            # between the max and min particle
+            # positions are larger than half the box. 
+            # So skip the rest if the converse is true.
+            # Note we might make a change here when periodicity-handling is
+            # fully implemented.
+            if (c[i].max() - c[i].min()) < (self.pf.domain_width[i] / 2.):
+                com.append(c[i])
+                continue
+            # Now we want to flip around only those close to the left boundary.
+            d_left = c[i] - self.pf.domain_left_edge[i]
+            sel = (d_left <= (self.pf.domain_width[i]/2))
+            c[i][sel] += self.pf.domain_width[i]
+            com.append(c[i])
+        com = np.array(com)
+        c = (com * pm).sum(axis=1) / pm.sum()
+        return c%self.pf.domain_width
 
     def maximum_density(self):
         r"""Return the HOP-identified maximum density. Not applicable to
@@ -809,7 +821,6 @@
     _radjust = 1.05
 
     def __init__(self, pf, id, size=None, CoM=None,
-
         max_dens_point=None, group_total_mass=None, max_radius=None, bulk_vel=None,
         rms_vel=None, fnames=None, mag_A=None, mag_B=None, mag_C=None,
         e1_vec=None, tilt=None, supp=None):
@@ -843,6 +854,10 @@
             self.supp = {}
         else:
             self.supp = supp
+        self._saved_fields = {}
+        self._ds_sort = None
+        self._particle_mask = None
+
 
     def __getitem__(self, key):
         # This function will try to get particle data in one of three ways,

diff -r 3e22311df39a0f436b5f5bc4f93a7cd74978446b -r a42fc6396b3b4c9b916615423074577ef23223bc yt/analysis_modules/halo_mass_function/halo_mass_function.py
--- a/yt/analysis_modules/halo_mass_function/halo_mass_function.py
+++ b/yt/analysis_modules/halo_mass_function/halo_mass_function.py
@@ -132,7 +132,6 @@
         not stored in enzo datasets, so must be entered by hand.
         sigma8input=%f primordial_index=%f omega_baryon0=%f
         """ % (self.sigma8input, self.primordial_index, self.omega_baryon0))
-        time.sleep(1)
         
         # Do the calculations.
         self.sigmaM()

diff -r 3e22311df39a0f436b5f5bc4f93a7cd74978446b -r a42fc6396b3b4c9b916615423074577ef23223bc yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -1387,9 +1387,11 @@
         else:
             self.fields = ensure_list(fields)
         from yt.visualization.plot_window import \
-            GetOffAxisBoundsAndCenter, PWViewerMPL
+            GetObliqueWindowParameters, PWViewerMPL
         from yt.visualization.fixed_resolution import ObliqueFixedResolutionBuffer
-        (bounds, center_rot) = GetOffAxisBoundsAndCenter(normal, center, width, self.pf)
+        (bounds, center_rot, units) = GetObliqueWindowParameters(normal, center, width, self.pf)
+        if axes_unit is None and units != ('1', '1'):
+            axes_units = units
         pw = PWViewerMPL(self, bounds, origin='center-window', periodic=False, oblique=True,
                          frb_generator=ObliqueFixedResolutionBuffer, plot_type='OffAxisSlice')
         pw.set_axes_unit(axes_unit)

diff -r 3e22311df39a0f436b5f5bc4f93a7cd74978446b -r a42fc6396b3b4c9b916615423074577ef23223bc yt/data_objects/tests/test_cutting_plane.py
--- /dev/null
+++ b/yt/data_objects/tests/test_cutting_plane.py
@@ -0,0 +1,45 @@
+from yt.testing import *
+import os
+
+def setup():
+    from yt.config import ytcfg
+    ytcfg["yt","__withintesting"] = "True"
+
+def teardown_func(fns):
+    for fn in fns:
+        os.remove(fn)
+
+def test_cutting_plane():
+    for nprocs in [8, 1]:
+        # We want to test both 1 proc and 8 procs, to make sure that
+        # parallelism isn't broken
+        pf = fake_random_pf(64, nprocs = nprocs)
+        dims = pf.domain_dimensions
+        center = [0.5,0.5,0.5]
+        normal = [1,1,1]
+        fns = []
+        cut = pf.h.cutting(normal, center, ["Ones", "Density"])
+        yield assert_equal, cut["Ones"].sum(), cut["Ones"].size
+        yield assert_equal, cut["Ones"].min(), 1.0
+        yield assert_equal, cut["Ones"].max(), 1.0
+        pw = cut.to_pw()
+        fns += pw.save()
+        frb = cut.to_frb((1.0,'unitary'), 64)
+        for cut_field in ['Ones', 'Density']:
+            yield assert_equal, frb[cut_field].info['data_source'], \
+                cut.__str__()
+            yield assert_equal, frb[cut_field].info['axis'], \
+                4
+            yield assert_equal, frb[cut_field].info['field'], \
+                cut_field
+            yield assert_equal, frb[cut_field].info['units'], \
+                pf.field_info[cut_field].get_units()
+            yield assert_equal, frb[cut_field].info['xlim'], \
+                frb.bounds[:2]
+            yield assert_equal, frb[cut_field].info['ylim'], \
+                frb.bounds[2:]
+            yield assert_equal, frb[cut_field].info['length_to_cm'], \
+                pf['cm']
+            yield assert_equal, frb[cut_field].info['center'], \
+                cut.center
+        teardown_func(fns)

diff -r 3e22311df39a0f436b5f5bc4f93a7cd74978446b -r a42fc6396b3b4c9b916615423074577ef23223bc yt/frontends/enzo/data_structures.py
--- a/yt/frontends/enzo/data_structures.py
+++ b/yt/frontends/enzo/data_structures.py
@@ -995,6 +995,7 @@
         for p, v in self._conversion_override.items():
             self.conversion_factors[p] = v
         self.refine_by = self.parameters["RefineBy"]
+        self.periodicity = ensure_tuple(self.parameters["LeftFaceBoundaryCondition"] == 3)
         self.dimensionality = self.parameters["TopGridRank"]
         self.domain_dimensions = self.parameters["TopGridDimensions"]
         self.current_time = self.parameters["InitialTime"]

diff -r 3e22311df39a0f436b5f5bc4f93a7cd74978446b -r a42fc6396b3b4c9b916615423074577ef23223bc yt/visualization/volume_rendering/blenders.py
--- a/yt/visualization/volume_rendering/blenders.py
+++ b/yt/visualization/volume_rendering/blenders.py
@@ -1,19 +1,13 @@
 import numpy as np
-from yt.mods import *
 
 def enhance(im, stdval=6.0, just_alpha=True):
     if just_alpha:
         nz = im[im>0.0]
-        im[:] = im[:]/(nz.mean()+stdval*na.std(nz))
+        im[:] = im[:]/(nz.mean()+stdval*np.std(nz))
     else:
         for c in range(3):
             nz = im[:,:,c][im[:,:,c]>0.0]
-            im[:,:,c] = im[:,:,c]/(nz.mean()+stdval*na.std(nz))
+            im[:,:,c] = im[:,:,c]/(nz.mean()+stdval*np.std(nz))
             del nz
     np.clip(im, 0.0, 1.0, im)
 
-if __name__ == 'main':
-    im = na.zeros((256,256,3))
-    line(im, 50,60,150,200)
-    write_bitmap(im,'test_line.png')
-

diff -r 3e22311df39a0f436b5f5bc4f93a7cd74978446b -r a42fc6396b3b4c9b916615423074577ef23223bc yt/visualization/volume_rendering/camera.py
--- a/yt/visualization/volume_rendering/camera.py
+++ b/yt/visualization/volume_rendering/camera.py
@@ -683,7 +683,7 @@
         """
         if "__IPYTHON__" in dir(__builtin__):
             from IPython.core.displaypub import publish_display_data
-            image = self.snapshot()
+            image = self.snapshot()[:,:,:3]
             if clip_ratio is not None: clip_ratio *= image.std()
             data = write_bitmap(image, None, clip_ratio)
             publish_display_data(

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.



More information about the yt-svn mailing list