[yt-svn] commit/yt: 4 new changesets
commits-noreply at bitbucket.org
commits-noreply at bitbucket.org
Mon Oct 21 14:48:58 PDT 2013
4 new commits in yt:
https://bitbucket.org/yt_analysis/yt/commits/eaafe86bfb32/
Changeset: eaafe86bfb32
Branch: yt
User: jzuhone
Date: 2013-10-17 23:43:44
Summary: Fixing loading of Athena datasets from other directories.
Affected #: 1 file
diff -r b1642de56ef468aa58ac954778f977726edefbc0 -r eaafe86bfb3295364fe770661b8676f7456193f3 yt/frontends/athena/data_structures.py
--- a/yt/frontends/athena/data_structures.py
+++ b/yt/frontends/athena/data_structures.py
@@ -197,13 +197,18 @@
raise TypeError
# Need to determine how many grids: self.num_grids
- dname = self.hierarchy_filename
- gridlistread = glob.glob('id*/%s-id*%s' % (dname[4:-9],dname[-9:] ))
+ dataset_dir = os.path.dirname(self.hierarchy_filename)
+ dname = os.path.split(self.hierarchy_filename)[-1]
+ if dataset_dir.endswith("id0"):
+ dname = "id0/"+dname
+ dataset_dir = dataset_dir[:-3]
+
+ gridlistread = glob.glob(os.path.join(dataset_dir, 'id*/%s-id*%s' % (dname[4:-9],dname[-9:])))
gridlistread.insert(0,self.hierarchy_filename)
if 'id0' in dname :
- gridlistread += glob.glob('id*/lev*/%s*-lev*%s' % (dname[4:-9],dname[-9:]))
+ gridlistread += glob.glob(os.path.join(dataset_dir, 'id*/lev*/%s*-lev*%s' % (dname[4:-9],dname[-9:])))
else :
- gridlistread += glob.glob('lev*/%s*-lev*%s' % (dname[:-9],dname[-9:]))
+ gridlistread += glob.glob(os.path.join(dataset_dir, 'lev*/%s*-lev*%s' % (dname[:-9],dname[-9:])))
self.num_grids = len(gridlistread)
dxs=[]
self.grids = np.empty(self.num_grids, dtype='object')
@@ -426,12 +431,17 @@
else:
self.periodicity = (True,)*self.dimensionality
- dname = self.parameter_filename
- gridlistread = glob.glob('id*/%s-id*%s' % (dname[4:-9],dname[-9:] ))
+ dataset_dir = os.path.dirname(self.parameter_filename)
+ dname = os.path.split(self.parameter_filename)[-1]
+ if dataset_dir.endswith("id0"):
+ dname = "id0/"+dname
+ dataset_dir = dataset_dir[:-3]
+
+ gridlistread = glob.glob(os.path.join(dataset_dir, 'id*/%s-id*%s' % (dname[4:-9],dname[-9:])))
if 'id0' in dname :
- gridlistread += glob.glob('id*/lev*/%s*-lev*%s' % (dname[4:-9],dname[-9:]))
+ gridlistread += glob.glob(os.path.join(dataset_dir, 'id*/lev*/%s*-lev*%s' % (dname[4:-9],dname[-9:])))
else :
- gridlistread += glob.glob('lev*/%s*-lev*%s' % (dname[:-9],dname[-9:]))
+ gridlistread += glob.glob(os.path.join(dataset_dir, 'lev*/%s*-lev*%s' % (dname[:-9],dname[-9:])))
self.nvtk = len(gridlistread)+1
self.current_redshift = self.omega_lambda = self.omega_matter = \
https://bitbucket.org/yt_analysis/yt/commits/72aa625e39ec/
Changeset: 72aa625e39ec
Branch: yt
User: jzuhone
Date: 2013-10-17 23:44:27
Summary: Merging
Affected #: 6 files
diff -r eaafe86bfb3295364fe770661b8676f7456193f3 -r 72aa625e39ec63eb1c10b9dc911e782521ff62b4 yt/analysis_modules/sunyaev_zeldovich/projection.py
--- a/yt/analysis_modules/sunyaev_zeldovich/projection.py
+++ b/yt/analysis_modules/sunyaev_zeldovich/projection.py
@@ -27,15 +27,14 @@
from yt.visualization.volume_rendering.camera import off_axis_projection
from yt.utilities.parallel_tools.parallel_analysis_interface import \
communication_system, parallel_root_only
-from yt.utilities.exceptions import YTException
import numpy as np
I0 = 2*(kboltz*Tcmb)**3/((hcgs*clight)**2)*1.0e17
try:
import SZpack
-except:
- raise ImportError("SZpack not installed. It can be obtained from from http://www.chluba.de/SZpack/.")
+except ImportError:
+ pass
vlist = "xyz"
@@ -87,7 +86,7 @@
self.mueinv = 1./mue
self.xinit = hcgs*self.freqs*1.0e9/(kboltz*Tcmb)
self.freq_fields = ["%d_GHz" % (int(freq)) for freq in freqs]
- self.field_dict = {}
+ self.data = {}
self.units = {}
self.units["TeSZ"] = r"$\mathrm{keV}$"
@@ -187,8 +186,9 @@
ctr = center
if source is not None:
- raise YTException("Source argument is not currently supported for off-axis S-Z projections.")
-
+ mylog.error("Source argument is not currently supported for off-axis S-Z projections.")
+ raise NotImplementedError
+
def _beta_par(field, data):
vpar = data["Density"]*(data["x-velocity"]*L[0]+
data["y-velocity"]*L[1]+
@@ -254,9 +254,9 @@
pbar.finish()
for i, field in enumerate(self.freq_fields):
- self.field_dict[field] = ImageArray(I0*self.xinit[i]**3*signal[i,:,:])
- self.field_dict["Tau"] = ImageArray(tau)
- self.field_dict["TeSZ"] = ImageArray(Te)
+ self.data[field] = ImageArray(I0*self.xinit[i]**3*signal[i,:,:])
+ self.data["Tau"] = ImageArray(tau)
+ self.data["TeSZ"] = ImageArray(Te)
@parallel_root_only
def write_fits(self, filename_prefix, clobber=True):
@@ -282,7 +282,7 @@
coords["yctr"] = 0.0
coords["units"] = "kpc"
other_keys = {"Time" : self.pf.current_time}
- write_fits(self.field_dict, filename_prefix, clobber=clobber, coords=coords,
+ write_fits(self.data, filename_prefix, clobber=clobber, coords=coords,
other_keys=other_keys)
@parallel_root_only
@@ -328,21 +328,21 @@
for field, data in self.items():
f.create_dataset(field,data=data)
f.close()
-
+
def keys(self):
- return self.field_dict.keys()
+ return self.data.keys()
def items(self):
- return self.field_dict.items()
+ return self.data.items()
def values(self):
- return self.field_dict.values()
+ return self.data.values()
def has_key(self, key):
- return key in self.field_dict.keys()
+ return key in self.data.keys()
def __getitem__(self, key):
- return self.field_dict[key]
+ return self.data[key]
@property
def shape(self):
diff -r eaafe86bfb3295364fe770661b8676f7456193f3 -r 72aa625e39ec63eb1c10b9dc911e782521ff62b4 yt/analysis_modules/sunyaev_zeldovich/tests/test_projection.py
--- a/yt/analysis_modules/sunyaev_zeldovich/tests/test_projection.py
+++ b/yt/analysis_modules/sunyaev_zeldovich/tests/test_projection.py
@@ -11,13 +11,16 @@
#-----------------------------------------------------------------------------
from yt.frontends.stream.api import load_uniform_grid
-from yt.funcs import get_pbar
+from yt.funcs import get_pbar, mylog
from yt.utilities.physical_constants import cm_per_kpc, K_per_keV, \
mh, cm_per_km, kboltz, Tcmb, hcgs, clight, sigma_thompson
from yt.testing import *
from yt.utilities.answer_testing.framework import requires_pf, \
- GenericArrayTest, data_dir_load
-from yt.analysis_modules.sunyaev_zeldovich.projection import SZProjection, I0
+ GenericArrayTest, data_dir_load, GenericImageTest
+try:
+ from yt.analysis_modules.sunyaev_zeldovich.projection import SZProjection, I0
+except ImportError:
+ pass
import numpy as np
try:
import SZpack
@@ -109,18 +112,24 @@
@requires_pf(M7)
def test_M7_onaxis():
pf = data_dir_load(M7)
- def onaxis_func():
- szprj = SZProjection(pf, freqs)
- szprj.on_axis(2)
- return szprj
- yield GenericArrayTest(pf, onaxis_func)
-
+ szprj = SZProjection(pf, freqs)
+ szprj.on_axis(2, nx=100)
+ def onaxis_array_func():
+ return szprj.data
+ def onaxis_image_func(filename_prefix):
+ szprj.write_png(filename_prefix)
+ yield GenericArrayTest(pf, onaxis_array_func)
+ yield GenericImageTest(pf, onaxis_image_func, 3)
+
@requires_module("SZpack")
@requires_pf(M7)
def test_M7_offaxis():
- pf = data_dir_load(sloshing)
- def offaxis_func():
- szprj = SZProjection(pf, freqs)
- szprj.off_axis(np.array([0.1,-0.2,0.4]))
- return szprj
- yield GenericArrayTest(pf, offaxis_func)
+ pf = data_dir_load(M7)
+ szprj = SZProjection(pf, freqs)
+ szprj.off_axis(np.array([0.1,-0.2,0.4]), nx=100)
+ def offaxis_array_func():
+ return szprj.data
+ def offaxis_image_func(filename_prefix):
+ szprj.write_png(filename_prefix)
+ yield GenericArrayTest(pf, offaxis_array_func)
+ yield GenericImageTest(pf, offaxis_image_func, 3)
diff -r eaafe86bfb3295364fe770661b8676f7456193f3 -r 72aa625e39ec63eb1c10b9dc911e782521ff62b4 yt/analysis_modules/two_point_functions/two_point_functions.py
--- a/yt/analysis_modules/two_point_functions/two_point_functions.py
+++ b/yt/analysis_modules/two_point_functions/two_point_functions.py
@@ -499,7 +499,7 @@
points[:, 2] = points[:, 2] / self.period[2]
fKD.qv_many = points.T
fKD.nn_tags = np.asfortranarray(np.empty((1, points.shape[0]), dtype='int64'))
- find_many_nn_nearest_neighbors()
+ fKD.find_many_nn_nearest_neighbors()
# The -1 is for fortran counting.
n = fKD.nn_tags[0,:] - 1
return n
diff -r eaafe86bfb3295364fe770661b8676f7456193f3 -r 72aa625e39ec63eb1c10b9dc911e782521ff62b4 yt/data_objects/tests/test_fields.py
--- a/yt/data_objects/tests/test_fields.py
+++ b/yt/data_objects/tests/test_fields.py
@@ -86,6 +86,9 @@
if field.startswith("particle"): continue
if field.startswith("CIC"): continue
if field.startswith("WeakLensingConvergence"): continue
+ if field.startswith("BetaPar"): continue
+ if field.startswith("TBetaPar"): continue
+ if field.startswith("BetaPerp"): continue
if FieldInfo[field].particle_type: continue
for nproc in [1, 4, 8]:
yield TestFieldAccess(field, nproc)
diff -r eaafe86bfb3295364fe770661b8676f7456193f3 -r 72aa625e39ec63eb1c10b9dc911e782521ff62b4 yt/utilities/answer_testing/framework.py
--- a/yt/utilities/answer_testing/framework.py
+++ b/yt/utilities/answer_testing/framework.py
@@ -24,6 +24,7 @@
import shelve
import zlib
import tempfile
+import glob
from matplotlib.testing.compare import compare_images
from nose.plugins import Plugin
@@ -576,6 +577,16 @@
for newc, oldc in zip(new_result["children"], old_result["children"]):
assert(newp == oldp)
+def compare_image_lists(new_result, old_result, decimals):
+ fns = ['old.png', 'new.png']
+ num_images = len(old_result)
+ assert(num_images > 0)
+ for i in xrange(num_images):
+ mpimg.imsave(fns[0], np.loads(zlib.decompress(old_result[i])))
+ mpimg.imsave(fns[1], np.loads(zlib.decompress(new_result[i])))
+ assert compare_images(fns[0], fns[1], 10**(decimals)) == None
+ for fn in fns: os.remove(fn)
+
class PlotWindowAttributeTest(AnswerTestingTest):
_type_name = "PlotWindowAttribute"
_attrs = ('plot_type', 'plot_field', 'plot_axis', 'attr_name', 'attr_args')
@@ -603,20 +614,27 @@
return [zlib.compress(image.dumps())]
def compare(self, new_result, old_result):
- fns = ['old.png', 'new.png']
- mpimg.imsave(fns[0], np.loads(zlib.decompress(old_result[0])))
- mpimg.imsave(fns[1], np.loads(zlib.decompress(new_result[0])))
- assert compare_images(fns[0], fns[1], 10**(-self.decimals)) == None
- for fn in fns: os.remove(fn)
-
+ compare_image_lists(new_result, old_result, self.decimals)
+
class GenericArrayTest(AnswerTestingTest):
_type_name = "GenericArray"
- _attrs = ('array_func','args','kwargs')
+ _attrs = ('array_func_name','args','kwargs')
def __init__(self, pf_fn, array_func, args=None, kwargs=None, decimals=None):
- super(AnalysisModuleResultTest, self).__init__(pf_fn)
+ super(GenericArrayTest, self).__init__(pf_fn)
self.array_func = array_func
+ self.array_func_name = array_func.func_name
+ self.args = args
+ self.kwargs = kwargs
self.decimals = decimals
def run(self):
+ if self.args is None:
+ args = []
+ else:
+ args = self.args
+ if self.kwargs is None:
+ kwargs = {}
+ else:
+ kwargs = self.kwargs
return self.array_func(*args, **kwargs)
def compare(self, new_result, old_result):
assert_equal(len(new_result), len(old_result),
@@ -627,7 +645,40 @@
assert_equal(new_result[k], old_result[k])
else:
assert_allclose(new_result[k], old_result[k], 10**(-self.decimals))
-
+
+class GenericImageTest(AnswerTestingTest):
+ _type_name = "GenericImage"
+ _attrs = ('image_func_name','args','kwargs')
+ def __init__(self, pf_fn, image_func, decimals, args=None, kwargs=None):
+ super(GenericImageTest, self).__init__(pf_fn)
+ self.image_func = image_func
+ self.image_func_name = image_func.func_name
+ self.args = args
+ self.kwargs = kwargs
+ self.decimals = decimals
+ def run(self):
+ if self.args is None:
+ args = []
+ else:
+ args = self.args
+ if self.kwargs is None:
+ kwargs = {}
+ else:
+ kwargs = self.kwargs
+ comp_imgs = []
+ tmpdir = tempfile.mkdtemp()
+ image_prefix = os.path.join(tmpdir,"test_img")
+ self.image_func(image_prefix, *args, **kwargs)
+ imgs = glob.glob(image_prefix+"*")
+ assert(len(imgs) > 0)
+ for img in imgs:
+ img_data = mpimg.imread(img)
+ os.remove(img)
+ comp_imgs.append(zlib.compress(img_data.dumps()))
+ return comp_imgs
+ def compare(self, new_result, old_result):
+ compare_image_lists(new_result, old_result, self.decimals)
+
def requires_pf(pf_fn, big_data = False):
def ffalse(func):
return lambda: None
https://bitbucket.org/yt_analysis/yt/commits/a12f2aecefcd/
Changeset: a12f2aecefcd
Branch: yt
User: jzuhone
Date: 2013-10-17 23:53:44
Summary: Merged yt_analysis/yt into yt
Affected #: 25 files
diff -r 72aa625e39ec63eb1c10b9dc911e782521ff62b4 -r a12f2aecefcdc81d8113ee6210ec0eea3c838726 doc/get_yt.sh
--- /dev/null
+++ b/doc/get_yt.sh
@@ -0,0 +1,358 @@
+#
+# Hi there! Welcome to the yt installation script.
+#
+# This script is designed to create a fully isolated Python installation
+# with the dependencies you need to run yt.
+#
+# This script is based on Conda, a distribution mechanism from Continuum
+# Analytics. The process is as follows:
+#
+# 1. Download the appropriate Conda installation package
+# 2. Install Conda into the specified directory
+# 3. Install yt-specific dependencies
+# 4. Install yt
+#
+# There are a few options listed below, but by default, this will install
+# everything. At the end, it will tell you what to do to use yt.
+#
+# By default this will install yt from source.
+#
+# If you experience problems, please visit the Help section at
+# http://yt-project.org.
+#
+DEST_SUFFIX="yt-conda"
+DEST_DIR="`pwd`/${DEST_SUFFIX/ /}" # Installation location
+BRANCH="yt" # This is the branch to which we will forcibly update.
+INST_YT_SOURCE=1 # Do we do a source install of yt?
+
+##################################################################
+# #
+# You will likely not have to modify anything below this region. #
+# #
+##################################################################
+
+LOG_FILE="`pwd`/yt_install.log"
+
+# Here is the idiom for redirecting to the log file:
+# ( SOMECOMMAND 2>&1 ) 1>> ${LOG_FILE} || do_exit
+
+MINICONDA_URLBASE="http://repo.continuum.io/miniconda"
+MINICONDA_VERSION="1.9.1"
+YT_RECIPE_REPO="https://bitbucket.org/yt_analysis/yt_conda/raw/default"
+
+function do_exit
+{
+ echo "********************************************"
+ echo " FAILURE REPORT:"
+ echo "********************************************"
+ echo
+ tail -n 10 ${LOG_FILE}
+ echo
+ echo "********************************************"
+ echo "********************************************"
+ echo "Failure. Check ${LOG_FILE}. The last 10 lines are above."
+ exit 1
+}
+
+function log_cmd
+{
+ echo "EXECUTING:" >> ${LOG_FILE}
+ echo " $*" >> ${LOG_FILE}
+ ( $* 2>&1 ) 1>> ${LOG_FILE} || do_exit
+}
+
+function get_ytproject
+{
+ [ -e $1 ] && return
+ echo "Downloading $1 from yt-project.org"
+ ${GETFILE} "http://yt-project.org/dependencies/$1" || do_exit
+ ( ${SHASUM} -c $1.sha512 2>&1 ) 1>> ${LOG_FILE} || do_exit
+}
+
+function get_ytdata
+{
+ echo "Downloading $1 from yt-project.org"
+ [ -e $1 ] && return
+ ${GETFILE} "http://yt-project.org/data/$1" || do_exit
+ ( ${SHASUM} -c $1.sha512 2>&1 ) 1>> ${LOG_FILE} || do_exit
+}
+
+function get_ytrecipe {
+ RDIR=${DEST_DIR}/src/yt-recipes/$1
+ mkdir -p ${RDIR}
+ pushd ${RDIR}
+ log_cmd ${GETFILE} ${YT_RECIPE_REPO}/$1/meta.yaml
+ log_cmd ${GETFILE} ${YT_RECIPE_REPO}/$1/build.sh
+ NEW_PKG=`conda build --output ${RDIR}`
+ log_cmd conda build --no-binstar-upload ${RDIR}
+ log_cmd conda install ${NEW_PKG}
+ popd
+}
+
+
+echo
+echo
+echo "========================================================================"
+echo
+echo "Hi there! This is the yt installation script. We're going to download"
+echo "some stuff and install it to create a self-contained, isolated"
+echo "environment for yt to run within."
+echo
+echo "This will install Miniconda from Continuum Analytics, the necessary"
+echo "packages to run yt, and create a self-contained environment for you to"
+echo "use yt. Additionally, Conda itself provides the ability to install"
+echo "many other packages that can be used for other purposes."
+echo
+MYOS=`uname -s` # A guess at the OS
+if [ "${MYOS##Darwin}" != "${MYOS}" ]
+then
+ echo "Looks like you're running on Mac OSX."
+ echo
+ echo "NOTE: you must have the Xcode command line tools installed."
+ echo
+ echo "The instructions for obtaining these tools varies according"
+ echo "to your exact OS version. On older versions of OS X, you"
+ echo "must register for an account on the apple developer tools"
+ echo "website: https://developer.apple.com/downloads to obtain the"
+ echo "download link."
+ echo
+ echo "We have gathered some additional instructions for each"
+ echo "version of OS X below. If you have trouble installing yt"
+ echo "after following these instructions, don't hesitate to contact"
+ echo "the yt user's e-mail list."
+ echo
+ echo "You can see which version of OSX you are running by clicking"
+ echo "'About This Mac' in the apple menu on the left hand side of"
+ echo "menu bar. We're assuming that you've installed all operating"
+ echo "system updates; if you have an older version, we suggest"
+ echo "running software update and installing all available updates."
+ echo
+ echo "OS X 10.5.8: search for and download Xcode 3.1.4 from the"
+ echo "Apple developer tools website."
+ echo
+ echo "OS X 10.6.8: search for and download Xcode 3.2 from the Apple"
+ echo "developer tools website. You can either download the"
+ echo "Xcode 3.2.2 Developer Tools package (744 MB) and then use"
+ echo "Software Update to update to XCode 3.2.6 or"
+ echo "alternatively, you can download the Xcode 3.2.6/iOS SDK"
+ echo "bundle (4.1 GB)."
+ echo
+ echo "OS X 10.7.5: download Xcode 4.2 from the mac app store"
+ echo "(search for Xcode)."
+ echo "Alternatively, download the Xcode command line tools from"
+ echo "the Apple developer tools website."
+ echo
+ echo "OS X 10.8.2: download Xcode 4.6.1 from the mac app store."
+ echo "(search for Xcode)."
+ echo "Additionally, you will have to manually install the Xcode"
+ echo "command line tools, see:"
+ echo "http://stackoverflow.com/questions/9353444"
+ echo "Alternatively, download the Xcode command line tools from"
+ echo "the Apple developer tools website."
+ echo
+ echo "NOTE: It's possible that the installation will fail, if so,"
+ echo "please set the following environment variables, remove any"
+ echo "broken installation tree, and re-run this script verbatim."
+ echo
+ echo "$ export CC=gcc"
+ echo "$ export CXX=g++"
+ echo
+ MINICONDA_OS="MacOSX-x86_64"
+fi
+if [ "${MYOS##Linux}" != "${MYOS}" ]
+then
+ echo "Looks like you're on Linux."
+ echo
+ echo "Please make sure you have the developer tools for your OS installed."
+ echo
+ if [ -f /etc/SuSE-release ] && [ `grep --count SUSE /etc/SuSE-release` -gt 0 ]
+ then
+ echo "Looks like you're on an OpenSUSE-compatible machine."
+ echo
+ echo "You need to have these packages installed:"
+ echo
+ echo " * devel_C_C++"
+ echo " * libopenssl-devel"
+ echo " * libuuid-devel"
+ echo " * zip"
+ echo " * gcc-c++"
+ echo " * chrpath"
+ echo
+ echo "You can accomplish this by executing:"
+ echo
+ echo "$ sudo zypper install -t pattern devel_C_C++"
+ echo "$ sudo zypper install gcc-c++ libopenssl-devel libuuid-devel zip"
+ echo "$ sudo zypper install chrpath"
+ fi
+ if [ -f /etc/lsb-release ] && [ `grep --count buntu /etc/lsb-release` -gt 0 ]
+ then
+ echo "Looks like you're on an Ubuntu-compatible machine."
+ echo
+ echo "You need to have these packages installed:"
+ echo
+ echo " * libssl-dev"
+ echo " * build-essential"
+ echo " * libncurses5"
+ echo " * libncurses5-dev"
+ echo " * zip"
+ echo " * uuid-dev"
+ echo " * chrpath"
+ echo
+ echo "You can accomplish this by executing:"
+ echo
+ echo "$ sudo apt-get install libssl-dev build-essential libncurses5 libncurses5-dev zip uuid-dev chrpath"
+ echo
+ fi
+ echo
+ echo "If you are running on a supercomputer or other module-enabled"
+ echo "system, please make sure that the GNU module has been loaded."
+ echo
+ if [ "${MYOS##x86_64}" != "${MYOS}" ]
+ then
+ MINICONDA_OS="Linux-x86_64"
+ elif [ "${MYOS##i386}" != "${MYOS}" ]
+ then
+ MINICONDA_OS="Linux-x86"
+ else
+ echo "Not sure which type of Linux you're on. Going with x86_64."
+ MINICONDA_OS="Linux-x86_64"
+ fi
+fi
+echo
+echo "If you'd rather not continue, hit Ctrl-C."
+echo
+echo "========================================================================"
+echo
+read -p "[hit enter] "
+echo
+echo "Awesome! Here we go."
+echo
+
+MINICONDA_PKG=Miniconda-${MINICONDA_VERSION}-${MINICONDA_OS}.sh
+
+if type -P wget &>/dev/null
+then
+ echo "Using wget"
+ export GETFILE="wget -nv"
+else
+ echo "Using curl"
+ export GETFILE="curl -sSO"
+fi
+
+echo
+echo "Downloading ${MINICONDA_URLBASE}/${MINICONDA_PKG}"
+echo "Downloading ${MINICONDA_URLBASE}/${MINICONDA_PKG}" >> ${LOG_FILE}
+echo
+
+${GETFILE} ${MINICONDA_URLBASE}/${MINICONDA_PKG} || do_exit
+
+echo "Installing the Miniconda python environment."
+
+log_cmd bash ./${MINICONDA_PKG} -b -p $DEST_DIR
+
+# I don't think we need OR want this anymore:
+#export LD_LIBRARY_PATH=${DEST_DIR}/lib:$LD_LIBRARY_PATH
+
+# This we *do* need.
+export PATH=${DEST_DIR}/bin:$PATH
+
+echo "Installing the necessary packages for yt."
+echo "This may take a while, but don't worry. yt loves you."
+
+declare -a YT_DEPS
+YT_DEPS+=('python')
+YT_DEPS+=('distribute')
+YT_DEPS+=('libpng')
+YT_DEPS+=('freetype')
+YT_DEPS+=('hdf5')
+YT_DEPS+=('numpy')
+YT_DEPS+=('pygments')
+YT_DEPS+=('jinja2')
+YT_DEPS+=('tornado')
+YT_DEPS+=('pyzmq')
+YT_DEPS+=('ipython')
+YT_DEPS+=('sphinx')
+YT_DEPS+=('h5py')
+YT_DEPS+=('matplotlib')
+YT_DEPS+=('cython')
+
+# Here is our dependency list for yt
+log_cmd conda config --system --add channels http://repo.continuum.io/pkgs/free
+log_cmd conda config --system --add channels http://repo.continuum.io/pkgs/dev
+log_cmd conda config --system --add channels http://repo.continuum.io/pkgs/gpl
+log_cmd conda update --yes conda
+
+echo "Current dependencies: ${YT_DEPS[@]}"
+log_cmd echo "DEPENDENCIES" ${YT_DEPS[@]}
+log_cmd conda install --yes ${YT_DEPS[@]}
+
+echo "Installing mercurial."
+get_ytrecipe mercurial
+
+if [ $INST_YT_SOURCE -eq 0 ]
+then
+ echo "Installing yt as a package."
+ get_ytrecipe yt
+else
+ # We do a source install.
+ YT_DIR="${DEST_DIR}/src/yt-hg"
+ export PNG_DIR=${DEST_DIR}
+ export FTYPE_DIR=${DEST_DIR}
+ export HDF5_DIR=${DEST_DIR}
+ log_cmd hg clone -r ${BRANCH} https://bitbucket.org/yt_analysis/yt ${YT_DIR}
+ pushd ${YT_DIR}
+ echo $DEST_DIR > hdf5.cfg
+ log_cmd python setup.py develop
+ popd
+ log_cmd cp ${YT_DIR}/doc/activate ${DEST_DIR}/bin/activate
+ log_cmd sed -i.bak -e "s,__YT_DIR__,${DEST_DIR}," ${DEST_DIR}/bin/activate
+ log_cmd cp ${YT_DIR}/doc/activate.csh ${DEST_DIR}/bin/activate.csh
+ log_cmd sed -i.bak -e "s,__YT_DIR__,${DEST_DIR}," ${DEST_DIR}/bin/activate.csh
+fi
+
+echo
+echo
+echo "========================================================================"
+echo
+echo "yt and the Conda system are now installed in $DEST_DIR ."
+echo
+if [ $INST_YT_SOURCE -eq 0 ]
+then
+ echo "You must now modify your PATH variable by prepending:"
+ echo
+ echo " $DEST_DIR/bin"
+ echo
+ echo "For example, if you use bash, place something like this at the end"
+ echo "of your ~/.bashrc :"
+ echo
+ echo " export PATH=$DEST_DIR/bin:$PATH"
+else
+ echo "To run from this new installation, use the activate script for this "
+ echo "environment."
+ echo
+ echo " $ source $DEST_DIR/bin/activate"
+ echo
+ echo "This modifies the environment variables YT_DEST, PATH, PYTHONPATH, and"
+ echo "LD_LIBRARY_PATH to match your new yt install. If you use csh, just"
+ echo "append .csh to the above."
+fi
+echo
+echo "To get started with yt, check out the orientation:"
+echo
+echo " http://yt-project.org/doc/orientation/"
+echo
+echo "or just activate your environment and run 'yt serve' to bring up the"
+echo "yt GUI."
+echo
+echo "For support, see the website and join the mailing list:"
+echo
+echo " http://yt-project.org/"
+echo " http://yt-project.org/data/ (Sample data)"
+echo " http://yt-project.org/doc/ (Docs)"
+echo
+echo " http://lists.spacepope.org/listinfo.cgi/yt-users-spacepope.org"
+echo
+echo "========================================================================"
+echo
+echo "Oh, look at me, still talking when there's science to do!"
+echo "Good luck, and email the user list if you run into any problems."
diff -r 72aa625e39ec63eb1c10b9dc911e782521ff62b4 -r a12f2aecefcdc81d8113ee6210ec0eea3c838726 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -918,6 +918,8 @@
do_setup_py $SYMPY
[ $INST_PYX -eq 1 ] && do_setup_py $PYX
+( ${DEST_DIR}/bin/pip install jinja2 2>&1 ) 1>> ${LOG_FILE}
+
# Now we build Rockstar and set its environment variable.
if [ $INST_ROCKSTAR -eq 1 ]
then
diff -r 72aa625e39ec63eb1c10b9dc911e782521ff62b4 -r a12f2aecefcdc81d8113ee6210ec0eea3c838726 yt/analysis_modules/sunyaev_zeldovich/tests/test_projection.py
--- a/yt/analysis_modules/sunyaev_zeldovich/tests/test_projection.py
+++ b/yt/analysis_modules/sunyaev_zeldovich/tests/test_projection.py
@@ -29,11 +29,11 @@
mue = 1./0.88
freqs = np.array([30., 90., 240.])
-
+
def setup():
"""Test specific setup."""
from yt.config import ytcfg
- ytcfg["yt", "__withintesting"] = "True"
+ ytcfg["yt", "__withintesting"] = "True"
def full_szpack3d(pf, xo):
data = pf.h.grids[0]
@@ -43,7 +43,7 @@
Dtau = sigma_thompson*data["Density"]/(mh*mue)*dz
Te = data["Temperature"]/K_per_keV
betac = data["z-velocity"]/clight
- pbar = get_pbar("Computing 3-D cell-by-cell S-Z signal for comparison.", nx)
+ pbar = get_pbar("Computing 3-D cell-by-cell S-Z signal for comparison.", nx)
for i in xrange(nx):
pbar.update(i)
for j in xrange(ny):
@@ -67,7 +67,7 @@
a = 200.
v0 = 300.*cm_per_km
ddims = (nx,ny,nz)
-
+
x, y, z = np.mgrid[-R:R:nx*1j,
-R:R:ny*1j,
-R:R:nz*1j]
@@ -87,7 +87,7 @@
data["z-velocity"] = velz
bbox = np.array([[-0.5,0.5],[-0.5,0.5],[-0.5,0.5]])
-
+
L = 2*R*cm_per_kpc
dl = L/nz
@@ -118,9 +118,11 @@
return szprj.data
def onaxis_image_func(filename_prefix):
szprj.write_png(filename_prefix)
- yield GenericArrayTest(pf, onaxis_array_func)
- yield GenericImageTest(pf, onaxis_image_func, 3)
-
+ for test in [GenericArrayTest(pf, onaxis_array_func),
+ GenericImageTest(pf, onaxis_image_func, 3)]:
+ test_M7_onaxis.__name__ = test.description
+ yield test
+
@requires_module("SZpack")
@requires_pf(M7)
def test_M7_offaxis():
@@ -131,5 +133,7 @@
return szprj.data
def offaxis_image_func(filename_prefix):
szprj.write_png(filename_prefix)
- yield GenericArrayTest(pf, offaxis_array_func)
- yield GenericImageTest(pf, offaxis_image_func, 3)
+ for test in [GenericArrayTest(pf, offaxis_array_func),
+ GenericImageTest(pf, offaxis_image_func, 3)]:
+ test_M7_offaxis.__name__ = test.description
+ yield test
diff -r 72aa625e39ec63eb1c10b9dc911e782521ff62b4 -r a12f2aecefcdc81d8113ee6210ec0eea3c838726 yt/data_objects/hierarchy.py
--- a/yt/data_objects/hierarchy.py
+++ b/yt/data_objects/hierarchy.py
@@ -395,6 +395,23 @@
[self.grid_left_edge[:,0], self.grid_right_edge[:,1], self.grid_right_edge[:,2]],
], dtype='float64')
+ def lock_grids_to_parents(self):
+ r"""This function locks grid edges to their parents.
+
+ This is useful in cases where the grid structure may be somewhat
+ irregular, or where setting the left and right edges is a lossy
+ process. It is designed to correct situations where left/right edges
+ may be set slightly incorrectly, resulting in discontinuities in images
+ and the like.
+ """
+ mylog.info("Locking grids to parents.")
+ for i, g in enumerate(self.grids):
+ si = g.get_global_startindex()
+ g.LeftEdge = self.pf.domain_left_edge + g.dds * si
+ g.RightEdge = g.LeftEdge + g.ActiveDimensions * g.dds
+ self.grid_left_edge[i,:] = g.LeftEdge
+ self.grid_right_edge[i,:] = g.RightEdge
+
def print_stats(self):
"""
Prints out (stdout) relevant information about the simulation
diff -r 72aa625e39ec63eb1c10b9dc911e782521ff62b4 -r a12f2aecefcdc81d8113ee6210ec0eea3c838726 yt/frontends/chombo/tests/test_outputs.py
--- a/yt/frontends/chombo/tests/test_outputs.py
+++ b/yt/frontends/chombo/tests/test_outputs.py
@@ -29,6 +29,7 @@
pf = data_dir_load(gc)
yield assert_equal, str(pf), "data.0077.3d.hdf5"
for test in small_patch_amr(gc, _fields):
+ test_gc.__name__ = test.description
yield test
tb = "TurbBoxLowRes/data.0005.3d.hdf5"
@@ -37,4 +38,5 @@
pf = data_dir_load(tb)
yield assert_equal, str(pf), "data.0005.3d.hdf5"
for test in small_patch_amr(tb, _fields):
+ test_tb.__name__ = test.description
yield test
diff -r 72aa625e39ec63eb1c10b9dc911e782521ff62b4 -r a12f2aecefcdc81d8113ee6210ec0eea3c838726 yt/frontends/enzo/data_structures.py
--- a/yt/frontends/enzo/data_structures.py
+++ b/yt/frontends/enzo/data_structures.py
@@ -40,6 +40,7 @@
from yt.utilities.definitions import \
mpc_conversion, sec_conversion
from yt.utilities import hdf5_light_reader
+from yt.utilities.io_handler import io_registry
from yt.utilities.logger import ytLogger as mylog
from .definitions import parameterDict
@@ -131,10 +132,11 @@
def retrieve_ghost_zones(self, n_zones, fields, all_levels=False,
smoothed=False):
- # We ignore smoothed in this case.
- if n_zones > 3:
+ NGZ = self.pf.parameters.get("NumberOfGhostZones", 3)
+ if n_zones > NGZ:
return EnzoGrid.retrieve_ghost_zones(
self, n_zones, fields, all_levels, smoothed)
+
# ----- Below is mostly the original code, except we remove the field
# ----- access section
# We will attempt this by creating a datacube that is exactly bigger
@@ -162,7 +164,12 @@
level, new_left_edge, **kwargs)
# ----- This is EnzoGrid.get_data, duplicated here mostly for
# ---- efficiency's sake.
- sl = [slice(3 - n_zones, -(3 - n_zones)) for i in range(3)]
+ start_zone = NGZ - n_zones
+ if start_zone == 0:
+ end_zone = None
+ else:
+ end_zone = -(NGZ - n_zones)
+ sl = [slice(start_zone, end_zone) for i in range(3)]
if fields is None: return cube
for field in ensure_list(fields):
if field in self.hierarchy.field_list:
@@ -543,6 +550,9 @@
result[p] = result[p][0:max_num]
return result
+ def _setup_data_io(self):
+ self.io = io_registry[self.data_style](self.parameter_file)
+
class EnzoHierarchyInMemory(EnzoHierarchy):
diff -r 72aa625e39ec63eb1c10b9dc911e782521ff62b4 -r a12f2aecefcdc81d8113ee6210ec0eea3c838726 yt/frontends/enzo/io.py
--- a/yt/frontends/enzo/io.py
+++ b/yt/frontends/enzo/io.py
@@ -32,6 +32,10 @@
_data_style = "enzo_hdf4"
+ def __init__(self, pf, *args, **kwargs):
+ BaseIOHandler.__init__(self, *args, **kwargs)
+ self.pf = pf
+
def modify(self, field):
return field.swapaxes(0,2)
@@ -61,6 +65,10 @@
_data_style = "enzo_hdf5"
_particle_reader = True
+ def __init__(self, pf, *args, **kwargs):
+ BaseIOHandler.__init__(self, *args, **kwargs)
+ self.pf = pf
+
def _read_field_names(self, grid):
"""
Returns a list of fields associated with the filename
@@ -90,6 +98,10 @@
_data_style = "enzo_packed_3d"
_particle_reader = True
+ def __init__(self, pf, *args, **kwargs):
+ BaseIOHandler.__init__(self, *args, **kwargs)
+ self.pf = pf
+
def _read_particles(self, fields, rtype, args, grid_list, enclosed,
conv_factors):
filenames = [g.filename for g in grid_list]
@@ -144,10 +156,18 @@
class IOHandlerPackedHDF5GhostZones(IOHandlerPackedHDF5):
_data_style = "enzo_packed_3d_gz"
+ def __init__(self, pf, *args, **kwargs):
+ BaseIOHandler.__init__(self, *args, **kwargs)
+ self.pf = pf
+
def modify(self, field):
+ NGZ = self.pf.parameters.get("NumberOfGhostZones", 3)
+ sl = (slice(NGZ,-NGZ),
+ slice(NGZ,-NGZ),
+ slice(NGZ,-NGZ))
if len(field.shape) < 3:
return field
- tr = field[3:-3,3:-3,3:-3].swapaxes(0,2)
+ tr = field[sl].swapaxes(0,2)
return tr.copy() # To ensure contiguous
def _read_raw_data_set(self, grid, field):
@@ -158,7 +178,7 @@
_data_style = "enzo_inline"
- def __init__(self, ghost_zones=3):
+ def __init__(self, pf, ghost_zones=3):
import enzo
self.enzo = enzo
self.grids_in_memory = enzo.grid_data
@@ -166,6 +186,7 @@
self.my_slice = (slice(ghost_zones,-ghost_zones),
slice(ghost_zones,-ghost_zones),
slice(ghost_zones,-ghost_zones))
+ self.pf = pf
BaseIOHandler.__init__(self)
def _read_data(self, grid, field):
@@ -210,6 +231,10 @@
_data_style = "enzo_packed_2d"
_particle_reader = False
+ def __init__(self, pf, *args, **kwargs):
+ BaseIOHandler.__init__(self, *args, **kwargs)
+ self.pf = pf
+
def _read_data(self, grid, field):
return hdf5_light_reader.ReadData(grid.filename,
"/Grid%08i/%s" % (grid.id, field)).transpose()[:,:,None]
@@ -228,6 +253,10 @@
_data_style = "enzo_packed_1d"
_particle_reader = False
+ def __init__(self, pf, *args, **kwargs):
+ BaseIOHandler.__init__(self, *args, **kwargs)
+ self.pf = pf
+
def _read_data(self, grid, field):
return hdf5_light_reader.ReadData(grid.filename,
"/Grid%08i/%s" % (grid.id, field)).transpose()[:,None,None]
diff -r 72aa625e39ec63eb1c10b9dc911e782521ff62b4 -r a12f2aecefcdc81d8113ee6210ec0eea3c838726 yt/frontends/enzo/tests/test_outputs.py
--- a/yt/frontends/enzo/tests/test_outputs.py
+++ b/yt/frontends/enzo/tests/test_outputs.py
@@ -30,6 +30,7 @@
pf = data_dir_load(m7)
yield assert_equal, str(pf), "moving7_0010"
for test in small_patch_amr(m7, _fields):
+ test_moving7.__name__ = test.description
yield test
g30 = "IsolatedGalaxy/galaxy0030/galaxy0030"
@@ -38,4 +39,5 @@
pf = data_dir_load(g30)
yield assert_equal, str(pf), "galaxy0030"
for test in big_patch_amr(g30, _fields):
+ test_galaxy0030.__name__ = test.description
yield test
diff -r 72aa625e39ec63eb1c10b9dc911e782521ff62b4 -r a12f2aecefcdc81d8113ee6210ec0eea3c838726 yt/frontends/flash/tests/test_outputs.py
--- a/yt/frontends/flash/tests/test_outputs.py
+++ b/yt/frontends/flash/tests/test_outputs.py
@@ -29,6 +29,7 @@
pf = data_dir_load(sloshing)
yield assert_equal, str(pf), "sloshing_low_res_hdf5_plt_cnt_0300"
for test in small_patch_amr(sloshing, _fields):
+ test_sloshing.__name__ = test.description
yield test
_fields_2d = ("Temperature", "Density")
@@ -39,4 +40,5 @@
pf = data_dir_load(wt)
yield assert_equal, str(pf), "windtunnel_4lev_hdf5_plt_cnt_0030"
for test in small_patch_amr(wt, _fields_2d):
+ test_wind_tunnel.__name__ = test.description
yield test
diff -r 72aa625e39ec63eb1c10b9dc911e782521ff62b4 -r a12f2aecefcdc81d8113ee6210ec0eea3c838726 yt/frontends/orion/tests/test_outputs.py
--- a/yt/frontends/orion/tests/test_outputs.py
+++ b/yt/frontends/orion/tests/test_outputs.py
@@ -29,6 +29,7 @@
pf = data_dir_load(radadvect)
yield assert_equal, str(pf), "plt00000"
for test in small_patch_amr(radadvect, _fields):
+ test_radadvect.__name__ = test.description
yield test
rt = "RadTube/plt00500"
@@ -37,4 +38,5 @@
pf = data_dir_load(rt)
yield assert_equal, str(pf), "plt00500"
for test in small_patch_amr(rt, _fields):
+ test_radtube.__name__ = test.description
yield test
diff -r 72aa625e39ec63eb1c10b9dc911e782521ff62b4 -r a12f2aecefcdc81d8113ee6210ec0eea3c838726 yt/funcs.py
--- a/yt/funcs.py
+++ b/yt/funcs.py
@@ -625,3 +625,38 @@
return
if not os.path.exists(my_dir):
only_on_root(os.makedirs, my_dir)
+
+ at contextlib.contextmanager
+def memory_checker(interval = 15):
+ r"""This is a context manager that monitors memory usage.
+
+ Parameters
+ ----------
+ interval : int
+ The number of seconds between printing the current memory usage in
+ gigabytes of the current Python interpreter.
+
+ Examples
+ --------
+
+ >>> with memory_checker(10):
+ ... arr = np.zeros(1024*1024*1024, dtype="float64")
+ ... time.sleep(15)
+ ... del arr
+ """
+ import threading
+ class MemoryChecker(threading.Thread):
+ def __init__(self, event, interval):
+ self.event = event
+ self.interval = interval
+ threading.Thread.__init__(self)
+
+ def run(self):
+ while not self.event.wait(self.interval):
+ print "MEMORY: %0.3e gb" % (get_memory_usage()/1024.)
+
+ e = threading.Event()
+ mem_check = MemoryChecker(e, interval)
+ mem_check.start()
+ yield
+ e.set()
diff -r 72aa625e39ec63eb1c10b9dc911e782521ff62b4 -r a12f2aecefcdc81d8113ee6210ec0eea3c838726 yt/gui/reason/extdirect_router.py
--- a/yt/gui/reason/extdirect_router.py
+++ b/yt/gui/reason/extdirect_router.py
@@ -9,6 +9,13 @@
This code was released under the BSD License.
"""
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
import inspect
class DirectException(Exception):
@@ -186,12 +193,4 @@
-"""
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
diff -r 72aa625e39ec63eb1c10b9dc911e782521ff62b4 -r a12f2aecefcdc81d8113ee6210ec0eea3c838726 yt/visualization/api.py
--- a/yt/visualization/api.py
+++ b/yt/visualization/api.py
@@ -20,8 +20,7 @@
from plot_collection import \
PlotCollection, \
PlotCollectionInteractive, \
- concatenate_pdfs, \
- get_multi_plot
+ concatenate_pdfs
from fixed_resolution import \
FixedResolutionBuffer, \
@@ -54,5 +53,7 @@
OffAxisSlicePlot, \
ProjectionPlot, \
OffAxisProjectionPlot
-
+from base_plot_types import \
+ get_multi_plot
+
diff -r 72aa625e39ec63eb1c10b9dc911e782521ff62b4 -r a12f2aecefcdc81d8113ee6210ec0eea3c838726 yt/visualization/base_plot_types.py
--- a/yt/visualization/base_plot_types.py
+++ b/yt/visualization/base_plot_types.py
@@ -94,3 +94,86 @@
canvas.print_figure(f)
f.seek(0)
return f.read()
+
+def get_multi_plot(nx, ny, colorbar = 'vertical', bw = 4, dpi=300,
+ cbar_padding = 0.4):
+ r"""Construct a multiple axes plot object, with or without a colorbar, into
+ which multiple plots may be inserted.
+
+ This will create a set of :class:`matplotlib.axes.Axes`, all lined up into
+ a grid, which are then returned to the user and which can be used to plot
+ multiple plots on a single figure.
+
+ Parameters
+ ----------
+ nx : int
+ Number of axes to create along the x-direction
+ ny : int
+ Number of axes to create along the y-direction
+ colorbar : {'vertical', 'horizontal', None}, optional
+ Should Axes objects for colorbars be allocated, and if so, should they
+ correspond to the horizontal or vertical set of axes?
+ bw : number
+ The base height/width of an axes object inside the figure, in inches
+ dpi : number
+ The dots per inch fed into the Figure instantiation
+
+ Returns
+ -------
+ fig : :class:`matplotlib.figure.Figure`
+ The figure created inside which the axes reside
+ tr : list of list of :class:`matplotlib.axes.Axes` objects
+ This is a list, where the inner list is along the x-axis and the outer
+ is along the y-axis
+ cbars : list of :class:`matplotlib.axes.Axes` objects
+ Each of these is an axes onto which a colorbar can be placed.
+
+ Notes
+ -----
+ This is a simple implementation for a common use case. Viewing the source
+ can be instructive, and is encouraged to see how to generate more
+ complicated or more specific sets of multiplots for your own purposes.
+ """
+ hf, wf = 1.0/ny, 1.0/nx
+ fudge_x = fudge_y = 1.0
+ if colorbar is None:
+ fudge_x = fudge_y = 1.0
+ elif colorbar.lower() == 'vertical':
+ fudge_x = nx/(cbar_padding+nx)
+ fudge_y = 1.0
+ elif colorbar.lower() == 'horizontal':
+ fudge_x = 1.0
+ fudge_y = ny/(cbar_padding+ny)
+ fig = matplotlib.figure.Figure((bw*nx/fudge_x, bw*ny/fudge_y), dpi=dpi)
+ from _mpl_imports import FigureCanvasAgg
+ fig.set_canvas(FigureCanvasAgg(fig))
+ fig.subplots_adjust(wspace=0.0, hspace=0.0,
+ top=1.0, bottom=0.0,
+ left=0.0, right=1.0)
+ tr = []
+ for j in range(ny):
+ tr.append([])
+ for i in range(nx):
+ left = i*wf*fudge_x
+ bottom = fudge_y*(1.0-(j+1)*hf) + (1.0-fudge_y)
+ ax = fig.add_axes([left, bottom, wf*fudge_x, hf*fudge_y])
+ tr[-1].append(ax)
+ cbars = []
+ if colorbar is None:
+ pass
+ elif colorbar.lower() == 'horizontal':
+ for i in range(nx):
+ # left, bottom, width, height
+ # Here we want 0.10 on each side of the colorbar
+ # We want it to be 0.05 tall
+ # And we want a buffer of 0.15
+ ax = fig.add_axes([wf*(i+0.10)*fudge_x, hf*fudge_y*0.20,
+ wf*(1-0.20)*fudge_x, hf*fudge_y*0.05])
+ cbars.append(ax)
+ elif colorbar.lower() == 'vertical':
+ for j in range(ny):
+ ax = fig.add_axes([wf*(nx+0.05)*fudge_x, hf*fudge_y*(ny-(j+0.95)),
+ wf*fudge_x*0.05, hf*fudge_y*0.90])
+ ax.clear()
+ cbars.append(ax)
+ return fig, tr, cbars
diff -r 72aa625e39ec63eb1c10b9dc911e782521ff62b4 -r a12f2aecefcdc81d8113ee6210ec0eea3c838726 yt/visualization/plot_collection.py
--- a/yt/visualization/plot_collection.py
+++ b/yt/visualization/plot_collection.py
@@ -1728,90 +1728,6 @@
canvas = FigureCanvasAgg(plot._figure)
send_figure(plot._figure)
-def get_multi_plot(nx, ny, colorbar = 'vertical', bw = 4, dpi=300,
- cbar_padding = 0.4):
- r"""Construct a multiple axes plot object, with or without a colorbar, into
- which multiple plots may be inserted.
-
- This will create a set of :class:`matplotlib.axes.Axes`, all lined up into
- a grid, which are then returned to the user and which can be used to plot
- multiple plots on a single figure.
-
- Parameters
- ----------
- nx : int
- Number of axes to create along the x-direction
- ny : int
- Number of axes to create along the y-direction
- colorbar : {'vertical', 'horizontal', None}, optional
- Should Axes objects for colorbars be allocated, and if so, should they
- correspond to the horizontal or vertical set of axes?
- bw : number
- The base height/width of an axes object inside the figure, in inches
- dpi : number
- The dots per inch fed into the Figure instantiation
-
- Returns
- -------
- fig : :class:`matplotlib.figure.Figure`
- The figure created inside which the axes reside
- tr : list of list of :class:`matplotlib.axes.Axes` objects
- This is a list, where the inner list is along the x-axis and the outer
- is along the y-axis
- cbars : list of :class:`matplotlib.axes.Axes` objects
- Each of these is an axes onto which a colorbar can be placed.
-
- Notes
- -----
- This is a simple implementation for a common use case. Viewing the source
- can be instructure, and is encouraged to see how to generate more
- complicated or more specific sets of multiplots for your own purposes.
- """
- hf, wf = 1.0/ny, 1.0/nx
- fudge_x = fudge_y = 1.0
- if colorbar is None:
- fudge_x = fudge_y = 1.0
- elif colorbar.lower() == 'vertical':
- fudge_x = nx/(cbar_padding+nx)
- fudge_y = 1.0
- elif colorbar.lower() == 'horizontal':
- fudge_x = 1.0
- fudge_y = ny/(cbar_padding+ny)
- fig = figure.Figure((bw*nx/fudge_x, bw*ny/fudge_y), dpi=dpi)
- from _mpl_imports import FigureCanvasAgg
- fig.set_canvas(FigureCanvasAgg(fig))
- fig.subplots_adjust(wspace=0.0, hspace=0.0,
- top=1.0, bottom=0.0,
- left=0.0, right=1.0)
- tr = []
- print fudge_x, fudge_y
- for j in range(ny):
- tr.append([])
- for i in range(nx):
- left = i*wf*fudge_x
- bottom = fudge_y*(1.0-(j+1)*hf) + (1.0-fudge_y)
- ax = fig.add_axes([left, bottom, wf*fudge_x, hf*fudge_y])
- tr[-1].append(ax)
- cbars = []
- if colorbar is None:
- pass
- elif colorbar.lower() == 'horizontal':
- for i in range(nx):
- # left, bottom, width, height
- # Here we want 0.10 on each side of the colorbar
- # We want it to be 0.05 tall
- # And we want a buffer of 0.15
- ax = fig.add_axes([wf*(i+0.10)*fudge_x, hf*fudge_y*0.20,
- wf*(1-0.20)*fudge_x, hf*fudge_y*0.05])
- cbars.append(ax)
- elif colorbar.lower() == 'vertical':
- for j in range(ny):
- ax = fig.add_axes([wf*(nx+0.05)*fudge_x, hf*fudge_y*(ny-(j+0.95)),
- wf*fudge_x*0.05, hf*fudge_y*0.90])
- ax.clear()
- cbars.append(ax)
- return fig, tr, cbars
-
def _MPLFixImage(data_source, image_obj, field, cbar, cls):
nx, ny = image_obj.get_size()
def f(axes):
diff -r 72aa625e39ec63eb1c10b9dc911e782521ff62b4 -r a12f2aecefcdc81d8113ee6210ec0eea3c838726 yt/visualization/plot_window.py
--- a/yt/visualization/plot_window.py
+++ b/yt/visualization/plot_window.py
@@ -290,7 +290,8 @@
_vector_info = None
_frb = None
def __init__(self, data_source, bounds, buff_size=(800,800), antialias=True,
- periodic=True, origin='center-window', oblique=False, window_size=10.0):
+ periodic=True, origin='center-window', oblique=False,
+ window_size=10.0, fields=None):
if not hasattr(self, "pf"):
self.pf = data_source.pf
ts = self._initialize_dataset(self.pf)
@@ -304,6 +305,12 @@
self.buff_size = buff_size
self.window_size = window_size
self.antialias = antialias
+ skip = list(FixedResolutionBuffer._exclude_fields) + data_source._key_fields
+ if fields is None:
+ fields = []
+ else:
+ fields = ensure_list(fields)
+ self.override_fields = list(np.intersect1d(fields, skip))
self.set_window(bounds) # this automatically updates the data and plot
self.origin = origin
if self.data_source.center is not None and oblique == False:
@@ -359,6 +366,8 @@
self._frb._get_data_source_fields()
else:
for key in old_fields: self._frb[key]
+ for key in self.override_fields:
+ self._frb[key]
self._data_valid = True
def _setup_plots(self):
@@ -366,7 +375,7 @@
@property
def fields(self):
- return self._frb.data.keys()
+ return self._frb.data.keys() + self.override_fields
@property
def width(self):
@@ -1274,7 +1283,8 @@
axes_unit = units
if field_parameters is None: field_parameters = {}
slc = pf.h.slice(axis, center[axis], center=center, fields=fields, **field_parameters)
- PWViewerMPL.__init__(self, slc, bounds, origin=origin, fontsize=fontsize)
+ PWViewerMPL.__init__(self, slc, bounds, origin=origin,
+ fontsize=fontsize, fields=fields)
self.set_axes_unit(axes_unit)
class ProjectionPlot(PWViewerMPL):
@@ -1391,7 +1401,8 @@
if field_parameters is None: field_parameters = {}
proj = pf.h.proj(axis, fields, weight_field=weight_field, max_level=max_level,
center=center, source=data_source, **field_parameters)
- PWViewerMPL.__init__(self, proj, bounds, origin=origin, fontsize=fontsize)
+ PWViewerMPL.__init__(self, proj, bounds, origin=origin,
+ fontsize=fontsize, fields=fields)
self.set_axes_unit(axes_unit)
class OffAxisSlicePlot(PWViewerMPL):
@@ -1450,8 +1461,9 @@
cutting = pf.h.cutting(normal, center, fields=fields, north_vector=north_vector, **field_parameters)
# Hard-coding the origin keyword since the other two options
# aren't well-defined for off-axis data objects
- PWViewerMPL.__init__(self, cutting, bounds, origin='center-window', periodic=False,
- oblique=True, fontsize=fontsize)
+ PWViewerMPL.__init__(self, cutting, bounds, origin='center-window',
+ periodic=False, oblique=True, fontsize=fontsize,
+ fields=fields)
self.set_axes_unit(axes_unit)
class OffAxisProjectionDummyDataSource(object):
diff -r 72aa625e39ec63eb1c10b9dc911e782521ff62b4 -r a12f2aecefcdc81d8113ee6210ec0eea3c838726 yt/visualization/tests/test_plotwindow.py
--- a/yt/visualization/tests/test_plotwindow.py
+++ b/yt/visualization/tests/test_plotwindow.py
@@ -94,8 +94,10 @@
for ax in 'xyz':
for attr_name in ATTR_ARGS.keys():
for args in ATTR_ARGS[attr_name]:
- yield PlotWindowAttributeTest(pf, plot_field, ax, attr_name,
- args, decimals)
+ test = PlotWindowAttributeTest(pf, plot_field, ax, attr_name,
+ args, decimals)
+ test_attributes.__name__ = test.description
+ yield test
@requires_pf(WT)
diff -r 72aa625e39ec63eb1c10b9dc911e782521ff62b4 -r a12f2aecefcdc81d8113ee6210ec0eea3c838726 yt/visualization/volume_rendering/camera.py
--- a/yt/visualization/volume_rendering/camera.py
+++ b/yt/visualization/volume_rendering/camera.py
@@ -72,6 +72,9 @@
cubical, but if not, it is left/right, top/bottom, front/back.
resolution : int or list of ints
The number of pixels in each direction.
+ transfer_function : `yt.visualization.volume_rendering.TransferFunction`
+ The transfer function used to map values to colors in an image. If
+ not specified, defaults to a ProjectionTransferFunction.
north_vector : array_like, optional
The 'up' direction for the plane of rays. If not specified, calculated
automatically.
@@ -184,7 +187,7 @@
_tf_figure = None
_render_figure = None
def __init__(self, center, normal_vector, width,
- resolution, transfer_function,
+ resolution, transfer_function = None,
north_vector = None, steady_north=False,
volume = None, fields = None,
log_fields = None,
@@ -1465,7 +1468,7 @@
class MosaicCamera(Camera):
def __init__(self, center, normal_vector, width,
- resolution, transfer_function,
+ resolution, transfer_function = None,
north_vector = None, steady_north=False,
volume = None, fields = None,
log_fields = None,
diff -r 72aa625e39ec63eb1c10b9dc911e782521ff62b4 -r a12f2aecefcdc81d8113ee6210ec0eea3c838726 yt/visualization/volume_rendering/multi_texture.py
--- a/yt/visualization/volume_rendering/multi_texture.py
+++ b/yt/visualization/volume_rendering/multi_texture.py
@@ -35,6 +35,14 @@
I hope this helps,
Almar
"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
from yt.mods import *
from yt.funcs import *
@@ -300,14 +308,3 @@
ax.Draw()
return mtex, ax
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
diff -r 72aa625e39ec63eb1c10b9dc911e782521ff62b4 -r a12f2aecefcdc81d8113ee6210ec0eea3c838726 yt/visualization/volume_rendering/transfer_function_helper.py
--- /dev/null
+++ b/yt/visualization/volume_rendering/transfer_function_helper.py
@@ -0,0 +1,211 @@
+"""
+A helper class to build, display, and modify transfer functions for volume
+rendering.
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.funcs import mylog
+from yt.data_objects.profiles import BinnedProfile1D
+from yt.visualization.volume_rendering.api import ColorTransferFunction
+from yt.visualization._mpl_imports import FigureCanvasAgg
+from matplotlib.figure import Figure
+from IPython.core.display import Image
+import cStringIO
+import numpy as np
+
+
+class TransferFunctionHelper(object):
+
+ profiles = None
+
+ def __init__(self, pf):
+ r"""A transfer function helper.
+
+ This attempts to help set up a good transfer function by finding
+ bounds, handling linear/log options, and displaying the transfer
+ function combined with 1D profiles of rendering quantity.
+
+ Parameters
+ ----------
+ pf: A StaticOutput instance
+ A static output that is currently being rendered. This is used to
+ help set up data bounds.
+
+ Notes
+ -----
+ """
+ self.pf = pf
+ self.field = None
+ self.log = False
+ self.tf = None
+ self.bounds = None
+ self.grey_opacity = True
+ self.profiles = {}
+
+ def set_bounds(self, bounds=None):
+ """
+ Set the bounds of the transfer function.
+
+ Parameters
+ ----------
+ bounds: array-like, length 2, optional
+ A length 2 list/array in the form [min, max]. These should be the
+ raw values and not the logarithm of the min and max. If bounds is
+ None, the bounds of the data are calculated from all of the data
+ in the dataset. This can be slow for very large datasets.
+ """
+ if bounds is None:
+ bounds = self.pf.h.all_data().quantities['Extrema'](self.field)[0]
+ self.bounds = bounds
+
+ # Do some error checking.
+ assert(len(self.bounds) == 2)
+ if self.log:
+ assert(self.bounds[0] > 0.0)
+ assert(self.bounds[1] > 0.0)
+ return
+
+ def set_field(self, field):
+ """
+ Set the field to be rendered
+
+ Parameters
+ ----------
+ field: string
+ The field to be rendered.
+ """
+ self.field = field
+
+ def set_log(self, log):
+ """
+ Set whether or not the transfer function should be in log or linear
+ space. Also modifies the pf.field_info[field].take_log attribute to
+ stay in sync with this setting.
+
+ Parameters
+ ----------
+ log: boolean
+ Sets whether the transfer function should use log or linear space.
+ """
+ self.log = log
+ self.pf.h
+ self.pf.field_info[self.field].take_log = log
+
+ def build_transfer_function(self):
+ """
+ Builds the transfer function according to the current state of the
+ TransferFunctionHelper.
+
+ Parameters
+ ----------
+ None
+
+ Returns
+ -------
+
+ A ColorTransferFunction object.
+
+ """
+ if self.bounds is None:
+ mylog.info('Calculating data bounds. This may take a while.' +
+ ' Set the .bounds to avoid this.')
+ self.set_bounds()
+
+ if self.log:
+ mi, ma = np.log10(self.bounds[0]), np.log10(self.bounds[1])
+ else:
+ mi, ma = self.bounds
+ self.tf = ColorTransferFunction((mi, ma),
+ grey_opacity=self.grey_opacity,
+ nbins=512)
+ return self.tf
+
+ def plot(self, fn=None, profile_field=None, profile_weight=None):
+ """
+ Save the current transfer function to a bitmap, or display
+ it inline.
+
+ Parameters
+ ----------
+ fn: string, optional
+ Filename to save the image to. If None, the returns an image
+ to an IPython session.
+
+ Returns
+ -------
+
+ If fn is None, will return an image to an IPython notebook.
+
+ """
+ if self.tf is None:
+ self.build_transfer_function()
+ tf = self.tf
+ if self.log:
+ xfunc = np.logspace
+ xmi, xma = np.log10(self.bounds[0]), np.log10(self.bounds[1])
+ else:
+ xfunc = np.linspace
+ xmi, xma = self.bounds
+
+ x = xfunc(xmi, xma, tf.nbins)
+ y = tf.funcs[3].y
+ w = np.append(x[1:]-x[:-1], x[-1]-x[-2])
+ colors = np.array([tf.funcs[0].y, tf.funcs[1].y, tf.funcs[2].y,
+ np.ones_like(x)]).T
+
+ fig = Figure(figsize=[6, 3])
+ canvas = FigureCanvasAgg(fig)
+ ax = fig.add_axes([0.2, 0.2, 0.75, 0.75])
+ ax.bar(x, tf.funcs[3].y, w, edgecolor=[0.0, 0.0, 0.0, 0.0],
+ log=True, color=colors, bottom=[0])
+
+ if profile_field is not None:
+ try:
+ prof = self.profiles[self.field]
+ except KeyError:
+ self.setup_profile(profile_field, profile_weight)
+ prof = self.profiles[self.field]
+ if profile_field not in prof.keys():
+ prof.add_fields([profile_field], fractional=False,
+ weight=profile_weight)
+ ax.plot(prof[self.field], prof[profile_field]*tf.funcs[3].y.max() /
+ prof[profile_field].max(), color='w', linewidth=3)
+ ax.plot(prof[self.field], prof[profile_field]*tf.funcs[3].y.max() /
+ prof[profile_field].max(), color='k')
+
+ ax.set_xscale({True: 'log', False: 'linear'}[self.log])
+ ax.set_xlim(x.min(), x.max())
+ ax.set_xlabel(self.pf.field_info[self.field].get_label())
+ ax.set_ylabel(r'$\mathrm{alpha}$')
+ ax.set_ylim(y.max()*1.0e-3, y.max()*2)
+
+ if fn is None:
+ f = cStringIO.StringIO()
+ canvas.print_figure(f)
+ f.seek(0)
+ img = f.read()
+ return Image(img)
+ else:
+ fig.savefig(fn)
+
+ def setup_profile(self, profile_field=None, profile_weight=None):
+ if profile_field is None:
+ profile_field = 'CellVolume'
+ prof = BinnedProfile1D(self.pf.h.all_data(), 128, self.field,
+ self.bounds[0], self.bounds[1],
+ log_space=self.log,
+ lazy_reader=False, end_collect=False)
+ prof.add_fields([profile_field], fractional=False,
+ weight=profile_weight)
+ self.profiles[self.field] = prof
+ return
https://bitbucket.org/yt_analysis/yt/commits/0969081e02f4/
Changeset: 0969081e02f4
Branch: yt
User: MatthewTurk
Date: 2013-10-21 23:48:54
Summary: Merged in jzuhone/yt (pull request #616)
Enabling reading of Athena datasets from directories other than the one it's contained in.
Affected #: 1 file
diff -r 0d2bc17ad518127ae36708577486be87a104bb73 -r 0969081e02f4b595a6b60473d7582ab7b3f195c4 yt/frontends/athena/data_structures.py
--- a/yt/frontends/athena/data_structures.py
+++ b/yt/frontends/athena/data_structures.py
@@ -197,13 +197,18 @@
raise TypeError
# Need to determine how many grids: self.num_grids
- dname = self.hierarchy_filename
- gridlistread = glob.glob('id*/%s-id*%s' % (dname[4:-9],dname[-9:] ))
+ dataset_dir = os.path.dirname(self.hierarchy_filename)
+ dname = os.path.split(self.hierarchy_filename)[-1]
+ if dataset_dir.endswith("id0"):
+ dname = "id0/"+dname
+ dataset_dir = dataset_dir[:-3]
+
+ gridlistread = glob.glob(os.path.join(dataset_dir, 'id*/%s-id*%s' % (dname[4:-9],dname[-9:])))
gridlistread.insert(0,self.hierarchy_filename)
if 'id0' in dname :
- gridlistread += glob.glob('id*/lev*/%s*-lev*%s' % (dname[4:-9],dname[-9:]))
+ gridlistread += glob.glob(os.path.join(dataset_dir, 'id*/lev*/%s*-lev*%s' % (dname[4:-9],dname[-9:])))
else :
- gridlistread += glob.glob('lev*/%s*-lev*%s' % (dname[:-9],dname[-9:]))
+ gridlistread += glob.glob(os.path.join(dataset_dir, 'lev*/%s*-lev*%s' % (dname[:-9],dname[-9:])))
self.num_grids = len(gridlistread)
dxs=[]
self.grids = np.empty(self.num_grids, dtype='object')
@@ -426,12 +431,17 @@
else:
self.periodicity = (True,)*self.dimensionality
- dname = self.parameter_filename
- gridlistread = glob.glob('id*/%s-id*%s' % (dname[4:-9],dname[-9:] ))
+ dataset_dir = os.path.dirname(self.parameter_filename)
+ dname = os.path.split(self.parameter_filename)[-1]
+ if dataset_dir.endswith("id0"):
+ dname = "id0/"+dname
+ dataset_dir = dataset_dir[:-3]
+
+ gridlistread = glob.glob(os.path.join(dataset_dir, 'id*/%s-id*%s' % (dname[4:-9],dname[-9:])))
if 'id0' in dname :
- gridlistread += glob.glob('id*/lev*/%s*-lev*%s' % (dname[4:-9],dname[-9:]))
+ gridlistread += glob.glob(os.path.join(dataset_dir, 'id*/lev*/%s*-lev*%s' % (dname[4:-9],dname[-9:])))
else :
- gridlistread += glob.glob('lev*/%s*-lev*%s' % (dname[:-9],dname[-9:]))
+ gridlistread += glob.glob(os.path.join(dataset_dir, 'lev*/%s*-lev*%s' % (dname[:-9],dname[-9:])))
self.nvtk = len(gridlistread)+1
self.current_redshift = self.omega_lambda = self.omega_matter = \
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
More information about the yt-svn
mailing list