[Yt-svn] yt: 2 new changesets
hg at spacepope.org
Tue Nov 9 10:58:54 PST 2010
hg Repository: yt
details: yt/rev/c86a97210a3f
changeset: 3511:c86a97210a3f
user: Britton Smith <brittonsmith at gmail.com>
date: Tue Nov 09 13:58:05 2010 -0500
description:
Removed new keyword from histogram call for numpy 1.5.
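For context, a minimal sketch of the numpy API change this changeset tracks (the arrays here are illustrative; numpy is imported as na per yt convention):

    import numpy as na  # yt aliases numpy as na

    data = na.random.random(1000)
    bins = na.linspace(0.0, 1.0, 11)
    # Older numpy accepted a transitional keyword selecting the new bin
    # semantics:
    #     counts, bins = na.histogram(data, bins, new=True)
    # With numpy 1.5 the keyword is gone, so the call is simply:
    counts, bins = na.histogram(data, bins)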
hg Repository: yt
details: yt/rev/c67eed8e3e2b
changeset: 3512:c67eed8e3e2b
user: Britton Smith <brittonsmith at gmail.com>
date: Tue Nov 09 13:58:37 2010 -0500
description:
Merged.
diffstat:
doc/install_script.sh | 24 +
yt/analysis_modules/halo_finding/halo_objects.py | 95 +-
yt/analysis_modules/sunrise_export/sunrise_exporter.py | 106 +-
yt/analysis_modules/two_point_functions/two_point_functions.py | 8 +-
yt/data_objects/data_containers.py | 2 +-
yt/data_objects/profiles.py | 41 +
yt/frontends/art/data_structures.py | 46 +-
yt/frontends/art/fields.py | 56 +-
yt/frontends/art/io.py | 15 +-
yt/frontends/enzo/fields.py | 11 +-
yt/gui/tvtk_interface.py | 2 +-
yt/utilities/_amr_utils/RayIntegrators.pyx | 34 +-
yt/utilities/amr_utils.c | 7221 +++++----
yt/utilities/parallel_tools/parallel_analysis_interface.py | 309 +-
14 files changed, 4300 insertions(+), 3670 deletions(-)
diffs (truncated from 19142 to 300 lines):
diff -r 276e74af822b -r c67eed8e3e2b doc/install_script.sh
--- a/doc/install_script.sh Fri Oct 29 16:04:40 2010 -0700
+++ b/doc/install_script.sh Tue Nov 09 13:58:37 2010 -0500
@@ -37,6 +37,7 @@
INST_BZLIB=1 # On some systems, libbzip2 is missing. This can
# lead to broken mercurial installations.
INST_PNG=1 # Install a local libpng? Same things apply as with zlib.
+INST_ENZO=0 # Clone a copy of Enzo?
# If you've got YT some other place, set this to point to it.
YT_DIR=""
@@ -145,6 +146,10 @@
get_willwont ${INST_HG}
echo "be installing Mercurial"
+printf "%-15s = %s so I " "INST_ENZO" "${INST_ENZO}"
+get_willwont ${INST_ENZO}
+echo "be checking out Enzo"
+
echo
if [ -z "$HDF5_DIR" ]
@@ -410,6 +415,14 @@
touch done
cd $MY_PWD
+if [ $INST_ENZO -eq 1 ]
+then
+ echo "Cloning a copy of Enzo."
+ cd ${DEST_DIR}/src/
+ ${HG_EXEC} clone https://enzo.googlecode.com/hg/ ./enzo-hg-stable
+ cd $MY_PWD
+fi
+
echo
echo
echo "========================================================================"
@@ -418,6 +431,7 @@
echo "To run from this new installation, the a few variables need to be"
echo "prepended with the following information:"
echo
+echo "YT_DEST => $DEST_DIR"
echo "PATH => $DEST_DIR/bin/"
echo "PYTHONPATH => $DEST_DIR/lib/python2.6/site-packages/"
echo "LD_LIBRARY_PATH => $DEST_DIR/lib/"
@@ -443,6 +457,16 @@
echo "$DEST_DIR/bin/hg"
echo
fi
+if [ $INST_ENZO -eq 1 ]
+then
+ echo "Enzo has also been checked out, but not built."
+ echo
+ echo "$DEST_DIR/src/enzo-hg-stable"
+ echo
+ echo "The value of YT_DEST can be used as an HDF5 installation location."
+ echo "Questions about Enzo should be directed to the Enzo User List."
+ echo
+fi
echo
echo "For support, see one of the following websites:"
echo
diff -r 276e74af822b -r c67eed8e3e2b yt/analysis_modules/halo_finding/halo_objects.py
--- a/yt/analysis_modules/halo_finding/halo_objects.py Fri Oct 29 16:04:40 2010 -0700
+++ b/yt/analysis_modules/halo_finding/halo_objects.py Tue Nov 09 13:58:37 2010 -0500
@@ -1328,10 +1328,10 @@
HaloList.write_out(self, filename)
class GenericHaloFinder(HaloList, ParallelAnalysisInterface):
- def __init__(self, pf, dm_only=True, padding=0.0):
+ def __init__(self, pf, ds, dm_only=True, padding=0.0):
self.pf = pf
self.hierarchy = pf.h
- self.center = (pf.domain_right_edge + pf.domain_left_edge)/2.0
+ self.center = (na.array(ds.right_edge) + na.array(ds.left_edge))/2.0
def _parse_halolist(self, threshold_adjustment):
groups, max_dens, hi = [], {}, 0
@@ -1473,7 +1473,8 @@
halo.write_particle_list(f)
class parallelHF(GenericHaloFinder, parallelHOPHaloList):
- def __init__(self, pf, threshold=160, dm_only=True, resize=True, rearrange=True,\
+ def __init__(self, pf, subvolume=None, threshold=160, dm_only=True, \
+ resize=True, rearrange=True, \
fancy_padding=True, safety=1.5, premerge=True, sample=0.03):
r"""Parallel HOP halo finder.
@@ -1528,7 +1529,12 @@
>>> pf = load("RedshiftOutput0000")
>>> halos = parallelHF(pf)
"""
- GenericHaloFinder.__init__(self, pf, dm_only, padding=0.0)
+ if subvolume is not None:
+ ds_LE = na.array(subvolume.left_edge)
+ ds_RE = na.array(subvolume.right_edge)
+ self._data_source = pf.h.all_data()
+ GenericHaloFinder.__init__(self, pf, self._data_source, dm_only,
+ padding=0.0)
self.padding = 0.0
self.num_neighbors = 65
self.safety = safety
@@ -1536,13 +1542,16 @@
period = pf.domain_right_edge - pf.domain_left_edge
topbounds = na.array([[0., 0., 0.], period])
# Cut up the volume evenly initially, with no padding.
- padded, LE, RE, self._data_source = self._partition_hierarchy_3d(padding=self.padding)
+ padded, LE, RE, self._data_source = \
+ self._partition_hierarchy_3d(ds=self._data_source,
+ padding=self.padding)
# also get the total mass of particles
yt_counters("Reading Data")
- # Adaptive subregions by bisection.
+ # Adaptive subregions by bisection. We do not load balance if we are
+ # analyzing a subvolume.
ds_names = ["particle_position_x","particle_position_y","particle_position_z"]
if ytcfg.getboolean("yt","inline") == False and \
- resize and self._mpi_get_size() != 1:
+ resize and self._mpi_get_size() != 1 and subvolume is None:
random.seed(self._mpi_get_rank())
cut_list = self._partition_hierarchy_3d_bisection_list()
root_points = self._subsample_points()
@@ -1569,7 +1578,8 @@
l = pf.domain_right_edge - pf.domain_left_edge
vol = l[0] * l[1] * l[2]
full_vol = vol
- if not fancy_padding:
+ # We will use symmetric padding when a subvolume is being used.
+ if not fancy_padding or subvolume is not None:
avg_spacing = (float(vol) / data.size)**(1./3.)
# padding is a function of inter-particle spacing, this is an
# approximation, but it's OK with the safety factor
@@ -1594,7 +1604,7 @@
bin_width = base_padding
num_bins = int(math.ceil(width / bin_width))
bins = na.arange(num_bins+1, dtype='float64') * bin_width + self._data_source.left_edge[dim]
- counts, bins = na.histogram(data, bins, new=True)
+ counts, bins = na.histogram(data, bins)
# left side.
start = 0
count = counts[0]
@@ -1626,6 +1636,13 @@
total_mass = self._mpi_allsum((self._data_source["ParticleMassMsun"].astype('float64')).sum())
if not self._distributed:
self.padding = (na.zeros(3,dtype='float64'), na.zeros(3,dtype='float64'))
+ # If we're using a subvolume, we now re-divide.
+ if subvolume is not None:
+ self._data_source = pf.h.periodic_region_strict([0.]*3, ds_LE, ds_RE)
+ # Cut up the volume.
+ padded, LE, RE, self._data_source = \
+ self._partition_hierarchy_3d(ds=self._data_source,
+ padding=0.)
self.bounds = (LE, RE)
(LE_padding, RE_padding) = self.padding
parallelHOPHaloList.__init__(self, self._data_source, self.padding, \
@@ -1734,7 +1751,8 @@
class HOPHaloFinder(GenericHaloFinder, HOPHaloList):
- def __init__(self, pf, threshold=160, dm_only=True, padding=0.02):
+ def __init__(self, pf, subvolume=None, threshold=160, dm_only=True,
+ padding=0.02):
r"""HOP halo finder.
Halos are built by:
@@ -1753,6 +1771,9 @@
Parameters
----------
pf : EnzoStaticOutput object
+ subvolume : A region over which HOP will be run, which can be used
+ to restrict halo finding to a subvolume of the full domain.
+ Default = None, which uses the full volume.
threshold : float
The density threshold used when building halos. Default = 160.0.
dm_only : bool
@@ -1769,37 +1790,49 @@
>>> pf = load("RedshiftOutput0000")
>>> halos = HaloFinder(pf)
"""
- GenericHaloFinder.__init__(self, pf, dm_only, padding)
-
- # do it once with no padding so the total_mass is correct (no duplicated particles)
+ if subvolume is not None:
+ ds_LE = na.array(subvolume.left_edge)
+ ds_RE = na.array(subvolume.right_edge)
+ self._data_source = pf.h.all_data()
+ GenericHaloFinder.__init__(self, pf, self._data_source, dm_only, padding)
+ # do it once with no padding so the total_mass is correct
+ # (no duplicated particles), and on the entire volume, even if only
+ # a small part is actually going to be used.
self.padding = 0.0
- padded, LE, RE, self._data_source = self._partition_hierarchy_3d(padding=self.padding)
+ padded, LE, RE, self._data_source = \
+ self._partition_hierarchy_3d(ds = self._data_source, padding=self.padding)
# For scaling the threshold, note that it's a passthrough
if dm_only:
select = self._get_dm_indices()
- total_mass = self._mpi_allsum((self._data_source["ParticleMassMsun"][select]).sum())
+ total_mass = \
+ self._mpi_allsum((self._data_source["ParticleMassMsun"][select]).sum(dtype='float64'))
else:
- total_mass = self._mpi_allsum(self._data_source["ParticleMassMsun"].sum())
+ total_mass = self._mpi_allsum(self._data_source["ParticleMassMsun"].sum(dtype='float64'))
# MJT: Note that instead of this, if we are assuming that the particles
# are all on different processors, we should instead construct an
# object representing the entire domain and sum it "lazily" with
# Derived Quantities.
+ if subvolume is not None:
+ self._data_source = pf.h.periodic_region_strict([0.]*3, ds_LE, ds_RE)
self.padding = padding #* pf["unitary"] # This should be clevererer
- padded, LE, RE, self._data_source = self._partition_hierarchy_3d(padding=self.padding)
+ padded, LE, RE, self._data_source = \
+ self._partition_hierarchy_3d(ds = self._data_source,
+ padding=self.padding)
self.bounds = (LE, RE)
# reflect particles around the periodic boundary
#self._reposition_particles((LE, RE))
if dm_only:
select = self._get_dm_indices()
- sub_mass = self._data_source["ParticleMassMsun"][select].sum()
+ sub_mass = self._data_source["ParticleMassMsun"][select].sum(dtype='float64')
else:
- sub_mass = self._data_source["ParticleMassMsun"].sum()
- HOPHaloList.__init__(self, self._data_source, threshold*total_mass/sub_mass, dm_only)
+ sub_mass = self._data_source["ParticleMassMsun"].sum(dtype='float64')
+ HOPHaloList.__init__(self, self._data_source,
+ threshold*total_mass/sub_mass, dm_only)
self._parse_halolist(total_mass/sub_mass)
self._join_halolists()
class FOFHaloFinder(GenericHaloFinder, FOFHaloList):
- def __init__(self, pf, link=0.2, dm_only=True, padding=0.02):
+ def __init__(self, pf, subvolume=None, link=0.2, dm_only=True, padding=0.02):
r"""Friends-of-friends halo finder.
Halos are found by linking together all pairs of particles closer than
@@ -1815,6 +1848,9 @@
Parameters
----------
pf : EnzoStaticOutput object
+ subvolume : A region over which FOF will be run, which can be used
+ to restrict halo finding to a subvolume of the full domain.
+ Default = None, which uses the full volume.
link : float
The interparticle distance (compared to the overall average)
used to build the halos. Default = 0.2.
@@ -1832,19 +1868,30 @@
>>> pf = load("RedshiftOutput0000")
>>> halos = FOFHaloFinder(pf)
"""
+ if subvolume is not None:
+ ds_LE = na.array(subvolume.left_edge)
+ ds_RE = na.array(subvolume.right_edge)
self.pf = pf
self.hierarchy = pf.h
- self.center = (pf.domain_right_edge + pf.domain_left_edge)/2.0
+ self._data_source = pf.h.all_data()
+ GenericHaloFinder.__init__(self, pf, self._data_source, dm_only,
+ padding)
self.padding = 0.0 #* pf["unitary"] # This should be clevererer
# get the total number of particles across all procs, with no padding
- padded, LE, RE, self._data_source = self._partition_hierarchy_3d(padding=self.padding)
+ padded, LE, RE, self._data_source = \
+ self._partition_hierarchy_3d(ds=self._data_source,
+ padding=self.padding)
n_parts = self._mpi_allsum(self._data_source["particle_position_x"].size)
# get the average spacing between particles
l = pf.domain_right_edge - pf.domain_left_edge
vol = l[0] * l[1] * l[2]
avg_spacing = (float(vol) / n_parts)**(1./3.)
self.padding = padding
- padded, LE, RE, self._data_source = self._partition_hierarchy_3d(padding=self.padding)
+ if subvolume is not None:
+ self._data_source = pf.h.periodic_region_strict([0.]*3, ds_LE, ds_RE)
+ padded, LE, RE, self._data_source = \
+ self._partition_hierarchy_3d(ds=self._data_source,
+ padding=self.padding)
self.bounds = (LE, RE)
# reflect particles around the periodic boundary
#self._reposition_particles((LE, RE))
diff -r 276e74af822b -r c67eed8e3e2b yt/analysis_modules/sunrise_export/sunrise_exporter.py
--- a/yt/analysis_modules/sunrise_export/sunrise_exporter.py Fri Oct 29 16:04:40 2010 -0700
+++ b/yt/analysis_modules/sunrise_export/sunrise_exporter.py Tue Nov 09 13:58:37 2010 -0500
@@ -35,8 +35,10 @@
from yt.funcs import *
import yt.utilities.amr_utils as amr_utils
+from yt.data_objects.universal_fields import add_field
-def export_to_sunrise(pf, fn, write_particles = True, subregion_bounds = None):
+def export_to_sunrise(pf, fn, write_particles = True, subregion_bounds = None,
+ particle_mass=None, particle_pos=None, particle_age=None, particle_metal=None):
r"""Convert the contents of a dataset to a FITS file format that Sunrise
understands.
@@ -53,8 +55,11 @@
The parameter file to convert.
fn : string
The filename of the FITS file.
- write_particles : bool, default is True
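The recurring change across the halo_objects.py hunks above is a new optional subvolume keyword on the halo finders. A hedged usage sketch, assuming a yt 2.x session (the dataset name comes from the docstring examples; the import path and region bounds are illustrative assumptions):

    from yt.mods import *
    from yt.analysis_modules.halo_finding.api import HaloFinder

    pf = load("RedshiftOutput0000")  # dataset name from the docstrings above
    # Hypothetical subregion: a box spanning the central half of the domain.
    sv = pf.h.region([0.5, 0.5, 0.5], [0.25, 0.25, 0.25], [0.75, 0.75, 0.75])
    # New in this changeset: run HOP only on the subvolume. The total
    # particle mass is still summed over the full box, so the overdensity
    # threshold keeps its global meaning.
    halos = HaloFinder(pf, subvolume=sv, threshold=160)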