[Yt-svn] yt: 2 new changesets
hg at spacepope.org
Mon Jan 3 16:16:50 PST 2011
hg Repository: yt
details: yt/rev/65369eeb4d68
changeset: 3629:65369eeb4d68
user: J.S. Oishi <jsoishi at gmail.com>
date: Mon Jan 03 16:13:55 2011 -0800
description:
moved _parse_header_file() to _parse_parameter_file() to avoid chicken/egg problem with self.current_time during StaticOutput.__init__() call
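
As a rough illustration of the chicken/egg problem described above (the class and attribute layout below is a hypothetical sketch, not yt's actual code): the base class uses self.current_time while its __init__ is still running, so the attribute has to be set inside the _parse_parameter_file() hook that __init__ itself calls, rather than in a separately invoked _parse_header_file().

    class StaticOutputSketch(object):
        def __init__(self, filename):
            self.parameter_filename = filename
            self._parse_parameter_file()            # called during construction
            # __init__ then consumes current_time, so it must already exist:
            print("current_time = %s" % self.current_time)

        def _parse_parameter_file(self):
            raise NotImplementedError

    class FrontendSketch(StaticOutputSketch):
        def _parse_parameter_file(self):
            # Header parsing lives here (instead of a later
            # _parse_header_file() call) so current_time is defined
            # before the base-class __init__ needs it.
            self.current_time = 0.0                 # placeholder value

    ds = FrontendSketch("plotfile_0000")            # prints current_time = 0.0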
hg Repository: yt
details: yt/rev/ffb67ffd732b
changeset: 3630:ffb67ffd732b
user: J.S. Oishi <jsoishi at gmail.com>
date: Mon Jan 03 16:15:18 2011 -0800
description:
merged.
diffstat:
doc/install_script.sh | 8 +
yt/analysis_modules/halo_finding/halo_objects.py | 63 +-
yt/analysis_modules/halo_finding/hop/EnzoHop.c | 12 +-
yt/analysis_modules/halo_finding/hop/hop_hop.c | 8 +-
yt/analysis_modules/halo_finding/parallel_hop/parallel_hop_interface.py | 1 -
yt/analysis_modules/halo_merger_tree/merger_tree.py | 25 +-
yt/analysis_modules/two_point_functions/two_point_functions.py | 14 +-
yt/data_objects/data_containers.py | 5 +-
yt/data_objects/particle_io.py | 5 +-
yt/data_objects/universal_fields.py | 5 +-
yt/frontends/flash/data_structures.py | 8 +
yt/frontends/flash/fields.py | 12 +
yt/frontends/orion/data_structures.py | 2 +-
yt/utilities/_amr_utils/VolumeIntegrator.pyx | 3 -
yt/utilities/amr_kdtree/amr_kdtree.py | 12 +-
yt/utilities/amr_utils.c | 3977 +++++----
yt/utilities/kdtree/Makefile | 8 +-
yt/utilities/kdtree/fKD.f90 | 6 +-
yt/utilities/kdtree/fKD.v | 5 +-
yt/utilities/kdtree/fKD_source.f90 | 49 +-
yt/visualization/plot_collection.py | 24 +
yt/visualization/volume_rendering/camera.py | 30 +-
22 files changed, 2211 insertions(+), 2071 deletions(-)
diffs (truncated from 8056 to 300 lines):
diff -r ad6d77904932 -r ffb67ffd732b doc/install_script.sh
--- a/doc/install_script.sh Tue Dec 14 12:22:49 2010 -0800
+++ b/doc/install_script.sh Mon Jan 03 16:15:18 2011 -0800
@@ -39,6 +39,7 @@
INST_PNG=1 # Install a local libpng? Same things apply as with zlib.
INST_FTYPE=1 # Install FreeType2 locally?
INST_ENZO=0 # Clone a copy of Enzo?
+INST_FORTHON=1
# If you've got YT some other place, set this to point to it.
YT_DIR=""
@@ -151,6 +152,10 @@
get_willwont ${INST_FTYPE}
echo "be installing freetype2"
+printf "%-15s = %s so I " "INST_FORTHON" "${INST_FORTHON}"
+get_willwont ${INST_FORTHON}
+echo "be installing Forthon (for Halo Finding, etc)"
+
printf "%-15s = %s so I " "INST_HG" "${INST_HG}"
get_willwont ${INST_HG}
echo "be installing Mercurial"
@@ -251,6 +256,7 @@
get_enzotools ipython-0.10.tar.gz
get_enzotools h5py-1.2.0.tar.gz
get_enzotools Cython-0.13.tar.gz
+get_enzotools Forthon-0.8.4.tar.gz
get_enzotools yt.hg
if [ $INST_BZLIB -eq 1 ]
@@ -446,6 +452,7 @@
do_setup_py ipython-0.10
do_setup_py h5py-1.2.0
do_setup_py Cython-0.13
+[ $INST_FORTHON -eq 1 ] && do_setup_py Forthon-0.8.4
echo "Doing yt update, wiping local changes and updating to branch ${BRANCH}"
MY_PWD=`pwd`
@@ -456,6 +463,7 @@
echo $HDF5_DIR > hdf5.cfg
[ $INST_PNG -eq 1 ] && echo $PNG_DIR > png.cfg
[ $INST_FTYPE -eq 1 ] && echo $FTYPE_DIR > freetype.cfg
+[ $INST_FORTHON -eq 1 ] && ( ( cd yt/utilities/kdtree && FORTHON_EXE=${DEST_DIR}/bin/Forthon make 2>&1 ) 1>> ${LOG_FILE} )
( ${DEST_DIR}/bin/python2.6 setup.py develop 2>&1 ) 1>> ${LOG_FILE} || do_exit
touch done
cd $MY_PWD
diff -r ad6d77904932 -r ffb67ffd732b yt/analysis_modules/halo_finding/halo_objects.py
--- a/yt/analysis_modules/halo_finding/halo_objects.py Tue Dec 14 12:22:49 2010 -0800
+++ b/yt/analysis_modules/halo_finding/halo_objects.py Mon Jan 03 16:15:18 2011 -0800
@@ -1050,16 +1050,12 @@
HaloList.__init__(self, data_source, dm_only)
def _run_finder(self):
- period_x, period_y, period_z = \
- self.pf.domain_right_edge - self.pf.domain_left_edge
- print "Setting period to", period_x, period_y, period_z
self.densities, self.tags = \
- RunHOP(self.particle_fields["particle_position_x"],
- self.particle_fields["particle_position_y"],
- self.particle_fields["particle_position_z"],
- self.particle_fields["ParticleMassMsun"],
- self.threshold,
- period_x, period_y, period_z)
+ RunHOP(self.particle_fields["particle_position_x"] / self.period[0],
+ self.particle_fields["particle_position_y"] / self.period[1],
+ self.particle_fields["particle_position_z"] / self.period[2],
+ self.particle_fields["ParticleMassMsun"],
+ self.threshold)
self.particle_fields["densities"] = self.densities
self.particle_fields["tags"] = self.tags
@@ -1088,9 +1084,9 @@
def _run_finder(self):
self.tags = \
- RunFOF(self.particle_fields["particle_position_x"],
- self.particle_fields["particle_position_y"],
- self.particle_fields["particle_position_z"],
+ RunFOF(self.particle_fields["particle_position_x"] / self.period[0],
+ self.particle_fields["particle_position_y"] / self.period[1],
+ self.particle_fields["particle_position_z"] / self.period[2],
self.link)
self.densities = na.ones(self.tags.size, dtype='float64') * -1
self.particle_fields["densities"] = self.densities
@@ -1129,6 +1125,8 @@
self.total_mass = total_mass
self.rearrange = rearrange
self.period = period
+ self.old_period = period.copy()
+ self.period = na.array([1.]*3)
self._data_source = data_source
self.premerge = premerge
mylog.info("Initializing HOP")
@@ -1148,9 +1146,9 @@
self._mpi_exit_test(exit)
obj = ParallelHOPHaloFinder(self.period, self.padding,
self.num_neighbors, self.bounds,
- self.particle_fields["particle_position_x"],
- self.particle_fields["particle_position_y"],
- self.particle_fields["particle_position_z"],
+ self.particle_fields["particle_position_x"] / self.old_period[0],
+ self.particle_fields["particle_position_y"] / self.old_period[1],
+ self.particle_fields["particle_position_z"] / self.old_period[2],
self.particle_fields["particle_index"],
self.particle_fields["ParticleMassMsun"]/self.total_mass,
self.threshold, rearrange=self.rearrange, premerge=self.premerge)
@@ -1168,6 +1166,13 @@
self.Tot_M = obj.Tot_M * self.total_mass
self.max_dens_point = obj.max_dens_point
self.max_radius = obj.max_radius
+ for dd in range(3):
+ self.CoM[:, dd] *= self.old_period[dd]
+ self.max_dens_point[:, dd+1] *= self.old_period[dd]
+ # This is wrong, below, with uneven boundaries. We'll cross that bridge
+ # when we get there.
+ self.max_radius *= self.old_period[0]
+ self.period = self.old_period.copy()
# Precompute the bulk velocity in parallel.
yt_counters("Precomp bulk vel.")
self.bulk_vel = na.zeros((self.group_count, 3), dtype='float64')
@@ -1797,6 +1802,7 @@
if subvolume is not None:
ds_LE = na.array(subvolume.left_edge)
ds_RE = na.array(subvolume.right_edge)
+ self.period = pf.domain_right_edge - pf.domain_left_edge
self._data_source = pf.h.all_data()
GenericHaloFinder.__init__(self, pf, self._data_source, dm_only, padding)
# do it once with no padding so the total_mass is correct
@@ -1856,8 +1862,10 @@
to run FOF on a subvolume of the full volume. Default = None,
which defaults to the full volume automatically.
link : float
- The interparticle distance (compared to the overall average)
- used to build the halos. Default = 0.2.
+ If positive, the interparticle distance (compared to the overall
+ average) used to build the halos. If negative, this is taken to be
+ the *actual* linking length, and no other calculations will be
+ applied. Default = 0.2.
dm_only : bool
If True, only dark matter particles are used when building halos.
Default = False.
@@ -1875,6 +1883,7 @@
if subvolume is not None:
ds_LE = na.array(subvolume.left_edge)
ds_RE = na.array(subvolume.right_edge)
+ self.period = pf.domain_right_edge - pf.domain_left_edge
self.pf = pf
self.hierarchy = pf.h
self._data_source = pf.h.all_data()
@@ -1885,11 +1894,18 @@
padded, LE, RE, self._data_source = \
self._partition_hierarchy_3d(ds=self._data_source,
padding=self.padding)
- n_parts = self._mpi_allsum(self._data_source["particle_position_x"].size)
- # get the average spacing between particles
- l = pf.domain_right_edge - pf.domain_left_edge
- vol = l[0] * l[1] * l[2]
- avg_spacing = (float(vol) / n_parts)**(1./3.)
+ if link > 0.0:
+ n_parts = self._mpi_allsum(self._data_source["particle_position_x"].size)
+ # get the average spacing between particles
+ #l = pf.domain_right_edge - pf.domain_left_edge
+ #vol = l[0] * l[1] * l[2]
+ # Because we are now allowing for datasets with non 1-periodicity,
+ # but symmetric, vol is always 1.
+ vol = 1.
+ avg_spacing = (float(vol) / n_parts)**(1./3.)
+ linking_length = link * avg_spacing
+ else:
+ linking_length = na.abs(link)
self.padding = padding
if subvolume is not None:
self._data_source = pf.h.periodic_region_strict([0.]*3, ds_LE, ds_RE)
@@ -1900,7 +1916,8 @@
# reflect particles around the periodic boundary
#self._reposition_particles((LE, RE))
# here is where the FOF halo finder is run
- FOFHaloList.__init__(self, self._data_source, link * avg_spacing, dm_only)
+ mylog.info("Using a linking length of %0.3e", linking_length)
+ FOFHaloList.__init__(self, self._data_source, linking_length, dm_only)
self._parse_halolist(1.)
self._join_halolists()
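
For reference, a standalone sketch of the linking-length rule introduced in the hunk above (the function name is illustrative, not yt's API): a positive link is scaled by the mean interparticle spacing of the unit cube, while a negative link is taken as the linking length itself.

    def choose_linking_length(link, n_parts):
        # Positive: relative to the mean interparticle spacing of a unit
        # volume (positions are normalized to the unit cube elsewhere in
        # this changeset). Negative: the actual linking length.
        if link > 0.0:
            vol = 1.0
            avg_spacing = (vol / n_parts) ** (1.0 / 3.0)
            return link * avg_spacing
        return abs(link)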
diff -r ad6d77904932 -r ffb67ffd732b yt/analysis_modules/halo_finding/hop/EnzoHop.c
--- a/yt/analysis_modules/halo_finding/hop/EnzoHop.c Tue Dec 14 12:22:49 2010 -0800
+++ b/yt/analysis_modules/halo_finding/hop/EnzoHop.c Mon Jan 03 16:15:18 2011 -0800
@@ -36,8 +36,7 @@
#include "numpy/ndarrayobject.h"
void initgrouplist(Grouplist *g);
-void hop_main(KD kd, HC *my_comm, float densthres,
- float period_x, float period_y, float period_z);
+void hop_main(KD kd, HC *my_comm, float densthres);
void regroup_main(float dens_outer, HC *my_comm);
static PyObject *_HOPerror;
@@ -103,14 +102,11 @@
npy_float64 totalmass = 0.0;
float normalize_to = 1.0;
float thresh = 160.0;
- float period_x, period_y, period_z;
- period_x = period_y = period_z = 1.0;
int i;
- if (!PyArg_ParseTuple(args, "OOOO|fffff",
- &oxpos, &oypos, &ozpos, &omass, &thresh, &normalize_to,
- &period_x, &period_y, &period_z))
+ if (!PyArg_ParseTuple(args, "OOOO|ff",
+ &oxpos, &oypos, &ozpos, &omass, &thresh, &normalize_to))
return PyErr_Format(_HOPerror,
"EnzoHop: Invalid parameters.");
@@ -159,7 +155,7 @@
initgrouplist(my_comm.gl);
fprintf(stderr, "Calling hop... %d %0.3e\n",num_particles,thresh);
- hop_main(kd, &my_comm, thresh, period_x, period_y, period_z);
+ hop_main(kd, &my_comm, thresh);
fprintf(stderr, "Calling regroup...\n");
regroup_main(thresh, &my_comm);
diff -r ad6d77904932 -r ffb67ffd732b yt/analysis_modules/halo_finding/hop/hop_hop.c
--- a/yt/analysis_modules/halo_finding/hop/hop_hop.c Tue Dec 14 12:22:49 2010 -0800
+++ b/yt/analysis_modules/halo_finding/hop/hop_hop.c Mon Jan 03 16:15:18 2011 -0800
@@ -51,8 +51,7 @@
void outGroupMerge(SMX smx, HC *my_comm);
/* void main(int argc,char **argv) */
-void hop_main(KD kd, HC *my_comm, float densthres,
- float period_x, float period_y, float period_z)
+void hop_main(KD kd, HC *my_comm, float densthres)
{
/* KD kd; */
SMX smx;
@@ -78,10 +77,7 @@
inputfile = NULL;
i = 1;
/* for (j=0;j<3;++j) fPeriod[j] = HUGE; */
-/* for (j=0;j<3;++j) fPeriod[j] = 1.0; */
- fPeriod[0] = period_x;
- fPeriod[1] = period_y;
- fPeriod[2] = period_z;
+ for (j=0;j<3;++j) fPeriod[j] = 1.0;
nMerge = 4;
diff -r ad6d77904932 -r ffb67ffd732b yt/analysis_modules/halo_finding/parallel_hop/parallel_hop_interface.py
--- a/yt/analysis_modules/halo_finding/parallel_hop/parallel_hop_interface.py Tue Dec 14 12:22:49 2010 -0800
+++ b/yt/analysis_modules/halo_finding/parallel_hop/parallel_hop_interface.py Mon Jan 03 16:15:18 2011 -0800
@@ -358,7 +358,6 @@
fKD.sort = True # Slower, but needed in _connect_chains
fKD.rearrange = self.rearrange # True is faster, but uses more memory
# Now call the fortran.
- fKD.period = self.period
create_tree(0)
self.__max_memory()
yt_counters("init kd tree")
diff -r ad6d77904932 -r ffb67ffd732b yt/analysis_modules/halo_merger_tree/merger_tree.py
--- a/yt/analysis_modules/halo_merger_tree/merger_tree.py Tue Dec 14 12:22:49 2010 -0800
+++ b/yt/analysis_modules/halo_merger_tree/merger_tree.py Mon Jan 03 16:15:18 2011 -0800
@@ -164,7 +164,6 @@
self.dm_only = dm_only
self.refresh = refresh
self.sleep = sleep # How long to wait between db sync checks.
- self.period = -1
if self.sleep <= 0.:
self.sleep = 5
# MPI stuff
@@ -216,9 +215,7 @@
for cycle, file in enumerate(self.restart_files):
gc.collect()
pf = load(file)
- # get the period, only once is sufficient
- if self.period == -1:
- self.period = pf.domain_right_edge - pf.domain_left_edge
+ self.period = self.pf.domain_right_edge - self.pf.domain_left_edge
# If the halos are already found, skip this data step, unless
# refresh is True.
dir = os.path.dirname(file)
@@ -357,9 +354,12 @@
child_pf.current_redshift)
# Build the kdtree for the children by looping over the fetched rows.
+ # Normalize the points for use only within the kdtree.
child_points = []
for row in self.cursor:
- child_points.append([row[1], row[2], row[3]])
+ child_points.append([row[1] / self.period[0],
+ row[2] / self.period[1],
+ row[3] / self.period[2]])
# Turn it into fortran.
child_points = na.array(child_points)
fKD.pos = na.asfortranarray(child_points.T)
@@ -369,7 +369,6 @@
fKD.nn = 5
fKD.sort = True
fKD.rearrange = True
- fKD.period = self.period
create_tree(0)
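
The common thread in the merged changes above is that particle positions are divided by the domain period before being handed to HOP, FOF, and the Fortran kdtree (which now assume a unit period), and the derived quantities are scaled back afterward. A rough sketch of that pattern, with a hypothetical helper name rather than yt's actual API:

    import numpy as np

    def run_halo_finder_normalized(positions, period, find_halos_unit_period):
        # positions: (N, 3) particle positions in code units
        # period:    length-3 array, domain_right_edge - domain_left_edge
        # find_halos_unit_period: callable assuming a periodic unit cube
        scaled = positions / period[np.newaxis, :]      # normalize to [0, 1)
        centers_of_mass = find_halos_unit_period(scaled)
        return centers_of_mass * period[np.newaxis, :]  # rescale to code units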