[Yt-svn] yt: Reverting most of the previous changes for non-[1, 1, 1] periodicity

hg at spacepope.org
Mon Dec 20 14:44:33 PST 2010


hg Repository: yt
details:   yt/rev/f5ec91bcb1ef
changeset: 3622:f5ec91bcb1ef
user:      Stephen Skory <stephenskory at yahoo.com>
date:      Mon Dec 20 15:44:13 2010 -0700
description:
Reverting most of the previous changes for non-[1,1,1] periodicity in favor
of rescaling data only for use within the kdtrees. Data outside of the kdtrees
is untouched, and the routines that use the kdtrees rescale their results back
into the original scales when necessary.
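
In other words, the pattern is: divide positions by the domain period on the
way into the kdtree, so the tree only ever sees data in the unit cube, and
multiply derived quantities back out afterward. A minimal sketch of that
pattern, using scipy's cKDTree as a stand-in for yt's Fortran kdtree (the
function name and signature here are illustrative, not yt API):

    import numpy as np
    from scipy.spatial import cKDTree

    def nearest_neighbor_distances(positions, query_points, period):
        """positions, query_points: (N, 3) arrays; period: length-3 array
        of domain widths (domain_right_edge - domain_left_edge)."""
        period = np.asarray(period, dtype='float64')
        # Rescale into the unit cube so the tree can assume a periodicity
        # of [1, 1, 1].
        tree = cKDTree(np.asarray(positions) / period)
        dists, tags = tree.query(np.asarray(query_points) / period)
        # Rescale distances back to the original domain scale. As the
        # commit itself notes for max_radius, this is only correct for a
        # cubic (symmetric) domain.
        return dists * period[0], tags

The halo finder hunks below follow the same shape: particle positions are
divided by the period before RunHOP/RunFOF, and CoM, max_dens_point, and
max_radius are multiplied back by old_period on the way out.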

diffstat:

 yt/analysis_modules/halo_finding/halo_objects.py                        |  45 +++++---
 yt/analysis_modules/halo_finding/hop/EnzoHop.c                          |  12 +-
 yt/analysis_modules/halo_finding/hop/hop_hop.c                          |   8 +-
 yt/analysis_modules/halo_finding/parallel_hop/parallel_hop_interface.py |   1 -
 yt/analysis_modules/halo_merger_tree/merger_tree.py                     |  23 ++--
 yt/analysis_modules/two_point_functions/two_point_functions.py          |  14 +-
 yt/frontends/flash/data_structures.py                                   |   8 +
 yt/frontends/flash/fields.py                                            |  12 ++
 yt/utilities/kdtree/fKD.f90                                             |   6 +-
 yt/utilities/kdtree/fKD.v                                               |   5 +-
 yt/utilities/kdtree/fKD_source.f90                                      |  49 +++------
 11 files changed, 98 insertions(+), 85 deletions(-)

diffs (truncated from 619 to 300 lines):

diff -r 82d276c572eb -r f5ec91bcb1ef yt/analysis_modules/halo_finding/halo_objects.py
--- a/yt/analysis_modules/halo_finding/halo_objects.py	Fri Dec 17 14:52:44 2010 -0800
+++ b/yt/analysis_modules/halo_finding/halo_objects.py	Mon Dec 20 15:44:13 2010 -0700
@@ -811,6 +811,7 @@
         self.particle_fields = {}
         for field in self._fields:
             if ytcfg.getboolean("yt","inline") == False:
+                print "FIELD", field
                 tot_part = self._data_source[field].size
                 if field == "particle_index":
                     self.particle_fields[field] = self._data_source[field][ii].astype('int64')
@@ -1050,16 +1051,12 @@
         HaloList.__init__(self, data_source, dm_only)
 
     def _run_finder(self):
-        period_x, period_y, period_z = \
-            self.pf.domain_right_edge - self.pf.domain_left_edge
-        print "Setting period to", period_x, period_y, period_z
         self.densities, self.tags = \
-            RunHOP(self.particle_fields["particle_position_x"],
-                   self.particle_fields["particle_position_y"],
-                   self.particle_fields["particle_position_z"],
-                   self.particle_fields["ParticleMassMsun"],
-                   self.threshold,
-                   period_x, period_y, period_z)
+            RunHOP(self.particle_fields["particle_position_x"] / self.period[0],
+                self.particle_fields["particle_position_y"] / self.period[1],
+                self.particle_fields["particle_position_z"] / self.period[2],
+                self.particle_fields["ParticleMassMsun"],
+                self.threshold)
         self.particle_fields["densities"] = self.densities
         self.particle_fields["tags"] = self.tags
 
@@ -1088,9 +1085,9 @@
 
     def _run_finder(self):
         self.tags = \
-            RunFOF(self.particle_fields["particle_position_x"],
-                   self.particle_fields["particle_position_y"],
-                   self.particle_fields["particle_position_z"],
+            RunFOF(self.particle_fields["particle_position_x"] / self.period[0],
+                   self.particle_fields["particle_position_y"] / self.period[1],
+                   self.particle_fields["particle_position_z"] / self.period[2],
                    self.link)
         self.densities = na.ones(self.tags.size, dtype='float64') * -1
         self.particle_fields["densities"] = self.densities
@@ -1129,6 +1126,8 @@
         self.total_mass = total_mass
         self.rearrange = rearrange
         self.period = period
+        self.old_period = period.copy()
+        self.period = na.array([1.]*3)
         self._data_source = data_source
         self.premerge = premerge
         mylog.info("Initializing HOP")
@@ -1148,9 +1147,9 @@
         self._mpi_exit_test(exit)
         obj = ParallelHOPHaloFinder(self.period, self.padding,
             self.num_neighbors, self.bounds,
-            self.particle_fields["particle_position_x"],
-            self.particle_fields["particle_position_y"],
-            self.particle_fields["particle_position_z"],
+            self.particle_fields["particle_position_x"] / self.old_period[0],
+            self.particle_fields["particle_position_y"] / self.old_period[1],
+            self.particle_fields["particle_position_z"] / self.old_period[2],
             self.particle_fields["particle_index"],
             self.particle_fields["ParticleMassMsun"]/self.total_mass,
             self.threshold, rearrange=self.rearrange, premerge=self.premerge)
@@ -1168,6 +1167,13 @@
         self.Tot_M = obj.Tot_M * self.total_mass
         self.max_dens_point = obj.max_dens_point
         self.max_radius = obj.max_radius
+        for dd in range(3):
+            self.CoM[:, dd] *= self.old_period[dd]
+            self.max_dens_point[:, dd+1] *= self.old_period[dd]
+        # This is wrong, below, with uneven boundaries. We'll cross that bridge
+        # when we get there.
+        self.max_radius *= self.old_period[0]
+        self.period = self.old_period.copy()
         # Precompute the bulk velocity in parallel.
         yt_counters("Precomp bulk vel.")
         self.bulk_vel = na.zeros((self.group_count, 3), dtype='float64')
@@ -1797,6 +1803,7 @@
         if subvolume is not None:
             ds_LE = na.array(subvolume.left_edge)
             ds_RE = na.array(subvolume.right_edge)
+        self.period = pf.domain_right_edge - pf.domain_left_edge
         self._data_source = pf.h.all_data()
         GenericHaloFinder.__init__(self, pf, self._data_source, dm_only, padding)
         # do it once with no padding so the total_mass is correct
@@ -1875,6 +1882,7 @@
         if subvolume is not None:
             ds_LE = na.array(subvolume.left_edge)
             ds_RE = na.array(subvolume.right_edge)
+        self.period = pf.domain_right_edge - pf.domain_left_edge
         self.pf = pf
         self.hierarchy = pf.h
         self._data_source = pf.h.all_data()
@@ -1887,8 +1895,11 @@
             padding=self.padding)
         n_parts = self._mpi_allsum(self._data_source["particle_position_x"].size)
         # get the average spacing between particles
-        l = pf.domain_right_edge - pf.domain_left_edge
-        vol = l[0] * l[1] * l[2]
+        #l = pf.domain_right_edge - pf.domain_left_edge
+        #vol = l[0] * l[1] * l[2]
+        # Because we are now allowing for datasets with non 1-periodicity,
+        # but symmetric, vol is always 1.
+        vol = 1.
         avg_spacing = (float(vol) / n_parts)**(1./3.)
         self.padding = padding
         if subvolume is not None:
diff -r 82d276c572eb -r f5ec91bcb1ef yt/analysis_modules/halo_finding/hop/EnzoHop.c
--- a/yt/analysis_modules/halo_finding/hop/EnzoHop.c	Fri Dec 17 14:52:44 2010 -0800
+++ b/yt/analysis_modules/halo_finding/hop/EnzoHop.c	Mon Dec 20 15:44:13 2010 -0700
@@ -36,8 +36,7 @@
 #include "numpy/ndarrayobject.h"
 
 void initgrouplist(Grouplist *g);
-void hop_main(KD kd, HC *my_comm, float densthres,
-        float period_x, float period_y, float period_z);
+void hop_main(KD kd, HC *my_comm, float densthres);
 void regroup_main(float dens_outer, HC *my_comm);
 static PyObject *_HOPerror;
 
@@ -103,14 +102,11 @@
     npy_float64 totalmass = 0.0;
     float normalize_to = 1.0;
     float thresh = 160.0;
-    float period_x, period_y, period_z;
-    period_x = period_y = period_z = 1.0;
 
     int i;
 
-    if (!PyArg_ParseTuple(args, "OOOO|fffff",
-        &oxpos, &oypos, &ozpos, &omass, &thresh, &normalize_to,
-        &period_x, &period_y, &period_z))
+    if (!PyArg_ParseTuple(args, "OOOO|ff",
+        &oxpos, &oypos, &ozpos, &omass, &thresh, &normalize_to))
     return PyErr_Format(_HOPerror,
             "EnzoHop: Invalid parameters.");
 
@@ -159,7 +155,7 @@
     initgrouplist(my_comm.gl);
 
     fprintf(stderr, "Calling hop... %d %0.3e\n",num_particles,thresh);
-    hop_main(kd, &my_comm, thresh, period_x, period_y, period_z);
+    hop_main(kd, &my_comm, thresh);
 
     fprintf(stderr, "Calling regroup...\n");
     regroup_main(thresh, &my_comm);
diff -r 82d276c572eb -r f5ec91bcb1ef yt/analysis_modules/halo_finding/hop/hop_hop.c
--- a/yt/analysis_modules/halo_finding/hop/hop_hop.c	Fri Dec 17 14:52:44 2010 -0800
+++ b/yt/analysis_modules/halo_finding/hop/hop_hop.c	Mon Dec 20 15:44:13 2010 -0700
@@ -51,8 +51,7 @@
 void outGroupMerge(SMX smx, HC *my_comm);
 
 /* void main(int argc,char **argv) */
-void hop_main(KD kd, HC *my_comm, float densthres, 
-              float period_x, float period_y, float period_z)
+void hop_main(KD kd, HC *my_comm, float densthres)
 {
   /*	KD kd; */
 	SMX smx;
@@ -78,10 +77,7 @@
 	inputfile = NULL;
 	i = 1;
 /*	for (j=0;j<3;++j) fPeriod[j] = HUGE; */
-/*	for (j=0;j<3;++j) fPeriod[j] = 1.0; */
-    fPeriod[0] = period_x;
-    fPeriod[1] = period_y;
-    fPeriod[2] = period_z;
+	for (j=0;j<3;++j) fPeriod[j] = 1.0;
 	nMerge = 4;
  
  
diff -r 82d276c572eb -r f5ec91bcb1ef yt/analysis_modules/halo_finding/parallel_hop/parallel_hop_interface.py
--- a/yt/analysis_modules/halo_finding/parallel_hop/parallel_hop_interface.py	Fri Dec 17 14:52:44 2010 -0800
+++ b/yt/analysis_modules/halo_finding/parallel_hop/parallel_hop_interface.py	Mon Dec 20 15:44:13 2010 -0700
@@ -358,7 +358,6 @@
         fKD.sort = True # Slower, but needed in _connect_chains
         fKD.rearrange = self.rearrange # True is faster, but uses more memory
         # Now call the fortran.
-        fKD.period = self.period
         create_tree(0)
         self.__max_memory()
         yt_counters("init kd tree")
diff -r 82d276c572eb -r f5ec91bcb1ef yt/analysis_modules/halo_merger_tree/merger_tree.py
--- a/yt/analysis_modules/halo_merger_tree/merger_tree.py	Fri Dec 17 14:52:44 2010 -0800
+++ b/yt/analysis_modules/halo_merger_tree/merger_tree.py	Mon Dec 20 15:44:13 2010 -0700
@@ -215,8 +215,7 @@
         for cycle, file in enumerate(self.restart_files):
             gc.collect()
             pf = load(file)
-            # This will be fetched many times, but it's not a problem.
-            self.period = pf.domain_right_edge - pf.domain_left_edge
+            self.period = self.pf.domain_right_edge - self.pf.domain_left_edge
             # If the halos are already found, skip this data step, unless
             # refresh is True.
             dir = os.path.dirname(file)
@@ -355,9 +354,12 @@
             child_pf.current_redshift)
         
         # Build the kdtree for the children by looping over the fetched rows.
+        # Normalize the points for use only within the kdtree.
         child_points = []
         for row in self.cursor:
-            child_points.append([row[1], row[2], row[3]])
+            child_points.append([row[1] / self.period[0],
+            row[2] / self.period[1],
+            row[3] / self.period[2]])
         # Turn it into fortran.
         child_points = na.array(child_points)
         fKD.pos = na.asfortranarray(child_points.T)
@@ -367,7 +369,6 @@
         fKD.nn = 5
         fKD.sort = True
         fKD.rearrange = True
-        fKD.period = self.period
         create_tree(0)
 
         # Find the parent points from the database.
@@ -381,17 +382,19 @@
         # parents.
         candidates = {}
         for row in self.cursor:
-            fKD.qv = na.array([row[1], row[2], row[3]])
+            # Normalize positions for use within the kdtree.
+            fKD.qv = na.array([row[1] / self.period[0],
+            row[2] / self.period[1],
+            row[3] / self.period[2]])
             find_nn_nearest_neighbors()
             NNtags = fKD.tags[:] - 1
             nIDs = []
             for n in NNtags:
                 nIDs.append(n)
-            if len(nIDs) < 5:
-                # We need to fill in fake halos if there aren't enough halos,
-                # which can happen at high redshifts.
-                while len(nIDs) < 5:
-                    nIDs.append(-1)
+            # We need to fill in fake halos if there aren't enough halos,
+            # which can happen at high redshifts.
+            while len(nIDs) < 5:
+                nIDs.append(-1)
             candidates[row[0]] = nIDs
         
         del fKD.pos, fKD.tags, fKD.dist
diff -r 82d276c572eb -r f5ec91bcb1ef yt/analysis_modules/two_point_functions/two_point_functions.py
--- a/yt/analysis_modules/two_point_functions/two_point_functions.py	Fri Dec 17 14:52:44 2010 -0800
+++ b/yt/analysis_modules/two_point_functions/two_point_functions.py	Mon Dec 20 15:44:13 2010 -0700
@@ -29,7 +29,7 @@
 from yt.utilities.parallel_tools.parallel_analysis_interface import ParallelAnalysisInterface, parallel_blocking_call, parallel_root_only
 
 try:
-    from yt.utilities.kdtree import *
+    from yt.extensions.kdtree import *
 except ImportError:
     mylog.debug("The Fortran kD-Tree did not import correctly.")
 
@@ -300,13 +300,13 @@
         yp = self.ds["y"]
         zp = self.ds["z"]
         fKD.pos = na.asfortranarray(na.empty((3,xp.size), dtype='float64'))
-        fKD.pos[0, :] = xp[:]
-        fKD.pos[1, :] = yp[:]
-        fKD.pos[2, :] = zp[:]
+        # Normalize the grid points only within the kdtree.
+        fKD.pos[0, :] = xp[:] / self.period[0]
+        fKD.pos[1, :] = yp[:] / self.period[1]
+        fKD.pos[2, :] = zp[:] / self.period[2]
         fKD.nn = 1
         fKD.sort = False
         fKD.rearrange = True
-        fKD.period = self.period
         create_tree(0)
 
     def _build_sort_array(self):
@@ -483,6 +483,10 @@
             n += self.sizes[2] * (self.sizes[1] * pos[:,1]).astype('int32')
             n += self.sizes[2] * self.sizes[1] * (self.sizes[0] * pos[:,0]).astype('int32')
         else:
+            # Normalize the points to a 1-period for use only within the kdtree.
+            points[:, 0] = points[:, 0] / self.period[0]
+            points[:, 1] = points[:, 1] / self.period[1]
+            points[:, 2] = points[:, 2] / self.period[2]
             fKD.qv_many = points.T
             fKD.nn_tags = na.asfortranarray(na.empty((1, points.shape[0]), dtype='int64'))
             find_many_nn_nearest_neighbors()
diff -r 82d276c572eb -r f5ec91bcb1ef yt/frontends/flash/data_structures.py
--- a/yt/frontends/flash/data_structures.py	Fri Dec 17 14:52:44 2010 -0800
+++ b/yt/frontends/flash/data_structures.py	Mon Dec 20 15:44:13 2010 -0700
@@ -235,6 +235,11 @@
         self.conversion_factors['velx'] = (1.0 + self.current_redshift)
         self.conversion_factors['vely'] = self.conversion_factors['velx']
         self.conversion_factors['velz'] = self.conversion_factors['velx']
+        self.conversion_factors['particle_velx'] = (1.0 + self.current_redshift)
+        self.conversion_factors['particle_vely'] = \
+            self.conversion_factors['particle_velx']
+        self.conversion_factors['particle_velz'] = \


