[Yt-svn] yt-commit r873 - trunk/yt/lagos/hop
sskory at wrangler.dreamhost.com
Tue Nov 4 13:32:22 PST 2008
Author: sskory
Date: Tue Nov 4 13:32:21 2008
New Revision: 873
URL: http://yt.spacepope.org/changeset/873
Log:
Adding the semi-parallel HOP machinery. Even though this shouldn't be final, it works and is worthy of being added, then eventually deleted in favor of better code.
Modified:
trunk/yt/lagos/hop/HopOutput.py
Modified: trunk/yt/lagos/hop/HopOutput.py
==============================================================================
--- trunk/yt/lagos/hop/HopOutput.py (original)
+++ trunk/yt/lagos/hop/HopOutput.py Tue Nov 4 13:32:21 2008
@@ -24,6 +24,7 @@
"""
from yt.lagos.hop import *
+from numpy import *
class HopList(object):
def __init__(self, data_source, threshold=160.0,
@@ -37,12 +38,20 @@
         self.dm_only = dm_only
         self.threshold = threshold
         self._groups = []
+        self._groups2 = {}
         self._max_dens = {}
+        self._max_dens2 = {}
         mylog.info("Initializing HOP")
         self.__obtain_particles()
+        #self.__enlarge_data()
+        #self.__slice_data()
+        #self.__run_hops()
+        #self.__reduce_data()
         self.__run_hop()
         mylog.info("Parsing outputs")
+        #self.__parse_outputs()
         self.__parse_output()
+        #self.__glue_outputs()
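+        # the commented-out calls above sketch the semi-parallel path: mirror the
+        # volume, cut it into padded sub-boxes, run HOP on each, restore the true
+        # coordinates, and glue the per-box groups back together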
         mylog.debug("Finished. (%s)", len(self))
     def __obtain_particles(self):
@@ -55,6 +64,115 @@
             self.particle_fields[field] = self.data_source[field][ii]
         self._base_indices = na.arange(tot_part)[ii]
+    def __enlarge_data(self):
+        sizetemp = self.particle_fields["particle_position_x"].size
+        self.tempx = [i for i in range(3*sizetemp)]
+        self.tempy = [i for i in range(3*sizetemp)]
+        self.tempz = [i for i in range(3*sizetemp)]
+        self.tempm = [i for i in range(3*sizetemp)]
+        self.tracking = [i for i in range(3*sizetemp)]
+        # reverse below 0, straight up between 0 and 1, reverse above 1
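+        # (each particle thus appears three times: reflected through 0, unchanged,
+        # and reflected through 1; self.tracking records the original particle
+        # index of every copy, since enumerate yields (index, value) as (part, i))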
+        for part,i in enumerate(self.particle_fields["particle_position_x"]):
+            self.tempx[sizetemp - 1 - part] = -i
+            self.tempx[sizetemp + part] = i
+            self.tempx[3*sizetemp - 1 - part] = 2 - i
+            self.tracking[sizetemp - 1 - part] = part
+            self.tracking[sizetemp + part] = part
+            self.tracking[3*sizetemp - 1 - part] = part
+        for part,i in enumerate(self.particle_fields["particle_position_y"]):
+            self.tempy[sizetemp - 1 - part] = -i
+            self.tempy[sizetemp + part] = i
+            self.tempy[3*sizetemp - 1 - part] = 2 - i
+        for part,i in enumerate(self.particle_fields["particle_position_z"]):
+            self.tempz[sizetemp - 1 - part] = -i
+            self.tempz[sizetemp + part] = i
+            self.tempz[3*sizetemp - 1 - part] = 2 - i
+        for part,i in enumerate(self.particle_fields["ParticleMassMsun"]):
+            self.tempm[sizetemp - 1 - part] = i
+            self.tempm[sizetemp + part] = i
+            self.tempm[3*sizetemp - 1 - part] = i
+
+    def __slice_data(self):
+        cuts = 2
+        padding = 0.2
+        self.temp2x = {}
+        self.temp2y = {}
+        self.temp2z = {}
+        self.temp2m = {}
+        self.tracking2 = {}
+        # loop over the sub-boxes
+        for i in range(cuts):
+            for j in range(cuts):
+                for k in range(cuts):
+                    x_t = []
+                    y_t = []
+                    z_t = []
+                    m_t = []
+                    t_t = {}
+                    # define the edges
+                    xleft = (1.0 * i)/cuts - padding
+                    xright = (1.0 * (i+1))/cuts + padding
+                    yleft = (1.0 * j)/cuts - padding
+                    yright = (1.0 * (j+1))/cuts + padding
+                    zleft = (1.0 * k)/cuts - padding
+                    zright = (1.0 * (k+1))/cuts + padding
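+                    # (with cuts=2 and padding=0.2 each padded sub-box spans 0.9 per
+                    # axis, e.g. sub-box (0,0,0) covers [-0.2, 0.7); the flat key
+                    # ((i*cuts) + j)*cuts + k maps (i,j,k) onto 0..cuts**3-1)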
+                    # loop over the temporary expanded fields, test whether each
+                    # particle is in the sub-box, and then re-map to a unit-sized
+                    # box; boundaries are inclusive on the left, exclusive on the right
+                    jj = 0
+                    for part,ii in enumerate(self.tempx):
+                        if ((ii >= xleft) and (ii < xright) and (self.tempy[part] >= yleft) and \
+                            (self.tempy[part] < yright) and (self.tempz[part] >= zleft) and \
+                            (self.tempz[part] < zright)):
+                            x_t.append((ii-xleft)*(1.0/(xright-xleft)))
+                            y_t.append((self.tempy[part]-yleft)*(1.0/(yright-yleft)))
+                            z_t.append((self.tempz[part]-zleft)*(1.0/(zright-zleft)))
+                            m_t.append(self.tempm[part])
+                            t_t[jj] = self.tracking[part] # put real ID in t_t at jj
+                            jj += 1
+                    # save them to their position in the dict
+                    self.temp2x[((i*cuts) + j)*cuts + k] = array(x_t)
+                    self.temp2y[((i*cuts) + j)*cuts + k] = array(y_t)
+                    self.temp2z[((i*cuts) + j)*cuts + k] = array(z_t)
+                    self.temp2m[((i*cuts) + j)*cuts + k] = array(m_t)
+                    self.tracking2[((i*cuts) + j)*cuts + k] = t_t
+
+    def __reduce_data(self):
+        cuts = 2
+        padding = 0.2
+        # loop over the sub-boxes
+        for i in range(cuts):
+            for j in range(cuts):
+                for k in range(cuts):
+                    xleft = (1.0 * i)/cuts - padding
+                    xright = (1.0 * (i+1))/cuts + padding
+                    yleft = (1.0 * j)/cuts - padding
+                    yright = (1.0 * (j+1))/cuts + padding
+                    zleft = (1.0 * k)/cuts - padding
+                    zright = (1.0 * (k+1))/cuts + padding
+                    # we're going to reduce the values of the particles to their true
+                    # position, with some going negative, of course
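+                    # (this inverts the __slice_data remap x -> (x - xleft)/(xright - xleft),
+                    # so mirrored copies land back outside [0, 1))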
+                    self.temp2x[((i*cuts) + j)*cuts + k] = self.temp2x[((i*cuts) + j)*cuts + k]*(xright-xleft)+xleft
+                    self.temp2y[((i*cuts) + j)*cuts + k] = self.temp2y[((i*cuts) + j)*cuts + k]*(yright-yleft)+yleft
+                    self.temp2z[((i*cuts) + j)*cuts + k] = self.temp2z[((i*cuts) + j)*cuts + k]*(zright-zleft)+zleft
+
+    def __run_hops(self):
+        cuts = 2
+        padding = 0.2
+        dens_fix = 1 / (2*padding + 1./cuts)**3
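+        # stretching a padded sub-box of side (2*padding + 1./cuts) onto the unit
+        # cube dilutes the mass density by that side length cubed; scaling the
+        # masses by dens_fix lets HOP see the true densities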
+        self.densities2 = {}
+        self.tags2 = {}
+        for i in range(cuts):
+            for j in range(cuts):
+                for k in range(cuts):
+                    print "run_hops %d" % (((i*cuts) + j)*cuts + k)
+                    self.densities2[((i*cuts) + j)*cuts + k], self.tags2[((i*cuts) + j)*cuts + k] = \
+                        RunHOP(self.temp2x[((i*cuts) + j)*cuts + k],
+                               self.temp2y[((i*cuts) + j)*cuts + k],
+                               self.temp2z[((i*cuts) + j)*cuts + k],
+                               self.temp2m[((i*cuts) + j)*cuts + k]*dens_fix,
+                               self.threshold)
+
     def __run_hop(self):
         self.densities, self.tags = \
             RunHOP(self.particle_fields["particle_position_x"],
@@ -64,6 +182,7 @@
                    self.threshold)
         self.particle_fields["densities"] = self.densities
         self.particle_fields["tags"] = self.tags
+        print 'tags %d' % (self.tags.size)
     def __get_dm_indices(self):
         if 'creation_time' in self.data_source.hierarchy.field_list:
@@ -76,6 +195,59 @@
             mylog.warning("No particle_type, no creation_time, so not distinguishing.")
             return slice(None)
+    def __parse_outputs(self):
+        cuts = 2
+        for i in range(cuts):
+            for j in range(cuts):
+                for k in range(cuts):
+                    self._groups2[((i*cuts) + j)*cuts + k] = []
+                    self._max_dens2[((i*cuts) + j)*cuts + k] = []
+                    unique_ids = na.unique(self.tags2[((i*cuts) + j)*cuts + k])
+                    counts = na.bincount(self.tags2[((i*cuts) + j)*cuts + k]+1)
+                    sort_indices = na.argsort(self.tags2[((i*cuts) + j)*cuts + k])
+                    grab_indices = na.indices(self.tags2[((i*cuts) + j)*cuts + k].shape).ravel()[sort_indices]
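+                    # (sorting by tag groups particles contiguously; bincount of
+                    # tags+1 gives each group's run length, with unbound particles,
+                    # tag -1, landing in bin 0)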
+                    dens = self.densities2[((i*cuts) + j)*cuts + k][sort_indices]
+                    cp = 0
+                    print 'uids %d' % (unique_ids.size)
+                    for ii in unique_ids:
+                        cp_c = cp + counts[ii+1]
+                        if ii == -1:
+                            cp += counts[ii+1]
+                            continue
+                        group_indices = grab_indices[cp:cp_c]
+                        md_i = na.argmax(dens[cp:cp_c])
+                        px = self.temp2x[((i*cuts) + j)*cuts + k][group_indices]
+                        py = self.temp2y[((i*cuts) + j)*cuts + k][group_indices]
+                        pz = self.temp2z[((i*cuts) + j)*cuts + k][group_indices]
+                        max_dens = (dens[cp:cp_c][md_i], px[md_i], py[md_i], pz[md_i])
+                        # as a shortcut, if the most dense particle is in the current box, we keep the group
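+                        # (floor(pos*cuts) is the index of the unpadded sub-box that
+                        # owns a position, so each group is claimed exactly once and
+                        # groups led by a mirrored copy, outside [0, 1), are dropped)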
+                        if ((math.floor(max_dens[1]*cuts) == i) and (math.floor(max_dens[2]*cuts) == j) \
+                            and (math.floor(max_dens[3]*cuts) == k)):
+                            # map the sub-box-local group_indices back to the original
+                            # particle IDs via tracking2, so they are correct in the
+                            # overall 'real' sense
+                            temp_real_indices = arange(group_indices.size)
+                            for gg,gi in enumerate(group_indices):
+                                temp_real_indices[gg] = self.tracking2[((i*cuts) + j)*cuts + k][gi]
+                            self._groups2[((i*cuts) + j)*cuts + k].append(HopGroup(self, ii, temp_real_indices))
+                            self._max_dens2[((i*cuts) + j)*cuts + k].append(max_dens)
+                        cp += counts[ii+1]
+        for i in range(cuts**3):
+            print '%d groups2 %d' % (i,len(self._groups2[i]))
+
+    def __glue_outputs(self):
+        cuts = 2
+        ii = 0
+        iii = 0
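+        # ii and iii are running global counters, so groups and max-dens entries
+        # get consecutive IDs across all the sub-boxes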
+        for i in range(cuts):
+            for j in range(cuts):
+                for k in range(cuts):
+                    print '%d len = %d' % (((i*cuts) + j)*cuts + k, len(self._groups2[((i*cuts) + j)*cuts + k]))
+                    for group2 in self._groups2[((i*cuts) + j)*cuts + k]:
+                        self._groups.append(HopGroup(self, iii, group2.indices))
+                        iii += 1
+                    for dens in self._max_dens2[((i*cuts) + j)*cuts + k]:
+                        self._max_dens[ii] = dens
+                        ii += 1
+
     def __parse_output(self):
         unique_ids = na.unique(self.tags)
         counts = na.bincount(self.tags+1)