[Yt-svn] yt-commit r380 - trunk/yt/lagos
mturk at wrangler.dreamhost.com
mturk at wrangler.dreamhost.com
Mon Feb 25 09:07:43 PST 2008
Author: mturk
Date: Mon Feb 25 09:07:40 2008
New Revision: 380
URL: http://yt.spacepope.org/changeset/380
Log:
Changes to some fields to include better particle support, reduction of
complexity in the angular momentum fields (using cross product functions), and
inclusion of metallicity.
Also includes a somewhat failed experiment to speed things up in the packed HDF5
format; it is still enabled because it shows some promise.
Modified:
trunk/yt/lagos/BaseDataTypes.py
trunk/yt/lagos/DataReadingFuncs.py
trunk/yt/lagos/DerivedFields.py
Modified: trunk/yt/lagos/BaseDataTypes.py
==============================================================================
--- trunk/yt/lagos/BaseDataTypes.py (original)
+++ trunk/yt/lagos/BaseDataTypes.py Mon Feb 25 09:07:40 2008
@@ -613,9 +613,27 @@
self.func = na.sum
if not self._deserialize():
self.__calculate_memory()
+ if self.hierarchy.data_style == 6:
+ self.__cache_data()
self._refresh_data()
@time_execution
+ def __cache_data(self):
+ rdf = self.hierarchy.grid.readDataFast
+ self.hierarchy.grid.readDataFast = readDataPackedHandle
+ for fn,g_list in self.hierarchy.cpu_map.items():
+ to_read = na.intersect1d(g_list, self.source._grids)
+ if len(to_read) == 0: continue
+ fh = tables.openFile(to_read[0].filename,'r')
+ for g in to_read:
+ g.handle = fh
+ for field in ensure_list(self.fields):
+ g[field]
+ del g.handle
+ fh.close()
+ self.hierarchy.grid.readDataFast = readDataPackedHandle
+
+ @time_execution
def __calculate_memory(self):
level_mem = {}
h = self.hierarchy
@@ -698,6 +716,20 @@
dx = grids_to_project[0].dx * na.ones(len(dblI[0]), dtype='float64')
return [levelData[0][dblI], levelData[1][dblI], weightedData, dx]
+ def __cleanup_level(self, level):
+ grids_to_project = self.source.select_grids(level)
+ all_data = []
+ for grid in grids_to_project:
+ if hasattr(grid,'coarseData'):
+ if self._weight is not None:
+ weightedData = grid.coarseData[2] / grid.coarseData[4]
+ else:
+ weightedData = grid.coarseData[2]
+ all_data.append([grid.coarseData[0], grid.coarseData[1],
+ weightedData,
+ na.ones(grid.coarseData[0].shape,dtype='float64')*grid.dx])
+ return na.concatenate(all_data, axis=1)
+
def __setup_weave_dict_combine(self, grid1, grid2):
# Recall: x, y, val, mask, weight
my_dict = {}
@@ -774,7 +806,7 @@
goodI = na.where(my_dict['flagged'] == 0)
grid2.coarseData = [grid2.coarseData[i][goodI] for i in range(5)]
for grid1 in self.source.select_grids(level-1):
- if grid1.coarseData[0].shape[0] != 0:
+ if not self._check_region and grid1.coarseData[0].shape[0] != 0:
mylog.error("Something messed up, and %s still has %s points of data",
grid1, grid1.coarseData[0].size)
print grid1.coarseData[0]
@@ -788,6 +820,10 @@
s = self.source
for level in range(0, self._max_level+1):
all_data.append(self.__project_level(level, field))
+ if self._check_region:
+ for level in range(0, self._max_level+1):
+ check=self.__cleanup_level(level)
+ if len(check) > 0: all_data.append(check)
all_data = na.concatenate(all_data, axis=1)
# We now convert to half-widths and center-points
self['pdx'] = all_data[3,:]
Modified: trunk/yt/lagos/DataReadingFuncs.py
==============================================================================
--- trunk/yt/lagos/DataReadingFuncs.py (original)
+++ trunk/yt/lagos/DataReadingFuncs.py Mon Feb 25 09:07:40 2008
@@ -122,6 +122,11 @@
"""
return SD.SD(grid.filename).select(field)[sl].swapaxes(0,2)
+def readDataPackedHandle(self, field):
+ t = self.handle.getNode("/Grid%08i" % (self.id), field).read().astype('float64')
+ t = t.swapaxes(0,2)
+ return t
+
def readDataPacked(self, field):
f = tables.openFile(self.filename,
rootUEP="/Grid%08i" % (self.id),
Modified: trunk/yt/lagos/DerivedFields.py
==============================================================================
--- trunk/yt/lagos/DerivedFields.py (original)
+++ trunk/yt/lagos/DerivedFields.py Mon Feb 25 09:07:40 2008
@@ -243,7 +243,7 @@
_speciesList = ["HI","HII","Electron",
"HeI","HeII","HeIII",
"H2I","H2II","HM",
- "DI","DII","HDI"]
+ "DI","DII","HDI","Metal"]
def _SpeciesFraction(field, data):
sp = field.name.split("_")[0] + "_Density"
return data[sp]/data["Density"]
@@ -252,6 +252,13 @@
function=_SpeciesFraction,
validators=ValidateDataField("%s_Density" % species))
+def _Metallicity(field, data):
+ return data["Metal_Fraction"] / 0.0204
+add_field("Metallicity", units=r"Z_{\rm{Solar}}",
+ validators=ValidateDataField("Metal_Density"),
+ projection_conversion="1")
+
+
def _GridLevel(field, data):
return na.ones(data["Density"].shape)*(data.Level)
add_field("GridLevel", validators=[#ValidateProperty('Level'),
@@ -286,7 +293,7 @@
particles = na.array([], dtype=data["Density"].dtype)
return particles
return _Particles
-for pf in ["index","mass","type"] + \
+for pf in ["index","type"] + \
["velocity_%s" % ax for ax in 'xyz'] + \
["position_%s" % ax for ax in 'xyz']:
pfunc = particle_func(pf)
@@ -294,6 +301,23 @@
validators = [ValidateSpatial(0)],
variable_length=True)
+def _ParticleMass(field, data):
+ try:
+ particles = data._read_data("particle mass").astype('float64')
+ particles *= just_one(data["CellVolumeCode"].ravel())
+ except data._read_exception:
+ particles = na.array([], dtype=data["Density"].dtype)
+ return particles
+def _convertParticleMass(data):
+ return data.convert("Density")*(data.convert("cm")**3.0)
+def _convertParticleMassMsun(data):
+ return data.convert("Density")*((data.convert("cm")**3.0)/1.989e33)
+add_field("ParticleMass", validators=[ValidateSpatial(0)],
+ variable_length=True, convert_function=_convertParticleMass)
+add_field("ParticleMassMsun",
+ function=_ParticleMass, validators=[ValidateSpatial(0)],
+ variable_length=True, convert_function=_convertParticleMassMsun)
+
def _MachNumber(field, data):
"""M{|v|/t_sound}"""
return data["VelocityMagnitude"] / data["SoundSpeed"]
@@ -438,7 +462,8 @@
convert_function=_ConvertCellVolumeCGS)
def _XRayEmissivity(field, data):
- return ((data["Density"]**2.0)*data["Temperature"]**0.5)
+ return ((data["Density"].astype('float64')**2.0) \
+ *data["Temperature"]**0.5)
def _convertXRayEmissivity(data):
return 2.168e60
add_field("XRayEmissivity",
@@ -543,27 +568,26 @@
yv = data["y-velocity"] - bv[1]
zv = data["z-velocity"] - bv[2]
else:
- xv = self["x-velocity"]
- yv = self["y-velocity"]
- zv = self["z-velocity"]
+ xv = data["x-velocity"]
+ yv = data["y-velocity"]
+ zv = data["z-velocity"]
center = data.get_field_parameter('center')
coords = na.array([data['x'],data['y'],data['z']])
r_vec = coords - na.reshape(center,(3,1))
v_vec = na.array([xv,yv,zv])
- tr = na.zeros(v_vec.shape, 'float64')
- tr[0,:] = (r_vec[1,:] * v_vec[2,:]) - \
- (r_vec[2,:] * v_vec[1,:])
- tr[1,:] = (r_vec[0,:] * v_vec[2,:]) - \
- (r_vec[2,:] * v_vec[0,:])
- tr[2,:] = (r_vec[0,:] * v_vec[1,:]) - \
- (r_vec[1,:] * v_vec[0,:])
- return tr
+ return na.cross(r_vec, v_vec, axis=0)
def _convertSpecificAngularMomentum(data):
return data.convert("cm")
+def _convertSpecificAngularMomentumKMSMPC(data):
+ return data.convert("mpc")/1e5
add_field("SpecificAngularMomentum",
convert_function=_convertSpecificAngularMomentum, vector_field=True,
units=r"\rm{cm}^2/\rm{s}", validators=[ValidateParameter('center')])
+add_field("SpecificAngularMomentumKMSMPC",
+ function=_SpecificAngularMomentum, vector_field=True,
+ convert_function=_convertSpecificAngularMomentumKMSMPC, vector_field=True,
+ units=r"\rm{km}\rm{Mpc}/\rm{s}", validators=[ValidateParameter('center')])
def _Radius(field, data):
center = data.get_field_parameter("center")
@@ -646,6 +670,12 @@
for field in _enzo_fields:
add_field(field, function=lambda a, b: None, take_log=True,
validators=[ValidateDataField(field)], units=r"\rm{g}/\rm{cm}^3")
+fieldInfo["x-velocity"].projection_conversion='1'
+fieldInfo["x-velocity"].line_integral = False
+fieldInfo["y-velocity"].projection_conversion='1'
+fieldInfo["y-velocity"].line_integral = False
+fieldInfo["z-velocity"].projection_conversion='1'
+fieldInfo["z-velocity"].line_integral = False
# Now we override
More information about the yt-svn
mailing list