[Yt-svn] yt: 2 new changesets
hg at spacepope.org
hg at spacepope.org
Wed Nov 17 12:21:32 PST 2010
hg Repository: yt
details: yt/rev/edc3d4388648
changeset: 3537:edc3d4388648
user: Britton Smith <brittonsmith at gmail.com>
date:
Wed Nov 17 15:21:17 2010 -0500
description:
Added Baryon_Overdensity field for enzo and cleaned up halo profiler.
hg Repository: yt
details: yt/rev/efac7f6a6fe4
changeset: 3538:efac7f6a6fe4
user: Britton Smith <brittonsmith at gmail.com>
date:
Wed Nov 17 15:21:28 2010 -0500
description:
Merged.
diffstat:
yt/analysis_modules/halo_profiler/multi_halo_profiler.py | 21 ++++++++++-----
yt/frontends/enzo/fields.py | 15 +++++++++++
yt/utilities/_amr_utils/VolumeIntegrator.pyx | 3 ++
yt/utilities/definitions.py | 3 +-
yt/visualization/volume_rendering/grid_partitioner.py | 19 ++++++++++++++
5 files changed, 52 insertions(+), 9 deletions(-)
diffs (167 lines):
diff -r f6dc5b4ba882 -r efac7f6a6fe4 yt/analysis_modules/halo_profiler/multi_halo_profiler.py
--- a/yt/analysis_modules/halo_profiler/multi_halo_profiler.py Thu Nov 11 22:12:19 2010 -0800
+++ b/yt/analysis_modules/halo_profiler/multi_halo_profiler.py Wed Nov 17 15:21:28 2010 -0500
@@ -277,10 +277,12 @@
if 'TotalMassMsun' in all_vqFilters:
mass_filter = vFilter['kwargs']['virial_filters'][all_vqFilters.index('TotalMassMsun')]
if '>' in mass_filter[1]:
- virial_prefilter = "halo['mass'] %s %f * %s" % (mass_filter[1], virial_prefilter_safety_factor, mass_filter[2])
+ virial_prefilter = "halo['mass'] %s %f * %s" % \
+ (mass_filter[1], virial_prefilter_safety_factor, mass_filter[2])
prefilters.append(virial_prefilter)
elif '<' in mass_filter[1]:
- virial_prefilter = "halo['mass'] %s %f * %s" % (mass_filter[1], (1./virial_prefilter_safety_factor), mass_filter[2])
+ virial_prefilter = "halo['mass'] %s %f * %s" % \
+ (mass_filter[1], (1./virial_prefilter_safety_factor), mass_filter[2])
prefilters.append(virial_prefilter)
# Add profile fields necessary for calculating virial quantities.
@@ -312,7 +314,8 @@
# Apply filter and keep track of the quantities that are returned.
for hFilter in self._halo_filters:
- filter_result, filterQuantities = hFilter['function'](profiledHalo, *hFilter['args'], **hFilter['kwargs'])
+ filter_result, filterQuantities = hFilter['function'](profiledHalo, *hFilter['args'],
+ **hFilter['kwargs'])
if not filter_result: break
@@ -387,7 +390,8 @@
else:
mylog.error("Invalid parameter: VelocityCenter.")
elif self.velocity_center[0] == 'max':
- max_grid, max_cell, max_value, max_location = self.pf.h.find_max_cell_location(self.velocity_center[1])
+ max_grid, max_cell, max_value, max_location = \
+ self.pf.h.find_max_cell_location(self.velocity_center[1])
sphere.set_field_parameter('bulk_velocity', [max_grid['x-velocity'][max_cell],
max_grid['y-velocity'][max_cell],
max_grid['z-velocity'][max_cell]])
@@ -521,7 +525,8 @@
output = h5py.File(dataFilename, "a")
# Create fixed resolution buffer for each projection and write them out.
for e, hp in enumerate(self.projection_fields):
- frb = FixedResolutionBuffer(pc.plots[e].data, (proj_left[0], proj_right[0], proj_left[1], proj_right[1]),
+ frb = FixedResolutionBuffer(pc.plots[e].data, (proj_left[0], proj_right[0],
+ proj_left[1], proj_right[1]),
(projectionResolution, projectionResolution),
antialias=False)
dataset_name = "%s_%s" % (hp['field'], hp['weight_field'])
@@ -761,7 +766,8 @@
This is necessary when projecting a periodic region.
"""
offset = [newCenter[q]-oldCenter[q] for q in range(len(oldCenter))]
- width = [pf.parameters['DomainRightEdge'][q]-pf.parameters['DomainLeftEdge'][q] for q in range(len(oldCenter))]
+ width = [pf.parameters['DomainRightEdge'][q]-pf.parameters['DomainLeftEdge'][q] \
+ for q in range(len(oldCenter))]
del offset[axis]
del width[axis]
@@ -830,7 +836,8 @@
plot.data['pdy'] = na.concatenate([plot['pdy'], add_x_pdy, add_y_pdy, add2_x_pdy, add2_y_pdy])
plot.data[field] = na.concatenate([plot[field], add_x_field, add_y_field, add2_x_field, add2_y_field])
plot.data['weight_field'] = na.concatenate([plot['weight_field'],
- add_x_weight_field, add_y_weight_field, add2_x_weight_field, add2_y_weight_field])
+ add_x_weight_field, add_y_weight_field,
+ add2_x_weight_field, add2_y_weight_field])
# Delete original copies of hanging cells.
del add_x_px, add_y_px, add2_x_px, add2_y_px
diff -r f6dc5b4ba882 -r efac7f6a6fe4 yt/frontends/enzo/fields.py
--- a/yt/frontends/enzo/fields.py Thu Nov 11 22:12:19 2010 -0800
+++ b/yt/frontends/enzo/fields.py Wed Nov 17 15:21:28 2010 -0500
@@ -199,11 +199,26 @@
return data["Density"]/ef
add_field("ComovingDensity", function=_ComovingDensity, units=r"\rm{g}/\rm{cm}^3")
+# This is rho_total / rho_cr(z).
def Overdensity(field,data):
return (data['Density'] + data['Dark_Matter_Density']) / \
(rho_crit_now * (data.pf.hubble_constant**2) * ((1+data.pf.current_redshift)**3))
add_field("Overdensity",function=Overdensity,units=r"")
+# This is rho_b / <rho_b>.
+def _Baryon_Overdensity(field, data):
+ return data['Density']
+def _Convert_Baryon_Overdensity(data):
+ if data.pf.parameters.has_key('omega_baryon_now'):
+ omega_baryon_now = data.pf.parameters['omega_baryon_now']
+ else:
+ omega_baryon_now = 0.0441
+ return 1 / (omega_baryon_now * rho_crit_now *
+ (data.pf['CosmologyHubbleConstantNow']**2) *
+ ((1+data.pf['CosmologyCurrentRedshift'])**3))
+add_field("Baryon_Overdensity", function=_Baryon_Overdensity,
+ convert_function=_Convert_Baryon_Overdensity, units=r"")
+
# Now we add all the fields that we want to control, but we give a null function
# This is every Enzo field we can think of. This will be installation-dependent,
diff -r f6dc5b4ba882 -r efac7f6a6fe4 yt/utilities/_amr_utils/VolumeIntegrator.pyx
--- a/yt/utilities/_amr_utils/VolumeIntegrator.pyx Thu Nov 11 22:12:19 2010 -0800
+++ b/yt/utilities/_amr_utils/VolumeIntegrator.pyx Wed Nov 17 15:21:28 2010 -0500
@@ -335,6 +335,7 @@
tdata = data[i]
self.data[i] = <np.float64_t *> tdata.data
+ @cython.cdivision(True)
@cython.boundscheck(False)
@cython.wraparound(False)
def cast_plane(self, TransferFunctionProxy tf, VectorPlane vp):
@@ -396,6 +397,7 @@
if temp > extrema[3]: extrema[3] = temp
#print extrema[0], extrema[1], extrema[2], extrema[3]
+ @cython.cdivision(True)
@cython.boundscheck(False)
@cython.wraparound(False)
cdef int integrate_ray(self, np.float64_t v_pos[3],
@@ -507,6 +509,7 @@
if enter_t > 1.0: break
return hit
+ @cython.cdivision(True)
@cython.boundscheck(False)
@cython.wraparound(False)
cdef void sample_values(self,
diff -r f6dc5b4ba882 -r efac7f6a6fe4 yt/utilities/definitions.py
--- a/yt/utilities/definitions.py Thu Nov 11 22:12:19 2010 -0800
+++ b/yt/utilities/definitions.py Wed Nov 17 15:21:28 2010 -0500
@@ -41,8 +41,7 @@
x_names = ['y','x','x']
y_names = ['z','z','y']
-# All the parameters we read from the parameter file, along with how to convert
-# them from a string
+# How many of each thing are in an Mpc
mpc_conversion = {'mpc' : 1e0,
'kpc' : 1e3,
'pc' : 1e6,
diff -r f6dc5b4ba882 -r efac7f6a6fe4 yt/visualization/volume_rendering/grid_partitioner.py
--- a/yt/visualization/volume_rendering/grid_partitioner.py Thu Nov 11 22:12:19 2010 -0800
+++ b/yt/visualization/volume_rendering/grid_partitioner.py Wed Nov 17 15:21:28 2010 -0500
@@ -182,6 +182,25 @@
def reset_cast(self):
pass
+class SingleBrickVolume(object):
+ bricks = None
+ def __init__(self, data_array):
+ self.bricks = [PartitionedGrid(-1, 1,
+ [data_array.astype("float64")],
+ na.zeros(3, dtype='float64'),
+ na.ones(3, dtype='float64'),
+ na.array(data_array.shape, dtype='int64')-1)]
+ self.brick_dimensions = na.ones((1, 3), dtype='int64')*data_array.shape
+
+ def initialize_source(self):
+ pass
+
+ def traverse(self, back, front):
+ for b in self.bricks: yield b
+
+ def reset_cast(self):
+ pass
+
class HomogenizedBrickCollection(DistributedObjectCollection):
def __init__(self, source):
# The idea here is that we have two sources -- the global_domain
More information about the yt-svn
mailing list