[yt-svn] commit/yt: 3 new changesets
Bitbucket
commits-noreply at bitbucket.org
Tue Feb 19 17:50:00 PST 2013
3 new commits in yt:
https://bitbucket.org/yt_analysis/yt/commits/f53fb4801dc2/
changeset: f53fb4801dc2
branch: yt
user: mqk
date: 2013-02-19 20:05:25
summary: Added 'hires_dm_mass' keyword to the rockstar halo finder, to support simulations with multiple DM particle masses. Only the highest-resolution particles (with mass < 1.1*hires_dm_mass) will be used in the halo finding.
affected #: 2 files
diff -r 7f49529e1bb679fec9ac51b09ccdf2aaa4b03719 -r f53fb4801dc2be6637efa95bff074b1c6867a23b yt/analysis_modules/halo_finding/rockstar/rockstar.py
--- a/yt/analysis_modules/halo_finding/rockstar/rockstar.py
+++ b/yt/analysis_modules/halo_finding/rockstar/rockstar.py
@@ -164,6 +164,10 @@
If set to ``True``, it will be assumed that there are only dark
matter particles present in the simulation. This can save analysis
time if this is indeed the case. Default: ``False``.
+ hires_dm_mass : float
+ If supplied, use only the highest resolution dark matter particles,
+ with a mass less than (1.1*hires_dm_mass). This is useful for
+ multi-dm-mass simulations. Default: ``None``.
Returns
-------
@@ -187,7 +191,8 @@
"""
def __init__(self, ts, num_readers = 1, num_writers = None,
outbase="rockstar_halos", dm_type=1,
- force_res=None, total_particles=None, dm_only=False):
+ force_res=None, total_particles=None, dm_only=False,
+ hires_dm_mass=None):
mylog.warning("The citation for the Rockstar halo finder can be found at")
mylog.warning("http://adsabs.harvard.edu/abs/2013ApJ...762..109B")
ParallelAnalysisInterface.__init__(self)
@@ -217,6 +222,7 @@
self.force_res = force_res
self.total_particles = total_particles
self.dm_only = dm_only
+ self.hires_dm_mass = hires_dm_mass
# Setup pool and workgroups.
self.pool, self.workgroup = self.runner.setup_pool()
p = self._setup_parameters(ts)
@@ -227,28 +233,51 @@
def _setup_parameters(self, ts):
if self.workgroup.name != "readers": return None
tpf = ts[0]
+
def _particle_count(field, data):
- if self.dm_only:
- return np.prod(data["particle_position_x"].shape)
try:
- return (data["particle_type"]==self.dm_type).sum()
+ data["particle_type"]
+ has_particle_type=True
except KeyError:
- return np.prod(data["particle_position_x"].shape)
+ has_particle_type=False
+
+ if (self.dm_only or (not has_particle_type)):
+ if self.hires_dm_mass is None:
+ return np.prod(data["particle_position_x"].shape)
+ else:
+ return (data['ParticleMassMsun'] < self.hires_dm_mass*1.1).sum()
+ elif has_particle_type:
+ if self.hires_dm_mass is None:
+ return (data["particle_type"]==self.dm_type).sum()
+ else:
+ return ( (data["particle_type"]==self.dm_type) &
+ (data['ParticleMassMsun'] < self.hires_dm_mass*1.1) ).sum()
+ else:
+ raise RuntimeError() # should never get here
+
add_field("particle_count", function=_particle_count,
not_in_all=True, particle_type=True)
dd = tpf.h.all_data()
# Get DM particle mass.
all_fields = set(tpf.h.derived_field_list + tpf.h.field_list)
- for g in tpf.h._get_objs("grids"):
- if g.NumberOfParticles == 0: continue
- if self.dm_only:
- iddm = Ellipsis
- elif "particle_type" in all_fields:
- iddm = g["particle_type"] == self.dm_type
- else:
- iddm = Ellipsis
- particle_mass = g['ParticleMassMsun'][iddm][0] / tpf.hubble_constant
- break
+ has_particle_type = ("particle_type" in all_fields)
+
+ if self.hires_dm_mass is None:
+ for g in tpf.h._get_objs("grids"):
+ if g.NumberOfParticles == 0: continue
+
+ if (self.dm_only or (not has_particle_type)):
+ iddm = Ellipsis
+ elif has_particle_type:
+ iddm = g["particle_type"] == self.dm_type
+ else:
+ iddm = Ellipsis # should never get here
+
+ particle_mass = g['ParticleMassMsun'][iddm][0] / tpf.hubble_constant
+ break
+ else:
+ particle_mass = self.hires_dm_mass / tpf.hubble_constant
+
p = {}
if self.total_particles is None:
# Get total_particles in parallel.
@@ -302,6 +331,7 @@
force_res = self.force_res,
particle_mass = float(self.particle_mass),
dm_only = int(self.dm_only),
+ hires_only = (self.hires_dm_mass is not None),
**kwargs)
# Make the directory to store the halo lists in.
if self.comm.rank == 0:
diff -r 7f49529e1bb679fec9ac51b09ccdf2aaa4b03719 -r f53fb4801dc2be6637efa95bff074b1c6867a23b yt/analysis_modules/halo_finding/rockstar/rockstar_interface.pyx
--- a/yt/analysis_modules/halo_finding/rockstar/rockstar_interface.pyx
+++ b/yt/analysis_modules/halo_finding/rockstar/rockstar_interface.pyx
@@ -163,6 +163,7 @@
SCALE_NOW = 1.0/(pf.current_redshift+1.0)
# Now we want to grab data from only a subset of the grids for each reader.
all_fields = set(pf.h.derived_field_list + pf.h.field_list)
+ has_particle_type = ("particle_type" in all_fields)
# First we need to find out how many this reader is going to read in
# if the number of readers > 1.
@@ -170,12 +171,19 @@
local_parts = 0
for g in pf.h._get_objs("grids"):
if g.NumberOfParticles == 0: continue
- if rh.dm_only:
- iddm = Ellipsis
- elif "particle_type" in all_fields:
- iddm = g["particle_type"] == rh.dm_type
+ if (rh.dm_only or (not has_particle_type)):
+ if rh.hires_only:
+ iddm = (g['ParticleMassMsun'] < PARTICLE_MASS*1.1)
+ else:
+ iddm = Ellipsis
+ elif has_particle_type:
+ if rh.hires_only:
+ iddm = ( (g["particle_type"]==rh.dm_type) &
+ (g['ParticleMassMsun'] < PARTICLE_MASS*1.1) )
+ else:
+ iddm = g["particle_type"] == rh.dm_type
else:
- iddm = Ellipsis
+ iddm = Ellipsis # should never get here
arri = g["particle_index"].astype("int64")
arri = arri[iddm] #pick only DM
local_parts += arri.size
@@ -195,12 +203,19 @@
pi = 0
for g in pf.h._get_objs("grids"):
if g.NumberOfParticles == 0: continue
- if rh.dm_only:
- iddm = Ellipsis
- elif "particle_type" in all_fields:
- iddm = g["particle_type"] == rh.dm_type
- else:
- iddm = Ellipsis
+ if (rh.dm_only or (not has_particle_type)):
+ if rh.hires_only:
+ iddm = (g['ParticleMassMsun'] < PARTICLE_MASS*1.1)
+ else:
+ iddm = Ellipsis
+ elif has_particle_type:
+ if rh.hires_only:
+ iddm = ( (g["particle_type"]==rh.dm_type) &
+ (g['ParticleMassMsun'] < PARTICLE_MASS*1.1) )
+ else:
+ iddm = g["particle_type"] == rh.dm_type
+ else:
+ iddm = Ellipsis # should never get here
arri = g["particle_index"].astype("int64")
arri = arri[iddm] #pick only DM
npart = arri.size
@@ -230,6 +245,7 @@
cdef public int dm_type
cdef public int total_particles
cdef public int dm_only
+ cdef public int hires_only
def __cinit__(self, ts):
self.ts = ts
@@ -244,7 +260,7 @@
int writing_port = -1, int block_ratio = 1,
int periodic = 1, force_res=None,
int min_halo_size = 25, outbase = "None",
- int dm_only = 0):
+ int dm_only = 0, int hires_only = False):
global PARALLEL_IO, PARALLEL_IO_SERVER_ADDRESS, PARALLEL_IO_SERVER_PORT
global FILENAME, FILE_FORMAT, NUM_SNAPS, STARTING_SNAP, h0, Ol, Om
global BOX_SIZE, PERIODIC, PARTICLE_MASS, NUM_BLOCKS, NUM_READERS
@@ -276,6 +292,7 @@
TOTAL_PARTICLES = total_particles
self.block_ratio = block_ratio
self.dm_only = dm_only
+ self.hires_only = hires_only
tpf = self.ts[0]
h0 = tpf.hubble_constant
https://bitbucket.org/yt_analysis/yt/commits/b48e0411316f/
changeset: b48e0411316f
branch: yt
user: mqk
date: 2013-02-20 00:46:52
summary: Small docstring addition.
affected #: 1 file
diff -r f53fb4801dc2be6637efa95bff074b1c6867a23b -r b48e0411316fbf00a18ebc8a194c38d7abdbd495 yt/analysis_modules/halo_finding/rockstar/rockstar.py
--- a/yt/analysis_modules/halo_finding/rockstar/rockstar.py
+++ b/yt/analysis_modules/halo_finding/rockstar/rockstar.py
@@ -165,9 +165,12 @@
matter particles present in the simulation. This can save analysis
time if this is indeed the case. Default: ``False``.
hires_dm_mass : float
- If supplied, use only the highest resolution dark matter particles,
- with a mass less than (1.1*hires_dm_mass). This is useful for
- multi-dm-mass simulations. Default: ``None``.
+ If supplied, use only the highest resolution dark matter
+ particles, with a mass less than (1.1*hires_dm_mass), in units
+ of ParticleMassMsun. This is useful for multi-dm-mass
+ simulations. Note that this will only give sensible results for
+ halos that are not "polluted" by lower resolution
+ particles. Default: ``None``.
Returns
-------
https://bitbucket.org/yt_analysis/yt/commits/09a0fd75818b/
changeset: 09a0fd75818b
branch: yt
user: sskory
date: 2013-02-20 02:49:55
summary: Merged in mqk/yt_clean (pull request #437)
hires_dm_mass keyword to rockstar halo finder
affected #: 2 files
diff -r db1d8020d9fb8484147d65366968b012db4d0b50 -r 09a0fd75818bc91ba4166fa3a3db5cabd27f9c0a yt/analysis_modules/halo_finding/rockstar/rockstar.py
--- a/yt/analysis_modules/halo_finding/rockstar/rockstar.py
+++ b/yt/analysis_modules/halo_finding/rockstar/rockstar.py
@@ -164,6 +164,13 @@
If set to ``True``, it will be assumed that there are only dark
matter particles present in the simulation. This can save analysis
time if this is indeed the case. Default: ``False``.
+ hires_dm_mass : float
+ If supplied, use only the highest resolution dark matter
+ particles, with a mass less than (1.1*hires_dm_mass), in units
+ of ParticleMassMsun. This is useful for multi-dm-mass
+ simulations. Note that this will only give sensible results for
+ halos that are not "polluted" by lower resolution
+ particles. Default: ``None``.
Returns
-------
@@ -187,7 +194,8 @@
"""
def __init__(self, ts, num_readers = 1, num_writers = None,
outbase="rockstar_halos", dm_type=1,
- force_res=None, total_particles=None, dm_only=False):
+ force_res=None, total_particles=None, dm_only=False,
+ hires_dm_mass=None):
mylog.warning("The citation for the Rockstar halo finder can be found at")
mylog.warning("http://adsabs.harvard.edu/abs/2013ApJ...762..109B")
ParallelAnalysisInterface.__init__(self)
@@ -217,6 +225,7 @@
self.force_res = force_res
self.total_particles = total_particles
self.dm_only = dm_only
+ self.hires_dm_mass = hires_dm_mass
# Setup pool and workgroups.
self.pool, self.workgroup = self.runner.setup_pool()
p = self._setup_parameters(ts)
@@ -227,28 +236,51 @@
def _setup_parameters(self, ts):
if self.workgroup.name != "readers": return None
tpf = ts[0]
+
def _particle_count(field, data):
- if self.dm_only:
- return np.prod(data["particle_position_x"].shape)
try:
- return (data["particle_type"]==self.dm_type).sum()
+ data["particle_type"]
+ has_particle_type=True
except KeyError:
- return np.prod(data["particle_position_x"].shape)
+ has_particle_type=False
+
+ if (self.dm_only or (not has_particle_type)):
+ if self.hires_dm_mass is None:
+ return np.prod(data["particle_position_x"].shape)
+ else:
+ return (data['ParticleMassMsun'] < self.hires_dm_mass*1.1).sum()
+ elif has_particle_type:
+ if self.hires_dm_mass is None:
+ return (data["particle_type"]==self.dm_type).sum()
+ else:
+ return ( (data["particle_type"]==self.dm_type) &
+ (data['ParticleMassMsun'] < self.hires_dm_mass*1.1) ).sum()
+ else:
+ raise RuntimeError() # should never get here
+
add_field("particle_count", function=_particle_count,
not_in_all=True, particle_type=True)
dd = tpf.h.all_data()
# Get DM particle mass.
all_fields = set(tpf.h.derived_field_list + tpf.h.field_list)
- for g in tpf.h._get_objs("grids"):
- if g.NumberOfParticles == 0: continue
- if self.dm_only:
- iddm = Ellipsis
- elif "particle_type" in all_fields:
- iddm = g["particle_type"] == self.dm_type
- else:
- iddm = Ellipsis
- particle_mass = g['ParticleMassMsun'][iddm][0] / tpf.hubble_constant
- break
+ has_particle_type = ("particle_type" in all_fields)
+
+ if self.hires_dm_mass is None:
+ for g in tpf.h._get_objs("grids"):
+ if g.NumberOfParticles == 0: continue
+
+ if (self.dm_only or (not has_particle_type)):
+ iddm = Ellipsis
+ elif has_particle_type:
+ iddm = g["particle_type"] == self.dm_type
+ else:
+ iddm = Ellipsis # should never get here
+
+ particle_mass = g['ParticleMassMsun'][iddm][0] / tpf.hubble_constant
+ break
+ else:
+ particle_mass = self.hires_dm_mass / tpf.hubble_constant
+
p = {}
if self.total_particles is None:
# Get total_particles in parallel.
@@ -302,6 +334,7 @@
force_res = self.force_res,
particle_mass = float(self.particle_mass),
dm_only = int(self.dm_only),
+ hires_only = (self.hires_dm_mass is not None),
**kwargs)
# Make the directory to store the halo lists in.
if self.comm.rank == 0:
diff -r db1d8020d9fb8484147d65366968b012db4d0b50 -r 09a0fd75818bc91ba4166fa3a3db5cabd27f9c0a yt/analysis_modules/halo_finding/rockstar/rockstar_interface.pyx
--- a/yt/analysis_modules/halo_finding/rockstar/rockstar_interface.pyx
+++ b/yt/analysis_modules/halo_finding/rockstar/rockstar_interface.pyx
@@ -163,6 +163,7 @@
SCALE_NOW = 1.0/(pf.current_redshift+1.0)
# Now we want to grab data from only a subset of the grids for each reader.
all_fields = set(pf.h.derived_field_list + pf.h.field_list)
+ has_particle_type = ("particle_type" in all_fields)
# First we need to find out how many this reader is going to read in
# if the number of readers > 1.
@@ -170,12 +171,19 @@
local_parts = 0
for g in pf.h._get_objs("grids"):
if g.NumberOfParticles == 0: continue
- if rh.dm_only:
- iddm = Ellipsis
- elif "particle_type" in all_fields:
- iddm = g["particle_type"] == rh.dm_type
+ if (rh.dm_only or (not has_particle_type)):
+ if rh.hires_only:
+ iddm = (g['ParticleMassMsun'] < PARTICLE_MASS*1.1)
+ else:
+ iddm = Ellipsis
+ elif has_particle_type:
+ if rh.hires_only:
+ iddm = ( (g["particle_type"]==rh.dm_type) &
+ (g['ParticleMassMsun'] < PARTICLE_MASS*1.1) )
+ else:
+ iddm = g["particle_type"] == rh.dm_type
else:
- iddm = Ellipsis
+ iddm = Ellipsis # should never get here
arri = g["particle_index"].astype("int64")
arri = arri[iddm] #pick only DM
local_parts += arri.size
@@ -195,12 +203,19 @@
pi = 0
for g in pf.h._get_objs("grids"):
if g.NumberOfParticles == 0: continue
- if rh.dm_only:
- iddm = Ellipsis
- elif "particle_type" in all_fields:
- iddm = g["particle_type"] == rh.dm_type
- else:
- iddm = Ellipsis
+ if (rh.dm_only or (not has_particle_type)):
+ if rh.hires_only:
+ iddm = (g['ParticleMassMsun'] < PARTICLE_MASS*1.1)
+ else:
+ iddm = Ellipsis
+ elif has_particle_type:
+ if rh.hires_only:
+ iddm = ( (g["particle_type"]==rh.dm_type) &
+ (g['ParticleMassMsun'] < PARTICLE_MASS*1.1) )
+ else:
+ iddm = g["particle_type"] == rh.dm_type
+ else:
+ iddm = Ellipsis # should never get here
arri = g["particle_index"].astype("int64")
arri = arri[iddm] #pick only DM
npart = arri.size
@@ -230,6 +245,7 @@
cdef public int dm_type
cdef public int total_particles
cdef public int dm_only
+ cdef public int hires_only
def __cinit__(self, ts):
self.ts = ts
@@ -244,7 +260,7 @@
int writing_port = -1, int block_ratio = 1,
int periodic = 1, force_res=None,
int min_halo_size = 25, outbase = "None",
- int dm_only = 0):
+ int dm_only = 0, int hires_only = False):
global PARALLEL_IO, PARALLEL_IO_SERVER_ADDRESS, PARALLEL_IO_SERVER_PORT
global FILENAME, FILE_FORMAT, NUM_SNAPS, STARTING_SNAP, h0, Ol, Om
global BOX_SIZE, PERIODIC, PARTICLE_MASS, NUM_BLOCKS, NUM_READERS
@@ -276,6 +292,7 @@
TOTAL_PARTICLES = total_particles
self.block_ratio = block_ratio
self.dm_only = dm_only
+ self.hires_only = hires_only
tpf = self.ts[0]
h0 = tpf.hubble_constant
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
this email because the commit-notification service is enabled for this
repository and you are among its addressed recipients.
More information about the yt-svn
mailing list