[yt-svn] commit/yt: 4 new changesets
commits-noreply at bitbucket.org
Wed Apr 16 03:53:57 PDT 2014
4 new commits in yt:
https://bitbucket.org/yt_analysis/yt/commits/85c0d8ed61c9/
Changeset: 85c0d8ed61c9
Branch: yt-3.0
User: hegan
Date: 2014-04-14 20:28:11
Summary: ParticleMassMsun -> particle_mass
Affected #: 1 file
diff -r f4d28647881af0f472f0df0cf71440536d8cbf33 -r 85c0d8ed61c9e43a30ce251cc01866a6ae77e5b3 yt/analysis_modules/halo_finding/halo_objects.py
--- a/yt/analysis_modules/halo_finding/halo_objects.py
+++ b/yt/analysis_modules/halo_finding/halo_objects.py
@@ -129,7 +129,7 @@
"""
if self.CoM is not None:
return self.CoM
- pm = self["ParticleMassMsun"]
+ pm = self["particle_mass"]
c = {}
# We shift into a box where the origin is the left edge
c[0] = self["particle_position_x"] - self.pf.domain_left_edge[0]
@@ -199,7 +199,7 @@
"""
if self.group_total_mass is not None:
return self.group_total_mass
- return self["ParticleMassMsun"].sum()
+ return self["particle_mass"].sum()
def bulk_velocity(self):
r"""Returns the mass-weighted average velocity in cm/s.
@@ -213,7 +213,7 @@
"""
if self.bulk_vel is not None:
return self.bulk_vel
- pm = self["ParticleMassMsun"]
+ pm = self["particle_mass"]
vx = (self["particle_velocity_x"] * pm).sum()
vy = (self["particle_velocity_y"] * pm).sum()
vz = (self["particle_velocity_z"] * pm).sum()
@@ -234,7 +234,7 @@
if self.rms_vel is not None:
return self.rms_vel
bv = self.bulk_velocity()
- pm = self["ParticleMassMsun"]
+ pm = self["particle_mass"]
sm = pm.sum()
vx = (self["particle_velocity_x"] - bv[0]) * pm / sm
vy = (self["particle_velocity_y"] - bv[1]) * pm / sm
@@ -331,7 +331,7 @@
handle.create_group("/%s" % gn)
for field in ["particle_position_%s" % ax for ax in 'xyz'] \
+ ["particle_velocity_%s" % ax for ax in 'xyz'] \
- + ["particle_index"] + ["ParticleMassMsun"]:
+ + ["particle_index"] + ["particle_mass"]:
handle.create_dataset("/%s/%s" % (gn, field), data=self[field])
if 'creation_time' in self.data.pf.field_list:
handle.create_dataset("/%s/creation_time" % gn,
@@ -464,7 +464,7 @@
if self["particle_position_x"].size > 1:
for index in np.unique(inds):
self.mass_bins[index] += \
- np.sum(self["ParticleMassMsun"][inds == index])
+ np.sum(self["particle_mass"][inds == index])
# Now forward sum the masses in the bins.
for i in xrange(self.bin_count):
self.mass_bins[i + 1] += self.mass_bins[i]
@@ -750,7 +750,7 @@
inds = np.digitize(dist, self.radial_bins) - 1
for index in np.unique(inds):
self.mass_bins[index] += \
- np.sum(self["ParticleMassMsun"][inds == index])
+ np.sum(self["particle_mass"][inds == index])
# Now forward sum the masses in the bins.
for i in xrange(self.bin_count):
self.mass_bins[i + 1] += self.mass_bins[i]
@@ -1356,7 +1356,7 @@
_name = "HOP"
_halo_class = HOPHalo
_fields = ["particle_position_%s" % ax for ax in 'xyz'] + \
- ["ParticleMassMsun"]
+ ["particle_mass"]
def __init__(self, data_source, threshold=160.0, dm_only=True):
self.threshold = threshold
@@ -1368,7 +1368,7 @@
RunHOP(self.particle_fields["particle_position_x"] / self.period[0],
self.particle_fields["particle_position_y"] / self.period[1],
self.particle_fields["particle_position_z"] / self.period[2],
- self.particle_fields["ParticleMassMsun"],
+ self.particle_fields["particle_mass"],
self.threshold)
self.particle_fields["densities"] = self.densities
self.particle_fields["tags"] = self.tags
@@ -1555,7 +1555,7 @@
_name = "parallelHOP"
_halo_class = parallelHOPHalo
_fields = ["particle_position_%s" % ax for ax in 'xyz'] + \
- ["ParticleMassMsun", "particle_index"]
+ ["particle_mass", "particle_index"]
def __init__(self, data_source, padding, num_neighbors, bounds, total_mass,
period, threshold=160.0, dm_only=True, rearrange=True, premerge=True,
@@ -1589,8 +1589,8 @@
self.comm.mpi_exit_test(exit)
# Try to do this in a memory conservative way.
- np.divide(self.particle_fields['ParticleMassMsun'], self.total_mass,
- self.particle_fields['ParticleMassMsun'])
+ np.divide(self.particle_fields['particle_mass'], self.total_mass,
+ self.particle_fields['particle_mass'])
np.divide(self.particle_fields["particle_position_x"],
self.old_period[0], self.particle_fields["particle_position_x"])
np.divide(self.particle_fields["particle_position_y"],
@@ -2190,7 +2190,7 @@
# Now we get the full box mass after we have the final composition of
# subvolumes.
if total_mass is None:
- total_mass = self.comm.mpi_allreduce((self._data_source["ParticleMassMsun"].astype('float64')).sum(),
+ total_mass = self.comm.mpi_allreduce((self._data_source["particle_mass"].astype('float64')).sum(),
op='sum')
if not self._distributed:
self.padding = (np.zeros(3, dtype='float64'),
@@ -2386,9 +2386,9 @@
if dm_only:
select = self._get_dm_indices()
total_mass = \
- self.comm.mpi_allreduce((self._data_source['all', "ParticleMassMsun"][select]).sum(dtype='float64'), op='sum')
+ self.comm.mpi_allreduce((self._data_source['all', "particle_mass"][select]).sum(dtype='float64'), op='sum')
else:
- total_mass = self.comm.mpi_allreduce(self._data_source.quantities["TotalQuantity"]("ParticleMassMsun")[0], op='sum')
+ total_mass = self.comm.mpi_allreduce(self._data_source.quantities["TotalQuantity"]("particle_mass")[0], op='sum')
# MJT: Note that instead of this, if we are assuming that the particles
# are all on different processors, we should instead construct an
# object representing the entire domain and sum it "lazily" with
@@ -2409,10 +2409,10 @@
sub_mass = total_mass
elif dm_only:
select = self._get_dm_indices()
- sub_mass = self._data_source["ParticleMassMsun"][select].sum(dtype='float64')
+ sub_mass = self._data_source["particle_mass"][select].sum(dtype='float64')
else:
sub_mass = \
- self._data_source.quantities["TotalQuantity"]("ParticleMassMsun")[0]
+ self._data_source.quantities["TotalQuantity"]("particle_mass")[0]
HOPHaloList.__init__(self, self._data_source,
threshold * total_mass / sub_mass, dm_only)
self._parse_halolist(total_mass / sub_mass)
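The changeset above retires the yt-2.x field name ParticleMassMsun in favor of yt-3.0's unit-aware particle_mass. A minimal sketch of the user-facing difference, assuming any particle dataset that yt-3.0 can load (the dataset path below is hypothetical):

    import yt

    ds = yt.load("Enzo_64/DD0043/data0043")  # hypothetical dataset path
    ad = ds.all_data()

    # yt-2.x exposed a plain array pre-scaled to solar masses:
    #     pm = ad["ParticleMassMsun"]
    # yt-3.0 exposes one unit-aware field; units travel with the data:
    pm = ad["particle_mass"]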
https://bitbucket.org/yt_analysis/yt/commits/1ed14a685b0d/
Changeset: 1ed14a685b0d
Branch: yt-3.0
User: hegan
Date: 2014-04-14 22:48:13
Summary: annnd units -> .in_units('Msun')
Affected #: 1 file
diff -r 85c0d8ed61c9e43a30ce251cc01866a6ae77e5b3 -r 1ed14a685b0d7afbc644cb8737b4413f30cb9eb6 yt/analysis_modules/halo_finding/halo_objects.py
--- a/yt/analysis_modules/halo_finding/halo_objects.py
+++ b/yt/analysis_modules/halo_finding/halo_objects.py
@@ -129,7 +129,7 @@
"""
if self.CoM is not None:
return self.CoM
- pm = self["particle_mass"]
+ pm = self["particle_mass"].in_units('Msun')
c = {}
# We shift into a box where the origin is the left edge
c[0] = self["particle_position_x"] - self.pf.domain_left_edge[0]
@@ -199,7 +199,7 @@
"""
if self.group_total_mass is not None:
return self.group_total_mass
- return self["particle_mass"].sum()
+ return self["particle_mass"].in_units('Msun').sum()
def bulk_velocity(self):
r"""Returns the mass-weighted average velocity in cm/s.
@@ -213,7 +213,7 @@
"""
if self.bulk_vel is not None:
return self.bulk_vel
- pm = self["particle_mass"]
+ pm = self["particle_mass"].in_units('Msun')
vx = (self["particle_velocity_x"] * pm).sum()
vy = (self["particle_velocity_y"] * pm).sum()
vz = (self["particle_velocity_z"] * pm).sum()
@@ -234,7 +234,7 @@
if self.rms_vel is not None:
return self.rms_vel
bv = self.bulk_velocity()
- pm = self["particle_mass"]
+ pm = self["particle_mass"].in_units('Msun')
sm = pm.sum()
vx = (self["particle_velocity_x"] - bv[0]) * pm / sm
vy = (self["particle_velocity_y"] - bv[1]) * pm / sm
@@ -331,7 +331,7 @@
handle.create_group("/%s" % gn)
for field in ["particle_position_%s" % ax for ax in 'xyz'] \
+ ["particle_velocity_%s" % ax for ax in 'xyz'] \
- + ["particle_index"] + ["particle_mass"]:
+ + ["particle_index"] + ["particle_mass"].in_units('Msun'):
handle.create_dataset("/%s/%s" % (gn, field), data=self[field])
if 'creation_time' in self.data.pf.field_list:
handle.create_dataset("/%s/creation_time" % gn,
@@ -464,7 +464,7 @@
if self["particle_position_x"].size > 1:
for index in np.unique(inds):
self.mass_bins[index] += \
- np.sum(self["particle_mass"][inds == index])
+ np.sum(self["particle_mass"][inds == index]).in_units('Msun')
# Now forward sum the masses in the bins.
for i in xrange(self.bin_count):
self.mass_bins[i + 1] += self.mass_bins[i]
@@ -750,7 +750,7 @@
inds = np.digitize(dist, self.radial_bins) - 1
for index in np.unique(inds):
self.mass_bins[index] += \
- np.sum(self["particle_mass"][inds == index])
+ np.sum(self["particle_mass"][inds == index]).in_units('Msun')
# Now forward sum the masses in the bins.
for i in xrange(self.bin_count):
self.mass_bins[i + 1] += self.mass_bins[i]
@@ -1368,7 +1368,7 @@
RunHOP(self.particle_fields["particle_position_x"] / self.period[0],
self.particle_fields["particle_position_y"] / self.period[1],
self.particle_fields["particle_position_z"] / self.period[2],
- self.particle_fields["particle_mass"],
+ self.particle_fields["particle_mass"].in_units('Msun'),
self.threshold)
self.particle_fields["densities"] = self.densities
self.particle_fields["tags"] = self.tags
@@ -2386,9 +2386,9 @@
if dm_only:
select = self._get_dm_indices()
total_mass = \
- self.comm.mpi_allreduce((self._data_source['all', "particle_mass"][select]).sum(dtype='float64'), op='sum')
+ self.comm.mpi_allreduce((self._data_source['all', "particle_mass"][select].in_units('Msun')).sum(dtype='float64'), op='sum')
else:
- total_mass = self.comm.mpi_allreduce(self._data_source.quantities["TotalQuantity"]("particle_mass")[0], op='sum')
+ total_mass = self.comm.mpi_allreduce(self._data_source.quantities["TotalQuantity"]("particle_mass")[0].in_units('Msun'), op='sum')
# MJT: Note that instead of this, if we are assuming that the particles
# are all on different processors, we should instead construct an
# object representing the entire domain and sum it "lazily" with
@@ -2409,10 +2409,10 @@
sub_mass = total_mass
elif dm_only:
select = self._get_dm_indices()
- sub_mass = self._data_source["particle_mass"][select].sum(dtype='float64')
+ sub_mass = self._data_source["particle_mass"][select].in_units('Msun').sum(dtype='float64')
else:
sub_mass = \
- self._data_source.quantities["TotalQuantity"]("particle_mass")[0]
+ self._data_source.quantities["TotalQuantity"]("particle_mass")[0].in_units('Msun')
HOPHaloList.__init__(self, self._data_source,
threshold * total_mass / sub_mass, dm_only)
self._parse_halolist(total_mass / sub_mass)
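This changeset layers explicit unit conversion onto the rename: wherever the halo finder previously assumed solar masses, the unit-aware field is now converted with .in_units('Msun'). A short sketch of the pattern, reusing the hypothetical ds/ad from the note above:

    pm = ad["particle_mass"]        # YTArray in the dataset's native units
    pm_msun = pm.in_units("Msun")   # explicit conversion to solar masses
    total = pm_msun.sum()           # reductions carry the attached units along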
https://bitbucket.org/yt_analysis/yt/commits/d8ea30c8b4cc/
Changeset: d8ea30c8b4cc
Branch: yt-3.0
User: hegan
Date: 2014-04-15 17:17:02
Summary: fiiiiine
Affected #: 1 file
diff -r 1ed14a685b0d7afbc644cb8737b4413f30cb9eb6 -r d8ea30c8b4cce665aa36c503676eda93b382abe2 yt/analysis_modules/halo_finding/halo_objects.py
--- a/yt/analysis_modules/halo_finding/halo_objects.py
+++ b/yt/analysis_modules/halo_finding/halo_objects.py
@@ -1589,7 +1589,7 @@
self.comm.mpi_exit_test(exit)
# Try to do this in a memory conservative way.
- np.divide(self.particle_fields['particle_mass'], self.total_mass,
+ np.divide(self.particle_fields['particle_mass'].in_units('Msun'), self.total_mass,
self.particle_fields['particle_mass'])
np.divide(self.particle_fields["particle_position_x"],
self.old_period[0], self.particle_fields["particle_position_x"])
@@ -2190,7 +2190,7 @@
# Now we get the full box mass after we have the final composition of
# subvolumes.
if total_mass is None:
- total_mass = self.comm.mpi_allreduce((self._data_source["particle_mass"].astype('float64')).sum(),
+ total_mass = self.comm.mpi_allreduce((self._data_source["particle_mass"].in_units('Msun').astype('float64')).sum(),
op='sum')
if not self._distributed:
self.padding = (np.zeros(3, dtype='float64'),
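The two hunks above sit inside the "memory conservative" normalization, which leans on numpy's three-argument divide writing into an existing array rather than allocating a fresh one. A toy illustration of that pattern with made-up values:

    import numpy as np

    # Stand-in for particle_fields['particle_mass']; values are made up.
    masses = np.array([1.0e10, 5.0e9, 2.5e9])
    total_mass = masses.sum()

    # The third argument is the output buffer: masses is overwritten in
    # place, so no temporary the size of the particle data is allocated.
    np.divide(masses, total_mass, masses)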
https://bitbucket.org/yt_analysis/yt/commits/a1842ded9f19/
Changeset: a1842ded9f19
Branch: yt-3.0
User: brittonsmith
Date: 2014-04-16 12:53:50
Summary: Merged in hegan/yt/yt-3.0 (pull request #817)
ParticleMassMsun -> particle_mass
Affected #: 1 file
diff -r 5acca4b24803da91accfcb8937aed96432fe7edd -r a1842ded9f19a5ef60f14da86d71770fe08a7388 yt/analysis_modules/halo_finding/halo_objects.py
--- a/yt/analysis_modules/halo_finding/halo_objects.py
+++ b/yt/analysis_modules/halo_finding/halo_objects.py
@@ -129,7 +129,7 @@
"""
if self.CoM is not None:
return self.CoM
- pm = self["ParticleMassMsun"]
+ pm = self["particle_mass"].in_units('Msun')
c = {}
# We shift into a box where the origin is the left edge
c[0] = self["particle_position_x"] - self.pf.domain_left_edge[0]
@@ -199,7 +199,7 @@
"""
if self.group_total_mass is not None:
return self.group_total_mass
- return self["ParticleMassMsun"].sum()
+ return self["particle_mass"].in_units('Msun').sum()
def bulk_velocity(self):
r"""Returns the mass-weighted average velocity in cm/s.
@@ -213,7 +213,7 @@
"""
if self.bulk_vel is not None:
return self.bulk_vel
- pm = self["ParticleMassMsun"]
+ pm = self["particle_mass"].in_units('Msun')
vx = (self["particle_velocity_x"] * pm).sum()
vy = (self["particle_velocity_y"] * pm).sum()
vz = (self["particle_velocity_z"] * pm).sum()
@@ -234,7 +234,7 @@
if self.rms_vel is not None:
return self.rms_vel
bv = self.bulk_velocity()
- pm = self["ParticleMassMsun"]
+ pm = self["particle_mass"].in_units('Msun')
sm = pm.sum()
vx = (self["particle_velocity_x"] - bv[0]) * pm / sm
vy = (self["particle_velocity_y"] - bv[1]) * pm / sm
@@ -331,7 +331,7 @@
handle.create_group("/%s" % gn)
for field in ["particle_position_%s" % ax for ax in 'xyz'] \
+ ["particle_velocity_%s" % ax for ax in 'xyz'] \
- + ["particle_index"] + ["ParticleMassMsun"]:
+ + ["particle_index"] + ["particle_mass"].in_units('Msun'):
handle.create_dataset("/%s/%s" % (gn, field), data=self[field])
if 'creation_time' in self.data.pf.field_list:
handle.create_dataset("/%s/creation_time" % gn,
@@ -464,7 +464,7 @@
if self["particle_position_x"].size > 1:
for index in np.unique(inds):
self.mass_bins[index] += \
- np.sum(self["ParticleMassMsun"][inds == index])
+ np.sum(self["particle_mass"][inds == index]).in_units('Msun')
# Now forward sum the masses in the bins.
for i in xrange(self.bin_count):
self.mass_bins[i + 1] += self.mass_bins[i]
@@ -750,7 +750,7 @@
inds = np.digitize(dist, self.radial_bins) - 1
for index in np.unique(inds):
self.mass_bins[index] += \
- np.sum(self["ParticleMassMsun"][inds == index])
+ np.sum(self["particle_mass"][inds == index]).in_units('Msun')
# Now forward sum the masses in the bins.
for i in xrange(self.bin_count):
self.mass_bins[i + 1] += self.mass_bins[i]
@@ -1356,7 +1356,7 @@
_name = "HOP"
_halo_class = HOPHalo
_fields = ["particle_position_%s" % ax for ax in 'xyz'] + \
- ["ParticleMassMsun"]
+ ["particle_mass"]
def __init__(self, data_source, threshold=160.0, dm_only=True):
self.threshold = threshold
@@ -1368,7 +1368,7 @@
RunHOP(self.particle_fields["particle_position_x"] / self.period[0],
self.particle_fields["particle_position_y"] / self.period[1],
self.particle_fields["particle_position_z"] / self.period[2],
- self.particle_fields["ParticleMassMsun"],
+ self.particle_fields["particle_mass"].in_units('Msun'),
self.threshold)
self.particle_fields["densities"] = self.densities
self.particle_fields["tags"] = self.tags
@@ -1555,7 +1555,7 @@
_name = "parallelHOP"
_halo_class = parallelHOPHalo
_fields = ["particle_position_%s" % ax for ax in 'xyz'] + \
- ["ParticleMassMsun", "particle_index"]
+ ["particle_mass", "particle_index"]
def __init__(self, data_source, padding, num_neighbors, bounds, total_mass,
period, threshold=160.0, dm_only=True, rearrange=True, premerge=True,
@@ -1589,8 +1589,8 @@
self.comm.mpi_exit_test(exit)
# Try to do this in a memory conservative way.
- np.divide(self.particle_fields['ParticleMassMsun'], self.total_mass,
- self.particle_fields['ParticleMassMsun'])
+ np.divide(self.particle_fields['particle_mass'].in_units('Msun'), self.total_mass,
+ self.particle_fields['particle_mass'])
np.divide(self.particle_fields["particle_position_x"],
self.old_period[0], self.particle_fields["particle_position_x"])
np.divide(self.particle_fields["particle_position_y"],
@@ -2190,7 +2190,7 @@
# Now we get the full box mass after we have the final composition of
# subvolumes.
if total_mass is None:
- total_mass = self.comm.mpi_allreduce((self._data_source["ParticleMassMsun"].astype('float64')).sum(),
+ total_mass = self.comm.mpi_allreduce((self._data_source["particle_mass"].in_units('Msun').astype('float64')).sum(),
op='sum')
if not self._distributed:
self.padding = (np.zeros(3, dtype='float64'),
@@ -2386,9 +2386,9 @@
if dm_only:
select = self._get_dm_indices()
total_mass = \
- self.comm.mpi_allreduce((self._data_source['all', "ParticleMassMsun"][select]).sum(dtype='float64'), op='sum')
+ self.comm.mpi_allreduce((self._data_source['all', "particle_mass"][select].in_units('Msun')).sum(dtype='float64'), op='sum')
else:
- total_mass = self.comm.mpi_allreduce(self._data_source.quantities["TotalQuantity"]("ParticleMassMsun")[0], op='sum')
+ total_mass = self.comm.mpi_allreduce(self._data_source.quantities["TotalQuantity"]("particle_mass")[0].in_units('Msun'), op='sum')
# MJT: Note that instead of this, if we are assuming that the particles
# are all on different processors, we should instead construct an
# object representing the entire domain and sum it "lazily" with
@@ -2409,10 +2409,10 @@
sub_mass = total_mass
elif dm_only:
select = self._get_dm_indices()
- sub_mass = self._data_source["ParticleMassMsun"][select].sum(dtype='float64')
+ sub_mass = self._data_source["particle_mass"][select].in_units('Msun').sum(dtype='float64')
else:
sub_mass = \
- self._data_source.quantities["TotalQuantity"]("ParticleMassMsun")[0]
+ self._data_source.quantities["TotalQuantity"]("particle_mass")[0].in_units('Msun')
HOPHaloList.__init__(self, self._data_source,
threshold * total_mass / sub_mass, dm_only)
self._parse_halolist(total_mass / sub_mass)
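The merge gathers the three changesets above into yt-3.0. Its diff also covers the derived-quantity path for the box mass; stripped of the MPI reduction, that call reads, schematically (ad again standing in for the halo finder's data source):

    total = ad.quantities["TotalQuantity"]("particle_mass")[0].in_units("Msun")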
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
it because you have the commit notification service enabled and are
the addressed recipient of this email.