[yt-svn] commit/yt: 14 new changesets
commits-noreply at bitbucket.org
commits-noreply at bitbucket.org
Fri Mar 14 08:49:00 PDT 2014
14 new commits in yt:
https://bitbucket.org/yt_analysis/yt/commits/c9da29fc3789/
Changeset: c9da29fc3789
Branch: yt-3.0
User: brittonsmith
Date: 2014-02-28 12:59:25
Summary: Casting indices arrays as int64 because we were going beyond the int32 limit with deep hierarchies.
Affected #: 1 file
diff -r cdef3ee466d87c46981123ed451b266fe1ebc8b7 -r c9da29fc3789fb4d181fe19cc3cd63d131fb419d yt/geometry/selection_routines.pyx
--- a/yt/geometry/selection_routines.pyx
+++ b/yt/geometry/selection_routines.pyx
@@ -62,8 +62,8 @@
def convert_mask_to_indices(np.ndarray[np.uint8_t, ndim=3, cast=True] mask,
int count, int transpose = 0):
cdef int i, j, k, cpos
- cdef np.ndarray[np.int32_t, ndim=2] indices
- indices = np.zeros((count, 3), dtype='int32')
+ cdef np.ndarray[np.int64_t, ndim=2] indices
+ indices = np.zeros((count, 3), dtype='int64')
cpos = 0
for i in range(mask.shape[0]):
for j in range(mask.shape[1]):
https://bitbucket.org/yt_analysis/yt/commits/fb0d279be260/
Changeset: fb0d279be260
Branch: yt-3.0
User: brittonsmith
Date: 2014-02-28 14:40:36
Summary: Adding SN_Colour Enzo field.
Affected #: 1 file
diff -r c9da29fc3789fb4d181fe19cc3cd63d131fb419d -r fb0d279be26059c6d636500ae270e20845c5bdb2 yt/frontends/enzo/fields.py
--- a/yt/frontends/enzo/fields.py
+++ b/yt/frontends/enzo/fields.py
@@ -70,6 +70,7 @@
("PhotoGamma", (ra_units, ["photo_gamma"], None)),
("Density", (rho_units, ["density"], None)),
("Metal_Density", (rho_units, ["metal_density"], None)),
+ ("SN_Colour", (rho_units, [], None)),
)
known_particle_fields = (
https://bitbucket.org/yt_analysis/yt/commits/1ae9a0e21658/
Changeset: 1ae9a0e21658
Branch: yt-3.0
User: brittonsmith
Date: 2014-03-02 17:43:23
Summary: Adding catch for another corner case in virial quantities callback.
Affected #: 1 file
diff -r fb0d279be26059c6d636500ae270e20845c5bdb2 -r 1ae9a0e216581adc31b8975214b076513a1370d2 yt/analysis_modules/halo_analysis/halo_callbacks.py
--- a/yt/analysis_modules/halo_analysis/halo_callbacks.py
+++ b/yt/analysis_modules/halo_analysis/halo_callbacks.py
@@ -427,8 +427,12 @@
return
else:
# take first instance of downward intersection with critical value
- index = np.where((vod[:-1] >= critical_overdensity) &
- (vod[1:] < critical_overdensity))[0][0]
+ intersections = (vod[:-1] >= critical_overdensity) & \
+ (vod[1:] < critical_overdensity)
+ if not intersections.any():
+ halo.quantities.update(vquantities)
+ return
+ index = np.where(intersections)[0][0]
for field in fields:
v_prof = profile_data[field][dfilter].to_ndarray()
https://bitbucket.org/yt_analysis/yt/commits/d323612355d9/
Changeset: d323612355d9
Branch: yt-3.0
User: brittonsmith
Date: 2014-03-03 13:31:52
Summary: Fixing case in halo catalog frontend where a file has no halos.
Affected #: 1 file
diff -r 1ae9a0e216581adc31b8975214b076513a1370d2 -r d323612355d94cbb3f985767c8c8a25961a85f94 yt/frontends/halo_catalogs/halo_catalog/io.py
--- a/yt/frontends/halo_catalogs/halo_catalog/io.py
+++ b/yt/frontends/halo_catalogs/halo_catalog/io.py
@@ -82,6 +82,7 @@
data_file.file_id, pcount)
ind = 0
with h5py.File(data_file.filename, "r") as f:
+ if not f.keys(): return None
pos = np.empty((pcount, 3), dtype="float64")
pos = data_file.pf.arr(pos, "code_length")
dx = np.finfo(f['particle_position_x'].dtype).eps
https://bitbucket.org/yt_analysis/yt/commits/187161d0c16a/
Changeset: 187161d0c16a
Branch: yt-3.0
User: brittonsmith
Date: 2014-03-08 11:23:33
Summary: Rockstar particle mass check now uses a tolerance instead of exact equality for min and max particle masses.
Affected #: 1 file
diff -r d323612355d94cbb3f985767c8c8a25961a85f94 -r 187161d0c16a894c34218bdefaac9d62796d03f3 yt/analysis_modules/halo_finding/rockstar/rockstar.py
--- a/yt/analysis_modules/halo_finding/rockstar/rockstar.py
+++ b/yt/analysis_modules/halo_finding/rockstar/rockstar.py
@@ -251,7 +251,7 @@
if particle_mass is None:
pmass_min, pmass_max = dd.quantities.extrema(
(ptype, "particle_mass"), non_zero = True)
- if pmass_min != pmass_max:
+ if np.abs(pmass_max - pmass_min) / pmass_max > 0.01:
raise YTRockstarMultiMassNotSupported(pmass_min, pmass_max,
ptype)
particle_mass = pmass_min
https://bitbucket.org/yt_analysis/yt/commits/45ed00e4a0e3/
Changeset: 45ed00e4a0e3
Branch: yt-3.0
User: MatthewTurk
Date: 2014-03-13 21:41:13
Summary: Adding dicts to return values for particle geometry handler
Affected #: 4 files
diff -r 187161d0c16a894c34218bdefaac9d62796d03f3 -r 45ed00e4a0e3f1cfa0f99ac0f37e4aa8583adc51 yt/frontends/halo_catalogs/rockstar/io.py
--- a/yt/frontends/halo_catalogs/rockstar/io.py
+++ b/yt/frontends/halo_catalogs/rockstar/io.py
@@ -123,4 +123,4 @@
def _identify_fields(self, data_file):
fields = [("halos", f) for f in halo_dt.fields if
"padding" not in f]
- return fields
+ return fields, {}
diff -r 187161d0c16a894c34218bdefaac9d62796d03f3 -r 45ed00e4a0e3f1cfa0f99ac0f37e4aa8583adc51 yt/frontends/sph/io.py
--- a/yt/frontends/sph/io.py
+++ b/yt/frontends/sph/io.py
@@ -162,7 +162,7 @@
# We'll append it anyway.
fields.append((ptype, mname))
f.close()
- return fields
+ return fields, {}
class IOHandlerGadgetHDF5(IOHandlerOWLS):
_data_style = "gadget_hdf5"
@@ -345,7 +345,7 @@
elif req != ptype:
continue
field_list.append((ptype, field))
- return field_list
+ return field_list, {}
class IOHandlerTipsyBinary(BaseIOHandler):
_data_style = "tipsy"
@@ -527,7 +527,7 @@
return self._field_list
def _identify_fields(self, data_file):
- return self._field_list
+ return self._field_list, {}
def _calculate_particle_offsets(self, data_file):
field_offsets = {}
@@ -567,7 +567,7 @@
f = []
for ftype, fname in self.pf.parameters["field_list"]:
f.append((str(ftype), str(fname)))
- return f
+ return f, {}
def _read_particle_coords(self, chunks, ptf):
chunks = list(chunks)
diff -r 187161d0c16a894c34218bdefaac9d62796d03f3 -r 45ed00e4a0e3f1cfa0f99ac0f37e4aa8583adc51 yt/frontends/stream/io.py
--- a/yt/frontends/stream/io.py
+++ b/yt/frontends/stream/io.py
@@ -161,7 +161,7 @@
return {'io': npart}
def _identify_fields(self, data_file):
- return self.fields[data_file.filename].keys()
+ return self.fields[data_file.filename].keys(), {}
class IOHandlerStreamHexahedral(BaseIOHandler):
_data_style = "stream_hexahedral"
diff -r 187161d0c16a894c34218bdefaac9d62796d03f3 -r 45ed00e4a0e3f1cfa0f99ac0f37e4aa8583adc51 yt/geometry/particle_geometry_handler.py
--- a/yt/geometry/particle_geometry_handler.py
+++ b/yt/geometry/particle_geometry_handler.py
@@ -117,8 +117,10 @@
def _detect_output_fields(self):
# TODO: Add additional fields
pfl = []
+ units = {}
for dom in self.data_files:
- fl = self.io._identify_fields(dom)
+ fl, _units = self.io._identify_fields(dom)
+ units.update(units)
dom._calculate_offsets(fl)
for f in fl:
if f not in pfl: pfl.append(f)
@@ -127,6 +129,7 @@
pf.particle_types = tuple(set(pt for pt, pf in pfl))
# This is an attribute that means these particle types *actually*
# exist. As in, they are real, in the dataset.
+ self.field_units = units
pf.particle_types_raw = pf.particle_types
def _setup_classes(self):
https://bitbucket.org/yt_analysis/yt/commits/079c6debf95a/
Changeset: 079c6debf95a
Branch: yt-3.0
User: brittonsmith
Date: 2014-03-13 21:42:55
Summary: Adding units to field detection for halo catalog frontend.
Affected #: 1 file
diff -r 187161d0c16a894c34218bdefaac9d62796d03f3 -r 079c6debf95a79387e66ceb1dd9e462204489b92 yt/frontends/halo_catalogs/halo_catalog/io.py
--- a/yt/frontends/halo_catalogs/halo_catalog/io.py
+++ b/yt/frontends/halo_catalogs/halo_catalog/io.py
@@ -114,4 +114,6 @@
def _identify_fields(self, data_file):
with h5py.File(data_file.filename, "r") as f:
fields = [("halos", field) for field in f]
- return fields
+ units = dict([(("halos", field),
+ f[field].attrs["units"]) for field in f])
+ return fields, units
https://bitbucket.org/yt_analysis/yt/commits/8caeac0b1df9/
Changeset: 8caeac0b1df9
Branch: yt-3.0
User: brittonsmith
Date: 2014-03-13 21:43:11
Summary: Merging.
Affected #: 4 files
diff -r 079c6debf95a79387e66ceb1dd9e462204489b92 -r 8caeac0b1df90b240c244de32e962250a1cc1fa7 yt/frontends/halo_catalogs/rockstar/io.py
--- a/yt/frontends/halo_catalogs/rockstar/io.py
+++ b/yt/frontends/halo_catalogs/rockstar/io.py
@@ -123,4 +123,4 @@
def _identify_fields(self, data_file):
fields = [("halos", f) for f in halo_dt.fields if
"padding" not in f]
- return fields
+ return fields, {}
diff -r 079c6debf95a79387e66ceb1dd9e462204489b92 -r 8caeac0b1df90b240c244de32e962250a1cc1fa7 yt/frontends/sph/io.py
--- a/yt/frontends/sph/io.py
+++ b/yt/frontends/sph/io.py
@@ -162,7 +162,7 @@
# We'll append it anyway.
fields.append((ptype, mname))
f.close()
- return fields
+ return fields, {}
class IOHandlerGadgetHDF5(IOHandlerOWLS):
_data_style = "gadget_hdf5"
@@ -345,7 +345,7 @@
elif req != ptype:
continue
field_list.append((ptype, field))
- return field_list
+ return field_list, {}
class IOHandlerTipsyBinary(BaseIOHandler):
_data_style = "tipsy"
@@ -527,7 +527,7 @@
return self._field_list
def _identify_fields(self, data_file):
- return self._field_list
+ return self._field_list, {}
def _calculate_particle_offsets(self, data_file):
field_offsets = {}
@@ -567,7 +567,7 @@
f = []
for ftype, fname in self.pf.parameters["field_list"]:
f.append((str(ftype), str(fname)))
- return f
+ return f, {}
def _read_particle_coords(self, chunks, ptf):
chunks = list(chunks)
diff -r 079c6debf95a79387e66ceb1dd9e462204489b92 -r 8caeac0b1df90b240c244de32e962250a1cc1fa7 yt/frontends/stream/io.py
--- a/yt/frontends/stream/io.py
+++ b/yt/frontends/stream/io.py
@@ -161,7 +161,7 @@
return {'io': npart}
def _identify_fields(self, data_file):
- return self.fields[data_file.filename].keys()
+ return self.fields[data_file.filename].keys(), {}
class IOHandlerStreamHexahedral(BaseIOHandler):
_data_style = "stream_hexahedral"
diff -r 079c6debf95a79387e66ceb1dd9e462204489b92 -r 8caeac0b1df90b240c244de32e962250a1cc1fa7 yt/geometry/particle_geometry_handler.py
--- a/yt/geometry/particle_geometry_handler.py
+++ b/yt/geometry/particle_geometry_handler.py
@@ -117,8 +117,10 @@
def _detect_output_fields(self):
# TODO: Add additional fields
pfl = []
+ units = {}
for dom in self.data_files:
- fl = self.io._identify_fields(dom)
+ fl, _units = self.io._identify_fields(dom)
+ units.update(units)
dom._calculate_offsets(fl)
for f in fl:
if f not in pfl: pfl.append(f)
@@ -127,6 +129,7 @@
pf.particle_types = tuple(set(pt for pt, pf in pfl))
# This is an attribute that means these particle types *actually*
# exist. As in, they are real, in the dataset.
+ self.field_units = units
pf.particle_types_raw = pf.particle_types
def _setup_classes(self):
https://bitbucket.org/yt_analysis/yt/commits/cc8d3421d1e7/
Changeset: cc8d3421d1e7
Branch: yt-3.0
User: brittonsmith
Date: 2014-03-13 21:48:16
Summary: Adding attempt to get units for detected fields.
Affected #: 1 file
diff -r 8caeac0b1df90b240c244de32e962250a1cc1fa7 -r cc8d3421d1e724503a8678fae9d1ec5be0561c2b yt/fields/field_info_container.py
--- a/yt/fields/field_info_container.py
+++ b/yt/fields/field_info_container.py
@@ -90,7 +90,8 @@
raise RuntimeError
if field[0] not in self.pf.particle_types:
continue
- self.add_output_field(field, units = "",
+ self.add_output_field(field,
+ units = self.pf.field_units.get(field, ""),
particle_type = True)
def setup_fluid_aliases(self):
https://bitbucket.org/yt_analysis/yt/commits/b190f9922902/
Changeset: b190f9922902
Branch: yt-3.0
User: MatthewTurk
Date: 2014-03-13 21:51:31
Summary: Adding field_units attribute
Affected #: 2 files
diff -r cc8d3421d1e724503a8678fae9d1ec5be0561c2b -r b190f9922902620b1a72a18d6f47ea6358fd7533 yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -71,6 +71,7 @@
_particle_velocity_name = None
particle_unions = None
known_filters = None
+ field_units = None
class __metaclass__(type):
def __init__(cls, name, b, d):
@@ -110,6 +111,7 @@
self.parameters = {}
self.known_filters = self.known_filters or {}
self.particle_unions = self.particle_unions or {}
+ self.field_units = self.field_units or {}
# path stuff
self.parameter_filename = str(filename)
diff -r cc8d3421d1e724503a8678fae9d1ec5be0561c2b -r b190f9922902620b1a72a18d6f47ea6358fd7533 yt/geometry/particle_geometry_handler.py
--- a/yt/geometry/particle_geometry_handler.py
+++ b/yt/geometry/particle_geometry_handler.py
@@ -129,7 +129,7 @@
pf.particle_types = tuple(set(pt for pt, pf in pfl))
# This is an attribute that means these particle types *actually*
# exist. As in, they are real, in the dataset.
- self.field_units = units
+ pf.field_units.update(units)
pf.particle_types_raw = pf.particle_types
def _setup_classes(self):
https://bitbucket.org/yt_analysis/yt/commits/e58cecb96940/
Changeset: e58cecb96940
Branch: yt-3.0
User: MatthewTurk
Date: 2014-03-13 21:59:14
Summary: Fixing units variable name
Affected #: 1 file
diff -r b190f9922902620b1a72a18d6f47ea6358fd7533 -r e58cecb9694014900804b391b13a5a2bbf8921ac yt/geometry/particle_geometry_handler.py
--- a/yt/geometry/particle_geometry_handler.py
+++ b/yt/geometry/particle_geometry_handler.py
@@ -120,7 +120,7 @@
units = {}
for dom in self.data_files:
fl, _units = self.io._identify_fields(dom)
- units.update(units)
+ units.update(_units)
dom._calculate_offsets(fl)
for f in fl:
if f not in pfl: pfl.append(f)
https://bitbucket.org/yt_analysis/yt/commits/d344ec6aacfa/
Changeset: d344ec6aacfa
Branch: yt-3.0
User: MatthewTurk
Date: 2014-03-13 22:08:40
Summary: Set the units correctly for particle unions
Affected #: 1 file
diff -r e58cecb9694014900804b391b13a5a2bbf8921ac -r d344ec6aacfa045a7b59dc508940239cbdb4e10d yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -307,6 +307,12 @@
f = self.particle_fields_by_type
fields = set_intersection([f[s] for s in union
if s in self.particle_types_raw])
+ for field in fields:
+ units = set([])
+ for s in union:
+ units.add(self.field_units.get((s, field), ""))
+ if len(units) == 1:
+ self.field_units[union.name, field] = list(units)[0]
self.particle_types += (union.name,)
self.particle_unions[union.name] = union
fields = [ (union.name, field) for field in fields]
https://bitbucket.org/yt_analysis/yt/commits/5f6fd78389bd/
Changeset: 5f6fd78389bd
Branch: yt-3.0
User: MatthewTurk
Date: 2014-03-13 22:14:30
Summary: Merging ytep0012 bookmark into experimental bookmark.
Affected #: 11 files
diff -r cc4e15d9f4d95b558690cd94e4f74b30bed6a02b -r 5f6fd78389bda6de168d6f2f0ae00b8b467875c2 yt/analysis_modules/halo_analysis/halo_callbacks.py
--- a/yt/analysis_modules/halo_analysis/halo_callbacks.py
+++ b/yt/analysis_modules/halo_analysis/halo_callbacks.py
@@ -427,8 +427,12 @@
return
else:
# take first instance of downward intersection with critical value
- index = np.where((vod[:-1] >= critical_overdensity) &
- (vod[1:] < critical_overdensity))[0][0]
+ intersections = (vod[:-1] >= critical_overdensity) & \
+ (vod[1:] < critical_overdensity)
+ if not intersections.any():
+ halo.quantities.update(vquantities)
+ return
+ index = np.where(intersections)[0][0]
for field in fields:
v_prof = profile_data[field][dfilter].to_ndarray()
diff -r cc4e15d9f4d95b558690cd94e4f74b30bed6a02b -r 5f6fd78389bda6de168d6f2f0ae00b8b467875c2 yt/analysis_modules/halo_finding/rockstar/rockstar.py
--- a/yt/analysis_modules/halo_finding/rockstar/rockstar.py
+++ b/yt/analysis_modules/halo_finding/rockstar/rockstar.py
@@ -255,7 +255,7 @@
if particle_mass is None:
pmass_min, pmass_max = dd.quantities.extrema(
(ptype, "particle_mass"), non_zero = True)
- if pmass_min != pmass_max:
+ if np.abs(pmass_max - pmass_min) / pmass_max > 0.01:
raise YTRockstarMultiMassNotSupported(pmass_min, pmass_max,
ptype)
particle_mass = pmass_min
diff -r cc4e15d9f4d95b558690cd94e4f74b30bed6a02b -r 5f6fd78389bda6de168d6f2f0ae00b8b467875c2 yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -94,6 +94,7 @@
particle_unions = None
known_filters = None
_index_class = None
+ field_units = None
class __metaclass__(type):
def __init__(cls, name, b, d):
@@ -133,6 +134,7 @@
self.parameters = {}
self.known_filters = self.known_filters or {}
self.particle_unions = self.particle_unions or {}
+ self.field_units = self.field_units or {}
# path stuff
self.parameter_filename = str(filename)
@@ -345,6 +347,12 @@
f = self.particle_fields_by_type
fields = set_intersection([f[s] for s in union
if s in self.particle_types_raw])
+ for field in fields:
+ units = set([])
+ for s in union:
+ units.add(self.field_units.get((s, field), ""))
+ if len(units) == 1:
+ self.field_units[union.name, field] = list(units)[0]
self.particle_types += (union.name,)
self.particle_unions[union.name] = union
fields = [ (union.name, field) for field in fields]
diff -r cc4e15d9f4d95b558690cd94e4f74b30bed6a02b -r 5f6fd78389bda6de168d6f2f0ae00b8b467875c2 yt/fields/field_info_container.py
--- a/yt/fields/field_info_container.py
+++ b/yt/fields/field_info_container.py
@@ -90,7 +90,8 @@
raise RuntimeError
if field[0] not in self.pf.particle_types:
continue
- self.add_output_field(field, units = "",
+ self.add_output_field(field,
+ units = self.pf.field_units.get(field, ""),
particle_type = True)
def setup_fluid_aliases(self):
diff -r cc4e15d9f4d95b558690cd94e4f74b30bed6a02b -r 5f6fd78389bda6de168d6f2f0ae00b8b467875c2 yt/frontends/enzo/fields.py
--- a/yt/frontends/enzo/fields.py
+++ b/yt/frontends/enzo/fields.py
@@ -70,6 +70,7 @@
("PhotoGamma", (ra_units, ["photo_gamma"], None)),
("Density", (rho_units, ["density"], None)),
("Metal_Density", (rho_units, ["metal_density"], None)),
+ ("SN_Colour", (rho_units, [], None)),
)
known_particle_fields = (
diff -r cc4e15d9f4d95b558690cd94e4f74b30bed6a02b -r 5f6fd78389bda6de168d6f2f0ae00b8b467875c2 yt/frontends/halo_catalogs/halo_catalog/io.py
--- a/yt/frontends/halo_catalogs/halo_catalog/io.py
+++ b/yt/frontends/halo_catalogs/halo_catalog/io.py
@@ -82,6 +82,7 @@
data_file.file_id, pcount)
ind = 0
with h5py.File(data_file.filename, "r") as f:
+ if not f.keys(): return None
pos = np.empty((pcount, 3), dtype="float64")
pos = data_file.pf.arr(pos, "code_length")
dx = np.finfo(f['particle_position_x'].dtype).eps
@@ -113,4 +114,6 @@
def _identify_fields(self, data_file):
with h5py.File(data_file.filename, "r") as f:
fields = [("halos", field) for field in f]
- return fields
+ units = dict([(("halos", field),
+ f[field].attrs["units"]) for field in f])
+ return fields, units
diff -r cc4e15d9f4d95b558690cd94e4f74b30bed6a02b -r 5f6fd78389bda6de168d6f2f0ae00b8b467875c2 yt/frontends/halo_catalogs/rockstar/io.py
--- a/yt/frontends/halo_catalogs/rockstar/io.py
+++ b/yt/frontends/halo_catalogs/rockstar/io.py
@@ -123,4 +123,4 @@
def _identify_fields(self, data_file):
fields = [("halos", f) for f in halo_dt.fields if
"padding" not in f]
- return fields
+ return fields, {}
diff -r cc4e15d9f4d95b558690cd94e4f74b30bed6a02b -r 5f6fd78389bda6de168d6f2f0ae00b8b467875c2 yt/frontends/sph/io.py
--- a/yt/frontends/sph/io.py
+++ b/yt/frontends/sph/io.py
@@ -162,7 +162,7 @@
# We'll append it anyway.
fields.append((ptype, mname))
f.close()
- return fields
+ return fields, {}
class IOHandlerGadgetHDF5(IOHandlerOWLS):
_dataset_type = "gadget_hdf5"
@@ -345,7 +345,7 @@
elif req != ptype:
continue
field_list.append((ptype, field))
- return field_list
+ return field_list, {}
class IOHandlerTipsyBinary(BaseIOHandler):
_dataset_type = "tipsy"
@@ -527,7 +527,7 @@
return self._field_list
def _identify_fields(self, data_file):
- return self._field_list
+ return self._field_list, {}
def _calculate_particle_offsets(self, data_file):
field_offsets = {}
@@ -567,7 +567,7 @@
f = []
for ftype, fname in self.pf.parameters["field_list"]:
f.append((str(ftype), str(fname)))
- return f
+ return f, {}
def _read_particle_coords(self, chunks, ptf):
chunks = list(chunks)
diff -r cc4e15d9f4d95b558690cd94e4f74b30bed6a02b -r 5f6fd78389bda6de168d6f2f0ae00b8b467875c2 yt/frontends/stream/io.py
--- a/yt/frontends/stream/io.py
+++ b/yt/frontends/stream/io.py
@@ -161,7 +161,7 @@
return {'io': npart}
def _identify_fields(self, data_file):
- return self.fields[data_file.filename].keys()
+ return self.fields[data_file.filename].keys(), {}
class IOHandlerStreamHexahedral(BaseIOHandler):
_dataset_type = "stream_hexahedral"
diff -r cc4e15d9f4d95b558690cd94e4f74b30bed6a02b -r 5f6fd78389bda6de168d6f2f0ae00b8b467875c2 yt/geometry/particle_geometry_handler.py
--- a/yt/geometry/particle_geometry_handler.py
+++ b/yt/geometry/particle_geometry_handler.py
@@ -117,8 +117,10 @@
def _detect_output_fields(self):
# TODO: Add additional fields
pfl = []
+ units = {}
for dom in self.data_files:
- fl = self.io._identify_fields(dom)
+ fl, _units = self.io._identify_fields(dom)
+ units.update(_units)
dom._calculate_offsets(fl)
for f in fl:
if f not in pfl: pfl.append(f)
@@ -127,6 +129,7 @@
pf.particle_types = tuple(set(pt for pt, pf in pfl))
# This is an attribute that means these particle types *actually*
# exist. As in, they are real, in the dataset.
+ pf.field_units.update(units)
pf.particle_types_raw = pf.particle_types
def _identify_base_chunk(self, dobj):
https://bitbucket.org/yt_analysis/yt/commits/fa45403f8a39/
Changeset: fa45403f8a39
Branch: yt-3.0
User: MatthewTurk
Date: 2014-03-14 16:48:50
Summary: Merged in brittonsmith/yt/yt-3.0 (pull request #720)
Detected Field Units for HaloCatalog Frontend and other Bug Fixes
Affected #: 11 files
diff -r 4fdf65441a28c200fab0128f55bb43808247fc21 -r fa45403f8a392652e6ec58d992f49dbdeed64899 yt/analysis_modules/halo_analysis/halo_callbacks.py
--- a/yt/analysis_modules/halo_analysis/halo_callbacks.py
+++ b/yt/analysis_modules/halo_analysis/halo_callbacks.py
@@ -427,8 +427,12 @@
return
else:
# take first instance of downward intersection with critical value
- index = np.where((vod[:-1] >= critical_overdensity) &
- (vod[1:] < critical_overdensity))[0][0]
+ intersections = (vod[:-1] >= critical_overdensity) & \
+ (vod[1:] < critical_overdensity)
+ if not intersections.any():
+ halo.quantities.update(vquantities)
+ return
+ index = np.where(intersections)[0][0]
for field in fields:
v_prof = profile_data[field][dfilter].to_ndarray()
diff -r 4fdf65441a28c200fab0128f55bb43808247fc21 -r fa45403f8a392652e6ec58d992f49dbdeed64899 yt/analysis_modules/halo_finding/rockstar/rockstar.py
--- a/yt/analysis_modules/halo_finding/rockstar/rockstar.py
+++ b/yt/analysis_modules/halo_finding/rockstar/rockstar.py
@@ -255,7 +255,7 @@
if particle_mass is None:
pmass_min, pmass_max = dd.quantities.extrema(
(ptype, "particle_mass"), non_zero = True)
- if pmass_min != pmass_max:
+ if np.abs(pmass_max - pmass_min) / pmass_max > 0.01:
raise YTRockstarMultiMassNotSupported(pmass_min, pmass_max,
ptype)
particle_mass = pmass_min
diff -r 4fdf65441a28c200fab0128f55bb43808247fc21 -r fa45403f8a392652e6ec58d992f49dbdeed64899 yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -94,6 +94,7 @@
particle_unions = None
known_filters = None
_index_class = None
+ field_units = None
class __metaclass__(type):
def __init__(cls, name, b, d):
@@ -133,6 +134,7 @@
self.parameters = {}
self.known_filters = self.known_filters or {}
self.particle_unions = self.particle_unions or {}
+ self.field_units = self.field_units or {}
# path stuff
self.parameter_filename = str(filename)
@@ -345,6 +347,12 @@
f = self.particle_fields_by_type
fields = set_intersection([f[s] for s in union
if s in self.particle_types_raw])
+ for field in fields:
+ units = set([])
+ for s in union:
+ units.add(self.field_units.get((s, field), ""))
+ if len(units) == 1:
+ self.field_units[union.name, field] = list(units)[0]
self.particle_types += (union.name,)
self.particle_unions[union.name] = union
fields = [ (union.name, field) for field in fields]
diff -r 4fdf65441a28c200fab0128f55bb43808247fc21 -r fa45403f8a392652e6ec58d992f49dbdeed64899 yt/fields/field_info_container.py
--- a/yt/fields/field_info_container.py
+++ b/yt/fields/field_info_container.py
@@ -90,7 +90,8 @@
raise RuntimeError
if field[0] not in self.pf.particle_types:
continue
- self.add_output_field(field, units = "",
+ self.add_output_field(field,
+ units = self.pf.field_units.get(field, ""),
particle_type = True)
def setup_fluid_aliases(self):
diff -r 4fdf65441a28c200fab0128f55bb43808247fc21 -r fa45403f8a392652e6ec58d992f49dbdeed64899 yt/frontends/enzo/fields.py
--- a/yt/frontends/enzo/fields.py
+++ b/yt/frontends/enzo/fields.py
@@ -70,6 +70,7 @@
("PhotoGamma", (ra_units, ["photo_gamma"], None)),
("Density", (rho_units, ["density"], None)),
("Metal_Density", (rho_units, ["metal_density"], None)),
+ ("SN_Colour", (rho_units, [], None)),
)
known_particle_fields = (
diff -r 4fdf65441a28c200fab0128f55bb43808247fc21 -r fa45403f8a392652e6ec58d992f49dbdeed64899 yt/frontends/halo_catalogs/halo_catalog/io.py
--- a/yt/frontends/halo_catalogs/halo_catalog/io.py
+++ b/yt/frontends/halo_catalogs/halo_catalog/io.py
@@ -82,6 +82,7 @@
data_file.file_id, pcount)
ind = 0
with h5py.File(data_file.filename, "r") as f:
+ if not f.keys(): return None
pos = np.empty((pcount, 3), dtype="float64")
pos = data_file.pf.arr(pos, "code_length")
dx = np.finfo(f['particle_position_x'].dtype).eps
@@ -113,4 +114,6 @@
def _identify_fields(self, data_file):
with h5py.File(data_file.filename, "r") as f:
fields = [("halos", field) for field in f]
- return fields
+ units = dict([(("halos", field),
+ f[field].attrs["units"]) for field in f])
+ return fields, units
diff -r 4fdf65441a28c200fab0128f55bb43808247fc21 -r fa45403f8a392652e6ec58d992f49dbdeed64899 yt/frontends/halo_catalogs/rockstar/io.py
--- a/yt/frontends/halo_catalogs/rockstar/io.py
+++ b/yt/frontends/halo_catalogs/rockstar/io.py
@@ -123,4 +123,4 @@
def _identify_fields(self, data_file):
fields = [("halos", f) for f in halo_dt.fields if
"padding" not in f]
- return fields
+ return fields, {}
diff -r 4fdf65441a28c200fab0128f55bb43808247fc21 -r fa45403f8a392652e6ec58d992f49dbdeed64899 yt/frontends/sph/io.py
--- a/yt/frontends/sph/io.py
+++ b/yt/frontends/sph/io.py
@@ -162,7 +162,7 @@
# We'll append it anyway.
fields.append((ptype, mname))
f.close()
- return fields
+ return fields, {}
class IOHandlerGadgetHDF5(IOHandlerOWLS):
_dataset_type = "gadget_hdf5"
@@ -345,7 +345,7 @@
elif req != ptype:
continue
field_list.append((ptype, field))
- return field_list
+ return field_list, {}
class IOHandlerTipsyBinary(BaseIOHandler):
_dataset_type = "tipsy"
@@ -527,7 +527,7 @@
return self._field_list
def _identify_fields(self, data_file):
- return self._field_list
+ return self._field_list, {}
def _calculate_particle_offsets(self, data_file):
field_offsets = {}
@@ -567,7 +567,7 @@
f = []
for ftype, fname in self.pf.parameters["field_list"]:
f.append((str(ftype), str(fname)))
- return f
+ return f, {}
def _read_particle_coords(self, chunks, ptf):
chunks = list(chunks)
diff -r 4fdf65441a28c200fab0128f55bb43808247fc21 -r fa45403f8a392652e6ec58d992f49dbdeed64899 yt/frontends/stream/io.py
--- a/yt/frontends/stream/io.py
+++ b/yt/frontends/stream/io.py
@@ -161,7 +161,7 @@
return {'io': npart}
def _identify_fields(self, data_file):
- return self.fields[data_file.filename].keys()
+ return self.fields[data_file.filename].keys(), {}
class IOHandlerStreamHexahedral(BaseIOHandler):
_dataset_type = "stream_hexahedral"
diff -r 4fdf65441a28c200fab0128f55bb43808247fc21 -r fa45403f8a392652e6ec58d992f49dbdeed64899 yt/geometry/particle_geometry_handler.py
--- a/yt/geometry/particle_geometry_handler.py
+++ b/yt/geometry/particle_geometry_handler.py
@@ -117,8 +117,10 @@
def _detect_output_fields(self):
# TODO: Add additional fields
pfl = []
+ units = {}
for dom in self.data_files:
- fl = self.io._identify_fields(dom)
+ fl, _units = self.io._identify_fields(dom)
+ units.update(_units)
dom._calculate_offsets(fl)
for f in fl:
if f not in pfl: pfl.append(f)
@@ -127,6 +129,7 @@
pf.particle_types = tuple(set(pt for pt, pf in pfl))
# This is an attribute that means these particle types *actually*
# exist. As in, they are real, in the dataset.
+ pf.field_units.update(units)
pf.particle_types_raw = pf.particle_types
def _identify_base_chunk(self, dobj):
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you have the commit notification service enabled
for this repository.
More information about the yt-svn
mailing list