[yt-svn] commit/yt: xarthisius: Merged in jzuhone/yt (pull request #1872)
commits-noreply at bitbucket.org
commits-noreply at bitbucket.org
Mon Nov 23 14:25:04 PST 2015
1 new commit in yt:
https://bitbucket.org/yt_analysis/yt/commits/129722a2fc4c/
Changeset: 129722a2fc4c
Branch: yt
User: xarthisius
Date: 2015-11-23 22:24:53+00:00
Summary: Merged in jzuhone/yt (pull request #1872)
Changing photon_simulator's output file structure
Affected #: 6 files
diff -r eb45e6e815fb3f781e57ebf9dd941b404da4e030 -r 129722a2fc4cfca129377785af88e615c4aec7a1 doc/source/analyzing/analysis_modules/photon_simulator.rst
--- a/doc/source/analyzing/analysis_modules/photon_simulator.rst
+++ b/doc/source/analyzing/analysis_modules/photon_simulator.rst
@@ -627,3 +627,31 @@
.. image:: _images/ds9_sloshing.png
.. image:: _images/ds9_bubbles.png
+
+In November 2015, the structure of the photon and event HDF5 files changed. To
+convert an old-format file to the new format, use the ``convert_old_file`` utility:
+
+.. code:: python
+
+ from yt.analysis_modules.photon_simulator.api import convert_old_file
+ convert_old_file("old_photons.h5", "new_photons.h5", clobber=True)
+ convert_old_file("old_events.h5", "new_events.h5", clobber=True)
+
+This utility will auto-detect the kind of file (photons or events) and will write
+a new file in the updated format.
+
+At times it may be convenient to write several ``EventLists`` to disk to be merged
+together later. This can be achieved with the ``merge_files`` utility. It takes a
+list of input filenames and the name of the output file:
+
+.. code:: python
+
+ from yt.analysis_modules.photon_simulator.api import merge_files
+ merge_files(["events_0.h5", "events_1.h5", "events_2.h5"], "merged_events.h5",
+ add_exposure_times=True, clobber=False)
+
+At the current time this utility is very limited, as it only allows merging of
+``EventLists`` which have the same parameters, with the exception of the exposure
+time. If the ``add_exposure_times`` argument to ``merge_files`` is set to ``True``,
+the lists will be merged together with the exposure times added. Otherwise, the
+exposure times of the different files must be equal.
diff -r eb45e6e815fb3f781e57ebf9dd941b404da4e030 -r 129722a2fc4cfca129377785af88e615c4aec7a1 yt/analysis_modules/photon_simulator/api.py
--- a/yt/analysis_modules/photon_simulator/api.py
+++ b/yt/analysis_modules/photon_simulator/api.py
@@ -16,7 +16,9 @@
from .photon_simulator import \
PhotonList, \
- EventList
+ EventList, \
+ merge_files, \
+ convert_old_file
from .spectral_models import \
SpectralModel, \
diff -r eb45e6e815fb3f781e57ebf9dd941b404da4e030 -r 129722a2fc4cfca129377785af88e615c4aec7a1 yt/analysis_modules/photon_simulator/photon_models.py
--- a/yt/analysis_modules/photon_simulator/photon_models.py
+++ b/yt/analysis_modules/photon_simulator/photon_models.py
@@ -52,14 +52,13 @@
class ThermalPhotonModel(PhotonModel):
r"""
- Initialize a ThermalPhotonModel from a thermal spectrum.
+ Initialize a ThermalPhotonModel from a thermal spectrum.
Parameters
----------
-
spectral_model : `SpectralModel`
A thermal spectral model instance, either of `XSpecThermalModel`
- or `TableApecModel`.
+ or `TableApecModel`.
X_H : float, optional
The hydrogen mass fraction.
Zmet : float or string, optional
diff -r eb45e6e815fb3f781e57ebf9dd941b404da4e030 -r 129722a2fc4cfca129377785af88e615c4aec7a1 yt/analysis_modules/photon_simulator/photon_simulator.py
--- a/yt/analysis_modules/photon_simulator/photon_simulator.py
+++ b/yt/analysis_modules/photon_simulator/photon_simulator.py
@@ -25,6 +25,7 @@
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
from yt.extern.six import string_types
+from collections import defaultdict
import numpy as np
from yt.funcs import mylog, get_pbar, iterable, ensure_list
from yt.utilities.physical_constants import clight
@@ -38,6 +39,7 @@
from yt.utilities.on_demand_imports import _h5py as h5py
from yt.utilities.on_demand_imports import _astropy
import warnings
+import os
comm = communication_system.communicators[-1]
@@ -53,6 +55,25 @@
else:
return YTQuantity(value, default_units)
+def validate_parameters(first, second, skip=[]):
+ keys1 = list(first.keys())
+ keys2 = list(first.keys())
+ keys1.sort()
+ keys2.sort()
+ if keys1 != keys2:
+ raise RuntimeError("The two inputs do not have the same parameters!")
+ for k1, k2 in zip(keys1, keys2):
+ if k1 not in skip:
+ v1 = first[k1]
+ v2 = second[k2]
+ if isinstance(v1, string_types) or isinstance(v2, string_types):
+ check_equal = v1 == v2
+ else:
+ check_equal = np.allclose(v1, v2, rtol=0.0, atol=1.0e-10)
+ if not check_equal:
+ raise RuntimeError("The values for the parameter '%s' in the two inputs" % k1 +
+ " are not identical (%s vs. %s)!" % (v1, v2))
+
class PhotonList(object):
def __init__(self, photons, parameters, cosmo, p_bins):
@@ -107,29 +128,32 @@
f = h5py.File(filename, "r")
- parameters["FiducialExposureTime"] = YTQuantity(f["/fid_exp_time"].value, "s")
- parameters["FiducialArea"] = YTQuantity(f["/fid_area"].value, "cm**2")
- parameters["FiducialRedshift"] = f["/fid_redshift"].value
- parameters["FiducialAngularDiameterDistance"] = YTQuantity(f["/fid_d_a"].value, "Mpc")
- parameters["Dimension"] = f["/dimension"].value
- parameters["Width"] = YTQuantity(f["/width"].value, "kpc")
- parameters["HubbleConstant"] = f["/hubble"].value
- parameters["OmegaMatter"] = f["/omega_matter"].value
- parameters["OmegaLambda"] = f["/omega_lambda"].value
+ p = f["/parameters"]
+ parameters["FiducialExposureTime"] = YTQuantity(p["fid_exp_time"].value, "s")
+ parameters["FiducialArea"] = YTQuantity(p["fid_area"].value, "cm**2")
+ parameters["FiducialRedshift"] = p["fid_redshift"].value
+ parameters["FiducialAngularDiameterDistance"] = YTQuantity(p["fid_d_a"].value, "Mpc")
+ parameters["Dimension"] = p["dimension"].value
+ parameters["Width"] = YTQuantity(p["width"].value, "kpc")
+ parameters["HubbleConstant"] = p["hubble"].value
+ parameters["OmegaMatter"] = p["omega_matter"].value
+ parameters["OmegaLambda"] = p["omega_lambda"].value
- num_cells = f["/x"][:].shape[0]
+ d = f["/data"]
+
+ num_cells = d["x"][:].shape[0]
start_c = comm.rank*num_cells//comm.size
end_c = (comm.rank+1)*num_cells//comm.size
- photons["x"] = YTArray(f["/x"][start_c:end_c], "kpc")
- photons["y"] = YTArray(f["/y"][start_c:end_c], "kpc")
- photons["z"] = YTArray(f["/z"][start_c:end_c], "kpc")
- photons["dx"] = YTArray(f["/dx"][start_c:end_c], "kpc")
- photons["vx"] = YTArray(f["/vx"][start_c:end_c], "km/s")
- photons["vy"] = YTArray(f["/vy"][start_c:end_c], "km/s")
- photons["vz"] = YTArray(f["/vz"][start_c:end_c], "km/s")
+ photons["x"] = YTArray(d["x"][start_c:end_c], "kpc")
+ photons["y"] = YTArray(d["y"][start_c:end_c], "kpc")
+ photons["z"] = YTArray(d["z"][start_c:end_c], "kpc")
+ photons["dx"] = YTArray(d["dx"][start_c:end_c], "kpc")
+ photons["vx"] = YTArray(d["vx"][start_c:end_c], "km/s")
+ photons["vy"] = YTArray(d["vy"][start_c:end_c], "km/s")
+ photons["vz"] = YTArray(d["vz"][start_c:end_c], "km/s")
- n_ph = f["/num_photons"][:]
+ n_ph = d["num_photons"][:]
if comm.rank == 0:
start_e = np.uint64(0)
@@ -142,7 +166,7 @@
p_bins = np.cumsum(photons["NumberOfPhotons"])
p_bins = np.insert(p_bins, 0, [np.uint64(0)])
- photons["Energy"] = YTArray(f["/energy"][start_e:end_e], "keV")
+ photons["Energy"] = YTArray(d["energy"][start_e:end_e], "keV")
f.close()
@@ -163,7 +187,6 @@
Parameters
----------
-
data_source : `yt.data_objects.data_containers.YTSelectionContainer`
The data source from which the photons will be generated.
redshift : float
@@ -177,9 +200,9 @@
dictionary and returns a *photons* dictionary. Must be of the
form: photon_model(data_source, parameters)
parameters : dict, optional
- A dictionary of parameters to be passed to the user function.
+ A dictionary of parameters to be passed to the user function.
center : string or array_like, optional
- The origin of the photons. Accepts "c", "max", or a coordinate.
+ The origin of the photons. Accepts "c", "max", or a coordinate.
dist : tuple, optional
The angular diameter distance in the form (value, unit), used
mainly for nearby sources. This may be optionally supplied
@@ -191,7 +214,6 @@
Examples
--------
-
This is the simplest possible example, where we call the built-in thermal model:
>>> thermal_model = ThermalPhotonModel(apec_model, Zmet=0.3)
@@ -227,7 +249,7 @@
>>> import numpy as np
>>> import yt
- >>> from yt.analysis_modules.photon_simulator import *
+ >>> from yt.analysis_modules.photon_simulator.api import PhotonList
>>> def line_func(source, parameters):
...
... ds = source.ds
@@ -257,7 +279,7 @@
>>> ddims = (128,128,128)
>>> random_data = {"density":(np.random.random(ddims),"g/cm**3")}
>>> ds = yt.load_uniform_grid(random_data, ddims)
- >>> dd = ds.all_data
+ >>> dd = ds.all_data()
>>> my_photons = PhotonList.from_user_model(dd, redshift, area,
... time, line_func,
... parameters=parameters)
@@ -357,7 +379,7 @@
if comm.rank == 0:
num_cells = sum(sizes_c)
- num_photons = sum(sizes_p)
+ num_photons = sum(sizes_p)
disps_c = [sum(sizes_c[:i]) for i in range(len(sizes_c))]
disps_p = [sum(sizes_p[:i]) for i in range(len(sizes_p))]
x = np.zeros(num_cells)
@@ -401,7 +423,7 @@
comm.comm.Gatherv([self.photons["NumberOfPhotons"], local_num_cells, mpi_long],
[n_ph, (sizes_c, disps_c), mpi_long], root=0)
comm.comm.Gatherv([self.photons["Energy"].d, local_num_photons, mpi_double],
- [e, (sizes_p, disps_p), mpi_double], root=0)
+ [e, (sizes_p, disps_p), mpi_double], root=0)
else:
@@ -419,35 +441,37 @@
f = h5py.File(photonfile, "w")
- # Scalars
+ # Parameters
- f.create_dataset("fid_area", data=float(self.parameters["FiducialArea"]))
- f.create_dataset("fid_exp_time", data=float(self.parameters["FiducialExposureTime"]))
- f.create_dataset("fid_redshift", data=self.parameters["FiducialRedshift"])
- f.create_dataset("hubble", data=self.parameters["HubbleConstant"])
- f.create_dataset("omega_matter", data=self.parameters["OmegaMatter"])
- f.create_dataset("omega_lambda", data=self.parameters["OmegaLambda"])
- f.create_dataset("fid_d_a", data=float(self.parameters["FiducialAngularDiameterDistance"]))
- f.create_dataset("dimension", data=self.parameters["Dimension"])
- f.create_dataset("width", data=float(self.parameters["Width"]))
+ p = f.create_group("parameters")
+ p.create_dataset("fid_area", data=float(self.parameters["FiducialArea"]))
+ p.create_dataset("fid_exp_time", data=float(self.parameters["FiducialExposureTime"]))
+ p.create_dataset("fid_redshift", data=self.parameters["FiducialRedshift"])
+ p.create_dataset("hubble", data=self.parameters["HubbleConstant"])
+ p.create_dataset("omega_matter", data=self.parameters["OmegaMatter"])
+ p.create_dataset("omega_lambda", data=self.parameters["OmegaLambda"])
+ p.create_dataset("fid_d_a", data=float(self.parameters["FiducialAngularDiameterDistance"]))
+ p.create_dataset("dimension", data=self.parameters["Dimension"])
+ p.create_dataset("width", data=float(self.parameters["Width"]))
- # Arrays
+ # Data
- f.create_dataset("x", data=x)
- f.create_dataset("y", data=y)
- f.create_dataset("z", data=z)
- f.create_dataset("vx", data=vx)
- f.create_dataset("vy", data=vy)
- f.create_dataset("vz", data=vz)
- f.create_dataset("dx", data=dx)
- f.create_dataset("num_photons", data=n_ph)
- f.create_dataset("energy", data=e)
+ d = f.create_group("data")
+ d.create_dataset("x", data=x)
+ d.create_dataset("y", data=y)
+ d.create_dataset("z", data=z)
+ d.create_dataset("vx", data=vx)
+ d.create_dataset("vy", data=vy)
+ d.create_dataset("vz", data=vz)
+ d.create_dataset("dx", data=dx)
+ d.create_dataset("num_photons", data=n_ph)
+ d.create_dataset("energy", data=e)
f.close()
comm.barrier()
- def project_photons(self, normal, area_new=None, exp_time_new=None,
+ def project_photons(self, normal, area_new=None, exp_time_new=None,
redshift_new=None, dist_new=None,
absorb_model=None, psf_sigma=None,
sky_center=None, responses=None,
@@ -458,7 +482,6 @@
Parameters
----------
-
normal : character or array_like
Normal vector to the plane of projection. If "x", "y", or "z", will
assume to be along that axis (and will probably be faster). Otherwise,
@@ -814,7 +837,7 @@
return events, info
-class EventList(object) :
+class EventList(object):
def __init__(self, events, parameters):
self.events = events
@@ -855,22 +878,7 @@
def __add__(self, other):
assert_same_wcs(self.wcs, other.wcs)
- keys1 = list(self.parameters.keys())
- keys2 = list(other.parameters.keys())
- keys1.sort()
- keys2.sort()
- if keys1 != keys2:
- raise RuntimeError("The two EventLists do not have the same parameters!")
- for k1, k2 in zip(keys1, keys2):
- v1 = self.parameters[k1]
- v2 = other.parameters[k2]
- if isinstance(v1, string_types) or isinstance(v2, string_types):
- check_equal = v1 == v2
- else:
- check_equal = np.allclose(v1, v2, rtol=0.0, atol=1.0e-10)
- if not check_equal:
- raise RuntimeError("The values for the parameter '%s' in the two EventLists" % k1 +
- " are not identical (%s vs. %s)!" % (v1, v2))
+ validate_parameters(self.parameters, other.parameters)
events = {}
for item1, item2 in zip(self.items(), other.items()):
k1, v1 = item1
@@ -880,7 +888,7 @@
def filter_events(self, region):
"""
- Filter events using a ds9 region. Requires the pyregion package.
+ Filter events using a ds9 *region*. Requires the pyregion package.
Returns a new EventList.
"""
import pyregion
@@ -909,36 +917,38 @@
f = h5py.File(h5file, "r")
- parameters["ExposureTime"] = YTQuantity(f["/exp_time"].value, "s")
- if isinstance(f["/area"].value, (string_types, bytes)):
- parameters["Area"] = f["/area"].value.decode("utf8")
+ p = f["/parameters"]
+ parameters["ExposureTime"] = YTQuantity(p["exp_time"].value, "s")
+ if isinstance(p["area"].value, (string_types, bytes)):
+ parameters["Area"] = p["area"].value.decode("utf8")
else:
- parameters["Area"] = YTQuantity(f["/area"].value, "cm**2")
- parameters["Redshift"] = f["/redshift"].value
- parameters["AngularDiameterDistance"] = YTQuantity(f["/d_a"].value, "Mpc")
- if "rmf" in f:
- parameters["RMF"] = f["/rmf"].value.decode("utf8")
- if "arf" in f:
- parameters["ARF"] = f["/arf"].value.decode("utf8")
- if "channel_type" in f:
- parameters["ChannelType"] = f["/channel_type"].value.decode("utf8")
- if "mission" in f:
- parameters["Mission"] = f["/mission"].value.decode("utf8")
- if "telescope" in f:
- parameters["Telescope"] = f["/telescope"].value.decode("utf8")
- if "instrument" in f:
- parameters["Instrument"] = f["/instrument"].value.decode("utf8")
+ parameters["Area"] = YTQuantity(p["area"].value, "cm**2")
+ parameters["Redshift"] = p["redshift"].value
+ parameters["AngularDiameterDistance"] = YTQuantity(p["d_a"].value, "Mpc")
+ parameters["sky_center"] = YTArray(p["sky_center"][:], "deg")
+ parameters["dtheta"] = YTQuantity(p["dtheta"].value, "deg")
+ parameters["pix_center"] = p["pix_center"][:]
+ if "rmf" in p:
+ parameters["RMF"] = p["rmf"].value.decode("utf8")
+ if "arf" in p:
+ parameters["ARF"] = p["arf"].value.decode("utf8")
+ if "channel_type" in p:
+ parameters["ChannelType"] = p["channel_type"].value.decode("utf8")
+ if "mission" in p:
+ parameters["Mission"] = p["mission"].value.decode("utf8")
+ if "telescope" in p:
+ parameters["Telescope"] = p["telescope"].value.decode("utf8")
+ if "instrument" in p:
+ parameters["Instrument"] = p["instrument"].value.decode("utf8")
- events["xpix"] = f["/xpix"][:]
- events["ypix"] = f["/ypix"][:]
- events["eobs"] = YTArray(f["/eobs"][:], "keV")
- if "pi" in f:
- events["PI"] = f["/pi"][:]
- if "pha" in f:
- events["PHA"] = f["/pha"][:]
- parameters["sky_center"] = YTArray(f["/sky_center"][:], "deg")
- parameters["dtheta"] = YTQuantity(f["/dtheta"].value, "deg")
- parameters["pix_center"] = f["/pix_center"][:]
+ d = f["/data"]
+ events["xpix"] = d["xpix"][:]
+ events["ypix"] = d["ypix"][:]
+ events["eobs"] = YTArray(d["eobs"][:], "keV")
+ if "pi" in d:
+ events["PI"] = d["pi"][:]
+ if "pha" in d:
+ events["PHA"] = d["pha"][:]
f.close()
@@ -1031,10 +1041,10 @@
time = np.random.uniform(size=self.num_events, low=0.0,
high=float(self.parameters["ExposureTime"]))
- col_t = pyfits.Column(name="TIME", format='1D', unit='s',
+ col_t = pyfits.Column(name="TIME", format='1D', unit='s',
array=time)
cols.append(col_t)
-
+
coldefs = pyfits.ColDefs(cols)
tbhdu = pyfits.BinTableHDU.from_columns(coldefs)
tbhdu.update_ext_name("EVENTS")
@@ -1123,7 +1133,6 @@
Parameters
----------
-
prefix : string
The filename prefix.
clobber : boolean, optional
@@ -1207,38 +1216,40 @@
"""
f = h5py.File(h5file, "w")
- f.create_dataset("/exp_time", data=float(self.parameters["ExposureTime"]))
+ p = f.create_group("parameters")
+ p.create_dataset("exp_time", data=float(self.parameters["ExposureTime"]))
area = self.parameters["Area"]
if not isinstance(area, string_types):
area = float(area)
- f.create_dataset("/area", data=area)
- f.create_dataset("/redshift", data=self.parameters["Redshift"])
- f.create_dataset("/d_a", data=float(self.parameters["AngularDiameterDistance"]))
+ p.create_dataset("area", data=area)
+ p.create_dataset("redshift", data=self.parameters["Redshift"])
+ p.create_dataset("d_a", data=float(self.parameters["AngularDiameterDistance"]))
if "ARF" in self.parameters:
- f.create_dataset("/arf", data=self.parameters["ARF"])
+ p.create_dataset("arf", data=self.parameters["ARF"])
if "RMF" in self.parameters:
- f.create_dataset("/rmf", data=self.parameters["RMF"])
+ p.create_dataset("rmf", data=self.parameters["RMF"])
if "ChannelType" in self.parameters:
- f.create_dataset("/channel_type", data=self.parameters["ChannelType"])
+ p.create_dataset("channel_type", data=self.parameters["ChannelType"])
if "Mission" in self.parameters:
- f.create_dataset("/mission", data=self.parameters["Mission"])
+ p.create_dataset("mission", data=self.parameters["Mission"])
if "Telescope" in self.parameters:
- f.create_dataset("/telescope", data=self.parameters["Telescope"])
+ p.create_dataset("telescope", data=self.parameters["Telescope"])
if "Instrument" in self.parameters:
- f.create_dataset("/instrument", data=self.parameters["Instrument"])
+ p.create_dataset("instrument", data=self.parameters["Instrument"])
+ p.create_dataset("sky_center", data=self.parameters["sky_center"].d)
+ p.create_dataset("pix_center", data=self.parameters["pix_center"])
+ p.create_dataset("dtheta", data=float(self.parameters["dtheta"]))
- f.create_dataset("/xpix", data=self["xpix"])
- f.create_dataset("/ypix", data=self["ypix"])
- f.create_dataset("/xsky", data=self["xsky"].d)
- f.create_dataset("/ysky", data=self["ysky"].d)
- f.create_dataset("/eobs", data=self["eobs"].d)
- if "PI" in self:
- f.create_dataset("/pi", data=self["PI"])
- if "PHA" in self:
- f.create_dataset("/pha", data=self["PHA"])
- f.create_dataset("/sky_center", data=self.parameters["sky_center"].d)
- f.create_dataset("/pix_center", data=self.parameters["pix_center"])
- f.create_dataset("/dtheta", data=float(self.parameters["dtheta"]))
+ d = f.create_group("data")
+ d.create_dataset("xpix", data=self["xpix"])
+ d.create_dataset("ypix", data=self["ypix"])
+ d.create_dataset("xsky", data=self["xsky"].d)
+ d.create_dataset("ysky", data=self["ysky"].d)
+ d.create_dataset("eobs", data=self["eobs"].d)
+ if "PI" in self.events:
+ d.create_dataset("pi", data=self.events["PI"])
+ if "PHA" in self.events:
+ d.create_dataset("pha", data=self.events["PHA"])
f.close()
@@ -1250,7 +1261,6 @@
Parameters
----------
-
imagefile : string
The name of the image file to write.
clobber : boolean, optional
@@ -1309,7 +1319,6 @@
Parameters
----------
-
specfile : string
The name of the FITS file to be written.
bin_type : string, optional
@@ -1417,3 +1426,130 @@
hdulist = pyfits.HDUList([pyfits.PrimaryHDU(), tbhdu])
hdulist.writeto(specfile, clobber=clobber)
+
+def merge_files(input_files, output_file, clobber=False,
+ add_exposure_times=False):
+ r"""
+ Helper function for merging PhotonList or EventList HDF5 files.
+
+ Parameters
+ ----------
+ input_files : list of strings
+ List of filenames that will be merged together.
+ output_file : string
+ Name of the merged file to be outputted.
+ clobber : boolean, default False
+ If the output file already exists, set this to True to
+ overwrite it.
+ add_exposure_times : boolean, default False
+ If set to True, exposure times will be added together. Otherwise,
+ the exposure times of all of the files must be the same.
+
+ Examples
+ --------
+ >>> from yt.analysis_modules.photon_simulator.api import merge_files
+ >>> merge_files(["events_0.h5","events_1.h5","events_3.h5"], "events.h5",
+ ... clobber=True, add_exposure_times=True)
+
+ Notes
+ -----
+ Currently, to merge files it is mandated that all of the parameters have the
+ same values, with the possible exception of the exposure time parameter "exp_time"
+ if add_exposure_times=False.
+ """
+ if os.path.exists(output_file) and not clobber:
+ raise IOError("Cannot overwrite existing file %s. " % output_file +
+ "If you want to do this, set clobber=True.")
+
+ f_in = h5py.File(input_files[0], "r")
+ f_out = h5py.File(output_file, "w")
+
+ exp_time_key = ""
+ p_out = f_out.create_group("parameters")
+ for key, param in f_in["parameters"].items():
+ if key.endswith("exp_time"):
+ exp_time_key = key
+ else:
+ p_out[key] = param.value
+
+ skip = [exp_time_key] if add_exposure_times else []
+ for fn in input_files[1:]:
+ f = h5py.File(fn, "r")
+ validate_parameters(f_in["parameters"], f["parameters"], skip=skip)
+ f.close()
+
+ f_in.close()
+
+ data = defaultdict(list)
+ tot_exp_time = 0.0
+
+ for i, fn in enumerate(input_files):
+ f = h5py.File(fn, "r")
+ if add_exposure_times:
+ tot_exp_time += f["/parameters"][exp_time_key].value
+ elif i == 0:
+ tot_exp_time = f["/parameters"][exp_time_key].value
+ for key in f["/data"]:
+ data[key].append(f["/data"][key][:])
+ f.close()
+
+ p_out["exp_time"] = tot_exp_time
+
+ d = f_out.create_group("data")
+ for k in data:
+ d.create_dataset(k, data=np.concatenate(data[k]))
+
+ f_out.close()
+
+def convert_old_file(input_file, output_file, clobber=False):
+ r"""
+ Helper function for converting old PhotonList or EventList HDF5
+ files (pre yt v3.3) to their new versions.
+
+ Parameters
+ ----------
+ input_file : string
+ The filename of the old-versioned file to be converted.
+ output_file : string
+ Name of the new file to be outputted.
+ clobber : boolean, default False
+ If the output file already exists, set this to True to
+ overwrite it.
+
+ Examples
+ --------
+ >>> from yt.analysis_modules.photon_simulator.api import convert_old_file
+ >>> convert_old_file("photons_old.h5", "photons_new.h5", clobber=True)
+ """
+ if os.path.exists(output_file) and not clobber:
+ raise IOError("Cannot overwrite existing file %s. " % output_file +
+ "If you want to do this, set clobber=True.")
+
+ f_in = h5py.File(input_file, "r")
+
+ if "num_photons" in f_in:
+ params = ["fid_exp_time", "fid_area", "fid_d_a", "fid_redshift",
+ "dimension", "width", "hubble", "omega_matter",
+ "omega_lambda"]
+ data = ["x", "y", "z", "dx", "vx", "vy", "vz", "energy", "num_photons"]
+ elif "pix_center" in f_in:
+ params = ["exp_time", "area", "redshift", "d_a", "arf",
+ "rmf", "channel_type", "mission", "telescope",
+ "instrument", "sky_center", "pix_center", "dtheta"]
+ data = ["xsky", "ysky", "xpix", "ypix", "eobs", "pi", "pha"]
+
+ f_out = h5py.File(output_file, "w")
+
+ p = f_out.create_group("parameters")
+ d = f_out.create_group("data")
+
+ for key in params:
+ if key in f_in:
+ p.create_dataset(key, data=f_in[key].value)
+
+ for key in data:
+ if key in f_in:
+ d.create_dataset(key, data=f_in[key].value)
+
+ f_in.close()
+ f_out.close()
\ No newline at end of file
diff -r eb45e6e815fb3f781e57ebf9dd941b404da4e030 -r 129722a2fc4cfca129377785af88e615c4aec7a1 yt/analysis_modules/photon_simulator/spectral_models.py
--- a/yt/analysis_modules/photon_simulator/spectral_models.py
+++ b/yt/analysis_modules/photon_simulator/spectral_models.py
@@ -51,7 +51,6 @@
Parameters
----------
-
model_name : string
The name of the thermal emission model.
emin : float
@@ -123,7 +122,6 @@
Parameters
----------
-
model_name : string
The name of the absorption model.
nH : float
@@ -180,11 +178,10 @@
Initialize a thermal gas emission model from the AtomDB APEC tables
available at http://www.atomdb.org. This code borrows heavily from Python
routines used to read the APEC tables developed by Adam Foster at the
- CfA (afoster at cfa.harvard.edu).
+ CfA (afoster at cfa.harvard.edu).
Parameters
----------
-
apec_root : string
The directory root where the APEC model files are stored.
emin : float
@@ -323,7 +320,6 @@
Parameters
----------
-
filename : string
The name of the table file.
nH : float
@@ -333,7 +329,6 @@
--------
>>> abs_model = XSpecAbsorbModel("abs_table.h5", 0.1)
"""
-
def __init__(self, filename, nH):
if not os.path.exists(filename):
raise IOError("File does not exist: %s." % filename)
diff -r eb45e6e815fb3f781e57ebf9dd941b404da4e030 -r 129722a2fc4cfca129377785af88e615c4aec7a1 yt/analysis_modules/photon_simulator/tests/test_sloshing.py
--- a/yt/analysis_modules/photon_simulator/tests/test_sloshing.py
+++ b/yt/analysis_modules/photon_simulator/tests/test_sloshing.py
@@ -12,13 +12,18 @@
from yt.analysis_modules.photon_simulator.api import \
TableApecModel, TableAbsorbModel, \
- ThermalPhotonModel, PhotonList
+ ThermalPhotonModel, PhotonList, EventList, \
+ convert_old_file, merge_files
from yt.config import ytcfg
from yt.testing import requires_file
from yt.utilities.answer_testing.framework import requires_ds, \
GenericArrayTest, data_dir_load
+from numpy.testing import assert_array_equal
from numpy.random import RandomState
+from yt.units.yt_array import uconcatenate
import os
+import tempfile
+import shutil
def setup():
from yt.config import ytcfg
@@ -34,9 +39,11 @@
"aciss_aimpt_cy17.arf", "nustar_3arcminA.arf",
"sxt-s_120210_ts02um_intallpxl.arf"]
-gslr = test_data_dir+"/GasSloshingLowRes/sloshing_low_res_hdf5_plt_cnt_0300"
+gslr = "GasSloshingLowRes/sloshing_low_res_hdf5_plt_cnt_0300"
APEC = xray_data_dir
-TBABS = xray_data_dir+"/tbabs_table.h5"
+TBABS = os.path.join(xray_data_dir, "tbabs_table.h5")
+old_photon_file = os.path.join(xray_data_dir, "old_photons.h5")
+old_event_file = os.path.join(xray_data_dir, "old_events.h5")
def return_data(data):
def _return_data(name):
@@ -46,8 +53,14 @@
@requires_ds(gslr)
@requires_file(APEC)
@requires_file(TBABS)
+@requires_file(old_photon_file)
+@requires_file(old_event_file)
def test_sloshing():
+ tmpdir = tempfile.mkdtemp()
+ curdir = os.getcwd()
+ os.chdir(tmpdir)
+
prng = RandomState(0x4d3d3d3)
ds = data_dir_load(gslr)
@@ -61,25 +74,72 @@
sphere = ds.sphere("c", (0.1, "Mpc"))
thermal_model = ThermalPhotonModel(apec_model, Zmet=0.3, prng=prng)
- photons = PhotonList.from_scratch(sphere, redshift, A, exp_time,
- thermal_model)
+ photons1 = PhotonList.from_scratch(sphere, redshift, A, exp_time,
+ thermal_model)
- return_photons = return_data(photons.photons)
+ return_photons = return_data(photons1.photons)
- tests = []
- tests.append(GenericArrayTest(ds, return_photons, args=["photons"]))
+ tests = [GenericArrayTest(ds, return_photons, args=["photons"])]
for a, r in zip(arfs, rmfs):
- arf = os.path.join(xray_data_dir,a)
- rmf = os.path.join(xray_data_dir,r)
- events = photons.project_photons([1.0,-0.5,0.2], responses=[arf,rmf],
- absorb_model=tbabs_model,
- convolve_energies=True, prng=prng)
+ arf = os.path.join(xray_data_dir, a)
+ rmf = os.path.join(xray_data_dir, r)
+ events1 = photons1.project_photons([1.0,-0.5,0.2], responses=[arf,rmf],
+ absorb_model=tbabs_model,
+ convolve_energies=True, prng=prng)
- return_events = return_data(events.events)
+ return_events = return_data(events1.events)
tests.append(GenericArrayTest(ds, return_events, args=[a]))
for test in tests:
test_sloshing.__name__ = test.description
yield test
+
+ photons1.write_h5_file("test_photons.h5")
+ events1.write_h5_file("test_events.h5")
+
+ photons2 = PhotonList.from_file("test_photons.h5")
+ events2 = EventList.from_h5_file("test_events.h5")
+
+ convert_old_file(old_photon_file, "converted_photons.h5")
+ convert_old_file(old_event_file, "converted_events.h5")
+
+ photons3 = PhotonList.from_file("converted_photons.h5")
+ events3 = EventList.from_h5_file("converted_events.h5")
+
+ for k in photons1.keys():
+ if k == "Energy":
+ arr1 = uconcatenate(photons1[k])
+ arr2 = uconcatenate(photons2[k])
+ arr3 = uconcatenate(photons3[k])
+ else:
+ arr1 = photons1[k]
+ arr2 = photons2[k]
+ arr3 = photons3[k]
+ yield assert_array_equal, arr1, arr2
+ yield assert_array_equal, arr1, arr3
+ for k in events1.keys():
+ yield assert_array_equal, events1[k], events2[k]
+ yield assert_array_equal, events1[k], events3[k]
+
+ nevents = 0
+
+ for i in range(4):
+ events = photons1.project_photons([1.0,-0.5,0.2],
+ exp_time_new=0.25*exp_time,
+ absorb_model=tbabs_model,
+ prng=prng)
+ events.write_h5_file("split_events_%d.h5" % i)
+ nevents += len(events["xsky"])
+
+ merge_files(["split_events_%d.h5" % i for i in range(4)],
+ "merged_events.h5", add_exposure_times=True,
+ clobber=True)
+
+ merged_events = EventList.from_h5_file("merged_events.h5")
+ assert len(merged_events["xsky"]) == nevents
+ assert merged_events.parameters["ExposureTime"] == exp_time
+
+ os.chdir(curdir)
+ shutil.rmtree(tmpdir)
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
More information about the yt-svn
mailing list