[yt-svn] commit/yt: 11 new changesets
commits-noreply at bitbucket.org
commits-noreply at bitbucket.org
Thu Aug 18 08:58:54 PDT 2016
11 new commits in yt:
https://bitbucket.org/yt_analysis/yt/commits/e5b69955736f/
Changeset: e5b69955736f
Branch: yt
User: chummels
Date: 2016-08-04 22:29:28+00:00
Summary: Updating add_continuum source formatting and casting column_density in cm**-2.
Affected #: 1 file
diff -r 1a9c9ab1575b1d2fc4ce517acc44074cf333a4c7 -r e5b69955736f9e73696a6cebc598219ac54a08e4 yt/analysis_modules/absorption_spectrum/absorption_spectrum.py
--- a/yt/analysis_modules/absorption_spectrum/absorption_spectrum.py
+++ b/yt/analysis_modules/absorption_spectrum/absorption_spectrum.py
@@ -280,35 +280,55 @@
redshift, redshift_eff = self._apply_observing_redshift(field_data,
use_peculiar_velocity, observing_redshift)
- # Only add continuum features down to tau of 1.e-4.
+ # Only add continuum features down to tau of 1.e-3
min_tau = 1.e-3
for continuum in self.continuum_list:
- column_density = field_data[continuum['field_name']] * field_data['dl']
+ # Normalization is in cm**-2, so column density must be as well
+ column_density = (field_data[continuum['field_name']] *
+ field_data['dl']).in_units('cm**-2')
# redshift_eff field combines cosmological and velocity redshifts
if use_peculiar_velocity:
delta_lambda = continuum['wavelength'] * redshift_eff
else:
delta_lambda = continuum['wavelength'] * redshift
+ # Right index of continuum affected area is wavelength itself
this_wavelength = delta_lambda + continuum['wavelength']
- right_index = np.digitize(this_wavelength, self.lambda_field).clip(0, self.n_lambda)
+ right_index = np.digitize(this_wavelength,
+ self.lambda_field).clip(0, self.n_lambda)
+ # left index of continuum affected area is to low enough wavelength
+ # to assure the tau value drops to below min_tau
left_index = np.digitize((this_wavelength *
- np.power((min_tau * continuum['normalization'] /
- column_density), (1. / continuum['index']))),
- self.lambda_field).clip(0, self.n_lambda)
+ np.power((min_tau * continuum['normalization'] /
+ column_density),
+ (1. / continuum['index']))),
+ self.lambda_field).clip(0, self.n_lambda)
+ # Only calculate the effects of continuua where column_density
+ # greater than min_tau times the normalization (i.e. N > norm/1000)
+ # because lower column will not have significant effect
valid_continuua = np.where(((column_density /
continuum['normalization']) > min_tau) &
(right_index - left_index > 1))[0]
+ if valid_continuua.size == 0:
+ mylog.info("Not adding continuum %s because no col_dens > %g" %
+ (continuum['label'], continuum['normalization']*min_tau))
+ return
+
pbar = get_pbar("Adding continuum - %s [%f A]: " % \
(continuum['label'], continuum['wavelength']),
valid_continuua.size)
+ # Tau value is (wavelength / continuum_wavelength)**index / norm
+ # i.e. a power law decreasing as wavelength decreases
for i, lixel in enumerate(valid_continuua):
- line_tau = np.power((self.lambda_field[left_index[lixel]:right_index[lixel]] /
- this_wavelength[lixel]), continuum['index']) * \
- column_density[lixel] / continuum['normalization']
- self.tau_field[left_index[lixel]:right_index[lixel]] += line_tau
+ cont_tau = \
+ np.power((self.lambda_field[left_index[lixel] :
+ right_index[lixel]] /
+ this_wavelength[lixel]), \
+ continuum['index']) * \
+ (column_density[lixel] / continuum['normalization'])
+ self.tau_field[left_index[lixel]:right_index[lixel]] += cont_tau
pbar.update(i)
pbar.finish()
https://bitbucket.org/yt_analysis/yt/commits/f0afcd4eacd1/
Changeset: f0afcd4eacd1
Branch: yt
User: chummels
Date: 2016-08-05 02:18:31+00:00
Summary: Adding comments to continuum code and scaling min_tau by n_absorbers
Affected #: 1 file
diff -r e5b69955736f9e73696a6cebc598219ac54a08e4 -r f0afcd4eacd14e70614ef69afa28a127ba985767 yt/analysis_modules/absorption_spectrum/absorption_spectrum.py
--- a/yt/analysis_modules/absorption_spectrum/absorption_spectrum.py
+++ b/yt/analysis_modules/absorption_spectrum/absorption_spectrum.py
@@ -273,17 +273,34 @@
def _add_continua_to_spectrum(self, field_data, use_peculiar_velocity,
observing_redshift=0.):
"""
- Add continuum features to the spectrum.
+    Add continuum features to the spectrum. Continua are recorded as
+    a name, associated field, wavelength, normalization value, and index.
+    Continua are applied at and below the denoted wavelength, where the
+ optical depth decreases as a power law of desired index. For positive
+ index values, this means optical depth is highest at the denoted
+ wavelength, and it drops with shorter and shorter wavelengths.
+ Consequently, transmitted flux undergoes a discontinuous cutoff at the
+ denoted wavelength, and then slowly increases with decreasing wavelength
+ according to the power law.
"""
# Change the redshifts of continuum sources to account for the
# redshift at which the observer sits
redshift, redshift_eff = self._apply_observing_redshift(field_data,
use_peculiar_velocity, observing_redshift)
- # Only add continuum features down to tau of 1.e-3
- min_tau = 1.e-3
+ # min_tau is the minimum optical depth value that warrants
+ # accounting for an absorber. for a single absorber, noticeable
+ # continuum effects begin for tau = 1e-3 (leading to transmitted
+ # flux of e^-tau ~ 0.999). but we apply a cutoff to remove
+ # absorbers with insufficient column_density to contribute
+ # significantly to a continuum (see below). because lots of
+ # low column density absorbers can add up to a significant
+ # continuum effect, we normalize min_tau by the n_absorbers.
+ n_absorbers = len(field_data['dl'])
+ min_tau = 1.e-3/n_absorbers
for continuum in self.continuum_list:
+
# Normalization is in cm**-2, so column density must be as well
column_density = (field_data[continuum['field_name']] *
field_data['dl']).in_units('cm**-2')
@@ -293,34 +310,40 @@
delta_lambda = continuum['wavelength'] * redshift_eff
else:
delta_lambda = continuum['wavelength'] * redshift
- # Right index of continuum affected area is wavelength itself
+
+ # right index of continuum affected area is wavelength itself
this_wavelength = delta_lambda + continuum['wavelength']
right_index = np.digitize(this_wavelength,
self.lambda_field).clip(0, self.n_lambda)
- # left index of continuum affected area is to low enough wavelength
- # to assure the tau value drops to below min_tau
+        # left index of continuum affected area is the wavelength at which
+        # optical depth reaches min_tau
left_index = np.digitize((this_wavelength *
np.power((min_tau * continuum['normalization'] /
column_density),
(1. / continuum['index']))),
self.lambda_field).clip(0, self.n_lambda)
- # Only calculate the effects of continuua where column_density
- # greater than min_tau times the normalization (i.e. N > norm/1000)
- # because lower column will not have significant effect
+        # Only calculate the effects of continua where normalized
+ # column_density is greater than min_tau
+ # because lower column will not have significant contribution
valid_continuua = np.where(((column_density /
continuum['normalization']) > min_tau) &
(right_index - left_index > 1))[0]
if valid_continuua.size == 0:
- mylog.info("Not adding continuum %s because no col_dens > %g" %
- (continuum['label'], continuum['normalization']*min_tau))
+ mylog.info("Not adding continuum %s: insufficient column density" %
+ continuum['label'])
return
pbar = get_pbar("Adding continuum - %s [%f A]: " % \
(continuum['label'], continuum['wavelength']),
valid_continuua.size)
- # Tau value is (wavelength / continuum_wavelength)**index / norm
+
+ # Tau value is (wavelength / continuum_wavelength)**index /
+ # (column_dens / norm)
# i.e. a power law decreasing as wavelength decreases
+
+ # Step through the absorber list and add continuum tau for each to
+ # the total optical depth for all wavelengths
for i, lixel in enumerate(valid_continuua):
cont_tau = \
np.power((self.lambda_field[left_index[lixel] :
https://bitbucket.org/yt_analysis/yt/commits/c3a5ef755c72/
Changeset: c3a5ef755c72
Branch: yt
User: chummels
Date: 2016-08-05 02:19:04+00:00
Summary: Adding test for adding a significant continuum feature to a spectrum.
Affected #: 1 file
diff -r f0afcd4eacd14e70614ef69afa28a127ba985767 -r c3a5ef755c7206ceb2f3505964717c1c4c48e838 yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py
--- a/yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py
+++ b/yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py
@@ -33,7 +33,7 @@
COSMO_PLUS_SINGLE = "enzo_cosmology_plus/RD0009/RD0009"
GIZMO_PLUS = "gizmo_cosmology_plus/N128L16.param"
GIZMO_PLUS_SINGLE = "gizmo_cosmology_plus/snap_N128L16_151.hdf5"
-
+ISO_GALAXY = "IsolatedGalaxy/galaxy0030/galaxy0030"
@requires_file(COSMO_PLUS)
@requires_answer_testing()
@@ -360,3 +360,65 @@
# clean up
os.chdir(curdir)
shutil.rmtree(tmpdir)
+
+ at requires_file(ISO_GALAXY)
+ at requires_answer_testing()
+def test_absorption_spectrum_with_continuum():
+ """
+ This test generates an absorption spectrum from a simple light ray on a
+ grid dataset and adds a Lyman continuum to it
+ """
+
+ # Set up in a temp dir
+ tmpdir = tempfile.mkdtemp()
+ curdir = os.getcwd()
+ os.chdir(tmpdir)
+
+ ds = yt.load(ISO_GALAXY)
+ lr = LightRay(ds)
+
+ ray_start = ds.domain_left_edge
+ ray_end = ds.domain_right_edge
+ lr.make_light_ray(start_position=ray_start, end_position=ray_end,
+ fields=['temperature', 'density', 'H_number_density'],
+ data_filename='lightray.h5')
+
+ sp = AbsorptionSpectrum(800.0, 1300.0, 5001)
+
+ my_label = 'HI Lya'
+ field = 'H_number_density'
+    wavelength = 1215.6700  # Angstroms
+ f_value = 4.164E-01
+ gamma = 6.265e+08
+ mass = 1.00794
+
+ sp.add_line(my_label, field, wavelength, f_value,
+ gamma, mass, label_threshold=1.e10)
+
+ my_label = 'Ly C'
+ field = 'H_number_density'
+ wavelength = 912.323660 # Angstroms
+ normalization = 1.6e17
+ index = 3.0
+
+ sp.add_continuum(my_label, field, wavelength, normalization, index)
+
+ wavelength, flux = sp.make_spectrum('lightray.h5',
+ output_file='spectrum.h5',
+ line_list_file='lines.txt',
+ use_peculiar_velocity=True)
+
+ # load just-generated hdf5 file of spectral data (for consistency)
+ data = h5.File('spectrum.h5', 'r')
+
+ for key in data.keys():
+ func = lambda x=key: data[x][:]
+ func.__name__ = "{}_continuum".format(key)
+ test = GenericArrayTest(None, func)
+ test_absorption_spectrum_with_continuum.__name__ = test.description
+ yield test
+
+ # clean up
+ os.chdir(curdir)
+ shutil.rmtree(tmpdir)
+
https://bitbucket.org/yt_analysis/yt/commits/4e3bce476f8f/
Changeset: 4e3bce476f8f
Branch: yt
User: chummels
Date: 2016-08-05 02:26:01+00:00
Summary: Updating the continuum docs to be more descriptive.
Affected #: 1 file
diff -r c3a5ef755c7206ceb2f3505964717c1c4c48e838 -r 4e3bce476f8f3501ea80cd0648bf6566026a2809 doc/source/analyzing/analysis_modules/absorption_spectrum.rst
--- a/doc/source/analyzing/analysis_modules/absorption_spectrum.rst
+++ b/doc/source/analyzing/analysis_modules/absorption_spectrum.rst
@@ -116,7 +116,12 @@
Continuum features with optical depths that follow a power law can also be
added. Like adding lines, you must specify details like the wavelength
and the field in the dataset and LightRay that is tied to this feature.
-Below, we will add H Lyman continuum.
+The wavelength refers to the location at which the continuum begins to be
+applied to the dataset, and as it moves to lower wavelength values, the
+optical depth value decreases according the defined power law. The
+normalization value is the column density of the linked field which results
+in an optical depth of 1 at the defined wavelength. Below, we add the hydrogen
+Lyman continuum.
.. code-block:: python
@@ -131,7 +136,7 @@
Making the Spectrum
^^^^^^^^^^^^^^^^^^^
-Once all the lines and continuum are added, it is time to make a spectrum out
+Once all the lines and continua are added, it is time to make a spectrum out
of some light ray data.
.. code-block:: python
https://bitbucket.org/yt_analysis/yt/commits/c86b00506c82/
Changeset: c86b00506c82
Branch: yt
User: chummels
Date: 2016-08-05 02:35:28+00:00
Summary: Updating yaml file to account for new continuum test.
Affected #: 2 files
diff -r 4e3bce476f8f3501ea80cd0648bf6566026a2809 -r c86b00506c82a6a9f6284ad5835ccef5a467dd71 tests/tests.yaml
--- a/tests/tests.yaml
+++ b/tests/tests.yaml
@@ -67,11 +67,12 @@
local_ytdata_000:
- yt/frontends/ytdata
- local_absorption_spectrum_001:
+ local_absorption_spectrum_002:
- yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py:test_absorption_spectrum_non_cosmo
- yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py:test_absorption_spectrum_cosmo
- yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py:test_absorption_spectrum_non_cosmo_sph
- yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py:test_absorption_spectrum_cosmo_sph
+ - yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py:test_absorption_spectrum_with_continuum
local_axialpix_001:
- yt/geometry/coordinates/tests/test_axial_pixelization.py:test_axial_pixelization
diff -r 4e3bce476f8f3501ea80cd0648bf6566026a2809 -r c86b00506c82a6a9f6284ad5835ccef5a467dd71 yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py
--- a/yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py
+++ b/yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py
@@ -366,7 +366,7 @@
def test_absorption_spectrum_with_continuum():
"""
This test generates an absorption spectrum from a simple light ray on a
- grid dataset and adds a Lyman continuum to it
+ grid dataset and adds Lyman alpha and Lyman continuum to it
"""
# Set up in a temp dir
@@ -421,4 +421,3 @@
# clean up
os.chdir(curdir)
shutil.rmtree(tmpdir)
-
https://bitbucket.org/yt_analysis/yt/commits/1021e1e3ddf5/
Changeset: 1021e1e3ddf5
Branch: yt
User: chummels
Date: 2016-08-05 05:03:41+00:00
Summary: Fixing typo that caused test to fail.
Affected #: 1 file
diff -r c86b00506c82a6a9f6284ad5835ccef5a467dd71 -r 1021e1e3ddf53313f7de1de4a42ec51223209d3d yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py
--- a/yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py
+++ b/yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py
@@ -374,7 +374,7 @@
curdir = os.getcwd()
os.chdir(tmpdir)
- ds = yt.load(ISO_GALAXY)
+ ds = load(ISO_GALAXY)
lr = LightRay(ds)
ray_start = ds.domain_left_edge
https://bitbucket.org/yt_analysis/yt/commits/23cdb36f52c0/
Changeset: 23cdb36f52c0
Branch: yt
User: chummels
Date: 2016-08-05 18:54:44+00:00
Summary: Incrementing absorption tests in yaml to trigger new generation of test answers.
Affected #: 1 file
diff -r 1021e1e3ddf53313f7de1de4a42ec51223209d3d -r 23cdb36f52c0cb040bd9f9da4607079e1dd49382 tests/tests.yaml
--- a/tests/tests.yaml
+++ b/tests/tests.yaml
@@ -67,7 +67,7 @@
local_ytdata_000:
- yt/frontends/ytdata
- local_absorption_spectrum_002:
+ local_absorption_spectrum_003:
- yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py:test_absorption_spectrum_non_cosmo
- yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py:test_absorption_spectrum_cosmo
- yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py:test_absorption_spectrum_non_cosmo_sph
https://bitbucket.org/yt_analysis/yt/commits/c9b152bdd145/
Changeset: c9b152bdd145
Branch: yt
User: ngoldbaum
Date: 2016-08-05 20:45:45+00:00
Summary: Ensure GenericArrayTest always compares dictionaries
Affected #: 1 file
diff -r 1a9c9ab1575b1d2fc4ce517acc44074cf333a4c7 -r c9b152bdd14567f3fdadbe19a6201aedf5162631 yt/utilities/answer_testing/framework.py
--- a/yt/utilities/answer_testing/framework.py
+++ b/yt/utilities/answer_testing/framework.py
@@ -819,6 +819,10 @@
kwargs = self.kwargs
return self.array_func(*args, **kwargs)
def compare(self, new_result, old_result):
+ if not isinstance(new_result, dict):
+ new_result = {'answer': new_result}
+ old_result = {'answer': old_result}
+
assert_equal(len(new_result), len(old_result),
err_msg="Number of outputs not equal.",
verbose=True)
https://bitbucket.org/yt_analysis/yt/commits/fcc4c4b45789/
Changeset: fcc4c4b45789
Branch: yt
User: chummels
Date: 2016-08-05 21:52:53+00:00
Summary: Merging with Nathan's fixes for GenericArrayTest
Affected #: 1 file
diff -r 23cdb36f52c0cb040bd9f9da4607079e1dd49382 -r fcc4c4b4578960ce0eb714607154dd097dabdd09 yt/utilities/answer_testing/framework.py
--- a/yt/utilities/answer_testing/framework.py
+++ b/yt/utilities/answer_testing/framework.py
@@ -819,6 +819,10 @@
kwargs = self.kwargs
return self.array_func(*args, **kwargs)
def compare(self, new_result, old_result):
+ if not isinstance(new_result, dict):
+ new_result = {'answer': new_result}
+ old_result = {'answer': old_result}
+
assert_equal(len(new_result), len(old_result),
err_msg="Number of outputs not equal.",
verbose=True)
https://bitbucket.org/yt_analysis/yt/commits/ffb3f22c89ad/
Changeset: ffb3f22c89ad
Branch: yt
User: chummels
Date: 2016-08-17 18:45:52+00:00
Summary: Fixing some typos.
Affected #: 2 files
diff -r fcc4c4b4578960ce0eb714607154dd097dabdd09 -r ffb3f22c89ada70bc99cb5a8c52ad22ba62bb253 doc/source/analyzing/analysis_modules/absorption_spectrum.rst
--- a/doc/source/analyzing/analysis_modules/absorption_spectrum.rst
+++ b/doc/source/analyzing/analysis_modules/absorption_spectrum.rst
@@ -118,7 +118,7 @@
and the field in the dataset and LightRay that is tied to this feature.
The wavelength refers to the location at which the continuum begins to be
applied to the dataset, and as it moves to lower wavelength values, the
-optical depth value decreases according the defined power law. The
+optical depth value decreases according to the defined power law. The
normalization value is the column density of the linked field which results
in an optical depth of 1 at the defined wavelength. Below, we add the hydrogen
Lyman continuum.
diff -r fcc4c4b4578960ce0eb714607154dd097dabdd09 -r ffb3f22c89ada70bc99cb5a8c52ad22ba62bb253 yt/analysis_modules/absorption_spectrum/absorption_spectrum.py
--- a/yt/analysis_modules/absorption_spectrum/absorption_spectrum.py
+++ b/yt/analysis_modules/absorption_spectrum/absorption_spectrum.py
@@ -296,7 +296,7 @@
# significantly to a continuum (see below). because lots of
# low column density absorbers can add up to a significant
# continuum effect, we normalize min_tau by the n_absorbers.
- n_absorbers = len(field_data['dl'])
+ n_absorbers = field_data['dl'].size
min_tau = 1.e-3/n_absorbers
for continuum in self.continuum_list:
https://bitbucket.org/yt_analysis/yt/commits/df6fc87fa819/
Changeset: df6fc87fa819
Branch: yt
User: ngoldbaum
Date: 2016-08-18 15:58:12+00:00
Summary: Merged in chummels/yt (pull request #2323)
AbsorptionSpectrum continuum improvements and fixes
Affected #: 5 files
diff -r 8de8a3c315df8cbf36140c6cc6cd0d4e7a5e748e -r df6fc87fa819404a937c497f8187afa123e92bac doc/source/analyzing/analysis_modules/absorption_spectrum.rst
--- a/doc/source/analyzing/analysis_modules/absorption_spectrum.rst
+++ b/doc/source/analyzing/analysis_modules/absorption_spectrum.rst
@@ -116,7 +116,12 @@
Continuum features with optical depths that follow a power law can also be
added. Like adding lines, you must specify details like the wavelength
and the field in the dataset and LightRay that is tied to this feature.
-Below, we will add H Lyman continuum.
+The wavelength refers to the location at which the continuum begins to be
+applied to the dataset, and as it moves to lower wavelength values, the
+optical depth value decreases according to the defined power law. The
+normalization value is the column density of the linked field which results
+in an optical depth of 1 at the defined wavelength. Below, we add the hydrogen
+Lyman continuum.
.. code-block:: python
@@ -131,7 +136,7 @@
Making the Spectrum
^^^^^^^^^^^^^^^^^^^
-Once all the lines and continuum are added, it is time to make a spectrum out
+Once all the lines and continua are added, it is time to make a spectrum out
of some light ray data.
.. code-block:: python
diff -r 8de8a3c315df8cbf36140c6cc6cd0d4e7a5e748e -r df6fc87fa819404a937c497f8187afa123e92bac tests/tests.yaml
--- a/tests/tests.yaml
+++ b/tests/tests.yaml
@@ -67,11 +67,12 @@
local_ytdata_000:
- yt/frontends/ytdata
- local_absorption_spectrum_001:
+ local_absorption_spectrum_003:
- yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py:test_absorption_spectrum_non_cosmo
- yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py:test_absorption_spectrum_cosmo
- yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py:test_absorption_spectrum_non_cosmo_sph
- yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py:test_absorption_spectrum_cosmo_sph
+ - yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py:test_absorption_spectrum_with_continuum
local_axialpix_001:
- yt/geometry/coordinates/tests/test_axial_pixelization.py:test_axial_pixelization
diff -r 8de8a3c315df8cbf36140c6cc6cd0d4e7a5e748e -r df6fc87fa819404a937c497f8187afa123e92bac yt/analysis_modules/absorption_spectrum/absorption_spectrum.py
--- a/yt/analysis_modules/absorption_spectrum/absorption_spectrum.py
+++ b/yt/analysis_modules/absorption_spectrum/absorption_spectrum.py
@@ -273,42 +273,85 @@
def _add_continua_to_spectrum(self, field_data, use_peculiar_velocity,
observing_redshift=0.):
"""
- Add continuum features to the spectrum.
+    Add continuum features to the spectrum. Continua are recorded as
+    a name, associated field, wavelength, normalization value, and index.
+    Continua are applied at and below the denoted wavelength, where the
+ optical depth decreases as a power law of desired index. For positive
+ index values, this means optical depth is highest at the denoted
+ wavelength, and it drops with shorter and shorter wavelengths.
+ Consequently, transmitted flux undergoes a discontinuous cutoff at the
+ denoted wavelength, and then slowly increases with decreasing wavelength
+ according to the power law.
"""
# Change the redshifts of continuum sources to account for the
# redshift at which the observer sits
redshift, redshift_eff = self._apply_observing_redshift(field_data,
use_peculiar_velocity, observing_redshift)
- # Only add continuum features down to tau of 1.e-4.
- min_tau = 1.e-3
+ # min_tau is the minimum optical depth value that warrants
+ # accounting for an absorber. for a single absorber, noticeable
+ # continuum effects begin for tau = 1e-3 (leading to transmitted
+ # flux of e^-tau ~ 0.999). but we apply a cutoff to remove
+ # absorbers with insufficient column_density to contribute
+ # significantly to a continuum (see below). because lots of
+ # low column density absorbers can add up to a significant
+ # continuum effect, we normalize min_tau by the n_absorbers.
+ n_absorbers = field_data['dl'].size
+ min_tau = 1.e-3/n_absorbers
for continuum in self.continuum_list:
- column_density = field_data[continuum['field_name']] * field_data['dl']
+
+ # Normalization is in cm**-2, so column density must be as well
+ column_density = (field_data[continuum['field_name']] *
+ field_data['dl']).in_units('cm**-2')
# redshift_eff field combines cosmological and velocity redshifts
if use_peculiar_velocity:
delta_lambda = continuum['wavelength'] * redshift_eff
else:
delta_lambda = continuum['wavelength'] * redshift
+
+ # right index of continuum affected area is wavelength itself
this_wavelength = delta_lambda + continuum['wavelength']
- right_index = np.digitize(this_wavelength, self.lambda_field).clip(0, self.n_lambda)
+ right_index = np.digitize(this_wavelength,
+ self.lambda_field).clip(0, self.n_lambda)
+        # left index of continuum affected area is the wavelength at which
+        # optical depth reaches min_tau
left_index = np.digitize((this_wavelength *
- np.power((min_tau * continuum['normalization'] /
- column_density), (1. / continuum['index']))),
- self.lambda_field).clip(0, self.n_lambda)
+ np.power((min_tau * continuum['normalization'] /
+ column_density),
+ (1. / continuum['index']))),
+ self.lambda_field).clip(0, self.n_lambda)
+        # Only calculate the effects of continua where normalized
+ # column_density is greater than min_tau
+ # because lower column will not have significant contribution
valid_continuua = np.where(((column_density /
continuum['normalization']) > min_tau) &
(right_index - left_index > 1))[0]
+ if valid_continuua.size == 0:
+ mylog.info("Not adding continuum %s: insufficient column density" %
+ continuum['label'])
+ return
+
pbar = get_pbar("Adding continuum - %s [%f A]: " % \
(continuum['label'], continuum['wavelength']),
valid_continuua.size)
+
+ # Tau value is (wavelength / continuum_wavelength)**index /
+ # (column_dens / norm)
+ # i.e. a power law decreasing as wavelength decreases
+
+ # Step through the absorber list and add continuum tau for each to
+ # the total optical depth for all wavelengths
for i, lixel in enumerate(valid_continuua):
- line_tau = np.power((self.lambda_field[left_index[lixel]:right_index[lixel]] /
- this_wavelength[lixel]), continuum['index']) * \
- column_density[lixel] / continuum['normalization']
- self.tau_field[left_index[lixel]:right_index[lixel]] += line_tau
+ cont_tau = \
+ np.power((self.lambda_field[left_index[lixel] :
+ right_index[lixel]] /
+ this_wavelength[lixel]), \
+ continuum['index']) * \
+ (column_density[lixel] / continuum['normalization'])
+ self.tau_field[left_index[lixel]:right_index[lixel]] += cont_tau
pbar.update(i)
pbar.finish()
diff -r 8de8a3c315df8cbf36140c6cc6cd0d4e7a5e748e -r df6fc87fa819404a937c497f8187afa123e92bac yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py
--- a/yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py
+++ b/yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py
@@ -33,7 +33,7 @@
COSMO_PLUS_SINGLE = "enzo_cosmology_plus/RD0009/RD0009"
GIZMO_PLUS = "gizmo_cosmology_plus/N128L16.param"
GIZMO_PLUS_SINGLE = "gizmo_cosmology_plus/snap_N128L16_151.hdf5"
-
+ISO_GALAXY = "IsolatedGalaxy/galaxy0030/galaxy0030"
@requires_file(COSMO_PLUS)
@requires_answer_testing()
@@ -360,3 +360,64 @@
# clean up
os.chdir(curdir)
shutil.rmtree(tmpdir)
+
+ at requires_file(ISO_GALAXY)
+ at requires_answer_testing()
+def test_absorption_spectrum_with_continuum():
+ """
+ This test generates an absorption spectrum from a simple light ray on a
+ grid dataset and adds Lyman alpha and Lyman continuum to it
+ """
+
+ # Set up in a temp dir
+ tmpdir = tempfile.mkdtemp()
+ curdir = os.getcwd()
+ os.chdir(tmpdir)
+
+ ds = load(ISO_GALAXY)
+ lr = LightRay(ds)
+
+ ray_start = ds.domain_left_edge
+ ray_end = ds.domain_right_edge
+ lr.make_light_ray(start_position=ray_start, end_position=ray_end,
+ fields=['temperature', 'density', 'H_number_density'],
+ data_filename='lightray.h5')
+
+ sp = AbsorptionSpectrum(800.0, 1300.0, 5001)
+
+ my_label = 'HI Lya'
+ field = 'H_number_density'
+    wavelength = 1215.6700  # Angstroms
+ f_value = 4.164E-01
+ gamma = 6.265e+08
+ mass = 1.00794
+
+ sp.add_line(my_label, field, wavelength, f_value,
+ gamma, mass, label_threshold=1.e10)
+
+ my_label = 'Ly C'
+ field = 'H_number_density'
+ wavelength = 912.323660 # Angstroms
+ normalization = 1.6e17
+ index = 3.0
+
+ sp.add_continuum(my_label, field, wavelength, normalization, index)
+
+ wavelength, flux = sp.make_spectrum('lightray.h5',
+ output_file='spectrum.h5',
+ line_list_file='lines.txt',
+ use_peculiar_velocity=True)
+
+ # load just-generated hdf5 file of spectral data (for consistency)
+ data = h5.File('spectrum.h5', 'r')
+
+ for key in data.keys():
+ func = lambda x=key: data[x][:]
+ func.__name__ = "{}_continuum".format(key)
+ test = GenericArrayTest(None, func)
+ test_absorption_spectrum_with_continuum.__name__ = test.description
+ yield test
+
+ # clean up
+ os.chdir(curdir)
+ shutil.rmtree(tmpdir)
diff -r 8de8a3c315df8cbf36140c6cc6cd0d4e7a5e748e -r df6fc87fa819404a937c497f8187afa123e92bac yt/utilities/answer_testing/framework.py
--- a/yt/utilities/answer_testing/framework.py
+++ b/yt/utilities/answer_testing/framework.py
@@ -819,6 +819,10 @@
kwargs = self.kwargs
return self.array_func(*args, **kwargs)
def compare(self, new_result, old_result):
+ if not isinstance(new_result, dict):
+ new_result = {'answer': new_result}
+ old_result = {'answer': old_result}
+
assert_equal(len(new_result), len(old_result),
err_msg="Number of outputs not equal.",
verbose=True)
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
More information about the yt-svn
mailing list