[yt-svn] commit/yt: 10 new changesets
commits-noreply at bitbucket.org
commits-noreply at bitbucket.org
Mon Oct 12 11:08:29 PDT 2015
10 new commits in yt:
https://bitbucket.org/yt_analysis/yt/commits/75de3c4968e0/
Changeset: 75de3c4968e0
Branch: stable
User: ngoldbaum
Date: 2015-09-21 19:45:48+00:00
Summary: Backporting PR #1752 https://bitbucket.org/yt_analysis/yt/pull-requests/1752
Affected #: 1 file
diff -r 96b3f211c60dc6fb4405080e9470231d4bcb8fcb -r 75de3c4968e073457ee3df6f267df464129d9c22 yt/utilities/png_writer.py
--- a/yt/utilities/png_writer.py
+++ b/yt/utilities/png_writer.py
@@ -10,20 +10,31 @@
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
+import matplotlib
import matplotlib._png as _png
from yt.extern.six.moves import cStringIO
+from distutils.version import LooseVersion
+
+MPL_VERSION = LooseVersion(matplotlib.__version__)
+MPL_API_2_VERSION = LooseVersion("1.5.0")
+
+if MPL_VERSION < MPL_API_2_VERSION:
+ def call_png_write_png(buffer, width, height, filename, dpi):
+ _png.write_png(buffer, width, height, filename, dpi)
+else:
+ def call_png_write_png(buffer, width, height, filename, dpi):
+ _png.write_png(buffer, filename, dpi)
def write_png(buffer, filename, dpi=100):
width = buffer.shape[1]
height = buffer.shape[0]
- _png.write_png(buffer, width, height, filename, dpi)
+ call_png_write_png(buffer, width, height, filename, dpi)
def write_png_to_string(buffer, dpi=100, gray=0):
width = buffer.shape[1]
height = buffer.shape[0]
fileobj = cStringIO()
- _png.write_png(buffer, width, height, fileobj, dpi)
+ call_png_write_png(buffer, width, height, fileobj, dpi)
png_str = fileobj.getvalue()
fileobj.close()
return png_str
-
https://bitbucket.org/yt_analysis/yt/commits/a964d0d89841/
Changeset: a964d0d89841
Branch: stable
User: chummels
Date: 2015-09-23 21:27:50+00:00
Summary: Backporting PR #1758 https://bitbucket.org/yt_analysis/yt/pull-requests/1758
Affected #: 2 files
diff -r 75de3c4968e073457ee3df6f267df464129d9c22 -r a964d0d89841466ee4e682c1d7d6a947454b3695 yt/analysis_modules/absorption_spectrum/absorption_spectrum.py
--- a/yt/analysis_modules/absorption_spectrum/absorption_spectrum.py
+++ b/yt/analysis_modules/absorption_spectrum/absorption_spectrum.py
@@ -131,7 +131,9 @@
field_data = {}
if use_peculiar_velocity:
input_fields.append('velocity_los')
+ input_fields.append('redshift_eff')
field_units["velocity_los"] = "cm/s"
+ field_units["redshift_eff"] = ""
for feature in self.line_list + self.continuum_list:
if not feature['field_name'] in input_fields:
input_fields.append(feature['field_name'])
@@ -170,11 +172,11 @@
for continuum in self.continuum_list:
column_density = field_data[continuum['field_name']] * field_data['dl']
- delta_lambda = continuum['wavelength'] * field_data['redshift']
+ # redshift_eff field combines cosmological and velocity redshifts
if use_peculiar_velocity:
- # include factor of (1 + z) because our velocity is in proper frame.
- delta_lambda += continuum['wavelength'] * (1 + field_data['redshift']) * \
- field_data['velocity_los'] / speed_of_light_cgs
+ delta_lambda = continuum['wavelength'] * field_data['redshift_eff']
+ else:
+ delta_lambda = continuum['wavelength'] * field_data['redshift']
this_wavelength = delta_lambda + continuum['wavelength']
right_index = np.digitize(this_wavelength, self.lambda_bins).clip(0, self.n_lambda)
left_index = np.digitize((this_wavelength *
@@ -207,11 +209,11 @@
for line in self.line_list:
column_density = field_data[line['field_name']] * field_data['dl']
- delta_lambda = line['wavelength'] * field_data['redshift']
+ # redshift_eff field combines cosmological and velocity redshifts
if use_peculiar_velocity:
- # include factor of (1 + z) because our velocity is in proper frame.
- delta_lambda += line['wavelength'] * (1 + field_data['redshift']) * \
- field_data['velocity_los'] / speed_of_light_cgs
+ delta_lambda = line['wavelength'] * field_data['redshift_eff']
+ else:
+ delta_lambda = line['wavelength'] * field_data['redshift']
thermal_b = np.sqrt((2 * boltzmann_constant_cgs *
field_data['temperature']) /
line['atomic_mass'])
diff -r 75de3c4968e073457ee3df6f267df464129d9c22 -r a964d0d89841466ee4e682c1d7d6a947454b3695 yt/analysis_modules/cosmological_observation/light_ray/light_ray.py
--- a/yt/analysis_modules/cosmological_observation/light_ray/light_ray.py
+++ b/yt/analysis_modules/cosmological_observation/light_ray/light_ray.py
@@ -30,6 +30,7 @@
from yt.utilities.parallel_tools.parallel_analysis_interface import \
parallel_objects, \
parallel_root_only
+from yt.utilities.physical_constants import speed_of_light_cgs
class LightRay(CosmologySplice):
"""
@@ -366,7 +367,7 @@
all_fields.extend(['dl', 'dredshift', 'redshift'])
if get_los_velocity:
all_fields.extend(['velocity_x', 'velocity_y',
- 'velocity_z', 'velocity_los'])
+ 'velocity_z', 'velocity_los', 'redshift_eff'])
data_fields.extend(['velocity_x', 'velocity_y', 'velocity_z'])
all_ray_storage = {}
@@ -458,6 +459,28 @@
sub_data['redshift'] = my_segment['redshift'] - \
sub_data['dredshift'].cumsum() + sub_data['dredshift']
+ # When velocity_los is present, add effective redshift
+ # (redshift_eff) field by combining cosmological redshift and
+ # doppler redshift.
+
+ # first convert los velocities to comoving frame (ie mult. by (1+z)),
+ # then calculate doppler redshift:
+ # 1 + redshift_dopp = sqrt((1+v/c) / (1-v/c))
+
+ # then to add cosmological redshift and doppler redshift, follow
+ # eqn 3.75 in Peacock's Cosmological Physics:
+ # 1 + z_obs = (1 + z_cosmo) * (1 + z_doppler)
+ # Alternatively, see eqn 5.49 in Peebles for a similar result.
+ if get_los_velocity:
+
+ velocity_los_cm = (1 + sub_data['redshift']) * \
+ sub_data['velocity_los']
+ redshift_dopp = ((1 + velocity_los_cm / speed_of_light_cgs) /
+ (1 - velocity_los_cm / speed_of_light_cgs))**(0.5) - 1
+ sub_data['redshift_eff'] = (1 + redshift_dopp) * \
+ (1 + sub_data['redshift'])
+ del velocity_los_cm, redshift_dopp
+
# Remove empty lixels.
sub_dl_nonzero = sub_data['dl'].nonzero()
for field in all_fields:
https://bitbucket.org/yt_analysis/yt/commits/fafd6f847311/
Changeset: fafd6f847311
Branch: stable
User: karraki
Date: 2015-08-31 04:51:42+00:00
Summary: Backporting PR #1764 https://bitbucket.org/yt_analysis/yt/pull-requests/1764
Affected #: 1 file
diff -r a964d0d89841466ee4e682c1d7d6a947454b3695 -r fafd6f847311910ffb0a9c15c27b07be0b176dd8 yt/frontends/art/data_structures.py
--- a/yt/frontends/art/data_structures.py
+++ b/yt/frontends/art/data_structures.py
@@ -193,7 +193,7 @@
particle header, star files, etc.
"""
base_prefix, base_suffix = filename_pattern['amr']
- aexpstr = 'a'+file_amr.rsplit('a',1)[1].replace(base_suffix,'')
+ numericstr = file_amr.rsplit('_',1)[1].replace(base_suffix,'')
possibles = glob.glob(os.path.dirname(os.path.abspath(file_amr))+"/*")
for filetype, (prefix, suffix) in filename_pattern.items():
# if this attribute is already set skip it
@@ -201,7 +201,10 @@
continue
match = None
for possible in possibles:
- if possible.endswith(aexpstr+suffix):
+ if possible.endswith(numericstr+suffix):
+ if os.path.basename(possible).startswith(prefix):
+ match = possible
+ elif possible.endswith(suffix):
if os.path.basename(possible).startswith(prefix):
match = possible
if match is not None:
https://bitbucket.org/yt_analysis/yt/commits/3f7547299119/
Changeset: 3f7547299119
Branch: stable
User: ngoldbaum
Date: 2015-09-23 04:43:04+00:00
Summary: Fixing test failures on Numpy 1.10
Affected #: 1 file
diff -r fafd6f847311910ffb0a9c15c27b07be0b176dd8 -r 3f7547299119397d1e2e2baa1d42f809aef21e35 yt/data_objects/tests/test_compose.py
--- a/yt/data_objects/tests/test_compose.py
+++ b/yt/data_objects/tests/test_compose.py
@@ -20,7 +20,6 @@
yi = y / min_dx
zi = z / min_dx
index = xi + delta[0] * (yi + delta[1] * zi)
- index = index.astype('int64')
return index
def test_compose_no_overlap():
https://bitbucket.org/yt_analysis/yt/commits/f6ce226ece8a/
Changeset: f6ce226ece8a
Branch: stable
User: atmyers
Date: 2015-09-24 20:45:38+00:00
Summary: Backporting PR #1768 https://bitbucket.org/yt_analysis/yt/pull-requests/1768
Affected #: 1 file
diff -r 3f7547299119397d1e2e2baa1d42f809aef21e35 -r f6ce226ece8af697f31018f3585f8f8052746afb yt/utilities/lib/setup.py
--- a/yt/utilities/lib/setup.py
+++ b/yt/utilities/lib/setup.py
@@ -17,6 +17,7 @@
# Get compiler invocation
compiler = os.getenv('CC', 'cc')
+ compiler = compiler.split(' ')
# Attempt to compile a test script.
# See http://openmp.org/wp/openmp-compilers/
@@ -32,11 +33,13 @@
)
file.flush()
with open(os.devnull, 'w') as fnull:
- exit_code = subprocess.call([compiler, '-fopenmp', filename],
+ exit_code = subprocess.call(compiler + ['-fopenmp', filename],
stdout=fnull, stderr=fnull)
# Clean up
file.close()
+ except OSError:
+ return False
finally:
os.chdir(curdir)
shutil.rmtree(tmpdir)
https://bitbucket.org/yt_analysis/yt/commits/9037555522ce/
Changeset: 9037555522ce
Branch: stable
User: qobilidop
Date: 2015-09-25 20:24:51+00:00
Summary: fix issue #1086
Affected #: 1 file
diff -r f6ce226ece8af697f31018f3585f8f8052746afb -r 9037555522ce74b4414d0e17d9fc1fb4223d653d yt/fields/particle_fields.py
--- a/yt/fields/particle_fields.py
+++ b/yt/fields/particle_fields.py
@@ -797,6 +797,7 @@
registry.add_field(field_name, function = _vol_weight,
validators = [ValidateSpatial(0)],
units = field_units)
+ registry.find_dependencies((field_name,))
return [field_name]
def add_nearest_neighbor_field(ptype, coord_name, registry, nneighbors = 64):
https://bitbucket.org/yt_analysis/yt/commits/557ed8d49f9b/
Changeset: 557ed8d49f9b
Branch: stable
User: ngoldbaum
Date: 2015-09-30 17:13:35+00:00
Summary: Fix casting issues for 1D and 2D Enzo simulations
Affected #: 1 file
diff -r 9037555522ce74b4414d0e17d9fc1fb4223d653d -r 557ed8d49f9b249e55b76985ae23fb3d9a12a354 yt/frontends/enzo/data_structures.py
--- a/yt/frontends/enzo/data_structures.py
+++ b/yt/frontends/enzo/data_structures.py
@@ -636,7 +636,7 @@
def _fill_arrays(self, ei, si, LE, RE, npart, nap):
self.grid_dimensions[:,:1] = ei
- self.grid_dimensions[:,:1] -= np.array(si, self.float_type)
+ self.grid_dimensions[:,:1] -= np.array(si, dtype='i4')
self.grid_dimensions += 1
self.grid_left_edge[:,:1] = LE
self.grid_right_edge[:,:1] = RE
@@ -651,7 +651,7 @@
def _fill_arrays(self, ei, si, LE, RE, npart, nap):
self.grid_dimensions[:,:2] = ei
- self.grid_dimensions[:,:2] -= np.array(si, self.float_type)
+ self.grid_dimensions[:,:2] -= np.array(si, dtype='i4')
self.grid_dimensions += 1
self.grid_left_edge[:,:2] = LE
self.grid_right_edge[:,:2] = RE
https://bitbucket.org/yt_analysis/yt/commits/77f5b5447868/
Changeset: 77f5b5447868
Branch: stable
User: atmyers
Date: 2015-10-01 02:04:55+00:00
Summary: Handle the case where you have more than 10 on-disk particle fields in the file
Affected #: 1 file
diff -r 557ed8d49f9b249e55b76985ae23fb3d9a12a354 -r 77f5b54478689a425c130789f7cc80cd8de02ac5 yt/frontends/chombo/io.py
--- a/yt/frontends/chombo/io.py
+++ b/yt/frontends/chombo/io.py
@@ -88,7 +88,7 @@
field_dict = {}
for key, val in self._handle.attrs.items():
if key.startswith('particle_'):
- comp_number = int(re.match('particle_component_(\d)', key).groups()[0])
+ comp_number = int(re.match('particle_component_(\d+)', key).groups()[0])
field_dict[val.decode("ascii")] = comp_number
self._particle_field_index = field_dict
return self._particle_field_index
https://bitbucket.org/yt_analysis/yt/commits/ab903a6f15de/
Changeset: ab903a6f15de
Branch: stable
User: ngoldbaum
Date: 2015-10-01 05:15:00+00:00
Summary: Support enzo answers tests with yt-3
Affected #: 2 files
diff -r 77f5b54478689a425c130789f7cc80cd8de02ac5 -r ab903a6f15de946b215c0d3c0e05432880007537 yt/fields/field_aliases.py
--- a/yt/fields/field_aliases.py
+++ b/yt/fields/field_aliases.py
@@ -83,7 +83,11 @@
("CuttingPlaneBy", "cutting_plane_by"),
("MeanMolecularWeight", "mean_molecular_weight"),
("particle_density", "particle_density"),
+ ("ThermalEnergy", "thermal_energy"),
+ ("TotalEnergy", "total_energy"),
("MagneticEnergy", "magnetic_energy"),
+ ("GasEnergy", "thermal_energy"),
+ ("Gas_Energy", "thermal_energy"),
("BMagnitude", "b_magnitude"),
("PlasmaBeta", "plasma_beta"),
("MagneticPressure", "magnetic_pressure"),
diff -r 77f5b54478689a425c130789f7cc80cd8de02ac5 -r ab903a6f15de946b215c0d3c0e05432880007537 yt/frontends/enzo/answer_testing_support.py
--- a/yt/frontends/enzo/answer_testing_support.py
+++ b/yt/frontends/enzo/answer_testing_support.py
@@ -78,16 +78,17 @@
def __call__(self):
# Read in the ds
- ds = load(self.data_file)
- exact = self.get_analytical_solution()
+ ds = load(self.data_file)
+ ds.setup_deprecated_fields()
+ exact = self.get_analytical_solution()
ad = ds.all_data()
position = ad['x']
for k in self.fields:
- field = ad[k]
+ field = ad[k].d
for xmin, xmax in zip(self.left_edges, self.right_edges):
mask = (position >= xmin)*(position <= xmax)
- exact_field = np.interp(position[mask], exact['pos'], exact[k])
+ exact_field = np.interp(position[mask], exact['pos'], exact[k])
myname = "ShockTubeTest_%s" % k
# yield test vs analytical solution
yield AssertWrapper(myname, assert_allclose, field[mask],
https://bitbucket.org/yt_analysis/yt/commits/d20a02c62329/
Changeset: d20a02c62329
Branch: stable
User: chummels
Date: 2015-10-05 03:38:06+00:00
Summary: Fixing bug with redshift calculation in LightRay analysis module. Forgot a 1!
Affected #: 1 file
diff -r ab903a6f15de946b215c0d3c0e05432880007537 -r d20a02c6232921a4105a5b32e1df4de08be0fc87 yt/analysis_modules/cosmological_observation/light_ray/light_ray.py
--- a/yt/analysis_modules/cosmological_observation/light_ray/light_ray.py
+++ b/yt/analysis_modules/cosmological_observation/light_ray/light_ray.py
@@ -477,8 +477,8 @@
sub_data['velocity_los']
redshift_dopp = ((1 + velocity_los_cm / speed_of_light_cgs) /
(1 - velocity_los_cm / speed_of_light_cgs))**(0.5) - 1
- sub_data['redshift_eff'] = (1 + redshift_dopp) * \
- (1 + sub_data['redshift'])
+ sub_data['redshift_eff'] = ((1 + redshift_dopp) * \
+ (1 + sub_data['redshift'])) - 1
del velocity_los_cm, redshift_dopp
# Remove empty lixels.
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you have the commit-notification service enabled
for this repository.
More information about the yt-svn
mailing list