[yt-svn] commit/yt: 2 new changesets
commits-noreply at bitbucket.org
commits-noreply at bitbucket.org
Fri Dec 8 10:28:31 PST 2017
2 new commits in yt:
https://bitbucket.org/yt_analysis/yt/commits/fb709dd70d49/
Changeset: fb709dd70d49
User: ngoldbaum
Date: 2017-12-06 16:13:06+00:00
Summary: do a better job of handling global config state for tests that disable the dataset cache
Affected #: 4 files
diff -r 691d304f219cc3b5db48ba9b93c7eb02ecd6c604 -r fb709dd70d49c748e510061d2d7c85ea25c934cf yt/fields/particle_fields.py
--- a/yt/fields/particle_fields.py
+++ b/yt/fields/particle_fields.py
@@ -173,7 +173,7 @@
for ax in 'xyz':
for method, name in zip(("cic", "sum"), ("cic", "nn")):
function = _get_density_weighted_deposit_field(
- "particle_velocity_%s" % ax, "cm/s", method)
+ "particle_velocity_%s" % ax, "code_velocity", method)
registry.add_field(
("deposit", ("%s_"+name+"_velocity_%s") % (ptype, ax)), sampling_type="cell",
function=function, units=unit_system["velocity"], take_log=False,
@@ -181,7 +181,7 @@
for method, name in zip(("cic", "sum"), ("cic", "nn")):
function = _get_density_weighted_deposit_field(
- "age", "s", method)
+ "age", "code_time", method)
registry.add_field(
("deposit", ("%s_"+name+"_age") % (ptype)), sampling_type="cell",
function=function, units=unit_system["time"], take_log=False,
diff -r 691d304f219cc3b5db48ba9b93c7eb02ecd6c604 -r fb709dd70d49c748e510061d2d7c85ea25c934cf yt/frontends/athena/tests/test_outputs.py
--- a/yt/frontends/athena/tests/test_outputs.py
+++ b/yt/frontends/athena/tests/test_outputs.py
@@ -16,13 +16,13 @@
from yt.testing import \
assert_equal, \
requires_file, \
- assert_allclose_units
+ assert_allclose_units, \
+ disable_dataset_cache
from yt.utilities.answer_testing.framework import \
requires_ds, \
small_patch_amr, \
data_dir_load
from yt.frontends.athena.api import AthenaDataset
-from yt.config import ytcfg
from yt.convenience import load
import yt.units as u
@@ -85,9 +85,8 @@
"mass_unit": (1.0e14,"Msun")}
@requires_file(sloshing)
+@disable_dataset_cache
def test_nprocs():
- ytcfg["yt","skip_dataset_cache"] = "True"
-
ds1 = load(sloshing, units_override=uo_sloshing)
sp1 = ds1.sphere("c", (100.,"kpc"))
prj1 = ds1.proj("density",0)
@@ -111,8 +110,6 @@
sp2.quantities.bulk_velocity())
assert_equal(prj1["density"], prj2["density"])
- ytcfg["yt","skip_dataset_cache"] = "False"
-
@requires_file(cloud)
def test_AthenaDataset():
assert isinstance(data_dir_load(cloud), AthenaDataset)
diff -r 691d304f219cc3b5db48ba9b93c7eb02ecd6c604 -r fb709dd70d49c748e510061d2d7c85ea25c934cf yt/testing.py
--- a/yt/testing.py
+++ b/yt/testing.py
@@ -18,6 +18,7 @@
from yt.extern.six.moves import cPickle
import itertools as it
import numpy as np
+import functools
import importlib
import os
import unittest
@@ -578,8 +579,20 @@
else:
return ffalse
+def disable_dataset_cache(func):
+ @functools.wraps(func)
+ def newfunc(*args, **kwargs):
+ restore_cfg_state = False
+ if ytcfg.get("yt", "skip_dataset_cache") == "False":
+ ytcfg["yt","skip_dataset_cache"] = "True"
+ rv = func(*args, **kwargs)
+ if restore_cfg_state:
+ ytcfg["yt","skip_dataset_cache"] = "False"
+ return rv
+ return newfunc
+
+@disable_dataset_cache
def units_override_check(fn):
- ytcfg["yt","skip_dataset_cache"] = "True"
units_list = ["length","time","mass","velocity",
"magnetic","temperature"]
ds1 = load(fn)
@@ -593,7 +606,6 @@
units_override["%s_unit" % u] = (unit_attr.v, str(unit_attr.units))
del ds1
ds2 = load(fn, units_override=units_override)
- ytcfg["yt","skip_dataset_cache"] = "False"
assert(len(ds2.units_override) > 0)
for u in units_list:
unit_attr = getattr(ds2, "%s_unit" % u, None)
diff -r 691d304f219cc3b5db48ba9b93c7eb02ecd6c604 -r fb709dd70d49c748e510061d2d7c85ea25c934cf yt/units/tests/test_unit_systems.py
--- a/yt/units/tests/test_unit_systems.py
+++ b/yt/units/tests/test_unit_systems.py
@@ -16,8 +16,7 @@
from yt.units import dimensions
from yt.convenience import load
from yt.testing import assert_almost_equal, assert_allclose, requires_file, \
- fake_random_ds
-from yt.config import ytcfg
+ fake_random_ds, disable_dataset_cache
def test_unit_systems():
goofy_unit_system = UnitSystem("goofy", "ly", "lbm", "hr", temperature_unit="R",
@@ -60,9 +59,8 @@
gslr = "GasSloshingLowRes/sloshing_low_res_hdf5_plt_cnt_0300"
@requires_file(gslr)
+@disable_dataset_cache
def test_fields_diff_systems_sloshing():
- ytcfg["yt","skip_dataset_cache"] = "True"
-
ds_cgs = load(gslr)
dd_cgs = ds_cgs.sphere("c", (15., "kpc"))
@@ -77,9 +75,8 @@
etc = "enzo_tiny_cosmology/DD0046/DD0046"
@requires_file(etc)
+@disable_dataset_cache
def test_fields_diff_systems_etc():
- ytcfg["yt","skip_dataset_cache"] = "True"
-
ds_cgs = load(etc)
dd_cgs = ds_cgs.sphere("max", (500., "kpc"))
@@ -97,8 +94,8 @@
wdm = 'WDMerger_hdf5_chk_1000/WDMerger_hdf5_chk_1000.hdf5'
@requires_file(wdm)
+@disable_dataset_cache
def test_tesla_magnetic_unit():
- ytcfg["yt", "skip_dataset_cache"] = "True"
for us in ['cgs', 'mks', 'code']:
ds = load(wdm, unit_system=us,
units_override={'magnetic_unit': (1.0, 'T')})
@@ -144,4 +141,4 @@
assert ds1.unit_registry.unit_system_id == ds3.unit_registry.unit_system_id
assert ds1.unit_registry.unit_system_id in unit_system_registry.keys()
- assert ds2.unit_registry.unit_system_id in unit_system_registry.keys()
\ No newline at end of file
+ assert ds2.unit_registry.unit_system_id in unit_system_registry.keys()
https://bitbucket.org/yt_analysis/yt/commits/91ee6f6250a6/
Changeset: 91ee6f6250a6
User: ngoldbaum
Date: 2017-12-08 18:28:19+00:00
Summary: Merge pull request #1643 from ngoldbaum/dataset-cache-fix
do a better job of handling global config state for tests that disable the dataset cache
Affected #: 3 files
diff -r a374b49fe3098b802b33caaa82ae541e9f66b777 -r 91ee6f6250a69efdb25302026b71d6c9df09ab9b yt/frontends/athena/tests/test_outputs.py
--- a/yt/frontends/athena/tests/test_outputs.py
+++ b/yt/frontends/athena/tests/test_outputs.py
@@ -16,13 +16,13 @@
from yt.testing import \
assert_equal, \
requires_file, \
- assert_allclose_units
+ assert_allclose_units, \
+ disable_dataset_cache
from yt.utilities.answer_testing.framework import \
requires_ds, \
small_patch_amr, \
data_dir_load
from yt.frontends.athena.api import AthenaDataset
-from yt.config import ytcfg
from yt.convenience import load
import yt.units as u
@@ -85,9 +85,8 @@
"mass_unit": (1.0e14,"Msun")}
@requires_file(sloshing)
+@disable_dataset_cache
def test_nprocs():
- ytcfg["yt","skip_dataset_cache"] = "True"
-
ds1 = load(sloshing, units_override=uo_sloshing)
sp1 = ds1.sphere("c", (100.,"kpc"))
prj1 = ds1.proj("density",0)
@@ -111,8 +110,6 @@
sp2.quantities.bulk_velocity())
assert_equal(prj1["density"], prj2["density"])
- ytcfg["yt","skip_dataset_cache"] = "False"
-
@requires_file(cloud)
def test_AthenaDataset():
assert isinstance(data_dir_load(cloud), AthenaDataset)
diff -r a374b49fe3098b802b33caaa82ae541e9f66b777 -r 91ee6f6250a69efdb25302026b71d6c9df09ab9b yt/testing.py
--- a/yt/testing.py
+++ b/yt/testing.py
@@ -18,6 +18,7 @@
from yt.extern.six.moves import cPickle
import itertools as it
import numpy as np
+import functools
import importlib
import os
import unittest
@@ -578,8 +579,20 @@
else:
return ffalse
+def disable_dataset_cache(func):
+ @functools.wraps(func)
+ def newfunc(*args, **kwargs):
+ restore_cfg_state = False
+ if ytcfg.get("yt", "skip_dataset_cache") == "False":
+ ytcfg["yt","skip_dataset_cache"] = "True"
+ rv = func(*args, **kwargs)
+ if restore_cfg_state:
+ ytcfg["yt","skip_dataset_cache"] = "False"
+ return rv
+ return newfunc
+
+@disable_dataset_cache
def units_override_check(fn):
- ytcfg["yt","skip_dataset_cache"] = "True"
units_list = ["length","time","mass","velocity",
"magnetic","temperature"]
ds1 = load(fn)
@@ -593,7 +606,6 @@
units_override["%s_unit" % u] = (unit_attr.v, str(unit_attr.units))
del ds1
ds2 = load(fn, units_override=units_override)
- ytcfg["yt","skip_dataset_cache"] = "False"
assert(len(ds2.units_override) > 0)
for u in units_list:
unit_attr = getattr(ds2, "%s_unit" % u, None)
diff -r a374b49fe3098b802b33caaa82ae541e9f66b777 -r 91ee6f6250a69efdb25302026b71d6c9df09ab9b yt/units/tests/test_unit_systems.py
--- a/yt/units/tests/test_unit_systems.py
+++ b/yt/units/tests/test_unit_systems.py
@@ -16,8 +16,7 @@
from yt.units import dimensions
from yt.convenience import load
from yt.testing import assert_almost_equal, assert_allclose, requires_file, \
- fake_random_ds
-from yt.config import ytcfg
+ fake_random_ds, disable_dataset_cache
def test_unit_systems():
goofy_unit_system = UnitSystem("goofy", "ly", "lbm", "hr", temperature_unit="R",
@@ -60,9 +59,8 @@
gslr = "GasSloshingLowRes/sloshing_low_res_hdf5_plt_cnt_0300"
@requires_file(gslr)
+@disable_dataset_cache
def test_fields_diff_systems_sloshing():
- ytcfg["yt","skip_dataset_cache"] = "True"
-
ds_cgs = load(gslr)
dd_cgs = ds_cgs.sphere("c", (15., "kpc"))
@@ -77,9 +75,8 @@
etc = "enzo_tiny_cosmology/DD0046/DD0046"
@requires_file(etc)
+@disable_dataset_cache
def test_fields_diff_systems_etc():
- ytcfg["yt","skip_dataset_cache"] = "True"
-
ds_cgs = load(etc)
dd_cgs = ds_cgs.sphere("max", (500., "kpc"))
@@ -97,8 +94,8 @@
wdm = 'WDMerger_hdf5_chk_1000/WDMerger_hdf5_chk_1000.hdf5'
@requires_file(wdm)
+@disable_dataset_cache
def test_tesla_magnetic_unit():
- ytcfg["yt", "skip_dataset_cache"] = "True"
for us in ['cgs', 'mks', 'code']:
ds = load(wdm, unit_system=us,
units_override={'magnetic_unit': (1.0, 'T')})
@@ -144,4 +141,4 @@
assert ds1.unit_registry.unit_system_id == ds3.unit_registry.unit_system_id
assert ds1.unit_registry.unit_system_id in unit_system_registry.keys()
- assert ds2.unit_registry.unit_system_id in unit_system_registry.keys()
\ No newline at end of file
+ assert ds2.unit_registry.unit_system_id in unit_system_registry.keys()
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
More information about the yt-svn
mailing list