[yt-svn] commit/yt: 8 new changesets
commits-noreply@bitbucket.org
commits-noreply@bitbucket.org
Thu Feb 2 08:43:19 PST 2017
8 new commits in yt:
https://bitbucket.org/yt_analysis/yt/commits/59b01f6de1f5/
Changeset: 59b01f6de1f5
Branch: yt
User: atmyers
Date: 2017-01-31 21:35:39+00:00
Summary: Adding support for Castro particles.
Affected #: 2 files
diff -r 2e2dcac8a83a1086033d4640cb4767373478a4ad -r 59b01f6de1f5f9035c57e30424d244edb8d2a93d yt/frontends/boxlib/data_structures.py
--- a/yt/frontends/boxlib/data_structures.py
+++ b/yt/frontends/boxlib/data_structures.py
@@ -822,8 +822,26 @@
return False
+class CastroHierarchy(BoxlibHierarchy):
+
+ def __init__(self, ds, dataset_type='castro_native'):
+ super(CastroHierarchy, self).__init__(ds, dataset_type)
+
+ # extra beyond the base real fields that all Boxlib
+ # particles have, i.e. the xyz positions
+ castro_extra_real_fields = ['particle_velocity_x',
+ 'particle_velocity_y',
+ 'particle_velocity_z']
+
+ is_checkpoint = True
+
+ self._read_particles("Tracer", is_checkpoint,
+ castro_extra_real_fields[0:self.ds.dimensionality])
+
+
class CastroDataset(BoxlibDataset):
+ _index_class = CastroHierarchy
_field_info_class = CastroFieldInfo
@classmethod
@@ -882,7 +900,13 @@
if not self.parameters['-z'] == "interior": periodicity[2] = False
self.periodicity = ensure_tuple(periodicity)
-
+ if os.path.isdir(os.path.join(self.output_dir, "Tracer")):
+ # we have particles
+ self.parameters["particles"] = 1
+ self.particle_types = ("Tracer",)
+ self.particle_types_raw = self.particle_types
+
+
class MaestroDataset(BoxlibDataset):
@@ -956,7 +980,8 @@
is_checkpoint = False
- self._read_particles("DM", is_checkpoint, nyx_extra_real_fields)
+ self._read_particles("DM", is_checkpoint,
+ nyx_extra_real_fields[0:self.ds.dimensionality+1])
class NyxDataset(BoxlibDataset):
@@ -1169,7 +1194,8 @@
is_checkpoint = False
for ptype in self.ds.particle_types:
- self._read_particles(ptype, is_checkpoint, warpx_extra_real_fields)
+ self._read_particles(ptype, is_checkpoint,
+ warpx_extra_real_fields[0:self.ds.dimensionality+1])
# Additional WarpX particle information (used to set up species)
with open(self.ds.output_dir + "/WarpXHeader", 'r') as f:
diff -r 2e2dcac8a83a1086033d4640cb4767373478a4ad -r 59b01f6de1f5f9035c57e30424d244edb8d2a93d yt/frontends/boxlib/fields.py
--- a/yt/frontends/boxlib/fields.py
+++ b/yt/frontends/boxlib/fields.py
@@ -251,6 +251,12 @@
("rot_z", ("cm/s**2", [], r"\mathbf{f}_{\rm{rot}} \cdot \mathbf{e}_z")),
)
+ known_particle_fields = (
+ ("particle_position_x", ("code_length", [], None)),
+ ("particle_position_y", ("code_length", [], None)),
+ ("particle_position_z", ("code_length", [], None)),
+ )
+
def setup_fluid_fields(self):
# add X's
for _, field in self.ds.field_list:
https://bitbucket.org/yt_analysis/yt/commits/f6ae1358cb0b/
Changeset: f6ae1358cb0b
Branch: yt
User: atmyers
Date: 2017-01-31 21:45:57+00:00
Summary: Adding answer tests for Castro particle datasets.
Affected #: 2 files
diff -r 59b01f6de1f5f9035c57e30424d244edb8d2a93d -r f6ae1358cb0b154e608cb7c371d6f5487809d04c tests/tests.yaml
--- a/tests/tests.yaml
+++ b/tests/tests.yaml
@@ -73,11 +73,14 @@
- yt/frontends/boxlib/tests/test_outputs.py:test_radtube
- yt/frontends/boxlib/tests/test_outputs.py:test_star
- yt/frontends/boxlib/tests/test_outputs.py:test_OrionDataset
+ - yt/frontends/boxlib/tests/test_outputs.py:test_CastroDataset
+ - yt/frontends/boxlib/tests/test_outputs.py:test_RT_particles
- yt/frontends/boxlib/tests/test_outputs.py:test_units_override
local_boxlib_particles_001:
- yt/frontends/boxlib/tests/test_outputs.py:test_LyA
- yt/frontends/boxlib/tests/test_outputs.py:test_nyx_particle_io
+ - yt/frontends/boxlib/tests/test_outputs.py:test_castro_particle_io
- yt/frontends/boxlib/tests/test_outputs.py:test_langmuir
- yt/frontends/boxlib/tests/test_outputs.py:test_plasma
- yt/frontends/boxlib/tests/test_outputs.py:test_warpx_particle_io
diff -r 59b01f6de1f5f9035c57e30424d244edb8d2a93d -r f6ae1358cb0b154e608cb7c371d6f5487809d04c yt/frontends/boxlib/tests/test_outputs.py
--- a/yt/frontends/boxlib/tests/test_outputs.py
+++ b/yt/frontends/boxlib/tests/test_outputs.py
@@ -32,6 +32,7 @@
_orion_fields = ("temperature", "density", "velocity_magnitude")
_nyx_fields = ("Ne", "Temp", "particle_mass_density")
_warpx_fields = ("Ex", "By", "jz")
+_castro_fields = ("Temp", "density", "particle_count")
radadvect = "RadAdvect/plt00000"
@requires_ds(radadvect)
@@ -104,6 +105,44 @@
assert(np.all(np.logical_and(reg['particle_position_z'] <= right_edge[2],
reg['particle_position_z'] >= left_edge[2])))
+RT_particles = "RT_particles/plt00050"
+@requires_ds(RT_particles)
+def test_RT_particles():
+ ds = data_dir_load(RT_particles)
+ yield assert_equal, str(ds), "plt00050"
+ for test in small_patch_amr(ds, _castro_fields):
+ test_RT_particles.__name__ = test.description
+ yield test
+
+
+@requires_file(RT_particles)
+def test_castro_particle_io():
+ ds = data_dir_load(RT_particles)
+
+ grid = ds.index.grids[2]
+ npart_grid_2 = 49 # read directly from the header
+ assert_equal(grid['particle_position_x'].size, npart_grid_2)
+ assert_equal(grid['Tracer', 'particle_position_y'].size, npart_grid_2)
+ assert_equal(grid['all', 'particle_position_y'].size, npart_grid_2)
+
+ ad = ds.all_data()
+ npart = 49 # read directly from the header
+ assert_equal(ad['particle_velocity_x'].size, npart)
+ assert_equal(ad['Tracer', 'particle_velocity_y'].size, npart)
+ assert_equal(ad['all', 'particle_velocity_y'].size, npart)
+
+ left_edge = ds.arr([0.0, 0.0, 0.0], 'code_length')
+ right_edge = ds.arr([0.25, 1.0, 1.0], 'code_length')
+ center = 0.5*(left_edge + right_edge)
+
+ reg = ds.region(center, left_edge, right_edge)
+
+ assert(np.all(np.logical_and(reg['particle_position_x'] <= right_edge[0],
+ reg['particle_position_x'] >= left_edge[0])))
+
+ assert(np.all(np.logical_and(reg['particle_position_y'] <= right_edge[1],
+ reg['particle_position_y'] >= left_edge[1])))
+
langmuir = "LangmuirWave/plt00020"
@requires_ds(langmuir)
def test_langmuir():
@@ -173,6 +212,10 @@
def test_NyxDataset():
assert isinstance(data_dir_load(LyA), NyxDataset)
+@requires_file(RT_particles)
+def test_CastroDataset():
+ assert isinstance(data_dir_load(RT_particles), CastroDataset)
+
@requires_file(LyA)
def test_WarpXDataset():
assert isinstance(data_dir_load(plasma), WarpXDataset)
https://bitbucket.org/yt_analysis/yt/commits/34552b41081a/
Changeset: 34552b41081a
Branch: yt
User: atmyers
Date: 2017-01-31 22:04:16+00:00
Summary: don't try to read the particles if we don't have any!
Affected #: 1 file
diff -r f6ae1358cb0b154e608cb7c371d6f5487809d04c -r 34552b41081aabbe56b3943b7f871280c881bb92 yt/frontends/boxlib/data_structures.py
--- a/yt/frontends/boxlib/data_structures.py
+++ b/yt/frontends/boxlib/data_structures.py
@@ -827,16 +827,18 @@
def __init__(self, ds, dataset_type='castro_native'):
super(CastroHierarchy, self).__init__(ds, dataset_type)
- # extra beyond the base real fields that all Boxlib
- # particles have, i.e. the xyz positions
- castro_extra_real_fields = ['particle_velocity_x',
- 'particle_velocity_y',
- 'particle_velocity_z']
+ if ("particles" in self.ds.parameters):
- is_checkpoint = True
+ # extra beyond the base real fields that all Boxlib
+ # particles have, i.e. the xyz positions
+ castro_extra_real_fields = ['particle_velocity_x',
+ 'particle_velocity_y',
+ 'particle_velocity_z']
- self._read_particles("Tracer", is_checkpoint,
- castro_extra_real_fields[0:self.ds.dimensionality])
+ is_checkpoint = True
+
+ self._read_particles("Tracer", is_checkpoint,
+ castro_extra_real_fields[0:self.ds.dimensionality])
class CastroDataset(BoxlibDataset):
https://bitbucket.org/yt_analysis/yt/commits/4166e56b355a/
Changeset: 4166e56b355a
Branch: yt
User: atmyers
Date: 2017-01-31 22:06:25+00:00
Summary: updating code support section.
Affected #: 1 file
diff -r 34552b41081aabbe56b3943b7f871280c881bb92 -r 4166e56b355afeca01a01514121cd4facc6527fb doc/source/reference/code_support.rst
--- a/doc/source/reference/code_support.rst
+++ b/doc/source/reference/code_support.rst
@@ -22,7 +22,7 @@
+-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
| Athena | Y | N | Y | Y | Y | Y | Y | Full |
+-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
-| Castro | Y | N | Partial | Y | Y | Y | N | Full |
+| Castro | Y | Y [#f3]_ | Partial | Y | Y | Y | N | Full |
+-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
| Chombo | Y | Y | Y | Y | Y | Y | Y | Full |
+-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
@@ -68,7 +68,8 @@
.. [#f1] one-dimensional base-state not read in currently.
.. [#f2] These handle mesh fields using an in-memory octree that has not been parallelized.
Datasets larger than approximately 1024^3 will not scale well.
-
+.. [#f3] Newer versions of Castro that use BoxLib's standard particle format are supported.
+ The older ASCII format is not.
If you have a dataset that uses an output format not yet supported by yt, you
can either input your data following :ref:`loading-numpy-array` or
https://bitbucket.org/yt_analysis/yt/commits/36d0c7366c39/
Changeset: 36d0c7366c39
Branch: yt
User: atmyers
Date: 2017-01-31 23:01:12+00:00
Summary: Forgot to import CastroDataset
Affected #: 1 file
diff -r 4166e56b355afeca01a01514121cd4facc6527fb -r 36d0c7366c39e0d9cab8874577ffb542b08c8930 yt/frontends/boxlib/tests/test_outputs.py
--- a/yt/frontends/boxlib/tests/test_outputs.py
+++ b/yt/frontends/boxlib/tests/test_outputs.py
@@ -24,7 +24,8 @@
from yt.frontends.boxlib.api import \
OrionDataset, \
NyxDataset, \
- WarpXDataset
+ WarpXDataset, \
+ CastroDataset
import numpy as np
# We don't do anything needing ghost zone generation right now, because these
https://bitbucket.org/yt_analysis/yt/commits/62cb153fb97d/
Changeset: 62cb153fb97d
Branch: yt
User: atmyers
Date: 2017-02-01 18:01:16+00:00
Summary: Two bug fixes: Warpx frontend should ignore the inputs and probin files, and the number of particle fields that get written out doesn't change with dimensionality.
Affected #: 1 file
diff -r 36d0c7366c39e0d9cab8874577ffb542b08c8930 -r 62cb153fb97d199e138e8fd492a30bb74695c45a yt/frontends/boxlib/data_structures.py
--- a/yt/frontends/boxlib/data_structures.py
+++ b/yt/frontends/boxlib/data_structures.py
@@ -437,6 +437,8 @@
"materials.gamma", 1.6667)
def _localize_check(self, fn):
+ if fn is None:
+ return None
# If the file exists, use it. If not, set it to None.
root_dir = os.path.dirname(self.output_dir)
full_fn = os.path.join(root_dir, fn)
@@ -1196,8 +1198,7 @@
is_checkpoint = False
for ptype in self.ds.particle_types:
- self._read_particles(ptype, is_checkpoint,
- warpx_extra_real_fields[0:self.ds.dimensionality+1])
+ self._read_particles(ptype, is_checkpoint, warpx_extra_real_fields)
# Additional WarpX particle information (used to set up species)
with open(self.ds.output_dir + "/WarpXHeader", 'r') as f:
@@ -1238,8 +1239,8 @@
_field_info_class = WarpXFieldInfo
def __init__(self, output_dir,
- cparam_filename="inputs",
- fparam_filename="probin",
+ cparam_filename=None,
+ fparam_filename=None,
dataset_type='boxlib_native',
storage_filename=None,
units_override=None,
https://bitbucket.org/yt_analysis/yt/commits/a3fe288c7e02/
Changeset: a3fe288c7e02
Branch: yt
User: atmyers
Date: 2017-02-02 04:27:32+00:00
Summary: incrementing test numbers.
Affected #: 1 file
diff -r 62cb153fb97d199e138e8fd492a30bb74695c45a -r a3fe288c7e02d1b4311ff82069da540f11446190 tests/tests.yaml
--- a/tests/tests.yaml
+++ b/tests/tests.yaml
@@ -68,7 +68,7 @@
- yt/visualization/tests/test_mesh_slices.py:test_tri2
- yt/visualization/tests/test_mesh_slices.py:test_multi_region
- local_boxlib_002:
+ local_boxlib_003:
- yt/frontends/boxlib/tests/test_outputs.py:test_radadvect
- yt/frontends/boxlib/tests/test_outputs.py:test_radtube
- yt/frontends/boxlib/tests/test_outputs.py:test_star
@@ -77,7 +77,7 @@
- yt/frontends/boxlib/tests/test_outputs.py:test_RT_particles
- yt/frontends/boxlib/tests/test_outputs.py:test_units_override
- local_boxlib_particles_001:
+ local_boxlib_particles_002:
- yt/frontends/boxlib/tests/test_outputs.py:test_LyA
- yt/frontends/boxlib/tests/test_outputs.py:test_nyx_particle_io
- yt/frontends/boxlib/tests/test_outputs.py:test_castro_particle_io
https://bitbucket.org/yt_analysis/yt/commits/706bb6f73754/
Changeset: 706bb6f73754
Branch: yt
User: jzuhone
Date: 2017-02-02 16:43:11+00:00
Summary: Merged in atmyers/yt (pull request #2510)
Adding support for Castro particles
Approved-by: Michael Zingale
Approved-by: Nathan Goldbaum
Approved-by: John ZuHone
Affected #: 5 files
diff -r 22ce92624c6f5d630852e58f427262dd516f3d82 -r 706bb6f73754c583ca5c7c7e79c00694104db8ad doc/source/reference/code_support.rst
--- a/doc/source/reference/code_support.rst
+++ b/doc/source/reference/code_support.rst
@@ -22,7 +22,7 @@
+-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
| Athena | Y | N | Y | Y | Y | Y | Y | Full |
+-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
-| Castro | Y | N | Partial | Y | Y | Y | N | Full |
+| Castro | Y | Y [#f3]_ | Partial | Y | Y | Y | N | Full |
+-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
| Chombo | Y | Y | Y | Y | Y | Y | Y | Full |
+-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
@@ -68,7 +68,8 @@
.. [#f1] one-dimensional base-state not read in currently.
.. [#f2] These handle mesh fields using an in-memory octree that has not been parallelized.
Datasets larger than approximately 1024^3 will not scale well.
-
+.. [#f3] Newer versions of Castro that use BoxLib's standard particle format are supported.
+ The older ASCII format is not.
If you have a dataset that uses an output format not yet supported by yt, you
can either input your data following :ref:`loading-numpy-array` or
diff -r 22ce92624c6f5d630852e58f427262dd516f3d82 -r 706bb6f73754c583ca5c7c7e79c00694104db8ad tests/tests.yaml
--- a/tests/tests.yaml
+++ b/tests/tests.yaml
@@ -68,16 +68,19 @@
- yt/visualization/tests/test_mesh_slices.py:test_tri2
- yt/visualization/tests/test_mesh_slices.py:test_multi_region
- local_boxlib_002:
+ local_boxlib_003:
- yt/frontends/boxlib/tests/test_outputs.py:test_radadvect
- yt/frontends/boxlib/tests/test_outputs.py:test_radtube
- yt/frontends/boxlib/tests/test_outputs.py:test_star
- yt/frontends/boxlib/tests/test_outputs.py:test_OrionDataset
+ - yt/frontends/boxlib/tests/test_outputs.py:test_CastroDataset
+ - yt/frontends/boxlib/tests/test_outputs.py:test_RT_particles
- yt/frontends/boxlib/tests/test_outputs.py:test_units_override
- local_boxlib_particles_001:
+ local_boxlib_particles_002:
- yt/frontends/boxlib/tests/test_outputs.py:test_LyA
- yt/frontends/boxlib/tests/test_outputs.py:test_nyx_particle_io
+ - yt/frontends/boxlib/tests/test_outputs.py:test_castro_particle_io
- yt/frontends/boxlib/tests/test_outputs.py:test_langmuir
- yt/frontends/boxlib/tests/test_outputs.py:test_plasma
- yt/frontends/boxlib/tests/test_outputs.py:test_warpx_particle_io
diff -r 22ce92624c6f5d630852e58f427262dd516f3d82 -r 706bb6f73754c583ca5c7c7e79c00694104db8ad yt/frontends/boxlib/data_structures.py
--- a/yt/frontends/boxlib/data_structures.py
+++ b/yt/frontends/boxlib/data_structures.py
@@ -437,6 +437,8 @@
"materials.gamma", 1.6667)
def _localize_check(self, fn):
+ if fn is None:
+ return None
# If the file exists, use it. If not, set it to None.
root_dir = os.path.dirname(self.output_dir)
full_fn = os.path.join(root_dir, fn)
@@ -822,8 +824,28 @@
return False
+class CastroHierarchy(BoxlibHierarchy):
+
+ def __init__(self, ds, dataset_type='castro_native'):
+ super(CastroHierarchy, self).__init__(ds, dataset_type)
+
+ if ("particles" in self.ds.parameters):
+
+ # extra beyond the base real fields that all Boxlib
+ # particles have, i.e. the xyz positions
+ castro_extra_real_fields = ['particle_velocity_x',
+ 'particle_velocity_y',
+ 'particle_velocity_z']
+
+ is_checkpoint = True
+
+ self._read_particles("Tracer", is_checkpoint,
+ castro_extra_real_fields[0:self.ds.dimensionality])
+
+
class CastroDataset(BoxlibDataset):
+ _index_class = CastroHierarchy
_field_info_class = CastroFieldInfo
@classmethod
@@ -882,7 +904,13 @@
if not self.parameters['-z'] == "interior": periodicity[2] = False
self.periodicity = ensure_tuple(periodicity)
-
+ if os.path.isdir(os.path.join(self.output_dir, "Tracer")):
+ # we have particles
+ self.parameters["particles"] = 1
+ self.particle_types = ("Tracer",)
+ self.particle_types_raw = self.particle_types
+
+
class MaestroDataset(BoxlibDataset):
@@ -956,7 +984,8 @@
is_checkpoint = False
- self._read_particles("DM", is_checkpoint, nyx_extra_real_fields)
+ self._read_particles("DM", is_checkpoint,
+ nyx_extra_real_fields[0:self.ds.dimensionality+1])
class NyxDataset(BoxlibDataset):
@@ -1210,8 +1239,8 @@
_field_info_class = WarpXFieldInfo
def __init__(self, output_dir,
- cparam_filename="inputs",
- fparam_filename="probin",
+ cparam_filename=None,
+ fparam_filename=None,
dataset_type='boxlib_native',
storage_filename=None,
units_override=None,
diff -r 22ce92624c6f5d630852e58f427262dd516f3d82 -r 706bb6f73754c583ca5c7c7e79c00694104db8ad yt/frontends/boxlib/fields.py
--- a/yt/frontends/boxlib/fields.py
+++ b/yt/frontends/boxlib/fields.py
@@ -251,6 +251,12 @@
("rot_z", ("cm/s**2", [], r"\mathbf{f}_{\rm{rot}} \cdot \mathbf{e}_z")),
)
+ known_particle_fields = (
+ ("particle_position_x", ("code_length", [], None)),
+ ("particle_position_y", ("code_length", [], None)),
+ ("particle_position_z", ("code_length", [], None)),
+ )
+
def setup_fluid_fields(self):
# add X's
for _, field in self.ds.field_list:
diff -r 22ce92624c6f5d630852e58f427262dd516f3d82 -r 706bb6f73754c583ca5c7c7e79c00694104db8ad yt/frontends/boxlib/tests/test_outputs.py
--- a/yt/frontends/boxlib/tests/test_outputs.py
+++ b/yt/frontends/boxlib/tests/test_outputs.py
@@ -24,7 +24,8 @@
from yt.frontends.boxlib.api import \
OrionDataset, \
NyxDataset, \
- WarpXDataset
+ WarpXDataset, \
+ CastroDataset
import numpy as np
# We don't do anything needing ghost zone generation right now, because these
@@ -32,6 +33,7 @@
_orion_fields = ("temperature", "density", "velocity_magnitude")
_nyx_fields = ("Ne", "Temp", "particle_mass_density")
_warpx_fields = ("Ex", "By", "jz")
+_castro_fields = ("Temp", "density", "particle_count")
radadvect = "RadAdvect/plt00000"
@requires_ds(radadvect)
@@ -104,6 +106,44 @@
assert(np.all(np.logical_and(reg['particle_position_z'] <= right_edge[2],
reg['particle_position_z'] >= left_edge[2])))
+RT_particles = "RT_particles/plt00050"
+@requires_ds(RT_particles)
+def test_RT_particles():
+ ds = data_dir_load(RT_particles)
+ yield assert_equal, str(ds), "plt00050"
+ for test in small_patch_amr(ds, _castro_fields):
+ test_RT_particles.__name__ = test.description
+ yield test
+
+
+@requires_file(RT_particles)
+def test_castro_particle_io():
+ ds = data_dir_load(RT_particles)
+
+ grid = ds.index.grids[2]
+ npart_grid_2 = 49 # read directly from the header
+ assert_equal(grid['particle_position_x'].size, npart_grid_2)
+ assert_equal(grid['Tracer', 'particle_position_y'].size, npart_grid_2)
+ assert_equal(grid['all', 'particle_position_y'].size, npart_grid_2)
+
+ ad = ds.all_data()
+ npart = 49 # read directly from the header
+ assert_equal(ad['particle_velocity_x'].size, npart)
+ assert_equal(ad['Tracer', 'particle_velocity_y'].size, npart)
+ assert_equal(ad['all', 'particle_velocity_y'].size, npart)
+
+ left_edge = ds.arr([0.0, 0.0, 0.0], 'code_length')
+ right_edge = ds.arr([0.25, 1.0, 1.0], 'code_length')
+ center = 0.5*(left_edge + right_edge)
+
+ reg = ds.region(center, left_edge, right_edge)
+
+ assert(np.all(np.logical_and(reg['particle_position_x'] <= right_edge[0],
+ reg['particle_position_x'] >= left_edge[0])))
+
+ assert(np.all(np.logical_and(reg['particle_position_y'] <= right_edge[1],
+ reg['particle_position_y'] >= left_edge[1])))
+
langmuir = "LangmuirWave/plt00020"
@requires_ds(langmuir)
def test_langmuir():
@@ -173,6 +213,10 @@
def test_NyxDataset():
assert isinstance(data_dir_load(LyA), NyxDataset)
+@requires_file(RT_particles)
+def test_CastroDataset():
+ assert isinstance(data_dir_load(RT_particles), CastroDataset)
+
@requires_file(LyA)
def test_WarpXDataset():
assert isinstance(data_dir_load(plasma), WarpXDataset)
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
More information about the yt-svn
mailing list