[yt-svn] commit/yt: jzuhone: Merged in atmyers/yt (pull request #2510)

commits-noreply at bitbucket.org
Thu Feb 2 08:43:18 PST 2017


1 new commit in yt:

https://bitbucket.org/yt_analysis/yt/commits/706bb6f73754/
Changeset:   706bb6f73754
Branch:      yt
User:        jzuhone
Date:        2017-02-02 16:43:11+00:00
Summary:     Merged in atmyers/yt (pull request #2510)

Adding support for Castro particles

Approved-by: Michael Zingale
Approved-by: Nathan Goldbaum
Approved-by: John ZuHone
Affected #:  5 files
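
As a quick orientation before the diff: the short sketch below, modelled on the new tests added at the bottom of this changeset, shows how the Castro "Tracer" particles become accessible once this merge lands. The plotfile path is the RT_particles answer-test dataset used by those tests and is assumed to be available locally.

    import yt

    # Load a Castro plotfile that contains particle data; "RT_particles/plt00050"
    # is the answer-test dataset exercised further down in this changeset.
    ds = yt.load("RT_particles/plt00050")

    # The new CastroHierarchy registers a "Tracer" particle type, so particle
    # fields can be read through any data object, e.g. the whole domain:
    ad = ds.all_data()
    print(ad["Tracer", "particle_position_x"])
    print(ad["Tracer", "particle_velocity_y"])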

diff -r 22ce92624c6f5d630852e58f427262dd516f3d82 -r 706bb6f73754c583ca5c7c7e79c00694104db8ad doc/source/reference/code_support.rst
--- a/doc/source/reference/code_support.rst
+++ b/doc/source/reference/code_support.rst
@@ -22,7 +22,7 @@
 +-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
 | Athena                |     Y      |     N     |      Y     |   Y   |    Y     |    Y     |     Y      |   Full   |
 +-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
-| Castro                |     Y      |     N     |   Partial  |   Y   |    Y     |    Y     |     N      |   Full   |
+| Castro                |     Y      |  Y [#f3]_ |   Partial  |   Y   |    Y     |    Y     |     N      |   Full   |
 +-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
 | Chombo                |     Y      |     Y     |      Y     |   Y   |    Y     |    Y     |     Y      |   Full   |
 +-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
@@ -68,7 +68,8 @@
 .. [#f1] one-dimensional base-state not read in currently.
 .. [#f2] These handle mesh fields using an in-memory octree that has not been parallelized.
          Datasets larger than approximately 1024^3 will not scale well.
-
+.. [#f3] Newer versions of Castro that use BoxLib's standard particle format are supported.
+	 The older ASCII format is not.
 
 If you have a dataset that uses an output format not yet supported by yt, you
 can either input your data following :ref:`loading-numpy-array` or

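The new footnote corresponds directly to how the frontend detects particle output: as the data_structures.py hunk below shows, a plotfile written with BoxLib's standard particle format carries a "Tracer" subdirectory, and its presence is what enables the particle machinery. A standalone sketch of that check (the plotfile path is hypothetical):

    import os

    # Hypothetical Castro plotfile path; the frontend looks for a "Tracer"
    # subdirectory to decide whether to set up particle fields.
    output_dir = "plt00050"
    if os.path.isdir(os.path.join(output_dir, "Tracer")):
        print("BoxLib-format Castro particles found")
    else:
        print("no particle data (or the older, unsupported ASCII format)")
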
diff -r 22ce92624c6f5d630852e58f427262dd516f3d82 -r 706bb6f73754c583ca5c7c7e79c00694104db8ad tests/tests.yaml
--- a/tests/tests.yaml
+++ b/tests/tests.yaml
@@ -68,16 +68,19 @@
     - yt/visualization/tests/test_mesh_slices.py:test_tri2
     - yt/visualization/tests/test_mesh_slices.py:test_multi_region
 
-  local_boxlib_002:
+  local_boxlib_003:
     - yt/frontends/boxlib/tests/test_outputs.py:test_radadvect
     - yt/frontends/boxlib/tests/test_outputs.py:test_radtube
     - yt/frontends/boxlib/tests/test_outputs.py:test_star
     - yt/frontends/boxlib/tests/test_outputs.py:test_OrionDataset
+    - yt/frontends/boxlib/tests/test_outputs.py:test_CastroDataset
+    - yt/frontends/boxlib/tests/test_outputs.py:test_RT_particles
     - yt/frontends/boxlib/tests/test_outputs.py:test_units_override
 
-  local_boxlib_particles_001:
+  local_boxlib_particles_002:
     - yt/frontends/boxlib/tests/test_outputs.py:test_LyA
     - yt/frontends/boxlib/tests/test_outputs.py:test_nyx_particle_io
+    - yt/frontends/boxlib/tests/test_outputs.py:test_castro_particle_io
     - yt/frontends/boxlib/tests/test_outputs.py:test_langmuir
     - yt/frontends/boxlib/tests/test_outputs.py:test_plasma
     - yt/frontends/boxlib/tests/test_outputs.py:test_warpx_particle_io

diff -r 22ce92624c6f5d630852e58f427262dd516f3d82 -r 706bb6f73754c583ca5c7c7e79c00694104db8ad yt/frontends/boxlib/data_structures.py
--- a/yt/frontends/boxlib/data_structures.py
+++ b/yt/frontends/boxlib/data_structures.py
@@ -437,6 +437,8 @@
             "materials.gamma", 1.6667)
 
     def _localize_check(self, fn):
+        if fn is None:
+            return None
         # If the file exists, use it.  If not, set it to None.
         root_dir = os.path.dirname(self.output_dir)
         full_fn = os.path.join(root_dir, fn)
@@ -822,8 +824,28 @@
         return False
 
 
+class CastroHierarchy(BoxlibHierarchy):
+
+    def __init__(self, ds, dataset_type='castro_native'):
+        super(CastroHierarchy, self).__init__(ds, dataset_type)
+
+        if ("particles" in self.ds.parameters):
+
+            # extra beyond the base real fields that all Boxlib
+            # particles have, i.e. the xyz positions
+            castro_extra_real_fields = ['particle_velocity_x',
+                                        'particle_velocity_y',
+                                        'particle_velocity_z']
+
+            is_checkpoint = True
+
+            self._read_particles("Tracer", is_checkpoint, 
+                                 castro_extra_real_fields[0:self.ds.dimensionality])
+
+
 class CastroDataset(BoxlibDataset):
 
+    _index_class = CastroHierarchy
     _field_info_class = CastroFieldInfo
 
     @classmethod
@@ -882,7 +904,13 @@
             if not self.parameters['-z'] == "interior": periodicity[2] = False
 
         self.periodicity = ensure_tuple(periodicity)
-    
+        if os.path.isdir(os.path.join(self.output_dir, "Tracer")):
+            # we have particles
+            self.parameters["particles"] = 1 
+            self.particle_types = ("Tracer",)
+            self.particle_types_raw = self.particle_types
+
+
 
 class MaestroDataset(BoxlibDataset):
 
@@ -956,7 +984,8 @@
 
         is_checkpoint = False
 
-        self._read_particles("DM", is_checkpoint, nyx_extra_real_fields)
+        self._read_particles("DM", is_checkpoint, 
+                             nyx_extra_real_fields[0:self.ds.dimensionality+1])
 
 
 class NyxDataset(BoxlibDataset):
@@ -1210,8 +1239,8 @@
     _field_info_class = WarpXFieldInfo
 
     def __init__(self, output_dir,
-                 cparam_filename="inputs",
-                 fparam_filename="probin",
+                 cparam_filename=None,
+                 fparam_filename=None,
                  dataset_type='boxlib_native',
                  storage_filename=None,
                  units_override=None,

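One detail worth noting in the hunk above: both the Castro and Nyx hierarchies now slice their lists of extra real particle fields by the dataset's dimensionality, so a lower-dimensional run only registers the velocity components it actually stores (the +1 in the Nyx case suggests its list carries one leading non-velocity field). A minimal, standalone illustration of the Castro slice:

    # Field list copied from the hunk above; the dimensionalities are examples.
    castro_extra_real_fields = ['particle_velocity_x',
                                'particle_velocity_y',
                                'particle_velocity_z']

    for dim in (1, 2, 3):
        print(dim, castro_extra_real_fields[0:dim])
    # 1 ['particle_velocity_x']
    # 2 ['particle_velocity_x', 'particle_velocity_y']
    # 3 ['particle_velocity_x', 'particle_velocity_y', 'particle_velocity_z']
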
diff -r 22ce92624c6f5d630852e58f427262dd516f3d82 -r 706bb6f73754c583ca5c7c7e79c00694104db8ad yt/frontends/boxlib/fields.py
--- a/yt/frontends/boxlib/fields.py
+++ b/yt/frontends/boxlib/fields.py
@@ -251,6 +251,12 @@
         ("rot_z", ("cm/s**2", [], r"\mathbf{f}_{\rm{rot}} \cdot \mathbf{e}_z")),
     )
 
+    known_particle_fields = (
+        ("particle_position_x", ("code_length", [], None)),
+        ("particle_position_y", ("code_length", [], None)),
+        ("particle_position_z", ("code_length", [], None)),
+    )
+
     def setup_fluid_fields(self):
         # add X's
         for _, field in self.ds.field_list:

diff -r 22ce92624c6f5d630852e58f427262dd516f3d82 -r 706bb6f73754c583ca5c7c7e79c00694104db8ad yt/frontends/boxlib/tests/test_outputs.py
--- a/yt/frontends/boxlib/tests/test_outputs.py
+++ b/yt/frontends/boxlib/tests/test_outputs.py
@@ -24,7 +24,8 @@
 from yt.frontends.boxlib.api import \
     OrionDataset, \
     NyxDataset, \
-    WarpXDataset
+    WarpXDataset, \
+    CastroDataset
 import numpy as np    
 
 # We don't do anything needing ghost zone generation right now, because these
@@ -32,6 +33,7 @@
 _orion_fields = ("temperature", "density", "velocity_magnitude")
 _nyx_fields = ("Ne", "Temp", "particle_mass_density")
 _warpx_fields = ("Ex", "By", "jz")
+_castro_fields = ("Temp", "density", "particle_count")
 
 radadvect = "RadAdvect/plt00000"
 @requires_ds(radadvect)
@@ -104,6 +106,44 @@
     assert(np.all(np.logical_and(reg['particle_position_z'] <= right_edge[2], 
                                  reg['particle_position_z'] >= left_edge[2])))
 
+RT_particles = "RT_particles/plt00050"
+@requires_ds(RT_particles)
+def test_RT_particles():
+    ds = data_dir_load(RT_particles)
+    yield assert_equal, str(ds), "plt00050"
+    for test in small_patch_amr(ds, _castro_fields):
+        test_RT_particles.__name__ = test.description
+        yield test
+
+
+@requires_file(RT_particles)
+def test_castro_particle_io():
+    ds = data_dir_load(RT_particles)
+
+    grid = ds.index.grids[2]
+    npart_grid_2 = 49  # read directly from the header
+    assert_equal(grid['particle_position_x'].size, npart_grid_2)
+    assert_equal(grid['Tracer', 'particle_position_y'].size, npart_grid_2)
+    assert_equal(grid['all', 'particle_position_y'].size, npart_grid_2)
+
+    ad = ds.all_data()
+    npart = 49  # read directly from the header
+    assert_equal(ad['particle_velocity_x'].size, npart)
+    assert_equal(ad['Tracer', 'particle_velocity_y'].size, npart)
+    assert_equal(ad['all', 'particle_velocity_y'].size, npart)
+
+    left_edge = ds.arr([0.0, 0.0, 0.0], 'code_length')
+    right_edge = ds.arr([0.25, 1.0, 1.0], 'code_length')
+    center = 0.5*(left_edge + right_edge)
+                   
+    reg = ds.region(center, left_edge, right_edge)
+
+    assert(np.all(np.logical_and(reg['particle_position_x'] <= right_edge[0], 
+                                 reg['particle_position_x'] >= left_edge[0])))
+
+    assert(np.all(np.logical_and(reg['particle_position_y'] <= right_edge[1], 
+                                 reg['particle_position_y'] >= left_edge[1])))
+
 langmuir = "LangmuirWave/plt00020"
 @requires_ds(langmuir)
 def test_langmuir():
@@ -173,6 +213,10 @@
 def test_NyxDataset():
     assert isinstance(data_dir_load(LyA), NyxDataset)
 
+@requires_file(RT_particles)
+def test_CastroDataset():
+    assert isinstance(data_dir_load(RT_particles), CastroDataset)
+
 @requires_file(LyA)
 def test_WarpXDataset():
     assert isinstance(data_dir_load(plasma), WarpXDataset)

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving it
because the commit notification service is enabled for this repository
and this email is addressed to you.


