[yt-svn] commit/yt: atmyers: Merged in C0nsultant/openpmd (pull request #2376)

commits-noreply@bitbucket.org
Fri Sep 23 10:56:24 PDT 2016


1 new commit in yt:

https://bitbucket.org/yt_analysis/yt/commits/71960f84ae9d/
Changeset:   71960f84ae9d
Branch:      yt
User:        atmyers
Date:        2016-09-23 17:55:57+00:00
Summary:     Merged in C0nsultant/openpmd (pull request #2376)

Add openPMD frontend
Affected #:  17 files

diff -r 068ad05bda58e31a1684d907398a61ce93fd595c -r 71960f84ae9d3e12d70759e342b2bc581581674c doc/source/examining/loading_data.rst
--- a/doc/source/examining/loading_data.rst
+++ b/doc/source/examining/loading_data.rst
@@ -1519,6 +1519,57 @@
    # The halo mass
    print(ad["FOF", "particle_mass"])
 
+.. _loading-openpmd-data:
+
+openPMD Data
+------------
+
+`openPMD <http://www.openpmd.org>`_ is an open source meta-standard and naming
+scheme for mesh-based data and particle data. It does not define a file
+format itself.
+
+HDF5 containers respecting the minimal set of meta information from
+versions 1.0.0 and 1.0.1 of the standard are compatible.
+Support for the ED-PIC extension is not available. Mesh data in Cartesian coordinates
+and particle data can be read by this frontend.
+
+To load the first in-file iteration of an openPMD dataset using the standard HDF5
+output format:
+
+.. code-block:: python
+
+   import yt
+   ds = yt.load('example-3d/hdf5/data00000100.h5')
+
+If you operate on large files, you may want to modify the virtual chunking behaviour through
+``open_pmd_virtual_gridsize``. The supplied value is an estimate of the size of a single read request
+for each particle attribute/mesh (in bytes).
+
+.. code-block:: python
+
+  import yt
+  ds = yt.load('example-3d/hdf5/data00000100.h5', open_pmd_virtual_gridsize=10e4)
+  sp = yt.SlicePlot(ds, 'x', 'rho')
+  sp.show()
+
+Particle data is fully supported:
+
+.. code-block:: python
+
+  import yt
+  ds = yt.load('example-3d/hdf5/data00000100.h5')
+  ad = ds.all_data()
+  ppp = yt.ParticlePhasePlot(ad, 'particle_position_y', 'particle_momentum_y', 'particle_weighting')
+  ppp.show()
+
+.. rubric:: Caveats
+
+* 1D, 2D and 3D data is compatible, but lower-dimensional data might yield
+  strange results since it gets padded and treated as 3D. Extraneous dimensions are
+  set to be of length 1.0 m and have a width of one cell.
+* The frontend has hardcoded logic for renaming the openPMD ``position``
+  record of particles to ``positionCoarse`` (see the example below).
+
 .. _loading-pyne-data:
 
 PyNE Data

diff -r 068ad05bda58e31a1684d907398a61ce93fd595c -r 71960f84ae9d3e12d70759e342b2bc581581674c doc/source/reference/api/api.rst
--- a/doc/source/reference/api/api.rst
+++ b/doc/source/reference/api/api.rst
@@ -323,6 +323,21 @@
    ~yt.frontends.moab.io.IOHandlerMoabH5MHex8
    ~yt.frontends.moab.io.IOHandlerMoabPyneHex8
 
+OpenPMD
+^^^^^^^
+
+.. autosummary::
+   :toctree: generated/
+
+   ~yt.frontends.open_pmd.data_structures.OpenPMDGrid
+   ~yt.frontends.open_pmd.data_structures.OpenPMDHierarchy
+   ~yt.frontends.open_pmd.data_structures.OpenPMDDataset
+   ~yt.frontends.open_pmd.fields.OpenPMDFieldInfo
+   ~yt.frontends.open_pmd.io.IOHandlerOpenPMDHDF5
+   ~yt.frontends.open_pmd.misc.parse_unit_dimension
+   ~yt.frontends.open_pmd.misc.is_const_component
+   ~yt.frontends.open_pmd.misc.get_component
+
 RAMSES
 ^^^^^^
 

diff -r 068ad05bda58e31a1684d907398a61ce93fd595c -r 71960f84ae9d3e12d70759e342b2bc581581674c doc/source/reference/code_support.rst
--- a/doc/source/reference/code_support.rst
+++ b/doc/source/reference/code_support.rst
@@ -48,6 +48,8 @@
 +-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
 | Nyx                   |     Y      |     N     |      Y     |   Y   |    Y     |    Y     |     Y      |   Full   |
 +-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
+| openPMD               |     Y      |     Y     |      N     |   Y   |    Y     |    Y     |     N      | Partial  |
++-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
 | Orion                 |     Y      |     Y     |      Y     |   Y   |    Y     |    Y     |     Y      |   Full   |
 +-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
 | OWLS/EAGLE            |     Y      |     Y     |      Y     |   Y   | Y [#f2]_ |    Y     |     Y      |   Full   |

diff -r 068ad05bda58e31a1684d907398a61ce93fd595c -r 71960f84ae9d3e12d70759e342b2bc581581674c yt/frontends/api.py
--- a/yt/frontends/api.py
+++ b/yt/frontends/api.py
@@ -35,6 +35,7 @@
     'halo_catalog',
     'http_stream',
     'moab',
+    'open_pmd',
     'owls',
     'owls_subfind',
     'ramses',

diff -r 068ad05bda58e31a1684d907398a61ce93fd595c -r 71960f84ae9d3e12d70759e342b2bc581581674c yt/frontends/open_pmd/__init__.py
--- /dev/null
+++ b/yt/frontends/open_pmd/__init__.py
@@ -0,0 +1,15 @@
+"""
+API for yt.frontends.open_pmd
+
+
+
+"""
+
+# -----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+# Copyright (c) 2015, Daniel Grassinger (HZDR)
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+# -----------------------------------------------------------------------------

diff -r 068ad05bda58e31a1684d907398a61ce93fd595c -r 71960f84ae9d3e12d70759e342b2bc581581674c yt/frontends/open_pmd/api.py
--- /dev/null
+++ b/yt/frontends/open_pmd/api.py
@@ -0,0 +1,29 @@
+"""
+API for yt.frontends.open_pmd
+
+
+
+"""
+
+# -----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+# Copyright (c) 2015, Daniel Grassinger (HZDR)
+# Copyright (c) 2016, Fabian Koller (HZDR)
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+# -----------------------------------------------------------------------------
+
+from .data_structures import \
+    OpenPMDDataset, \
+    OpenPMDGrid, \
+    OpenPMDHierarchy
+
+from .fields import \
+    OpenPMDFieldInfo
+
+from .io import \
+    IOHandlerOpenPMDHDF5
+
+from . import \
+    tests

diff -r 068ad05bda58e31a1684d907398a61ce93fd595c -r 71960f84ae9d3e12d70759e342b2bc581581674c yt/frontends/open_pmd/data_structures.py
--- /dev/null
+++ b/yt/frontends/open_pmd/data_structures.py
@@ -0,0 +1,517 @@
+"""
+openPMD data structures
+
+
+"""
+
+# -----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+# Copyright (c) 2015, Daniel Grassinger (HZDR)
+# Copyright (c) 2016, Fabian Koller (HZDR)
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+# -----------------------------------------------------------------------------
+
+from distutils.version import StrictVersion
+from functools import reduce
+from operator import mul
+from os import \
+    path, \
+    listdir
+from re import match
+
+import numpy as np
+from yt.data_objects.grid_patch import AMRGridPatch
+from yt.data_objects.static_output import Dataset
+from yt.frontends.open_pmd.fields import OpenPMDFieldInfo
+from yt.frontends.open_pmd.misc import \
+    is_const_component, \
+    get_component
+from yt.funcs import setdefaultattr
+from yt.geometry.grid_geometry_handler import GridIndex
+from yt.utilities.file_handler import HDF5FileHandler
+from yt.utilities.logger import ytLogger as mylog
+from yt.utilities.on_demand_imports import _h5py as h5
+
+
+class OpenPMDGrid(AMRGridPatch):
+    """Represents chunk of data on-disk.
+
+    This defines the index and offset for every mesh and particle type.
+    It also defines parents and children grids. Since openPMD does not have multiple levels of refinement,
+    there are no parents or children for any grid.
+    """
+    _id_offset = 0
+    __slots__ = ["_level_id"]
+    # Every particle species and mesh might have different hdf5-indices and offsets
+    ftypes=[]
+    ptypes=[]
+    findex = 0
+    foffset = 0
+    pindex = 0
+    poffset = 0
+
+    def __init__(self, gid, index, level=-1, fi=0, fo=0, pi=0, po=0, ft=[], pt=[]):
+        AMRGridPatch.__init__(self, gid, filename=index.index_filename,
+                              index=index)
+        self.findex = fi
+        self.foffset = fo
+        self.pindex = pi
+        self.poffset = po
+        self.ftypes = ft
+        self.ptypes = pt
+        self.Parent = None
+        self.Children = []
+        self.Level = level
+
+    def __repr__(self):
+        return "OpenPMDGrid_%04i (%s)" % (self.id, self.ActiveDimensions)
+
+
+class OpenPMDHierarchy(GridIndex):
+    """Defines which fields and particles are created and read from disk.
+
+    Furthermore it defines the characteristics of the grids.
+    """
+    grid = OpenPMDGrid
+
+    def __init__(self, ds, dataset_type="openPMD"):
+        self.dataset_type = dataset_type
+        self.dataset = ds
+        self.index_filename = ds.parameter_filename
+        self.directory = path.dirname(self.index_filename)
+        GridIndex.__init__(self, ds, dataset_type)
+
+    def _get_particle_type_counts(self):
+        """Reads the active number of particles for every species.
+
+        Returns
+        -------
+        dict
+            keys are ptypes
+            values are integer counts of the ptype
+        """
+        result = {}
+        f = self.dataset._handle
+        bp = self.dataset.base_path
+        pp = self.dataset.particles_path
+        for ptype in self.ds.particle_types_raw:
+            if str(ptype) == "io":
+                spec = list(f[bp + pp].keys())[0]
+            else:
+                spec = ptype
+            axis = list(f[bp + pp + "/" + spec + "/position"].keys())[0]
+            pos = f[bp + pp + "/" + spec + "/position/" + axis]
+            if is_const_component(pos):
+                result[ptype] = pos.attrs["shape"]
+            else:
+                result[ptype] = pos.len()
+        return result
+
+    def _detect_output_fields(self):
+        """Populates ``self.field_list`` with native fields (mesh and particle) on disk.
+
+        Each entry is a tuple of two strings. The first element is the on-disk fluid type or particle type.
+        The second element is the name of the field in yt. This string is later used for accessing the data.
+        Convention suggests that the on-disk fluid type should be "openPMD",
+        the on-disk particle type (for a single species of particles) is "io"
+        or (for multiple species of particles) the particle name on-disk.
+        """
+        f = self.dataset._handle
+        bp = self.dataset.base_path
+        mp = self.dataset.meshes_path
+        pp = self.dataset.particles_path
+
+        mesh_fields = []
+        try:
+            for field in f[bp + mp].keys():
+                try:
+                    for axis in f[bp + mp + field].keys():
+                        mesh_fields.append(field.replace("_", "-")
+                                           + "_" + axis)
+                except AttributeError:
+                    # This is a h5.Dataset (i.e. no axes)
+                    mesh_fields.append(field.replace("_", "-"))
+        except KeyError:
+            # There are no mesh fields
+            pass
+        self.field_list = [("openPMD", str(field)) for field in mesh_fields]
+
+        particle_fields = []
+        try:
+            for species in f[bp + pp].keys():
+                for record in f[bp + pp + species].keys():
+                    if is_const_component(f[bp + pp + species + "/" + record]):
+                        # Record itself (e.g. particle_mass) is constant
+                        particle_fields.append(species.replace("_", "-")
+                                               + "_" + record.replace("_", "-"))
+                    elif "particlePatches" not in record:
+                        try:
+                            # Create a field for every axis (x,y,z) of every property (position)
+                            # of every species (electrons)
+                            axes = list(f[bp + pp + species + "/" + record].keys())
+                            if str(record) == "position":
+                                record = "positionCoarse"
+                            for axis in axes:
+                                particle_fields.append(species.replace("_", "-")
+                                                       + "_" + record.replace("_", "-")
+                                                       + "_" + axis)
+                        except AttributeError:
+                            # Record is a dataset, does not have axes (e.g. weighting)
+                            particle_fields.append(species.replace("_", "-")
+                                                   + "_" + record.replace("_", "-"))
+                            pass
+                    else:
+                        pass
+            if len(list(f[bp + pp].keys())) > 1:
+                # There is more than one particle species, use the specific names as field types
+                self.field_list.extend(
+                    [(str(field).split("_")[0],
+                      ("particle_" + "_".join(str(field).split("_")[1:]))) for field in particle_fields])
+            else:
+                # Only one particle species, fall back to "io"
+                self.field_list.extend(
+                    [("io",
+                      ("particle_" + "_".join(str(field).split("_")[1:]))) for field in particle_fields])
+        except KeyError:
+            # There are no particle fields
+            pass
+
+    def _count_grids(self):
+        """Sets ``self.num_grids`` to be the total number of grids in the simulation.
+
+        The number of grids is determined by their respective memory footprint.
+        """
+        f = self.dataset._handle
+        bp = self.dataset.base_path
+        mp = self.dataset.meshes_path
+        pp = self.dataset.particles_path
+
+        self.meshshapes = {}
+        self.numparts = {}
+
+        self.num_grids = 0
+
+        for mesh in f[bp + mp].keys():
+            if type(f[bp + mp + mesh]) is h5.Group:
+                shape = f[bp + mp + mesh + "/" + list(f[bp + mp + mesh].keys())[0]].shape
+            else:
+                shape = f[bp + mp + mesh].shape
+            spacing = tuple(f[bp + mp + mesh].attrs["gridSpacing"])
+            offset = tuple(f[bp + mp + mesh].attrs["gridGlobalOffset"])
+            unit_si = f[bp + mp + mesh].attrs["gridUnitSI"]
+            self.meshshapes[mesh] = (shape, spacing, offset, unit_si)
+        for species in f[bp + pp].keys():
+            if "particlePatches" in f[bp + pp + "/" + species].keys():
+                for (patch, size) in enumerate(f[bp + pp + "/" + species + "/particlePatches/numParticles"]):
+                    self.numparts[species + "#" + str(patch)] = size
+            else:
+                axis = list(f[bp + pp + species + "/position"].keys())[0]
+                if is_const_component(f[bp + pp + species + "/position/" + axis]):
+                    self.numparts[species] = f[bp + pp + species + "/position/" + axis].attrs["shape"]
+                else:
+                    self.numparts[species] = f[bp + pp + species + "/position/" + axis].len()
+
+        # Limit values per grid by resulting memory footprint
+        self.vpg = int(self.dataset.gridsize / 4)  # 4 bytes per value (float32)
+
+        # Meshes of the same size do not need separate chunks
+        for (shape, spacing, offset, unit_si) in set(self.meshshapes.values()):
+            self.num_grids += min(shape[0], int(np.ceil(reduce(mul, shape) * self.vpg**-1)))
+
+        # Same goes for particle chunks if they are not inside particlePatches
+        patches = {}
+        no_patches = {}
+        for (k, v) in self.numparts.items():
+            if "#" in k:
+                patches[k] = v
+            else:
+                no_patches[k] = v
+        for size in set(no_patches.values()):
+            self.num_grids += int(np.ceil(size * self.vpg ** -1))
+        for size in patches.values():
+            self.num_grids += int(np.ceil(size * self.vpg ** -1))
+
+    def _parse_index(self):
+        """Fills each grid with appropriate properties (extent, dimensions, ...)
+
+        This calculates the properties of every OpenPMDGrid based on the total number of grids in the simulation.
+        The domain is divided into ``self.num_grids`` (roughly) equally sized chunks along the x-axis.
+        ``grid_levels`` is always equal to 0 since we only have one level of refinement in openPMD.
+
+        Notes
+        -----
+        ``self.grid_dimensions`` is rounded to the nearest integer. Grid edges are calculated from this dimension.
+        Grids with dimensions [0, 0, 0] are particle-only; all other grids do not have any particles affiliated with them.
+        """
+        f = self.dataset._handle
+        bp = self.dataset.base_path
+        pp = self.dataset.particles_path
+
+        self.grid_levels.flat[:] = 0
+        self.grids = np.empty(self.num_grids, dtype="object")
+
+        grid_index_total = 0
+
+        # Mesh grids
+        for mesh in set(self.meshshapes.values()):
+            (shape, spacing, offset, unit_si) = mesh
+            shape = np.asarray(shape)
+            spacing = np.asarray(spacing)
+            offset = np.asarray(offset)
+            # Total dimension of this grid
+            domain_dimension = np.asarray(shape, dtype=np.int32)
+            domain_dimension = np.append(domain_dimension, np.ones(3 - len(domain_dimension)))
+            # Number of grids of this shape
+            num_grids = min(shape[0], int(np.ceil(reduce(mul, shape) * self.vpg ** -1)))
+            gle = offset * unit_si  # self.dataset.domain_left_edge
+            gre = domain_dimension[:spacing.size] * unit_si * spacing + gle  # self.dataset.domain_right_edge
+            gle = np.append(gle, np.zeros(3 - len(gle)))
+            gre = np.append(gre, np.ones(3 - len(gre)))
+            grid_dim_offset = np.linspace(0, domain_dimension[0], num_grids + 1, dtype=np.int32)
+            grid_edge_offset = grid_dim_offset * np.float(domain_dimension[0]) ** -1 * (gre[0] - gle[0]) + gle[0]
+            mesh_names = []
+            for (mname, mdata) in self.meshshapes.items():
+                if mesh == mdata:
+                    mesh_names.append(str(mname))
+            prev = 0
+            for grid in np.arange(num_grids):
+                self.grid_dimensions[grid_index_total] = domain_dimension
+                self.grid_dimensions[grid_index_total][0] = grid_dim_offset[grid + 1] - grid_dim_offset[grid]
+                self.grid_left_edge[grid_index_total] = gle
+                self.grid_left_edge[grid_index_total][0] = grid_edge_offset[grid]
+                self.grid_right_edge[grid_index_total] = gre
+                self.grid_right_edge[grid_index_total][0] = grid_edge_offset[grid + 1]
+                self.grid_particle_count[grid_index_total] = 0
+                self.grids[grid_index_total] = self.grid(grid_index_total, self, 0,
+                                                         fi=prev,
+                                                         fo=self.grid_dimensions[grid_index_total][0],
+                                                         ft=mesh_names)
+                prev += self.grid_dimensions[grid_index_total][0]
+                grid_index_total += 1
+
+        handled_ptypes = []
+
+        # Particle grids
+        for (species, count) in self.numparts.items():
+            if "#" in species:
+                # This is a particlePatch
+                spec = species.split("#")
+                patch = f[bp + pp + "/" + spec[0] + "/particlePatches"]
+                num_grids = int(np.ceil(count * self.vpg ** -1))
+                gle = []
+                for axis in patch["offset"].keys():
+                    gle.append(get_component(patch, "offset/" + axis, int(spec[1]), 1)[0])
+                gle = np.asarray(gle)
+                gle = np.append(gle, np.zeros(3 - len(gle)))
+                gre = []
+                for axis in patch["extent"].keys():
+                    gre.append(get_component(patch, "extent/" + axis, int(spec[1]), 1)[0])
+                gre = np.asarray(gre)
+                gre = np.append(gre, np.ones(3 - len(gre)))
+                np.add(gle, gre, gre)
+                npo = patch["numParticlesOffset"].value.item(int(spec[1]))
+                particle_count = np.linspace(npo, npo + count, num_grids + 1,
+                                             dtype=np.int32)
+                particle_names = [str(spec[0])]
+            elif str(species) not in handled_ptypes:
+                num_grids = int(np.ceil(count * self.vpg ** -1))
+                gle = self.dataset.domain_left_edge
+                gre = self.dataset.domain_right_edge
+                particle_count = np.linspace(0, count, num_grids + 1, dtype=np.int32)
+                particle_names = []
+                for (pname, size) in self.numparts.items():
+                    if size == count:
+                        # Since this is not part of a particlePatch, we can include multiple same-sized ptypes
+                        particle_names.append(str(pname))
+                        handled_ptypes.append(str(pname))
+            else:
+                # A grid with this exact particle count has already been created
+                continue
+            for grid in np.arange(num_grids):
+                self.grid_dimensions[grid_index_total] = [0, 0, 0]  # Counted as mesh-size, thus no dimensional extent
+                self.grid_left_edge[grid_index_total] = gle
+                self.grid_right_edge[grid_index_total] = gre
+                self.grid_particle_count[grid_index_total] = (particle_count[grid + 1] - particle_count[grid]) * len(
+                    particle_names)
+                self.grids[grid_index_total] = self.grid(grid_index_total, self, 0,
+                                                         pi=particle_count[grid],
+                                                         po=particle_count[grid + 1] - particle_count[grid],
+                                                         pt=particle_names)
+                grid_index_total += 1
+
+    def _populate_grid_objects(self):
+        """This initializes all grids.
+
+        Additionally, it should set up Children and Parent lists on each grid object.
+        openPMD is not adaptive and thus there are no Children and Parents for any grid.
+        """
+        for i in np.arange(self.num_grids):
+            self.grids[i]._prepare_grid()
+            self.grids[i]._setup_dx()
+        self.max_level = 0
+
+
+class OpenPMDDataset(Dataset):
+    """Contains all the required information of a single iteration of the simulation.
+
+    Notes
+    -----
+    It is assumed that all meshes cover the same region. Their resolution can be different.
+    It is assumed that all particles reside in this same region exclusively.
+    It is assumed that the particle and mesh positions are *absolute* with respect to the simulation origin.
+    """
+    _index_class = OpenPMDHierarchy
+    _field_info_class = OpenPMDFieldInfo
+
+    def __init__(self,
+                 filename,
+                 dataset_type="openPMD",
+                 storage_filename=None,
+                 units_override=None,
+                 unit_system="mks",
+                 **kwargs):
+        self._handle = HDF5FileHandler(filename)
+        self.gridsize = kwargs.pop("open_pmd_virtual_gridsize", 10**9)
+        self._set_paths(self._handle, path.dirname(filename))
+        Dataset.__init__(self,
+                         filename,
+                         dataset_type,
+                         units_override=units_override,
+                         unit_system=unit_system)
+        self.storage_filename = storage_filename
+        self.fluid_types += ("openPMD",)
+        particles = tuple(str(c) for c in self._handle[self.base_path + self.particles_path].keys())
+        if len(particles) > 1:
+            # Only use on-disk particle names if there is more than one species
+            self.particle_types = particles
+        mylog.debug("open_pmd - self.particle_types: {}".format(self.particle_types))
+        self.particle_types_raw = self.particle_types
+        self.particle_types = tuple(self.particle_types)
+
+    def _set_paths(self, handle, path):
+        """Parses relevant hdf5-paths out of ``handle``.
+
+        Parameters
+        ----------
+        handle : h5py.File
+        path : str
+            (absolute) filepath for current hdf5 container
+        """
+        iterations = []
+        encoding = handle.attrs["iterationEncoding"].decode()
+        if "groupBased" in encoding:
+            iterations = list(handle["/data"].keys())
+            mylog.info("open_pmd - found {} iterations in file".format(len(iterations)))
+        elif "fileBased" in encoding:
+            itformat = handle.attrs["iterationFormat"].decode().split("/")[-1]
+            regex = "^" + itformat.replace("%T", "[0-9]+") + "$"
+            if path == "":
+                mylog.warning("open_pmd - For file based iterations, please use absolute file paths!")
+            for filename in listdir(path):
+                if match(regex, filename):
+                    iterations.append(filename)
+            mylog.info("open_pmd - found {} iterations in directory".format(len(iterations)))
+
+        if len(iterations) == 0:
+            mylog.warning("open_pmd - no iterations found!")
+        if "groupBased" in encoding and len(iterations) > 1:
+            mylog.warning("open_pmd - only choose to load one iteration ({})".format(list(handle["/data"].keys())[0]))
+
+        self.base_path = "/data/{}/".format(list(handle["/data"].keys())[0])
+        self.meshes_path = self._handle["/"].attrs["meshesPath"].decode()
+        self.particles_path = self._handle["/"].attrs["particlesPath"].decode()
+
+    def _set_code_unit_attributes(self):
+        """Handle conversion between different physical units and the code units.
+
+        Every dataset in openPMD can have different code <-> physical scaling.
+        The individual factor is obtained by multiplying with "unitSI" when reading data from disk.
+        """
+        setdefaultattr(self, "length_unit", self.quan(1.0, "m"))
+        setdefaultattr(self, "mass_unit", self.quan(1.0, "kg"))
+        setdefaultattr(self, "time_unit", self.quan(1.0, "s"))
+        setdefaultattr(self, "velocity_unit", self.quan(1.0, "m/s"))
+        setdefaultattr(self, "magnetic_unit", self.quan(1.0, "T"))
+
+    def _parse_parameter_file(self):
+        """Read in metadata describing the overall data on-disk.
+        """
+        f = self._handle
+        bp = self.base_path
+        mp = self.meshes_path
+
+        self.unique_identifier = 0
+        self.parameters = 0
+        self.periodicity = np.zeros(3, dtype=np.bool)
+        self.refine_by = 1
+        self.cosmological_simulation = 0
+
+        try:
+            shapes = {}
+            left_edges = {}
+            right_edges = {}
+            for mesh in f[bp + mp].keys():
+                if type(f[bp + mp + mesh]) is h5.Group:
+                    shape = np.asarray(f[bp + mp + mesh + "/" + list(f[bp + mp + mesh].keys())[0]].shape)
+                else:
+                    shape = np.asarray(f[bp + mp + mesh].shape)
+                spacing = np.asarray(f[bp + mp + mesh].attrs["gridSpacing"])
+                offset = np.asarray(f[bp + mp + mesh].attrs["gridGlobalOffset"])
+                unit_si = np.asarray(f[bp + mp + mesh].attrs["gridUnitSI"])
+                le = offset * unit_si
+                re = le + shape * unit_si * spacing
+                shapes[mesh] = shape
+                left_edges[mesh] = le
+                right_edges[mesh] = re
+            lowest_dim = np.min([len(i) for i in shapes.values()])
+            shapes = np.asarray([i[:lowest_dim] for i in shapes.values()])
+            left_edges = np.asarray([i[:lowest_dim] for i in left_edges.values()])
+            right_edges = np.asarray([i[:lowest_dim] for i in right_edges.values()])
+            fs = []
+            dle = []
+            dre = []
+            for i in np.arange(lowest_dim):
+                fs.append(np.max(shapes.transpose()[i]))
+                dle.append(np.min(left_edges.transpose()[i]))
+                dre.append(np.min(right_edges.transpose()[i]))
+            self.dimensionality = len(fs)
+            self.domain_dimensions = np.append(fs, np.ones(3 - self.dimensionality))
+            self.domain_left_edge = np.append(dle, np.zeros(3 - len(dle)))
+            self.domain_right_edge = np.append(dre, np.ones(3 - len(dre)))
+        except ValueError:
+            mylog.warning("open_pmd - It seems your data does not contain meshes. Assuming domain extent of 1m^3!")
+            self.dimensionality = 3
+            self.domain_dimensions = np.ones(3, dtype=np.float64)
+            self.domain_left_edge = np.zeros(3, dtype=np.float64)
+            self.domain_right_edge = np.ones(3, dtype=np.float64)
+
+        self.current_time = f[bp].attrs["time"] * f[bp].attrs["timeUnitSI"]
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        """Checks whether the supplied file can be read by this frontend.
+        """
+        try:
+            f = h5.File(args[0], "r")
+        except (IOError, OSError):
+            return False
+
+        requirements = ["openPMD", "basePath", "meshesPath", "particlesPath"]
+        attrs = list(f["/"].attrs.keys())
+        for i in requirements:
+            if i not in attrs:
+                f.close()
+                return False
+
+        known_versions = [StrictVersion("1.0.0"),
+                          StrictVersion("1.0.1")]
+        if StrictVersion(f.attrs["openPMD"].decode()) in known_versions:
+            f.close()
+            return True
+        else:
+            f.close()
+            return False

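A standalone sketch of the chunking arithmetic in OpenPMDHierarchy._count_grids
above, assuming float32 values (4 bytes each) and the default gridsize of 10**9
bytes:

    from functools import reduce
    from operator import mul
    import numpy as np

    def grids_for_mesh(shape, gridsize=10**9):
        # Values that fit into one virtual grid at 4 bytes per value
        vpg = int(gridsize / 4)
        # Chunks are cut along the x-axis, hence the cap at shape[0]
        return min(shape[0], int(np.ceil(reduce(mul, shape) / vpg)))

    print(grids_for_mesh((2048, 2048, 1024)))  # -> 18
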
diff -r 068ad05bda58e31a1684d907398a61ce93fd595c -r 71960f84ae9d3e12d70759e342b2bc581581674c yt/frontends/open_pmd/fields.py
--- /dev/null
+++ b/yt/frontends/open_pmd/fields.py
@@ -0,0 +1,217 @@
+"""
+openPMD-specific fields
+
+
+
+"""
+
+# -----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+# Copyright (c) 2015, Daniel Grassinger (HZDR)
+# Copyright (c) 2016, Fabian Koller (HZDR)
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+# -----------------------------------------------------------------------------
+
+import numpy as np
+
+from yt.fields.field_info_container import FieldInfoContainer
+from yt.fields.magnetic_field import setup_magnetic_field_aliases
+from yt.frontends.open_pmd.misc import \
+    parse_unit_dimension, \
+    is_const_component
+from yt.units.yt_array import YTQuantity
+from yt.utilities.logger import ytLogger as mylog
+from yt.utilities.on_demand_imports import _h5py as h5
+from yt.utilities.physical_constants import \
+    speed_of_light, \
+    mu_0
+
+
+def setup_poynting_vector(self):
+    def _get_poyn(axis):
+        def poynting(field, data):
+            u = mu_0**-1
+            if axis in "x":
+                return u * (data["E_y"] * data["magnetic_field_z"] - data["E_z"] * data["magnetic_field_y"])
+            elif axis in "y":
+                return u * (data["E_z"] * data["magnetic_field_x"] - data["E_x"] * data["magnetic_field_z"])
+            elif axis in "z":
+                return u * (data["E_x"] * data["magnetic_field_y"] - data["E_y"] * data["magnetic_field_x"])
+
+        return poynting
+
+    for ax in "xyz":
+        self.add_field(("openPMD", "poynting_vector_%s" % ax),
+                       function=_get_poyn(ax),
+                       units="W/m**2")
+
+
+def setup_kinetic_energy(self, ptype):
+    def _kin_en(field, data):
+        p2 = (data[ptype, "particle_momentum_x"] ** 2 +
+              data[ptype, "particle_momentum_y"] ** 2 +
+              data[ptype, "particle_momentum_z"] ** 2)
+        mass = data[ptype, "particle_mass"] * data[ptype, "particle_weighting"]
+        return speed_of_light * np.sqrt(p2 + mass ** 2 * speed_of_light ** 2) - mass * speed_of_light ** 2
+
+    self.add_field((ptype, "particle_kinetic_energy"),
+                   function=_kin_en,
+                   units="kg*m**2/s**2",
+                   particle_type=True)
+
+
+def setup_velocity(self, ptype):
+    def _get_vel(axis):
+        def velocity(field, data):
+            c = speed_of_light
+            momentum = data[ptype, "particle_momentum_{}".format(axis)]
+            mass = data[ptype, "particle_mass"]
+            weighting = data[ptype, "particle_weighting"]
+            return momentum / np.sqrt(
+                (mass * weighting) ** 2 +
+                (momentum ** 2) / (c ** 2)
+            )
+
+        return velocity
+
+    for ax in "xyz":
+        self.add_field((ptype, "particle_velocity_%s" % ax),
+                       function=_get_vel(ax),
+                       units="m/s",
+                       particle_type=True)
+
+
+def setup_absolute_positions(self, ptype):
+    def _abs_pos(axis):
+        def ap(field, data):
+            return np.add(data[ptype, "particle_positionCoarse_{}".format(axis)],
+                          data[ptype, "particle_positionOffset_{}".format(axis)])
+
+        return ap
+
+    for ax in "xyz":
+        self.add_field((ptype, "particle_position_%s" % ax),
+                       function=_abs_pos(ax),
+                       units="m",
+                       particle_type=True)
+
+
+class OpenPMDFieldInfo(FieldInfoContainer):
+    """Specifies which fields from the dataset yt should know about.
+
+    ``self.known_other_fields`` and ``self.known_particle_fields`` must be populated.
+    Entries for both of these lists must be tuples of the form
+        ("name", ("units", ["fields", "to", "alias"], "display_name"))
+    These fields will be represented and handled in yt in the way you define them here.
+    The fields defined in both ``self.known_other_fields`` and ``self.known_particle_fields`` will only be added
+    to a dataset (with units, aliases, etc), if they match any entry in the ``OpenPMDHierarchy``'s ``self.field_list``.
+
+    Notes
+    -----
+
+    Contrary to many other frontends, we dynamically obtain the known fields from the simulation output.
+    The openPMD markup is extremely flexible - names, dimensions and the number of individual datasets
+    can (and very likely will) vary.
+
+    openPMD states that names of records and their components are only allowed to contain the
+        letters a-z and A-Z,
+        the numbers 0-9
+        and the underscore _
+        (equivalently, the regex \w).
+    Since yt widely uses the underscore in field names, openPMD's underscores (_) are replaced by hyphens (-).
+
+    Derived fields will automatically be set up if names and units of your known on-disk (or manually derived)
+    fields match the ones in [1].
+
+    References
+    ----------
+    .. http://yt-project.org/docs/dev/analyzing/fields.html
+    .. http://yt-project.org/docs/dev/developing/creating_frontend.html#data-meaning-structures
+    .. https://github.com/openPMD/openPMD-standard/blob/latest/STANDARD.md
+    .. [1] http://yt-project.org/docs/dev/reference/field_list.html#universal-fields
+    """
+    _mag_fields = []
+
+    def __init__(self, ds, field_list):
+        f = ds._handle
+        bp = ds.base_path
+        mp = ds.meshes_path
+        pp = ds.particles_path
+        fields = f[bp + mp]
+
+        for fname in fields.keys():
+            field = fields[fname]
+            if type(field) is h5.Dataset or is_const_component(field):
+                # Don't consider axes. This is a scalar field or a constant record component (no axes)
+                ytname = str("_".join([fname.replace("_", "-")]))
+                parsed = parse_unit_dimension(np.asarray(field.attrs["unitDimension"], dtype=np.int))
+                unit = str(YTQuantity(1, parsed).units)
+                aliases = []
+                # Save a list of magnetic fields for aliasing later on
+                # We can not reasonably infer field type/unit by name in openPMD
+                if unit == "T" or unit == "kg/(A*s**2)":
+                    self._mag_fields.append(ytname)
+                self.known_other_fields += ((ytname, (unit, aliases, None)),)
+            else:
+                for axis in field.keys():
+                    ytname = str("_".join([fname.replace("_", "-"), axis]))
+                    parsed = parse_unit_dimension(np.asarray(field.attrs["unitDimension"], dtype=np.int))
+                    unit = str(YTQuantity(1, parsed).units)
+                    aliases = []
+                    # Save a list of magnetic fields for aliasing later on
+                    # We can not reasonably infer field type by name in openPMD
+                    if unit == "T" or unit == "kg/(A*s**2)":
+                        self._mag_fields.append(ytname)
+                    self.known_other_fields += ((ytname, (unit, aliases, None)),)
+        for i in self.known_other_fields:
+            mylog.debug("open_pmd - known_other_fields - {}".format(i))
+        particles = f[bp + pp]
+        for species in particles.keys():
+            for record in particles[species].keys():
+                try:
+                    pds = particles[species + "/" + record]
+                    parsed = parse_unit_dimension(pds.attrs["unitDimension"])
+                    unit = str(YTQuantity(1, parsed).units)
+                    ytattrib = str(record).replace("_", "-")
+                    if ytattrib == "position":
+                        # Symbolically rename position to preserve yt's interpretation of the pfield
+                        # particle_position is later derived in setup_absolute_positions in the way yt expects it
+                        ytattrib = "positionCoarse"
+                    if type(pds) is h5.Dataset or is_const_component(pds):
+                        name = ["particle", ytattrib]
+                        self.known_particle_fields += ((str("_".join(name)), (unit, [], None)),)
+                    else:
+                        for axis in pds.keys():
+                            aliases = []
+                            name = ["particle", ytattrib, axis]
+                            ytname = str("_".join(name))
+                            self.known_particle_fields += ((ytname, (unit, aliases, None)),)
+                except KeyError:
+                    mylog.info("open_pmd - {}_{} does not seem to have unitDimension".format(species, record))
+        for i in self.known_particle_fields:
+            mylog.debug("open_pmd - known_particle_fields - {}".format(i))
+        super(OpenPMDFieldInfo, self).__init__(ds, field_list)
+
+    def setup_fluid_fields(self):
+        """Defines which derived mesh fields to create.
+
+        If a field cannot be calculated, it will simply be skipped.
+        """
+        # Set up aliases first so the setup for poynting can use them
+        if len(self._mag_fields) > 0:
+            setup_magnetic_field_aliases(self, "openPMD", self._mag_fields)
+            setup_poynting_vector(self)
+
+    def setup_particle_fields(self, ptype):
+        """Defines which derived particle fields to create.
+
+        This will be called for every entry in ``OpenPMDDataset``'s ``self.particle_types``.
+        If a field cannot be calculated, it will simply be skipped.
+        """
+        setup_absolute_positions(self, ptype)
+        setup_kinetic_energy(self, ptype)
+        setup_velocity(self, ptype)
+        super(OpenPMDFieldInfo, self).setup_particle_fields(ptype)

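For reference, the unit derivation above amounts to feeding openPMD's
"unitDimension" attribute through parse_unit_dimension and letting yt simplify
the result. A minimal sketch (the exact spelling of the simplified unit string
may differ):

    from yt.frontends.open_pmd.misc import parse_unit_dimension
    from yt.units.yt_array import YTQuantity

    # unitDimension of a magnetic field: kg**1 * s**-2 * A**-1
    unit = parse_unit_dimension([0, 1, -2, -1, 0, 0, 0])
    print(unit)  # kg**1*s**-2*A**-1
    # yt simplifies this to "T" (or the equivalent "kg/(A*s**2)");
    # either spelling marks the field as magnetic for aliasing
    print(str(YTQuantity(1, unit).units))
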
diff -r 068ad05bda58e31a1684d907398a61ce93fd595c -r 71960f84ae9d3e12d70759e342b2bc581581674c yt/frontends/open_pmd/io.py
--- /dev/null
+++ b/yt/frontends/open_pmd/io.py
@@ -0,0 +1,213 @@
+"""
+openPMD-specific IO functions
+
+
+
+"""
+
+# -----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+# Copyright (c) 2015, Daniel Grassinger (HZDR)
+# Copyright (c) 2016, Fabian Koller (HZDR)
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+# -----------------------------------------------------------------------------
+
+from collections import defaultdict
+
+import numpy as np
+
+from yt.frontends.open_pmd.misc import \
+    is_const_component, \
+    get_component
+from yt.utilities.io_handler import BaseIOHandler
+
+
+class IOHandlerOpenPMDHDF5(BaseIOHandler):
+    _field_dtype = "float32"
+    _dataset_type = "openPMD"
+
+    def __init__(self, ds, *args, **kwargs):
+        self.ds = ds
+        self._handle = ds._handle
+        self.base_path = ds.base_path
+        self.meshes_path = ds.meshes_path
+        self.particles_path = ds.particles_path
+        self._array_fields = {}
+        self._cached_ptype = ""
+
+    def _fill_cache(self, ptype, index=0, offset=None):
+        """Fills the particle position cache for the ``ptype``.
+
+        Parameters
+        ----------
+        ptype : str
+            The on-disk name of the particle species
+        index : int, optional
+        offset : int, optional
+        """
+        if str((ptype, index, offset)) != self._cached_ptype:
+            self._cached_ptype = str((ptype, index, offset))
+            pds = self._handle[self.base_path + self.particles_path + "/" + ptype]
+            axes = list(pds["position"].keys())
+            if offset is None:
+                if is_const_component(pds["position/" + axes[0]]):
+                    offset = pds["position/" + axes[0]].attrs["shape"]
+                else:
+                    offset = pds["position/" + axes[0]].len()
+            self.cache = np.empty((3, offset), dtype=np.float64)
+            for i in np.arange(3):
+                ax = "xyz"[i]
+                if ax in axes:
+                    np.add(get_component(pds, "position/" + ax, index, offset),
+                           get_component(pds, "positionOffset/" + ax, index, offset),
+                           self.cache[i])
+                else:
+                    # Pad accordingly with zeros to make 1D/2D datasets compatible
+                    # These have to be the same shape as the existing axes since that equals the number of particles
+                    self.cache[i] = np.zeros(offset)
+
+    def _read_particle_selection(self, chunks, selector, fields):
+        """Reads given particle fields for given particle species masked by a given selection.
+
+        Parameters
+        ----------
+        chunks
+            A list of chunks
+            A chunk is a list of grids
+        selector
+            A region (inside your domain) specifying which parts of the field you want to read
+        fields : array_like
+            Tuples (ptype, pfield) representing a field
+
+        Returns
+        -------
+        dict
+            keys are tuples (ptype, pfield) representing a field
+            values are (N,) ndarrays with data from that field
+        """
+        f = self._handle
+        bp = self.base_path
+        pp = self.particles_path
+        ds = f[bp + pp]
+        unions = self.ds.particle_unions
+        chunks = list(chunks)  # chunks is a generator
+
+        rv = {}
+        ind = {}
+        particle_count = {}
+        ptf = defaultdict(list)  # ParticleTypes&Fields
+        rfm = defaultdict(list)  # RequestFieldMapping
+
+        for (ptype, pname) in fields:
+            pfield = (ptype, pname)
+            # Overestimate the size of all pfields so they include all particles, shrink it later
+            particle_count[pfield] = 0
+            if ptype in unions:
+                for pt in unions[ptype]:
+                    particle_count[pfield] += self.ds.particle_type_counts[pt]
+                    ptf[pt].append(pname)
+                    rfm[pt, pname].append(pfield)
+            else:
+                particle_count[pfield] = self.ds.particle_type_counts[ptype]
+                ptf[ptype].append(pname)
+                rfm[pfield].append(pfield)
+            rv[pfield] = np.empty((particle_count[pfield],), dtype=np.float64)
+            ind[pfield] = 0
+
+        for ptype in ptf:
+            for chunk in chunks:
+                for grid in chunk.objs:
+                    if str(ptype) == "io":
+                        species = list(ds.keys())[0]
+                    else:
+                        species = ptype
+                    if species not in grid.ptypes:
+                        continue
+                    # read particle coords into cache
+                    self._fill_cache(species, grid.pindex, grid.poffset)
+                    mask = selector.select_points(self.cache[0], self.cache[1], self.cache[2], 0.0)
+                    if mask is None:
+                        continue
+                    pds = ds[species]
+                    for field in ptf[ptype]:
+                        component = "/".join(field.split("_")[1:])
+                        component = component.replace("positionCoarse", "position")
+                        component = component.replace("-", "_")
+                        data = get_component(pds, component, grid.pindex, grid.poffset)[mask]
+                        for request_field in rfm[(ptype, field)]:
+                            rv[request_field][ind[request_field]:ind[request_field] + data.shape[0]] = data
+                            ind[request_field] += data.shape[0]
+
+        for field in fields:
+            rv[field] = rv[field][:ind[field]]
+
+        return rv
+
+    def _read_fluid_selection(self, chunks, selector, fields, size):
+        """Reads given fields masked by a given selection.
+
+        Parameters
+        ----------
+        chunks
+            A list of chunks
+            A chunk is a list of grids
+        selector
+            A region (inside your domain) specifying which parts of the field you want to read
+        fields : array_like
+            Tuples (ftype, fname) representing a field
+        size : int
+            Size of the data to read
+
+        Returns
+        -------
+        dict
+            keys are tuples (ftype, fname) representing a field
+            values are flat (``size``,) ndarrays with data from that field
+        """
+        f = self._handle
+        bp = self.base_path
+        mp = self.meshes_path
+        ds = f[bp + mp]
+        chunks = list(chunks)
+
+        rv = {}
+        ind = {}
+
+        if selector.__class__.__name__ == "GridSelector":
+            if not (len(chunks) == len(chunks[0].objs) == 1):
+                raise RuntimeError
+
+        if size is None:
+            size = sum((g.count(selector) for chunk in chunks
+                        for g in chunk.objs))
+        for field in fields:
+            rv[field] = np.empty(size, dtype=np.float64)
+            ind[field] = 0
+
+        for (ftype, fname) in fields:
+            field = (ftype, fname)
+            for chunk in chunks:
+                for grid in chunk.objs:
+                    component = fname.replace("_", "/").replace("-", "_")
+                    if component.split("/")[0] not in grid.ftypes:
+                        continue
+                    mask = grid._get_selector_mask(selector)
+                    if mask is None:
+                        continue
+                    data = get_component(ds, component, grid.findex, grid.foffset)
+                    # The following is a modified AMRGridPatch.select(...)
+                    data.shape = mask.shape  # Workaround - casts a 2D (x,y) array to 3D (x,y,1)
+                    count = grid.count(selector)
+                    rv[field][ind[field]:ind[field] + count] = data[mask]
+                    ind[field] += count
+
+        for field in fields:
+            rv[field] = rv[field][:ind[field]]
+            rv[field] = rv[field].flatten()
+
+        return rv

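The component lookups above reverse the underscore/hyphen substitution done in
OpenPMDHierarchy._detect_output_fields. A standalone sketch of the two mappings
(the helper names here are illustrative, not part of the frontend):

    def mesh_component(fname):
        # ("openPMD", "E_x") -> "E/x"; hyphens restore on-disk underscores
        return fname.replace("_", "/").replace("-", "_")

    def particle_component(pname):
        # ("io", "particle_positionCoarse_x") -> "position/x"
        component = "/".join(pname.split("_")[1:])
        component = component.replace("positionCoarse", "position")
        return component.replace("-", "_")

    print(mesh_component("E_x"))                            # E/x
    print(particle_component("particle_positionCoarse_x"))  # position/x
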
diff -r 068ad05bda58e31a1684d907398a61ce93fd595c -r 71960f84ae9d3e12d70759e342b2bc581581674c yt/frontends/open_pmd/misc.py
--- /dev/null
+++ b/yt/frontends/open_pmd/misc.py
@@ -0,0 +1,125 @@
+# -----------------------------------------------------------------------------
+# Copyright (c) 2016, Fabian Koller (HZDR)
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+# -----------------------------------------------------------------------------
+
+import numpy as np
+
+from yt.utilities.logger import ytLogger as mylog
+
+
+def parse_unit_dimension(unit_dimension):
+    """Transforms an openPMD unitDimension into a string.
+
+    Parameters
+    ----------
+    unit_dimension : array_like
+        integer array of length 7 with one entry for the dimensional component of every SI unit
+
+        [0] length L,
+        [1] mass M,
+        [2] time T,
+        [3] electric current I,
+        [4] thermodynamic temperature theta,
+        [5] amount of substance N,
+        [6] luminous intensity J
+
+    References
+    ----------
+    .. https://github.com/openPMD/openPMD-standard/blob/latest/STANDARD.md#unit-systems-and-dimensionality
+
+    Returns
+    -------
+    str
+
+    Examples
+    --------
+    >>> velocity = [1., 0., -1., 0., 0., 0., 0.]
+    >>> parse_unit_dimension(velocity)
+    'm**1*s**-1'
+
+    >>> magnetic_field = [0., 1., -2., -1., 0., 0., 0.]
+    >>> parse_unit_dimension(magnetic_field)
+    'kg**1*s**-2*A**-1'
+    """
+    if len(unit_dimension) != 7:
+        mylog.error("open_pmd - SI must have 7 base dimensions!")
+    unit_dimension = np.asarray(unit_dimension, dtype=np.int)
+    dim = []
+    si = ["m",
+          "kg",
+          "s",
+          "A",
+          "C",
+          "mol",
+          "cd"]
+    for i in np.arange(7):
+        if unit_dimension[i] != 0:
+            dim.append("{}**{}".format(si[i], unit_dimension[i]))
+    return "*".join(dim)
+
+
+def is_const_component(record_component):
+    """Determines whether a group or dataset in the HDF5 file is constant.
+
+    Parameters
+    ----------
+    record_component : h5py.Group or h5py.Dataset
+
+    Returns
+    -------
+    bool
+        True if constant, False otherwise
+
+    References
+    ----------
+    .. https://github.com/openPMD/openPMD-standard/blob/latest/STANDARD.md,
+       section 'Constant Record Components'
+    """
+    return "value" in record_component.attrs.keys()
+
+
+def get_component(group, component_name, index=0, offset=None):
+    """Grabs a dataset component from a group as a whole or sliced.
+
+    Parameters
+    ----------
+    group : h5py.Group
+    component_name : str
+        relative path of the component in the group
+    index : int, optional
+        first entry along the first axis to read
+    offset : int, optional
+        number of entries to read
+        if not supplied, every entry after index is returned
+
+    Notes
+    -----
+    This scales every entry of the component with the respective "unitSI".
+
+    Returns
+    -------
+    ndarray
+        (N,) 1D in case of particle data
+        (O,P,Q) 1D/2D/3D in case of mesh data
+    """
+    record_component = group[component_name]
+    unit_si = record_component.attrs["unitSI"]
+    if is_const_component(record_component):
+        shape = np.asarray(record_component.attrs["shape"])
+        if offset is None:
+            shape[0] -= index
+        else:
+            shape[0] = offset
+        # component is constant, craft an array by hand
+        # mylog.debug("open_pmd - get_component: {}/{} [const {}]".format(group.name, component_name, shape))
+        return np.full(shape, record_component.attrs["value"] * unit_si)
+    else:
+        if offset is not None:
+            offset += index
+        # component is a dataset, return it (possibly masked)
+        # mylog.debug("open_pmd - get_component: {}/{}[{}:{}]".format(group.name, component_name, index, offset))
+        return np.multiply(record_component[index:offset], unit_si)

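Typical use of get_component against an open HDF5 handle might look like the
following (a sketch; the iteration number and species name are assumptions for
illustration):

    import h5py
    from yt.frontends.open_pmd.misc import get_component, is_const_component

    f = h5py.File("example-3d/hdf5/data00000100.h5", "r")
    species = f["/data/100/particles/electrons"]
    # Constant record components are synthesized from "value" * "unitSI"
    print(is_const_component(species["mass"]))
    # Read 1000 momentum entries starting at index 0, scaled by "unitSI"
    px = get_component(species, "momentum/x", index=0, offset=1000)
    print(px.shape)  # (1000,)
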
diff -r 068ad05bda58e31a1684d907398a61ce93fd595c -r 71960f84ae9d3e12d70759e342b2bc581581674c yt/frontends/open_pmd/tests/test_outputs.py
--- /dev/null
+++ b/yt/frontends/open_pmd/tests/test_outputs.py
@@ -0,0 +1,137 @@
+"""
+openPMD frontend tests
+
+
+
+"""
+
+# -----------------------------------------------------------------------------
+# Copyright (c) 2016, Fabian Koller (HZDR).
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+# -----------------------------------------------------------------------------
+
+from yt.frontends.open_pmd.data_structures import \
+    OpenPMDDataset
+from yt.testing import \
+    assert_almost_equal, \
+    assert_equal, \
+    assert_array_equal, \
+    requires_file
+from yt.utilities.answer_testing.framework import \
+    data_dir_load
+
+import numpy as np
+
+twoD = "example-2d/hdf5/data00000100.h5"
+threeD = "example-3d/hdf5/data00000100.h5"
+
+
+@requires_file(threeD)
+def test_3d_out():
+    ds = data_dir_load(threeD)
+    field_list = [('all', 'particle_charge'),
+                  ('all', 'particle_mass'),
+                  ('all', 'particle_momentum_x'),
+                  ('all', 'particle_momentum_y'),
+                  ('all', 'particle_momentum_z'),
+                  ('all', 'particle_positionCoarse_x'),
+                  ('all', 'particle_positionCoarse_y'),
+                  ('all', 'particle_positionCoarse_z'),
+                  ('all', 'particle_positionOffset_x'),
+                  ('all', 'particle_positionOffset_y'),
+                  ('all', 'particle_positionOffset_z'),
+                  ('all', 'particle_weighting'),
+                  ('io', 'particle_charge'),
+                  ('io', 'particle_mass'),
+                  ('io', 'particle_momentum_x'),
+                  ('io', 'particle_momentum_y'),
+                  ('io', 'particle_momentum_z'),
+                  ('io', 'particle_positionCoarse_x'),
+                  ('io', 'particle_positionCoarse_y'),
+                  ('io', 'particle_positionCoarse_z'),
+                  ('io', 'particle_positionOffset_x'),
+                  ('io', 'particle_positionOffset_y'),
+                  ('io', 'particle_positionOffset_z'),
+                  ('io', 'particle_weighting'),
+                  ('openPMD', 'E_x'),
+                  ('openPMD', 'E_y'),
+                  ('openPMD', 'E_z'),
+                  ('openPMD', 'rho')]
+    domain_dimensions = [26, 26, 201] * np.ones_like(ds.domain_dimensions)
+    domain_width = [2.08e-05, 2.08e-05, 2.01e-05] * np.ones_like(ds.domain_left_edge)
+
+    assert isinstance(ds, OpenPMDDataset)
+    yield assert_equal, str(ds), "data00000100.h5"
+    yield assert_equal, ds.dimensionality, 3
+    yield assert_equal, ds.particle_types_raw, ('io',)
+    assert "all" in ds.particle_unions
+    yield assert_array_equal, ds.field_list, field_list
+    yield assert_array_equal, ds.domain_dimensions, domain_dimensions
+    yield assert_almost_equal, ds.current_time, 3.28471214521e-14 * np.ones_like(ds.current_time)
+    yield assert_almost_equal, ds.domain_right_edge - ds.domain_left_edge, domain_width
+
+
+@requires_file(twoD)
+def test_2d_out():
+    ds = data_dir_load(twoD)
+    field_list = [('Hydrogen1+', 'particle_charge'),
+                  ('Hydrogen1+', 'particle_mass'),
+                  ('Hydrogen1+', 'particle_momentum_x'),
+                  ('Hydrogen1+', 'particle_momentum_y'),
+                  ('Hydrogen1+', 'particle_momentum_z'),
+                  ('Hydrogen1+', 'particle_positionCoarse_x'),
+                  ('Hydrogen1+', 'particle_positionCoarse_y'),
+                  ('Hydrogen1+', 'particle_positionCoarse_z'),
+                  ('Hydrogen1+', 'particle_positionOffset_x'),
+                  ('Hydrogen1+', 'particle_positionOffset_y'),
+                  ('Hydrogen1+', 'particle_positionOffset_z'),
+                  ('Hydrogen1+', 'particle_weighting'),
+                  ('all', 'particle_charge'),
+                  ('all', 'particle_mass'),
+                  ('all', 'particle_momentum_x'),
+                  ('all', 'particle_momentum_y'),
+                  ('all', 'particle_momentum_z'),
+                  ('all', 'particle_positionCoarse_x'),
+                  ('all', 'particle_positionCoarse_y'),
+                  ('all', 'particle_positionCoarse_z'),
+                  ('all', 'particle_positionOffset_x'),
+                  ('all', 'particle_positionOffset_y'),
+                  ('all', 'particle_positionOffset_z'),
+                  ('all', 'particle_weighting'),
+                  ('electrons', 'particle_charge'),
+                  ('electrons', 'particle_mass'),
+                  ('electrons', 'particle_momentum_x'),
+                  ('electrons', 'particle_momentum_y'),
+                  ('electrons', 'particle_momentum_z'),
+                  ('electrons', 'particle_positionCoarse_x'),
+                  ('electrons', 'particle_positionCoarse_y'),
+                  ('electrons', 'particle_positionCoarse_z'),
+                  ('electrons', 'particle_positionOffset_x'),
+                  ('electrons', 'particle_positionOffset_y'),
+                  ('electrons', 'particle_positionOffset_z'),
+                  ('electrons', 'particle_weighting'),
+                  ('openPMD', 'B_x'),
+                  ('openPMD', 'B_y'),
+                  ('openPMD', 'B_z'),
+                  ('openPMD', 'E_x'),
+                  ('openPMD', 'E_y'),
+                  ('openPMD', 'E_z'),
+                  ('openPMD', 'J_x'),
+                  ('openPMD', 'J_y'),
+                  ('openPMD', 'J_z'),
+                  ('openPMD', 'rho')]
+    domain_dimensions = [51, 201, 1] * np.ones_like(ds.domain_dimensions)
+    domain_width = [3.06e-05, 2.01e-05, 1e+0] * np.ones_like(ds.domain_left_edge)
+
+    assert isinstance(ds, OpenPMDDataset)
+    yield assert_equal, str(ds), "data00000100.h5"
+    yield assert_equal, ds.dimensionality, 2
+    yield assert_equal, ds.particle_types_raw, ('Hydrogen1+', 'electrons')
+    assert "all" in ds.particle_unions
+    yield assert_array_equal, ds.field_list, field_list
+    yield assert_array_equal, ds.domain_dimensions, domain_dimensions
+    yield assert_almost_equal, ds.current_time, 3.29025596712e-14 * np.ones_like(ds.current_time)
+    yield assert_almost_equal, ds.domain_right_edge - ds.domain_left_edge, domain_width

diff -r 068ad05bda58e31a1684d907398a61ce93fd595c -r 71960f84ae9d3e12d70759e342b2bc581581674c yt/utilities/on_demand_imports.py
--- a/yt/utilities/on_demand_imports.py
+++ b/yt/utilities/on_demand_imports.py
@@ -230,6 +230,19 @@
             self._Group = Group
         return self._Group
 
+    _Dataset = None
+    @property
+    def Dataset(self):
+        if self._err:
+            raise self._err
+        if self._Dataset is None:
+            try:
+                from h5py import Dataset
+            except ImportError:
+                Dataset = NotAModule(self._name)
+            self._Dataset = Dataset
+        return self._Dataset
+
     ___version__ = None
     @property
     def __version__(self):

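The new Dataset property mirrors the existing Group property, letting frontend
code type-check h5py objects without importing h5py eagerly, e.g. (as used in
data_structures.py and fields.py above):

    from yt.utilities.on_demand_imports import _h5py as h5

    # Resolves lazily to h5py.Dataset on first access
    print(h5.Dataset)  # <class 'h5py._hl.dataset.Dataset'>
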
Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.


