[yt-svn] commit/yt: chummels: Merged in brittonsmith/yt (pull request #1570)

commits-noreply at bitbucket.org
Thu Jul 16 10:14:18 PDT 2015


1 new commit in yt:

https://bitbucket.org/yt_analysis/yt/commits/d48960b8f6a7/
Changeset:   d48960b8f6a7
Branch:      yt
User:        chummels
Date:        2015-07-16 17:14:06+00:00
Summary:     Merged in brittonsmith/yt (pull request #1570)

Adding frontend for Gadget_FOF
Affected #:  13 files

diff -r 989be30f7b70fc8e144581d1882c1dac913b25f9 -r d48960b8f6a7791a7610555cb447402900e981bd doc/source/examining/loading_data.rst
--- a/doc/source/examining/loading_data.rst
+++ b/doc/source/examining/loading_data.rst
@@ -1079,6 +1079,78 @@
 
-.. _loading-pyne-data:
+.. _halo-catalog-data:
 
+Halo Catalog Data
+-----------------
+
+yt has support for reading halo catalogs produced by Rockstar and the inline 
+FOF/SUBFIND halo finders of Gadget and OWLS.  The halo catalogs are treated as 
+particle datasets where each particle represents a single halo.  At this time, 
+yt does not have the ability to load the member particles for a given halo.  
+However, once loaded, further halo analysis can be performed using 
+:ref:`halo_catalog`.
+
+When halo catalogs are written to multiple files, you need only provide the
+path to one of them; yt will find and load the rest.
+
+Gadget FOF/SUBFIND
+^^^^^^^^^^^^^^^^^^
+
+The two field types for GadgetFOF data are "Group" (FOF) and "Subhalo" (SUBFIND).
+
+.. code-block:: python
+
+   import yt
+   ds = yt.load("gadget_fof_halos/groups_042/fof_subhalo_tab_042.0.hdf5")
+   ad = ds.all_data()
+   # The halo mass
+   print ad["Group", "particle_mass"]
+   print ad["Subhalo", "particle_mass"]
+   # Halo ID
+   print ad["Group", "particle_identifier"]
+   print ad["Subhalo", "particle_identifier"]
+   # positions
+   print ad["Group", "particle_position_x"]
+   # velocities
+   print ad["Group", "particle_velocity_x"]
+
+Multidimensional fields can be accessed through the field name followed by an 
+underscore and the index.
+
+.. code-block:: python
+
+   # x component of the spin
+   print ad["Subhalo", "SubhaloSpin_0"]
+
+OWLS FOF/SUBFIND
+^^^^^^^^^^^^^^^^
+
+OWLS halo catalogs have a very similar structure to regular Gadget halo catalogs.  
+The two field types are "FOF" and "SUBFIND".
+
+.. code-block:: python
+
+   import yt
+   ds = yt.load("owls_fof_halos/groups_008/group_008.0.hdf5")
+   ad = ds.all_data()
+   # The halo mass
+   print ad["FOF", "particle_mass"]
+
+Rockstar
+^^^^^^^^
+
+Rockstar halo catalogs are loaded by providing the path to one of the .bin files.
+The single field type available is "halos".
+
+.. code-block:: python
+
+   import yt
+   ds = yt.load("rockstar_halos/halos_0.0.bin")
+   ad = ds.all_data()
+   # The halo mass
+   print ad["halos", "particle_mass"]
+
+.. _loading-pyne-data:
+
 PyNE Data
 ---------
 

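The :ref:`halo_catalog` machinery referenced in the docs above can consume
these catalogs directly once loaded. A minimal sketch of that hand-off,
assuming the standard yt 3.x halo analysis API and the sample dataset path
used in the docs:

    import yt
    from yt.analysis_modules.halo_analysis.api import HaloCatalog

    # Load the halo catalog and hand it to the halo analysis framework.
    ds = yt.load("gadget_fof_halos/groups_042/fof_subhalo_tab_042.0.hdf5")
    hc = HaloCatalog(halos_ds=ds)
    hc.create()
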
diff -r 989be30f7b70fc8e144581d1882c1dac913b25f9 -r d48960b8f6a7791a7610555cb447402900e981bd yt/frontends/api.py
--- a/yt/frontends/api.py
+++ b/yt/frontends/api.py
@@ -27,6 +27,7 @@
     'fits',
     'flash',
     'gadget',
+    'gadget_fof',
     'gdf',
     'halo_catalog',
     'http_stream',

diff -r 989be30f7b70fc8e144581d1882c1dac913b25f9 -r d48960b8f6a7791a7610555cb447402900e981bd yt/frontends/gadget/data_structures.py
--- a/yt/frontends/gadget/data_structures.py
+++ b/yt/frontends/gadget/data_structures.py
@@ -387,7 +387,7 @@
     @classmethod
     def _is_valid(self, *args, **kwargs):
         need_groups = ['Header']
-        veto_groups = ['FOF']
+        veto_groups = ['FOF', 'Group', 'Subhalo']
         valid = True
         try:
             fh = h5py.File(args[0], mode='r')

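Together with the GadgetFOF _is_valid added below, frontend detection reduces
to checking HDF5 group membership: a GadgetFOF catalog must contain "Group",
"Header", and "Subhalo", while a top-level "FOF" group marks an OWLS catalog
instead (and, per the hunk above, "Group"/"Subhalo" now veto the plain Gadget
snapshot reader). A standalone sketch of that check; the helper name is
hypothetical:

    import h5py

    def looks_like_gadget_fof(fn):
        # Mirror GadgetFOFDataset._is_valid: require Group/Header/Subhalo
        # at the file root and veto a top-level FOF group.
        need, veto = ("Group", "Header", "Subhalo"), ("FOF",)
        try:
            with h5py.File(fn, mode="r") as fh:
                return (all(g in fh["/"] for g in need) and
                        not any(g in fh["/"] for g in veto))
        except IOError:
            return False
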
diff -r 989be30f7b70fc8e144581d1882c1dac913b25f9 -r d48960b8f6a7791a7610555cb447402900e981bd yt/frontends/gadget_fof/__init__.py
--- /dev/null
+++ b/yt/frontends/gadget_fof/__init__.py
@@ -0,0 +1,15 @@
+"""
+API for GadgetFOF frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------

diff -r 989be30f7b70fc8e144581d1882c1dac913b25f9 -r d48960b8f6a7791a7610555cb447402900e981bd yt/frontends/gadget_fof/api.py
--- /dev/null
+++ b/yt/frontends/gadget_fof/api.py
@@ -0,0 +1,26 @@
+"""
+API for GadgetFOF frontend
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2015, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from .data_structures import \
+     GadgetFOFDataset
+
+from .io import \
+     IOHandlerGadgetFOFHDF5
+
+from .fields import \
+     GadgetFOFFieldInfo
+
+from . import tests

diff -r 989be30f7b70fc8e144581d1882c1dac913b25f9 -r d48960b8f6a7791a7610555cb447402900e981bd yt/frontends/gadget_fof/data_structures.py
--- /dev/null
+++ b/yt/frontends/gadget_fof/data_structures.py
@@ -0,0 +1,246 @@
+"""
+Data structures for GadgetFOF frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from collections import defaultdict
+import h5py
+import numpy as np
+import stat
+import weakref
+import struct
+import glob
+import time
+import os
+
+from .fields import \
+    GadgetFOFFieldInfo
+
+from yt.utilities.cosmology import \
+    Cosmology
+from yt.utilities.definitions import \
+    mpc_conversion, sec_conversion
+from yt.utilities.exceptions import \
+    YTException
+from yt.utilities.logger import ytLogger as \
+    mylog
+from yt.geometry.particle_geometry_handler import \
+    ParticleIndex
+from yt.data_objects.static_output import \
+    Dataset, \
+    ParticleFile
+from yt.frontends.gadget.data_structures import \
+    _fix_unit_ordering
+import yt.utilities.fortran_utils as fpu
+from yt.units.yt_array import \
+    YTArray, \
+    YTQuantity
+
+class GadgetFOFParticleIndex(ParticleIndex):
+    def __init__(self, ds, dataset_type):
+        super(GadgetFOFParticleIndex, self).__init__(ds, dataset_type)
+
+    def _calculate_particle_index_starts(self):
+        # Halo indices are not saved in the file, so we must count by hand.
+        # File 0 has halos 0 to N_0 - 1, file 1 has halos N_0 to N_0 + N_1 - 1, etc.
+        particle_count = defaultdict(int)
+        offset_count = 0
+        for data_file in self.data_files:
+            data_file.index_start = dict([(ptype, particle_count[ptype]) for
+                                           ptype in data_file.total_particles])
+            data_file.offset_start = offset_count
+            for ptype in data_file.total_particles:
+                particle_count[ptype] += data_file.total_particles[ptype]
+            offset_count += data_file.total_offset
+
+    def _calculate_file_offset_map(self):
+        # After the FOF halo finding runs, a load-balancing step redistributes halos
+        # and then writes more fields.  Here, for each file, we create a list of 
+        # files which contain the rest of the redistributed particles.
+        ifof = np.array([data_file.total_particles["Group"]
+                         for data_file in self.data_files])
+        isub = np.array([data_file.total_offset
+                         for data_file in self.data_files])
+        subend = isub.cumsum()
+        fofend = ifof.cumsum()
+        istart = np.digitize(fofend - ifof, subend - isub) - 1
+        iend = np.clip(np.digitize(fofend, subend), 0, ifof.size - 2)
+        for i, data_file in enumerate(self.data_files):
+            data_file.offset_files = self.data_files[istart[i]: iend[i] + 1]
+
+    def _detect_output_fields(self):
+        # TODO: Add additional fields
+        dsl = []
+        units = {}
+        for dom in self.data_files:
+            fl, _units = self.io._identify_fields(dom)
+            units.update(_units)
+            dom._calculate_offsets(fl)
+            for f in fl:
+                if f not in dsl: dsl.append(f)
+        self.field_list = dsl
+        ds = self.dataset
+        ds.particle_types = tuple(set(pt for pt, fn in dsl))
+        # This is an attribute that means these particle types *actually*
+        # exist.  As in, they are real, in the dataset.
+        ds.field_units.update(units)
+        ds.particle_types_raw = ds.particle_types
+            
+    def _setup_geometry(self):
+        super(GadgetFOFParticleIndex, self)._setup_geometry()
+        self._calculate_particle_index_starts()
+        self._calculate_file_offset_map()
+    
+class GadgetFOFHDF5File(ParticleFile):
+    def __init__(self, ds, io, filename, file_id):
+        super(GadgetFOFHDF5File, self).__init__(ds, io, filename, file_id)
+        with h5py.File(filename, "r") as f:
+            self.header = dict((field, f.attrs[field]) \
+                               for field in f.attrs.keys())
+    
+class GadgetFOFDataset(Dataset):
+    _index_class = GadgetFOFParticleIndex
+    _file_class = GadgetFOFHDF5File
+    _field_info_class = GadgetFOFFieldInfo
+    _suffix = ".hdf5"
+
+    def __init__(self, filename, dataset_type="gadget_fof_hdf5",
+                 n_ref=16, over_refine_factor=1,
+                 unit_base=None, units_override=None):
+        self.n_ref = n_ref
+        self.over_refine_factor = over_refine_factor
+        if unit_base is not None and "UnitLength_in_cm" in unit_base:
+            # We assume this is comoving, because in the absence of comoving
+            # integration the redshift will be zero.
+            unit_base['cmcm'] = 1.0 / unit_base["UnitLength_in_cm"]
+        self._unit_base = unit_base
+        if units_override is not None:
+            raise RuntimeError("units_override is not supported for GadgetFOFDataset. "+
+                               "Use unit_base instead.")
+        super(GadgetFOFDataset, self).__init__(filename, dataset_type,
+                                                 units_override=units_override)
+
+    def _parse_parameter_file(self):
+        handle = h5py.File(self.parameter_filename, mode="r")
+        hvals = {}
+        hvals.update((str(k), v) for k, v in handle["/Header"].attrs.items())
+        hvals["NumFiles"] = hvals["NumFiles"]
+
+        self.dimensionality = 3
+        self.refine_by = 2
+        self.unique_identifier = \
+            int(os.stat(self.parameter_filename)[stat.ST_CTIME])
+
+        # Set standard values
+        self.domain_left_edge = np.zeros(3, "float64")
+        self.domain_right_edge = np.ones(3, "float64") * hvals["BoxSize"]
+        nz = 1 << self.over_refine_factor
+        self.domain_dimensions = np.ones(3, "int32") * nz
+        self.cosmological_simulation = 1
+        self.periodicity = (True, True, True)
+        self.current_redshift = hvals["Redshift"]
+        self.omega_lambda = hvals["OmegaLambda"]
+        self.omega_matter = hvals["Omega0"]
+        self.hubble_constant = hvals["HubbleParam"]
+
+        cosmology = Cosmology(hubble_constant=self.hubble_constant,
+                              omega_matter=self.omega_matter,
+                              omega_lambda=self.omega_lambda)
+        self.current_time = cosmology.t_from_z(self.current_redshift)
+
+        self.parameters = hvals
+        prefix = os.path.abspath(
+            os.path.join(os.path.dirname(self.parameter_filename), 
+                         os.path.basename(self.parameter_filename).split(".", 1)[0]))
+        
+        suffix = self.parameter_filename.rsplit(".", 1)[-1]
+        self.filename_template = "%s.%%(num)i.%s" % (prefix, suffix)
+        self.file_count = len(glob.glob(prefix + "*" + self._suffix))
+        if self.file_count == 0:
+            raise YTException(message="No data files found.", ds=self)
+        self.particle_types = ("Group", "Subhalo")
+        self.particle_types_raw = ("Group", "Subhalo")
+        
+        handle.close()
+
+    def _set_code_unit_attributes(self):
+        # Set a sane default for cosmological simulations.
+        if self._unit_base is None and self.cosmological_simulation == 1:
+            mylog.info("Assuming length units are in Mpc/h (comoving)")
+            self._unit_base = dict(length = (1.0, "Mpccm/h"))
+        # The other same defaults we will use from the standard Gadget
+        # defaults.
+        unit_base = self._unit_base or {}
+        
+        if "length" in unit_base:
+            length_unit = unit_base["length"]
+        elif "UnitLength_in_cm" in unit_base:
+            if self.cosmological_simulation == 0:
+                length_unit = (unit_base["UnitLength_in_cm"], "cm")
+            else:
+                length_unit = (unit_base["UnitLength_in_cm"], "cmcm/h")
+        else:
+            raise RuntimeError
+        length_unit = _fix_unit_ordering(length_unit)
+        self.length_unit = self.quan(length_unit[0], length_unit[1])
+        
+        if "velocity" in unit_base:
+            velocity_unit = unit_base["velocity"]
+        elif "UnitVelocity_in_cm_per_s" in unit_base:
+            velocity_unit = (unit_base["UnitVelocity_in_cm_per_s"], "cm/s")
+        else:
+            if self.cosmological_simulation == 0:
+                velocity_unit = (1e5, "cm/s")
+            else:
+                velocity_unit = (1e5, "cmcm/s")
+        velocity_unit = _fix_unit_ordering(velocity_unit)
+        self.velocity_unit = self.quan(velocity_unit[0], velocity_unit[1])
+
+        # We set hubble_constant = 1.0 for non-cosmology, so this is safe.
+        # Default to 1e10 Msun/h if mass is not specified.
+        if "mass" in unit_base:
+            mass_unit = unit_base["mass"]
+        elif "UnitMass_in_g" in unit_base:
+            if self.cosmological_simulation == 0:
+                mass_unit = (unit_base["UnitMass_in_g"], "g")
+            else:
+                mass_unit = (unit_base["UnitMass_in_g"], "g/h")
+        else:
+            # Sane default
+            mass_unit = (1.0, "1e10*Msun/h")
+        mass_unit = _fix_unit_ordering(mass_unit)
+        self.mass_unit = self.quan(mass_unit[0], mass_unit[1])
+
+        if "time" in unit_base:
+            time_unit = unit_base["time"]
+        elif "UnitTime_in_s" in unit_base:
+            time_unit = (unit_base["UnitTime_in_s"], "s")
+        else:
+            time_unit = (1., "s")        
+        self.time_unit = self.quan(time_unit[0], time_unit[1])
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        need_groups = ['Group', 'Header', 'Subhalo']
+        veto_groups = ['FOF']
+        valid = True
+        try:
+            fh = h5py.File(args[0], mode='r')
+            valid = all(ng in fh["/"] for ng in need_groups) and \
+              not any(vg in fh["/"] for vg in veto_groups)
+            fh.close()
+        except:
+            valid = False
+            pass
+        return valid

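The bookkeeping in _calculate_particle_index_starts above is just a running
total of per-file halo counts, since global halo IDs are not stored in the
files. A standalone sketch with hypothetical counts (plain NumPy, not yt
internals; the g5 test dataset below really does contain empty files):

    import numpy as np

    # Hypothetical per-file "Group" counts; file 1 is empty.
    halos_per_file = np.array([10, 0, 7, 3])
    # Starting global halo ID for each file: shifted cumulative sum.
    index_start = np.concatenate(([0], halos_per_file.cumsum()[:-1]))
    print index_start  # [ 0 10 10 17] -> file 2 holds halos 10..16
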
diff -r 989be30f7b70fc8e144581d1882c1dac913b25f9 -r d48960b8f6a7791a7610555cb447402900e981bd yt/frontends/gadget_fof/fields.py
--- /dev/null
+++ b/yt/frontends/gadget_fof/fields.py
@@ -0,0 +1,48 @@
+"""
+GadgetFOF-specific fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2015, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.funcs import mylog
+from yt.fields.field_info_container import \
+    FieldInfoContainer
+from yt.units.yt_array import \
+    YTArray
+
+m_units = "code_mass"
+p_units = "code_length"
+v_units = "code_velocity"
+
+class GadgetFOFFieldInfo(FieldInfoContainer):
+    known_other_fields = (
+    )
+
+    known_particle_fields = (
+        ("GroupPos_0", (p_units, ["Group", "particle_position_x"], None)),
+        ("GroupPos_1", (p_units, ["Group", "particle_position_y"], None)),
+        ("GroupPos_2", (p_units, ["Group", "particle_position_z"], None)),
+        ("GroupVel_0", (v_units, ["Group", "particle_velocity_x"], None)),
+        ("GroupVel_1", (v_units, ["Group", "particle_velocity_y"], None)),
+        ("GroupVel_2", (v_units, ["Group", "particle_velocity_z"], None)),
+        ("GroupMass",  (m_units, ["Group", "particle_mass"], None)),
+        ("GroupLen",   ("",      ["Group", "particle_number"], None)),
+        ("SubhaloPos_0", (p_units, ["Subhalo", "particle_position_x"], None)),
+        ("SubhaloPos_1", (p_units, ["Subhalo", "particle_position_y"], None)),
+        ("SubhaloPos_2", (p_units, ["Subhalo", "particle_position_z"], None)),
+        ("SubhaloVel_0", (v_units, ["Subhalo", "particle_velocity_x"], None)),
+        ("SubhaloVel_1", (v_units, ["Subhalo", "particle_velocity_y"], None)),
+        ("SubhaloVel_2", (v_units, ["Subhalo", "particle_velocity_z"], None)),
+        ("SubhaloMass",  (m_units, ["Subhalo", "particle_mass"], None)),
+        ("SubhaloLen",   ("",      ["Subhalo", "particle_number"], None)),
+)

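These tuples alias the on-disk dataset names to yt's standard particle field
names, which is why the docs above never mention "GroupMass" directly. For
example, using the sample dataset path from the docs:

    import yt

    ds = yt.load("gadget_fof_halos/groups_042/fof_subhalo_tab_042.0.hdf5")
    ad = ds.all_data()
    # The on-disk "GroupMass" dataset, in code_mass units, surfaces as:
    print ad["Group", "particle_mass"]
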
diff -r 989be30f7b70fc8e144581d1882c1dac913b25f9 -r d48960b8f6a7791a7610555cb447402900e981bd yt/frontends/gadget_fof/io.py
--- /dev/null
+++ b/yt/frontends/gadget_fof/io.py
@@ -0,0 +1,210 @@
+"""
+GadgetFOF data-file handling function
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+import numpy as np
+
+from yt.utilities.exceptions import YTDomainOverflow
+from yt.funcs import mylog
+
+from yt.utilities.io_handler import \
+    BaseIOHandler
+
+from yt.utilities.lib.geometry_utils import compute_morton
+
+class IOHandlerGadgetFOFHDF5(BaseIOHandler):
+    _dataset_type = "gadget_fof_hdf5"
+
+    def __init__(self, ds):
+        super(IOHandlerGadgetFOFHDF5, self).__init__(ds)
+        self.offset_fields = set([])
+
+    def _read_fluid_selection(self, chunks, selector, fields, size):
+        raise NotImplementedError
+
+    def _read_particle_coords(self, chunks, ptf):
+        # This will read chunks and yield the results.
+        chunks = list(chunks)
+        data_files = set([])
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in sorted(data_files):
+            with h5py.File(data_file.filename, "r") as f:
+                for ptype, field_list in sorted(ptf.items()):
+                    pcount = data_file.total_particles[ptype]
+                    coords = f[ptype]["%sPos" % ptype].value.astype("float64")
+                    coords = np.resize(coords, (pcount, 3))
+                    x = coords[:, 0]
+                    y = coords[:, 1]
+                    z = coords[:, 2]
+                    yield ptype, (x, y, z)
+
+    def _read_offset_particle_field(self, field, data_file, fh):
+        field_data = np.empty(data_file.total_particles["Group"], dtype="float64")
+        fofindex = np.arange(data_file.total_particles["Group"]) + data_file.index_start["Group"]
+        for offset_file in data_file.offset_files:
+            if fh.filename == offset_file.filename:
+                ofh = fh
+            else:
+                ofh = h5py.File(offset_file.filename, "r")
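+            # Find the overlap between this file's global FOF index range
+            # (fofindex) and the subhalo offsets covered by offset_file
+            # (subindex), then copy the overlapping slice into field_data.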
+            subindex = np.arange(offset_file.total_offset) + offset_file.offset_start
+            substart = max(fofindex[0] - subindex[0], 0)
+            subend = min(fofindex[-1] - subindex[0], subindex.size - 1)
+            fofstart = substart + subindex[0] - fofindex[0]
+            fofend = subend + subindex[0] - fofindex[0]
+            field_data[fofstart:fofend + 1] = ofh["Subhalo"][field][substart:subend + 1]
+        return field_data
+                    
+    def _read_particle_fields(self, chunks, ptf, selector):
+        # Now we have all the sizes, and we can allocate
+        chunks = list(chunks)
+        data_files = set([])
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in sorted(data_files):
+            with h5py.File(data_file.filename, "r") as f:
+                for ptype, field_list in sorted(ptf.items()):
+                    pcount = data_file.total_particles[ptype]
+                    if pcount == 0: continue
+                    coords = f[ptype]["%sPos" % ptype].value.astype("float64")
+                    coords = np.resize(coords, (pcount, 3))
+                    x = coords[:, 0]
+                    y = coords[:, 1]
+                    z = coords[:, 2]
+                    mask = selector.select_points(x, y, z, 0.0)
+                    del x, y, z
+                    if mask is None: continue
+                    for field in field_list:
+                        if field in self.offset_fields:
+                            field_data = \
+                              self._read_offset_particle_field(field, data_file, f)
+                        else:
+                            if field == "particle_identifier":
+                                field_data = \
+                                  np.arange(data_file.total_particles[ptype]) + \
+                                  data_file.index_start[ptype]
+                            elif field in f[ptype]:
+                                field_data = f[ptype][field].value.astype("float64")
+                            else:
+                                fname = field[:field.rfind("_")]
+                                field_data = f[ptype][fname].value.astype("float64")
+                                my_div = field_data.size / pcount
+                                if my_div > 1:
+                                    field_data = np.resize(field_data, (pcount, my_div))
+                                    findex = int(field[field.rfind("_") + 1:])
+                                    field_data = field_data[:, findex]
+                        data = field_data[mask]
+                        yield (ptype, field), data
+
+    def _initialize_index(self, data_file, regions):
+        pcount = sum(data_file.total_particles.values())
+        morton = np.empty(pcount, dtype='uint64')
+        if pcount == 0: return morton
+        mylog.debug("Initializing index % 5i (% 7i particles)",
+                    data_file.file_id, pcount)
+        ind = 0
+        with h5py.File(data_file.filename, "r") as f:
+            if not f.keys(): return None
+            dx = np.finfo(f["Group"]["GroupPos"].dtype).eps
+            dx = 2.0*self.ds.quan(dx, "code_length")
+
+            for ptype in data_file.ds.particle_types_raw:
+                if data_file.total_particles[ptype] == 0: continue
+                pos = f[ptype]["%sPos" % ptype].value.astype("float64")
+                pos = np.resize(pos, (data_file.total_particles[ptype], 3))
+                pos = data_file.ds.arr(pos, "code_length")
+                
+                # These are 32 bit numbers, so we give a little lee-way.
+                # Otherwise, for big sets of particles, we often will bump into the
+                # domain edges.  This helps alleviate that.
+                np.clip(pos, self.ds.domain_left_edge + dx,
+                             self.ds.domain_right_edge - dx, pos)
+                if np.any(pos.min(axis=0) < self.ds.domain_left_edge) or \
+                   np.any(pos.max(axis=0) > self.ds.domain_right_edge):
+                    raise YTDomainOverflow(pos.min(axis=0),
+                                           pos.max(axis=0),
+                                           self.ds.domain_left_edge,
+                                           self.ds.domain_right_edge)
+                regions.add_data_file(pos, data_file.file_id)
+                morton[ind:ind+pos.shape[0]] = compute_morton(
+                    pos[:,0], pos[:,1], pos[:,2],
+                    data_file.ds.domain_left_edge,
+                    data_file.ds.domain_right_edge)
+                ind += pos.shape[0]
+        return morton
+
+    def _count_particles(self, data_file):
+        with h5py.File(data_file.filename, "r") as f:
+            pcount = {"Group": f["Header"].attrs["Ngroups_ThisFile"],
+                      "Subhalo": f["Header"].attrs["Nsubgroups_ThisFile"]}
+            data_file.total_offset = 0 # need to figure out how subfind works here
+            return pcount
+
+    def _identify_fields(self, data_file):
+        fields = []
+        pcount = data_file.total_particles
+        if sum(pcount.values()) == 0: return fields, {}
+        with h5py.File(data_file.filename, "r") as f:
+            for ptype in self.ds.particle_types_raw:
+                if data_file.total_particles[ptype] == 0: continue
+                fields.append((ptype, "particle_identifier"))
+                my_fields, my_offset_fields = \
+                  subfind_field_list(f[ptype], ptype, data_file.total_particles)
+                fields.extend(my_fields)
+                self.offset_fields = self.offset_fields.union(set(my_offset_fields))
+        return fields, {}
+
+def subfind_field_list(fh, ptype, pcount):
+    fields = []
+    offset_fields = []
+    for field in fh.keys():
+        if isinstance(fh[field], h5py.Group):
+            my_fields, my_offset_fields = \
+              subfind_field_list(fh[field], ptype, pcount)
+            fields.extend(my_fields)
+            offset_fields.extend(my_offset_fields)
+        else:
+            if not fh[field].size % pcount[ptype]:
+                my_div = fh[field].size / pcount[ptype]
+                fname = fh[field].name[fh[field].name.find(ptype) + len(ptype) + 1:]
+                if my_div > 1:
+                    for i in range(my_div):
+                        fields.append((ptype, "%s_%d" % (fname, i)))
+                else:
+                    fields.append((ptype, fname))
+            elif ptype == "Subfind" and \
+              not fh[field].size % fh["/Subfind"].attrs["Number_of_groups"]:
+                # These are actually Group fields, but they were written after 
+                # a load balancing step moved halos around and thus they do not
+                # correspond to the halos stored in the Group group.
+                my_div = fh[field].size / fh["/Subfind"].attrs["Number_of_groups"]
+                fname = fh[field].name[fh[field].name.find(ptype) + len(ptype) + 1:]
+                if my_div > 1:
+                    for i in range(my_div):
+                        fields.append(("Group", "%s_%d" % (fname, i)))
+                else:
+                    fields.append(("Group", fname))
+                offset_fields.append(fname)
+            else:
+                mylog.warn("Cannot add field (%s, %s) with size %d." % \
+                           (ptype, fh[field].name, fh[field].size))
+                continue
+    return fields, offset_fields

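The trailing-index convention handled in _read_particle_fields (and documented
in loading_data.rst above) splits a field name like "SubhaloSpin_0" into an
on-disk dataset name and a column index. A standalone sketch with a
hypothetical 4-halo, 3-component array:

    import numpy as np

    field = "SubhaloSpin_0"
    fname = field[:field.rfind("_")]            # "SubhaloSpin"
    findex = int(field[field.rfind("_") + 1:])  # 0
    data = np.arange(12.0).reshape(4, 3)        # stand-in for the HDF5 dataset
    print data[:, findex]  # [ 0.  3.  6.  9.]
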
diff -r 989be30f7b70fc8e144581d1882c1dac913b25f9 -r d48960b8f6a7791a7610555cb447402900e981bd yt/frontends/gadget_fof/setup.py
--- /dev/null
+++ b/yt/frontends/gadget_fof/setup.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+import setuptools
+import os
+import sys
+import os.path
+
+
+def configuration(parent_package='', top_path=None):
+    from numpy.distutils.misc_util import Configuration
+    config = Configuration('gadget_fof', parent_package, top_path)
+    config.make_config_py()  # installs __config__.py
+    #config.make_svn_version_py()
+    return config

diff -r 989be30f7b70fc8e144581d1882c1dac913b25f9 -r d48960b8f6a7791a7610555cb447402900e981bd yt/frontends/gadget_fof/tests/test_outputs.py
--- /dev/null
+++ b/yt/frontends/gadget_fof/tests/test_outputs.py
@@ -0,0 +1,56 @@
+"""
+GadgetFOF frontend tests using gadget_fof datasets
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import os.path
+from yt.testing import \
+    assert_equal
+from yt.utilities.answer_testing.framework import \
+    FieldValuesTest, \
+    requires_ds, \
+    requires_file, \
+    data_dir_load
+from yt.frontends.gadget_fof.api import GadgetFOFDataset
+
+p_types  = ("Group", "Subhalo")
+p_fields = ("particle_position_x", "particle_position_y",
+            "particle_position_z", "particle_velocity_x",
+            "particle_velocity_y", "particle_velocity_z",
+            "particle_mass", "particle_identifier")
+_fields = tuple([(p_type, p_field) for p_type in p_types
+                                   for p_field in p_fields])
+
+# a dataset with empty files
+g5 = "gadget_fof_halos/groups_005/fof_subhalo_tab_005.0.hdf5"
+g42 = "gadget_fof_halos/groups_042/fof_subhalo_tab_042.0.hdf5"
+
+
+@requires_ds(g5)
+def test_fields_g5():
+    ds = data_dir_load(g5)
+    yield assert_equal, str(ds), os.path.basename(g5)
+    for field in _fields:
+        yield FieldValuesTest(g5, field, particle_type=True)
+
+
+@requires_ds(g42)
+def test_fields_g42():
+    ds = data_dir_load(g42)
+    yield assert_equal, str(ds), os.path.basename(g42)
+    for field in _fields:
+        yield FieldValuesTest(g42, field, particle_type=True)
+
+@requires_file(g42)
+def test_GadgetFOFDataset():
+    assert isinstance(data_dir_load(g42), GadgetFOFDataset)

diff -r 989be30f7b70fc8e144581d1882c1dac913b25f9 -r d48960b8f6a7791a7610555cb447402900e981bd yt/frontends/owls_subfind/data_structures.py
--- a/yt/frontends/owls_subfind/data_structures.py
+++ b/yt/frontends/owls_subfind/data_structures.py
@@ -27,11 +27,14 @@
 from .fields import \
     OWLSSubfindFieldInfo
 
-from yt.utilities.cosmology import Cosmology
+from yt.utilities.cosmology import \
+    Cosmology
 from yt.utilities.definitions import \
     mpc_conversion, sec_conversion
 from yt.utilities.exceptions import \
-     YTException
+    YTException
+from yt.utilities.logger import ytLogger as \
+     mylog
 from yt.geometry.particle_geometry_handler import \
     ParticleIndex
 from yt.data_objects.static_output import \
@@ -170,6 +173,7 @@
         # The other same defaults we will use from the standard Gadget
         # defaults.
         unit_base = self._unit_base or {}
+
         if "length" in unit_base:
             length_unit = unit_base["length"]
         elif "UnitLength_in_cm" in unit_base:
@@ -182,7 +186,6 @@
         length_unit = _fix_unit_ordering(length_unit)
         self.length_unit = self.quan(length_unit[0], length_unit[1])
 
-        unit_base = self._unit_base or {}
         if "velocity" in unit_base:
             velocity_unit = unit_base["velocity"]
         elif "UnitVelocity_in_cm_per_s" in unit_base:
@@ -191,6 +194,7 @@
             velocity_unit = (1e5, "cm/s")
         velocity_unit = _fix_unit_ordering(velocity_unit)
         self.velocity_unit = self.quan(velocity_unit[0], velocity_unit[1])
+
         # We set hubble_constant = 1.0 for non-cosmology, so this is safe.
         # Default to 1e10 Msun/h if mass is not specified.
         if "mass" in unit_base:
@@ -205,7 +209,14 @@
             mass_unit = (1.0, "1e10*Msun/h")
         mass_unit = _fix_unit_ordering(mass_unit)
         self.mass_unit = self.quan(mass_unit[0], mass_unit[1])
-        self.time_unit = self.quan(unit_base["UnitTime_in_s"], "s")
+
+        if "time" in unit_base:
+            time_unit = unit_base["time"]
+        elif "UnitTime_in_s" in unit_base:
+            time_unit = (unit_base["UnitTime_in_s"], "s")
+        else:
+            time_unit = (1., "s")        
+        self.time_unit = self.quan(time_unit[0], time_unit[1])
 
     @classmethod
     def _is_valid(self, *args, **kwargs):

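With the fallback above, OWLS subfind datasets no longer require a time unit
in unit_base. A sketch of supplying one explicitly anyway; the numeric value
is hypothetical, and yt.load forwards unit_base to the dataset constructor:

    import yt

    ds = yt.load("owls_fof_halos/groups_008/group_008.0.hdf5",
                 unit_base={"UnitTime_in_s": 3.08568e16})
    print ds.time_unit
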
diff -r 989be30f7b70fc8e144581d1882c1dac913b25f9 -r d48960b8f6a7791a7610555cb447402900e981bd yt/frontends/setup.py
--- a/yt/frontends/setup.py
+++ b/yt/frontends/setup.py
@@ -17,6 +17,7 @@
     config.add_subpackage("fits")
     config.add_subpackage("flash")
     config.add_subpackage("gadget")
+    config.add_subpackage("gadget_fof")
     config.add_subpackage("gdf")
     config.add_subpackage("halo_catalog")
     config.add_subpackage("http_stream")
@@ -34,11 +35,12 @@
     config.add_subpackage("athena/tests")
     config.add_subpackage("boxlib/tests")
     config.add_subpackage("chombo/tests")
+    config.add_subpackage("eagle/tests")
     config.add_subpackage("enzo/tests")
     config.add_subpackage("eagle/tests")
     config.add_subpackage("fits/tests")
     config.add_subpackage("flash/tests")
     config.add_subpackage("gadget/tests")
+    config.add_subpackage("gadget_fof/tests")
     config.add_subpackage("moab/tests")
     config.add_subpackage("owls/tests")
     config.add_subpackage("owls_subfind/tests")

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
