[yt-svn] commit/yt: jzuhone: Merged in brittonsmith/yt (pull request #1281)

commits-noreply at bitbucket.org
Thu Nov 6 13:40:51 PST 2014


1 new commit in yt:

https://bitbucket.org/yt_analysis/yt/commits/b4e9e508e031/
Changeset:   b4e9e508e031
Branch:      yt
User:        jzuhone
Date:        2014-11-06 21:40:37+00:00
Summary:     Merged in brittonsmith/yt (pull request #1281)

Moving SPH frontends into their own directories.
Affected #:  93 files

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/__init__.py
--- a/yt/__init__.py
+++ b/yt/__init__.py
@@ -136,9 +136,9 @@
     hexahedral_connectivity
 
 # For backwards compatibility
-GadgetDataset = frontends.sph.GadgetDataset
+GadgetDataset = frontends.gadget.GadgetDataset
 GadgetStaticOutput = deprecated_class(GadgetDataset)
-TipsyDataset = frontends.sph.TipsyDataset
+TipsyDataset = frontends.tipsy.TipsyDataset
 TipsyStaticOutput = deprecated_class(TipsyDataset)
 
 # Now individual component imports from the visualization API
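The aliases above keep the old class names importable while pointing them at the new per-frontend modules. As a sketch of the deprecation pattern (an illustration of the idea, not the body of yt's actual deprecated_class helper):

    import warnings

    def deprecated_class(cls):
        # Return a callable standing in for an old class name: it warns,
        # then instantiates the replacement class.
        def _old_name(*args, **kwargs):
            warnings.warn("This name is deprecated; use %s instead."
                          % cls.__name__, DeprecationWarning, stacklevel=2)
            return cls(*args, **kwargs)
        return _old_name

With that in place, yt.GadgetStaticOutput(...) still returns a GadgetDataset, just with a warning attached.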

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/analysis_modules/halo_analysis/halo_finding_methods.py
--- a/yt/analysis_modules/halo_analysis/halo_finding_methods.py
+++ b/yt/analysis_modules/halo_analysis/halo_finding_methods.py
@@ -17,7 +17,7 @@
 
 from yt.analysis_modules.halo_finding.halo_objects import \
     FOFHaloFinder, HOPHaloFinder
-from yt.frontends.halo_catalogs.halo_catalog.data_structures import \
+from yt.frontends.halo_catalog.data_structures import \
     HaloCatalogDataset
 from yt.frontends.stream.data_structures import \
     load_particles
@@ -70,7 +70,7 @@
     Run the Rockstar halo finding method.
     """
 
-    from yt.frontends.halo_catalogs.rockstar.data_structures import \
+    from yt.frontends.rockstar.data_structures import \
      RockstarDataset
     from yt.analysis_modules.halo_finding.rockstar.api import \
      RockstarHaloFinder
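Only the import paths change in this module; the same one-line update applies to any downstream script that imported from the old nested package. For instance:

    # before this changeset
    # from yt.frontends.halo_catalogs.halo_catalog.data_structures import \
    #     HaloCatalogDataset

    # after
    from yt.frontends.halo_catalog.data_structures import \
        HaloCatalogDataset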

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/api.py
--- a/yt/frontends/api.py
+++ b/yt/frontends/api.py
@@ -21,17 +21,21 @@
     'athena',
     'boxlib',
     'chombo',
+    'eagle',
     'enzo',
     'fits',
     'flash',
+    'gadget',
     'gdf',
-    'halo_catalogs',
+    'halo_catalog',
+    'http_stream',
     'moab',
+    'owls',
     #'pluto',
     'ramses',
     'sdf',
-    'sph',
     'stream',
+    'tipsy',
 ]
 
 class _frontend_container:
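Each name in _frontend_names is expected to be a subpackage of yt.frontends exposing an api module, and _frontend_container resolves them by attribute. A minimal sketch of that resolution (an illustration of the mechanism, not necessarily the exact class body):

    import importlib

    class _frontend_container(object):
        def __init__(self):
            # frontends.gadget then resolves to yt.frontends.gadget.api
            for frontend in _frontend_names:
                _mod = "yt.frontends.%s.api" % frontend
                setattr(self, frontend, importlib.import_module(_mod))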

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/eagle/api.py
--- /dev/null
+++ b/yt/frontends/eagle/api.py
@@ -0,0 +1,25 @@
+"""
+API for EAGLE frontend
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from .data_structures import \
+    EagleDataset, \
+    EagleNetworkDataset
+
+from .fields import \
+    EagleNetworkFieldInfo
+
+from .io import \
+    IOHandlerEagleNetwork

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/eagle/data_structures.py
--- /dev/null
+++ b/yt/frontends/eagle/data_structures.py
@@ -0,0 +1,98 @@
+"""
+Data structures for EAGLE frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+import numpy as np
+import types
+
+from yt.frontends.gadget.data_structures import \
+    GadgetHDF5Dataset
+from yt.frontends.owls.fields import \
+    OWLSFieldInfo
+import yt.units
+
+from .fields import \
+    EagleNetworkFieldInfo
+
+class EagleDataset(GadgetHDF5Dataset):
+    _particle_mass_name = "Mass"
+    _field_info_class = OWLSFieldInfo
+    _time_readin = 'Time'
+
+    def _parse_parameter_file(self):
+
+        # read values from header
+        hvals = self._get_hvals()
+        self.parameters = hvals
+
+        # set features common to OWLS and Eagle
+        self._set_owls_eagle()
+
+        # Set time from analytic solution for flat LCDM universe
+        a = hvals['ExpansionFactor']
+        H0 = hvals['H(z)'] / hvals['E(z)']
+        a_eq = ( self.omega_matter / self.omega_lambda )**(1./3)
+        t1 = 2.0 / ( 3.0 * np.sqrt( self.omega_lambda ) )
+        t2 = (a/a_eq)**(3./2)
+        t3 = np.sqrt( 1.0 + (a/a_eq)**3 )
+        t = t1 * np.log( t2 + t3 ) / H0
+        self.current_time = t * yt.units.s
+
+    def _set_code_unit_attributes(self):
+        self._set_owls_eagle_units()
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        need_groups = ['Config', 'Constants', 'HashTable', 'Header', 
+                       'Parameters', 'RuntimePars', 'Units']
+        veto_groups = ['SUBFIND',
+                       'PartType0/ChemistryAbundances', 
+                       'PartType0/ChemicalAbundances']
+        valid = True
+        try:
+            fileh = h5py.File(args[0], mode='r')
+            for ng in need_groups:
+                if ng not in fileh["/"]:
+                    valid = False
+            for vg in veto_groups:
+                if vg in fileh["/"]:
+                    valid = False                    
+            fileh.close()
+        except:
+            valid = False
+        return valid
+
+class EagleNetworkDataset(EagleDataset):
+    _particle_mass_name = "Mass"
+    _field_info_class = EagleNetworkFieldInfo
+    _time_readin = 'Time'
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        try:
+            fileh = h5py.File(args[0], mode='r')
+            if "Constants" in fileh["/"].keys() and \
+               "Header" in fileh["/"].keys() and \
+               "SUBFIND" not in fileh["/"].keys() and \
+               ("ChemistryAbundances" in fileh["PartType0"].keys()
+                or "ChemicalAbundances" in fileh["PartType0"].keys()):
+                fileh.close()
+                return True
+            fileh.close()
+        except:
+            pass
+        return False
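The current_time calculation above is the closed-form age of a flat LCDM universe: with a_eq = (omega_matter/omega_lambda)**(1./3), t(a) = 2/(3*H0*sqrt(omega_lambda)) * ln((a/a_eq)**1.5 + sqrt(1 + (a/a_eq)**3)), which is the standard asinh form since ln(x + sqrt(1 + x**2)) = asinh(x). A self-contained check against direct integration of the Friedmann equation (parameter values are illustrative, not from any EAGLE snapshot):

    import numpy as np
    from scipy.integrate import quad

    H0 = 2.2e-18            # ~67.7 km/s/Mpc in 1/s; illustrative
    om, ol = 0.307, 0.693   # flat LCDM, om + ol = 1
    a = 1.0                 # expansion factor

    # Closed form, as in EagleDataset._parse_parameter_file
    a_eq = (om / ol) ** (1. / 3)
    t1 = 2.0 / (3.0 * np.sqrt(ol))
    t2 = (a / a_eq) ** 1.5
    t3 = np.sqrt(1.0 + (a / a_eq) ** 3)
    t_closed = t1 * np.log(t2 + t3) / H0

    # Numerical: t = int_0^a da'/(a' H(a')) with H(a) = H0*sqrt(om*a**-3 + ol),
    # substituting a' = u**2 so the integrand is smooth at the origin.
    t_num, _ = quad(lambda u: 2.0 * u * u / (H0 * np.sqrt(om + ol * u**6)),
                    0.0, np.sqrt(a))

    assert abs(t_closed - t_num) / t_num < 1e-6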

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/eagle/definitions.py
--- /dev/null
+++ b/yt/frontends/eagle/definitions.py
@@ -0,0 +1,35 @@
+"""
+EAGLE definitions
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+eaglenetwork_ions = \
+    ('electron', 'H1', 'H2', 'H_m', 'He1', 'He2','He3', 'C1',\
+     'C2', 'C3', 'C4', 'C5', 'C6', 'C7', 'C_m', 'N1', 'N2', \
+     'N3', 'N4', 'N5', 'N6', 'N7', 'N8', 'O1', 'O2', 'O3', \
+     'O4', 'O5', 'O6', 'O7', 'O8', 'O9', 'O_m', 'Ne1', 'Ne2',\
+     'Ne3', 'Ne4', 'Ne5', 'Ne6', 'Ne7', 'Ne8', 'Ne9', 'Ne10',\
+     'Ne11', 'Mg1', 'Mg2', 'Mg3', 'Mg4', 'Mg5', 'Mg6', 'Mg7',\
+     'Mg8', 'Mg9', 'Mg10', 'Mg11', 'Mg12', 'Mg13', 'Si1', 'Si2',\
+     'Si3', 'Si4', 'Si5', 'Si6', 'Si7', 'Si8', 'Si9', 'Si10',\
+     'Si11', 'Si12', 'Si13', 'Si14', 'Si15', 'Si16', 'Si17',\
+     'Ca1', 'Ca2', 'Ca3', 'Ca4', 'Ca5', 'Ca6', 'Ca7', 'Ca8',\
+     'Ca9', 'Ca10', 'Ca11', 'Ca12', 'Ca13', 'Ca14', 'Ca15',\
+     'Ca16', 'Ca17', 'Ca18', 'Ca19', 'Ca20', 'Ca21', 'Fe1',\
+     'Fe2', 'Fe3', 'Fe4', 'Fe5', 'Fe6', 'Fe7', 'Fe8', 'Fe9',\
+     'Fe10', 'Fe11', 'Fe12', 'Fe13', 'Fe14', 'Fe15', 'Fe16',\
+     'Fe17', 'Fe18', 'Fe19', 'Fe20', 'Fe21', 'Fe22', 'Fe23',\
+     'Fe24', 'Fe25', 'Fe26', 'Fe27',)
+
+eaglenetwork_ion_lookup = {ion:index for index, ion in enumerate(eaglenetwork_ions)}
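The dict comprehension gives each ion label its position in the network, which the fields module uses to build the on-disk field name. For example, given the tuple above:

    index = eaglenetwork_ion_lookup['He2']   # 5, counting from 'electron'
    field = "Chemistry_%03i" % index         # "Chemistry_005"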

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/eagle/fields.py
--- /dev/null
+++ b/yt/frontends/eagle/fields.py
@@ -0,0 +1,73 @@
+"""
+EAGLE fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.frontends.owls.fields import \
+    OWLSFieldInfo
+from yt.units.yt_array import YTQuantity
+from yt.utilities.periodic_table import periodic_table
+
+from .definitions import \
+    eaglenetwork_ion_lookup
+
+class EagleNetworkFieldInfo(OWLSFieldInfo):
+
+    _ions = \
+        ('H1', 'H2', 'He1', 'He2','He3', 'C1',\
+         'C2', 'C3', 'C4', 'C5', 'C6', 'C7', 'N1', 'N2', \
+         'N3', 'N4', 'N5', 'N6', 'N7', 'N8', 'O1', 'O2', 'O3', \
+         'O4', 'O5', 'O6', 'O7', 'O8', 'O9', 'Ne1', 'Ne2',\
+         'Ne3', 'Ne4', 'Ne5', 'Ne6', 'Ne7', 'Ne8', 'Ne9', 'Ne10',\
+         'Ne11', 'Mg1', 'Mg2', 'Mg3', 'Mg4', 'Mg5', 'Mg6', 'Mg7',\
+         'Mg8', 'Mg9', 'Mg10', 'Mg11', 'Mg12', 'Mg13', 'Si1', 'Si2',\
+         'Si3', 'Si4', 'Si5', 'Si6', 'Si7', 'Si8', 'Si9', 'Si10',\
+         'Si11', 'Si12', 'Si13', 'Si14', 'Si15', 'Si16', 'Si17',\
+         'Ca1', 'Ca2', 'Ca3', 'Ca4', 'Ca5', 'Ca6', 'Ca7', 'Ca8',\
+         'Ca9', 'Ca10', 'Ca11', 'Ca12', 'Ca13', 'Ca14', 'Ca15',\
+         'Ca16', 'Ca17', 'Ca18', 'Ca19', 'Ca20', 'Ca21', 'Fe1',\
+         'Fe2', 'Fe3', 'Fe4', 'Fe5', 'Fe6', 'Fe7', 'Fe8', 'Fe9',\
+         'Fe10', 'Fe11', 'Fe12', 'Fe13', 'Fe14', 'Fe15', 'Fe16',\
+         'Fe17', 'Fe18', 'Fe19', 'Fe20', 'Fe21', 'Fe22', 'Fe23',\
+         'Fe24', 'Fe25', 'Fe26', 'Fe27',)
+
+    def __init__(self, *args, **kwargs):
+        super(EagleNetworkFieldInfo, self).__init__(*args, **kwargs)
+
+    def _create_ion_density_func(self, ftype, ion):
+        """Return a function that calculates the ion density of a particle."""
+
+        def _ion_density(field, data):
+
+            # Lookup the index of the ion 
+            index = eaglenetwork_ion_lookup[ion] 
+
+            # Ion to hydrogen number density ratio
+            ion_chem = data[ftype, "Chemistry_%03i"%index]
+
+            # Mass of a single ion
+            if ion[0:2].isalpha():
+                symbol = ion[0:2].capitalize()
+            else:
+                symbol = ion[0:1].capitalize()
+            m_ion = YTQuantity(periodic_table.elements_by_symbol[symbol].weight, 'amu')
+
+            # hydrogen number density 
+            n_H = data["PartType0", "H_number_density"] 
+
+            return m_ion*ion_chem*n_H 
+        
+        return _ion_density
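The symbol parsing inside _ion_density relies on a two-character alphabetic prefix marking a two-letter element. A quick illustration of the rule in isolation:

    for ion in ("He2", "C6", "Fe17", "O8"):
        if ion[0:2].isalpha():
            symbol = ion[0:2].capitalize()   # "He", "Fe"
        else:
            symbol = ion[0:1].capitalize()   # "C", "O"
        print(ion, "->", symbol)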

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/eagle/io.py
--- /dev/null
+++ b/yt/frontends/eagle/io.py
@@ -0,0 +1,21 @@
+"""
+EAGLE data-file handling function
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.frontends.owls.io import \
+    IOHandlerOWLS
+
+class IOHandlerEagleNetwork(IOHandlerOWLS):
+    _dataset_type = "eagle_network"

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/eagle/setup.py
--- /dev/null
+++ b/yt/frontends/eagle/setup.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+import setuptools
+import os
+import sys
+import os.path
+
+
+def configuration(parent_package='', top_path=None):
+    from numpy.distutils.misc_util import Configuration
+    config = Configuration('eagle', parent_package, top_path)
+    config.make_config_py()  # installs __config__.py
+    #config.make_svn_version_py()
+    return config

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/gadget/api.py
--- /dev/null
+++ b/yt/frontends/gadget/api.py
@@ -0,0 +1,23 @@
+"""
+API for Gadget frontend
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from .data_structures import \
+    GadgetDataset, \
+    GadgetHDF5Dataset
+
+from .io import \
+    IOHandlerGadgetBinary, \
+    IOHandlerGadgetHDF5

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/gadget/data_structures.py
--- /dev/null
+++ b/yt/frontends/gadget/data_structures.py
@@ -0,0 +1,337 @@
+"""
+Data structures for Gadget frontend
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+import numpy as np
+import stat
+import os
+import types
+
+from yt.data_objects.static_output import \
+    ParticleFile
+from yt.frontends.sph.data_structures import \
+    ParticleDataset
+from yt.frontends.sph.fields import \
+    SPHFieldInfo
+from yt.geometry.particle_geometry_handler import \
+    ParticleIndex
+from yt.utilities.cosmology import \
+    Cosmology
+from yt.utilities.definitions import \
+    sec_conversion
+from yt.utilities.fortran_utils import read_record
+from yt.utilities.logger import ytLogger as mylog
+
+from .definitions import \
+    gadget_header_specs, \
+    gadget_field_specs, \
+    gadget_ptype_specs
+
+def _fix_unit_ordering(unit):
+    if isinstance(unit[0], types.StringTypes):
+        unit = unit[1], unit[0]
+    return unit
+
+class GadgetBinaryFile(ParticleFile):
+    def __init__(self, ds, io, filename, file_id):
+        with open(filename, "rb") as f:
+            self.header = read_record(f, ds._header_spec)
+            self._position_offset = f.tell()
+            f.seek(0, os.SEEK_END)
+            self._file_size = f.tell()
+
+        super(GadgetBinaryFile, self).__init__(ds, io, filename, file_id)
+
+    def _calculate_offsets(self, field_list):
+        self.field_offsets = self.io._calculate_field_offsets(
+            field_list, self.total_particles,
+            self._position_offset, self._file_size)
+
+class GadgetDataset(ParticleDataset):
+    _index_class = ParticleIndex
+    _file_class = GadgetBinaryFile
+    _field_info_class = SPHFieldInfo
+    _particle_mass_name = "Mass"
+    _particle_coordinates_name = "Coordinates"
+    _particle_velocity_name = "Velocities"
+    _suffix = ""
+
+    def __init__(self, filename, dataset_type="gadget_binary",
+                 additional_fields=(),
+                 unit_base=None, n_ref=64,
+                 over_refine_factor=1,
+                 bounding_box = None,
+                 header_spec = "default",
+                 field_spec = "default",
+                 ptype_spec = "default",
+                 units_override=None):
+        if self._instantiated: return
+        self._header_spec = self._setup_binary_spec(
+            header_spec, gadget_header_specs)
+        self._field_spec = self._setup_binary_spec(
+            field_spec, gadget_field_specs)
+        self._ptype_spec = self._setup_binary_spec(
+            ptype_spec, gadget_ptype_specs)
+        self.n_ref = n_ref
+        self.over_refine_factor = over_refine_factor
+        self.storage_filename = None
+        if unit_base is not None and "UnitLength_in_cm" in unit_base:
+            # We assume this is comoving, because in the absence of comoving
+            # integration the redshift will be zero.
+            unit_base['cmcm'] = 1.0 / unit_base["UnitLength_in_cm"]
+        self._unit_base = unit_base
+        if bounding_box is not None:
+            bbox = np.array(bounding_box, dtype="float64")
+            if bbox.shape == (2, 3):
+                bbox = bbox.transpose()
+            self.domain_left_edge = bbox[:,0]
+            self.domain_right_edge = bbox[:,1]
+        else:
+            self.domain_left_edge = self.domain_right_edge = None
+        if units_override is not None:
+            raise RuntimeError("units_override is not supported for GadgetDataset. "+
+                               "Use unit_base instead.")
+        super(GadgetDataset, self).__init__(filename, dataset_type)
+
+    def _setup_binary_spec(self, spec, spec_dict):
+        if isinstance(spec, types.StringTypes):
+            _hs = ()
+            for hs in spec.split("+"):
+                _hs += spec_dict[hs]
+            spec = _hs
+        return spec
+
+    def __repr__(self):
+        return os.path.basename(self.parameter_filename).split(".")[0]
+
+    def _get_hvals(self):
+        # The entries in this header are capitalized and named to match Table 4
+        # in the GADGET-2 user guide.
+
+        with open(self.parameter_filename, 'rb') as f:
+            hvals = read_record(f, self._header_spec)
+        for i in hvals:
+            if len(hvals[i]) == 1:
+                hvals[i] = hvals[i][0]
+        return hvals
+
+    def _parse_parameter_file(self):
+
+        hvals = self._get_hvals()
+
+        self.dimensionality = 3
+        self.refine_by = 2
+        self.parameters["HydroMethod"] = "sph"
+        self.unique_identifier = \
+            int(os.stat(self.parameter_filename)[stat.ST_CTIME])
+        # Set standard values
+
+        # We may have an overridden bounding box.
+        if self.domain_left_edge is None:
+            self.domain_left_edge = np.zeros(3, "float64")
+            self.domain_right_edge = np.ones(3, "float64") * hvals["BoxSize"]
+        nz = 1 << self.over_refine_factor
+        self.domain_dimensions = np.ones(3, "int32") * nz
+        self.periodicity = (True, True, True)
+
+        self.cosmological_simulation = 1
+
+        self.current_redshift = hvals["Redshift"]
+        self.omega_lambda = hvals["OmegaLambda"]
+        self.omega_matter = hvals["Omega0"]
+        self.hubble_constant = hvals["HubbleParam"]
+        # According to the Gadget manual, OmegaLambda will be zero for
+        # non-cosmological datasets.  However, it may be the case that
+        # individuals are running cosmological simulations *without* Lambda, in
+        # which case we may be doing something incorrect here.
+        # It may be possible to deduce whether ComovingIntegration is on
+        # somehow, but opinions on this vary.
+        if self.omega_lambda == 0.0:
+            mylog.info("Omega Lambda is 0.0, so we are turning off Cosmology.")
+            self.hubble_constant = 1.0  # So that scaling comes out correct
+            self.cosmological_simulation = 0
+            self.current_redshift = 0.0
+            # This may not be correct.
+            self.current_time = hvals["Time"] * sec_conversion["Gyr"]
+        else:
+            # Now we calculate our time based on the cosmology, because in
+            # ComovingIntegration hvals["Time"] will in fact be the expansion
+            # factor, not the actual integration time, so we re-calculate
+            # global time from our Cosmology.
+            cosmo = Cosmology(self.hubble_constant,
+                              self.omega_matter, self.omega_lambda)
+            self.current_time = cosmo.hubble_time(self.current_redshift)
+            mylog.info("Calculating time from %0.3e to be %0.3e seconds",
+                       hvals["Time"], self.current_time)
+        self.parameters = hvals
+
+        prefix = self.parameter_filename.split(".", 1)[0]
+
+        if hvals["NumFiles"] > 1:
+            self.filename_template = "%s.%%(num)s%s" % (prefix, self._suffix)
+        else:
+            self.filename_template = self.parameter_filename
+
+        self.file_count = hvals["NumFiles"]
+
+    def _set_code_unit_attributes(self):
+        # If no units were passed in by the user, set a sane default (Gadget-2 user guide).
+        if self._unit_base is None:
+            if self.cosmological_simulation == 1:
+                mylog.info("Assuming length units are in kpc/h (comoving)")
+                self._unit_base = dict(length = (1.0, "kpccm/h"))
+            else:
+                mylog.info("Assuming length units are in kpc (physical)")
+                self._unit_base = dict(length = (1.0, "kpc"))
+                
+        # If units passed in by user, decide what to do about
+        # co-moving and factors of h
+        unit_base = self._unit_base or {}
+        if "length" in unit_base:
+            length_unit = unit_base["length"]
+        elif "UnitLength_in_cm" in unit_base:
+            if self.cosmological_simulation == 0:
+                length_unit = (unit_base["UnitLength_in_cm"], "cm")
+            else:
+                length_unit = (unit_base["UnitLength_in_cm"], "cmcm/h")
+        else:
+            raise RuntimeError
+        length_unit = _fix_unit_ordering(length_unit)
+        self.length_unit = self.quan(length_unit[0], length_unit[1])
+
+        unit_base = self._unit_base or {}
+        if "velocity" in unit_base:
+            velocity_unit = unit_base["velocity"]
+        elif "UnitVelocity_in_cm_per_s" in unit_base:
+            velocity_unit = (unit_base["UnitVelocity_in_cm_per_s"], "cm/s")
+        else:
+            velocity_unit = (1e5, "cm/s")
+        velocity_unit = _fix_unit_ordering(velocity_unit)
+        self.velocity_unit = self.quan(velocity_unit[0], velocity_unit[1])
+
+        # We set hubble_constant = 1.0 for non-cosmology, so this is safe.
+        # Default to 1e10 Msun/h if mass is not specified.
+        if "mass" in unit_base:
+            mass_unit = unit_base["mass"]
+        elif "UnitMass_in_g" in unit_base:
+            if self.cosmological_simulation == 0:
+                mass_unit = (unit_base["UnitMass_in_g"], "g")
+            else:
+                mass_unit = (unit_base["UnitMass_in_g"], "g/h")
+        else:
+            # Sane default
+            mass_unit = (1.0, "1e10*Msun/h")
+        mass_unit = _fix_unit_ordering(mass_unit)
+        self.mass_unit = self.quan(mass_unit[0], mass_unit[1])
+        self.time_unit = self.length_unit / self.velocity_unit
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        # We do not allow load() of these files.
+        return False
+
+class GadgetHDF5Dataset(GadgetDataset):
+    _file_class = ParticleFile
+    _field_info_class = SPHFieldInfo
+    _particle_mass_name = "Masses"
+    _suffix = ".hdf5"
+
+    def __init__(self, filename, dataset_type="gadget_hdf5", 
+                 unit_base = None, n_ref=64,
+                 over_refine_factor=1,
+                 bounding_box = None,
+                 units_override=None):
+        self.storage_filename = None
+        filename = os.path.abspath(filename)
+        if units_override is not None:
+            raise RuntimeError("units_override is not supported for GadgetHDF5Dataset. "+
+                               "Use unit_base instead.")
+        super(GadgetHDF5Dataset, self).__init__(
+            filename, dataset_type, unit_base=unit_base, n_ref=n_ref,
+            over_refine_factor=over_refine_factor,
+            bounding_box = bounding_box)
+
+    def _get_hvals(self):
+        handle = h5py.File(self.parameter_filename, mode="r")
+        hvals = {}
+        hvals.update((str(k), v) for k, v in handle["/Header"].attrs.items())
+        # Compat reasons.
+        hvals["NumFiles"] = hvals["NumFilesPerSnapshot"]
+        hvals["Massarr"] = hvals["MassTable"]
+        handle.close()
+        return hvals
+
+    def _get_uvals(self):
+        handle = h5py.File(self.parameter_filename, mode="r")
+        uvals = {}
+        uvals.update((str(k), v) for k, v in handle["/Units"].attrs.items())
+        handle.close()
+        return uvals
+
+
+
+    def _set_owls_eagle(self):
+
+        self.dimensionality = 3
+        self.refine_by = 2
+        self.parameters["HydroMethod"] = "sph"
+        self.unique_identifier = \
+            int(os.stat(self.parameter_filename)[stat.ST_CTIME])
+
+        self._unit_base = self._get_uvals()
+        self._unit_base['cmcm'] = 1.0 / self._unit_base["UnitLength_in_cm"]
+
+        self.current_redshift = self.parameters["Redshift"]
+        self.omega_lambda = self.parameters["OmegaLambda"]
+        self.omega_matter = self.parameters["Omega0"]
+        self.hubble_constant = self.parameters["HubbleParam"]
+
+        if self.domain_left_edge is None:
+            self.domain_left_edge = np.zeros(3, "float64")
+            self.domain_right_edge = np.ones(3, "float64") * self.parameters["BoxSize"]
+
+        nz = 1 << self.over_refine_factor
+        self.domain_dimensions = np.ones(3, "int32") * nz
+
+        self.cosmological_simulation = 1
+        self.periodicity = (True, True, True)
+
+        prefix = os.path.abspath(self.parameter_filename.split(".", 1)[0])
+        suffix = self.parameter_filename.rsplit(".", 1)[-1]
+        self.filename_template = "%s.%%(num)i.%s" % (prefix, suffix)
+        self.file_count = self.parameters["NumFilesPerSnapshot"]
+
+    def _set_owls_eagle_units(self):
+
+        # note the contents of the HDF5 Units group are in _unit_base 
+        # note the velocity stored on disk is sqrt(a) dx/dt 
+        self.length_unit = self.quan(self._unit_base["UnitLength_in_cm"], 'cmcm/h')
+        self.mass_unit = self.quan(self._unit_base["UnitMass_in_g"], 'g/h')
+        self.velocity_unit = self.quan(self._unit_base["UnitVelocity_in_cm_per_s"], 'cm/s')
+        self.time_unit = self.quan(self._unit_base["UnitTime_in_s"], 's/h')
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        try:
+            fileh = h5py.File(args[0], mode='r')
+            if "Constants" not in fileh["/"].keys() and \
+               "Header" in fileh["/"].keys():
+                fileh.close()
+                return True
+            fileh.close()
+        except:
+            pass
+        return False
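Because the binary-format _is_valid deliberately returns False, Gadget binary snapshots are not auto-detected by yt.load(); the class is instantiated directly with the keyword arguments defined above. An illustrative invocation (the file name, box, and unit values are placeholders):

    from yt.frontends.gadget.api import GadgetDataset

    unit_base = {"UnitLength_in_cm": 3.085678e21,      # 1 kpc
                 "UnitMass_in_g": 1.989e43,            # 1e10 Msun
                 "UnitVelocity_in_cm_per_s": 1e5}      # 1 km/s
    bbox = [[0.0, 25000.0], [0.0, 25000.0], [0.0, 25000.0]]

    ds = GadgetDataset("snapshot_010", unit_base=unit_base,
                       bounding_box=bbox)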

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/gadget/definitions.py
--- /dev/null
+++ b/yt/frontends/gadget/definitions.py
@@ -0,0 +1,69 @@
+"""
+Gadget definitions
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+gadget_header_specs = dict(
+    default      = (('Npart', 6, 'i'),
+                    ('Massarr', 6, 'd'),
+                    ('Time', 1, 'd'),
+                    ('Redshift', 1, 'd'),
+                    ('FlagSfr', 1, 'i'),
+                    ('FlagFeedback', 1, 'i'),
+                    ('Nall', 6, 'i'),
+                    ('FlagCooling', 1, 'i'),
+                    ('NumFiles', 1, 'i'),
+                    ('BoxSize', 1, 'd'),
+                    ('Omega0', 1, 'd'),
+                    ('OmegaLambda', 1, 'd'),
+                    ('HubbleParam', 1, 'd'),
+                    ('FlagAge', 1, 'i'),
+                    ('FlagMetals', 1, 'i'),
+                    ('NallHW', 6, 'i'),
+                    ('unused', 16, 'i')),
+    pad32       = (('empty',  32, 'c'),),
+    pad64       = (('empty',  64, 'c'),),
+    pad128      = (('empty', 128, 'c'),),
+    pad256      = (('empty', 256, 'c'),),
+)
+
+gadget_ptype_specs = dict(
+    default = ( "Gas",
+                "Halo",
+                "Disk",
+                "Bulge",
+                "Stars",
+                "Bndry" )
+)
+
+gadget_field_specs = dict(
+    default = ( "Coordinates",
+                "Velocities",
+                "ParticleIDs",
+                "Mass",
+                ("InternalEnergy", "Gas"),
+                ("Density", "Gas"),
+                ("SmoothingLength", "Gas"),
+    ),
+    agora_unlv = ( "Coordinates",
+                   "Velocities",
+                   "ParticleIDs",
+                   "Mass",
+                   ("InternalEnergy", "Gas"),
+                   ("Density", "Gas"),
+                   ("Electron_Number_Density", "Gas"),
+                   ("HI_NumberDensity", "Gas"),
+                   ("SmoothingLength", "Gas"),
+    )
+)
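GadgetDataset._setup_binary_spec accepts either one of these names or several joined by "+", concatenating the corresponding tuples; that is how a snapshot with trailing header padding can be described without writing out a full custom spec. The expansion amounts to:

    spec = ()
    for name in "default+pad32".split("+"):
        spec += gadget_header_specs[name]
    # spec is now the default header layout followed by (('empty', 32, 'c'),)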

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/gadget/io.py
--- /dev/null
+++ b/yt/frontends/gadget/io.py
@@ -0,0 +1,210 @@
+"""
+Gadget data-file handling functions
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+import numpy as np
+import os
+import types
+
+from yt.frontends.owls.io import \
+    IOHandlerOWLS
+from yt.geometry.oct_container import \
+    _ORDER_MAX
+from yt.utilities.io_handler import \
+    BaseIOHandler
+from yt.utilities.lib.geometry_utils import \
+    compute_morton
+from yt.utilities.logger import ytLogger as mylog
+
+class IOHandlerGadgetHDF5(IOHandlerOWLS):
+    _dataset_type = "gadget_hdf5"
+
+ZeroMass = object()
+    
+class IOHandlerGadgetBinary(BaseIOHandler):
+    _dataset_type = "gadget_binary"
+    _vector_fields = ("Coordinates", "Velocity", "Velocities")
+
+    # Particle types (Table 3 in GADGET-2 user guide)
+    #
+    # Blocks in the file:
+    #   HEAD
+    #   POS
+    #   VEL
+    #   ID
+    #   MASS    (variable mass only)
+    #   U       (gas only)
+    #   RHO     (gas only)
+    #   HSML    (gas only)
+    #   POT     (only if enabled in makefile)
+    #   ACCE    (only if enabled in makefile)
+    #   ENDT    (only if enabled in makefile)
+    #   TSTP    (only if enabled in makefile)
+
+    _var_mass = None
+
+    def __init__(self, ds, *args, **kwargs):
+        self._fields = ds._field_spec
+        self._ptypes = ds._ptype_spec
+        super(IOHandlerGadgetBinary, self).__init__(ds, *args, **kwargs)
+
+    @property
+    def var_mass(self):
+        if self._var_mass is None:
+            vm = []
+            for i, v in enumerate(self.ds["Massarr"]):
+                if v == 0:
+                    vm.append(self._ptypes[i])
+            self._var_mass = tuple(vm)
+        return self._var_mass
+
+    def _read_fluid_selection(self, chunks, selector, fields, size):
+        raise NotImplementedError
+
+    def _read_particle_coords(self, chunks, ptf):
+        data_files = set([])
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in sorted(data_files):
+            poff = data_file.field_offsets
+            tp = data_file.total_particles
+            f = open(data_file.filename, "rb")
+            for ptype in ptf:
+                # This is where we could implement sub-chunking
+                f.seek(poff[ptype, "Coordinates"], os.SEEK_SET)
+                pos = self._read_field_from_file(f,
+                            tp[ptype], "Coordinates")
+                yield ptype, (pos[:,0], pos[:,1], pos[:,2])
+            f.close()
+
+    def _read_particle_fields(self, chunks, ptf, selector):
+        data_files = set([])
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in sorted(data_files):
+            poff = data_file.field_offsets
+            tp = data_file.total_particles
+            f = open(data_file.filename, "rb")
+            for ptype, field_list in sorted(ptf.items()):
+                f.seek(poff[ptype, "Coordinates"], os.SEEK_SET)
+                pos = self._read_field_from_file(f,
+                            tp[ptype], "Coordinates")
+                mask = selector.select_points(
+                    pos[:,0], pos[:,1], pos[:,2], 0.0)
+                del pos
+                if mask is None: continue
+                for field in field_list:
+                    if field == "Mass" and ptype not in self.var_mass:
+                        data = np.empty(mask.sum(), dtype="float64")
+                        m = self.ds.parameters["Massarr"][
+                            self._ptypes.index(ptype)]
+                        data[:] = m
+                        yield (ptype, field), data
+                        continue
+                    f.seek(poff[ptype, field], os.SEEK_SET)
+                    data = self._read_field_from_file(f, tp[ptype], field)
+                    data = data[mask,...]
+                    yield (ptype, field), data
+            f.close()
+
+    def _read_field_from_file(self, f, count, name):
+        if count == 0: return
+        if name == "ParticleIDs":
+            dt = "uint32"
+        else:
+            dt = "float32"
+        if name in self._vector_fields:
+            count *= 3
+        arr = np.fromfile(f, dtype=dt, count = count)
+        if name in self._vector_fields:
+            arr = arr.reshape((count/3, 3), order="C")
+        return arr.astype("float64")
+
+    def _initialize_index(self, data_file, regions):
+        count = sum(data_file.total_particles.values())
+        DLE = data_file.ds.domain_left_edge
+        DRE = data_file.ds.domain_right_edge
+        dx = (DRE - DLE) / 2**_ORDER_MAX
+        with open(data_file.filename, "rb") as f:
+            # We add an additional 4 bytes for the first record marker.
+            f.seek(data_file._position_offset + 4)
+            # The first total_particles * 3 values are positions
+            pp = np.fromfile(f, dtype = 'float32', count = count*3)
+            pp.shape = (count, 3)
+        regions.add_data_file(pp, data_file.file_id, data_file.ds.filter_bbox)
+        morton = compute_morton(pp[:,0], pp[:,1], pp[:,2], DLE, DRE,
+                                data_file.ds.filter_bbox)
+        return morton
+
+    def _count_particles(self, data_file):
+        npart = dict((self._ptypes[i], v)
+            for i, v in enumerate(data_file.header["Npart"]))
+        return npart
+
+    # The header is 256 bytes, but it is bracketed by 4-byte record
+    # markers at the beginning and end.
+    _field_size = 4
+    def _calculate_field_offsets(self, field_list, pcount,
+                                 offset, file_size = None):
+        # field_list is (ftype, fname) but the blocks are ordered
+        # (fname, ftype) in the file.
+        pos = offset
+        fs = self._field_size
+        offsets = {}
+        for field in self._fields:
+            if not isinstance(field, types.StringTypes):
+                field = field[0]
+            if not any( (ptype, field) in field_list
+                        for ptype in self._ptypes):
+                continue
+            pos += 4
+            any_ptypes = False
+            for ptype in self._ptypes:
+                if field == "Mass" and ptype not in self.var_mass:
+                    continue
+                if (ptype, field) not in field_list:
+                    continue
+                offsets[(ptype, field)] = pos
+                any_ptypes = True
+                if field in self._vector_fields:
+                    pos += 3 * pcount[ptype] * fs
+                else:
+                    pos += pcount[ptype] * fs
+            pos += 4
+            if not any_ptypes: pos -= 8
+        if file_size is not None:
+            if file_size != pos:
+                mylog.warning("Your Gadget-2 file may have extra " +
+                              "columns or different precision!" +
+                              " (%s file vs %s computed)",
+                              file_size, pos)
+        return offsets
+
+    def _identify_fields(self, domain):
+        # We can just look at the particle counts.
+        field_list = []
+        tp = domain.total_particles
+        for i, ptype in enumerate(self._ptypes):
+            count = tp[ptype]
+            if count == 0: continue
+            m = domain.header["Massarr"][i]
+            for field in self._fields:
+                if isinstance(field, types.TupleType):
+                    field, req = field
+                    if req is ZeroMass:
+                        if m > 0.0 : continue
+                    elif req != ptype:
+                        continue
+                field_list.append((ptype, field))
+        return field_list, {}
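_calculate_field_offsets follows the unformatted-Fortran layout of a Gadget-2 snapshot: each block is bracketed by 4-byte record markers and stores all particle types back to back at 4 bytes per scalar (three scalars per vector entry). A worked example of the arithmetic for a gas-only snapshot (the particle count is hypothetical):

    N = 1000                     # gas particles, no other types
    fs = 4                       # bytes per float32
    header_end = 4 + 256 + 4     # marker + 256-byte header + marker

    pos = header_end + 4                 # POS payload starts past its marker
    vel = pos + 3 * N * fs + 4 + 4       # past POS data, closing + opening markers
    ids = vel + 3 * N * fs + 4 + 4       # past VEL data
    print(pos, vel, ids)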

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/gadget/setup.py
--- /dev/null
+++ b/yt/frontends/gadget/setup.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+import setuptools
+import os
+import sys
+import os.path
+
+
+def configuration(parent_package='', top_path=None):
+    from numpy.distutils.misc_util import Configuration
+    config = Configuration('gadget', parent_package, top_path)
+    config.make_config_py()  # installs __config__.py
+    #config.make_svn_version_py()
+    return config

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/halo_catalog/__init__.py
--- /dev/null
+++ b/yt/frontends/halo_catalog/__init__.py
@@ -0,0 +1,15 @@
+"""
+API for HaloCatalog frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/halo_catalog/api.py
--- /dev/null
+++ b/yt/frontends/halo_catalog/api.py
@@ -0,0 +1,24 @@
+"""
+API for HaloCatalog frontend
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from .data_structures import \
+     HaloCatalogDataset
+
+from .io import \
+     IOHandlerHaloCatalogHDF5
+
+from .fields import \
+     HaloCatalogFieldInfo

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/halo_catalog/data_structures.py
--- /dev/null
+++ b/yt/frontends/halo_catalog/data_structures.py
@@ -0,0 +1,97 @@
+"""
+Data structures for HaloCatalog frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+import numpy as np
+import stat
+import weakref
+import struct
+import glob
+import time
+import os
+
+from .fields import \
+    HaloCatalogFieldInfo
+
+from yt.utilities.cosmology import Cosmology
+from yt.geometry.particle_geometry_handler import \
+    ParticleIndex
+from yt.data_objects.static_output import \
+    Dataset, \
+    ParticleFile
+import yt.utilities.fortran_utils as fpu
+from yt.units.yt_array import \
+    YTArray, \
+    YTQuantity
+    
+class HaloCatalogHDF5File(ParticleFile):
+    def __init__(self, ds, io, filename, file_id):
+        with h5py.File(filename, "r") as f:
+            self.header = dict((field, f.attrs[field]) \
+                               for field in f.attrs.keys())
+
+        super(HaloCatalogHDF5File, self).__init__(ds, io, filename, file_id)
+    
+class HaloCatalogDataset(Dataset):
+    _index_class = ParticleIndex
+    _file_class = HaloCatalogHDF5File
+    _field_info_class = HaloCatalogFieldInfo
+    _suffix = ".h5"
+
+    def __init__(self, filename, dataset_type="halocatalog_hdf5",
+                 n_ref = 16, over_refine_factor = 1, units_override=None):
+        self.n_ref = n_ref
+        self.over_refine_factor = over_refine_factor
+        super(HaloCatalogDataset, self).__init__(filename, dataset_type,
+                                                 units_override=units_override)
+
+    def _parse_parameter_file(self):
+        with h5py.File(self.parameter_filename, "r") as f:
+            hvals = dict((key, f.attrs[key]) for key in f.attrs.keys())
+        self.dimensionality = 3
+        self.refine_by = 2
+        self.unique_identifier = \
+            int(os.stat(self.parameter_filename)[stat.ST_CTIME])
+        prefix = ".".join(self.parameter_filename.rsplit(".", 2)[:-2])
+        self.filename_template = "%s.%%(num)s%s" % (prefix, self._suffix)
+        self.file_count = len(glob.glob(prefix + "*" + self._suffix))
+
+        for attr in ["cosmological_simulation", "current_time", "current_redshift",
+                     "hubble_constant", "omega_matter", "omega_lambda",
+                     "domain_left_edge", "domain_right_edge"]:
+            setattr(self, attr, hvals[attr])
+        self.periodicity = (True, True, True)
+        self.particle_types = ("halos",)
+        self.particle_types_raw = ("halos",)
+
+        nz = 1 << self.over_refine_factor
+        self.domain_dimensions = np.ones(3, "int32") * nz
+        self.parameters.update(hvals)
+
+    def _set_code_unit_attributes(self):
+        self.length_unit = self.quan(1.0, "cm")
+        self.mass_unit = self.quan(1.0, "g")
+        self.velocity_unit = self.quan(1.0, "cm / s")
+        self.time_unit = self.quan(1.0, "s")
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        if not args[0].endswith(".h5"): return False
+        with h5py.File(args[0], "r") as f:
+            if "data_type" in f.attrs and \
+              f.attrs["data_type"] == "halo_catalog":
+                return True
+        return False
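_is_valid accepts any ".h5" file whose root attributes include data_type == "halo_catalog", and _parse_parameter_file expects the cosmology and domain attributes listed above, plus a per-file num_halos count used by the IO handler. A sketch of writing a file this frontend could ingest (field names follow HaloCatalogFieldInfo; all values are made up):

    import h5py
    import numpy as np

    n = 16
    with h5py.File("catalog.0.h5", "w") as f:
        f.attrs["data_type"] = "halo_catalog"
        f.attrs["num_halos"] = n
        f.attrs["cosmological_simulation"] = 1
        f.attrs["current_time"] = 4.3e17
        f.attrs["current_redshift"] = 0.0
        f.attrs["hubble_constant"] = 0.7
        f.attrs["omega_matter"] = 0.3
        f.attrs["omega_lambda"] = 0.7
        f.attrs["domain_left_edge"] = np.zeros(3)
        f.attrs["domain_right_edge"] = np.ones(3) * 3.1e26  # ~100 Mpc in cm
        for ax in "xyz":
            d = f.create_dataset("particle_position_%s" % ax,
                                 data=np.random.uniform(0, 3.1e26, n))
            d.attrs["units"] = "cm"   # read back by _identify_fields
        d = f.create_dataset("particle_mass",
                             data=np.random.uniform(1e45, 1e47, n))
        d.attrs["units"] = "g"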

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/halo_catalog/fields.py
--- /dev/null
+++ b/yt/frontends/halo_catalog/fields.py
@@ -0,0 +1,48 @@
+"""
+HaloCatalog-specific fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+
+from yt.funcs import mylog
+from yt.fields.field_info_container import \
+    FieldInfoContainer
+from yt.units.yt_array import \
+    YTArray
+
+from yt.utilities.physical_constants import \
+    mh, \
+    mass_sun_cgs
+
+m_units = "g"
+p_units = "cm"
+v_units = "cm / s"
+r_units = "cm"
+
+class HaloCatalogFieldInfo(FieldInfoContainer):
+    known_other_fields = (
+    )
+
+    known_particle_fields = (
+        ("particle_identifier", ("", [], None)),
+        ("particle_position_x", (p_units, [], None)),
+        ("particle_position_y", (p_units, [], None)),
+        ("particle_position_z", (p_units, [], None)),
+        ("particle_velocity_x", (v_units, [], None)),
+        ("particle_velocity_y", (v_units, [], None)),
+        ("particle_velocity_z", (v_units, [], None)),
+        ("particle_mass", (m_units, [], "Virial Mass")),
+        ("virial_radius", (r_units, [], "Virial Radius")),
+)

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/halo_catalog/io.py
--- /dev/null
+++ b/yt/frontends/halo_catalog/io.py
@@ -0,0 +1,119 @@
+"""
+HaloCatalog data-file handling function
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+import numpy as np
+
+from yt.utilities.exceptions import *
+from yt.funcs import mylog
+
+from yt.utilities.io_handler import \
+    BaseIOHandler
+
+from yt.utilities.lib.geometry_utils import compute_morton
+
+from yt.geometry.oct_container import _ORDER_MAX
+
+class IOHandlerHaloCatalogHDF5(BaseIOHandler):
+    _dataset_type = "halocatalog_hdf5"
+
+    def _read_fluid_selection(self, chunks, selector, fields, size):
+        raise NotImplementedError
+
+    def _read_particle_coords(self, chunks, ptf):
+        # This will read chunks and yield the results.
+        chunks = list(chunks)
+        data_files = set([])
+        # Only support halo reading for now.
+        assert(len(ptf) == 1)
+        assert(ptf.keys()[0] == "halos")
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in sorted(data_files):
+            pcount = data_file.header['num_halos']
+            with h5py.File(data_file.filename, "r") as f:
+                x = f['particle_position_x'].value.astype("float64")
+                y = f['particle_position_y'].value.astype("float64")
+                z = f['particle_position_z'].value.astype("float64")
+                yield "halos", (x, y, z)
+
+    def _read_particle_fields(self, chunks, ptf, selector):
+        # Now we have all the sizes, and we can allocate
+        chunks = list(chunks)
+        data_files = set([])
+        # Only support halo reading for now.
+        assert(len(ptf) == 1)
+        assert(ptf.keys()[0] == "halos")
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in sorted(data_files):
+            pcount = data_file.header['num_halos']
+            with h5py.File(data_file.filename, "r") as f:
+                for ptype, field_list in sorted(ptf.items()):
+                    x = f['particle_position_x'].value.astype("float64")
+                    y = f['particle_position_y'].value.astype("float64")
+                    z = f['particle_position_z'].value.astype("float64")
+                    mask = selector.select_points(x, y, z, 0.0)
+                    del x, y, z
+                    if mask is None: continue
+                    for field in field_list:
+                        data = f[field][mask].astype("float64")
+                        yield (ptype, field), data
+
+    def _initialize_index(self, data_file, regions):
+        pcount = data_file.header["num_halos"]
+        morton = np.empty(pcount, dtype='uint64')
+        mylog.debug("Initializing index % 5i (% 7i particles)",
+                    data_file.file_id, pcount)
+        ind = 0
+        with h5py.File(data_file.filename, "r") as f:
+            if not f.keys(): return None
+            pos = np.empty((pcount, 3), dtype="float64")
+            pos = data_file.ds.arr(pos, "code_length")
+            dx = np.finfo(f['particle_position_x'].dtype).eps
+            dx = 2.0*self.ds.quan(dx, "code_length")
+            pos[:,0] = f["particle_position_x"].value
+            pos[:,1] = f["particle_position_y"].value
+            pos[:,2] = f["particle_position_z"].value
+            # These are 32 bit numbers, so we give a little lee-way.
+            # Otherwise, for big sets of particles, we often will bump into the
+            # domain edges.  This helps alleviate that.
+            np.clip(pos, self.ds.domain_left_edge + dx,
+                         self.ds.domain_right_edge - dx, pos)
+            if np.any(pos.min(axis=0) < self.ds.domain_left_edge) or \
+               np.any(pos.max(axis=0) > self.ds.domain_right_edge):
+                raise YTDomainOverflow(pos.min(axis=0),
+                                       pos.max(axis=0),
+                                       self.ds.domain_left_edge,
+                                       self.ds.domain_right_edge)
+            regions.add_data_file(pos, data_file.file_id)
+            morton[ind:ind+pos.shape[0]] = compute_morton(
+                pos[:,0], pos[:,1], pos[:,2],
+                data_file.ds.domain_left_edge,
+                data_file.ds.domain_right_edge)
+        return morton
+
+    def _count_particles(self, data_file):
+        return {'halos': data_file.header['num_halos']}
+
+    def _identify_fields(self, data_file):
+        with h5py.File(data_file.filename, "r") as f:
+            fields = [("halos", field) for field in f]
+            units = dict([(("halos", field), 
+                           f[field].attrs["units"]) for field in f])
+        return fields, units
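Once a catalog validates, the fields are read through yt's normal selection machinery, with everything hanging off the "halos" particle type defined in data_structures.py. An illustrative access pattern (the file name is a placeholder):

    import yt

    ds = yt.load("catalog.0.h5")
    ad = ds.all_data()
    print(ad["halos", "particle_mass"])   # virial masses, in grams
    print(ad["halos", "virial_radius"])   # virial radii, in cm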

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/halo_catalog/setup.py
--- /dev/null
+++ b/yt/frontends/halo_catalog/setup.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+import setuptools
+import os
+import sys
+import os.path
+
+
+def configuration(parent_package='', top_path=None):
+    from numpy.distutils.misc_util import Configuration
+    config = Configuration('halo_catalog', parent_package, top_path)
+    config.make_config_py()  # installs __config__.py
+    #config.make_svn_version_py()
+    return config

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/halo_catalogs/__init__.py
--- a/yt/frontends/halo_catalogs/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-"""
-API for halo catalog frontends.
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/halo_catalogs/api.py
--- a/yt/frontends/halo_catalogs/api.py
+++ /dev/null
@@ -1,30 +0,0 @@
-"""
-API for yt.frontends.halo_catalogs
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from .halo_catalog.api import \
-     HaloCatalogDataset, \
-     IOHandlerHaloCatalogHDF5, \
-     HaloCatalogFieldInfo
-
-from .rockstar.api import \
-      RockstarDataset, \
-      IOHandlerRockstarBinary, \
-      RockstarFieldInfo
-
-from .owls_subfind.api import \
-     OWLSSubfindDataset, \
-     IOHandlerOWLSSubfindHDF5, \
-     OWLSSubfindFieldInfo

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/halo_catalogs/halo_catalog/__init__.py
--- a/yt/frontends/halo_catalogs/halo_catalog/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-"""
-API for HaloCatalog frontend.
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/halo_catalogs/halo_catalog/api.py
--- a/yt/frontends/halo_catalogs/halo_catalog/api.py
+++ /dev/null
@@ -1,24 +0,0 @@
-"""
-API for HaloCatalog frontend
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2014, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from .data_structures import \
-     HaloCatalogDataset
-
-from .io import \
-     IOHandlerHaloCatalogHDF5
-
-from .fields import \
-     HaloCatalogFieldInfo

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/halo_catalogs/halo_catalog/data_structures.py
--- a/yt/frontends/halo_catalogs/halo_catalog/data_structures.py
+++ /dev/null
@@ -1,97 +0,0 @@
-"""
-Data structures for HaloCatalog frontend.
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-import h5py
-import numpy as np
-import stat
-import weakref
-import struct
-import glob
-import time
-import os
-
-from .fields import \
-    HaloCatalogFieldInfo
-
-from yt.utilities.cosmology import Cosmology
-from yt.geometry.particle_geometry_handler import \
-    ParticleIndex
-from yt.data_objects.static_output import \
-    Dataset, \
-    ParticleFile
-import yt.utilities.fortran_utils as fpu
-from yt.units.yt_array import \
-    YTArray, \
-    YTQuantity
-    
-class HaloCatalogHDF5File(ParticleFile):
-    def __init__(self, ds, io, filename, file_id):
-        with h5py.File(filename, "r") as f:
-            self.header = dict((field, f.attrs[field]) \
-                               for field in f.attrs.keys())
-
-        super(HaloCatalogHDF5File, self).__init__(ds, io, filename, file_id)
-    
-class HaloCatalogDataset(Dataset):
-    _index_class = ParticleIndex
-    _file_class = HaloCatalogHDF5File
-    _field_info_class = HaloCatalogFieldInfo
-    _suffix = ".h5"
-
-    def __init__(self, filename, dataset_type="halocatalog_hdf5",
-                 n_ref = 16, over_refine_factor = 1, units_override=None):
-        self.n_ref = n_ref
-        self.over_refine_factor = over_refine_factor
-        super(HaloCatalogDataset, self).__init__(filename, dataset_type,
-                                                 units_override=units_override)
-
-    def _parse_parameter_file(self):
-        with h5py.File(self.parameter_filename, "r") as f:
-            hvals = dict((key, f.attrs[key]) for key in f.attrs.keys())
-        self.dimensionality = 3
-        self.refine_by = 2
-        self.unique_identifier = \
-            int(os.stat(self.parameter_filename)[stat.ST_CTIME])
-        prefix = ".".join(self.parameter_filename.rsplit(".", 2)[:-2])
-        self.filename_template = "%s.%%(num)s%s" % (prefix, self._suffix)
-        self.file_count = len(glob.glob(prefix + "*" + self._suffix))
-
-        for attr in ["cosmological_simulation", "current_time", "current_redshift",
-                     "hubble_constant", "omega_matter", "omega_lambda",
-                     "domain_left_edge", "domain_right_edge"]:
-            setattr(self, attr, hvals[attr])
-        self.periodicity = (True, True, True)
-        self.particle_types = ("halos")
-        self.particle_types_raw = ("halos")
-
-        nz = 1 << self.over_refine_factor
-        self.domain_dimensions = np.ones(3, "int32") * nz
-        self.parameters.update(hvals)
-
-    def _set_code_unit_attributes(self):
-        self.length_unit = self.quan(1.0, "cm")
-        self.mass_unit = self.quan(1.0, "g")
-        self.velocity_unit = self.quan(1.0, "cm / s")
-        self.time_unit = self.quan(1.0, "s")
-
-    @classmethod
-    def _is_valid(self, *args, **kwargs):
-        if not args[0].endswith(".h5"): return False
-        with h5py.File(args[0], "r") as f:
-            if "data_type" in f.attrs and \
-              f.attrs["data_type"] == "halo_catalog":
-                return True
-        return False

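For orientation: _is_valid above only checks that the file name ends in ".h5" and that the file carries a data_type attribute equal to "halo_catalog", while _parse_parameter_file copies the cosmology and domain attributes onto the dataset and derives a "prefix.%(num)s.h5" filename template from the file it was handed. Below is a minimal sketch (not part of this commit) of a file that would satisfy those checks; the file name and attribute values are hypothetical, but every attribute and dataset name is taken from the code above, including num_halos and the per-field units attributes that the IO handler further down expects:

    import h5py
    import numpy as np

    with h5py.File("halos_0.0.h5", "w") as f:
        f.attrs["data_type"] = "halo_catalog"      # checked by _is_valid
        f.attrs["num_halos"] = 2                   # read by the IO handler
        f.attrs["cosmological_simulation"] = 1
        f.attrs["current_time"] = 4.3e17           # code units are CGS here
        f.attrs["current_redshift"] = 0.0
        f.attrs["hubble_constant"] = 0.7
        f.attrs["omega_matter"] = 0.27
        f.attrs["omega_lambda"] = 0.73
        f.attrs["domain_left_edge"] = np.zeros(3)
        f.attrs["domain_right_edge"] = np.ones(3) * 3.0e26
        for ax in "xyz":
            d = f.create_dataset("particle_position_%s" % ax,
                                 data=np.random.random(2) * 3.0e26)
            d.attrs["units"] = "cm"                # read by _identify_fields

Loading such a file with yt.load("halos_0.0.h5") would then dispatch to this frontend through _is_valid.
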
diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/halo_catalogs/halo_catalog/fields.py
--- a/yt/frontends/halo_catalogs/halo_catalog/fields.py
+++ /dev/null
@@ -1,48 +0,0 @@
-"""
-HaloCatalog-specific fields
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-import numpy as np
-
-from yt.funcs import mylog
-from yt.fields.field_info_container import \
-    FieldInfoContainer
-from yt.units.yt_array import \
-    YTArray
-
-from yt.utilities.physical_constants import \
-    mh, \
-    mass_sun_cgs
-
-m_units = "g"
-p_units = "cm"
-v_units = "cm / s"
-r_units = "cm"
-
-class HaloCatalogFieldInfo(FieldInfoContainer):
-    known_other_fields = (
-    )
-
-    known_particle_fields = (
-        ("particle_identifier", ("", [], None)),
-        ("particle_position_x", (p_units, [], None)),
-        ("particle_position_y", (p_units, [], None)),
-        ("particle_position_z", (p_units, [], None)),
-        ("particle_velocity_x", (v_units, [], None)),
-        ("particle_velocity_y", (v_units, [], None)),
-        ("particle_velocity_z", (v_units, [], None)),
-        ("particle_mass", (m_units, [], "Virial Mass")),
-        ("virial_radius", (r_units, [], "Virial Radius")),
-)

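Each entry in known_particle_fields pairs an on-disk field name with a (units, aliases, display_name) tuple; an empty alias list and a None display name mean the field keeps its on-disk name and a default label. A hedged sketch of the convention follows (MyHaloFieldInfo and the extra circular_velocity field are hypothetical, not part of this frontend):

    from yt.fields.field_info_container import FieldInfoContainer

    class MyHaloFieldInfo(FieldInfoContainer):
        known_other_fields = ()      # no on-disk mesh/fluid fields
        known_particle_fields = (
            # (on-disk name, (units, aliases, display name))
            ("particle_mass",     ("g",      [], "Virial Mass")),
            ("virial_radius",     ("cm",     [], "Virial Radius")),
            ("circular_velocity", ("cm / s", [], None)),  # hypothetical
        )
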
diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/halo_catalogs/halo_catalog/io.py
--- a/yt/frontends/halo_catalogs/halo_catalog/io.py
+++ /dev/null
@@ -1,119 +0,0 @@
-"""
-HaloCatalog data-file handling function
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-import h5py
-import numpy as np
-
-from yt.utilities.exceptions import *
-from yt.funcs import mylog
-
-from yt.utilities.io_handler import \
-    BaseIOHandler
-
-from yt.utilities.lib.geometry_utils import compute_morton
-
-from yt.geometry.oct_container import _ORDER_MAX
-
-class IOHandlerHaloCatalogHDF5(BaseIOHandler):
-    _dataset_type = "halocatalog_hdf5"
-
-    def _read_fluid_selection(self, chunks, selector, fields, size):
-        raise NotImplementedError
-
-    def _read_particle_coords(self, chunks, ptf):
-        # This will read chunks and yield the results.
-        chunks = list(chunks)
-        data_files = set([])
-        # Only support halo reading for now.
-        assert(len(ptf) == 1)
-        assert(ptf.keys()[0] == "halos")
-        for chunk in chunks:
-            for obj in chunk.objs:
-                data_files.update(obj.data_files)
-        for data_file in sorted(data_files):
-            pcount = data_file.header['num_halos']
-            with h5py.File(data_file.filename, "r") as f:
-                x = f['particle_position_x'].value.astype("float64")
-                y = f['particle_position_y'].value.astype("float64")
-                z = f['particle_position_z'].value.astype("float64")
-                yield "halos", (x, y, z)
-
-    def _read_particle_fields(self, chunks, ptf, selector):
-        # Now we have all the sizes, and we can allocate
-        chunks = list(chunks)
-        data_files = set([])
-        # Only support halo reading for now.
-        assert(len(ptf) == 1)
-        assert(ptf.keys()[0] == "halos")
-        for chunk in chunks:
-            for obj in chunk.objs:
-                data_files.update(obj.data_files)
-        for data_file in sorted(data_files):
-            pcount = data_file.header['num_halos']
-            with h5py.File(data_file.filename, "r") as f:
-                for ptype, field_list in sorted(ptf.items()):
-                    x = f['particle_position_x'].value.astype("float64")
-                    y = f['particle_position_y'].value.astype("float64")
-                    z = f['particle_position_z'].value.astype("float64")
-                    mask = selector.select_points(x, y, z, 0.0)
-                    del x, y, z
-                    if mask is None: continue
-                    for field in field_list:
-                        data = f[field][mask].astype("float64")
-                        yield (ptype, field), data
-
-    def _initialize_index(self, data_file, regions):
-        pcount = data_file.header["num_halos"]
-        morton = np.empty(pcount, dtype='uint64')
-        mylog.debug("Initializing index % 5i (% 7i particles)",
-                    data_file.file_id, pcount)
-        ind = 0
-        with h5py.File(data_file.filename, "r") as f:
-            if not f.keys(): return None
-            pos = np.empty((pcount, 3), dtype="float64")
-            pos = data_file.ds.arr(pos, "code_length")
-            dx = np.finfo(f['particle_position_x'].dtype).eps
-            dx = 2.0*self.ds.quan(dx, "code_length")
-            pos[:,0] = f["particle_position_x"].value
-            pos[:,1] = f["particle_position_y"].value
-            pos[:,2] = f["particle_position_z"].value
-            # These are 32 bit numbers, so we give a little lee-way.
-            # Otherwise, for big sets of particles, we often will bump into the
-            # domain edges.  This helps alleviate that.
-            np.clip(pos, self.ds.domain_left_edge + dx,
-                         self.ds.domain_right_edge - dx, pos)
-            if np.any(pos.min(axis=0) < self.ds.domain_left_edge) or \
-               np.any(pos.max(axis=0) > self.ds.domain_right_edge):
-                raise YTDomainOverflow(pos.min(axis=0),
-                                       pos.max(axis=0),
-                                       self.ds.domain_left_edge,
-                                       self.ds.domain_right_edge)
-            regions.add_data_file(pos, data_file.file_id)
-            morton[ind:ind+pos.shape[0]] = compute_morton(
-                pos[:,0], pos[:,1], pos[:,2],
-                data_file.ds.domain_left_edge,
-                data_file.ds.domain_right_edge)
-        return morton
-
-    def _count_particles(self, data_file):
-        return {'halos': data_file.header['num_halos']}
-
-    def _identify_fields(self, data_file):
-        with h5py.File(data_file.filename, "r") as f:
-            fields = [("halos", field) for field in f]
-            units = dict([(("halos", field), 
-                           f[field].attrs["units"]) for field in f])
-        return fields, units

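_initialize_index above clips each position a couple of machine epsilons inside the domain (so 32-bit coordinates cannot land exactly on an edge), registers the positions with the region index, and converts them to keys with compute_morton, a compiled yt utility. A Morton (Z-order) key interleaves the bits of the three quantized coordinates, so sorting particles by key orders them along a space-filling curve and keeps spatially nearby particles nearby in the index. A pure-Python sketch of that bit-interleaving idea, assuming a 21-bit quantization depth per axis (an illustrative choice, not necessarily yt's):

    def _spread_bits(v):
        # Insert two zero bits between each of the low 21 bits of v
        # (the standard 64-bit dilation used for 3D Morton codes).
        v &= 0x1fffff
        v = (v | v << 32) & 0x1f00000000ffff
        v = (v | v << 16) & 0x1f0000ff0000ff
        v = (v | v << 8)  & 0x100f00f00f00f00f
        v = (v | v << 4)  & 0x10c30c30c30c30c3
        v = (v | v << 2)  & 0x1249249249249249
        return v

    def morton_key(pos, left_edge, right_edge, order=21):
        # Quantize each coordinate to `order` bits within the domain, then
        # interleave: x takes bit offset 0, y offset 1, z offset 2.
        key = 0
        for axis in range(3):
            width = right_edge[axis] - left_edge[axis]
            frac = (pos[axis] - left_edge[axis]) / width
            q = min(int(frac * (1 << order)), (1 << order) - 1)
            key |= _spread_bits(q) << axis
        return key

    # Nearby positions map to nearby keys:
    # morton_key((0.50, 0.50, 0.50), (0.0,)*3, (1.0,)*3)
    # morton_key((0.51, 0.50, 0.50), (0.0,)*3, (1.0,)*3)
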
diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/halo_catalogs/owls_subfind/__init__.py
--- a/yt/frontends/halo_catalogs/owls_subfind/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-"""
-API for HaloCatalog frontend.
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/halo_catalogs/owls_subfind/api.py
--- a/yt/frontends/halo_catalogs/owls_subfind/api.py
+++ /dev/null
@@ -1,24 +0,0 @@
-"""
-API for OWLSSubfind frontend
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2014, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from .data_structures import \
-     OWLSSubfindDataset
-
-from .io import \
-     IOHandlerOWLSSubfindHDF5
-
-from .fields import \
-     OWLSSubfindFieldInfo

This diff is so big that we needed to truncate the remainder.

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving it
because you have the commit notification service enabled and are a
recipient of this email.