[yt-svn] commit/yt: 36 new changesets

commits-noreply at bitbucket.org
Thu Nov 6 13:40:53 PST 2014


36 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/cee134553e0b/
Changeset:   cee134553e0b
Branch:      yt
User:        brittonsmith
Date:        2014-10-21 22:16:44+00:00
Summary:     Moving gadget frontend into its own directory.
Affected #:  6 files

diff -r f75c6a2bc84426a9fdc6da2e48d33033143d5d08 -r cee134553e0b1b47c0f2830c748e449bb4a93171 yt/frontends/sph/gadget/api.py
--- /dev/null
+++ b/yt/frontends/sph/gadget/api.py
@@ -0,0 +1,22 @@
+"""
+API for Gadget frontend
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from .data_structures import \
+      GadgetDataset, \
+      GadgetHDF5Dataset
+
+from .io import \
+      IOHandlerGadgetBinary

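With the frontend split into its own package, downstream imports go through
the new api module; a minimal sketch against the paths introduced in this
changeset:

    # Import sketch for the new layout; names as exported by api.py above.
    from yt.frontends.sph.gadget.api import \
        GadgetDataset, GadgetHDF5Dataset, IOHandlerGadgetBinary
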
diff -r f75c6a2bc84426a9fdc6da2e48d33033143d5d08 -r cee134553e0b1b47c0f2830c748e449bb4a93171 yt/frontends/sph/gadget/data_structures.py
--- /dev/null
+++ b/yt/frontends/sph/gadget/data_structures.py
@@ -0,0 +1,330 @@
+"""
+Data structures for Gadget frontend
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+import numpy as np
+import stat
+import os
+import types
+
+from yt.data_objects.static_output import \
+    ParticleFile
+from yt.frontends.sph.data_structures import \
+    ParticleDataset
+from yt.geometry.particle_geometry_handler import \
+    ParticleIndex
+from yt.utilities.cosmology import \
+    Cosmology
+from yt.utilities.definitions import \
+    sec_conversion
+from yt.utilities.fortran_utils import read_record
+from yt.utilities.logger import ytLogger as mylog
+
+from .fields import \
+    GadgetFieldInfo
+from .definitions import \
+    gadget_header_specs, \
+    gadget_field_specs, \
+    gadget_ptype_specs
+
+def _fix_unit_ordering(unit):
+    # Accept either (value, "unit") or ("unit", value) and return the
+    # normalized (value, "unit") ordering.
+    if isinstance(unit[0], types.StringTypes):
+        unit = unit[1], unit[0]
+    return unit
+
+class GadgetBinaryFile(ParticleFile):
+    def __init__(self, ds, io, filename, file_id):
+        with open(filename, "rb") as f:
+            self.header = read_record(f, ds._header_spec)
+            self._position_offset = f.tell()
+            f.seek(0, os.SEEK_END)
+            self._file_size = f.tell()
+
+        super(GadgetBinaryFile, self).__init__(ds, io, filename, file_id)
+
+    def _calculate_offsets(self, field_list):
+        self.field_offsets = self.io._calculate_field_offsets(
+            field_list, self.total_particles,
+            self._position_offset, self._file_size)
+
+class GadgetDataset(ParticleDataset):
+    _index_class = ParticleIndex
+    _file_class = GadgetBinaryFile
+    _field_info_class = GadgetFieldInfo
+    _particle_mass_name = "Mass"
+    _particle_coordinates_name = "Coordinates"
+    _particle_velocity_name = "Velocities"
+    _suffix = ""
+
+    def __init__(self, filename, dataset_type="gadget_binary",
+                 additional_fields=(),
+                 unit_base=None, n_ref=64,
+                 over_refine_factor=1,
+                 bounding_box = None,
+                 header_spec = "default",
+                 field_spec = "default",
+                 ptype_spec = "default"):
+        if self._instantiated: return
+        self._header_spec = self._setup_binary_spec(
+            header_spec, gadget_header_specs)
+        self._field_spec = self._setup_binary_spec(
+            field_spec, gadget_field_specs)
+        self._ptype_spec = self._setup_binary_spec(
+            ptype_spec, gadget_ptype_specs)
+        self.n_ref = n_ref
+        self.over_refine_factor = over_refine_factor
+        self.storage_filename = None
+        if unit_base is not None and "UnitLength_in_cm" in unit_base:
+            # We assume this is comoving, because in the absence of comoving
+            # integration the redshift will be zero.
+            unit_base['cmcm'] = 1.0 / unit_base["UnitLength_in_cm"]
+        self._unit_base = unit_base
+        if bounding_box is not None:
+            bbox = np.array(bounding_box, dtype="float64")
+            if bbox.shape == (2, 3):
+                bbox = bbox.transpose()
+            self.domain_left_edge = bbox[:,0]
+            self.domain_right_edge = bbox[:,1]
+        else:
+            self.domain_left_edge = self.domain_right_edge = None
+        super(GadgetDataset, self).__init__(filename, dataset_type)
+
+    def _setup_binary_spec(self, spec, spec_dict):
+        if isinstance(spec, types.StringTypes):
+            _hs = ()
+            for hs in spec.split("+"):
+                _hs += spec_dict[hs]
+            spec = _hs
+        return spec
+
+    def __repr__(self):
+        return os.path.basename(self.parameter_filename).split(".")[0]
+
+    def _get_hvals(self):
+        # The entries in this header are capitalized and named to match Table 4
+        # in the GADGET-2 user guide.
+
+        with open(self.parameter_filename, "rb") as f:
+            hvals = read_record(f, self._header_spec)
+        for i in hvals:
+            if len(hvals[i]) == 1:
+                hvals[i] = hvals[i][0]
+        return hvals
+
+    def _parse_parameter_file(self):
+
+        hvals = self._get_hvals()
+
+        self.dimensionality = 3
+        self.refine_by = 2
+        self.parameters["HydroMethod"] = "sph"
+        self.unique_identifier = \
+            int(os.stat(self.parameter_filename)[stat.ST_CTIME])
+        # Set standard values
+
+        # We may have an overridden bounding box.
+        if self.domain_left_edge is None:
+            self.domain_left_edge = np.zeros(3, "float64")
+            self.domain_right_edge = np.ones(3, "float64") * hvals["BoxSize"]
+        nz = 1 << self.over_refine_factor
+        self.domain_dimensions = np.ones(3, "int32") * nz
+        self.periodicity = (True, True, True)
+
+        self.cosmological_simulation = 1
+
+        self.current_redshift = hvals["Redshift"]
+        self.omega_lambda = hvals["OmegaLambda"]
+        self.omega_matter = hvals["Omega0"]
+        self.hubble_constant = hvals["HubbleParam"]
+        # According to the Gadget manual, OmegaLambda will be zero for
+        # non-cosmological datasets.  However, it may be the case that
+        # individuals are running cosmological simulations *without* Lambda, in
+        # which case we may be doing something incorrect here.
+        # It may be possible to deduce whether ComovingIntegration is on
+        # somehow, but opinions on this vary.
+        if self.omega_lambda == 0.0:
+            mylog.info("Omega Lambda is 0.0, so we are turning off Cosmology.")
+            self.hubble_constant = 1.0  # So that scaling comes out correct
+            self.cosmological_simulation = 0
+            self.current_redshift = 0.0
+            # This may not be correct.
+            self.current_time = hvals["Time"] * sec_conversion["Gyr"]
+        else:
+            # Now we calculate our time based on the cosmology, because in
+            # ComovingIntegration hvals["Time"] will in fact be the expansion
+            # factor, not the actual integration time, so we re-calculate
+            # global time from our Cosmology.
+            cosmo = Cosmology(self.hubble_constant,
+                              self.omega_matter, self.omega_lambda)
+            self.current_time = cosmo.hubble_time(self.current_redshift)
+            mylog.info("Calculating time from %0.3e to be %0.3e seconds",
+                       hvals["Time"], self.current_time)
+        self.parameters = hvals
+
+        prefix = self.parameter_filename.split(".", 1)[0]
+
+        if hvals["NumFiles"] > 1:
+            self.filename_template = "%s.%%(num)s%s" % (prefix, self._suffix)
+        else:
+            self.filename_template = self.parameter_filename
+
+        self.file_count = hvals["NumFiles"]
+
+    def _set_code_unit_attributes(self):
+        # If no units passed in by user, set a sane default (Gadget-2 user guide).
+        if self._unit_base is None:
+            if self.cosmological_simulation == 1:
+                mylog.info("Assuming length units are in kpc/h (comoving)")
+                self._unit_base = dict(length = (1.0, "kpccm/h"))
+            else:
+                mylog.info("Assuming length units are in kpc (physical)")
+                self._unit_base = dict(length = (1.0, "kpc"))
+                
+        # If units passed in by user, decide what to do about
+        # co-moving and factors of h
+        unit_base = self._unit_base or {}
+        if "length" in unit_base:
+            length_unit = unit_base["length"]
+        elif "UnitLength_in_cm" in unit_base:
+            if self.cosmological_simulation == 0:
+                length_unit = (unit_base["UnitLength_in_cm"], "cm")
+            else:
+                length_unit = (unit_base["UnitLength_in_cm"], "cmcm/h")
+        else:
+            raise RuntimeError(
+                "unit_base must contain a 'length' or 'UnitLength_in_cm' entry")
+        length_unit = _fix_unit_ordering(length_unit)
+        self.length_unit = self.quan(length_unit[0], length_unit[1])
+
+        unit_base = self._unit_base or {}
+        if "velocity" in unit_base:
+            velocity_unit = unit_base["velocity"]
+        elif "UnitVelocity_in_cm_per_s" in unit_base:
+            velocity_unit = (unit_base["UnitVelocity_in_cm_per_s"], "cm/s")
+        else:
+            velocity_unit = (1e5, "cm/s")
+        velocity_unit = _fix_unit_ordering(velocity_unit)
+        self.velocity_unit = self.quan(velocity_unit[0], velocity_unit[1])
+
+        # We set hubble_constant = 1.0 for non-cosmology, so this is safe.
+        # Default to 1e10 Msun/h if mass is not specified.
+        if "mass" in unit_base:
+            mass_unit = unit_base["mass"]
+        elif "UnitMass_in_g" in unit_base:
+            if self.cosmological_simulation == 0:
+                mass_unit = (unit_base["UnitMass_in_g"], "g")
+            else:
+                mass_unit = (unit_base["UnitMass_in_g"], "g/h")
+        else:
+            # Sane default
+            mass_unit = (1.0, "1e10*Msun/h")
+        mass_unit = _fix_unit_ordering(mass_unit)
+        self.mass_unit = self.quan(mass_unit[0], mass_unit[1])
+        self.time_unit = self.length_unit / self.velocity_unit
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        # We do not allow load() of these files.
+        return False
+
+
+class GadgetHDF5Dataset(GadgetDataset):
+    _file_class = ParticleFile
+    _field_info_class = GadgetFieldInfo
+    _particle_mass_name = "Masses"
+    _suffix = ".hdf5"
+
+    def __init__(self, filename, dataset_type="gadget_hdf5", 
+                 unit_base = None, n_ref=64,
+                 over_refine_factor=1,
+                 bounding_box = None):
+        self.storage_filename = None
+        filename = os.path.abspath(filename)
+        super(GadgetHDF5Dataset, self).__init__(
+            filename, dataset_type, unit_base=unit_base, n_ref=n_ref,
+            over_refine_factor=over_refine_factor,
+            bounding_box = bounding_box)
+
+    def _get_hvals(self):
+        handle = h5py.File(self.parameter_filename, mode="r")
+        hvals = {}
+        hvals.update((str(k), v) for k, v in handle["/Header"].attrs.items())
+        # Compat reasons.
+        hvals["NumFiles"] = hvals["NumFilesPerSnapshot"]
+        hvals["Massarr"] = hvals["MassTable"]
+        handle.close()
+        return hvals
+
+    def _get_uvals(self):
+        handle = h5py.File(self.parameter_filename, mode="r")
+        uvals = {}
+        uvals.update((str(k), v) for k, v in handle["/Units"].attrs.items())
+        handle.close()
+        return uvals
+
+
+
+    def _set_owls_eagle(self):
+
+        self.dimensionality = 3
+        self.refine_by = 2
+        self.parameters["HydroMethod"] = "sph"
+        self.unique_identifier = \
+            int(os.stat(self.parameter_filename)[stat.ST_CTIME])
+
+        self._unit_base = self._get_uvals()
+        self._unit_base['cmcm'] = 1.0 / self._unit_base["UnitLength_in_cm"]
+
+        self.current_redshift = self.parameters["Redshift"]
+        self.omega_lambda = self.parameters["OmegaLambda"]
+        self.omega_matter = self.parameters["Omega0"]
+        self.hubble_constant = self.parameters["HubbleParam"]
+
+        if self.domain_left_edge is None:
+            self.domain_left_edge = np.zeros(3, "float64")
+            self.domain_right_edge = np.ones(3, "float64") * self.parameters["BoxSize"]
+
+        nz = 1 << self.over_refine_factor
+        self.domain_dimensions = np.ones(3, "int32") * nz
+
+        self.cosmological_simulation = 1
+        self.periodicity = (True, True, True)
+
+        prefix = os.path.abspath(self.parameter_filename.split(".", 1)[0])
+        suffix = self.parameter_filename.rsplit(".", 1)[-1]
+        self.filename_template = "%s.%%(num)i.%s" % (prefix, suffix)
+        self.file_count = self.parameters["NumFilesPerSnapshot"]
+
+    def _set_owls_eagle_units(self):
+
+        # note the contents of the HDF5 Units group are in _unit_base 
+        # note the velocity stored on disk is sqrt(a) dx/dt 
+        self.length_unit = self.quan(self._unit_base["UnitLength_in_cm"], 'cmcm/h')
+        self.mass_unit = self.quan(self._unit_base["UnitMass_in_g"], 'g/h')
+        self.velocity_unit = self.quan(self._unit_base["UnitVelocity_in_cm_per_s"], 'cm/s')
+        self.time_unit = self.quan(self._unit_base["UnitTime_in_s"], 's/h')
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        try:
+            fileh = h5py.File(args[0], mode='r')
+            if "Constants" not in fileh["/"].keys() and \
+               "Header" in fileh["/"].keys():
+                fileh.close()
+                return True
+            fileh.close()
+        except Exception:
+            pass
+        return False

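For context on the unit logic in _set_code_unit_attributes above, a direct
instantiation might look like the following sketch (load() is disabled for
the binary format via _is_valid); the filename and values are illustrative,
not part of the changeset:

    # Hypothetical usage; "snapshot_010" is an assumed Gadget binary snapshot.
    from yt.frontends.sph.gadget.api import GadgetDataset
    ds = GadgetDataset(
        "snapshot_010",
        unit_base={"UnitLength_in_cm": 3.085678e21},  # 1 kpc in cm
        bounding_box=[[0.0, 25.0], [0.0, 25.0], [0.0, 25.0]])
    # For a cosmological run, UnitLength_in_cm is interpreted as comoving
    # cm/h ("cmcm/h"); for a non-cosmological run it is plain cm.
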
diff -r f75c6a2bc84426a9fdc6da2e48d33033143d5d08 -r cee134553e0b1b47c0f2830c748e449bb4a93171 yt/frontends/sph/gadget/definitions.py
--- /dev/null
+++ b/yt/frontends/sph/gadget/definitions.py
@@ -0,0 +1,79 @@
+
+gadget_ptypes = ("Gas", "Halo", "Disk", "Bulge", "Stars", "Bndry")
+ghdf5_ptypes  = ("PartType0", "PartType1", "PartType2", "PartType3",
+                 "PartType4", "PartType5")
+
+gadget_header_specs = dict(
+    default      = (('Npart', 6, 'i'),
+                    ('Massarr', 6, 'd'),
+                    ('Time', 1, 'd'),
+                    ('Redshift', 1, 'd'),
+                    ('FlagSfr', 1, 'i'),
+                    ('FlagFeedback', 1, 'i'),
+                    ('Nall', 6, 'i'),
+                    ('FlagCooling', 1, 'i'),
+                    ('NumFiles', 1, 'i'),
+                    ('BoxSize', 1, 'd'),
+                    ('Omega0', 1, 'd'),
+                    ('OmegaLambda', 1, 'd'),
+                    ('HubbleParam', 1, 'd'),
+                    ('FlagAge', 1, 'i'),
+                    ('FlagMetals', 1, 'i'),
+                    ('NallHW', 6, 'i'),
+                    ('unused', 16, 'i')),
+    pad32       = (('empty',  32, 'c'),),
+    pad64       = (('empty',  64, 'c'),),
+    pad128      = (('empty', 128, 'c'),),
+    pad256      = (('empty', 256, 'c'),),
+)
+
+gadget_ptype_specs = dict(
+    default = ( "Gas",
+                "Halo",
+                "Disk",
+                "Bulge",
+                "Stars",
+                "Bndry" )
+)
+
+gadget_field_specs = dict(
+    default = ( "Coordinates",
+                "Velocities",
+                "ParticleIDs",
+                "Mass",
+                ("InternalEnergy", "Gas"),
+                ("Density", "Gas"),
+                ("SmoothingLength", "Gas"),
+    ),
+    agora_unlv = ( "Coordinates",
+                   "Velocities",
+                   "ParticleIDs",
+                   "Mass",
+                   ("InternalEnergy", "Gas"),
+                   ("Density", "Gas"),
+                   ("Electron_Number_Density", "Gas"),
+                   ("HI_NumberDensity", "Gas"),
+                   ("SmoothingLength", "Gas"),
+    )
+)
+
+
+eaglenetwork_ions = \
+    ('electron', 'H1', 'H2', 'H_m', 'He1', 'He2','He3', 'C1',\
+     'C2', 'C3', 'C4', 'C5', 'C6', 'C7', 'C_m', 'N1', 'N2', \
+     'N3', 'N4', 'N5', 'N6', 'N7', 'N8', 'O1', 'O2', 'O3', \
+     'O4', 'O5', 'O6', 'O7', 'O8', 'O9', 'O_m', 'Ne1', 'Ne2',\
+     'Ne3', 'Ne4', 'Ne5', 'Ne6', 'Ne7', 'Ne8', 'Ne9', 'Ne10',\
+     'Ne11', 'Mg1', 'Mg2', 'Mg3', 'Mg4', 'Mg5', 'Mg6', 'Mg7',\
+     'Mg8', 'Mg9', 'Mg10', 'Mg11', 'Mg12', 'Mg13', 'Si1', 'Si2',\
+     'Si3', 'Si4', 'Si5', 'Si6', 'Si7', 'Si8', 'Si9', 'Si10',\
+     'Si11', 'Si12', 'Si13', 'Si14', 'Si15', 'Si16', 'Si17',\
+     'Ca1', 'Ca2', 'Ca3', 'Ca4', 'Ca5', 'Ca6', 'Ca7', 'Ca8',\
+     'Ca9', 'Ca10', 'Ca11', 'Ca12', 'Ca13', 'Ca14', 'Ca15',\
+     'Ca16', 'Ca17', 'Ca18', 'Ca19', 'Ca20', 'Ca21', 'Fe1',\
+     'Fe2', 'Fe3', 'Fe4', 'Fe5', 'Fe6', 'Fe7', 'Fe8', 'Fe9',\
+     'Fe10', 'Fe11', 'Fe12', 'Fe13', 'Fe14', 'Fe15', 'Fe16',\
+     'Fe17', 'Fe18', 'Fe19', 'Fe20', 'Fe21', 'Fe22', 'Fe23',\
+     'Fe24', 'Fe25', 'Fe26', 'Fe27',)
+
+eaglenetwork_ion_lookup = {ion:index for index, ion in enumerate(eaglenetwork_ions)}

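The spec strings accepted by GadgetDataset can name several of the tuples
above joined with "+"; _setup_binary_spec expands them by concatenation.  A
small sketch of that expansion ("default+pad32" is a hypothetical choice):

    spec = "default+pad32"
    _hs = ()
    for hs in spec.split("+"):
        _hs += gadget_header_specs[hs]
    # _hs is now the default header layout followed by a 32-byte pad entry.
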
diff -r f75c6a2bc84426a9fdc6da2e48d33033143d5d08 -r cee134553e0b1b47c0f2830c748e449bb4a93171 yt/frontends/sph/gadget/fields.py
--- /dev/null
+++ b/yt/frontends/sph/gadget/fields.py
@@ -0,0 +1,69 @@
+"""
+Gadget fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import os
+import numpy as np
+
+from yt.fields.field_info_container import \
+    FieldInfoContainer
+
+from yt.fields.species_fields import \
+    setup_species_fields
+
+class GadgetFieldInfo(FieldInfoContainer):
+    known_other_fields = ()
+
+    known_particle_fields = (
+        ("Mass", ("code_mass", ["particle_mass"], None)),
+        ("Masses", ("code_mass", ["particle_mass"], None)),
+        ("Coordinates", ("code_length", ["particle_position"], None)),
+        ("Velocity", ("code_velocity", ["particle_velocity"], None)),
+        ("Velocities", ("code_velocity", ["particle_velocity"], None)),
+        ("ParticleIDs", ("", ["particle_index"], None)),
+        ("InternalEnergy", ("", ["thermal_energy"], None)),
+        ("SmoothingLength", ("code_length", ["smoothing_length"], None)),
+        ("Density", ("code_mass / code_length**3", ["density"], None)),
+        ("MaximumTemperature", ("K", [], None)),
+        ("Temperature", ("K", ["temperature"], None)),
+        ("Epsilon", ("code_length", [], None)),
+        ("Metals", ("code_metallicity", ["metallicity"], None)),
+        ("Metallicity", ("code_metallicity", ["metallicity"], None)),
+        ("Phi", ("code_length", [], None)),
+        ("FormationTime", ("code_time", ["creation_time"], None)),
+        # These are metallicity fields that get discovered for FIRE simulations
+        ("Metallicity_00", ("", ["metallicity"], None)),
+        ("Metallicity_01", ("", ["He_fraction"], None)),
+        ("Metallicity_02", ("", ["C_fraction"], None)),
+        ("Metallicity_03", ("", ["N_fraction"], None)),
+        ("Metallicity_04", ("", ["O_fraction"], None)),
+        ("Metallicity_05", ("", ["Ne_fraction"], None)),
+        ("Metallicity_06", ("", ["Mg_fraction"], None)),
+        ("Metallicity_07", ("", ["Si_fraction"], None)),
+        ("Metallicity_08", ("", ["S_fraction"], None)),
+        ("Metallicity_09", ("", ["Ca_fraction"], None)),
+        ("Metallicity_10", ("", ["Fe_fraction"], None)),
+    )
+
+    def __init__(self, *args, **kwargs):
+        super(GadgetFieldInfo, self).__init__(*args, **kwargs)
+        # Special case for FIRE
+        if ("PartType0", "Metallicity_00") in self.field_list:
+            self.species_names += ["He", "C", "N", "O", "Ne", "Mg", "Si", "S",
+                "Ca", "Fe"]
+
+    def setup_particle_fields(self, ptype, *args, **kwargs):
+        super(GadgetFieldInfo, self).setup_particle_fields(ptype, *args, **kwargs)
+        setup_species_fields(self, ptype)

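Each entry in known_particle_fields above follows the FieldInfoContainer
convention of mapping an on-disk field name to a (units, aliases,
display_name) tuple; one entry, annotated:

    ("Density", ("code_mass / code_length**3",  # units the field is read in
                 ["density"],                   # yt alias(es)
                 None))                         # display name (None = default)
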
diff -r f75c6a2bc84426a9fdc6da2e48d33033143d5d08 -r cee134553e0b1b47c0f2830c748e449bb4a93171 yt/frontends/sph/gadget/io.py
--- /dev/null
+++ b/yt/frontends/sph/gadget/io.py
@@ -0,0 +1,203 @@
+"""
+Gadget data-file handling functions
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+import numpy as np
+import os
+import types
+
+from yt.geometry.oct_container import \
+    _ORDER_MAX
+from yt.utilities.io_handler import \
+    BaseIOHandler
+from yt.utilities.lib.geometry_utils import \
+    compute_morton
+from yt.utilities.logger import ytLogger as mylog
+
+# Sentinel for field-spec entries that only exist for variable-mass particle
+# types (see _identify_fields below); defined here so the module is
+# self-contained.  In yt this may instead be imported from a shared module.
+class ZeroMass(object):
+    pass
+
+class IOHandlerGadgetBinary(BaseIOHandler):
+    _dataset_type = "gadget_binary"
+    _vector_fields = ("Coordinates", "Velocity", "Velocities")
+
+    # Particle types (Table 3 in GADGET-2 user guide)
+    #
+    # Blocks in the file:
+    #   HEAD
+    #   POS
+    #   VEL
+    #   ID
+    #   MASS    (variable mass only)
+    #   U       (gas only)
+    #   RHO     (gas only)
+    #   HSML    (gas only)
+    #   POT     (only if enabled in makefile)
+    #   ACCE    (only if enabled in makefile)
+    #   ENDT    (only if enabled in makefile)
+    #   TSTP    (only if enabled in makefile)
+
+    _var_mass = None
+
+    def __init__(self, ds, *args, **kwargs):
+        self._fields = ds._field_spec
+        self._ptypes = ds._ptype_spec
+        super(IOHandlerGadgetBinary, self).__init__(ds, *args, **kwargs)
+
+    @property
+    def var_mass(self):
+        if self._var_mass is None:
+            vm = []
+            for i, v in enumerate(self.ds["Massarr"]):
+                if v == 0:
+                    vm.append(self._ptypes[i])
+            self._var_mass = tuple(vm)
+        return self._var_mass
+
+    def _read_fluid_selection(self, chunks, selector, fields, size):
+        raise NotImplementedError
+
+    def _read_particle_coords(self, chunks, ptf):
+        data_files = set([])
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in sorted(data_files):
+            poff = data_file.field_offsets
+            tp = data_file.total_particles
+            f = open(data_file.filename, "rb")
+            for ptype in ptf:
+                # This is where we could implement sub-chunking
+                f.seek(poff[ptype, "Coordinates"], os.SEEK_SET)
+                pos = self._read_field_from_file(f,
+                            tp[ptype], "Coordinates")
+                yield ptype, (pos[:,0], pos[:,1], pos[:,2])
+            f.close()
+
+    def _read_particle_fields(self, chunks, ptf, selector):
+        data_files = set([])
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in sorted(data_files):
+            poff = data_file.field_offsets
+            tp = data_file.total_particles
+            f = open(data_file.filename, "rb")
+            for ptype, field_list in sorted(ptf.items()):
+                f.seek(poff[ptype, "Coordinates"], os.SEEK_SET)
+                pos = self._read_field_from_file(f,
+                            tp[ptype], "Coordinates")
+                mask = selector.select_points(
+                    pos[:,0], pos[:,1], pos[:,2], 0.0)
+                del pos
+                if mask is None: continue
+                for field in field_list:
+                    if field == "Mass" and ptype not in self.var_mass:
+                        data = np.empty(mask.sum(), dtype="float64")
+                        m = self.ds.parameters["Massarr"][
+                            self._ptypes.index(ptype)]
+                        data[:] = m
+                        yield (ptype, field), data
+                        continue
+                    f.seek(poff[ptype, field], os.SEEK_SET)
+                    data = self._read_field_from_file(f, tp[ptype], field)
+                    data = data[mask,...]
+                    yield (ptype, field), data
+            f.close()
+
+    def _read_field_from_file(self, f, count, name):
+        if count == 0: return
+        if name == "ParticleIDs":
+            dt = "uint32"
+        else:
+            dt = "float32"
+        if name in self._vector_fields:
+            count *= 3
+        arr = np.fromfile(f, dtype=dt, count = count)
+        if name in self._vector_fields:
+            arr = arr.reshape((count/3, 3), order="C")
+        return arr.astype("float64")
+
+    def _initialize_index(self, data_file, regions):
+        count = sum(data_file.total_particles.values())
+        DLE = data_file.ds.domain_left_edge
+        DRE = data_file.ds.domain_right_edge
+        dx = (DRE - DLE) / 2**_ORDER_MAX
+        with open(data_file.filename, "rb") as f:
+            # We add an additional 4 bytes for the first record marker.
+            f.seek(data_file._position_offset + 4)
+            # The first total_particles * 3 values are positions
+            pp = np.fromfile(f, dtype = 'float32', count = count*3)
+            pp.shape = (count, 3)
+        regions.add_data_file(pp, data_file.file_id, data_file.ds.filter_bbox)
+        morton = compute_morton(pp[:,0], pp[:,1], pp[:,2], DLE, DRE,
+                                data_file.ds.filter_bbox)
+        return morton
+
+    def _count_particles(self, data_file):
+        npart = dict((self._ptypes[i], v)
+            for i, v in enumerate(data_file.header["Npart"]))
+        return npart
+
+    # The header record is 256 bytes, but it is bracketed by 4-byte size
+    # markers; individual field values are 4 bytes each.
+    _field_size = 4
+    def _calculate_field_offsets(self, field_list, pcount,
+                                 offset, file_size = None):
+        # field_list is (ftype, fname) but the blocks are ordered
+        # (fname, ftype) in the file.
+        pos = offset
+        fs = self._field_size
+        offsets = {}
+        for field in self._fields:
+            if not isinstance(field, types.StringTypes):
+                field = field[0]
+            if not any( (ptype, field) in field_list
+                        for ptype in self._ptypes):
+                continue
+            pos += 4
+            any_ptypes = False
+            for ptype in self._ptypes:
+                if field == "Mass" and ptype not in self.var_mass:
+                    continue
+                if (ptype, field) not in field_list:
+                    continue
+                offsets[(ptype, field)] = pos
+                any_ptypes = True
+                if field in self._vector_fields:
+                    pos += 3 * pcount[ptype] * fs
+                else:
+                    pos += pcount[ptype] * fs
+            pos += 4
+            if not any_ptypes: pos -= 8
+        if file_size is not None:
+            if file_size != pos:
+                mylog.warning("Your Gadget-2 file may have extra " +
+                              "columns or different precision!" +
+                              " (%s file vs %s computed)",
+                              file_size, pos)
+        return offsets
+
+    def _identify_fields(self, domain):
+        # We can just look at the particle counts.
+        field_list = []
+        tp = domain.total_particles
+        for i, ptype in enumerate(self._ptypes):
+            count = tp[ptype]
+            if count == 0: continue
+            m = domain.header["Massarr"][i]
+            for field in self._fields:
+                if isinstance(field, types.TupleType):
+                    field, req = field
+                    if req is ZeroMass:
+                        if m > 0.0 : continue
+                    elif req != ptype:
+                        continue
+                field_list.append((ptype, field))
+        return field_list, {}

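The offset arithmetic in _calculate_field_offsets follows the Fortran-style
record layout sketched in the block comment above: every block, the 256-byte
header included, is bracketed by 4-byte size markers.  A worked example under
those assumptions:

    # Start of the POS (Coordinates) data in a single-file snapshot:
    #   4 (header start marker) + 256 (header) + 4 (end marker) = 264,
    #   which is _position_offset; + 4 (POS start marker) = 268.
    # Each later block advances by 4 + N_particles * width + 4 bytes, with
    # width = 3 * 4 bytes for vector fields and 4 bytes otherwise.
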

https://bitbucket.org/yt_analysis/yt/commits/fa5e0bb5d12f/
Changeset:   fa5e0bb5d12f
Branch:      yt
User:        brittonsmith
Date:        2014-10-22 14:29:40+00:00
Summary:     Moving tipsy frontend to its own directory.
Affected #:  5 files

diff -r cee134553e0b1b47c0f2830c748e449bb4a93171 -r fa5e0bb5d12f314e71237b5a42cfef177a151a71 yt/frontends/sph/tipsy/api.py
--- /dev/null
+++ b/yt/frontends/sph/tipsy/api.py
@@ -0,0 +1,18 @@
+"""
+API for Tipsy frontend
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from .data_structures import \
+     TipsyDataset

diff -r cee134553e0b1b47c0f2830c748e449bb4a93171 -r fa5e0bb5d12f314e71237b5a42cfef177a151a71 yt/frontends/sph/tipsy/data_structures.py
--- /dev/null
+++ b/yt/frontends/sph/tipsy/data_structures.py
@@ -0,0 +1,258 @@
+"""
+Data structures for Tipsy frontend
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+import stat
+import struct
+import glob
+import os
+
+from yt.frontends.sph.data_structures import \
+    ParticleDataset
+from yt.geometry.particle_geometry_handler import \
+    ParticleIndex
+from yt.data_objects.static_output import \
+    ParticleFile
+from yt.utilities.cosmology import \
+    Cosmology
+from yt.utilities.physical_constants import \
+    G, \
+    cm_per_kpc
+
+from .fields import \
+    TipsyFieldInfo
+
+class TipsyFile(ParticleFile):
+    def __init__(self, ds, io, filename, file_id):
+        # To go above 1 domain, we need to include an indexing step in the
+        # IOHandler, rather than simply reading from a single file.
+        assert file_id == 0
+        super(TipsyFile, self).__init__(ds, io, filename, file_id)
+        io._create_dtypes(self)
+        io._update_domain(self)  # automatically determine the domain size
+
+    def _calculate_offsets(self, field_list):
+        self.field_offsets = self.io._calculate_particle_offsets(self)
+
+class TipsyDataset(ParticleDataset):
+    _index_class = ParticleIndex
+    _file_class = TipsyFile
+    _field_info_class = TipsyFieldInfo
+    _particle_mass_name = "Mass"
+    _particle_coordinates_name = "Coordinates"
+    _header_spec = (('time',    'd'),
+                    ('nbodies', 'i'),
+                    ('ndim',    'i'),
+                    ('nsph',    'i'),
+                    ('ndark',   'i'),
+                    ('nstar',   'i'),
+                    ('dummy',   'i'))
+
+    def __init__(self, filename, dataset_type="tipsy",
+                 field_dtypes=None,
+                 unit_base=None,
+                 parameter_file=None,
+                 cosmology_parameters=None,
+                 n_ref=64, over_refine_factor=1):
+        self.n_ref = n_ref
+        self.over_refine_factor = over_refine_factor
+        if field_dtypes is None:
+            field_dtypes = {}
+        success, self.endian = self._validate_header(filename)
+        if not success:
+            print "SOMETHING HAS GONE WRONG.  NBODIES != SUM PARTICLES."
+            print "%s != (%s == %s + %s + %s)" % (
+                self.parameters['nbodies'],
+                tot,
+                self.parameters['nsph'],
+                self.parameters['ndark'],
+                self.parameters['nstar'])
+            print "Often this can be fixed by changing the 'endian' parameter."
+            print "This defaults to '>' but may in fact be '<'."
+            raise RuntimeError
+        self.storage_filename = None
+
+        # My understanding is that dtypes are set on a field by field basis,
+        # not on a (particle type, field) basis
+        self._field_dtypes = field_dtypes
+
+        self._unit_base = unit_base or {}
+        self._cosmology_parameters = cosmology_parameters
+        if parameter_file is not None:
+            parameter_file = os.path.abspath(parameter_file)
+        self._param_file = parameter_file
+        filename = os.path.abspath(filename)
+        super(TipsyDataset, self).__init__(filename, dataset_type)
+
+    def __repr__(self):
+        return os.path.basename(self.parameter_filename)
+
+    def _parse_parameter_file(self):
+
+        # Parsing the header of the tipsy file, from this we obtain
+        # the snapshot time and particle counts.
+
+        f = open(self.parameter_filename, "rb")
+        hh = self.endian + "".join(["%s" % (b) for a, b in self._header_spec])
+        hvals = dict([(a, c) for (a, b), c in zip(self._header_spec,
+                     struct.unpack(hh, f.read(struct.calcsize(hh))))])
+        self.parameters.update(hvals)
+        self._header_offset = f.tell()
+
+        # These are always true, for now.
+        self.dimensionality = 3
+        self.refine_by = 2
+        self.parameters["HydroMethod"] = "sph"
+
+
+        self.unique_identifier = \
+            int(os.stat(self.parameter_filename)[stat.ST_CTIME])
+
+        # Read in parameter file, if available.
+        if self._param_file is None:
+            pfn = glob.glob(os.path.join(self.directory, "*.param"))
+            assert len(pfn) < 2, \
+                "More than one param file is in the data directory"
+            if pfn == []:
+                pfn = None
+            else:
+                pfn = pfn[0]
+        else:
+            pfn = self._param_file
+
+        if pfn is not None:
+            for line in (l.strip() for l in open(pfn)):
+                # skip comment lines and blank lines
+                if line.startswith('#') or line == '':
+                    continue
+                # parse parameters according to tipsy parameter type
+                param, val = (i.strip() for i in line.split('=', 1))
+                val = val.split('#')[0]
+                if param.startswith('n') or param.startswith('i'):
+                    val = long(val)
+                elif param.startswith('d'):
+                    val = float(val)
+                elif param.startswith('b'):
+                    val = bool(float(val))
+                self.parameters[param] = val
+
+        self.current_time = hvals["time"]
+        nz = 1 << self.over_refine_factor
+        self.domain_dimensions = np.ones(3, "int32") * nz
+        periodic = self.parameters.get('bPeriodic', True)
+        period = self.parameters.get('dPeriod', None)
+        comoving = self.parameters.get('bComove', False)
+        self.periodicity = (periodic, periodic, periodic)
+        if comoving and period is None:
+            period = 1.0
+        if periodic and period is not None:
+            # If we are periodic, that sets our domain width to either 1 or dPeriod.
+            self.domain_left_edge = np.zeros(3, "float64") - 0.5*period
+            self.domain_right_edge = np.zeros(3, "float64") + 0.5*period
+        else:
+            self.domain_left_edge = None
+            self.domain_right_edge = None
+        if comoving:
+            cosm = self._cosmology_parameters or {}
+            self.scale_factor = hvals["time"]#In comoving simulations, time stores the scale factor a
+            self.cosmological_simulation = 1
+            dcosm = dict(current_redshift=(1.0/self.scale_factor)-1.0,
+                         omega_lambda=self.parameters.get('dLambda', cosm.get('omega_lambda',0.0)),
+                         omega_matter=self.parameters.get('dOmega0', cosm.get('omega_matter',0.0)),
+                         hubble_constant=self.parameters.get('dHubble0', cosm.get('hubble_constant',1.0)))
+            for param in dcosm.keys():
+                pval = dcosm[param]
+                setattr(self, param, pval)
+        else:
+            self.cosmological_simulation = 0
+            kpc_unit = self.parameters.get('dKpcUnit', 1.0)
+            self._unit_base['cm'] = 1.0 / (kpc_unit * cm_per_kpc)
+
+        self.filename_template = self.parameter_filename
+        self.file_count = 1
+
+        f.close()
+
+    def _set_derived_attrs(self):
+        if self.domain_left_edge is None or self.domain_right_edge is None:
+            self.domain_left_edge = np.nan
+            self.domain_right_edge = np.nan
+            self.index
+        super(TipsyDataset, self)._set_derived_attrs()
+
+    def _set_code_unit_attributes(self):
+        if self.cosmological_simulation:
+            mu = self.parameters.get('dMsolUnit', 1.)
+            lu = self.parameters.get('dKpcUnit', 1000.)
+            # In cosmological runs, lengths are stored as length*scale_factor
+            self.length_unit = self.quan(lu, 'kpc')*self.scale_factor
+            self.mass_unit = self.quan(mu, 'Msun')
+            density_unit = self.mass_unit/ (self.length_unit/self.scale_factor)**3
+            # Gasoline's hubble constant, dHubble0, is stored in units of
+            # proper code time; 3.2407793e-18 s^-1 is 100 km/s/Mpc.
+            self.hubble_constant *= np.sqrt(G.in_units('kpc**3*Msun**-1*s**-2')*density_unit).value/(3.2407793e-18)
+            cosmo = Cosmology(self.hubble_constant,
+                              self.omega_matter, self.omega_lambda)
+            self.current_time = cosmo.hubble_time(self.current_redshift)
+        else:
+            mu = self.parameters.get('dMsolUnit', 1.0)
+            self.mass_unit = self.quan(mu, 'Msun')
+            lu = self.parameters.get('dKpcUnit', 1.0)
+            self.length_unit = self.quan(lu, 'kpc')
+            density_unit = self.mass_unit / self.length_unit**3
+        self.time_unit = 1.0 / np.sqrt(G * density_unit)
+
+    @staticmethod
+    def _validate_header(filename):
+        '''
+        This method automatically detects whether the tipsy file is big or
+        little endian, and whether it is corrupt or invalid.  It returns a
+        tuple (valid, endianswap), where valid is a boolean that is True if
+        the file is a tipsy file, and endianswap is the endianness character,
+        '>' or '<'.
+        '''
+        try:
+            f = open(filename,'rb')
+        except Exception:
+            return False, 1
+        try:
+            f.seek(0, os.SEEK_END)
+            fs = f.tell()
+            f.seek(0, os.SEEK_SET)
+            #Read in the header
+            t, n, ndim, ng, nd, ns = struct.unpack("<diiiii", f.read(28))
+        except IOError:
+            return False, 1
+        endianswap = "<"
+        #Check Endianness
+        if (ndim < 1 or ndim > 3):
+            endianswap = ">"
+            f.seek(0)
+            t, n, ndim, ng, nd, ns = struct.unpack(">diiiii", f.read(28))
+        # The file is invalid if none of the size checks below hold.  The
+        # header is 28 bytes and may be followed by a 4-byte pad.  Next come
+        # gas particles at 48 bytes each, then 36 bytes per dark matter
+        # particle and 44 bytes per star particle.  If positions are stored
+        # as doubles, each of these sizes increases by 12 bytes.
+        if (fs != 28+48*ng+36*nd+44*ns and fs != 28+60*ng+48*nd+56*ns and
+                fs != 32+48*ng+36*nd+44*ns and fs != 32+60*ng+48*nd+56*ns):
+            f.close()
+            return False, 0
+        f.close()
+        return True, endianswap
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        return TipsyDataset._validate_header(args[0])[0]

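The size check in _validate_header encodes the tipsy layout directly; a
worked example with hypothetical particle counts:

    # ng, nd, ns = 1000, 2000, 500  (gas, dark, star; illustrative values)
    # float positions, no pad:     28 + 48*1000 + 36*2000 + 44*500 = 142028
    # float positions, 4-byte pad: 32 + 48*1000 + 36*2000 + 44*500 = 142032
    # double positions use 60/48/56 bytes per gas/dark/star particle instead.
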
diff -r cee134553e0b1b47c0f2830c748e449bb4a93171 -r fa5e0bb5d12f314e71237b5a42cfef177a151a71 yt/frontends/sph/tipsy/fields.py
--- /dev/null
+++ b/yt/frontends/sph/tipsy/fields.py
@@ -0,0 +1,46 @@
+"""
+Tipsy fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.frontends.sph.fields import \
+    SPHFieldInfo
+
+class TipsyFieldInfo(SPHFieldInfo):
+    aux_particle_fields = {
+        'uDotFB':("uDotFB", ("code_mass * code_velocity**2", ["uDotFB"], None)),
+        'uDotAV':("uDotAV", ("code_mass * code_velocity**2", ["uDotAV"], None)),
+        'uDotPdV':("uDotPdV", ("code_mass * code_velocity**2", ["uDotPdV"], None)),
+        'uDotHydro':("uDotHydro", ("code_mass * code_velocity**2", ["uDotHydro"], None)),
+        'uDotDiff':("uDotDiff", ("code_mass * code_velocity**2", ["uDotDiff"], None)),
+        'uDot':("uDot", ("code_mass * code_velocity**2", ["uDot"], None)),
+        'coolontime':("coolontime", ("code_time", ["coolontime"], None)),
+        'timeform':("timeform", ("code_time", ["timeform"], None)),
+        'massform':("massform", ("code_mass", ["massform"], None)),
+        'HI':("HI", ("dimensionless", ["HI"], None)),
+        'HII':("HII", ("dimensionless", ["HII"], None)),
+        'HeI':("HeI", ("dimensionless", ["HeI"], None)),
+        'HeII':("HeII", ("dimensionless", ["HeII"], None)),
+        'OxMassFrac':("OxMassFrac", ("dimensionless", ["OxMassFrac"], None)),
+        'FeMassFrac':("FeMassFrac", ("dimensionless", ["FeMassFrac"], None)),
+        'c':("c", ("code_velocity", ["c"], None)),
+        'acc':("acc", ("code_velocity / code_time", ["acc"], None)),
+        'accg':("accg", ("code_velocity / code_time", ["accg"], None))}
+    
+    def __init__(self, ds, field_list, slice_info = None):
+        for field in field_list:
+            if field[1] in self.aux_particle_fields.keys() and \
+                self.aux_particle_fields[field[1]] not in self.known_particle_fields:
+                self.known_particle_fields += (self.aux_particle_fields[field[1]],)
+        super(TipsyFieldInfo,self).__init__(ds, field_list, slice_info)

diff -r cee134553e0b1b47c0f2830c748e449bb4a93171 -r fa5e0bb5d12f314e71237b5a42cfef177a151a71 yt/frontends/sph/tipsy/io.py
--- /dev/null
+++ b/yt/frontends/sph/tipsy/io.py
@@ -0,0 +1,337 @@
+"""
+Tipsy data-file handling function
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import glob
+import os
+import struct
+
+import numpy as np
+
+from yt.geometry.oct_container import \
+    _ORDER_MAX
+from yt.utilities.io_handler import \
+    BaseIOHandler
+from yt.utilities.lib.geometry_utils import \
+    compute_morton
+from yt.utilities.logger import ytLogger as mylog
+
+CHUNKSIZE = 10000000
+
+class IOHandlerTipsyBinary(BaseIOHandler):
+    _dataset_type = "tipsy"
+    _vector_fields = ("Coordinates", "Velocity", "Velocities")
+
+    _pdtypes = None # dtypes, to be filled in later
+
+    _ptypes = ( "Gas",
+                "DarkMatter",
+                "Stars" )
+    _chunksize = 64*64*64
+
+    _aux_fields = None
+    _fields = ( ("Gas", "Mass"),
+                ("Gas", "Coordinates"),
+                ("Gas", "Velocities"),
+                ("Gas", "Density"),
+                ("Gas", "Temperature"),
+                ("Gas", "Epsilon"),
+                ("Gas", "Metals"),
+                ("Gas", "Phi"),
+                ("DarkMatter", "Mass"),
+                ("DarkMatter", "Coordinates"),
+                ("DarkMatter", "Velocities"),
+                ("DarkMatter", "Epsilon"),
+                ("DarkMatter", "Phi"),
+                ("Stars", "Mass"),
+                ("Stars", "Coordinates"),
+                ("Stars", "Velocities"),
+                ("Stars", "Metals"),
+                ("Stars", "FormationTime"),
+                ("Stars", "Epsilon"),
+                ("Stars", "Phi")
+              )
+
+    def __init__(self, *args, **kwargs):
+        self._aux_fields = []
+        super(IOHandlerTipsyBinary, self).__init__(*args, **kwargs)
+
+    def _read_fluid_selection(self, chunks, selector, fields, size):
+        raise NotImplementedError
+
+    def _read_aux_fields(self, field, mask, data_file):
+        """
+        Read in auxiliary files from gasoline/pkdgrav.
+        This method will automatically detect the format of the file.
+        """
+        filename = data_file.filename+'.'+field
+        dtype = None
+        # We need to do some fairly ugly detection to see what format the auxiliary
+        # files are in.  They can be either ascii or binary, and the binary files can be
+        # either floats, ints, or doubles.  We're going to use a try-catch cascade to
+        # determine the format.
+        try:  # ASCII
+            auxdata = np.genfromtxt(filename, skip_header=1)
+            if auxdata.size != np.sum(data_file.total_particles.values()):
+                print "Error reading auxiliary tipsy file"
+                raise RuntimeError
+        except ValueError:  # binary/xdr
+            f = open(filename, 'rb')
+            l = struct.unpack(data_file.ds.endian+"i", f.read(4))[0]
+            if l != np.sum(data_file.total_particles.values()):
+                print "Error reading auxiliary tipsy file"
+                raise RuntimeError
+            dtype = 'd'
+            if field in ('iord', 'igasorder', 'grp'):  # these fields are integers
+                dtype = 'i'
+            try:  # we load doubles by default and fall back to floats on error
+                auxdata = np.array(struct.unpack(data_file.ds.endian+(l*dtype), f.read()))
+            except struct.error:
+                f.seek(4)
+                dtype = 'f'
+                try:
+                    auxdata = np.array(struct.unpack(data_file.ds.endian+(l*dtype), f.read()))
+                except struct.error:  # none of the binary reads succeeded
+                    print "Error reading auxiliary tipsy file"
+                    raise RuntimeError
+
+        # Use the mask to slice out the appropriate particle type data
+        if mask.size == data_file.total_particles['Gas']:
+            return auxdata[:data_file.total_particles['Gas']]
+        elif mask.size == data_file.total_particles['DarkMatter']:
+            return auxdata[data_file.total_particles['Gas']:-data_file.total_particles['DarkMatter']]
+        else:
+            return auxdata[-data_file.total_particles['Stars']:]
+
+    def _fill_fields(self, fields, vals, mask, data_file):
+        if mask is None:
+            size = 0
+        else:
+            size = mask.sum()
+        rv = {}
+        for field in fields:
+            mylog.debug("Allocating %s values for %s", size, field)
+            if field in self._aux_fields: #Read each of the auxiliary fields
+                rv[field] = self._read_aux_fields(field, mask, data_file)
+            elif field in self._vector_fields:
+                rv[field] = np.empty((size, 3), dtype="float64")
+                if size == 0: continue
+                rv[field][:,0] = vals[field]['x'][mask]
+                rv[field][:,1] = vals[field]['y'][mask]
+                rv[field][:,2] = vals[field]['z'][mask]
+            else:
+                rv[field] = np.empty(size, dtype="float64")
+                if size == 0: continue
+                rv[field][:] = vals[field][mask]
+            if field == "Coordinates":
+                eps = np.finfo(rv[field].dtype).eps
+                for i in range(3):
+                    rv[field][:,i] = np.clip(rv[field][:,i],
+                        self.domain_left_edge[i] + eps,
+                        self.domain_right_edge[i] - eps)
+        return rv
+
+
+    def _read_particle_coords(self, chunks, ptf):
+        data_files = set([])
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in sorted(data_files):
+            poff = data_file.field_offsets
+            tp = data_file.total_particles
+            f = open(data_file.filename, "rb")
+            for ptype, field_list in sorted(ptf.items(), key=lambda a: poff[a[0]]):
+                f.seek(poff[ptype], os.SEEK_SET)
+                total = 0
+                while total < tp[ptype]:
+                    p = np.fromfile(f, self._pdtypes[ptype],
+                            count=min(self._chunksize, tp[ptype] - total))
+                    total += p.size
+                    d = [p["Coordinates"][ax].astype("float64") for ax in 'xyz']
+                    del p
+                    yield ptype, d
+            f.close()
+
+    def _read_particle_fields(self, chunks, ptf, selector):
+        chunks = list(chunks)
+        data_files = set([])
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in sorted(data_files):
+            poff = data_file.field_offsets
+            tp = data_file.total_particles
+            f = open(data_file.filename, "rb")
+            for ptype, field_list in sorted(ptf.items(), key=lambda a: poff[a[0]]):
+                f.seek(poff[ptype], os.SEEK_SET)
+                total = 0
+                while total < tp[ptype]:
+                    p = np.fromfile(f, self._pdtypes[ptype],
+                        count=min(self._chunksize, tp[ptype] - total))
+                    total += p.size
+                    mask = selector.select_points(
+                        p["Coordinates"]['x'].astype("float64"),
+                        p["Coordinates"]['y'].astype("float64"),
+                        p["Coordinates"]['z'].astype("float64"), 0.0)
+                    if mask is None: continue
+                    tf = self._fill_fields(field_list, p, mask, data_file)
+                    for field in field_list:
+                        yield (ptype, field), tf.pop(field)
+            f.close()
+
+    def _update_domain(self, data_file):
+        '''
+        This method is used to determine the size needed for a box that will
+        bound the particles.  It finds the minimum and maximum coordinates
+        of the whole particle set and extends that range by 1% to form the
+        domain.
+        '''
+        ds = data_file.ds
+        ind = 0
+        # Check to make sure that the domain hasn't already been set
+        # by the parameter file
+        if np.all(np.isfinite(ds.domain_left_edge)) and np.all(np.isfinite(ds.domain_right_edge)):
+            return
+        with open(data_file.filename, "rb") as f:
+            ds.domain_left_edge = 0
+            ds.domain_right_edge = 0
+            f.seek(ds._header_offset)
+            mi =   np.array([1e30, 1e30, 1e30], dtype="float64")
+            ma =  -np.array([1e30, 1e30, 1e30], dtype="float64")
+            for iptype, ptype in enumerate(self._ptypes):
+                # We'll just add the individual types separately
+                count = data_file.total_particles[ptype]
+                if count == 0: continue
+                start, stop = ind, ind + count
+                while ind < stop:
+                    c = min(CHUNKSIZE, stop - ind)
+                    pp = np.fromfile(f, dtype = self._pdtypes[ptype],
+                                     count = c)
+                    eps = np.finfo(pp["Coordinates"]["x"].dtype).eps
+                    np.minimum(mi, [pp["Coordinates"]["x"].min(),
+                                    pp["Coordinates"]["y"].min(),
+                                    pp["Coordinates"]["z"].min()], mi)
+                    np.maximum(ma, [pp["Coordinates"]["x"].max(),
+                                    pp["Coordinates"]["y"].max(),
+                                    pp["Coordinates"]["z"].max()], ma)
+                    ind += c
+        # We extend by 1%.
+        DW = ma - mi
+        mi -= 0.01 * DW
+        ma += 0.01 * DW
+        ds.domain_left_edge = ds.arr(mi, 'code_length')
+        ds.domain_right_edge = ds.arr(ma, 'code_length')
+        ds.domain_width = DW = ds.domain_right_edge - ds.domain_left_edge
+        ds.unit_registry.add("unitary", float(DW.max() * DW.units.cgs_value),
+                                 DW.units.dimensions)
+
+    def _initialize_index(self, data_file, regions):
+        ds = data_file.ds
+        morton = np.empty(sum(data_file.total_particles.values()),
+                          dtype="uint64")
+        ind = 0
+        DLE, DRE = ds.domain_left_edge, ds.domain_right_edge
+        dx = (DRE - DLE) / (2**_ORDER_MAX)
+        self.domain_left_edge = DLE.in_units("code_length").ndarray_view()
+        self.domain_right_edge = DRE.in_units("code_length").ndarray_view()
+        with open(data_file.filename, "rb") as f:
+            f.seek(ds._header_offset)
+            for iptype, ptype in enumerate(self._ptypes):
+                # We'll just add the individual types separately
+                count = data_file.total_particles[ptype]
+                if count == 0: continue
+                start, stop = ind, ind + count
+                while ind < stop:
+                    c = min(CHUNKSIZE, stop - ind)
+                    pp = np.fromfile(f, dtype = self._pdtypes[ptype],
+                                     count = c)
+                    mis = np.empty(3, dtype="float64")
+                    mas = np.empty(3, dtype="float64")
+                    for axi, ax in enumerate('xyz'):
+                        mi = pp["Coordinates"][ax].min()
+                        ma = pp["Coordinates"][ax].max()
+                        mylog.debug("Spanning: %0.3e .. %0.3e in %s", mi, ma, ax)
+                        mis[axi] = mi
+                        mas[axi] = ma
+                    pos = np.empty((pp.size, 3), dtype="float64")
+                    for i, ax in enumerate("xyz"):
+                        eps = np.finfo(pp["Coordinates"][ax].dtype).eps
+                        pos[:,i] = pp["Coordinates"][ax]
+                    regions.add_data_file(pos, data_file.file_id,
+                                          data_file.ds.filter_bbox)
+                    morton[ind:ind+c] = compute_morton(
+                        pos[:,0], pos[:,1], pos[:,2],
+                        DLE, DRE, data_file.ds.filter_bbox)
+                    ind += c
+        mylog.info("Adding %0.3e particles", morton.size)
+        return morton
+
+    def _count_particles(self, data_file):
+        npart = {
+            "Gas": data_file.ds.parameters['nsph'],
+            "Stars": data_file.ds.parameters['nstar'],
+            "DarkMatter": data_file.ds.parameters['ndark']
+        }
+        return npart
+
+    @classmethod
+    def _compute_dtypes(cls, field_dtypes, endian = "<"):
+        pds = {}
+        for ptype, field in cls._fields:
+            dtbase = field_dtypes.get(field, 'f')
+            ff = "%s%s" % (endian, dtbase)
+            if field in cls._vector_fields:
+                dt = (field, [('x', ff), ('y', ff), ('z', ff)])
+            else:
+                dt = (field, ff)
+            pds.setdefault(ptype, []).append(dt)
+        pdtypes = {}
+        for ptype in pds:
+            pdtypes[ptype] = np.dtype(pds[ptype])
+        return pdtypes
+
+    def _create_dtypes(self, data_file):
+        # We can just look at the particle counts.
+        self._header_offset = data_file.ds._header_offset
+        self._pdtypes = {}
+        pds = {}
+        field_list = []
+        tp = data_file.total_particles
+        aux_filenames = glob.glob(data_file.filename+'.*') # Find out which auxiliaries we have
+        self._aux_fields = [f[1+len(data_file.filename):] for f in aux_filenames]
+        self._pdtypes = self._compute_dtypes(data_file.ds._field_dtypes,
+                                             data_file.ds.endian)
+        for ptype, field in self._fields:
+            if tp[ptype] == 0:
+                # We do not want our _pdtypes to include particle types with no particles.
+                self._pdtypes.pop(ptype, None)
+                continue
+            field_list.append((ptype, field))
+        if any(["Gas"==f[0] for f in field_list]): #Add the auxiliary fields to each ptype we have
+            field_list += [("Gas",a) for a in self._aux_fields]
+        if any(["DarkMatter"==f[0] for f in field_list]):
+            field_list += [("DarkMatter",a) for a in self._aux_fields]
+        if any(["Stars"==f[0] for f in field_list]):
+            field_list += [("Stars",a) for a in self._aux_fields]
+        self._field_list = field_list
+        return self._field_list
+
+    def _identify_fields(self, data_file):
+        return self._field_list, {}
+
+    def _calculate_particle_offsets(self, data_file):
+        field_offsets = {}
+        pos = data_file.ds._header_offset
+        for ptype in self._ptypes:
+            field_offsets[ptype] = pos
+            if data_file.total_particles[ptype] == 0: continue
+            size = self._pdtypes[ptype].itemsize
+            pos += data_file.total_particles[ptype] * size
+        return field_offsets
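
For reference, the offset bookkeeping in _calculate_particle_offsets above is
just a running sum of count * itemsize per particle type. A minimal standalone
sketch (hypothetical counts and record dtype; the real dtypes come from
_compute_dtypes):

    import numpy as np

    counts = {"Gas": 1000, "DarkMatter": 2000, "Stars": 500}
    rec = np.dtype([('x', '<f4'), ('y', '<f4'), ('z', '<f4')])  # 12 bytes

    pos = 256  # assumed header size in bytes
    offsets = {}
    for ptype in ("Gas", "DarkMatter", "Stars"):
        offsets[ptype] = pos
        pos += counts[ptype] * rec.itemsize
        print("%s starts at byte %d" % (ptype, offsets[ptype]))
    # Gas starts at byte 256
    # DarkMatter starts at byte 12256
    # Stars starts at byte 36256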


https://bitbucket.org/yt_analysis/yt/commits/b127a65cad34/
Changeset:   b127a65cad34
Branch:      yt
User:        brittonsmith
Date:        2014-10-22 14:49:12+00:00
Summary:     Moving OWLS frontend into its own directory.
Affected #:  7 files

diff -r fa5e0bb5d12f314e71237b5a42cfef177a151a71 -r b127a65cad34538709061407e8b0e4ee6db3e417 yt/frontends/sph/owls/api.py
--- /dev/null
+++ b/yt/frontends/sph/owls/api.py
@@ -0,0 +1,18 @@
+"""
+API for OWLS frontend
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from .data_structures import \
+    OWLSDataset

diff -r fa5e0bb5d12f314e71237b5a42cfef177a151a71 -r b127a65cad34538709061407e8b0e4ee6db3e417 yt/frontends/sph/owls/data_structures.py
--- /dev/null
+++ b/yt/frontends/sph/owls/data_structures.py
@@ -0,0 +1,73 @@
+"""
+Data structures for OWLS frontend
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+
+import yt.units
+from yt.frontends.sph.gadget.data_structures import \
+    GadgetHDF5Dataset
+from yt.utilities.definitions import \
+    sec_conversion
+
+from .fields import \
+    OWLSFieldInfo
+
+class OWLSDataset(GadgetHDF5Dataset):
+    _particle_mass_name = "Mass"
+    _field_info_class = OWLSFieldInfo
+    _time_readin = "Time_GYR"
+
+
+    def _parse_parameter_file(self):
+
+        # read values from header
+        hvals = self._get_hvals()
+        self.parameters = hvals
+
+        # set features common to OWLS and Eagle
+        self._set_owls_eagle()
+
+        # Set time from value in header
+        self.current_time = hvals[self._time_readin] * \
+                            sec_conversion["Gyr"] * yt.units.s
+
+
+    def _set_code_unit_attributes(self):
+        self._set_owls_eagle_units()
+
+
+    @classmethod
+    def _is_valid(cls, *args, **kwargs):
+        need_groups = ['Constants', 'Header', 'Parameters', 'Units']
+        veto_groups = ['SUBFIND', 'FOF',
+                       'PartType0/ChemistryAbundances',
+                       'PartType0/ChemicalAbundances',
+                       'RuntimePars', 'HashTable']
+        valid = True
+        try:
+            fileh = h5py.File(args[0], mode='r')
+            for ng in need_groups:
+                if ng not in fileh["/"]:
+                    valid = False
+            for vg in veto_groups:
+                if vg in fileh["/"]:
+                    valid = False
+            fileh.close()
+        except Exception:
+            valid = False
+        return valid
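
The _is_valid pattern above (require certain HDF5 groups, veto others) can be
sketched standalone with nothing but h5py; looks_like_owls is a hypothetical
name, and the group lists are the OWLS ones from the code above:

    import h5py

    def looks_like_owls(fn,
                        need=('Constants', 'Header', 'Parameters', 'Units'),
                        veto=('SUBFIND', 'FOF', 'RuntimePars', 'HashTable')):
        # Any IO or format error simply means "not this frontend".
        try:
            with h5py.File(fn, mode='r') as f:
                return (all(g in f["/"] for g in need) and
                        not any(g in f["/"] for g in veto))
        except (IOError, OSError):
            return False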

diff -r fa5e0bb5d12f314e71237b5a42cfef177a151a71 -r b127a65cad34538709061407e8b0e4ee6db3e417 yt/frontends/sph/owls/fields.py
--- /dev/null
+++ b/yt/frontends/sph/owls/fields.py
@@ -0,0 +1,323 @@
+"""
+OWLS fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import os
+import tarfile
+
+import numpy as np
+
+from yt.config import ytcfg
+from yt.fields.particle_fields import \
+    add_volume_weighted_smoothed_field
+from yt.fields.species_fields import \
+    add_species_field_by_fraction, \
+    add_species_field_by_density
+from yt.frontends.sph.fields import \
+    SPHFieldInfo
+from yt.funcs import mylog, download_file
+
+import owls_ion_tables as oit
+
+class OWLSFieldInfo(SPHFieldInfo):
+
+    _ions = ("c1", "c2", "c3", "c4", "c5", "c6",
+             "fe2", "fe17", "h1", "he1", "he2", "mg1", "mg2", "n2", 
+             "n3", "n4", "n5", "n6", "n7", "ne8", "ne9", "ne10", "o1", 
+             "o6", "o7", "o8", "si2", "si3", "si4", "si13")
+
+    _elements = ("H", "He", "C", "N", "O", "Ne", "Mg", "Si", "Fe")
+
+    _num_neighbors = 48
+
+    _add_elements = ("PartType0", "PartType4")
+
+    _add_ions = ("PartType0")
+
+
+    def __init__(self, *args, **kwargs):
+        
+        new_particle_fields = (
+            ("Hydrogen", ("", ["H_fraction"], None)),
+            ("Helium", ("", ["He_fraction"], None)),
+            ("Carbon", ("", ["C_fraction"], None)),
+            ("Nitrogen", ("", ["N_fraction"], None)),
+            ("Oxygen", ("", ["O_fraction"], None)),
+            ("Neon", ("", ["Ne_fraction"], None)),
+            ("Magnesium", ("", ["Mg_fraction"], None)),
+            ("Silicon", ("", ["Si_fraction"], None)),
+            ("Iron", ("", ["Fe_fraction"], None))
+            )
+
+        self.known_particle_fields += new_particle_fields
+        
+        super(OWLSFieldInfo,self).__init__( *args, **kwargs )
+
+
+
+    def setup_particle_fields(self, ptype):
+        """ additional particle fields derived from those in snapshot.
+        we also need to add the smoothed fields here b/c setup_fluid_fields
+        is called before setup_particle_fields. """ 
+
+        smoothed_suffixes = ("_number_density", "_density", "_mass")
+
+
+
+        # we add particle element fields for stars and gas
+        #-----------------------------------------------------
+        if ptype in self._add_elements:
+
+
+            # this adds the particle element fields
+            # X_density, X_mass, and X_number_density
+            # where X is an item of self._elements.
+            # X_fraction are defined in snapshot
+            #-----------------------------------------------
+            for s in self._elements:
+                add_species_field_by_fraction(self, ptype, s,
+                                              particle_type=True)
+
+        # this needs to be called after the call to
+        # add_species_field_by_fraction for some reason ...
+        # not sure why yet.
+        #-------------------------------------------------------
+        ptype_ftype_map = {'PartType0': 'gas',
+                           'PartType1': 'dm',
+                           'PartType4': 'star',
+                           'PartType5': 'BH',
+                           'all': 'all'}
+        # PartType2 and PartType3 keep their own names; defaulting to
+        # ptype also guarantees ftype is always defined.
+        ftype = ptype_ftype_map.get(ptype, ptype)
+
+        super(OWLSFieldInfo,self).setup_particle_fields(
+            ptype, num_neighbors=self._num_neighbors, ftype=ftype)
+
+
+        # and now we add the smoothed versions for PartType0
+        #-----------------------------------------------------
+        if ptype == 'PartType0':
+
+            loaded = []
+            for s in self._elements:
+                for sfx in smoothed_suffixes:
+                    fname = s + sfx
+                    fn = add_volume_weighted_smoothed_field( 
+                        ptype, "particle_position", "particle_mass",
+                        "smoothing_length", "density", fname, self,
+                        self._num_neighbors)
+                    loaded += fn
+
+                    self.alias(("gas", fname), fn[0])
+
+            self._show_field_errors += loaded
+            self.find_dependencies(loaded)
+
+
+            # we only add ion fields for gas.  this takes some 
+            # time as the ion abundances have to be interpolated
+            # from cloudy tables (optically thin)
+            #-----------------------------------------------------
+    
+
+            # this defines the ion density on particles
+            # X_density for all items in self._ions
+            #-----------------------------------------------
+            self.setup_gas_ion_density_particle_fields( ptype )
+
+            # this adds the rest of the ion particle fields
+            # X_fraction, X_mass, X_number_density
+            #-----------------------------------------------
+            for ion in self._ions:
+
+                # construct yt name for ion
+                #---------------------------------------------------
+                if ion[0:2].isalpha():
+                    symbol = ion[0:2].capitalize()
+                    roman = int(ion[2:])
+                else:
+                    symbol = ion[0:1].capitalize()
+                    roman = int(ion[1:])
+
+                pstr = "_p" + str(roman-1)
+                yt_ion = symbol + pstr
+
+                # add particle field
+                #---------------------------------------------------
+                add_species_field_by_density(self, ptype, yt_ion,
+                                             particle_type=True)
+
+
+            # add smoothed ion fields
+            #-----------------------------------------------
+            for ion in self._ions:
+
+                # construct yt name for ion
+                #---------------------------------------------------
+                if ion[0:2].isalpha():
+                    symbol = ion[0:2].capitalize()
+                    roman = int(ion[2:])
+                else:
+                    symbol = ion[0:1].capitalize()
+                    roman = int(ion[1:])
+
+                pstr = "_p" + str(roman-1)
+                yt_ion = symbol + pstr
+
+                loaded = []
+                for sfx in smoothed_suffixes:
+                    fname = yt_ion + sfx
+                    fn = add_volume_weighted_smoothed_field( 
+                        ptype, "particle_position", "particle_mass",
+                        "smoothing_length", "density", fname, self,
+                        self._num_neighbors)
+                    loaded += fn
+
+                    self.alias(("gas", fname), fn[0])
+
+                self._show_field_errors += loaded
+                self.find_dependencies(loaded)
+
+
+
+    def setup_gas_ion_density_particle_fields( self, ptype ):
+        """ Sets up particle fields for gas ion densities. """ 
+
+        # loop over all ions and make fields
+        #----------------------------------------------
+        for ion in self._ions:
+
+            # construct yt name for ion
+            #---------------------------------------------------
+            if ion[0:2].isalpha():
+                symbol = ion[0:2].capitalize()
+                roman = int(ion[2:])
+            else:
+                symbol = ion[0:1].capitalize()
+                roman = int(ion[1:])
+
+            pstr = "_p" + str(roman-1)
+            yt_ion = symbol + pstr
+            ftype = ptype
+
+            # add ion density field for particles
+            #---------------------------------------------------
+            fname = yt_ion + '_density'
+            dens_func = self._create_ion_density_func( ftype, ion )
+            self.add_field( (ftype, fname),
+                            function = dens_func, 
+                            units="g/cm**3",
+                            particle_type=True )            
+            self._show_field_errors.append( (ftype,fname) )
+
+
+
+        
+    def _create_ion_density_func( self, ftype, ion ):
+        """ returns a function that calculates the ion density of a particle. 
+        """ 
+
+        def _ion_density(field, data):
+
+            # get element symbol from ion string. ion string will 
+            # be a member of the tuple _ions (i.e. si13)
+            #--------------------------------------------------------
+            if ion[0:2].isalpha():
+                symbol = ion[0:2].capitalize()
+            else:
+                symbol = ion[0:1].capitalize()
+
+            # mass fraction for the element
+            #--------------------------------------------------------
+            m_frac = data[ftype, symbol+"_fraction"]
+
+            # get nH and T for lookup
+            #--------------------------------------------------------
+            log_nH = np.log10( data["PartType0", "H_number_density"] )
+            log_T = np.log10( data["PartType0", "Temperature"] )
+
+            # get name of owls_ion_file for given ion
+            #--------------------------------------------------------
+            owls_ion_path = self._get_owls_ion_data_dir()
+            fname = os.path.join( owls_ion_path, ion+".hdf5" )
+
+            # create ionization table for this redshift
+            #--------------------------------------------------------
+            itab = oit.IonTableOWLS( fname )
+            itab.set_iz( data.ds.current_redshift )
+
+            # find ion balance using log nH and log T
+            #--------------------------------------------------------
+            i_frac = itab.interp( log_nH, log_T )
+            return data[ftype,"Density"] * m_frac * i_frac 
+        
+        return _ion_density
+
+
+
+
+
+    # the X_mass, X_density, X_fraction, and X_number_density fields
+    # (where X is the name of an OWLS element) are added in
+    # setup_particle_fields above, so there is nothing to do here.
+    #-------------------------------------------------------------
+    def setup_fluid_fields(self):
+
+        return
+
+
+
+    # this function returns the owls_ion_data directory. if it doesn't
+    # exist it will download the data from http://yt-project.org/data
+    #-------------------------------------------------------------
+    def _get_owls_ion_data_dir(self):
+
+        txt = "Attempting to download ~ 30 Mb of owls ion data from %s to %s."
+        data_file = "owls_ion_data.tar.gz"
+        data_url = "http://yt-project.org/data"
+
+        # get test_data_dir from yt config (ytcgf)
+        #----------------------------------------------
+        tdir = ytcfg.get("yt","test_data_dir")
+
+        # set download destination to tdir or ./ if tdir isnt defined
+        #----------------------------------------------
+        if tdir == "/does/not/exist":
+            data_dir = "./"
+        else:
+            data_dir = tdir            
+
+
+        # check for owls_ion_data directory in data_dir
+        # if not there download the tarball and untar it
+        #----------------------------------------------
+        owls_ion_path = os.path.join( data_dir, "owls_ion_data" )
+
+        if not os.path.exists(owls_ion_path):
+            mylog.info(txt % (data_url, data_dir))                    
+            fname = data_dir + "/" + data_file
+            fn = download_file(os.path.join(data_url, data_file), fname)
+
+            cmnd = "cd " + data_dir + "; " + "tar xf " + data_file
+            os.system(cmnd)
+
+
+        if not os.path.exists(owls_ion_path):
+            raise RuntimeError, "Failed to download owls ion data."
+
+        return owls_ion_path
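
The ion-name handling repeated in setup_particle_fields reduces to one small
rule: an OWLS ion string such as "si13" maps to the yt species name "Si_p12"
(element symbol plus ionization state minus one). A standalone sketch of that
rule (owls_ion_to_yt is a hypothetical helper name):

    def owls_ion_to_yt(ion):
        # two-letter element symbols ("si13" -> "Si", 13), else one-letter
        if ion[0:2].isalpha():
            symbol, roman = ion[0:2].capitalize(), int(ion[2:])
        else:
            symbol, roman = ion[0:1].capitalize(), int(ion[1:])
        return "%s_p%d" % (symbol, roman - 1)

    print(owls_ion_to_yt("si13"))  # Si_p12
    print(owls_ion_to_yt("h1"))    # H_p0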

diff -r fa5e0bb5d12f314e71237b5a42cfef177a151a71 -r b127a65cad34538709061407e8b0e4ee6db3e417 yt/frontends/sph/owls/io.py
--- /dev/null
+++ b/yt/frontends/sph/owls/io.py
@@ -0,0 +1,202 @@
+"""
+OWLS data-file handling function
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import os
+
+import h5py
+import numpy as np
+
+from yt.frontends.sph.gadget.definitions import \
+    ghdf5_ptypes
+from yt.utilities.io_handler import \
+    BaseIOHandler
+
+from yt.utilities.lib.geometry_utils import compute_morton
+
+CHUNKSIZE = 10000000
+
+def _get_h5_handle(fn):
+    try:
+        f = h5py.File(fn, "r")
+    except IOError as e:
+        print "ERROR OPENING %s" % (fn)
+        if os.path.exists(fn):
+            print "FILENAME EXISTS"
+        else:
+            print "FILENAME DOES NOT EXIST"
+        raise
+    return f
+
+class IOHandlerOWLS(BaseIOHandler):
+    _dataset_type = "OWLS"
+    _vector_fields = ("Coordinates", "Velocity", "Velocities")
+    _known_ptypes = ghdf5_ptypes
+    _var_mass = None
+    _element_names = ('Hydrogen', 'Helium', 'Carbon', 'Nitrogen', 'Oxygen',
+                       'Neon', 'Magnesium', 'Silicon', 'Iron' )
+
+
+    @property
+    def var_mass(self):
+        if self._var_mass is None:
+            vm = []
+            for i, v in enumerate(self.ds["Massarr"]):
+                if v == 0:
+                    vm.append(self._known_ptypes[i])
+            self._var_mass = tuple(vm)
+        return self._var_mass
+
+    def _read_fluid_selection(self, chunks, selector, fields, size):
+        raise NotImplementedError
+
+    def _read_particle_coords(self, chunks, ptf):
+        # This will read chunks and yield the results.
+        chunks = list(chunks)
+        data_files = set([])
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in sorted(data_files):
+            f = _get_h5_handle(data_file.filename)
+            # Coordinates are read here and again during field
+            # selection; read the dataset only once per ptype here.
+            for ptype, field_list in sorted(ptf.items()):
+                if data_file.total_particles[ptype] == 0:
+                    continue
+                pos = f["/%s/Coordinates" % ptype][:].astype("float64")
+                yield ptype, (pos[:,0], pos[:,1], pos[:,2])
+            f.close()
+
+    def _read_particle_fields(self, chunks, ptf, selector):
+        # Now we have all the sizes, and we can allocate
+        data_files = set([])
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in sorted(data_files):
+            f = _get_h5_handle(data_file.filename)
+            for ptype, field_list in sorted(ptf.items()):
+                if data_file.total_particles[ptype] == 0:
+                    continue
+                g = f["/%s" % ptype]
+                coords = g["Coordinates"][:].astype("float64")
+                mask = selector.select_points(
+                            coords[:,0], coords[:,1], coords[:,2], 0.0)
+                del coords
+                if mask is None: continue
+                for field in field_list:
+
+                    if field in ("Mass", "Masses") and \
+                        ptype not in self.var_mass:
+                        data = np.empty(mask.sum(), dtype="float64")
+                        ind = self._known_ptypes.index(ptype)
+                        data[:] = self.ds["Massarr"][ind]
+
+                    elif field in self._element_names:
+                        rfield = 'ElementAbundance/' + field
+                        data = g[rfield][:][mask,...]
+                    elif field.startswith("Metallicity_"):
+                        col = int(field.rsplit("_", 1)[-1])
+                        data = g["Metallicity"][:,col][mask]
+                    elif field.startswith("Chemistry_"):
+                        col = int(field.rsplit("_", 1)[-1])
+                        data = g["ChemistryAbundances"][:,col][mask]
+                    else:
+                        data = g[field][:][mask,...]
+
+                    yield (ptype, field), data
+            f.close()
+
+    def _initialize_index(self, data_file, regions):
+        f = _get_h5_handle(data_file.filename)
+        pcount = f["/Header"].attrs["NumPart_ThisFile"][:].sum()
+        morton = np.empty(pcount, dtype='uint64')
+        ind = 0
+        for key in f.keys():
+            if not key.startswith("PartType"): continue
+            if "Coordinates" not in f[key]: continue
+            ds = f[key]["Coordinates"]
+            dt = ds.dtype.newbyteorder("N") # Native
+            pos = np.empty(ds.shape, dtype=dt)
+            pos[:] = ds
+            regions.add_data_file(pos, data_file.file_id,
+                                  data_file.ds.filter_bbox)
+            morton[ind:ind+pos.shape[0]] = compute_morton(
+                pos[:,0], pos[:,1], pos[:,2],
+                data_file.ds.domain_left_edge,
+                data_file.ds.domain_right_edge,
+                data_file.ds.filter_bbox)
+            ind += pos.shape[0]
+        f.close()
+        return morton
+
+    def _count_particles(self, data_file):
+        f = _get_h5_handle(data_file.filename)
+        pcount = f["/Header"].attrs["NumPart_ThisFile"][:]
+        f.close()
+        npart = dict(("PartType%s" % (i), v) for i, v in enumerate(pcount))
+        return npart
+
+
+    def _identify_fields(self, data_file):
+        f = _get_h5_handle(data_file.filename)
+        fields = []
+        cname = self.ds._particle_coordinates_name  # Coordinates
+        mname = self.ds._particle_mass_name  # Mass
+
+        # loop over all keys in OWLS hdf5 file
+        #--------------------------------------------------
+        for key in f.keys():
+
+            # only want particle data
+            #--------------------------------------
+            if not key.startswith("PartType"): continue
+
+            # particle data group
+            #--------------------------------------
+            g = f[key]
+            if cname not in g: continue
+
+            # note str => not unicode!
+
+            #ptype = int(key[8:])
+            ptype = str(key)
+            if ptype not in self.var_mass:
+                fields.append((ptype, mname))
+
+            # loop over all keys in PartTypeX group
+            #----------------------------------------
+            for k in g.keys():
+
+                if k == 'ElementAbundance':
+                    gp = g[k]
+                    for j in gp.keys():
+                        fields.append((ptype, str(j)))
+                elif k == 'Metallicity' and len(g[k].shape) > 1:
+                    # Vector of metallicity
+                    for i in range(g[k].shape[1]):
+                        fields.append((ptype, "Metallicity_%02i" % i))
+                elif k == "ChemistryAbundances" and len(g[k].shape) > 1:
+                    for i in range(g[k].shape[1]):
+                        fields.append((ptype, "Chemistry_%03i" % i))
+                else:
+                    if not hasattr(g[k], "shape"): continue
+                    fields.append((ptype, str(k)))
+
+
+        f.close()
+        return fields, {}
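
The column handling in _identify_fields above amounts to a naming convention:
a 2-D Metallicity or ChemistryAbundances dataset is exposed one column at a
time. A sketch of just that convention (column_field_names is a hypothetical
helper name):

    def column_field_names(key, ncols):
        if key == "Metallicity":
            return ["Metallicity_%02i" % i for i in range(ncols)]
        if key == "ChemistryAbundances":
            return ["Chemistry_%03i" % i for i in range(ncols)]
        return [key]

    print(column_field_names("Metallicity", 3))
    # ['Metallicity_00', 'Metallicity_01', 'Metallicity_02']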

diff -r fa5e0bb5d12f314e71237b5a42cfef177a151a71 -r b127a65cad34538709061407e8b0e4ee6db3e417 yt/frontends/sph/owls/owls_ion_tables.py
--- /dev/null
+++ b/yt/frontends/sph/owls/owls_ion_tables.py
@@ -0,0 +1,210 @@
+""" 
+OWLS ion tables
+
+A module to handle the HM01 UV background spectra and ionization data from the
+OWLS photoionization equilibrium lookup tables. 
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+import numpy as np
+
+
+
+
+def h5rd( fname, path, dtype=None ):
+    """ Read Data. Return a dataset located at <path> in file <fname> as
+    a numpy array of type <dtype> (the on-disk type by default).
+    e.g. h5rd( fname, '/PartType0/Coordinates' ). """
+
+    with h5py.File( fname, 'r' ) as h5f:
+        ds = h5f[path]
+        if dtype is None:
+            dtype = ds.dtype
+        data = np.asarray( ds.value, dtype=dtype )
+    return data
+
+
+
+class IonTableSpectrum:
+
+    """ A class to handle the HM01 spectra in the OWLS ionization tables. """
+
+    def __init__(self, ion_file):
+
+        where = '/header/spectrum/gammahi'
+        self.GH1 = h5rd( ion_file, where ) # GH1[1/s]
+
+        where = '/header/spectrum/logenergy_ryd'
+        self.logryd = h5rd( ion_file, where ) # E[ryd]  
+
+        where = '/header/spectrum/logflux'
+        self.logflux = h5rd( ion_file, where ) # J[ergs/s/Hz/Sr/cm^2] 
+
+        where = '/header/spectrum/redshift'
+        self.z = h5rd( ion_file, where ) # z
+
+
+
+    def return_table_GH1_at_z(self,z):
+
+        # find redshift indices
+        #-----------------------------------------------------------------
+        i_zlo = np.argmin( np.abs( self.z - z ) )
+        if self.z[i_zlo] < z:
+            i_zhi = i_zlo + 1
+        else:
+            i_zhi = i_zlo
+            i_zlo = i_zlo - 1
+    
+        z_frac = (z - self.z[i_zlo]) / (self.z[i_zhi] - self.z[i_zlo])
+   
+        # find GH1 from table
+        #-----------------------------------------------------------------
+        logGH1_all = np.log10( self.GH1 )
+        dlog_GH1 = logGH1_all[i_zhi] - logGH1_all[i_zlo]
+
+        logGH1_table = logGH1_all[i_zlo] + z_frac * dlog_GH1
+        GH1_table = 10.0**logGH1_table
+
+        return GH1_table
+    
+
+
+
+class IonTableOWLS:
+
+    """ A class to handle OWLS ionization tables. """
+
+    DELTA_nH = 0.25
+    DELTA_T = 0.1
+    
+    def __init__(self, ion_file):
+
+        self.ion_file = ion_file
+
+        # ionbal is indexed like [nH, T, z]
+        # nH and T are log quantities
+        #---------------------------------------------------------------
+        self.nH = h5rd( ion_file, '/logd' )         # log nH [cm^-3]
+        self.T = h5rd( ion_file, '/logt' )          # log T [K]
+        self.z = h5rd( ion_file, '/redshift' )      # z
+
+        # read the ionization fractions
+        # linear values stored in file so take log here
+        # ionbal is the ionization balance (i.e. fraction) 
+        #---------------------------------------------------------------
+        self.ionbal = h5rd( ion_file, '/ionbal' ).astype(np.float64)    
+        self.ionbal_orig = self.ionbal.copy()
+
+        ipositive = np.where( self.ionbal > 0.0 )
+        izero = np.where( self.ionbal <= 0.0 )
+        self.ionbal[izero] = self.ionbal[ipositive].min()
+
+        self.ionbal = np.log10( self.ionbal )
+
+
+        # load in background spectrum
+        #---------------------------------------------------------------
+        self.spectrum = IonTableSpectrum( ion_file ) 
+
+        # calculate the spacing along each dimension
+        #---------------------------------------------------------------
+        self.dnH = self.nH[1:] - self.nH[0:-1]
+        self.dT = self.T[1:] - self.T[0:-1]
+        self.dz = self.z[1:] - self.z[0:-1]
+
+        self.order_str = '[log nH, log T, z]'
+
+
+            
+        
+                                                
+    # sets iz and fz
+    #-----------------------------------------------------
+    def set_iz( self, z ):
+
+        if z <= self.z[0]:
+            self.iz = 0
+            self.fz = 0.0
+        elif z >= self.z[-1]:
+            self.iz = len(self.z) - 2
+            self.fz = 1.0
+        else:
+            for iz in range( len(self.z)-1 ):
+                if z < self.z[iz+1]:
+                    self.iz = iz
+                    self.fz = ( z - self.z[iz] ) / self.dz[iz]
+                    break
+
+        
+
+    # interpolate the table at a fixed redshift for the input
+    # values of nH and T ( input should be log ).  A simple    
+    # tri-linear interpolation is used.  
+    #-----------------------------------------------------
+    def interp( self, nH, T ):
+
+        nH = np.array( nH )
+        T  = np.array( T )
+
+        if nH.size != T.size:
+            raise ValueError("owls_ion_tables: nH and T arrays must be the same size")
+        
+        # field discovery will have nH.size == 1 and T.size == 1
+        # in that case we simply return 1.0
+
+        if nH.size == 1 and T.size == 1:
+            ionfrac = 1.0
+            return ionfrac
+
+
+        # find inH and fnH
+        #-----------------------------------------------------
+        x_nH = ( nH - self.nH[0] ) / self.DELTA_nH
+        x_nH_clip = np.clip( x_nH, 0.0, self.nH.size-1.001 )
+        fnH,inH = np.modf( x_nH_clip )
+        inH = inH.astype( np.int32 )
+
+
+        # find iT and fT
+        #-----------------------------------------------------
+        x_T = ( T - self.T[0] ) / self.DELTA_T
+        x_T_clip = np.clip( x_T, 0.0, self.T.size-1.001 )
+        fT,iT = np.modf( x_T_clip )
+        iT = iT.astype( np.int32 )
+        
+
+        # short names for previously calculated iz and fz
+        #-----------------------------------------------------
+        iz = self.iz
+        fz = self.fz
+
+                   
+        # calculate interpolated value
+        # use tri-linear interpolation on the log values
+        #-----------------------------------------------------
+
+        ionfrac = self.ionbal[inH,   iT,   iz  ] * (1-fnH) * (1-fT) * (1-fz) + \
+                  self.ionbal[inH+1, iT,   iz  ] * (fnH)   * (1-fT) * (1-fz) + \
+                  self.ionbal[inH,   iT+1, iz  ] * (1-fnH) * (fT)   * (1-fz) + \
+                  self.ionbal[inH,   iT,   iz+1] * (1-fnH) * (1-fT) * (fz)   + \
+                  self.ionbal[inH+1, iT,   iz+1] * (fnH)   * (1-fT) * (fz)   + \
+                  self.ionbal[inH,   iT+1, iz+1] * (1-fnH) * (fT)   * (fz)   + \
+                  self.ionbal[inH+1, iT+1, iz]   * (fnH)   * (fT)   * (1-fz) + \
+                  self.ionbal[inH+1, iT+1, iz+1] * (fnH)   * (fT)   * (fz)
+
+        return 10**ionfrac
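
The heart of IonTableOWLS.interp is a standard trilinear blend over the eight
surrounding table corners. A self-contained numpy sketch on a toy 3-D table
(fractional indices play the role of (fnH, fT, fz); trilinear is a
hypothetical name):

    import numpy as np

    def trilinear(tab, x, y, z):
        # x, y, z are fractional indices into tab; e.g. x = 1.25 means
        # 25% of the way from plane 1 to plane 2 along the first axis.
        fx, ix = np.modf(x)
        fy, iy = np.modf(y)
        fz, iz = np.modf(z)
        ix, iy, iz = int(ix), int(iy), int(iz)
        return (tab[ix,   iy,   iz  ] * (1-fx)*(1-fy)*(1-fz) +
                tab[ix+1, iy,   iz  ] * fx    *(1-fy)*(1-fz) +
                tab[ix,   iy+1, iz  ] * (1-fx)*fy    *(1-fz) +
                tab[ix,   iy,   iz+1] * (1-fx)*(1-fy)*fz     +
                tab[ix+1, iy,   iz+1] * fx    *(1-fy)*fz     +
                tab[ix,   iy+1, iz+1] * (1-fx)*fy    *fz     +
                tab[ix+1, iy+1, iz  ] * fx    *fy    *(1-fz) +
                tab[ix+1, iy+1, iz+1] * fx    *fy    *fz)

    # On a table that is linear in its indices the blend is exact:
    i, j, k = np.mgrid[0:4, 0:4, 0:4]
    tab = 1.0*i + 2.0*j + 3.0*k
    print(trilinear(tab, 1.5, 0.25, 2.0))  # 1.5 + 0.5 + 6.0 = 8.0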

diff -r fa5e0bb5d12f314e71237b5a42cfef177a151a71 -r b127a65cad34538709061407e8b0e4ee6db3e417 yt/frontends/sph/owls_ion_tables.py
--- a/yt/frontends/sph/owls_ion_tables.py
+++ /dev/null
@@ -1,210 +0,0 @@
-""" 
-OWLS ion tables
-
-A module to handle the HM01 UV background spectra and ionization data from the
-OWLS photoionization equilibrium lookup tables. 
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-import sys
-import h5py
-import numpy as np
-
-
-
-
-def h5rd( fname, path, dtype=None ):
-    """ Read Data. Return a dataset located at <path> in file <fname> as
-    a numpy array. 
-    e.g. rd( fname, '/PartType0/Coordinates' ). """
-
-    data = None
-    with h5py.File( fname, 'r' ) as h5f:
-        ds = h5f[path]
-        if dtype == None:
-            dtype = ds.dtype
-        data = np.zeros( ds.shape, dtype=dtype )
-        data = ds.value
-    return data
-
-
-
-class IonTableSpectrum:
-
-    """ A class to handle the HM01 spectra in the OWLS ionization tables. """
-
-    def __init__(self, ion_file):
-
-        where = '/header/spectrum/gammahi'
-        self.GH1 = h5rd( ion_file, where ) # GH1[1/s]
-
-        where = '/header/spectrum/logenergy_ryd'
-        self.logryd = h5rd( ion_file, where ) # E[ryd]  
-
-        where = '/header/spectrum/logflux'
-        self.logflux = h5rd( ion_file, where ) # J[ergs/s/Hz/Sr/cm^2] 
-
-        where = '/header/spectrum/redshift'
-        self.z = h5rd( ion_file, where ) # z
-
-
-
-    def return_table_GH1_at_z(self,z):
-
-        # find redshift indices
-        #-----------------------------------------------------------------
-        i_zlo = np.argmin( np.abs( self.z - z ) )
-        if self.z[i_zlo] < z:
-            i_zhi = i_zlo + 1
-        else:
-            i_zhi = i_zlo
-            i_zlo = i_zlo - 1
-    
-        z_frac = (z - self.z[i_zlo]) / (self.z[i_zhi] - self.z[i_zlo])
-   
-        # find GH1 from table
-        #-----------------------------------------------------------------
-        logGH1_all = np.log10( self.GH1 )
-        dlog_GH1 = logGH1_all[i_zhi] - logGH1_all[i_zlo]
-
-        logGH1_table = logGH1_all[i_zlo] + z_frac * dlog_GH1
-        GH1_table = 10.0**logGH1_table
-
-        return GH1_table
-    
-
-
-
-class IonTableOWLS:
-
-    """ A class to handle OWLS ionization tables. """
-
-    DELTA_nH = 0.25
-    DELTA_T = 0.1
-    
-    def __init__(self, ion_file):
-
-        self.ion_file = ion_file
-
-        # ionbal is indexed like [nH, T, z]
-        # nH and T are log quantities
-        #---------------------------------------------------------------
-        self.nH = h5rd( ion_file, '/logd' )         # log nH [cm^-3]
-        self.T = h5rd( ion_file, '/logt' )          # log T [K]
-        self.z = h5rd( ion_file, '/redshift' )      # z
-
-        # read the ionization fractions
-        # linear values stored in file so take log here
-        # ionbal is the ionization balance (i.e. fraction) 
-        #---------------------------------------------------------------
-        self.ionbal = h5rd( ion_file, '/ionbal' ).astype(np.float64)    
-        self.ionbal_orig = self.ionbal.copy()
-
-        ipositive = np.where( self.ionbal > 0.0 )
-        izero = np.where( self.ionbal <= 0.0 )
-        self.ionbal[izero] = self.ionbal[ipositive].min()
-
-        self.ionbal = np.log10( self.ionbal )
-
-
-        # load in background spectrum
-        #---------------------------------------------------------------
-        self.spectrum = IonTableSpectrum( ion_file ) 
-
-        # calculate the spacing along each dimension
-        #---------------------------------------------------------------
-        self.dnH = self.nH[1:] - self.nH[0:-1]
-        self.dT = self.T[1:] - self.T[0:-1]
-        self.dz = self.z[1:] - self.z[0:-1]
-
-        self.order_str = '[log nH, log T, z]'
-
-
-            
-        
-                                                
-    # sets iz and fz
-    #-----------------------------------------------------
-    def set_iz( self, z ):
-
-        if z <= self.z[0]:
-            self.iz = 0
-            self.fz = 0.0
-        elif z >= self.z[-1]:
-            self.iz = len(self.z) - 2
-            self.fz = 1.0
-        else:
-            for iz in range( len(self.z)-1 ):
-                if z < self.z[iz+1]:
-                    self.iz = iz
-                    self.fz = ( z - self.z[iz] ) / self.dz[iz]
-                    break
-
-        
-
-    # interpolate the table at a fixed redshift for the input
-    # values of nH and T ( input should be log ).  A simple    
-    # tri-linear interpolation is used.  
-    #-----------------------------------------------------
-    def interp( self, nH, T ):
-
-        nH = np.array( nH )
-        T  = np.array( T )
-
-        if nH.size != T.size:
-            raise ValueError(' owls_ion_tables: array size mismatch !!! ')
-        
-        # field discovery will have nH.size == 1 and T.size == 1
-        # in that case we simply return 1.0
-
-        if nH.size == 1 and T.size == 1:
-            ionfrac = 1.0
-            return ionfrac
-
-
-        # find inH and fnH
-        #-----------------------------------------------------
-        x_nH = ( nH - self.nH[0] ) / self.DELTA_nH
-        x_nH_clip = np.clip( x_nH, 0.0, self.nH.size-1.001 )
-        fnH,inH = np.modf( x_nH_clip )
-        inH = inH.astype( np.int32 )
-
-
-        # find iT and fT
-        #-----------------------------------------------------
-        x_T = ( T - self.T[0] ) / self.DELTA_T
-        x_T_clip = np.clip( x_T, 0.0, self.T.size-1.001 )
-        fT,iT = np.modf( x_T_clip )
-        iT = iT.astype( np.int32 )
-        
-
-        # short names for previously calculated iz and fz
-        #-----------------------------------------------------
-        iz = self.iz
-        fz = self.fz
-
-                   
-        # calculate interpolated value
-        # use tri-linear interpolation on the log values
-        #-----------------------------------------------------
-
-        ionfrac = self.ionbal[inH,   iT,   iz  ] * (1-fnH) * (1-fT) * (1-fz) + \
-                  self.ionbal[inH+1, iT,   iz  ] * (fnH)   * (1-fT) * (1-fz) + \
-                  self.ionbal[inH,   iT+1, iz  ] * (1-fnH) * (fT)   * (1-fz) + \
-                  self.ionbal[inH,   iT,   iz+1] * (1-fnH) * (1-fT) * (fz)   + \
-                  self.ionbal[inH+1, iT,   iz+1] * (fnH)   * (1-fT) * (fz)   + \
-                  self.ionbal[inH,   iT+1, iz+1] * (1-fnH) * (fT)   * (fz)   + \
-                  self.ionbal[inH+1, iT+1, iz]   * (fnH)   * (fT)   * (1-fz) + \
-                  self.ionbal[inH+1, iT+1, iz+1] * (fnH)   * (fT)   * (fz)
-
-        return 10**ionfrac


https://bitbucket.org/yt_analysis/yt/commits/68f22b5997c3/
Changeset:   68f22b5997c3
Branch:      yt
User:        brittonsmith
Date:        2014-10-22 15:07:41+00:00
Summary:     Moving EAGLE frontend to its own directory.
Affected #:  6 files

diff -r b127a65cad34538709061407e8b0e4ee6db3e417 -r 68f22b5997c336021d8210afded89a0079f30e64 yt/frontends/sph/eagle/api.py
--- /dev/null
+++ b/yt/frontends/sph/eagle/api.py
@@ -0,0 +1,19 @@
+"""
+API for EAGLE frontend
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from .data_structures import \
+    EagleDataset, \
+    EagleNetworkDataset

diff -r b127a65cad34538709061407e8b0e4ee6db3e417 -r 68f22b5997c336021d8210afded89a0079f30e64 yt/frontends/sph/eagle/data_structures.py
--- /dev/null
+++ b/yt/frontends/sph/eagle/data_structures.py
@@ -0,0 +1,98 @@
+"""
+Data structures for EAGLE frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+import numpy as np
+
+from yt.frontends.sph.gadget.data_structures import \
+    GadgetHDF5Dataset
+from yt.frontends.sph.owls.fields import \
+    OWLSFieldInfo
+import yt.units
+
+from .fields import \
+    EagleNetworkFieldInfo
+
+class EagleDataset(GadgetHDF5Dataset):
+    _particle_mass_name = "Mass"
+    _field_info_class = OWLSFieldInfo
+    _time_readin = 'Time'
+
+    def _parse_parameter_file(self):
+
+        # read values from header
+        hvals = self._get_hvals()
+        self.parameters = hvals
+
+        # set features common to OWLS and Eagle
+        self._set_owls_eagle()
+
+        # Set time from analytic solution for flat LCDM universe
+        a = hvals['ExpansionFactor']
+        H0 = hvals['H(z)'] / hvals['E(z)']
+        a_eq = ( self.omega_matter / self.omega_lambda )**(1./3)
+        t1 = 2.0 / ( 3.0 * np.sqrt( self.omega_lambda ) )
+        t2 = (a/a_eq)**(3./2)
+        t3 = np.sqrt( 1.0 + (a/a_eq)**3 )
+        t = t1 * np.log( t2 + t3 ) / H0
+        self.current_time = t * yt.units.s
+
+    def _set_code_unit_attributes(self):
+        self._set_owls_eagle_units()
+
+    @classmethod
+    def _is_valid(cls, *args, **kwargs):
+        need_groups = ['Config', 'Constants', 'HashTable', 'Header',
+                       'Parameters', 'RuntimePars', 'Units']
+        veto_groups = ['SUBFIND',
+                       'PartType0/ChemistryAbundances',
+                       'PartType0/ChemicalAbundances']
+        valid = True
+        try:
+            fileh = h5py.File(args[0], mode='r')
+            for ng in need_groups:
+                if ng not in fileh["/"]:
+                    valid = False
+            for vg in veto_groups:
+                if vg in fileh["/"]:
+                    valid = False
+            fileh.close()
+        except Exception:
+            valid = False
+        return valid
+
+class EagleNetworkDataset(EagleDataset):
+    _particle_mass_name = "Mass"
+    _field_info_class = EagleNetworkFieldInfo
+    _time_readin = 'Time'
+
+    @classmethod
+    def _is_valid(cls, *args, **kwargs):
+        try:
+            fileh = h5py.File(args[0], mode='r')
+            if "Constants" in fileh["/"].keys() and \
+               "Header" in fileh["/"].keys() and \
+               "SUBFIND" not in fileh["/"].keys() and \
+               ("ChemistryAbundances" in fileh["PartType0"].keys()
+                or "ChemicalAbundances" in fileh["PartType0"].keys()):
+                fileh.close()
+                return True
+            fileh.close()
+        except Exception:
+            pass
+        return False
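
The analytic age used in EagleDataset._parse_parameter_file is the standard
flat-LCDM result: with a_eq = (Omega_m/Omega_L)^(1/3), the log term above
equals asinh((a/a_eq)^(3/2)) since asinh(x) = ln(x + sqrt(1 + x^2)), giving
t(a) = 2 asinh((a/a_eq)^(3/2)) / (3 H0 sqrt(Omega_L)). A standalone check
with purely illustrative parameters:

    import numpy as np

    def lcdm_age(a, H0, omega_m, omega_l):
        # H0 in 1/s; returns the age in seconds.
        a_eq = (omega_m / omega_l)**(1./3)
        x = (a / a_eq)**1.5
        return 2.0 * np.arcsinh(x) / (3.0 * np.sqrt(omega_l) * H0)

    # hypothetical numbers: h = 0.7, Omega_m = 0.3, a = 1 (z = 0)
    H0 = 0.7 * 100.0 * 1.0e5 / 3.0857e24    # km/s/Mpc -> 1/s
    print(lcdm_age(1.0, H0, 0.3, 0.7) / 3.156e16)  # age in Gyr, ~13.5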

diff -r b127a65cad34538709061407e8b0e4ee6db3e417 -r 68f22b5997c336021d8210afded89a0079f30e64 yt/frontends/sph/eagle/definitions.py
--- /dev/null
+++ b/yt/frontends/sph/eagle/definitions.py
@@ -0,0 +1,79 @@
+
+gadget_ptypes = ("Gas", "Halo", "Disk", "Bulge", "Stars", "Bndry")
+ghdf5_ptypes  = ("PartType0", "PartType1", "PartType2", "PartType3",
+                 "PartType4", "PartType5")
+
+gadget_header_specs = dict(
+    default      = (('Npart', 6, 'i'),
+                    ('Massarr', 6, 'd'),
+                    ('Time', 1, 'd'),
+                    ('Redshift', 1, 'd'),
+                    ('FlagSfr', 1, 'i'),
+                    ('FlagFeedback', 1, 'i'),
+                    ('Nall', 6, 'i'),
+                    ('FlagCooling', 1, 'i'),
+                    ('NumFiles', 1, 'i'),
+                    ('BoxSize', 1, 'd'),
+                    ('Omega0', 1, 'd'),
+                    ('OmegaLambda', 1, 'd'),
+                    ('HubbleParam', 1, 'd'),
+                    ('FlagAge', 1, 'i'),
+                    ('FlagMetals', 1, 'i'),
+                    ('NallHW', 6, 'i'),
+                    ('unused', 16, 'i')),
+    pad32       = (('empty',  32, 'c'),),
+    pad64       = (('empty',  64, 'c'),),
+    pad128      = (('empty', 128, 'c'),),
+    pad256      = (('empty', 256, 'c'),),
+)
+
+gadget_ptype_specs = dict(
+    default = ( "Gas",
+                "Halo",
+                "Disk",
+                "Bulge",
+                "Stars",
+                "Bndry" )
+)
+
+gadget_field_specs = dict(
+    default = ( "Coordinates",
+                "Velocities",
+                "ParticleIDs",
+                "Mass",
+                ("InternalEnergy", "Gas"),
+                ("Density", "Gas"),
+                ("SmoothingLength", "Gas"),
+    ),
+    agora_unlv = ( "Coordinates",
+                   "Velocities",
+                   "ParticleIDs",
+                   "Mass",
+                   ("InternalEnergy", "Gas"),
+                   ("Density", "Gas"),
+                   ("Electron_Number_Density", "Gas"),
+                   ("HI_NumberDensity", "Gas"),
+                   ("SmoothingLength", "Gas"),
+    )
+)
+
+
+eaglenetwork_ions = \
+    ('electron', 'H1', 'H2', 'H_m', 'He1', 'He2','He3', 'C1',\
+     'C2', 'C3', 'C4', 'C5', 'C6', 'C7', 'C_m', 'N1', 'N2', \
+     'N3', 'N4', 'N5', 'N6', 'N7', 'N8', 'O1', 'O2', 'O3', \
+     'O4', 'O5', 'O6', 'O7', 'O8', 'O9', 'O_m', 'Ne1', 'Ne2',\
+     'Ne3', 'Ne4', 'Ne5', 'Ne6', 'Ne7', 'Ne8', 'Ne9', 'Ne10',\
+     'Ne11', 'Mg1', 'Mg2', 'Mg3', 'Mg4', 'Mg5', 'Mg6', 'Mg7',\
+     'Mg8', 'Mg9', 'Mg10', 'Mg11', 'Mg12', 'Mg13', 'Si1', 'Si2',\
+     'Si3', 'Si4', 'Si5', 'Si6', 'Si7', 'Si8', 'Si9', 'Si10',\
+     'Si11', 'Si12', 'Si13', 'Si14', 'Si15', 'Si16', 'Si17',\
+     'Ca1', 'Ca2', 'Ca3', 'Ca4', 'Ca5', 'Ca6', 'Ca7', 'Ca8',\
+     'Ca9', 'Ca10', 'Ca11', 'Ca12', 'Ca13', 'Ca14', 'Ca15',\
+     'Ca16', 'Ca17', 'Ca18', 'Ca19', 'Ca20', 'Ca21', 'Fe1',\
+     'Fe2', 'Fe3', 'Fe4', 'Fe5', 'Fe6', 'Fe7', 'Fe8', 'Fe9',\
+     'Fe10', 'Fe11', 'Fe12', 'Fe13', 'Fe14', 'Fe15', 'Fe16',\
+     'Fe17', 'Fe18', 'Fe19', 'Fe20', 'Fe21', 'Fe22', 'Fe23',\
+     'Fe24', 'Fe25', 'Fe26', 'Fe27',)
+
+eaglenetwork_ion_lookup = {ion:index for index, ion in enumerate(eaglenetwork_ions)}
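
eaglenetwork_ion_lookup simply inverts the ion tuple, so each ion maps to its
column in the ChemistryAbundances array; the per-column field name used by
the EAGLE network frontend then follows directly. A sketch with a shortened
ion tuple (chemistry_field is a hypothetical helper name):

    ions = ('electron', 'H1', 'H2', 'H_m', 'He1', 'He2', 'He3')
    ion_lookup = {ion: index for index, ion in enumerate(ions)}

    def chemistry_field(ion):
        return "Chemistry_%03i" % ion_lookup[ion]

    print(chemistry_field('He2'))  # Chemistry_005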

diff -r b127a65cad34538709061407e8b0e4ee6db3e417 -r 68f22b5997c336021d8210afded89a0079f30e64 yt/frontends/sph/eagle/fields.py
--- /dev/null
+++ b/yt/frontends/sph/eagle/fields.py
@@ -0,0 +1,73 @@
+"""
+EAGLE fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.frontends.sph.owls.fields import \
+    OWLSFieldInfo
+from yt.units.yt_array import YTQuantity
+from yt.utilities.periodic_table import periodic_table
+
+from .definitions import \
+    eaglenetwork_ion_lookup
+
+class EagleNetworkFieldInfo(OWLSFieldInfo):
+
+    _ions = \
+        ('H1', 'H2', 'He1', 'He2','He3', 'C1',\
+         'C2', 'C3', 'C4', 'C5', 'C6', 'C7', 'N1', 'N2', \
+         'N3', 'N4', 'N5', 'N6', 'N7', 'N8', 'O1', 'O2', 'O3', \
+         'O4', 'O5', 'O6', 'O7', 'O8', 'O9', 'Ne1', 'Ne2',\
+         'Ne3', 'Ne4', 'Ne5', 'Ne6', 'Ne7', 'Ne8', 'Ne9', 'Ne10',\
+         'Ne11', 'Mg1', 'Mg2', 'Mg3', 'Mg4', 'Mg5', 'Mg6', 'Mg7',\
+         'Mg8', 'Mg9', 'Mg10', 'Mg11', 'Mg12', 'Mg13', 'Si1', 'Si2',\
+         'Si3', 'Si4', 'Si5', 'Si6', 'Si7', 'Si8', 'Si9', 'Si10',\
+         'Si11', 'Si12', 'Si13', 'Si14', 'Si15', 'Si16', 'Si17',\
+         'Ca1', 'Ca2', 'Ca3', 'Ca4', 'Ca5', 'Ca6', 'Ca7', 'Ca8',\
+         'Ca9', 'Ca10', 'Ca11', 'Ca12', 'Ca13', 'Ca14', 'Ca15',\
+         'Ca16', 'Ca17', 'Ca18', 'Ca19', 'Ca20', 'Ca21', 'Fe1',\
+         'Fe2', 'Fe3', 'Fe4', 'Fe5', 'Fe6', 'Fe7', 'Fe8', 'Fe9',\
+         'Fe10', 'Fe11', 'Fe12', 'Fe13', 'Fe14', 'Fe15', 'Fe16',\
+         'Fe17', 'Fe18', 'Fe19', 'Fe20', 'Fe21', 'Fe22', 'Fe23',\
+         'Fe24', 'Fe25', 'Fe26', 'Fe27',)
+
+    def __init__(self, *args, **kwargs):
+        
+        super(EagleNetworkFieldInfo,self).__init__( *args, **kwargs )
+        
+    def _create_ion_density_func( self, ftype, ion ):
+        """ returns a function that calculates the ion density of a particle. 
+        """ 
+
+        def _ion_density(field, data):
+
+            # Lookup the index of the ion 
+            index = eaglenetwork_ion_lookup[ion] 
+
+            # Ion to hydrogen number density ratio
+            ion_chem = data[ftype, "Chemistry_%03i"%index]
+
+            # Mass of a single ion
+            if ion[0:2].isalpha():
+                symbol = ion[0:2].capitalize()
+            else:
+                symbol = ion[0:1].capitalize()
+            m_ion = YTQuantity(periodic_table.elements_by_symbol[symbol].weight, 'amu')
+
+            # hydrogen number density 
+            n_H = data["PartType0", "H_number_density"] 
+
+            return m_ion*ion_chem*n_H 
+        
+        return _ion_density
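
Dimensionally, the ion density above is (ion mass) x (ion-to-hydrogen ratio)
x (hydrogen number density). With plain CGS floats and hypothetical values:

    m_amu = 1.6605e-24      # g
    n_H = 0.1               # hydrogen number density, cm^-3
    ion_chem = 1.0e-4       # ion-to-hydrogen number density ratio
    m_ion = 4.0 * m_amu     # e.g. helium, in g

    rho_ion = m_ion * ion_chem * n_H
    print(rho_ion)          # ~6.6e-29 g/cm^3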

diff -r b127a65cad34538709061407e8b0e4ee6db3e417 -r 68f22b5997c336021d8210afded89a0079f30e64 yt/frontends/sph/eagle/io.py
--- /dev/null
+++ b/yt/frontends/sph/eagle/io.py
@@ -0,0 +1,21 @@
+"""
+EAGLE data-file handling function
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.frontends.sph.owls.io import \
+    IOHandlerOWLS
+
+class IOHandlerEagleNetwork(IOHandlerOWLS):
+    _dataset_type = "eagle_network"


https://bitbucket.org/yt_analysis/yt/commits/13b59b4eae6b/
Changeset:   13b59b4eae6b
Branch:      yt
User:        brittonsmith
Date:        2014-10-22 15:20:57+00:00
Summary:     Moving HTTPStream frontend to its own directory.
Affected #:  4 files

diff -r 68f22b5997c336021d8210afded89a0079f30e64 -r 13b59b4eae6bf5448c7141bb795f3756d0627035 yt/frontends/sph/http_stream/api.py
--- /dev/null
+++ b/yt/frontends/sph/http_stream/api.py
@@ -0,0 +1,18 @@
+"""
+API for HTTPStream frontend
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from .data_structures import \
+    HTTPStreamDataset

diff -r 68f22b5997c336021d8210afded89a0079f30e64 -r 13b59b4eae6bf5448c7141bb795f3756d0627035 yt/frontends/sph/http_stream/data_structures.py
--- /dev/null
+++ b/yt/frontends/sph/http_stream/data_structures.py
@@ -0,0 +1,112 @@
+"""
+Data structures for HTTPStream frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+import time
+
+from yt.data_objects.static_output import \
+    ParticleFile
+from yt.frontends.sph.data_structures import \
+    ParticleDataset
+from yt.frontends.sph.fields import \
+    SPHFieldInfo
+from yt.geometry.particle_geometry_handler import \
+    ParticleIndex
+
+import json
+
+try:
+    import requests
+except ImportError:
+    requests = None
+
+class HTTPParticleFile(ParticleFile):
+    pass
+
+class HTTPStreamDataset(ParticleDataset):
+    _index_class = ParticleIndex
+    _file_class = HTTPParticleFile
+    _field_info_class = SPHFieldInfo
+    _particle_mass_name = "Mass"
+    _particle_coordinates_name = "Coordinates"
+    _particle_velocity_name = "Velocities"
+    filename_template = ""
+    
+    def __init__(self, base_url,
+                 dataset_type = "http_particle_stream",
+                 n_ref = 64, over_refine_factor=1):
+        if requests is None:
+            raise RuntimeError("The requests module is required to use "
+                               "the HTTPStream frontend.")
+        self.base_url = base_url
+        self.n_ref = n_ref
+        self.over_refine_factor = over_refine_factor
+        super(HTTPStreamDataset, self).__init__("", dataset_type)
+
+    def __repr__(self):
+        return self.base_url
+
+    def _parse_parameter_file(self):
+        self.dimensionality = 3
+        self.refine_by = 2
+        self.parameters["HydroMethod"] = "sph"
+
+        # Here's where we're going to grab the JSON index file
+        hreq = requests.get(self.base_url + "/yt_index.json")
+        if hreq.status_code != 200:
+            raise RuntimeError("Unable to fetch %s/yt_index.json: HTTP %d"
+                               % (self.base_url, hreq.status_code))
+        header = json.loads(hreq.content)
+        header['particle_count'] = dict((int(k), header['particle_count'][k])
+            for k in header['particle_count'])
+        self.parameters = header
+
+        # Now we get what we need
+        self.domain_left_edge = np.array(header['domain_left_edge'], "float64")
+        self.domain_right_edge = np.array(header['domain_right_edge'], "float64")
+        nz = 1 << self.over_refine_factor
+        self.domain_dimensions = np.ones(3, "int32") * nz
+        self.periodicity = (True, True, True)
+
+        self.current_time = header['current_time']
+        self.unique_identifier = header.get("unique_identifier", time.time())
+        self.cosmological_simulation = int(header['cosmological_simulation'])
+        for attr in ('current_redshift', 'omega_lambda', 'omega_matter',
+                     'hubble_constant'):
+            setattr(self, attr, float(header[attr]))
+
+        self.file_count = header['num_files']
+
+    def _set_units(self):
+        length_unit = float(self.parameters['units']['length'])
+        time_unit = float(self.parameters['units']['time'])
+        mass_unit = float(self.parameters['units']['mass'])
+        density_unit = mass_unit / length_unit ** 3
+        velocity_unit = length_unit / time_unit
+        self._unit_base = {}
+        self._unit_base['cm'] = 1.0/length_unit
+        self._unit_base['s'] = 1.0/time_unit
+        super(HTTPStreamDataset, self)._set_units()
+        self.conversion_factors["velocity"] = velocity_unit
+        self.conversion_factors["mass"] = mass_unit
+        self.conversion_factors["density"] = density_unit
+
+    @classmethod
+    def _is_valid(cls, *args, **kwargs):
+        if not args[0].startswith("http://"):
+            return False
+        hreq = requests.get(args[0] + "/yt_index.json")
+        if hreq.status_code == 200:
+            return True
+        return False
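
For reference, a minimal yt_index.json consistent with the parsing in
_parse_parameter_file above might look like the following (all field values
are hypothetical; JSON object keys arrive as strings, which is why
particle_count is re-keyed with int):

    import json

    index_json = """{
      "domain_left_edge": [0.0, 0.0, 0.0],
      "domain_right_edge": [1.0, 1.0, 1.0],
      "current_time": 0.0,
      "cosmological_simulation": 0,
      "current_redshift": 0.0, "omega_lambda": 0.7,
      "omega_matter": 0.3, "hubble_constant": 0.7,
      "num_files": 1,
      "particle_count": {"0": {"PartType0": 128}}
    }"""

    header = json.loads(index_json)
    header['particle_count'] = dict(
        (int(k), header['particle_count'][k])
        for k in header['particle_count'])
    print(header['particle_count'][0]["PartType0"])  # 128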

diff -r 68f22b5997c336021d8210afded89a0079f30e64 -r 13b59b4eae6bf5448c7141bb795f3756d0627035 yt/frontends/sph/http_stream/io.py
--- /dev/null
+++ b/yt/frontends/sph/http_stream/io.py
@@ -0,0 +1,116 @@
+"""
+HTTPStream data-file handling function
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+
+from yt.utilities.io_handler import \
+    BaseIOHandler
+from yt.utilities.lib.geometry_utils import \
+    compute_morton
+from yt.utilities.logger import ytLogger as mylog
+
+try:
+    import requests
+except ImportError:
+    requests = None
+
+class IOHandlerHTTPStream(BaseIOHandler):
+    _dataset_type = "http_particle_stream"
+    _vector_fields = ("Coordinates", "Velocity", "Velocities")
+
+    def __init__(self, ds):
+        if requests is None:
+            raise RuntimeError("The requests module is required to use "
+                               "the HTTPStream frontend.")
+        self._url = ds.base_url
+        # This should eventually manage the IO and cache it
+        self.total_bytes = 0
+        super(IOHandlerHTTPStream, self).__init__(ds)
+
+    def _open_stream(self, data_file, field):
+        # This does not actually stream yet!
+        ftype, fname = field
+        s = "%s/%s/%s/%s" % (self._url,
+            data_file.file_id, ftype, fname)
+        mylog.info("Loading URL %s", s)
+        resp = requests.get(s)
+        if resp.status_code != 200:
+            raise RuntimeError("Unable to load URL %s: HTTP %d"
+                               % (s, resp.status_code))
+        self.total_bytes += len(resp.content)
+        return resp.content
+
+    def _identify_fields(self, data_file):
+        f = []
+        for ftype, fname in self.ds.parameters["field_list"]:
+            f.append((str(ftype), str(fname)))
+        return f, {}
+
+    def _read_particle_coords(self, chunks, ptf):
+        chunks = list(chunks)
+        data_files = set([])
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in sorted(data_files):
+            for ptype in ptf:
+                s = self._open_stream(data_file, (ptype, "Coordinates"))
+                c = np.frombuffer(s, dtype="float64")
+                c.shape = (c.shape[0] // 3, 3)
+                yield ptype, (c[:,0], c[:,1], c[:,2])
+
+    def _read_particle_fields(self, chunks, ptf, selector):
+        # Now we have all the sizes, and we can allocate
+        data_files = set([])
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in sorted(data_files):
+            for ptype, field_list in sorted(ptf.items()):
+                s = self._open_stream(data_file, (ptype, "Coordinates"))
+                c = np.frombuffer(s, dtype="float64")
+                c.shape = (c.shape[0] // 3, 3)
+                mask = selector.select_points(
+                            c[:,0], c[:,1], c[:,2], 0.0)
+                del c
+                if mask is None: continue
+                for field in field_list:
+                    s = self._open_stream(data_file, (ptype, field))
+                    c = np.frombuffer(s, dtype="float64")
+                    if field in self._vector_fields:
+                        c.shape = (c.shape[0] // 3, 3)
+                    data = c[mask, ...]
+                    yield (ptype, field), data
+
+    def _initialize_index(self, data_file, regions):
+        header = self.ds.parameters
+        ptypes = header["particle_count"][data_file.file_id].keys()
+        pcount = sum(header["particle_count"][data_file.file_id].values())
+        morton = np.empty(pcount, dtype='uint64')
+        ind = 0
+        for ptype in ptypes:
+            s = self._open_stream(data_file, (ptype, "Coordinates"))
+            c = np.frombuffer(s, dtype="float64")
+            c.shape = (c.shape[0] // 3, 3)
+            regions.add_data_file(c, data_file.file_id,
+                                  data_file.ds.filter_bbox)
+            morton[ind:ind+c.shape[0]] = compute_morton(
+                c[:,0], c[:,1], c[:,2],
+                data_file.ds.domain_left_edge,
+                data_file.ds.domain_right_edge,
+                data_file.ds.filter_bbox)
+            ind += c.shape[0]
+        return morton
+
+    def _count_particles(self, data_file):
+        return self.ds.parameters["particle_count"][data_file.file_id]

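The payload handling in _read_particle_coords and _initialize_index reduces to reinterpreting the response body as float64 triples; a small self-contained sketch, with a byte string standing in for resp.content:

    import numpy as np

    payload = np.arange(6, dtype="float64").tobytes()  # stand-in for resp.content
    c = np.frombuffer(payload, dtype="float64").reshape(-1, 3)
    x, y, z = c[:, 0], c[:, 1], c[:, 2]  # the (2, 3) array yields two particles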

https://bitbucket.org/yt_analysis/yt/commits/8f80d9fec926/
Changeset:   8f80d9fec926
Branch:      yt
User:        brittonsmith
Date:        2014-10-22 15:29:07+00:00
Summary:     Reverting Gadget frontend to using SPHFieldInfo instead of a gadget-specific one.
Affected #:  2 files

diff -r 13b59b4eae6bf5448c7141bb795f3756d0627035 -r 8f80d9fec9269357c2cf4490bd624c0a47d636a6 yt/frontends/sph/gadget/data_structures.py
--- a/yt/frontends/sph/gadget/data_structures.py
+++ b/yt/frontends/sph/gadget/data_structures.py
@@ -24,6 +24,8 @@
     ParticleFile
 from yt.frontends.sph.data_structures import \
     ParticleDataset
+from yt.frontends.sph.fields import \
+    SPHFieldInfo
 from yt.geometry.particle_geometry_handler import \
     ParticleIndex
 from yt.utilities.cosmology import \
@@ -33,8 +35,6 @@
 from yt.utilities.fortran_utils import read_record
 from yt.utilities.logger import ytLogger as mylog
 
-from .fields import \
-    GadgetFieldInfo
 from .definitions import \
     gadget_header_specs, \
     gadget_field_specs, \
@@ -63,7 +63,7 @@
 class GadgetDataset(ParticleDataset):
     _index_class = ParticleIndex
     _file_class = GadgetBinaryFile
-    _field_info_class = GadgetFieldInfo
+    _field_info_class = SPHFieldInfo
     _particle_mass_name = "Mass"
     _particle_coordinates_name = "Coordinates"
     _particle_velocity_name = "Velocities"
@@ -242,7 +242,7 @@
 
 class GadgetHDF5Dataset(GadgetDataset):
     _file_class = ParticleFile
-    _field_info_class = GadgetFieldInfo
+    _field_info_class = SPHFieldInfo
     _particle_mass_name = "Masses"
     _suffix = ".hdf5"
 

diff -r 13b59b4eae6bf5448c7141bb795f3756d0627035 -r 8f80d9fec9269357c2cf4490bd624c0a47d636a6 yt/frontends/sph/gadget/fields.py
--- a/yt/frontends/sph/gadget/fields.py
+++ /dev/null
@@ -1,69 +0,0 @@
-"""
-Gadget fields
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2014, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-import os
-import numpy as np
-
-from yt.fields.field_info_container import \
-    FieldInfoContainer
-
-from yt.fields.species_fields import \
-    setup_species_fields
-
-class GadgetFieldInfo(FieldInfoContainer):
-    known_other_fields = ()
-
-    known_particle_fields = (
-        ("Mass", ("code_mass", ["particle_mass"], None)),
-        ("Masses", ("code_mass", ["particle_mass"], None)),
-        ("Coordinates", ("code_length", ["particle_position"], None)),
-        ("Velocity", ("code_velocity", ["particle_velocity"], None)),
-        ("Velocities", ("code_velocity", ["particle_velocity"], None)),
-        ("ParticleIDs", ("", ["particle_index"], None)),
-        ("InternalEnergy", ("", ["thermal_energy"], None)),
-        ("SmoothingLength", ("code_length", ["smoothing_length"], None)),
-        ("Density", ("code_mass / code_length**3", ["density"], None)),
-        ("MaximumTemperature", ("K", [], None)),
-        ("Temperature", ("K", ["temperature"], None)),
-        ("Epsilon", ("code_length", [], None)),
-        ("Metals", ("code_metallicity", ["metallicity"], None)),
-        ("Metallicity", ("code_metallicity", ["metallicity"], None)),
-        ("Phi", ("code_length", [], None)),
-        ("FormationTime", ("code_time", ["creation_time"], None)),
-        # These are metallicity fields that get discovered for FIRE simulations
-        ("Metallicity_00", ("", ["metallicity"], None)),
-        ("Metallicity_01", ("", ["He_fraction"], None)),
-        ("Metallicity_02", ("", ["C_fraction"], None)),
-        ("Metallicity_03", ("", ["N_fraction"], None)),
-        ("Metallicity_04", ("", ["O_fraction"], None)),
-        ("Metallicity_05", ("", ["Ne_fraction"], None)),
-        ("Metallicity_06", ("", ["Mg_fraction"], None)),
-        ("Metallicity_07", ("", ["Si_fraction"], None)),
-        ("Metallicity_08", ("", ["S_fraction"], None)),
-        ("Metallicity_09", ("", ["Ca_fraction"], None)),
-        ("Metallicity_10", ("", ["Fe_fraction"], None)),
-    )
-
-    def __init__(self, *args, **kwargs):
-        super(SPHFieldInfo, self).__init__(*args, **kwargs)
-        # Special case for FIRE
-        if ("PartType0", "Metallicity_00") in self.field_list:
-            self.species_names += ["He", "C", "N", "O", "Ne", "Mg", "Si", "S",
-                "Ca", "Fe"]
-
-    def setup_particle_fields(self, ptype, *args, **kwargs):
-        super(SPHFieldInfo, self).setup_particle_fields(ptype, *args, **kwargs)
-        setup_species_fields(self, ptype)

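Whether they live in SPHFieldInfo or in the deleted module above, known_particle_fields entries follow yt's (on-disk name, (units, [aliases], display_name)) convention, so a single tuple like

    ("Density", ("code_mass / code_length**3", ["density"], None))

both declares the on-disk units and aliases the field to the generic "density" name.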

https://bitbucket.org/yt_analysis/yt/commits/181aa9eca51c/
Changeset:   181aa9eca51c
Branch:      yt
User:        brittonsmith
Date:        2014-10-22 15:50:20+00:00
Summary:     Cleaning up definitions files.
Affected #:  4 files

diff -r 8f80d9fec9269357c2cf4490bd624c0a47d636a6 -r 181aa9eca51cdaebbb5f5f1669df6b86bdb1ddac yt/frontends/sph/definitions.py
--- a/yt/frontends/sph/definitions.py
+++ /dev/null
@@ -1,79 +0,0 @@
-
-gadget_ptypes = ("Gas", "Halo", "Disk", "Bulge", "Stars", "Bndry")
-ghdf5_ptypes  = ("PartType0", "PartType1", "PartType2", "PartType3",
-                 "PartType4", "PartType5")
-
-gadget_header_specs = dict(
-    default      = (('Npart', 6, 'i'),
-                    ('Massarr', 6, 'd'),
-                    ('Time', 1, 'd'),
-                    ('Redshift', 1, 'd'),
-                    ('FlagSfr', 1, 'i'),
-                    ('FlagFeedback', 1, 'i'),
-                    ('Nall', 6, 'i'),
-                    ('FlagCooling', 1, 'i'),
-                    ('NumFiles', 1, 'i'),
-                    ('BoxSize', 1, 'd'),
-                    ('Omega0', 1, 'd'),
-                    ('OmegaLambda', 1, 'd'),
-                    ('HubbleParam', 1, 'd'),
-                    ('FlagAge', 1, 'i'),
-                    ('FlagMEtals', 1, 'i'),
-                    ('NallHW', 6, 'i'),
-                    ('unused', 16, 'i')),
-    pad32       = (('empty',  32, 'c'),),
-    pad64       = (('empty',  64, 'c'),),
-    pad128      = (('empty', 128, 'c'),),
-    pad256      = (('empty', 256, 'c'),),
-)
-
-gadget_ptype_specs = dict(
-    default = ( "Gas",
-                "Halo",
-                "Disk",
-                "Bulge",
-                "Stars",
-                "Bndry" )
-)
-
-gadget_field_specs = dict(
-    default = ( "Coordinates",
-                "Velocities",
-                "ParticleIDs",
-                "Mass",
-                ("InternalEnergy", "Gas"),
-                ("Density", "Gas"),
-                ("SmoothingLength", "Gas"),
-    ),
-    agora_unlv = ( "Coordinates",
-                   "Velocities",
-                   "ParticleIDs",
-                   "Mass",
-                   ("InternalEnergy", "Gas"),
-                   ("Density", "Gas"),
-                   ("Electron_Number_Density", "Gas"),
-                   ("HI_NumberDensity", "Gas"),
-                   ("SmoothingLength", "Gas"),
-    )
-)
-
-
-eaglenetwork_ions = \
-    ('electron', 'H1', 'H2', 'H_m', 'He1', 'He2','He3', 'C1',\
-     'C2', 'C3', 'C4', 'C5', 'C6', 'C7', 'C_m', 'N1', 'N2', \
-     'N3', 'N4', 'N5', 'N6', 'N7', 'N8', 'O1', 'O2', 'O3', \
-     'O4', 'O5', 'O6', 'O7', 'O8', 'O9', 'O_m', 'Ne1', 'Ne2',\
-     'Ne3', 'Ne4', 'Ne5', 'Ne6', 'Ne7', 'Ne8', 'Ne9', 'Ne10',\
-     'Ne11', 'Mg1', 'Mg2', 'Mg3', 'Mg4', 'Mg5', 'Mg6', 'Mg7',\
-     'Mg8', 'Mg9', 'Mg10', 'Mg11', 'Mg12', 'Mg13', 'Si1', 'Si2',\
-     'Si3', 'Si4', 'Si5', 'Si6', 'Si7', 'Si8', 'Si9', 'Si10',\
-     'Si11', 'Si12', 'Si13', 'Si14', 'Si15', 'Si16', 'Si17',\
-     'Ca1', 'Ca2', 'Ca3', 'Ca4', 'Ca5', 'Ca6', 'Ca7', 'Ca8',\
-     'Ca9', 'Ca10', 'Ca11', 'Ca12', 'Ca13', 'Ca14', 'Ca15',\
-     'Ca16', 'Ca17', 'Ca18', 'Ca19', 'Ca20', 'Ca21', 'Fe1',\
-     'Fe2', 'Fe3', 'Fe4', 'Fe5', 'Fe6', 'Fe7', 'Fe8', 'Fe9',\
-     'Fe10', 'Fe11', 'Fe12', 'Fe13', 'Fe14', 'Fe15', 'Fe16',\
-     'Fe17', 'Fe18', 'Fe19', 'Fe20', 'Fe21', 'Fe22', 'Fe23',\
-     'Fe24', 'Fe25', 'Fe25', 'Fe27',)
-
-eaglenetwork_ion_lookup = {ion:index for index, ion in enumerate(eaglenetwork_ions)}

diff -r 8f80d9fec9269357c2cf4490bd624c0a47d636a6 -r 181aa9eca51cdaebbb5f5f1669df6b86bdb1ddac yt/frontends/sph/eagle/definitions.py
--- a/yt/frontends/sph/eagle/definitions.py
+++ b/yt/frontends/sph/eagle/definitions.py
@@ -1,62 +1,18 @@
+"""
+EAGLE definitions
 
-gadget_ptypes = ("Gas", "Halo", "Disk", "Bulge", "Stars", "Bndry")
-ghdf5_ptypes  = ("PartType0", "PartType1", "PartType2", "PartType3",
-                 "PartType4", "PartType5")
 
-gadget_header_specs = dict(
-    default      = (('Npart', 6, 'i'),
-                    ('Massarr', 6, 'd'),
-                    ('Time', 1, 'd'),
-                    ('Redshift', 1, 'd'),
-                    ('FlagSfr', 1, 'i'),
-                    ('FlagFeedback', 1, 'i'),
-                    ('Nall', 6, 'i'),
-                    ('FlagCooling', 1, 'i'),
-                    ('NumFiles', 1, 'i'),
-                    ('BoxSize', 1, 'd'),
-                    ('Omega0', 1, 'd'),
-                    ('OmegaLambda', 1, 'd'),
-                    ('HubbleParam', 1, 'd'),
-                    ('FlagAge', 1, 'i'),
-                    ('FlagMEtals', 1, 'i'),
-                    ('NallHW', 6, 'i'),
-                    ('unused', 16, 'i')),
-    pad32       = (('empty',  32, 'c'),),
-    pad64       = (('empty',  64, 'c'),),
-    pad128      = (('empty', 128, 'c'),),
-    pad256      = (('empty', 256, 'c'),),
-)
 
-gadget_ptype_specs = dict(
-    default = ( "Gas",
-                "Halo",
-                "Disk",
-                "Bulge",
-                "Stars",
-                "Bndry" )
-)
 
-gadget_field_specs = dict(
-    default = ( "Coordinates",
-                "Velocities",
-                "ParticleIDs",
-                "Mass",
-                ("InternalEnergy", "Gas"),
-                ("Density", "Gas"),
-                ("SmoothingLength", "Gas"),
-    ),
-    agora_unlv = ( "Coordinates",
-                   "Velocities",
-                   "ParticleIDs",
-                   "Mass",
-                   ("InternalEnergy", "Gas"),
-                   ("Density", "Gas"),
-                   ("Electron_Number_Density", "Gas"),
-                   ("HI_NumberDensity", "Gas"),
-                   ("SmoothingLength", "Gas"),
-    )
-)
+"""
 
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
 
 eaglenetwork_ions = \
     ('electron', 'H1', 'H2', 'H_m', 'He1', 'He2','He3', 'C1',\

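Below this hunk the file presumably still ends with the eaglenetwork_ion_lookup table, which inverts the ion tuple into a name-to-index map; a trimmed sketch:

    ions = ('electron', 'H1', 'H2')  # first three entries only
    ion_lookup = {ion: i for i, ion in enumerate(ions)}
    print(ion_lookup['H2'])          # -> 2
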
diff -r 8f80d9fec9269357c2cf4490bd624c0a47d636a6 -r 181aa9eca51cdaebbb5f5f1669df6b86bdb1ddac yt/frontends/sph/gadget/definitions.py
--- a/yt/frontends/sph/gadget/definitions.py
+++ b/yt/frontends/sph/gadget/definitions.py
@@ -1,7 +1,18 @@
+"""
+Gadget definitions
 
-gadget_ptypes = ("Gas", "Halo", "Disk", "Bulge", "Stars", "Bndry")
-ghdf5_ptypes  = ("PartType0", "PartType1", "PartType2", "PartType3",
-                 "PartType4", "PartType5")
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
 
 gadget_header_specs = dict(
     default      = (('Npart', 6, 'i'),
@@ -56,24 +67,3 @@
                    ("SmoothingLength", "Gas"),
     )
 )
-
-
-eaglenetwork_ions = \
-    ('electron', 'H1', 'H2', 'H_m', 'He1', 'He2','He3', 'C1',\
-     'C2', 'C3', 'C4', 'C5', 'C6', 'C7', 'C_m', 'N1', 'N2', \
-     'N3', 'N4', 'N5', 'N6', 'N7', 'N8', 'O1', 'O2', 'O3', \
-     'O4', 'O5', 'O6', 'O7', 'O8', 'O9', 'O_m', 'Ne1', 'Ne2',\
-     'Ne3', 'Ne4', 'Ne5', 'Ne6', 'Ne7', 'Ne8', 'Ne9', 'Ne10',\
-     'Ne11', 'Mg1', 'Mg2', 'Mg3', 'Mg4', 'Mg5', 'Mg6', 'Mg7',\
-     'Mg8', 'Mg9', 'Mg10', 'Mg11', 'Mg12', 'Mg13', 'Si1', 'Si2',\
-     'Si3', 'Si4', 'Si5', 'Si6', 'Si7', 'Si8', 'Si9', 'Si10',\
-     'Si11', 'Si12', 'Si13', 'Si14', 'Si15', 'Si16', 'Si17',\
-     'Ca1', 'Ca2', 'Ca3', 'Ca4', 'Ca5', 'Ca6', 'Ca7', 'Ca8',\
-     'Ca9', 'Ca10', 'Ca11', 'Ca12', 'Ca13', 'Ca14', 'Ca15',\
-     'Ca16', 'Ca17', 'Ca18', 'Ca19', 'Ca20', 'Ca21', 'Fe1',\
-     'Fe2', 'Fe3', 'Fe4', 'Fe5', 'Fe6', 'Fe7', 'Fe8', 'Fe9',\
-     'Fe10', 'Fe11', 'Fe12', 'Fe13', 'Fe14', 'Fe15', 'Fe16',\
-     'Fe17', 'Fe18', 'Fe19', 'Fe20', 'Fe21', 'Fe22', 'Fe23',\
-     'Fe24', 'Fe25', 'Fe25', 'Fe27',)
-
-eaglenetwork_ion_lookup = {ion:index for index, ion in enumerate(eaglenetwork_ions)}

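These spec dictionaries are combined by name at dataset construction time; a minimal sketch of what _setup_binary_spec in data_structures.py does with a '+'-joined header_spec string:

    def setup_binary_spec(spec, spec_dict):
        # e.g. "default+pad32" -> spec_dict["default"] + spec_dict["pad32"]
        if isinstance(spec, str):
            combined = ()
            for name in spec.split("+"):
                combined += spec_dict[name]
            spec = combined
        return spec
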
diff -r 8f80d9fec9269357c2cf4490bd624c0a47d636a6 -r 181aa9eca51cdaebbb5f5f1669df6b86bdb1ddac yt/frontends/sph/owls/definitions.py
--- /dev/null
+++ b/yt/frontends/sph/owls/definitions.py
@@ -0,0 +1,18 @@
+"""
+OWLS definitions
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+ghdf5_ptypes  = ("PartType0", "PartType1", "PartType2", "PartType3",
+                 "PartType4", "PartType5")

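ghdf5_ptypes simply names the HDF5 groups by Gadget particle type number (type 0 is conventionally gas), so lookups are plain indexing:

    print(ghdf5_ptypes[0])  # -> "PartType0"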

https://bitbucket.org/yt_analysis/yt/commits/1907615dc51b/
Changeset:   1907615dc51b
Branch:      yt
User:        brittonsmith
Date:        2014-10-22 15:50:46+00:00
Summary:     Fixing import.
Affected #:  1 file

diff -r 181aa9eca51cdaebbb5f5f1669df6b86bdb1ddac -r 1907615dc51b6c26a721c9a302bdab039eacd0b8 yt/frontends/halo_catalogs/owls_subfind/data_structures.py
--- a/yt/frontends/halo_catalogs/owls_subfind/data_structures.py
+++ b/yt/frontends/halo_catalogs/owls_subfind/data_structures.py
@@ -37,7 +37,7 @@
 from yt.data_objects.static_output import \
     Dataset, \
     ParticleFile
-from yt.frontends.sph.data_structures import \
+from yt.frontends.sph.gadget.data_structures import \
     _fix_unit_ordering
 import yt.utilities.fortran_utils as fpu
 from yt.units.yt_array import \


https://bitbucket.org/yt_analysis/yt/commits/20fdd1f5ae95/
Changeset:   20fdd1f5ae95
Branch:      yt
User:        brittonsmith
Date:        2014-10-22 15:51:15+00:00
Summary:     Fixing a definition import.
Affected #:  1 file

diff -r 1907615dc51b6c26a721c9a302bdab039eacd0b8 -r 20fdd1f5ae9553512a4e52794d86eec2a4e09a9b yt/frontends/sph/owls/io.py
--- a/yt/frontends/sph/owls/io.py
+++ b/yt/frontends/sph/owls/io.py
@@ -17,12 +17,13 @@
 import h5py
 import numpy as np
 
-from yt.frontends.sph.gradget.definitions import \
-    ghdf5_ptypes
 from yt.utilities.io_handler import \
     BaseIOHandler
+from yt.utilities.lib.geometry_utils import \
+    compute_morton
 
-from yt.utilities.lib.geometry_utils import compute_morton
+from .definitions import \
+    ghdf5_ptypes
 
 CHUNKSIZE = 10000000
 


https://bitbucket.org/yt_analysis/yt/commits/6c9fa8796bfb/
Changeset:   6c9fa8796bfb
Branch:      yt
User:        brittonsmith
Date:        2014-10-22 15:52:19+00:00
Summary:     Moving general SPH frontend code into per-frontend subdirectories and removing it from the top-level modules.
Affected #:  4 files

diff -r 20fdd1f5ae9553512a4e52794d86eec2a4e09a9b -r 6c9fa8796bfb150f1c3013bbcdb41e7f56294ef2 yt/frontends/sph/api.py
--- a/yt/frontends/sph/api.py
+++ b/yt/frontends/sph/api.py
@@ -1,5 +1,5 @@
 """
-API for yt.frontends.sph
+API for SPH frontends
 
 
 
@@ -14,20 +14,18 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from .data_structures import \
-      OWLSDataset, \
-      GadgetDataset, \
-      GadgetHDF5Dataset, \
-      TipsyDataset,\
-      EagleNetworkDataset, \
-      EagleDataset
+from .eagle.api import \
+    EagleDataset, \
+    EagleNetworkDataset
 
-from .io import \
-      IOHandlerOWLS, \
-      IOHandlerGadgetBinary,\
-      IOHandlerEagleNetwork
+from .gadget.api import \
+    GadgetDataset
 
-from .fields import \
-      SPHFieldInfo, \
-      TipsyFieldInfo,\
-      EagleNetworkFieldInfo
+from .http_stream.api import \
+    HTTPStreamDataset
+
+from .owls.api import \
+    OWLSDataset
+
+from .tipsy.api import \
+    TipsyDataset

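With this layout the public names stay importable from the package API exactly as before; downstream code such as

    from yt.frontends.sph.api import GadgetDataset, TipsyDataset

keeps working, with each class now re-exported from its own subpackage.
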
diff -r 20fdd1f5ae9553512a4e52794d86eec2a4e09a9b -r 6c9fa8796bfb150f1c3013bbcdb41e7f56294ef2 yt/frontends/sph/data_structures.py
--- a/yt/frontends/sph/data_structures.py
+++ b/yt/frontends/sph/data_structures.py
@@ -1,5 +1,5 @@
 """
-Data structures for a generic SPH/Gadget frontend.
+Data structures for SPH frontends.
 
 
 
@@ -14,762 +14,10 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import h5py
-import numpy as np
-import stat
-import weakref
-import struct
-import glob
-import time
-import os
-import types
-
-import yt.units
-from yt.utilities.fortran_utils import read_record
-from yt.utilities.logger import ytLogger as mylog
-from yt.geometry.particle_geometry_handler import \
-    ParticleIndex
 from yt.data_objects.static_output import \
-    Dataset, ParticleFile
-from yt.utilities.definitions import \
-    mpc_conversion, sec_conversion
-from yt.utilities.physical_constants import \
-    G, \
-    cm_per_kpc, \
-    mass_sun_cgs
-from yt.utilities.cosmology import Cosmology
-from .fields import \
-    SPHFieldInfo, OWLSFieldInfo, TipsyFieldInfo, EagleNetworkFieldInfo
-from .definitions import \
-    gadget_header_specs, \
-    gadget_field_specs, \
-    gadget_ptype_specs
-from .io import \
-    IOHandlerTipsyBinary
-
-try:
-    import requests
-    import json
-except ImportError:
-    requests = None
-
-def _fix_unit_ordering(unit):
-    if isinstance(unit[0], types.StringTypes):
-        unit = unit[1], unit[0]
-    return unit
-
-class GadgetBinaryFile(ParticleFile):
-    def __init__(self, ds, io, filename, file_id):
-        with open(filename, "rb") as f:
-            self.header = read_record(f, ds._header_spec)
-            self._position_offset = f.tell()
-            f.seek(0, os.SEEK_END)
-            self._file_size = f.tell()
-
-        super(GadgetBinaryFile, self).__init__(ds, io, filename, file_id)
-
-    def _calculate_offsets(self, field_list):
-        self.field_offsets = self.io._calculate_field_offsets(
-            field_list, self.total_particles,
-            self._position_offset, self._file_size)
-
+    Dataset
 
 class ParticleDataset(Dataset):
     _unit_base = None
     over_refine_factor = 1
     filter_bbox = False
-
-
-class GadgetDataset(ParticleDataset):
-    _index_class = ParticleIndex
-    _file_class = GadgetBinaryFile
-    _field_info_class = SPHFieldInfo
-    _particle_mass_name = "Mass"
-    _particle_coordinates_name = "Coordinates"
-    _particle_velocity_name = "Velocities"
-    _suffix = ""
-
-    def __init__(self, filename, dataset_type="gadget_binary",
-                 additional_fields=(),
-                 unit_base=None, n_ref=64,
-                 over_refine_factor=1,
-                 bounding_box = None,
-                 header_spec = "default",
-                 field_spec = "default",
-                 ptype_spec = "default"):
-        if self._instantiated: return
-        self._header_spec = self._setup_binary_spec(
-            header_spec, gadget_header_specs)
-        self._field_spec = self._setup_binary_spec(
-            field_spec, gadget_field_specs)
-        self._ptype_spec = self._setup_binary_spec(
-            ptype_spec, gadget_ptype_specs)
-        self.n_ref = n_ref
-        self.over_refine_factor = over_refine_factor
-        self.storage_filename = None
-        if unit_base is not None and "UnitLength_in_cm" in unit_base:
-            # We assume this is comoving, because in the absence of comoving
-            # integration the redshift will be zero.
-            unit_base['cmcm'] = 1.0 / unit_base["UnitLength_in_cm"]
-        self._unit_base = unit_base
-        if bounding_box is not None:
-            bbox = np.array(bounding_box, dtype="float64")
-            if bbox.shape == (2, 3):
-                bbox = bbox.transpose()
-            self.domain_left_edge = bbox[:,0]
-            self.domain_right_edge = bbox[:,1]
-        else:
-            self.domain_left_edge = self.domain_right_edge = None
-        super(GadgetDataset, self).__init__(filename, dataset_type)
-
-    def _setup_binary_spec(self, spec, spec_dict):
-        if isinstance(spec, types.StringTypes):
-            _hs = ()
-            for hs in spec.split("+"):
-                _hs += spec_dict[hs]
-            spec = _hs
-        return spec
-
-    def __repr__(self):
-        return os.path.basename(self.parameter_filename).split(".")[0]
-
-    def _get_hvals(self):
-        # The entries in this header are capitalized and named to match Table 4
-        # in the GADGET-2 user guide.
-
-        f = open(self.parameter_filename)
-        hvals = read_record(f, self._header_spec)
-        for i in hvals:
-            if len(hvals[i]) == 1:
-                hvals[i] = hvals[i][0]
-        return hvals
-
-    def _parse_parameter_file(self):
-
-        hvals = self._get_hvals()
-
-        self.dimensionality = 3
-        self.refine_by = 2
-        self.parameters["HydroMethod"] = "sph"
-        self.unique_identifier = \
-            int(os.stat(self.parameter_filename)[stat.ST_CTIME])
-        # Set standard values
-
-        # We may have an overridden bounding box.
-        if self.domain_left_edge is None:
-            self.domain_left_edge = np.zeros(3, "float64")
-            self.domain_right_edge = np.ones(3, "float64") * hvals["BoxSize"]
-        nz = 1 << self.over_refine_factor
-        self.domain_dimensions = np.ones(3, "int32") * nz
-        self.periodicity = (True, True, True)
-
-        self.cosmological_simulation = 1
-
-        self.current_redshift = hvals["Redshift"]
-        self.omega_lambda = hvals["OmegaLambda"]
-        self.omega_matter = hvals["Omega0"]
-        self.hubble_constant = hvals["HubbleParam"]
-        # According to the Gadget manual, OmegaLambda will be zero for
-        # non-cosmological datasets.  However, it may be the case that
-        # individuals are running cosmological simulations *without* Lambda, in
-        # which case we may be doing something incorrect here.
-        # It may be possible to deduce whether ComovingIntegration is on
-        # somehow, but opinions on this vary.
-        if self.omega_lambda == 0.0:
-            mylog.info("Omega Lambda is 0.0, so we are turning off Cosmology.")
-            self.hubble_constant = 1.0  # So that scaling comes out correct
-            self.cosmological_simulation = 0
-            self.current_redshift = 0.0
-            # This may not be correct.
-            self.current_time = hvals["Time"] * sec_conversion["Gyr"]
-        else:
-            # Now we calculate our time based on the cosmology, because in
-            # ComovingIntegration hvals["Time"] will in fact be the expansion
-            # factor, not the actual integration time, so we re-calculate
-            # global time from our Cosmology.
-            cosmo = Cosmology(self.hubble_constant,
-                              self.omega_matter, self.omega_lambda)
-            self.current_time = cosmo.hubble_time(self.current_redshift)
-            mylog.info("Calculating time from %0.3e to be %0.3e seconds",
-                       hvals["Time"], self.current_time)
-        self.parameters = hvals
-
-        prefix = self.parameter_filename.split(".", 1)[0]
-
-        if hvals["NumFiles"] > 1:
-            self.filename_template = "%s.%%(num)s%s" % (prefix, self._suffix)
-        else:
-            self.filename_template = self.parameter_filename
-
-        self.file_count = hvals["NumFiles"]
-
-    def _set_code_unit_attributes(self):
-        # If no units passed in by user, set a sane default (Gadget-2 users guide).
-        if self._unit_base is None:
-            if self.cosmological_simulation == 1:
-                mylog.info("Assuming length units are in kpc/h (comoving)")
-                self._unit_base = dict(length = (1.0, "kpccm/h"))
-            else:
-                mylog.info("Assuming length units are in kpc (physical)")
-                self._unit_base = dict(length = (1.0, "kpc"))
-                
-        # If units passed in by user, decide what to do about
-        # co-moving and factors of h
-        unit_base = self._unit_base or {}
-        if "length" in unit_base:
-            length_unit = unit_base["length"]
-        elif "UnitLength_in_cm" in unit_base:
-            if self.cosmological_simulation == 0:
-                length_unit = (unit_base["UnitLength_in_cm"], "cm")
-            else:
-                length_unit = (unit_base["UnitLength_in_cm"], "cmcm/h")
-        else:
-            raise RuntimeError
-        length_unit = _fix_unit_ordering(length_unit)
-        self.length_unit = self.quan(length_unit[0], length_unit[1])
-
-        unit_base = self._unit_base or {}
-        if "velocity" in unit_base:
-            velocity_unit = unit_base["velocity"]
-        elif "UnitVelocity_in_cm_per_s" in unit_base:
-            velocity_unit = (unit_base["UnitVelocity_in_cm_per_s"], "cm/s")
-        else:
-            velocity_unit = (1e5, "cm/s")
-        velocity_unit = _fix_unit_ordering(velocity_unit)
-        self.velocity_unit = self.quan(velocity_unit[0], velocity_unit[1])
-
-        # We set hubble_constant = 1.0 for non-cosmology, so this is safe.
-        # Default to 1e10 Msun/h if mass is not specified.
-        if "mass" in unit_base:
-            mass_unit = unit_base["mass"]
-        elif "UnitMass_in_g" in unit_base:
-            if self.cosmological_simulation == 0:
-                mass_unit = (unit_base["UnitMass_in_g"], "g")
-            else:
-                mass_unit = (unit_base["UnitMass_in_g"], "g/h")
-        else:
-            # Sane default
-            mass_unit = (1.0, "1e10*Msun/h")
-        mass_unit = _fix_unit_ordering(mass_unit)
-        self.mass_unit = self.quan(mass_unit[0], mass_unit[1])
-        self.time_unit = self.length_unit / self.velocity_unit
-
-    @classmethod
-    def _is_valid(self, *args, **kwargs):
-        # We do not allow load() of these files.
-        return False
-
-
-class GadgetHDF5Dataset(GadgetDataset):
-    _file_class = ParticleFile
-    _field_info_class = SPHFieldInfo
-    _particle_mass_name = "Masses"
-    _suffix = ".hdf5"
-
-    def __init__(self, filename, dataset_type="gadget_hdf5", 
-                 unit_base = None, n_ref=64,
-                 over_refine_factor=1,
-                 bounding_box = None):
-        self.storage_filename = None
-        filename = os.path.abspath(filename)
-        super(GadgetHDF5Dataset, self).__init__(
-            filename, dataset_type, unit_base=unit_base, n_ref=n_ref,
-            over_refine_factor=over_refine_factor,
-            bounding_box = bounding_box)
-
-    def _get_hvals(self):
-        handle = h5py.File(self.parameter_filename, mode="r")
-        hvals = {}
-        hvals.update((str(k), v) for k, v in handle["/Header"].attrs.items())
-        # Compat reasons.
-        hvals["NumFiles"] = hvals["NumFilesPerSnapshot"]
-        hvals["Massarr"] = hvals["MassTable"]
-        handle.close()
-        return hvals
-
-    def _get_uvals(self):
-        handle = h5py.File(self.parameter_filename, mode="r")
-        uvals = {}
-        uvals.update((str(k), v) for k, v in handle["/Units"].attrs.items())
-        handle.close()
-        return uvals
-
-
-
-    def _set_owls_eagle(self):
-
-        self.dimensionality = 3
-        self.refine_by = 2
-        self.parameters["HydroMethod"] = "sph"
-        self.unique_identifier = \
-            int(os.stat(self.parameter_filename)[stat.ST_CTIME])
-
-        self._unit_base = self._get_uvals()
-        self._unit_base['cmcm'] = 1.0 / self._unit_base["UnitLength_in_cm"]
-
-        self.current_redshift = self.parameters["Redshift"]
-        self.omega_lambda = self.parameters["OmegaLambda"]
-        self.omega_matter = self.parameters["Omega0"]
-        self.hubble_constant = self.parameters["HubbleParam"]
-
-        if self.domain_left_edge is None:
-            self.domain_left_edge = np.zeros(3, "float64")
-            self.domain_right_edge = np.ones(3, "float64") * self.parameters["BoxSize"]
-
-        nz = 1 << self.over_refine_factor
-        self.domain_dimensions = np.ones(3, "int32") * nz
-
-        self.cosmological_simulation = 1
-        self.periodicity = (True, True, True)
-
-        prefix = os.path.abspath(self.parameter_filename.split(".", 1)[0])
-        suffix = self.parameter_filename.rsplit(".", 1)[-1]
-        self.filename_template = "%s.%%(num)i.%s" % (prefix, suffix)
-        self.file_count = self.parameters["NumFilesPerSnapshot"]
-
-
-    def _set_owls_eagle_units(self):
-
-        # note the contents of the HDF5 Units group are in _unit_base 
-        # note the velocity stored on disk is sqrt(a) dx/dt 
-        self.length_unit = self.quan( self._unit_base["UnitLength_in_cm"], 'cmcm/h' )
-        self.mass_unit = self.quan( self._unit_base["UnitMass_in_g"], 'g/h' )
-        self.velocity_unit = self.quan( self._unit_base["UnitVelocity_in_cm_per_s"], 'cm/s' )
-        self.time_unit = self.quan( self._unit_base["UnitTime_in_s"], 's/h' )
-
-        
-
-    @classmethod
-    def _is_valid(self, *args, **kwargs):
-        try:
-            fileh = h5py.File(args[0], mode='r')
-            if "Constants" not in fileh["/"].keys() and \
-               "Header" in fileh["/"].keys():
-                fileh.close()
-                return True
-            fileh.close()
-        except:
-            pass
-        return False
-
-class OWLSDataset(GadgetHDF5Dataset):
-    _particle_mass_name = "Mass"
-    _field_info_class = OWLSFieldInfo
-    _time_readin = "Time_GYR"
-
-
-    def _parse_parameter_file(self):
-
-        # read values from header
-        hvals = self._get_hvals()
-        self.parameters = hvals
-
-        # set features common to OWLS and Eagle
-        self._set_owls_eagle()
-
-        # Set time from value in header
-        self.current_time = hvals[self._time_readin] * \
-                            sec_conversion["Gyr"] * yt.units.s
-
-
-    def _set_code_unit_attributes(self):
-        self._set_owls_eagle_units()
-
-
-    @classmethod
-    def _is_valid(self, *args, **kwargs):
-        need_groups = ['Constants', 'Header', 'Parameters', 'Units']
-        veto_groups = ['SUBFIND', 'FOF',
-                       'PartType0/ChemistryAbundances', 
-                       'PartType0/ChemicalAbundances',
-                       'RuntimePars', 'HashTable']
-        valid = True
-        try:
-            fileh = h5py.File(args[0], mode='r')
-            for ng in need_groups:
-                if ng not in fileh["/"]:
-                    valid = False
-            for vg in veto_groups:
-                if vg in fileh["/"]:
-                    valid = False                    
-            fileh.close()
-        except:
-            valid = False
-            pass
-        return valid
-
-
-class EagleDataset(GadgetHDF5Dataset):
-    _particle_mass_name = "Mass"
-    _field_info_class = OWLSFieldInfo
-    _time_readin_ = 'Time'
-
-    def _parse_parameter_file(self):
-
-        # read values from header
-        hvals = self._get_hvals()
-        self.parameters = hvals
-
-        # set features common to OWLS and Eagle
-        self._set_owls_eagle()
-
-        # Set time from analytic solution for flat LCDM universe
-        a = hvals['ExpansionFactor']
-        H0 = hvals['H(z)'] / hvals['E(z)']
-        a_eq = ( self.omega_matter / self.omega_lambda )**(1./3)
-        t1 = 2.0 / ( 3.0 * np.sqrt( self.omega_lambda ) )
-        t2 = (a/a_eq)**(3./2)
-        t3 = np.sqrt( 1.0 + (a/a_eq)**3 )
-        t = t1 * np.log( t2 + t3 ) / H0
-        self.current_time = t * yt.units.s
-
-
-    def _set_code_unit_attributes(self):
-        self._set_owls_eagle_units()
-
-
-    @classmethod
-    def _is_valid(self, *args, **kwargs):
-        need_groups = ['Config', 'Constants', 'HashTable', 'Header', 
-                       'Parameters', 'RuntimePars', 'Units']
-        veto_groups = ['SUBFIND',
-                       'PartType0/ChemistryAbundances', 
-                       'PartType0/ChemicalAbundances']
-        valid = True
-        try:
-            fileh = h5py.File(args[0], mode='r')
-            for ng in need_groups:
-                if ng not in fileh["/"]:
-                    valid = False
-            for vg in veto_groups:
-                if vg in fileh["/"]:
-                    valid = False                    
-            fileh.close()
-        except:
-            valid = False
-            pass
-        return valid
-
-
-class EagleNetworkDataset(EagleDataset):
-    _particle_mass_name = "Mass"
-    _field_info_class = EagleNetworkFieldInfo
-    _time_readin = 'Time'
-
-    @classmethod
-    def _is_valid(self, *args, **kwargs):
-        try:
-            fileh = h5py.File(args[0], mode='r')
-            if "Constants" in fileh["/"].keys() and \
-               "Header" in fileh["/"].keys() and \
-               "SUBFIND" not in fileh["/"].keys() and \
-               ("ChemistryAbundances" in fileh["PartType0"].keys()
-                or "ChemicalAbundances" in fileh["PartType0"].keys()):
-                fileh.close()
-                return True
-            fileh.close()
-        except:
-            pass
-        return False
-
-class TipsyFile(ParticleFile):
-
-    def _calculate_offsets(self, field_list):
-        self.field_offsets = self.io._calculate_particle_offsets(self)
-
-    def __init__(self, ds, io, filename, file_id):
-        # To go above 1 domain, we need to include an indexing step in the
-        # IOHandler, rather than simply reading from a single file.
-        assert file_id == 0
-        super(TipsyFile, self).__init__(ds, io, filename, file_id)
-        io._create_dtypes(self)
-        io._update_domain(self)#Check automatically what the domain size is
-
-
-class TipsyDataset(ParticleDataset):
-    _index_class = ParticleIndex
-    _file_class = TipsyFile
-    _field_info_class = TipsyFieldInfo
-    _particle_mass_name = "Mass"
-    _particle_coordinates_name = "Coordinates"
-    _header_spec = (('time',    'd'),
-                    ('nbodies', 'i'),
-                    ('ndim',    'i'),
-                    ('nsph',    'i'),
-                    ('ndark',   'i'),
-                    ('nstar',   'i'),
-                    ('dummy',   'i'))
-
-    def __init__(self, filename, dataset_type="tipsy",
-                 field_dtypes=None,
-                 unit_base=None,
-                 parameter_file=None,
-                 cosmology_parameters=None,
-                 n_ref=64, over_refine_factor=1):
-        self.n_ref = n_ref
-        self.over_refine_factor = over_refine_factor
-        if field_dtypes is None:
-            field_dtypes = {}
-        success, self.endian = self._validate_header(filename)
-        if not success:
-            print "SOMETHING HAS GONE WRONG.  NBODIES != SUM PARTICLES."
-            print "%s != (%s == %s + %s + %s)" % (
-                self.parameters['nbodies'],
-                tot,
-                self.parameters['nsph'],
-                self.parameters['ndark'],
-                self.parameters['nstar'])
-            print "Often this can be fixed by changing the 'endian' parameter."
-            print "This defaults to '>' but may in fact be '<'."
-            raise RuntimeError
-        self.storage_filename = None
-
-        # My understanding is that dtypes are set on a field by field basis,
-        # not on a (particle type, field) basis
-        self._field_dtypes = field_dtypes
-
-        self._unit_base = unit_base or {}
-        self._cosmology_parameters = cosmology_parameters
-        if parameter_file is not None:
-            parameter_file = os.path.abspath(parameter_file)
-        self._param_file = parameter_file
-        filename = os.path.abspath(filename)
-        super(TipsyDataset, self).__init__(filename, dataset_type)
-
-    def __repr__(self):
-        return os.path.basename(self.parameter_filename)
-
-    def _parse_parameter_file(self):
-
-        # Parsing the header of the tipsy file, from this we obtain
-        # the snapshot time and particle counts.
-
-        f = open(self.parameter_filename, "rb")
-        hh = self.endian + "".join(["%s" % (b) for a, b in self._header_spec])
-        hvals = dict([(a, c) for (a, b), c in zip(self._header_spec,
-                     struct.unpack(hh, f.read(struct.calcsize(hh))))])
-        self.parameters.update(hvals)
-        self._header_offset = f.tell()
-
-        # These are always true, for now.
-        self.dimensionality = 3
-        self.refine_by = 2
-        self.parameters["HydroMethod"] = "sph"
-
-
-        self.unique_identifier = \
-            int(os.stat(self.parameter_filename)[stat.ST_CTIME])
-
-        # Read in parameter file, if available.
-        if self._param_file is None:
-            pfn = glob.glob(os.path.join(self.directory, "*.param"))
-            assert len(pfn) < 2, \
-                "More than one param file is in the data directory"
-            if pfn == []:
-                pfn = None
-            else:
-                pfn = pfn[0]
-        else:
-            pfn = self._param_file
-
-        if pfn is not None:
-            for line in (l.strip() for l in open(pfn)):
-                # skip comment lines and blank lines
-                l = line.strip()
-                if l.startswith('#') or l == '':
-                    continue
-                # parse parameters according to tipsy parameter type
-                param, val = (i.strip() for i in line.split('=', 1))
-                val = val.split('#')[0]
-                if param.startswith('n') or param.startswith('i'):
-                    val = long(val)
-                elif param.startswith('d'):
-                    val = float(val)
-                elif param.startswith('b'):
-                    val = bool(float(val))
-                self.parameters[param] = val
-
-        self.current_time = hvals["time"]
-        nz = 1 << self.over_refine_factor
-        self.domain_dimensions = np.ones(3, "int32") * nz
-        periodic = self.parameters.get('bPeriodic', True)
-        period = self.parameters.get('dPeriod', None)
-        comoving = self.parameters.get('bComove', False)
-        self.periodicity = (periodic, periodic, periodic)
-        if comoving and period is None:
-            period = 1.0
-        if periodic and period is not None:
-            # If we are periodic, that sets our domain width to either 1 or dPeriod.
-            self.domain_left_edge = np.zeros(3, "float64") - 0.5*period
-            self.domain_right_edge = np.zeros(3, "float64") + 0.5*period
-        else:
-            self.domain_left_edge = None
-            self.domain_right_edge = None
-        if comoving:
-            cosm = self._cosmology_parameters or {}
-            self.scale_factor = hvals["time"]#In comoving simulations, time stores the scale factor a
-            self.cosmological_simulation = 1
-            dcosm = dict(current_redshift=(1.0/self.scale_factor)-1.0,
-                         omega_lambda=self.parameters.get('dLambda', cosm.get('omega_lambda',0.0)),
-                         omega_matter=self.parameters.get('dOmega0', cosm.get('omega_matter',0.0)),
-                         hubble_constant=self.parameters.get('dHubble0', cosm.get('hubble_constant',1.0)))
-            for param in dcosm.keys():
-                pval = dcosm[param]
-                setattr(self, param, pval)
-        else:
-            self.cosmological_simulation = 0.0
-            kpc_unit = self.parameters.get('dKpcUnit', 1.0)
-            self._unit_base['cm'] = 1.0 / (kpc_unit * cm_per_kpc)
-
-        self.filename_template = self.parameter_filename
-        self.file_count = 1
-
-        f.close()
-
-    def _set_derived_attrs(self):
-        if self.domain_left_edge is None or self.domain_right_edge is None:
-            self.domain_left_edge = np.nan
-            self.domain_right_edge = np.nan
-            self.index
-        super(TipsyDataset, self)._set_derived_attrs()
-
-    def _set_code_unit_attributes(self):
-        if self.cosmological_simulation:
-            mu = self.parameters.get('dMsolUnit', 1.)
-            lu = self.parameters.get('dKpcUnit', 1000.)
-            # In cosmological runs, lengths are stored as length*scale_factor
-            self.length_unit = self.quan(lu, 'kpc')*self.scale_factor
-            self.mass_unit = self.quan(mu, 'Msun')
-            density_unit = self.mass_unit/ (self.length_unit/self.scale_factor)**3
-            # Gasoline's hubble constant, dHubble0, is stored units of proper code time.
-            self.hubble_constant *= np.sqrt(G.in_units('kpc**3*Msun**-1*s**-2')*density_unit).value/(3.2407793e-18)  
-            cosmo = Cosmology(self.hubble_constant,
-                              self.omega_matter, self.omega_lambda)
-            self.current_time = cosmo.hubble_time(self.current_redshift)
-        else:
-            mu = self.parameters.get('dMsolUnit', 1.0)
-            self.mass_unit = self.quan(mu, 'Msun')
-            lu = self.parameters.get('dKpcUnit', 1.0)
-            self.length_unit = self.quan(lu, 'kpc')
-            density_unit = self.mass_unit / self.length_unit**3
-        self.time_unit = 1.0 / np.sqrt(G * density_unit)
-
-    @staticmethod
-    def _validate_header(filename):
-        '''
-        This method automatically detects whether the tipsy file is big/little endian
-        and is not corrupt/invalid.  It returns a tuple of (Valid, endianswap) where
-        Valid is a boolean that is true if the file is a tipsy file, and endianswap is 
-        the endianness character '>' or '<'.
-        '''
-        try:
-            f = open(filename,'rb')
-        except:
-            return False, 1
-        try:
-            f.seek(0, os.SEEK_END)
-            fs = f.tell()
-            f.seek(0, os.SEEK_SET)
-            #Read in the header
-            t, n, ndim, ng, nd, ns = struct.unpack("<diiiii", f.read(28))
-        except IOError:
-            return False, 1
-        endianswap = "<"
-        #Check Endianness
-        if (ndim < 1 or ndim > 3):
-            endianswap = ">"
-            f.seek(0)
-            t, n, ndim, ng, nd, ns = struct.unpack(">diiiii", f.read(28))
-        # File is borked if this is true.  The header is 28 bytes, and may
-        # Be followed by a 4 byte pad.  Next comes gas particles, which use
-        # 48 bytes, followed by 36 bytes per dark matter particle, and 44 bytes
-        # per star particle.  If positions are stored as doubles, each of these
-        # sizes is increased by 12 bytes.
-        if (fs != 28+48*ng+36*nd+44*ns and fs != 28+60*ng+48*nd+56*ns and
-                fs != 32+48*ng+36*nd+44*ns and fs != 32+60*ng+48*nd+56*ns):
-            f.close()
-            return False, 0
-        f.close()
-        return True, endianswap
-
-    @classmethod
-    def _is_valid(self, *args, **kwargs):
-        return TipsyDataset._validate_header(args[0])[0]
-
-class HTTPParticleFile(ParticleFile):
-    pass
-
-class HTTPStreamDataset(ParticleDataset):
-    _index_class = ParticleIndex
-    _file_class = HTTPParticleFile
-    _field_info_class = SPHFieldInfo
-    _particle_mass_name = "Mass"
-    _particle_coordinates_name = "Coordinates"
-    _particle_velocity_name = "Velocities"
-    filename_template = ""
-    
-    def __init__(self, base_url,
-                 dataset_type = "http_particle_stream",
-                 n_ref = 64, over_refine_factor=1):
-        if requests is None:
-            raise RuntimeError
-        self.base_url = base_url
-        self.n_ref = n_ref
-        self.over_refine_factor = over_refine_factor
-        super(HTTPStreamDataset, self).__init__("", dataset_type)
-
-    def __repr__(self):
-        return self.base_url
-
-    def _parse_parameter_file(self):
-        self.dimensionality = 3
-        self.refine_by = 2
-        self.parameters["HydroMethod"] = "sph"
-
-        # Here's where we're going to grab the JSON index file
-        hreq = requests.get(self.base_url + "/yt_index.json")
-        if hreq.status_code != 200:
-            raise RuntimeError
-        header = json.loads(hreq.content)
-        header['particle_count'] = dict((int(k), header['particle_count'][k])
-            for k in header['particle_count'])
-        self.parameters = header
-
-        # Now we get what we need
-        self.domain_left_edge = np.array(header['domain_left_edge'], "float64")
-        self.domain_right_edge = np.array(header['domain_right_edge'], "float64")
-        nz = 1 << self.over_refine_factor
-        self.domain_dimensions = np.ones(3, "int32") * nz
-        self.periodicity = (True, True, True)
-
-        self.current_time = header['current_time']
-        self.unique_identifier = header.get("unique_identifier", time.time())
-        self.cosmological_simulation = int(header['cosmological_simulation'])
-        for attr in ('current_redshift', 'omega_lambda', 'omega_matter',
-                     'hubble_constant'):
-            setattr(self, attr, float(header[attr]))
-
-        self.file_count = header['num_files']
-
-    def _set_units(self):
-        length_unit = float(self.parameters['units']['length'])
-        time_unit = float(self.parameters['units']['time'])
-        mass_unit = float(self.parameters['units']['mass'])
-        density_unit = mass_unit / length_unit ** 3
-        velocity_unit = length_unit / time_unit
-        self._unit_base = {}
-        self._unit_base['cm'] = 1.0/length_unit
-        self._unit_base['s'] = 1.0/time_unit
-        super(HTTPStreamDataset, self)._set_units()
-        self.conversion_factors["velocity"] = velocity_unit
-        self.conversion_factors["mass"] = mass_unit
-        self.conversion_factors["density"] = density_unit
-
-    @classmethod
-    def _is_valid(self, *args, **kwargs):
-        if not args[0].startswith("http://"):
-            return False
-        hreq = requests.get(args[0] + "/yt_index.json")
-        if hreq.status_code == 200:
-            return True
-        return False

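ParticleDataset is now reduced to the generic knobs (_unit_base, over_refine_factor, filter_bbox); over_refine_factor sets the effective root grid by bit shift, as in the _parse_parameter_file implementations above:

    import numpy as np

    over_refine_factor = 1
    nz = 1 << over_refine_factor                  # 2**over_refine_factor
    domain_dimensions = np.ones(3, "int32") * nz  # array([2, 2, 2])
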
diff -r 20fdd1f5ae9553512a4e52794d86eec2a4e09a9b -r 6c9fa8796bfb150f1c3013bbcdb41e7f56294ef2 yt/frontends/sph/fields.py
--- a/yt/frontends/sph/fields.py
+++ b/yt/frontends/sph/fields.py
@@ -1,5 +1,5 @@
 """
-OWLS-specific fields
+SPH fields
 
 
 
@@ -14,40 +14,11 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import os
-import numpy as np
-import owls_ion_tables as oit
-
-from yt.funcs import *
-
 from yt.fields.field_info_container import \
     FieldInfoContainer
-
-from .definitions import \
-    gadget_ptypes, \
-    ghdf5_ptypes,\
-    eaglenetwork_ion_lookup
-
-from yt.units.yt_array import YTQuantity
-from yt.config import ytcfg
-from yt.utilities.physical_constants import mh
-from yt.utilities.periodic_table import periodic_table
 from yt.fields.species_fields import \
-    add_species_field_by_fraction, \
-    add_species_field_by_density, \
     setup_species_fields
 
-from yt.fields.particle_fields import \
-    add_volume_weighted_smoothed_field
-
-
-# Here are helper functions for things like vector fields and so on.
-
-def _get_conv(cf):
-    def _convert(data):
-        return data.convert(cf)
-    return _convert
-
 class SPHFieldInfo(FieldInfoContainer):
     known_other_fields = ()
 
@@ -92,379 +63,3 @@
     def setup_particle_fields(self, ptype, *args, **kwargs):
         super(SPHFieldInfo, self).setup_particle_fields(ptype, *args, **kwargs)
         setup_species_fields(self, ptype)
-
-class TipsyFieldInfo(SPHFieldInfo):
-    aux_particle_fields = {
-        'uDotFB':("uDotFB", ("code_mass * code_velocity**2", ["uDotFB"], None)),
-        'uDotAV':("uDotAV", ("code_mass * code_velocity**2", ["uDotAV"], None)),
-        'uDotPdV':("uDotPdV", ("code_mass * code_velocity**2", ["uDotPdV"], None)),
-        'uDotHydro':("uDotHydro", ("code_mass * code_velocity**2", ["uDotHydro"], None)),
-        'uDotDiff':("uDotDiff", ("code_mass * code_velocity**2", ["uDotDiff"], None)),
-        'uDot':("uDot", ("code_mass * code_velocity**2", ["uDot"], None)),
-        'coolontime':("coolontime", ("code_time", ["coolontime"], None)),
-        'timeform':("timeform", ("code_time", ["timeform"], None)),
-        'massform':("massform", ("code_mass", ["massform"], None)),
-        'HI':("HI", ("dimensionless", ["HI"], None)),
-        'HII':("HII", ("dimensionless", ["HII"], None)),
-        'HeI':("HeI", ("dimensionless", ["HeI"], None)),
-        'HeII':("HeII", ("dimensionless", ["HeII"], None)),
-        'OxMassFrac':("OxMassFrac", ("dimensionless", ["OxMassFrac"], None)),
-        'FeMassFrac':("FeMassFrac", ("dimensionless", ["FeMassFrac"], None)),
-        'c':("c", ("code_velocity", ["c"], None)),
-        'acc':("acc", ("code_velocity / code_time", ["acc"], None)),
-        'accg':("accg", ("code_velocity / code_time", ["accg"], None))}
-    
-    def __init__(self, ds, field_list, slice_info = None):
-        for field in field_list:
-            if field[1] in self.aux_particle_fields.keys() and \
-                self.aux_particle_fields[field[1]] not in self.known_particle_fields:
-                self.known_particle_fields += (self.aux_particle_fields[field[1]],)
-        super(TipsyFieldInfo,self).__init__(ds, field_list, slice_info)
-
-
-        
-
-class OWLSFieldInfo(SPHFieldInfo):
-
-    _ions = ("c1", "c2", "c3", "c4", "c5", "c6",
-             "fe2", "fe17", "h1", "he1", "he2", "mg1", "mg2", "n2", 
-             "n3", "n4", "n5", "n6", "n7", "ne8", "ne9", "ne10", "o1", 
-             "o6", "o7", "o8", "si2", "si3", "si4", "si13")
-
-    _elements = ("H", "He", "C", "N", "O", "Ne", "Mg", "Si", "Fe")
-
-    _num_neighbors = 48
-
-    _add_elements = ("PartType0", "PartType4")
-
-    _add_ions = ("PartType0")
-
-
-    def __init__(self, *args, **kwargs):
-        
-        new_particle_fields = (
-            ("Hydrogen", ("", ["H_fraction"], None)),
-            ("Helium", ("", ["He_fraction"], None)),
-            ("Carbon", ("", ["C_fraction"], None)),
-            ("Nitrogen", ("", ["N_fraction"], None)),
-            ("Oxygen", ("", ["O_fraction"], None)),
-            ("Neon", ("", ["Ne_fraction"], None)),
-            ("Magnesium", ("", ["Mg_fraction"], None)),
-            ("Silicon", ("", ["Si_fraction"], None)),
-            ("Iron", ("", ["Fe_fraction"], None))
-            )
-
-        self.known_particle_fields += new_particle_fields
-        
-        super(OWLSFieldInfo,self).__init__( *args, **kwargs )
-
-
-
-    def setup_particle_fields(self, ptype):
-        """ additional particle fields derived from those in snapshot.
-        we also need to add the smoothed fields here b/c setup_fluid_fields
-        is called before setup_particle_fields. """ 
-
-        smoothed_suffixes = ("_number_density", "_density", "_mass")
-
-
-
-        # we add particle element fields for stars and gas
-        #-----------------------------------------------------
-        if ptype in self._add_elements:
-
-
-            # this adds the particle element fields
-            # X_density, X_mass, and X_number_density
-            # where X is an item of self._elements.
-            # X_fraction are defined in snapshot
-            #-----------------------------------------------
-            for s in self._elements:
-                add_species_field_by_fraction(self, ptype, s,
-                                              particle_type=True)
-
-        # this needs to be called after the call to 
-        # add_species_field_by_fraction for some reason ...
-        # not sure why yet. 
-        #-------------------------------------------------------
-        if ptype == 'PartType0':
-            ftype='gas'
-        elif ptype == 'PartType1':
-            ftype='dm'
-        elif ptype == 'PartType2':
-            ftype='PartType2'
-        elif ptype == 'PartType3':
-            ftype='PartType3'
-        elif ptype == 'PartType4':
-            ftype='star'
-        elif ptype == 'PartType5':
-            ftype='BH'
-        elif ptype == 'all':
-            ftype='all'
-        
-        super(OWLSFieldInfo,self).setup_particle_fields(
-            ptype, num_neighbors=self._num_neighbors, ftype=ftype)
-
-
-        # and now we add the smoothed versions for PartType0
-        #-----------------------------------------------------
-        if ptype == 'PartType0':
-
-            loaded = []
-            for s in self._elements:
-                for sfx in smoothed_suffixes:
-                    fname = s + sfx
-                    fn = add_volume_weighted_smoothed_field( 
-                        ptype, "particle_position", "particle_mass",
-                        "smoothing_length", "density", fname, self,
-                        self._num_neighbors)
-                    loaded += fn
-
-                    self.alias(("gas", fname), fn[0])
-
-            self._show_field_errors += loaded
-            self.find_dependencies(loaded)
-
-
-            # we only add ion fields for gas.  this takes some 
-            # time as the ion abundances have to be interpolated
-            # from cloudy tables (optically thin)
-            #-----------------------------------------------------
-    
-
-            # this defines the ion density on particles
-            # X_density for all items in self._ions
-            #-----------------------------------------------
-            self.setup_gas_ion_density_particle_fields( ptype )
-
-            # this adds the rest of the ion particle fields
-            # X_fraction, X_mass, X_number_density
-            #-----------------------------------------------
-            for ion in self._ions:
-
-                # construct yt name for ion
-                #---------------------------------------------------
-                if ion[0:2].isalpha():
-                    symbol = ion[0:2].capitalize()
-                    roman = int(ion[2:])
-                else:
-                    symbol = ion[0:1].capitalize()
-                    roman = int(ion[1:])
-
-                pstr = "_p" + str(roman-1)
-                yt_ion = symbol + pstr
-
-                # add particle field
-                #---------------------------------------------------
-                add_species_field_by_density(self, ptype, yt_ion,
-                                             particle_type=True)
-
-
-            # add smoothed ion fields
-            #-----------------------------------------------
-            for ion in self._ions:
-
-                # construct yt name for ion
-                #---------------------------------------------------
-                if ion[0:2].isalpha():
-                    symbol = ion[0:2].capitalize()
-                    roman = int(ion[2:])
-                else:
-                    symbol = ion[0:1].capitalize()
-                    roman = int(ion[1:])
-
-                pstr = "_p" + str(roman-1)
-                yt_ion = symbol + pstr
-
-                loaded = []
-                for sfx in smoothed_suffixes:
-                    fname = yt_ion + sfx
-                    fn = add_volume_weighted_smoothed_field( 
-                        ptype, "particle_position", "particle_mass",
-                        "smoothing_length", "density", fname, self,
-                        self._num_neighbors)
-                    loaded += fn
-
-                    self.alias(("gas", fname), fn[0])
-
-                self._show_field_errors += loaded
-                self.find_dependencies(loaded)
-
-
-
-    def setup_gas_ion_density_particle_fields( self, ptype ):
-        """ Sets up particle fields for gas ion densities. """ 
-
-        # loop over all ions and make fields
-        #----------------------------------------------
-        for ion in self._ions:
-
-            # construct yt name for ion
-            #---------------------------------------------------
-            if ion[0:2].isalpha():
-                symbol = ion[0:2].capitalize()
-                roman = int(ion[2:])
-            else:
-                symbol = ion[0:1].capitalize()
-                roman = int(ion[1:])
-
-            pstr = "_p" + str(roman-1)
-            yt_ion = symbol + pstr
-            ftype = ptype
-
-            # add ion density field for particles
-            #---------------------------------------------------
-            fname = yt_ion + '_density'
-            dens_func = self._create_ion_density_func( ftype, ion )
-            self.add_field( (ftype, fname),
-                            function = dens_func, 
-                            units="g/cm**3",
-                            particle_type=True )            
-            self._show_field_errors.append( (ftype,fname) )
-
-
-
-        
-    def _create_ion_density_func( self, ftype, ion ):
-        """ returns a function that calculates the ion density of a particle. 
-        """ 
-
-        def _ion_density(field, data):
-
-            # get element symbol from ion string. ion string will 
-            # be a member of the tuple _ions (i.e. si13)
-            #--------------------------------------------------------
-            if ion[0:2].isalpha():
-                symbol = ion[0:2].capitalize()
-            else:
-                symbol = ion[0:1].capitalize()
-
-            # mass fraction for the element
-            #--------------------------------------------------------
-            m_frac = data[ftype, symbol+"_fraction"]
-
-            # get nH and T for lookup
-            #--------------------------------------------------------
-            log_nH = np.log10( data["PartType0", "H_number_density"] )
-            log_T = np.log10( data["PartType0", "Temperature"] )
-
-            # get name of owls_ion_file for given ion
-            #--------------------------------------------------------
-            owls_ion_path = self._get_owls_ion_data_dir()
-            fname = os.path.join( owls_ion_path, ion+".hdf5" )
-
-            # create ionization table for this redshift
-            #--------------------------------------------------------
-            itab = oit.IonTableOWLS( fname )
-            itab.set_iz( data.ds.current_redshift )
-
-            # find ion balance using log nH and log T
-            #--------------------------------------------------------
-            i_frac = itab.interp( log_nH, log_T )
-            return data[ftype,"Density"] * m_frac * i_frac 
-        
-        return _ion_density
-
-
-
-
-
-    # this function sets up the X_mass, X_density, X_fraction, and
-    # X_number_density fields where X is the name of an OWLS element.
-    #-------------------------------------------------------------
-    def setup_fluid_fields(self):
-
-        return
-
-
-
-    # this function returns the owls_ion_data directory. if it doesn't
-    # exist it will download the data from http://yt-project.org/data
-    #-------------------------------------------------------------
-    def _get_owls_ion_data_dir(self):
-
-        txt = "Attempting to download ~ 30 Mb of owls ion data from %s to %s."
-        data_file = "owls_ion_data.tar.gz"
-        data_url = "http://yt-project.org/data"
-
-        # get test_data_dir from yt config (ytcfg)
-        #----------------------------------------------
-        tdir = ytcfg.get("yt","test_data_dir")
-
-        # set download destination to tdir or ./ if tdir isn't defined
-        #----------------------------------------------
-        if tdir == "/does/not/exist":
-            data_dir = "./"
-        else:
-            data_dir = tdir            
-
-
-        # check for owls_ion_data directory in data_dir
-        # if not there download the tarball and untar it
-        #----------------------------------------------
-        owls_ion_path = os.path.join( data_dir, "owls_ion_data" )
-
-        if not os.path.exists(owls_ion_path):
-            mylog.info(txt % (data_url, data_dir))                    
-            fname = data_dir + "/" + data_file
-            fn = download_file(os.path.join(data_url, data_file), fname)
-
-            cmnd = "cd " + data_dir + "; " + "tar xf " + data_file
-            os.system(cmnd)
-
-
-        if not os.path.exists(owls_ion_path):
-            raise RuntimeError, "Failed to download owls ion data."
-
-        return owls_ion_path
-
-
-class EagleNetworkFieldInfo(OWLSFieldInfo):
-
-    _ions = \
-        ('H1', 'H2', 'He1', 'He2','He3', 'C1',\
-         'C2', 'C3', 'C4', 'C5', 'C6', 'C7', 'N1', 'N2', \
-         'N3', 'N4', 'N5', 'N6', 'N7', 'N8', 'O1', 'O2', 'O3', \
-         'O4', 'O5', 'O6', 'O7', 'O8', 'O9', 'Ne1', 'Ne2',\
-         'Ne3', 'Ne4', 'Ne5', 'Ne6', 'Ne7', 'Ne8', 'Ne9', 'Ne10',\
-         'Ne11', 'Mg1', 'Mg2', 'Mg3', 'Mg4', 'Mg5', 'Mg6', 'Mg7',\
-         'Mg8', 'Mg9', 'Mg10', 'Mg11', 'Mg12', 'Mg13', 'Si1', 'Si2',\
-         'Si3', 'Si4', 'Si5', 'Si6', 'Si7', 'Si8', 'Si9', 'Si10',\
-         'Si11', 'Si12', 'Si13', 'Si14', 'Si15', 'Si16', 'Si17',\
-         'Ca1', 'Ca2', 'Ca3', 'Ca4', 'Ca5', 'Ca6', 'Ca7', 'Ca8',\
-         'Ca9', 'Ca10', 'Ca11', 'Ca12', 'Ca13', 'Ca14', 'Ca15',\
-         'Ca16', 'Ca17', 'Ca18', 'Ca19', 'Ca20', 'Ca21', 'Fe1',\
-         'Fe2', 'Fe3', 'Fe4', 'Fe5', 'Fe6', 'Fe7', 'Fe8', 'Fe9',\
-         'Fe10', 'Fe11', 'Fe12', 'Fe13', 'Fe14', 'Fe15', 'Fe16',\
-         'Fe17', 'Fe18', 'Fe19', 'Fe20', 'Fe21', 'Fe22', 'Fe23',\
-         'Fe24', 'Fe25', 'Fe26', 'Fe27',)
-
-    def __init__(self, *args, **kwargs):
-        
-        super(EagleNetworkFieldInfo,self).__init__( *args, **kwargs )
-        
-    def _create_ion_density_func( self, ftype, ion ):
-        """ returns a function that calculates the ion density of a particle. 
-        """ 
-
-        def _ion_density(field, data):
-
-            # Lookup the index of the ion 
-            index = eaglenetwork_ion_lookup[ion] 
-
-            # Ion to hydrogen number density ratio
-            ion_chem = data[ftype, "Chemistry_%03i"%index]
-
-            # Mass of a single ion
-            if ion[0:2].isalpha():
-                symbol = ion[0:2].capitalize()
-            else:
-                symbol = ion[0:1].capitalize()
-            m_ion = YTQuantity(periodic_table.elements_by_symbol[symbol].weight, 'amu')
-
-            # hydrogen number density 
-            n_H = data["PartType0", "H_number_density"] 
-
-            return m_ion*ion_chem*n_H 
-        
-        return _ion_density

This diff is so big that we needed to truncate the remainder.
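
The ion-name translation that appears three times in the truncated OWLS
field code above maps a snapshot ion tag such as "si13" (element symbol
plus ionization stage) onto yt's "_pN" charge-state suffix, where N is the
stage minus one. A minimal standalone sketch of that parsing (the helper
name is hypothetical):

    def yt_ion_name(ion):
        # "si13" -> symbol "Si", stage 13 -> "Si_p12"; "o6" -> "O_p5"
        if ion[:2].isalpha():
            symbol, stage = ion[:2].capitalize(), int(ion[2:])
        else:
            symbol, stage = ion[:1].capitalize(), int(ion[1:])
        return symbol + "_p" + str(stage - 1)

    assert yt_ion_name("si13") == "Si_p12"
    assert yt_ion_name("o6") == "O_p5"

Factoring the triplicated parsing block into one such helper would be a
natural follow-up cleanup.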

https://bitbucket.org/yt_analysis/yt/commits/fd90835256b8/
Changeset:   fd90835256b8
Branch:      yt
User:        brittonsmith
Date:        2014-10-22 15:52:57+00:00
Summary:     Removing empty file.
Affected #:  1 file



https://bitbucket.org/yt_analysis/yt/commits/7fbbf06dad8b/
Changeset:   7fbbf06dad8b
Branch:      yt
User:        brittonsmith
Date:        2014-10-22 15:57:06+00:00
Summary:     Moving tests.
Affected #:  7 files

diff -r fd90835256b86ff68fd52f2913db01cc0c0b2c56 -r 7fbbf06dad8bd50e337b3a764843d6827bb41c05 yt/frontends/sph/owls/tests/test_owls.py
--- /dev/null
+++ b/yt/frontends/sph/owls/tests/test_owls.py
@@ -0,0 +1,60 @@
+"""
+OWLS tests using the OWLS HDF5-Gadget dataset
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.testing import *
+from yt.utilities.answer_testing.framework import \
+    requires_ds, \
+    small_patch_amr, \
+    big_patch_amr, \
+    data_dir_load, \
+    PixelizedProjectionValuesTest, \
+    FieldValuesTest, \
+    create_obj
+from yt.frontends.sph.api import OWLSDataset
+
+_fields = (("deposit", "all_density"), ("deposit", "all_count"),
+           ("deposit", "PartType0_density"),
+           ("deposit", "PartType4_density"))
+
+os33 = "snapshot_033/snap_033.0.hdf5"
+@requires_ds(os33)
+def test_snapshot_033():
+    ds = data_dir_load(os33)
+    yield assert_equal, str(ds), "snap_033"
+    dso = [ None, ("sphere", ("c", (0.1, 'unitary')))]
+    dd = ds.all_data()
+    yield assert_equal, dd["particle_position"].shape[0], 2*(128*128*128)
+    yield assert_equal, dd["particle_position"].shape[1], 3
+    tot = sum(dd[ptype,"particle_position"].shape[0]
+              for ptype in ds.particle_types if ptype != "all")
+    yield assert_equal, tot, (2*128*128*128)
+    for dobj_name in dso:
+        for field in _fields:
+            for axis in [0, 1, 2]:
+                for weight_field in [None, "density"]:
+                    yield PixelizedProjectionValuesTest(
+                        os33, axis, field, weight_field,
+                        dobj_name)
+            yield FieldValuesTest(os33, field, dobj_name)
+        dobj = create_obj(ds, dobj_name)
+        s1 = dobj["ones"].sum()
+        s2 = sum(mask.sum() for block, mask in dobj.blocks)
+        yield assert_equal, s1, s2
+
+
+@requires_file(os33)
+def test_OWLSDataset():
+    assert isinstance(data_dir_load(os33), OWLSDataset)

diff -r fd90835256b86ff68fd52f2913db01cc0c0b2c56 -r 7fbbf06dad8bd50e337b3a764843d6827bb41c05 yt/frontends/sph/tests/test_owls.py
--- a/yt/frontends/sph/tests/test_owls.py
+++ /dev/null
@@ -1,60 +0,0 @@
-"""
-OWLS tests using the OWLS HDF5-Gadget dataset
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from yt.testing import *
-from yt.utilities.answer_testing.framework import \
-    requires_ds, \
-    small_patch_amr, \
-    big_patch_amr, \
-    data_dir_load, \
-    PixelizedProjectionValuesTest, \
-    FieldValuesTest, \
-    create_obj
-from yt.frontends.sph.api import OWLSDataset
-
-_fields = (("deposit", "all_density"), ("deposit", "all_count"),
-           ("deposit", "PartType0_density"),
-           ("deposit", "PartType4_density"))
-
-os33 = "snapshot_033/snap_033.0.hdf5"
-@requires_ds(os33)
-def test_snapshot_033():
-    ds = data_dir_load(os33)
-    yield assert_equal, str(ds), "snap_033"
-    dso = [ None, ("sphere", ("c", (0.1, 'unitary')))]
-    dd = ds.all_data()
-    yield assert_equal, dd["particle_position"].shape[0], 2*(128*128*128)
-    yield assert_equal, dd["particle_position"].shape[1], 3
-    tot = sum(dd[ptype,"particle_position"].shape[0]
-              for ptype in ds.particle_types if ptype != "all")
-    yield assert_equal, tot, (2*128*128*128)
-    for dobj_name in dso:
-        for field in _fields:
-            for axis in [0, 1, 2]:
-                for weight_field in [None, "density"]:
-                    yield PixelizedProjectionValuesTest(
-                        os33, axis, field, weight_field,
-                        dobj_name)
-            yield FieldValuesTest(os33, field, dobj_name)
-        dobj = create_obj(ds, dobj_name)
-        s1 = dobj["ones"].sum()
-        s2 = sum(mask.sum() for block, mask in dobj.blocks)
-        yield assert_equal, s1, s2
-
-
-@requires_file(os33)
-def test_OWLSDataset():
-    assert isinstance(data_dir_load(os33), OWLSDataset)

diff -r fd90835256b86ff68fd52f2913db01cc0c0b2c56 -r 7fbbf06dad8bd50e337b3a764843d6827bb41c05 yt/frontends/sph/tests/test_tipsy.py
--- a/yt/frontends/sph/tests/test_tipsy.py
+++ /dev/null
@@ -1,99 +0,0 @@
-"""
-Tipsy tests using the AGORA dataset
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from yt.testing import *
-from yt.utilities.answer_testing.framework import \
-    requires_ds, \
-    small_patch_amr, \
-    big_patch_amr, \
-    data_dir_load, \
-    PixelizedProjectionValuesTest, \
-    FieldValuesTest, \
-    create_obj
-from yt.frontends.sph.api import TipsyDataset
-
-_fields = (("deposit", "all_density"),
-           ("deposit", "all_count"),
-           ("deposit", "DarkMatter_density"),
-)
-
-pkdgrav = "halo1e11_run1.00400/halo1e11_run1.00400"
-@requires_ds(pkdgrav, file_check = True)
-def test_pkdgrav():
-    cosmology_parameters = dict(current_redshift = 0.0,
-                                omega_lambda = 0.728,
-                                omega_matter = 0.272,
-                                hubble_constant = 0.702)
-    kwargs = dict(field_dtypes = {"Coordinates": "d"},
-                  cosmology_parameters = cosmology_parameters,
-                  unit_base = {'length': (1.0/60.0, "Mpccm/h")},
-                  n_ref = 64)
-    ds = data_dir_load(pkdgrav, TipsyDataset, (), kwargs)
-    yield assert_equal, str(ds), "halo1e11_run1.00400"
-    dso = [ None, ("sphere", ("c", (0.3, 'unitary')))]
-    dd = ds.all_data()
-    yield assert_equal, dd["Coordinates"].shape, (26847360, 3)
-    tot = sum(dd[ptype,"Coordinates"].shape[0]
-              for ptype in ds.particle_types if ptype != "all")
-    yield assert_equal, tot, 26847360
-    for dobj_name in dso:
-        for field in _fields:
-            for axis in [0, 1, 2]:
-                for weight_field in [None, "density"]:
-                    yield PixelizedProjectionValuesTest(
-                        ds, axis, field, weight_field,
-                        dobj_name)
-            yield FieldValuesTest(ds, field, dobj_name)
-        dobj = create_obj(ds, dobj_name)
-        s1 = dobj["ones"].sum()
-        s2 = sum(mask.sum() for block, mask in dobj.blocks)
-        yield assert_equal, s1, s2
-
-gasoline = "agora_1e11.00400/agora_1e11.00400"
-@requires_ds(gasoline, file_check = True)
-def test_gasoline():
-    cosmology_parameters = dict(current_redshift = 0.0,
-                                omega_lambda = 0.728,
-                                omega_matter = 0.272,
-                                hubble_constant = 0.702)
-    kwargs = dict(cosmology_parameters = cosmology_parameters,
-                  unit_base = {'length': (1.0/60.0, "Mpccm/h")},
-                  n_ref = 64)
-    ds = data_dir_load(gasoline, TipsyDataset, (), kwargs)
-    yield assert_equal, str(ds), "agora_1e11.00400"
-    dso = [ None, ("sphere", ("c", (0.3, 'unitary')))]
-    dd = ds.all_data()
-    yield assert_equal, dd["Coordinates"].shape, (10550576, 3)
-    tot = sum(dd[ptype,"Coordinates"].shape[0]
-              for ptype in ds.particle_types if ptype != "all")
-    yield assert_equal, tot, 10550576
-    for dobj_name in dso:
-        for field in _fields:
-            for axis in [0, 1, 2]:
-                for weight_field in [None, "density"]:
-                    yield PixelizedProjectionValuesTest(
-                        ds, axis, field, weight_field,
-                        dobj_name)
-            yield FieldValuesTest(ds, field, dobj_name)
-        dobj = create_obj(ds, dobj_name)
-        s1 = dobj["ones"].sum()
-        s2 = sum(mask.sum() for block, mask in dobj.blocks)
-        yield assert_equal, s1, s2
-
-
-@requires_file(pkdgrav)
-def test_TipsyDataset():
-    assert isinstance(data_dir_load(pkdgrav), TipsyDataset)

diff -r fd90835256b86ff68fd52f2913db01cc0c0b2c56 -r 7fbbf06dad8bd50e337b3a764843d6827bb41c05 yt/frontends/sph/tipsy/tests/test_tipsy.py
--- /dev/null
+++ b/yt/frontends/sph/tipsy/tests/test_tipsy.py
@@ -0,0 +1,99 @@
+"""
+Tipsy tests using the AGORA dataset
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.testing import *
+from yt.utilities.answer_testing.framework import \
+    requires_ds, \
+    small_patch_amr, \
+    big_patch_amr, \
+    data_dir_load, \
+    PixelizedProjectionValuesTest, \
+    FieldValuesTest, \
+    create_obj
+from yt.frontends.sph.api import TipsyDataset
+
+_fields = (("deposit", "all_density"),
+           ("deposit", "all_count"),
+           ("deposit", "DarkMatter_density"),
+)
+
+pkdgrav = "halo1e11_run1.00400/halo1e11_run1.00400"
+@requires_ds(pkdgrav, file_check = True)
+def test_pkdgrav():
+    cosmology_parameters = dict(current_redshift = 0.0,
+                                omega_lambda = 0.728,
+                                omega_matter = 0.272,
+                                hubble_constant = 0.702)
+    kwargs = dict(field_dtypes = {"Coordinates": "d"},
+                  cosmology_parameters = cosmology_parameters,
+                  unit_base = {'length': (1.0/60.0, "Mpccm/h")},
+                  n_ref = 64)
+    ds = data_dir_load(pkdgrav, TipsyDataset, (), kwargs)
+    yield assert_equal, str(ds), "halo1e11_run1.00400"
+    dso = [ None, ("sphere", ("c", (0.3, 'unitary')))]
+    dd = ds.all_data()
+    yield assert_equal, dd["Coordinates"].shape, (26847360, 3)
+    tot = sum(dd[ptype,"Coordinates"].shape[0]
+              for ptype in ds.particle_types if ptype != "all")
+    yield assert_equal, tot, 26847360
+    for dobj_name in dso:
+        for field in _fields:
+            for axis in [0, 1, 2]:
+                for weight_field in [None, "density"]:
+                    yield PixelizedProjectionValuesTest(
+                        ds, axis, field, weight_field,
+                        dobj_name)
+            yield FieldValuesTest(ds, field, dobj_name)
+        dobj = create_obj(ds, dobj_name)
+        s1 = dobj["ones"].sum()
+        s2 = sum(mask.sum() for block, mask in dobj.blocks)
+        yield assert_equal, s1, s2
+
+gasoline = "agora_1e11.00400/agora_1e11.00400"
+@requires_ds(gasoline, file_check = True)
+def test_gasoline():
+    cosmology_parameters = dict(current_redshift = 0.0,
+                                omega_lambda = 0.728,
+                                omega_matter = 0.272,
+                                hubble_constant = 0.702)
+    kwargs = dict(cosmology_parameters = cosmology_parameters,
+                  unit_base = {'length': (1.0/60.0, "Mpccm/h")},
+                  n_ref = 64)
+    ds = data_dir_load(gasoline, TipsyDataset, (), kwargs)
+    yield assert_equal, str(ds), "agora_1e11.00400"
+    dso = [ None, ("sphere", ("c", (0.3, 'unitary')))]
+    dd = ds.all_data()
+    yield assert_equal, dd["Coordinates"].shape, (10550576, 3)
+    tot = sum(dd[ptype,"Coordinates"].shape[0]
+              for ptype in ds.particle_types if ptype != "all")
+    yield assert_equal, tot, 10550576
+    for dobj_name in dso:
+        for field in _fields:
+            for axis in [0, 1, 2]:
+                for weight_field in [None, "density"]:
+                    yield PixelizedProjectionValuesTest(
+                        ds, axis, field, weight_field,
+                        dobj_name)
+            yield FieldValuesTest(ds, field, dobj_name)
+        dobj = create_obj(ds, dobj_name)
+        s1 = dobj["ones"].sum()
+        s2 = sum(mask.sum() for block, mask in dobj.blocks)
+        yield assert_equal, s1, s2
+
+
+@requires_file(pkdgrav)
+def test_TipsyDataset():
+    assert isinstance(data_dir_load(pkdgrav), TipsyDataset)
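
Both test modules above use nose's generator-test idiom: a test function
yields (callable, argument, ...) tuples and the runner executes each tuple
as an independent test case. A minimal sketch of the mechanism, independent
of yt's answer-testing framework:

    def assert_equal(a, b):
        assert a == b, "%r != %r" % (a, b)

    def test_squares():
        for n in (1, 2, 3):
            yield assert_equal, n * n, n ** 2

    # roughly what the nose runner does with a generator test:
    for case in test_squares():
        func, args = case[0], case[1:]
        func(*args)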


https://bitbucket.org/yt_analysis/yt/commits/74d79f9fd236/
Changeset:   74d79f9fd236
Branch:      yt
User:        brittonsmith
Date:        2014-10-22 15:58:55+00:00
Summary:     Merging.
Affected #:  8 files

diff -r 7fbbf06dad8bd50e337b3a764843d6827bb41c05 -r 74d79f9fd236da9d4ecd6ab09aa0da904ed873dd doc/source/reference/faq/index.rst
--- a/doc/source/reference/faq/index.rst
+++ b/doc/source/reference/faq/index.rst
@@ -214,26 +214,37 @@
 
 The plugin file is a means of modifying the available fields, quantities, data
 objects and so on without modifying the source code of yt.  The plugin file
-will be executed if it is detected, and it must be:
+will be executed if it is detected.  It must be located in a ``.yt`` folder
+in your home directory and be named ``my_plugins.py``:
 
 .. code-block:: bash
 
    $HOME/.yt/my_plugins.py
 
-The code in this file can thus add fields, add derived quantities, add
+The code in this file can add fields, define functions, define
 datatypes, and on and on.  It is executed at the bottom of ``yt.mods``, and so
-it is provided with the entire namespace available in the module ``yt.mods`` --
-which is the primary entry point to yt, and which contains most of the
-functionality of yt.  For example, if I created a plugin file containing:
+it is provided with the entire namespace available in the module ``yt.mods``.
+For example, if I created a plugin file containing:
 
 .. code-block:: python
 
    def _myfunc(field, data):
        return np.random.random(data["density"].shape)
-   add_field("SomeQuantity", function=_myfunc)
+   add_field("some_quantity", function=_myfunc, units='')
 
-then all of my data objects would have access to the field "SomeQuantity"
-despite its lack of use.
+then all of my data objects would have access to the field "some_quantity".
+Note that the units must be specified as a string; see
+:ref:`data_selection_and_fields` for more details on units and derived fields.
+
+.. note::
+
+   Since the ``my_plugins.py`` file is parsed inside of ``yt.mods``, you must import
+   yt using ``yt.mods`` to use the plugins file.  If you import using
+   ``import yt``, the plugins file will not be parsed.  You can tell that your
+   plugins file is being parsed by watching for a logging message when you
+   import yt.  Note that both the ``yt load`` and ``iyt`` command line entry
+   points invoke ``from yt.mods import *``, so the ``my_plugins.py`` file
+   will be parsed if you enter yt that way.
 
 You can also define other convenience functions in your plugin file.  For
 instance, you could define some variables or functions, and even import common
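
Putting the FAQ text above together, a complete plugin file might look like
the sketch below; add_field is injected into the execution namespace by yt
when the file is parsed (so this runs only in that context), and the
constant and helper are hypothetical examples:

    # $HOME/.yt/my_plugins.py
    import numpy as np

    def _myfunc(field, data):
        return np.random.random(data["density"].shape)

    add_field("some_quantity", function=_myfunc, units="")

    # convenience definitions for interactive sessions
    SOLAR_METALLICITY = 0.02

    def rho_squared(data_object):
        return data_object["density"] ** 2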

diff -r 7fbbf06dad8bd50e337b3a764843d6827bb41c05 -r 74d79f9fd236da9d4ecd6ab09aa0da904ed873dd yt/fields/api.py
--- a/yt/fields/api.py
+++ b/yt/fields/api.py
@@ -26,6 +26,11 @@
 from . import particle_fields
 #from . import species_fields
 from . import vector_operations
+from . import local_fields
+from . import my_plugin_fields
+
+from .local_fields import add_field, derived_field
+
 
 from .derived_field import \
     DerivedField, \
@@ -38,6 +43,3 @@
     FieldDetector
 from .field_info_container import \
     FieldInfoContainer
-
-from . import local_fields
-from .local_fields import add_field, derived_field

diff -r 7fbbf06dad8bd50e337b3a764843d6827bb41c05 -r 74d79f9fd236da9d4ecd6ab09aa0da904ed873dd yt/fields/my_plugin_fields.py
--- /dev/null
+++ b/yt/fields/my_plugin_fields.py
@@ -0,0 +1,31 @@
+"""
+This is a container for storing fields defined in the my_plugins.py file.
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+
+from .field_plugin_registry import \
+    register_field_plugin
+
+from .field_info_container import \
+    FieldInfoContainer
+
+# Empty FieldInfoContainer
+my_plugins_fields = FieldInfoContainer(None, [], None)
+
+@register_field_plugin
+def setup_my_plugins_fields(registry, ftype="gas", slice_info=None):
+    # fields end up inside this container when added via add_field in
+    # my_plugins.py. See yt.funcs.enable_plugins to see how this is set up.
+    registry.update(my_plugins_fields)
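
The @register_field_plugin decorator imported above from
yt.fields.field_plugin_registry follows a simple append-to-registry
pattern: decorated setup functions are collected and later replayed
against a field registry. Roughly, as a self-contained sketch (all names
other than the decorator's are hypothetical):

    field_plugins = []

    def register_field_plugin(func):
        field_plugins.append(func)
        return func

    @register_field_plugin
    def setup_demo_fields(registry, ftype="gas", slice_info=None):
        registry["demo_field"] = None

    # later, each registered plugin is called with a registry to fill
    registry = {}
    for plugin in field_plugins:
        plugin(registry)
    assert "demo_field" in registry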

diff -r 7fbbf06dad8bd50e337b3a764843d6827bb41c05 -r 74d79f9fd236da9d4ecd6ab09aa0da904ed873dd yt/funcs.py
--- a/yt/funcs.py
+++ b/yt/funcs.py
@@ -14,7 +14,6 @@
 #-----------------------------------------------------------------------------
 
 import time, types, signal, inspect, traceback, sys, pdb, os, re
-import time, types, signal, inspect, traceback, sys, pdb, os, re
 import contextlib
 import warnings, struct, subprocess
 import numpy as np
@@ -748,9 +747,10 @@
             SyntaxWarning, stacklevel=2)
         return cls(*args, **kwargs)
     return _func
-    
+
 def enable_plugins():
     import yt
+    from yt.fields.my_plugin_fields import my_plugins_fields
     from yt.config import ytcfg
     my_plugin_name = ytcfg.get("yt","pluginfilename")
     # We assume that it is with respect to the $HOME/.yt directory
@@ -760,7 +760,9 @@
         _fn = os.path.expanduser("~/.yt/%s" % my_plugin_name)
     if os.path.isfile(_fn):
         mylog.info("Loading plugins from %s", _fn)
-        execfile(_fn, yt.__dict__)
+        execdict = yt.__dict__.copy()
+        execdict['add_field'] = my_plugins_fields.add_field
+        execfile(_fn, execdict)
 
 def fix_unitary(u):
     if u == '1':
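
The enable_plugins change above is what routes plugin fields into
my_plugins_fields: the plugin file is executed against a copy of yt's
namespace in which the name add_field has been rebound to the container's
method. A self-contained sketch of the mechanism (exec stands in for the
Python 2 execfile, and the container class for yt's FieldInfoContainer):

    class FieldContainer(object):
        def __init__(self):
            self.fields = {}
        def add_field(self, name, function=None, units=""):
            self.fields[name] = (function, units)

    my_plugins_fields = FieldContainer()

    # pretend this string is the contents of ~/.yt/my_plugins.py
    plugin_source = 'add_field("some_quantity", function=None, units="")'

    execdict = {}                                        # copied namespace
    execdict["add_field"] = my_plugins_fields.add_field  # rebind the hook
    exec(plugin_source, execdict)

    assert "some_quantity" in my_plugins_fields.fields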

diff -r 7fbbf06dad8bd50e337b3a764843d6827bb41c05 -r 74d79f9fd236da9d4ecd6ab09aa0da904ed873dd yt/utilities/lib/misc_utilities.pyx
--- a/yt/utilities/lib/misc_utilities.pyx
+++ b/yt/utilities/lib/misc_utilities.pyx
@@ -217,7 +217,8 @@
           np.ndarray[np.int64_t, ndim=1] ys,
           np.ndarray[np.float64_t, ndim=2] colors,
           int points_per_color=1,
-          int thick=1):
+          int thick=1,
+          int flip=0):
 
     cdef int nx = image.shape[0]
     cdef int ny = image.shape[1]
@@ -256,18 +257,23 @@
             if x0 >= thick and x0 < nx-thick and y0 >= thick and y0 < ny-thick:
                 for xi in range(x0-thick/2, x0+(1+thick)/2):
                     for yi in range(y0-thick/2, y0+(1+thick)/2):
+                        if flip: 
+                            yi0 = ny - yi
+                        else:
+                            yi0 = yi
+
                         if has_alpha:
-                            image[xi, yi, 3] = outa = alpha[3] + image[xi, yi, 3]*(1-alpha[3])
+                            image[xi, yi0, 3] = outa = alpha[3] + image[xi, yi0, 3]*(1-alpha[3])
                             if outa != 0.0:
                                 outa = 1.0/outa
                             for i in range(3):
-                                image[xi, yi, i] = \
-                                        ((1.-alpha[3])*image[xi, yi, i]*image[xi, yi, 3]
+                                image[xi, yi0, i] = \
+                                        ((1.-alpha[3])*image[xi, yi0, i]*image[xi, yi0, 3]
                                          + alpha[3]*alpha[i])*outa
                         else:
                             for i in range(3):
-                                image[xi, yi, i] = \
-                                        (1.-alpha[i])*image[xi,yi,i] + alpha[i]
+                                image[xi, yi0, i] = \
+                                        (1.-alpha[i])*image[xi,yi0,i] + alpha[i]
 
             if (x0 == x1 and y0 == y1):
                 break

diff -r 7fbbf06dad8bd50e337b3a764843d6827bb41c05 -r 74d79f9fd236da9d4ecd6ab09aa0da904ed873dd yt/visualization/plot_window.py
--- a/yt/visualization/plot_window.py
+++ b/yt/visualization/plot_window.py
@@ -889,13 +889,13 @@
                 if self._field_transform[f] == linear_transform:
                     self.plots[f].cax.minorticks_on()
                 else:
-                    vmin = self.plots[f].cb.norm.vmin
-                    vmax = self.plots[f].cb.norm.vmax
+                    vmin = np.float64( self.plots[f].cb.norm.vmin )
+                    vmax = np.float64( self.plots[f].cb.norm.vmax )
                     if self._field_transform[f] == log_transform:
                         mticks = self.plots[f].image.norm( get_log_minorticks(vmin, vmax) )
                     else: # symlog_transform
                         flinthresh = 10**np.floor( np.log10( self.plots[f].cb.norm.linthresh ) )
-                        mticks = self.plots[f].image.norm( get_symlog_minorticks(flinthresh, vmin.d, vmax.d) )
+                        mticks = self.plots[f].image.norm( get_symlog_minorticks(flinthresh, vmin, vmax) )
                     self.plots[f].cax.yaxis.set_ticks(mticks, minor=True)
             else:
                 self.plots[f].cax.minorticks_off()

diff -r 7fbbf06dad8bd50e337b3a764843d6827bb41c05 -r 74d79f9fd236da9d4ecd6ab09aa0da904ed873dd yt/visualization/profile_plotter.py
--- a/yt/visualization/profile_plotter.py
+++ b/yt/visualization/profile_plotter.py
@@ -864,9 +864,9 @@
                 if self._field_transform[f] == linear_transform:
                     self.plots[f].cax.minorticks_on()
                 else:
-                    vmin = self.plots[f].cb.norm.vmin
-                    vmax = self.plots[f].cb.norm.vmax
-                    mticks = self.plots[f].image.norm( get_log_minorticks(vmin.d, vmax.d) )
+                    vmin = np.float64( self.plots[f].cb.norm.vmin )
+                    vmax = np.float64( self.plots[f].cb.norm.vmax )
+                    mticks = self.plots[f].image.norm( get_log_minorticks(vmin, vmax) )
                     self.plots[f].cax.yaxis.set_ticks(mticks, minor=True)
             else:
                 self.plots[f].cax.minorticks_off()

diff -r 7fbbf06dad8bd50e337b3a764843d6827bb41c05 -r 74d79f9fd236da9d4ecd6ab09aa0da904ed873dd yt/visualization/volume_rendering/camera.py
--- a/yt/visualization/volume_rendering/camera.py
+++ b/yt/visualization/volume_rendering/camera.py
@@ -316,8 +316,11 @@
         nim = im.rescale(inline=False)
         enhance_rgba(nim)
         nim.add_background_color('black', inline=True)
-       
-        lines(nim, px, py, colors, 24)
+
+        # we flipped it in snapshot to get the orientation correct, so
+        # flip the lines
+        lines(nim, px, py, colors, 24, flip=1)
+
         return nim
 
     def draw_coordinate_vectors(self, im, length=0.05, thickness=1):
@@ -370,11 +373,13 @@
                   np.array([0.0, 1.0, 0.0, alpha]),
                   np.array([0.0, 0.0, 1.0, alpha])]
 
+        # we flipped it in snapshot to get the orientation correct, so
+        # flip the lines
         for vec, color in zip(coord_vectors, colors):
             dx = int(np.dot(vec, self.orienter.unit_vectors[0]))
             dy = int(np.dot(vec, self.orienter.unit_vectors[1]))
             lines(im, np.array([px0, px0+dx]), np.array([py0, py0+dy]),
-                  np.array([color, color]), 1, thickness)
+                  np.array([color, color]), 1, thickness, flip=1)
 
     def draw_line(self, im, x0, x1, color=None):
         r"""Draws a line on an existing volume rendering.
@@ -415,7 +420,10 @@
         py1 = int(self.resolution[0]*(dx1/self.width[0]))
         px0 = int(self.resolution[1]*(dy0/self.width[1]))
         px1 = int(self.resolution[1]*(dy1/self.width[1]))
-        lines(im, np.array([px0,px1]), np.array([py0,py1]), color=np.array([color,color]))
+
+        # we flipped it in snapshot to get the orientation correct, so
+        # flip the lines
+        lines(im, np.array([px0,px1]), np.array([py0,py1]), color=np.array([color,color]),flip=1)
 
     def draw_domain(self,im,alpha=0.3):
         r"""Draws domain edges on an existing volume rendering.
@@ -497,7 +505,9 @@
 
         px, py, dz = self.project_to_plane(vertices, res=im.shape[:2])
        
-        lines(im, px, py, color.reshape(1,4), 24)
+        # we flipped it in snapshot to get the orientation correct, so
+        # flip the lines
+        lines(im, px, py, color.reshape(1,4), 24, flip=1)
 
     def look_at(self, new_center, north_vector = None):
         r"""Change the view direction based on a new focal point.
@@ -748,7 +758,7 @@
                                         image, sampler),
                            info=self.get_information())
 
-        # flip it up/down to handle how the png orientation is donetest.png
+        # flip it up/down to handle how the png orientation is done
         image = image[:,::-1,:]
         self.save_image(image, fn=fn, clip_ratio=clip_ratio, 
                        transparent=transparent)


https://bitbucket.org/yt_analysis/yt/commits/7da392db9455/
Changeset:   7da392db9455
Branch:      yt
User:        brittonsmith
Date:        2014-10-22 19:06:44+00:00
Summary:     Merging.
Affected #:  16 files

diff -r 74d79f9fd236da9d4ecd6ab09aa0da904ed873dd -r 7da392db9455f00a08bd794360ef80f6074b933e yt/frontends/api.py
--- a/yt/frontends/api.py
+++ b/yt/frontends/api.py
@@ -39,3 +39,5 @@
         for frontend in _frontends:
             _mod = "yt.frontends.%s.api" % frontend
             setattr(self, frontend, importlib.import_module(_mod))
+        setattr(self, 'api', importlib.import_module('yt.frontends.api'))
+        setattr(self, '__name__', 'yt.frontends.api')
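
The loop and the two new setattr calls above use importlib to attach each
frontend's api module as an attribute of a single container object. The
same pattern, sketched with stdlib modules so it runs anywhere:

    import importlib

    class APIContainer(object):
        def __init__(self, names):
            for name in names:
                setattr(self, name, importlib.import_module(name))

    apis = APIContainer(["json", "math"])
    print(apis.math.sqrt(2.0))  # modules are reachable as plain attributes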

diff -r 74d79f9fd236da9d4ecd6ab09aa0da904ed873dd -r 7da392db9455f00a08bd794360ef80f6074b933e yt/frontends/art/api.py
--- a/yt/frontends/art/api.py
+++ b/yt/frontends/art/api.py
@@ -24,3 +24,5 @@
 
 from .io import \
       IOHandlerART
+
+from . import tests

diff -r 74d79f9fd236da9d4ecd6ab09aa0da904ed873dd -r 7da392db9455f00a08bd794360ef80f6074b933e yt/frontends/artio/api.py
--- a/yt/frontends/artio/api.py
+++ b/yt/frontends/artio/api.py
@@ -22,3 +22,5 @@
 
 from .io import \
     IOHandlerARTIO
+
+from . import tests

diff -r 74d79f9fd236da9d4ecd6ab09aa0da904ed873dd -r 7da392db9455f00a08bd794360ef80f6074b933e yt/frontends/athena/api.py
--- a/yt/frontends/athena/api.py
+++ b/yt/frontends/athena/api.py
@@ -22,3 +22,5 @@
 
 from .io import \
       IOHandlerAthena
+
+from . import tests

diff -r 74d79f9fd236da9d4ecd6ab09aa0da904ed873dd -r 7da392db9455f00a08bd794360ef80f6074b933e yt/frontends/boxlib/api.py
--- a/yt/frontends/boxlib/api.py
+++ b/yt/frontends/boxlib/api.py
@@ -29,3 +29,5 @@
 
 from .io import \
       IOHandlerBoxlib
+
+from . import tests

diff -r 74d79f9fd236da9d4ecd6ab09aa0da904ed873dd -r 7da392db9455f00a08bd794360ef80f6074b933e yt/frontends/chombo/api.py
--- a/yt/frontends/chombo/api.py
+++ b/yt/frontends/chombo/api.py
@@ -35,3 +35,5 @@
 from .io import \
     IOHandlerChomboHDF5,\
     IOHandlerPlutoHDF5
+
+from . import tests

diff -r 74d79f9fd236da9d4ecd6ab09aa0da904ed873dd -r 7da392db9455f00a08bd794360ef80f6074b933e yt/frontends/enzo/api.py
--- a/yt/frontends/enzo/api.py
+++ b/yt/frontends/enzo/api.py
@@ -35,3 +35,5 @@
       IOHandlerInMemory, \
       IOHandlerPacked2D, \
       IOHandlerPacked1D
+
+from . import tests

diff -r 74d79f9fd236da9d4ecd6ab09aa0da904ed873dd -r 7da392db9455f00a08bd794360ef80f6074b933e yt/frontends/fits/api.py
--- a/yt/frontends/fits/api.py
+++ b/yt/frontends/fits/api.py
@@ -22,4 +22,6 @@
       IOHandlerFITS
 
 from .misc import \
-      setup_counts_fields
\ No newline at end of file
+      setup_counts_fields
+
+from . import tests

diff -r 74d79f9fd236da9d4ecd6ab09aa0da904ed873dd -r 7da392db9455f00a08bd794360ef80f6074b933e yt/frontends/flash/api.py
--- a/yt/frontends/flash/api.py
+++ b/yt/frontends/flash/api.py
@@ -23,3 +23,5 @@
 
 from .io import \
       IOHandlerFLASH
+
+from . import tests

diff -r 74d79f9fd236da9d4ecd6ab09aa0da904ed873dd -r 7da392db9455f00a08bd794360ef80f6074b933e yt/frontends/moab/api.py
--- a/yt/frontends/moab/api.py
+++ b/yt/frontends/moab/api.py
@@ -25,3 +25,5 @@
 
 from .io import \
       IOHandlerMoabH5MHex8
+
+from . import tests

diff -r 74d79f9fd236da9d4ecd6ab09aa0da904ed873dd -r 7da392db9455f00a08bd794360ef80f6074b933e yt/frontends/ramses/api.py
--- a/yt/frontends/ramses/api.py
+++ b/yt/frontends/ramses/api.py
@@ -24,3 +24,5 @@
 
 from .definitions import \
       field_aliases
+
+from . import tests

diff -r 74d79f9fd236da9d4ecd6ab09aa0da904ed873dd -r 7da392db9455f00a08bd794360ef80f6074b933e yt/frontends/sph/gadget/api.py
--- a/yt/frontends/sph/gadget/api.py
+++ b/yt/frontends/sph/gadget/api.py
@@ -17,6 +17,3 @@
 from .data_structures import \
       GadgetDataset, \
       GadgetHDF5Dataset
-
-from .io import \
-      IOHandlerGadgetBinary

diff -r 74d79f9fd236da9d4ecd6ab09aa0da904ed873dd -r 7da392db9455f00a08bd794360ef80f6074b933e yt/frontends/sph/owls/api.py
--- a/yt/frontends/sph/owls/api.py
+++ b/yt/frontends/sph/owls/api.py
@@ -16,3 +16,5 @@
 
 from .data_structures import \
     OWLSDataset
+
+from . import tests

diff -r 74d79f9fd236da9d4ecd6ab09aa0da904ed873dd -r 7da392db9455f00a08bd794360ef80f6074b933e yt/frontends/sph/tipsy/api.py
--- a/yt/frontends/sph/tipsy/api.py
+++ b/yt/frontends/sph/tipsy/api.py
@@ -16,3 +16,5 @@
 
 from .data_structures import \
      TipsyDataset
+
+from . import tests

diff -r 74d79f9fd236da9d4ecd6ab09aa0da904ed873dd -r 7da392db9455f00a08bd794360ef80f6074b933e yt/frontends/stream/api.py
--- a/yt/frontends/stream/api.py
+++ b/yt/frontends/stream/api.py
@@ -31,3 +31,5 @@
 
 from .io import \
       IOHandlerStream
+
+from . import tests


https://bitbucket.org/yt_analysis/yt/commits/f2b89637ff33/
Changeset:   f2b89637ff33
Branch:      yt
User:        brittonsmith
Date:        2014-10-22 20:37:51+00:00
Summary:     Moving sph frontends into the frontends directory.
Affected #:  63 files

diff -r 7da392db9455f00a08bd794360ef80f6074b933e -r f2b89637ff33eed724862683975e598b38abbca5 yt/frontends/eagle/api.py
--- /dev/null
+++ b/yt/frontends/eagle/api.py
@@ -0,0 +1,19 @@
+"""
+API for EAGLE frontend
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from .data_structures import \
+    EagleDataset, \
+    EagleNetworkDataset

diff -r 7da392db9455f00a08bd794360ef80f6074b933e -r f2b89637ff33eed724862683975e598b38abbca5 yt/frontends/eagle/data_structures.py
--- /dev/null
+++ b/yt/frontends/eagle/data_structures.py
@@ -0,0 +1,98 @@
+"""
+Data structures for EAGLE frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+import numpy as np
+import types
+
+from yt.frontends.gadget.data_structures import \
+    GadgetHDF5Dataset
+from yt.frontends.owls.fields import \
+    OWLSFieldInfo
+import yt.units
+
+from .fields import \
+    EagleNetworkFieldInfo
+
+class EagleDataset(GadgetHDF5Dataset):
+    _particle_mass_name = "Mass"
+    _field_info_class = OWLSFieldInfo
+    _time_readin_ = 'Time'
+
+    def _parse_parameter_file(self):
+
+        # read values from header
+        hvals = self._get_hvals()
+        self.parameters = hvals
+
+        # set features common to OWLS and Eagle
+        self._set_owls_eagle()
+
+        # Set time from analytic solution for flat LCDM universe
+        a = hvals['ExpansionFactor']
+        H0 = hvals['H(z)'] / hvals['E(z)']
+        a_eq = ( self.omega_matter / self.omega_lambda )**(1./3)
+        t1 = 2.0 / ( 3.0 * np.sqrt( self.omega_lambda ) )
+        t2 = (a/a_eq)**(3./2)
+        t3 = np.sqrt( 1.0 + (a/a_eq)**3 )
+        t = t1 * np.log( t2 + t3 ) / H0
+        self.current_time = t * yt.units.s
+
+    def _set_code_unit_attributes(self):
+        self._set_owls_eagle_units()
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        need_groups = ['Config', 'Constants', 'HashTable', 'Header', 
+                       'Parameters', 'RuntimePars', 'Units']
+        veto_groups = ['SUBFIND',
+                       'PartType0/ChemistryAbundances', 
+                       'PartType0/ChemicalAbundances']
+        valid = True
+        try:
+            fileh = h5py.File(args[0], mode='r')
+            for ng in need_groups:
+                if ng not in fileh["/"]:
+                    valid = False
+            for vg in veto_groups:
+                if vg in fileh["/"]:
+                    valid = False                    
+            fileh.close()
+        except:
+            valid = False
+            pass
+        return valid
+
+class EagleNetworkDataset(EagleDataset):
+    _particle_mass_name = "Mass"
+    _field_info_class = EagleNetworkFieldInfo
+    _time_readin = 'Time'
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        try:
+            fileh = h5py.File(args[0], mode='r')
+            if "Constants" in fileh["/"].keys() and \
+               "Header" in fileh["/"].keys() and \
+               "SUBFIND" not in fileh["/"].keys() and \
+               ("ChemistryAbundances" in fileh["PartType0"].keys()
+                or "ChemicalAbundances" in fileh["PartType0"].keys()):
+                fileh.close()
+                return True
+            fileh.close()
+        except:
+            pass
+        return False
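
The current_time computation in EagleDataset._parse_parameter_file above is
the closed-form age of a flat LCDM universe. With H_0 recovered from the
header as H(z)/E(z), the code evaluates

    t(a) = \frac{2}{3 H_0 \sqrt{\Omega_\Lambda}}
           \ln\!\left[\left(\frac{a}{a_{\rm eq}}\right)^{3/2}
                      + \sqrt{1 + \left(\frac{a}{a_{\rm eq}}\right)^{3}}\right],
    \qquad a_{\rm eq} = \left(\frac{\Omega_m}{\Omega_\Lambda}\right)^{1/3},

which, since \ln(x + \sqrt{1 + x^2}) = \operatorname{arcsinh}(x), is the
standard result t(a) = \frac{2}{3 H_0 \sqrt{\Omega_\Lambda}}
\operatorname{arcsinh}[(a/a_{\rm eq})^{3/2}].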

diff -r 7da392db9455f00a08bd794360ef80f6074b933e -r f2b89637ff33eed724862683975e598b38abbca5 yt/frontends/eagle/definitions.py
--- /dev/null
+++ b/yt/frontends/eagle/definitions.py
@@ -0,0 +1,35 @@
+"""
+EAGLE definitions
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+eaglenetwork_ions = \
+    ('electron', 'H1', 'H2', 'H_m', 'He1', 'He2','He3', 'C1',\
+     'C2', 'C3', 'C4', 'C5', 'C6', 'C7', 'C_m', 'N1', 'N2', \
+     'N3', 'N4', 'N5', 'N6', 'N7', 'N8', 'O1', 'O2', 'O3', \
+     'O4', 'O5', 'O6', 'O7', 'O8', 'O9', 'O_m', 'Ne1', 'Ne2',\
+     'Ne3', 'Ne4', 'Ne5', 'Ne6', 'Ne7', 'Ne8', 'Ne9', 'Ne10',\
+     'Ne11', 'Mg1', 'Mg2', 'Mg3', 'Mg4', 'Mg5', 'Mg6', 'Mg7',\
+     'Mg8', 'Mg9', 'Mg10', 'Mg11', 'Mg12', 'Mg13', 'Si1', 'Si2',\
+     'Si3', 'Si4', 'Si5', 'Si6', 'Si7', 'Si8', 'Si9', 'Si10',\
+     'Si11', 'Si12', 'Si13', 'Si14', 'Si15', 'Si16', 'Si17',\
+     'Ca1', 'Ca2', 'Ca3', 'Ca4', 'Ca5', 'Ca6', 'Ca7', 'Ca8',\
+     'Ca9', 'Ca10', 'Ca11', 'Ca12', 'Ca13', 'Ca14', 'Ca15',\
+     'Ca16', 'Ca17', 'Ca18', 'Ca19', 'Ca20', 'Ca21', 'Fe1',\
+     'Fe2', 'Fe3', 'Fe4', 'Fe5', 'Fe6', 'Fe7', 'Fe8', 'Fe9',\
+     'Fe10', 'Fe11', 'Fe12', 'Fe13', 'Fe14', 'Fe15', 'Fe16',\
+     'Fe17', 'Fe18', 'Fe19', 'Fe20', 'Fe21', 'Fe22', 'Fe23',\
+     'Fe24', 'Fe25', 'Fe26', 'Fe27',)
+
+eaglenetwork_ion_lookup = {ion:index for index, ion in enumerate(eaglenetwork_ions)}

diff -r 7da392db9455f00a08bd794360ef80f6074b933e -r f2b89637ff33eed724862683975e598b38abbca5 yt/frontends/eagle/fields.py
--- /dev/null
+++ b/yt/frontends/eagle/fields.py
@@ -0,0 +1,73 @@
+"""
+EAGLE fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.frontends.owls.fields import \
+    OWLSFieldInfo
+from yt.units.yt_array import YTQuantity
+from yt.utilities.periodic_table import periodic_table
+
+from .definitions import \
+    eaglenetwork_ion_lookup
+
+class EagleNetworkFieldInfo(OWLSFieldInfo):
+
+    _ions = \
+        ('H1', 'H2', 'He1', 'He2','He3', 'C1',\
+         'C2', 'C3', 'C4', 'C5', 'C6', 'C7', 'N1', 'N2', \
+         'N3', 'N4', 'N5', 'N6', 'N7', 'N8', 'O1', 'O2', 'O3', \
+         'O4', 'O5', 'O6', 'O7', 'O8', 'O9', 'Ne1', 'Ne2',\
+         'Ne3', 'Ne4', 'Ne5', 'Ne6', 'Ne7', 'Ne8', 'Ne9', 'Ne10',\
+         'Ne11', 'Mg1', 'Mg2', 'Mg3', 'Mg4', 'Mg5', 'Mg6', 'Mg7',\
+         'Mg8', 'Mg9', 'Mg10', 'Mg11', 'Mg12', 'Mg13', 'Si1', 'Si2',\
+         'Si3', 'Si4', 'Si5', 'Si6', 'Si7', 'Si8', 'Si9', 'Si10',\
+         'Si11', 'Si12', 'Si13', 'Si14', 'Si15', 'Si16', 'Si17',\
+         'Ca1', 'Ca2', 'Ca3', 'Ca4', 'Ca5', 'Ca6', 'Ca7', 'Ca8',\
+         'Ca9', 'Ca10', 'Ca11', 'Ca12', 'Ca13', 'Ca14', 'Ca15',\
+         'Ca16', 'Ca17', 'Ca18', 'Ca19', 'Ca20', 'Ca21', 'Fe1',\
+         'Fe2', 'Fe3', 'Fe4', 'Fe5', 'Fe6', 'Fe7', 'Fe8', 'Fe9',\
+         'Fe10', 'Fe11', 'Fe12', 'Fe13', 'Fe14', 'Fe15', 'Fe16',\
+         'Fe17', 'Fe18', 'Fe19', 'Fe20', 'Fe21', 'Fe22', 'Fe23',\
+         'Fe24', 'Fe25', 'Fe26', 'Fe27',)
+
+    def __init__(self, *args, **kwargs):
+        
+        super(EagleNetworkFieldInfo,self).__init__( *args, **kwargs )
+        
+    def _create_ion_density_func( self, ftype, ion ):
+        """ returns a function that calculates the ion density of a particle. 
+        """ 
+
+        def _ion_density(field, data):
+
+            # Lookup the index of the ion 
+            index = eaglenetwork_ion_lookup[ion] 
+
+            # Ion to hydrogen number density ratio
+            ion_chem = data[ftype, "Chemistry_%03i"%index]
+
+            # Mass of a single ion
+            if ion[0:2].isalpha():
+                symbol = ion[0:2].capitalize()
+            else:
+                symbol = ion[0:1].capitalize()
+            m_ion = YTQuantity(periodic_table.elements_by_symbol[symbol].weight, 'amu')
+
+            # hydrogen number density 
+            n_H = data["PartType0", "H_number_density"] 
+
+            return m_ion*ion_chem*n_H 
+        
+        return _ion_density
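
The _ion_density closure above reconstructs a mass density from the
Chemistry_%03i arrays, which store each ion's abundance as a number-density
ratio to hydrogen:

    \rho_{\rm ion} = m_{\rm ion} \cdot \frac{n_{\rm ion}}{n_{\rm H}} \cdot n_{\rm H},

with m_ion taken from yt's periodic table in atomic mass units, so
multiplying the stored ratio back by n_H and the mass of a single ion
recovers the ion mass density.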

diff -r 7da392db9455f00a08bd794360ef80f6074b933e -r f2b89637ff33eed724862683975e598b38abbca5 yt/frontends/eagle/io.py
--- /dev/null
+++ b/yt/frontends/eagle/io.py
@@ -0,0 +1,21 @@
+"""
+EAGLE data-file handling function
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.frontends.owls.io import \
+    IOHandlerOWLS
+
+class IOHandlerEagleNetwork(IOHandlerOWLS):
+    _dataset_type = "eagle_network"

diff -r 7da392db9455f00a08bd794360ef80f6074b933e -r f2b89637ff33eed724862683975e598b38abbca5 yt/frontends/gadget/api.py
--- /dev/null
+++ b/yt/frontends/gadget/api.py
@@ -0,0 +1,19 @@
+"""
+API for Gadget frontend
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from .data_structures import \
+      GadgetDataset, \
+      GadgetHDF5Dataset

diff -r 7da392db9455f00a08bd794360ef80f6074b933e -r f2b89637ff33eed724862683975e598b38abbca5 yt/frontends/gadget/data_structures.py
--- /dev/null
+++ b/yt/frontends/gadget/data_structures.py
@@ -0,0 +1,330 @@
+"""
+Data structures for Gadget frontend
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+import numpy as np
+import stat
+import os
+import types
+
+from yt.data_objects.static_output import \
+    ParticleFile
+from yt.frontends.sph.data_structures import \
+    ParticleDataset
+from yt.frontends.sph.fields import \
+    SPHFieldInfo
+from yt.geometry.particle_geometry_handler import \
+    ParticleIndex
+from yt.utilities.cosmology import \
+    Cosmology
+from yt.utilities.definitions import \
+    sec_conversion
+from yt.utilities.fortran_utils import read_record
+from yt.utilities.logger import ytLogger as mylog
+
+from .definitions import \
+    gadget_header_specs, \
+    gadget_field_specs, \
+    gadget_ptype_specs
+
+def _fix_unit_ordering(unit):
+    if isinstance(unit[0], types.StringTypes):
+        unit = unit[1], unit[0]
+    return unit
+
+class GadgetBinaryFile(ParticleFile):
+    def __init__(self, ds, io, filename, file_id):
+        with open(filename, "rb") as f:
+            self.header = read_record(f, ds._header_spec)
+            self._position_offset = f.tell()
+            f.seek(0, os.SEEK_END)
+            self._file_size = f.tell()
+
+        super(GadgetBinaryFile, self).__init__(ds, io, filename, file_id)
+
+    def _calculate_offsets(self, field_list):
+        self.field_offsets = self.io._calculate_field_offsets(
+            field_list, self.total_particles,
+            self._position_offset, self._file_size)
+
+class GadgetDataset(ParticleDataset):
+    _index_class = ParticleIndex
+    _file_class = GadgetBinaryFile
+    _field_info_class = SPHFieldInfo
+    _particle_mass_name = "Mass"
+    _particle_coordinates_name = "Coordinates"
+    _particle_velocity_name = "Velocities"
+    _suffix = ""
+
+    def __init__(self, filename, dataset_type="gadget_binary",
+                 additional_fields=(),
+                 unit_base=None, n_ref=64,
+                 over_refine_factor=1,
+                 bounding_box = None,
+                 header_spec = "default",
+                 field_spec = "default",
+                 ptype_spec = "default"):
+        if self._instantiated: return
+        self._header_spec = self._setup_binary_spec(
+            header_spec, gadget_header_specs)
+        self._field_spec = self._setup_binary_spec(
+            field_spec, gadget_field_specs)
+        self._ptype_spec = self._setup_binary_spec(
+            ptype_spec, gadget_ptype_specs)
+        self.n_ref = n_ref
+        self.over_refine_factor = over_refine_factor
+        self.storage_filename = None
+        if unit_base is not None and "UnitLength_in_cm" in unit_base:
+            # We assume this is comoving, because in the absence of comoving
+            # integration the redshift will be zero.
+            unit_base['cmcm'] = 1.0 / unit_base["UnitLength_in_cm"]
+        self._unit_base = unit_base
+        if bounding_box is not None:
+            bbox = np.array(bounding_box, dtype="float64")
+            if bbox.shape == (2, 3):
+                bbox = bbox.transpose()
+            self.domain_left_edge = bbox[:,0]
+            self.domain_right_edge = bbox[:,1]
+        else:
+            self.domain_left_edge = self.domain_right_edge = None
+        super(GadgetDataset, self).__init__(filename, dataset_type)
+
+    def _setup_binary_spec(self, spec, spec_dict):
+        if isinstance(spec, types.StringTypes):
+            _hs = ()
+            for hs in spec.split("+"):
+                _hs += spec_dict[hs]
+            spec = _hs
+        return spec
+
+    def __repr__(self):
+        return os.path.basename(self.parameter_filename).split(".")[0]
+
+    def _get_hvals(self):
+        # The entries in this header are capitalized and named to match Table 4
+        # in the GADGET-2 user guide.
+
+        f = open(self.parameter_filename)
+        hvals = read_record(f, self._header_spec)
+        for i in hvals:
+            if len(hvals[i]) == 1:
+                hvals[i] = hvals[i][0]
+        return hvals
+
+    def _parse_parameter_file(self):
+
+        hvals = self._get_hvals()
+
+        self.dimensionality = 3
+        self.refine_by = 2
+        self.parameters["HydroMethod"] = "sph"
+        self.unique_identifier = \
+            int(os.stat(self.parameter_filename)[stat.ST_CTIME])
+        # Set standard values
+
+        # We may have an overridden bounding box.
+        if self.domain_left_edge is None:
+            self.domain_left_edge = np.zeros(3, "float64")
+            self.domain_right_edge = np.ones(3, "float64") * hvals["BoxSize"]
+        nz = 1 << self.over_refine_factor
+        self.domain_dimensions = np.ones(3, "int32") * nz
+        self.periodicity = (True, True, True)
+
+        self.cosmological_simulation = 1
+
+        self.current_redshift = hvals["Redshift"]
+        self.omega_lambda = hvals["OmegaLambda"]
+        self.omega_matter = hvals["Omega0"]
+        self.hubble_constant = hvals["HubbleParam"]
+        # According to the Gadget manual, OmegaLambda will be zero for
+        # non-cosmological datasets.  However, it may be the case that
+        # individuals are running cosmological simulations *without* Lambda, in
+        # which case we may be doing something incorrect here.
+        # It may be possible to deduce whether ComovingIntegration is on
+        # somehow, but opinions on this vary.
+        if self.omega_lambda == 0.0:
+            mylog.info("Omega Lambda is 0.0, so we are turning off Cosmology.")
+            self.hubble_constant = 1.0  # So that scaling comes out correct
+            self.cosmological_simulation = 0
+            self.current_redshift = 0.0
+            # This may not be correct.
+            self.current_time = hvals["Time"] * sec_conversion["Gyr"]
+        else:
+            # Now we calculate our time based on the cosmology, because in
+            # ComovingIntegration hvals["Time"] will in fact be the expansion
+            # factor, not the actual integration time, so we re-calculate
+            # global time from our Cosmology.
+            cosmo = Cosmology(self.hubble_constant,
+                              self.omega_matter, self.omega_lambda)
+            self.current_time = cosmo.hubble_time(self.current_redshift)
+            mylog.info("Calculating time from %0.3e to be %0.3e seconds",
+                       hvals["Time"], self.current_time)
+        self.parameters = hvals
+
+        prefix = self.parameter_filename.split(".", 1)[0]
+
+        if hvals["NumFiles"] > 1:
+            self.filename_template = "%s.%%(num)s%s" % (prefix, self._suffix)
+        else:
+            self.filename_template = self.parameter_filename
+
+        self.file_count = hvals["NumFiles"]
+
+    def _set_code_unit_attributes(self):
+        # If no units passed in by user, set a sane default (Gadget-2 users guide).
+        if self._unit_base is None:
+            if self.cosmological_simulation == 1:
+                mylog.info("Assuming length units are in kpc/h (comoving)")
+                self._unit_base = dict(length = (1.0, "kpccm/h"))
+            else:
+                mylog.info("Assuming length units are in kpc (physical)")
+                self._unit_base = dict(length = (1.0, "kpc"))
+                
+        # If units passed in by user, decide what to do about
+        # co-moving and factors of h
+        unit_base = self._unit_base or {}
+        if "length" in unit_base:
+            length_unit = unit_base["length"]
+        elif "UnitLength_in_cm" in unit_base:
+            if self.cosmological_simulation == 0:
+                length_unit = (unit_base["UnitLength_in_cm"], "cm")
+            else:
+                length_unit = (unit_base["UnitLength_in_cm"], "cmcm/h")
+        else:
+            raise RuntimeError
+        length_unit = _fix_unit_ordering(length_unit)
+        self.length_unit = self.quan(length_unit[0], length_unit[1])
+
+        unit_base = self._unit_base or {}
+        if "velocity" in unit_base:
+            velocity_unit = unit_base["velocity"]
+        elif "UnitVelocity_in_cm_per_s" in unit_base:
+            velocity_unit = (unit_base["UnitVelocity_in_cm_per_s"], "cm/s")
+        else:
+            velocity_unit = (1e5, "cm/s")
+        velocity_unit = _fix_unit_ordering(velocity_unit)
+        self.velocity_unit = self.quan(velocity_unit[0], velocity_unit[1])
+
+        # We set hubble_constant = 1.0 for non-cosmology, so this is safe.
+        # Default to 1e10 Msun/h if mass is not specified.
+        if "mass" in unit_base:
+            mass_unit = unit_base["mass"]
+        elif "UnitMass_in_g" in unit_base:
+            if self.cosmological_simulation == 0:
+                mass_unit = (unit_base["UnitMass_in_g"], "g")
+            else:
+                mass_unit = (unit_base["UnitMass_in_g"], "g/h")
+        else:
+            # Sane default
+            mass_unit = (1.0, "1e10*Msun/h")
+        mass_unit = _fix_unit_ordering(mass_unit)
+        self.mass_unit = self.quan(mass_unit[0], mass_unit[1])
+        self.time_unit = self.length_unit / self.velocity_unit
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        # We do not allow load() of these files.
+        return False
+
+
+class GadgetHDF5Dataset(GadgetDataset):
+    _file_class = ParticleFile
+    _field_info_class = SPHFieldInfo
+    _particle_mass_name = "Masses"
+    _suffix = ".hdf5"
+
+    def __init__(self, filename, dataset_type="gadget_hdf5", 
+                 unit_base = None, n_ref=64,
+                 over_refine_factor=1,
+                 bounding_box = None):
+        self.storage_filename = None
+        filename = os.path.abspath(filename)
+        super(GadgetHDF5Dataset, self).__init__(
+            filename, dataset_type, unit_base=unit_base, n_ref=n_ref,
+            over_refine_factor=over_refine_factor,
+            bounding_box = bounding_box)
+
+    def _get_hvals(self):
+        handle = h5py.File(self.parameter_filename, mode="r")
+        hvals = {}
+        hvals.update((str(k), v) for k, v in handle["/Header"].attrs.items())
+        # Compat reasons.
+        hvals["NumFiles"] = hvals["NumFilesPerSnapshot"]
+        hvals["Massarr"] = hvals["MassTable"]
+        handle.close()
+        return hvals
+
+    def _get_uvals(self):
+        handle = h5py.File(self.parameter_filename, mode="r")
+        uvals = {}
+        uvals.update((str(k), v) for k, v in handle["/Units"].attrs.items())
+        handle.close()
+        return uvals
+
+
+
+    def _set_owls_eagle(self):
+
+        self.dimensionality = 3
+        self.refine_by = 2
+        self.parameters["HydroMethod"] = "sph"
+        self.unique_identifier = \
+            int(os.stat(self.parameter_filename)[stat.ST_CTIME])
+
+        self._unit_base = self._get_uvals()
+        self._unit_base['cmcm'] = 1.0 / self._unit_base["UnitLength_in_cm"]
+
+        self.current_redshift = self.parameters["Redshift"]
+        self.omega_lambda = self.parameters["OmegaLambda"]
+        self.omega_matter = self.parameters["Omega0"]
+        self.hubble_constant = self.parameters["HubbleParam"]
+
+        if self.domain_left_edge is None:
+            self.domain_left_edge = np.zeros(3, "float64")
+            self.domain_right_edge = np.ones(3, "float64") * self.parameters["BoxSize"]
+
+        nz = 1 << self.over_refine_factor
+        self.domain_dimensions = np.ones(3, "int32") * nz
+
+        self.cosmological_simulation = 1
+        self.periodicity = (True, True, True)
+
+        prefix = os.path.abspath(self.parameter_filename.split(".", 1)[0])
+        suffix = self.parameter_filename.rsplit(".", 1)[-1]
+        self.filename_template = "%s.%%(num)i.%s" % (prefix, suffix)
+        self.file_count = self.parameters["NumFilesPerSnapshot"]
+
+    def _set_owls_eagle_units(self):
+
+        # note the contents of the HDF5 Units group are in _unit_base 
+        # note the velocity stored on disk is sqrt(a) dx/dt 
+        self.length_unit = self.quan(self._unit_base["UnitLength_in_cm"], 'cmcm/h')
+        self.mass_unit = self.quan(self._unit_base["UnitMass_in_g"], 'g/h')
+        self.velocity_unit = self.quan(self._unit_base["UnitVelocity_in_cm_per_s"], 'cm/s')
+        self.time_unit = self.quan(self._unit_base["UnitTime_in_s"], 's/h')
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        try:
+            fileh = h5py.File(args[0], mode='r')
+            if "Constants" not in fileh["/"].keys() and \
+               "Header" in fileh["/"].keys():
+                fileh.close()
+                return True
+            fileh.close()
+        except:
+            pass
+        return False
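
For reference, _set_code_unit_attributes above accepts two spellings of unit_base: yt-style (value, unit) tuples keyed by dimension, or the Gadget parameter-file names in cgs; _fix_unit_ordering also tolerates tuples written as (unit, value). A minimal sketch of both forms, with illustrative values that are not taken from the commit:

    # yt-style tuples, matching the "length"/"velocity"/"mass" branches:
    unit_base = {
        "length": (1.0, "kpccm/h"),     # comoving kpc/h, the cosmological default
        "velocity": (1e5, "cm/s"),      # i.e. km/s
        "mass": (1.0, "1e10*Msun/h"),
    }

    # Gadget parameter-file style, in cgs; cosmological runs pick up the
    # /h and comoving ("cmcm") variants in the branches above:
    unit_base = {
        "UnitLength_in_cm": 3.085678e21,      # 1 kpc
        "UnitVelocity_in_cm_per_s": 1.0e5,    # 1 km/s
        "UnitMass_in_g": 1.989e43,            # ~1e10 Msun
    }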

diff -r 7da392db9455f00a08bd794360ef80f6074b933e -r f2b89637ff33eed724862683975e598b38abbca5 yt/frontends/gadget/definitions.py
--- /dev/null
+++ b/yt/frontends/gadget/definitions.py
@@ -0,0 +1,69 @@
+"""
+Gadget definitions
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+gadget_header_specs = dict(
+    default      = (('Npart', 6, 'i'),
+                    ('Massarr', 6, 'd'),
+                    ('Time', 1, 'd'),
+                    ('Redshift', 1, 'd'),
+                    ('FlagSfr', 1, 'i'),
+                    ('FlagFeedback', 1, 'i'),
+                    ('Nall', 6, 'i'),
+                    ('FlagCooling', 1, 'i'),
+                    ('NumFiles', 1, 'i'),
+                    ('BoxSize', 1, 'd'),
+                    ('Omega0', 1, 'd'),
+                    ('OmegaLambda', 1, 'd'),
+                    ('HubbleParam', 1, 'd'),
+                    ('FlagAge', 1, 'i'),
+                    ('FlagMetals', 1, 'i'),
+                    ('NallHW', 6, 'i'),
+                    ('unused', 16, 'i')),
+    pad32       = (('empty',  32, 'c'),),
+    pad64       = (('empty',  64, 'c'),),
+    pad128      = (('empty', 128, 'c'),),
+    pad256      = (('empty', 256, 'c'),),
+)
+
+gadget_ptype_specs = dict(
+    default = ( "Gas",
+                "Halo",
+                "Disk",
+                "Bulge",
+                "Stars",
+                "Bndry" )
+)
+
+gadget_field_specs = dict(
+    default = ( "Coordinates",
+                "Velocities",
+                "ParticleIDs",
+                "Mass",
+                ("InternalEnergy", "Gas"),
+                ("Density", "Gas"),
+                ("SmoothingLength", "Gas"),
+    ),
+    agora_unlv = ( "Coordinates",
+                   "Velocities",
+                   "ParticleIDs",
+                   "Mass",
+                   ("InternalEnergy", "Gas"),
+                   ("Density", "Gas"),
+                   ("Electron_Number_Density", "Gas"),
+                   ("HI_NumberDensity", "Gas"),
+                   ("SmoothingLength", "Gas"),
+    )
+)
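
As a sanity check on the default header spec above, the field widths sum to the canonical 256-byte Gadget-2 header ('i' is 4 bytes and 'd' is 8, as in the struct module); the 4-byte record-length ints that frame the header on disk are accounted for separately by the reader:

    import struct
    from yt.frontends.gadget.definitions import gadget_header_specs

    # 6*4 + 6*8 + 8 + 8 + ... + 16*4 = 256 bytes
    size = sum(n * struct.calcsize(fmt)
               for name, n, fmt in gadget_header_specs["default"])
    assert size == 256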

diff -r 7da392db9455f00a08bd794360ef80f6074b933e -r f2b89637ff33eed724862683975e598b38abbca5 yt/frontends/gadget/io.py
--- /dev/null
+++ b/yt/frontends/gadget/io.py
@@ -0,0 +1,203 @@
+"""
+Gadget data-file handling functions
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+import numpy as np
+import os
+import types
+
+from yt.geometry.oct_container import \
+    _ORDER_MAX
+from yt.utilities.io_handler import \
+    BaseIOHandler
+from yt.utilities.lib.geometry_utils import \
+    compute_morton
+from yt.utilities.logger import ytLogger as mylog
+
+class IOHandlerGadgetBinary(BaseIOHandler):
+    _dataset_type = "gadget_binary"
+    _vector_fields = ("Coordinates", "Velocity", "Velocities")
+
+    # Particle types (Table 3 in GADGET-2 user guide)
+    #
+    # Blocks in the file:
+    #   HEAD
+    #   POS
+    #   VEL
+    #   ID
+    #   MASS    (variable mass only)
+    #   U       (gas only)
+    #   RHO     (gas only)
+    #   HSML    (gas only)
+    #   POT     (only if enabled in makefile)
+    #   ACCE    (only if enabled in makefile)
+    #   ENDT    (only if enabled in makefile)
+    #   TSTP    (only if enabled in makefile)
+
+    _var_mass = None
+
+    def __init__(self, ds, *args, **kwargs):
+        self._fields = ds._field_spec
+        self._ptypes = ds._ptype_spec
+        super(IOHandlerGadgetBinary, self).__init__(ds, *args, **kwargs)
+
+    @property
+    def var_mass(self):
+        if self._var_mass is None:
+            vm = []
+            for i, v in enumerate(self.ds["Massarr"]):
+                if v == 0:
+                    vm.append(self._ptypes[i])
+            self._var_mass = tuple(vm)
+        return self._var_mass
+
+    def _read_fluid_selection(self, chunks, selector, fields, size):
+        raise NotImplementedError
+
+    def _read_particle_coords(self, chunks, ptf):
+        data_files = set([])
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in sorted(data_files):
+            poff = data_file.field_offsets
+            tp = data_file.total_particles
+            f = open(data_file.filename, "rb")
+            for ptype in ptf:
+                # This is where we could implement sub-chunking
+                f.seek(poff[ptype, "Coordinates"], os.SEEK_SET)
+                pos = self._read_field_from_file(f,
+                            tp[ptype], "Coordinates")
+                yield ptype, (pos[:,0], pos[:,1], pos[:,2])
+            f.close()
+
+    def _read_particle_fields(self, chunks, ptf, selector):
+        data_files = set([])
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in sorted(data_files):
+            poff = data_file.field_offsets
+            tp = data_file.total_particles
+            f = open(data_file.filename, "rb")
+            for ptype, field_list in sorted(ptf.items()):
+                f.seek(poff[ptype, "Coordinates"], os.SEEK_SET)
+                pos = self._read_field_from_file(f,
+                            tp[ptype], "Coordinates")
+                mask = selector.select_points(
+                    pos[:,0], pos[:,1], pos[:,2], 0.0)
+                del pos
+                if mask is None: continue
+                for field in field_list:
+                    if field == "Mass" and ptype not in self.var_mass:
+                        data = np.empty(mask.sum(), dtype="float64")
+                        m = self.ds.parameters["Massarr"][
+                            self._ptypes.index(ptype)]
+                        data[:] = m
+                        yield (ptype, field), data
+                        continue
+                    f.seek(poff[ptype, field], os.SEEK_SET)
+                    data = self._read_field_from_file(f, tp[ptype], field)
+                    data = data[mask,...]
+                    yield (ptype, field), data
+            f.close()
+
+    def _read_field_from_file(self, f, count, name):
+        if count == 0: return
+        if name == "ParticleIDs":
+            dt = "uint32"
+        else:
+            dt = "float32"
+        if name in self._vector_fields:
+            count *= 3
+        arr = np.fromfile(f, dtype=dt, count = count)
+        if name in self._vector_fields:
+            arr = arr.reshape((count/3, 3), order="C")
+        return arr.astype("float64")
+
+    def _initialize_index(self, data_file, regions):
+        count = sum(data_file.total_particles.values())
+        DLE = data_file.ds.domain_left_edge
+        DRE = data_file.ds.domain_right_edge
+        dx = (DRE - DLE) / 2**_ORDER_MAX
+        with open(data_file.filename, "rb") as f:
+            # We add an additional 4 bytes for the first record's length marker.
+            f.seek(data_file._position_offset + 4)
+            # The first total_particles * 3 values are positions
+            pp = np.fromfile(f, dtype = 'float32', count = count*3)
+            pp.shape = (count, 3)
+        regions.add_data_file(pp, data_file.file_id, data_file.ds.filter_bbox)
+        morton = compute_morton(pp[:,0], pp[:,1], pp[:,2], DLE, DRE,
+                                data_file.ds.filter_bbox)
+        return morton
+
+    def _count_particles(self, data_file):
+        npart = dict((self._ptypes[i], v)
+            for i, v in enumerate(data_file.header["Npart"]))
+        return npart
+
+    # The header block is 256 bytes, framed by 4-byte record-length ints
+    # at the beginning and end; field elements are 4 bytes each.
+    _field_size = 4
+    def _calculate_field_offsets(self, field_list, pcount,
+                                 offset, file_size = None):
+        # field_list is (ftype, fname) but the blocks are ordered
+        # (fname, ftype) in the file.
+        pos = offset
+        fs = self._field_size
+        offsets = {}
+        for field in self._fields:
+            if not isinstance(field, types.StringTypes):
+                field = field[0]
+            if not any( (ptype, field) in field_list
+                        for ptype in self._ptypes):
+                continue
+            pos += 4
+            any_ptypes = False
+            for ptype in self._ptypes:
+                if field == "Mass" and ptype not in self.var_mass:
+                    continue
+                if (ptype, field) not in field_list:
+                    continue
+                offsets[(ptype, field)] = pos
+                any_ptypes = True
+                if field in self._vector_fields:
+                    pos += 3 * pcount[ptype] * fs
+                else:
+                    pos += pcount[ptype] * fs
+            pos += 4
+            if not any_ptypes: pos -= 8
+        if file_size is not None:
+            if file_size != pos:
+                mylog.warning("Your Gadget-2 file may have extra " +
+                              "columns or different precision!" +
+                              " (%s file vs %s computed)",
+                              file_size, pos)
+        return offsets
+
+    def _identify_fields(self, domain):
+        # We can just look at the particle counts.
+        field_list = []
+        tp = domain.total_particles
+        for i, ptype in enumerate(self._ptypes):
+            count = tp[ptype]
+            if count == 0: continue
+            m = domain.header["Massarr"][i]
+            for field in self._fields:
+                if isinstance(field, types.TupleType):
+                    field, req = field
+                    if req is ZeroMass:
+                        if m > 0.0 : continue
+                    elif req != ptype:
+                        continue
+                field_list.append((ptype, field))
+        return field_list, {}
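
The pos += 4 ... pos += 4 bookkeeping in _calculate_field_offsets mirrors the unformatted-Fortran framing of each block: a 4-byte length marker, the payload, then a trailing 4-byte marker. A stand-alone sketch of that framing, not the frontend code, with little-endian markers assumed:

    import struct

    def walk_records(f):
        # Yield (payload_offset, payload_size) for each Fortran record in f.
        while True:
            head = f.read(4)
            if len(head) < 4:
                return
            size = struct.unpack("<i", head)[0]
            yield f.tell(), size
            f.seek(size, 1)   # skip the payload
            f.read(4)         # consume the trailing length marker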

diff -r 7da392db9455f00a08bd794360ef80f6074b933e -r f2b89637ff33eed724862683975e598b38abbca5 yt/frontends/http_stream/api.py
--- /dev/null
+++ b/yt/frontends/http_stream/api.py
@@ -0,0 +1,18 @@
+"""
+API for HTTPStream frontend
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from .data_structures import \
+    HTTPStreamDataset

diff -r 7da392db9455f00a08bd794360ef80f6074b933e -r f2b89637ff33eed724862683975e598b38abbca5 yt/frontends/http_stream/data_structures.py
--- /dev/null
+++ b/yt/frontends/http_stream/data_structures.py
@@ -0,0 +1,112 @@
+"""
+Data structures for HTTPStream frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+import time
+import types
+
+from yt.data_objects.static_output import \
+    ParticleFile
+from yt.frontends.sph.data_structures import \
+    ParticleDataset
+from yt.frontends.sph.fields import \
+    SPHFieldInfo
+from yt.geometry.particle_geometry_handler import \
+    ParticleIndex
+
+try:
+    import requests
+    import json
+except ImportError:
+    requests = None
+
+class HTTPParticleFile(ParticleFile):
+    pass
+
+class HTTPStreamDataset(ParticleDataset):
+    _index_class = ParticleIndex
+    _file_class = HTTPParticleFile
+    _field_info_class = SPHFieldInfo
+    _particle_mass_name = "Mass"
+    _particle_coordinates_name = "Coordinates"
+    _particle_velocity_name = "Velocities"
+    filename_template = ""
+    
+    def __init__(self, base_url,
+                 dataset_type = "http_particle_stream",
+                 n_ref = 64, over_refine_factor=1):
+        if requests is None:
+            raise RuntimeError
+        self.base_url = base_url
+        self.n_ref = n_ref
+        self.over_refine_factor = over_refine_factor
+        super(HTTPStreamDataset, self).__init__("", dataset_type)
+
+    def __repr__(self):
+        return self.base_url
+
+    def _parse_parameter_file(self):
+        self.dimensionality = 3
+        self.refine_by = 2
+        self.parameters["HydroMethod"] = "sph"
+
+        # Here's where we're going to grab the JSON index file
+        hreq = requests.get(self.base_url + "/yt_index.json")
+        if hreq.status_code != 200:
+            raise RuntimeError
+        header = json.loads(hreq.content)
+        header['particle_count'] = dict((int(k), header['particle_count'][k])
+            for k in header['particle_count'])
+        self.parameters = header
+
+        # Now we get what we need
+        self.domain_left_edge = np.array(header['domain_left_edge'], "float64")
+        self.domain_right_edge = np.array(header['domain_right_edge'], "float64")
+        nz = 1 << self.over_refine_factor
+        self.domain_dimensions = np.ones(3, "int32") * nz
+        self.periodicity = (True, True, True)
+
+        self.current_time = header['current_time']
+        self.unique_identifier = header.get("unique_identifier", time.time())
+        self.cosmological_simulation = int(header['cosmological_simulation'])
+        for attr in ('current_redshift', 'omega_lambda', 'omega_matter',
+                     'hubble_constant'):
+            setattr(self, attr, float(header[attr]))
+
+        self.file_count = header['num_files']
+
+    def _set_units(self):
+        length_unit = float(self.parameters['units']['length'])
+        time_unit = float(self.parameters['units']['time'])
+        mass_unit = float(self.parameters['units']['mass'])
+        density_unit = mass_unit / length_unit ** 3
+        velocity_unit = length_unit / time_unit
+        self._unit_base = {}
+        self._unit_base['cm'] = 1.0/length_unit
+        self._unit_base['s'] = 1.0/time_unit
+        super(HTTPStreamDataset, self)._set_units()
+        self.conversion_factors["velocity"] = velocity_unit
+        self.conversion_factors["mass"] = mass_unit
+        self.conversion_factors["density"] = density_unit
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        if not args[0].startswith("http://"):
+            return False
+        if requests is None:
+            return False
+        hreq = requests.get(args[0] + "/yt_index.json")
+        if hreq.status_code == 200:
+            return True
+        return False
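
Reading _parse_parameter_file and _set_units together, the yt_index.json document served at the base URL must carry at least the keys below. This is a reconstruction from the attribute reads above; all values are placeholders:

    header = {
        "domain_left_edge": [0.0, 0.0, 0.0],
        "domain_right_edge": [1.0, 1.0, 1.0],
        "current_time": 0.0,
        "cosmological_simulation": 0,
        "current_redshift": 0.0,
        "omega_lambda": 0.7,
        "omega_matter": 0.3,
        "hubble_constant": 0.7,
        "num_files": 1,
        # file_id -> per-ptype counts; the file_id keys are cast to int
        "particle_count": {"0": {"Gas": 65536}},
        # cgs conversion factors
        "units": {"length": 3.0857e24, "time": 3.1557e13, "mass": 1.989e43},
        "field_list": [["Gas", "Coordinates"], ["Gas", "Density"]],
    }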

diff -r 7da392db9455f00a08bd794360ef80f6074b933e -r f2b89637ff33eed724862683975e598b38abbca5 yt/frontends/http_stream/io.py
--- /dev/null
+++ b/yt/frontends/http_stream/io.py
@@ -0,0 +1,116 @@
+"""
+HTTPStream data-file handling function
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+
+from yt.utilities.io_handler import \
+    BaseIOHandler
+from yt.utilities.lib.geometry_utils import \
+    compute_morton
+from yt.utilities.logger import ytLogger as mylog
+
+try:
+    import requests
+except ImportError:
+    requests = None
+
+class IOHandlerHTTPStream(BaseIOHandler):
+    _dataset_type = "http_particle_stream"
+    _vector_fields = ("Coordinates", "Velocity", "Velocities")
+
+    def __init__(self, ds):
+        if requests is None:
+            raise RuntimeError
+        self._url = ds.base_url
+        # This should eventually manage the IO and cache it
+        self.total_bytes = 0
+        super(IOHandlerHTTPStream, self).__init__(ds)
+
+    def _open_stream(self, data_file, field):
+        # This does not actually stream yet!
+        ftype, fname = field
+        s = "%s/%s/%s/%s" % (self._url,
+            data_file.file_id, ftype, fname)
+        mylog.info("Loading URL %s", s)
+        resp = requests.get(s)
+        if resp.status_code != 200:
+            raise RuntimeError
+        self.total_bytes += len(resp.content)
+        return resp.content
+
+    def _identify_fields(self, data_file):
+        f = []
+        for ftype, fname in self.ds.parameters["field_list"]:
+            f.append((str(ftype), str(fname)))
+        return f, {}
+
+    def _read_particle_coords(self, chunks, ptf):
+        chunks = list(chunks)
+        data_files = set([])
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in sorted(data_files):
+            for ptype in ptf:
+                s = self._open_stream(data_file, (ptype, "Coordinates"))
+                c = np.frombuffer(s, dtype="float64")
+                c.shape = (c.shape[0]/3, 3)
+                yield ptype, (c[:,0], c[:,1], c[:,2])
+
+    def _read_particle_fields(self, chunks, ptf, selector):
+        # Now we have all the sizes, and we can allocate
+        data_files = set([])
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in sorted(data_files):
+            for ptype, field_list in sorted(ptf.items()):
+                s = self._open_stream(data_file, (ptype, "Coordinates"))
+                c = np.frombuffer(s, dtype="float64")
+                c.shape = (c.shape[0]/3, 3)
+                mask = selector.select_points(
+                            c[:,0], c[:,1], c[:,2], 0.0)
+                del c
+                if mask is None: continue
+                for field in field_list:
+                    s = self._open_stream(data_file, (ptype, field))
+                    c = np.frombuffer(s, dtype="float64")
+                    if field in self._vector_fields:
+                        c.shape = (c.shape[0]/3, 3)
+                    data = c[mask, ...]
+                    yield (ptype, field), data
+
+    def _initialize_index(self, data_file, regions):
+        header = self.ds.parameters
+        ptypes = header["particle_count"][data_file.file_id].keys()
+        pcount = sum(header["particle_count"][data_file.file_id].values())
+        morton = np.empty(pcount, dtype='uint64')
+        ind = 0
+        for ptype in ptypes:
+            s = self._open_stream(data_file, (ptype, "Coordinates"))
+            c = np.frombuffer(s, dtype="float64")
+            c.shape = (c.shape[0]/3, 3)
+            regions.add_data_file(c, data_file.file_id,
+                                  data_file.ds.filter_bbox)
+            morton[ind:ind+c.shape[0]] = compute_morton(
+                c[:,0], c[:,1], c[:,2],
+                data_file.ds.domain_left_edge,
+                data_file.ds.domain_right_edge,
+                data_file.ds.filter_bbox)
+            ind += c.shape[0]
+        return morton
+
+    def _count_particles(self, data_file):
+        return self.ds.parameters["particle_count"][data_file.file_id]

diff -r 7da392db9455f00a08bd794360ef80f6074b933e -r f2b89637ff33eed724862683975e598b38abbca5 yt/frontends/owls/api.py
--- /dev/null
+++ b/yt/frontends/owls/api.py
@@ -0,0 +1,20 @@
+"""
+API for OWLS frontend
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from .data_structures import \
+    OWLSDataset
+
+from . import tests

diff -r 7da392db9455f00a08bd794360ef80f6074b933e -r f2b89637ff33eed724862683975e598b38abbca5 yt/frontends/owls/data_structures.py
--- /dev/null
+++ b/yt/frontends/owls/data_structures.py
@@ -0,0 +1,73 @@
+"""
+Data structures for OWLS frontend
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+import types
+
+import yt.units
+from yt.frontends.gadget.data_structures import \
+    GadgetHDF5Dataset
+from yt.utilities.definitions import \
+    sec_conversion
+
+from .fields import \
+    OWLSFieldInfo
+
+class OWLSDataset(GadgetHDF5Dataset):
+    _particle_mass_name = "Mass"
+    _field_info_class = OWLSFieldInfo
+    _time_readin = "Time_GYR"
+
+
+    def _parse_parameter_file(self):
+
+        # read values from header
+        hvals = self._get_hvals()
+        self.parameters = hvals
+
+        # set features common to OWLS and Eagle
+        self._set_owls_eagle()
+
+        # Set time from value in header
+        self.current_time = hvals[self._time_readin] * \
+                            sec_conversion["Gyr"] * yt.units.s
+
+
+    def _set_code_unit_attributes(self):
+        self._set_owls_eagle_units()
+
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        need_groups = ['Constants', 'Header', 'Parameters', 'Units']
+        veto_groups = ['SUBFIND', 'FOF',
+                       'PartType0/ChemistryAbundances', 
+                       'PartType0/ChemicalAbundances',
+                       'RuntimePars', 'HashTable']
+        valid = True
+        try:
+            fileh = h5py.File(args[0], mode='r')
+            for ng in need_groups:
+                if ng not in fileh["/"]:
+                    valid = False
+            for vg in veto_groups:
+                if vg in fileh["/"]:
+                    valid = False                    
+            fileh.close()
+        except:
+            valid = False
+        return valid

diff -r 7da392db9455f00a08bd794360ef80f6074b933e -r f2b89637ff33eed724862683975e598b38abbca5 yt/frontends/owls/definitions.py
--- /dev/null
+++ b/yt/frontends/owls/definitions.py
@@ -0,0 +1,18 @@
+"""
+OWLS definitions
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+ghdf5_ptypes  = ("PartType0", "PartType1", "PartType2", "PartType3",
+                 "PartType4", "PartType5")

diff -r 7da392db9455f00a08bd794360ef80f6074b933e -r f2b89637ff33eed724862683975e598b38abbca5 yt/frontends/owls/fields.py
--- /dev/null
+++ b/yt/frontends/owls/fields.py
@@ -0,0 +1,323 @@
+"""
+OWLS fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import os
+import numpy as np
+
+from yt.config import ytcfg
+from yt.funcs import mylog, download_file
+from yt.fields.particle_fields import \
+    add_volume_weighted_smoothed_field
+from yt.fields.species_fields import \
+    add_species_field_by_fraction, \
+    add_species_field_by_density
+from yt.frontends.sph.fields import \
+    SPHFieldInfo
+
+import owls_ion_tables as oit
+
+class OWLSFieldInfo(SPHFieldInfo):
+
+    _ions = ("c1", "c2", "c3", "c4", "c5", "c6",
+             "fe2", "fe17", "h1", "he1", "he2", "mg1", "mg2", "n2", 
+             "n3", "n4", "n5", "n6", "n7", "ne8", "ne9", "ne10", "o1", 
+             "o6", "o7", "o8", "si2", "si3", "si4", "si13")
+
+    _elements = ("H", "He", "C", "N", "O", "Ne", "Mg", "Si", "Fe")
+
+    _num_neighbors = 48
+
+    _add_elements = ("PartType0", "PartType4")
+
+    _add_ions = ("PartType0",)
+
+
+    def __init__(self, *args, **kwargs):
+        
+        new_particle_fields = (
+            ("Hydrogen", ("", ["H_fraction"], None)),
+            ("Helium", ("", ["He_fraction"], None)),
+            ("Carbon", ("", ["C_fraction"], None)),
+            ("Nitrogen", ("", ["N_fraction"], None)),
+            ("Oxygen", ("", ["O_fraction"], None)),
+            ("Neon", ("", ["Ne_fraction"], None)),
+            ("Magnesium", ("", ["Mg_fraction"], None)),
+            ("Silicon", ("", ["Si_fraction"], None)),
+            ("Iron", ("", ["Fe_fraction"], None))
+            )
+
+        self.known_particle_fields += new_particle_fields
+        
+        super(OWLSFieldInfo,self).__init__( *args, **kwargs )
+
+
+
+    def setup_particle_fields(self, ptype):
+        """ additional particle fields derived from those in snapshot.
+        we also need to add the smoothed fields here b/c setup_fluid_fields
+        is called before setup_particle_fields. """ 
+
+        smoothed_suffixes = ("_number_density", "_density", "_mass")
+
+
+
+        # we add particle element fields for stars and gas
+        #-----------------------------------------------------
+        if ptype in self._add_elements:
+
+
+            # this adds the particle element fields
+            # X_density, X_mass, and X_number_density
+            # where X is an item of self._elements.
+            # X_fraction are defined in snapshot
+            #-----------------------------------------------
+            for s in self._elements:
+                add_species_field_by_fraction(self, ptype, s,
+                                              particle_type=True)
+
+        # this needs to be called after the call to 
+        # add_species_field_by_fraction for some reason ...
+        # not sure why yet. 
+        #-------------------------------------------------------
+        if ptype == 'PartType0':
+            ftype='gas'
+        elif ptype == 'PartType1':
+            ftype='dm'
+        elif ptype == 'PartType2':
+            ftype='PartType2'
+        elif ptype == 'PartType3':
+            ftype='PartType3'
+        elif ptype == 'PartType4':
+            ftype='star'
+        elif ptype == 'PartType5':
+            ftype='BH'
+        elif ptype == 'all':
+            ftype='all'
+        
+        super(OWLSFieldInfo,self).setup_particle_fields(
+            ptype, num_neighbors=self._num_neighbors, ftype=ftype)
+
+
+        # and now we add the smoothed versions for PartType0
+        #-----------------------------------------------------
+        if ptype == 'PartType0':
+
+            loaded = []
+            for s in self._elements:
+                for sfx in smoothed_suffixes:
+                    fname = s + sfx
+                    fn = add_volume_weighted_smoothed_field( 
+                        ptype, "particle_position", "particle_mass",
+                        "smoothing_length", "density", fname, self,
+                        self._num_neighbors)
+                    loaded += fn
+
+                    self.alias(("gas", fname), fn[0])
+
+            self._show_field_errors += loaded
+            self.find_dependencies(loaded)
+
+
+            # we only add ion fields for gas.  this takes some 
+            # time as the ion abundances have to be interpolated
+            # from cloudy tables (optically thin)
+            #-----------------------------------------------------
+    
+
+            # this defines the ion density on particles
+            # X_density for all items in self._ions
+            #-----------------------------------------------
+            self.setup_gas_ion_density_particle_fields( ptype )
+
+            # this adds the rest of the ion particle fields
+            # X_fraction, X_mass, X_number_density
+            #-----------------------------------------------
+            for ion in self._ions:
+
+                # construct yt name for ion
+                #---------------------------------------------------
+                if ion[0:2].isalpha():
+                    symbol = ion[0:2].capitalize()
+                    roman = int(ion[2:])
+                else:
+                    symbol = ion[0:1].capitalize()
+                    roman = int(ion[1:])
+
+                pstr = "_p" + str(roman-1)
+                yt_ion = symbol + pstr
+
+                # add particle field
+                #---------------------------------------------------
+                add_species_field_by_density(self, ptype, yt_ion,
+                                             particle_type=True)
+
+
+            # add smoothed ion fields
+            #-----------------------------------------------
+            for ion in self._ions:
+
+                # construct yt name for ion
+                #---------------------------------------------------
+                if ion[0:2].isalpha():
+                    symbol = ion[0:2].capitalize()
+                    roman = int(ion[2:])
+                else:
+                    symbol = ion[0:1].capitalize()
+                    roman = int(ion[1:])
+
+                pstr = "_p" + str(roman-1)
+                yt_ion = symbol + pstr
+
+                loaded = []
+                for sfx in smoothed_suffixes:
+                    fname = yt_ion + sfx
+                    fn = add_volume_weighted_smoothed_field( 
+                        ptype, "particle_position", "particle_mass",
+                        "smoothing_length", "density", fname, self,
+                        self._num_neighbors)
+                    loaded += fn
+
+                    self.alias(("gas", fname), fn[0])
+
+                self._show_field_errors += loaded
+                self.find_dependencies(loaded)
+
+
+
+    def setup_gas_ion_density_particle_fields( self, ptype ):
+        """ Sets up particle fields for gas ion densities. """ 
+
+        # loop over all ions and make fields
+        #----------------------------------------------
+        for ion in self._ions:
+
+            # construct yt name for ion
+            #---------------------------------------------------
+            if ion[0:2].isalpha():
+                symbol = ion[0:2].capitalize()
+                roman = int(ion[2:])
+            else:
+                symbol = ion[0:1].capitalize()
+                roman = int(ion[1:])
+
+            pstr = "_p" + str(roman-1)
+            yt_ion = symbol + pstr
+            ftype = ptype
+
+            # add ion density field for particles
+            #---------------------------------------------------
+            fname = yt_ion + '_density'
+            dens_func = self._create_ion_density_func( ftype, ion )
+            self.add_field( (ftype, fname),
+                            function = dens_func, 
+                            units="g/cm**3",
+                            particle_type=True )            
+            self._show_field_errors.append( (ftype,fname) )
+
+
+
+        
+    def _create_ion_density_func( self, ftype, ion ):
+        """ returns a function that calculates the ion density of a particle. 
+        """ 
+
+        def _ion_density(field, data):
+
+            # get element symbol from ion string. ion string will 
+            # be a member of the tuple _ions (i.e. si13)
+            #--------------------------------------------------------
+            if ion[0:2].isalpha():
+                symbol = ion[0:2].capitalize()
+            else:
+                symbol = ion[0:1].capitalize()
+
+            # mass fraction for the element
+            #--------------------------------------------------------
+            m_frac = data[ftype, symbol+"_fraction"]
+
+            # get nH and T for lookup
+            #--------------------------------------------------------
+            log_nH = np.log10( data["PartType0", "H_number_density"] )
+            log_T = np.log10( data["PartType0", "Temperature"] )
+
+            # get name of owls_ion_file for given ion
+            #--------------------------------------------------------
+            owls_ion_path = self._get_owls_ion_data_dir()
+            fname = os.path.join( owls_ion_path, ion+".hdf5" )
+
+            # create ionization table for this redshift
+            #--------------------------------------------------------
+            itab = oit.IonTableOWLS( fname )
+            itab.set_iz( data.ds.current_redshift )
+
+            # find ion balance using log nH and log T
+            #--------------------------------------------------------
+            i_frac = itab.interp( log_nH, log_T )
+            return data[ftype,"Density"] * m_frac * i_frac 
+        
+        return _ion_density
+
+
+
+
+
+    # the element fields (X_mass, X_density, X_fraction, and
+    # X_number_density) are added as smoothed particle fields in
+    # setup_particle_fields above, so this is deliberately a no-op.
+    #-------------------------------------------------------------
+    def setup_fluid_fields(self):
+
+        return
+
+
+
+    # this function returns the owls_ion_data directory. if it doesn't
+    # exist it will download the data from http://yt-project.org/data
+    #-------------------------------------------------------------
+    def _get_owls_ion_data_dir(self):
+
+        txt = "Attempting to download ~ 30 Mb of owls ion data from %s to %s."
+        data_file = "owls_ion_data.tar.gz"
+        data_url = "http://yt-project.org/data"
+
+        # get test_data_dir from yt config (ytcgf)
+        #----------------------------------------------
+        tdir = ytcfg.get("yt","test_data_dir")
+
+        # set download destination to tdir or ./ if tdir isn't defined
+        #----------------------------------------------
+        if tdir == "/does/not/exist":
+            data_dir = "./"
+        else:
+            data_dir = tdir            
+
+
+        # check for owls_ion_data directory in data_dir
+        # if not there download the tarball and untar it
+        #----------------------------------------------
+        owls_ion_path = os.path.join( data_dir, "owls_ion_data" )
+
+        if not os.path.exists(owls_ion_path):
+            mylog.info(txt % (data_url, data_dir))                    
+            fname = data_dir + "/" + data_file
+            fn = download_file(os.path.join(data_url, data_file), fname)
+
+            cmnd = "cd " + data_dir + "; " + "tar xf " + data_file
+            os.system(cmnd)
+
+
+        if not os.path.exists(owls_ion_path):
+            raise RuntimeError("Failed to download owls ion data.")
+
+        return owls_ion_path
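
The ion-name parsing that appears three times above maps an OWLS ion string to a yt species name: a one- or two-letter element symbol followed by the ionization state N becomes the suffix _p(N-1). A worked example of that logic:

    for ion in ("h1", "o6", "si13"):
        if ion[0:2].isalpha():
            symbol, roman = ion[0:2].capitalize(), int(ion[2:])
        else:
            symbol, roman = ion[0:1].capitalize(), int(ion[1:])
        print symbol + "_p" + str(roman - 1)   # -> H_p0, O_p5, Si_p12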

diff -r 7da392db9455f00a08bd794360ef80f6074b933e -r f2b89637ff33eed724862683975e598b38abbca5 yt/frontends/owls/io.py
--- /dev/null
+++ b/yt/frontends/owls/io.py
@@ -0,0 +1,203 @@
+"""
+OWLS data-file handling function
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+import numpy as np
+import os
+
+from yt.utilities.io_handler import \
+    BaseIOHandler
+from yt.utilities.lib.geometry_utils import \
+    compute_morton
+
+from .definitions import \
+    ghdf5_ptypes
+
+CHUNKSIZE = 10000000
+
+def _get_h5_handle(fn):
+    try:
+        f = h5py.File(fn, "r")
+    except IOError as e:
+        print "ERROR OPENING %s" % (fn)
+        if os.path.exists(fn):
+            print "FILENAME EXISTS"
+        else:
+            print "FILENAME DOES NOT EXIST"
+        raise
+    return f
+
+class IOHandlerOWLS(BaseIOHandler):
+    _dataset_type = "OWLS"
+    _vector_fields = ("Coordinates", "Velocity", "Velocities")
+    _known_ptypes = ghdf5_ptypes
+    _var_mass = None
+    _element_names = ('Hydrogen', 'Helium', 'Carbon', 'Nitrogen', 'Oxygen',
+                       'Neon', 'Magnesium', 'Silicon', 'Iron' )
+
+
+    @property
+    def var_mass(self):
+        if self._var_mass is None:
+            vm = []
+            for i, v in enumerate(self.ds["Massarr"]):
+                if v == 0:
+                    vm.append(self._known_ptypes[i])
+            self._var_mass = tuple(vm)
+        return self._var_mass
+
+    def _read_fluid_selection(self, chunks, selector, fields, size):
+        raise NotImplementedError
+
+    def _read_particle_coords(self, chunks, ptf):
+        # This will read chunks and yield the results.
+        chunks = list(chunks)
+        data_files = set([])
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in sorted(data_files):
+            f = _get_h5_handle(data_file.filename)
+            # This double-reads
+            for ptype, field_list in sorted(ptf.items()):
+                if data_file.total_particles[ptype] == 0:
+                    continue
+                x = f["/%s/Coordinates" % ptype][:,0].astype("float64")
+                y = f["/%s/Coordinates" % ptype][:,1].astype("float64")
+                z = f["/%s/Coordinates" % ptype][:,2].astype("float64")
+                yield ptype, (x, y, z)
+            f.close()
+
+    def _read_particle_fields(self, chunks, ptf, selector):
+        # Now we have all the sizes, and we can allocate
+        data_files = set([])
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in sorted(data_files):
+            f = _get_h5_handle(data_file.filename)
+            for ptype, field_list in sorted(ptf.items()):
+                if data_file.total_particles[ptype] == 0:
+                    continue
+                g = f["/%s" % ptype]
+                coords = g["Coordinates"][:].astype("float64")
+                mask = selector.select_points(
+                            coords[:,0], coords[:,1], coords[:,2], 0.0)
+                del coords
+                if mask is None: continue
+                for field in field_list:
+
+                    if field in ("Mass", "Masses") and \
+                        ptype not in self.var_mass:
+                        data = np.empty(mask.sum(), dtype="float64")
+                        ind = self._known_ptypes.index(ptype)
+                        data[:] = self.ds["Massarr"][ind]
+
+                    elif field in self._element_names:
+                        rfield = 'ElementAbundance/' + field
+                        data = g[rfield][:][mask,...]
+                    elif field.startswith("Metallicity_"):
+                        col = int(field.rsplit("_", 1)[-1])
+                        data = g["Metallicity"][:,col][mask]
+                    elif field.startswith("Chemistry_"):
+                        col = int(field.rsplit("_", 1)[-1])
+                        data = g["ChemistryAbundances"][:,col][mask]
+                    else:
+                        data = g[field][:][mask,...]
+
+                    yield (ptype, field), data
+            f.close()
+
+    def _initialize_index(self, data_file, regions):
+        f = _get_h5_handle(data_file.filename)
+        pcount = f["/Header"].attrs["NumPart_ThisFile"][:].sum()
+        morton = np.empty(pcount, dtype='uint64')
+        ind = 0
+        for key in f.keys():
+            if not key.startswith("PartType"): continue
+            if "Coordinates" not in f[key]: continue
+            ds = f[key]["Coordinates"]
+            dt = ds.dtype.newbyteorder("N") # Native
+            pos = np.empty(ds.shape, dtype=dt)
+            pos[:] = ds
+            regions.add_data_file(pos, data_file.file_id,
+                                  data_file.ds.filter_bbox)
+            morton[ind:ind+pos.shape[0]] = compute_morton(
+                pos[:,0], pos[:,1], pos[:,2],
+                data_file.ds.domain_left_edge,
+                data_file.ds.domain_right_edge,
+                data_file.ds.filter_bbox)
+            ind += pos.shape[0]
+        f.close()
+        return morton
+
+    def _count_particles(self, data_file):
+        f = _get_h5_handle(data_file.filename)
+        pcount = f["/Header"].attrs["NumPart_ThisFile"][:]
+        f.close()
+        npart = dict(("PartType%s" % (i), v) for i, v in enumerate(pcount))
+        return npart
+
+
+    def _identify_fields(self, data_file):
+        f = _get_h5_handle(data_file.filename)
+        fields = []
+        cname = self.ds._particle_coordinates_name  # Coordinates
+        mname = self.ds._particle_mass_name  # Mass
+
+        # loop over all keys in OWLS hdf5 file
+        #--------------------------------------------------
+        for key in f.keys():
+
+            # only want particle data
+            #--------------------------------------
+            if not key.startswith("PartType"): continue
+
+            # particle data group
+            #--------------------------------------
+            g = f[key]
+            if cname not in g: continue
+
+            # note str => not unicode!
+
+            #ptype = int(key[8:])
+            ptype = str(key)
+            if ptype not in self.var_mass:
+                fields.append((ptype, mname))
+
+            # loop over all keys in PartTypeX group
+            #----------------------------------------
+            for k in g.keys():
+
+                if k == 'ElementAbundance':
+                    gp = g[k]
+                    for j in gp.keys():
+                        kk = j
+                        fields.append((ptype, str(kk)))
+                elif k == 'Metallicity' and len(g[k].shape) > 1:
+                    # Vector of metallicity
+                    for i in range(g[k].shape[1]):
+                        fields.append((ptype, "Metallicity_%02i" % i))
+                elif k == "ChemistryAbundances" and len(g[k].shape)>1:
+                    for i in range(g[k].shape[1]):
+                        fields.append((ptype, "Chemistry_%03i" % i))
+                else:
+                    kk = k
+                    if not hasattr(g[kk], "shape"): continue
+                    fields.append((ptype, str(kk)))
+
+
+        f.close()
+        return fields, {}
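
The column-flattening in _identify_fields exposes one yt field per column of a 2D on-disk dataset; for instance, a hypothetical (N, 11) Metallicity array would surface as:

    fields = [("PartType0", "Metallicity_%02i" % i) for i in range(11)]
    # -> Metallicity_00 ... Metallicity_10; ChemistryAbundances uses the
    #    three-digit form Chemistry_%03i instead.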

This diff is so big that we needed to truncate the remainder.

https://bitbucket.org/yt_analysis/yt/commits/241faffb6a9b/
Changeset:   241faffb6a9b
Branch:      yt
User:        brittonsmith
Date:        2014-10-22 20:40:29+00:00
Summary:     Adding sph frontends to api.
Affected #:  1 file

diff -r f2b89637ff33eed724862683975e598b38abbca5 -r 241faffb6a9baa80b8ec1800335a0461183a95d7 yt/frontends/api.py
--- a/yt/frontends/api.py
+++ b/yt/frontends/api.py
@@ -21,17 +21,21 @@
     'athena',
     'boxlib',
     'chombo',
+    'eagle',
     'enzo',
     'fits',
     'flash',
+    'gadget',
     'gdf',
     'halo_catalogs',
+    'http_stream',
     'moab',
+    'owls',
     #'pluto',
     'ramses',
     'sdf',
-    'sph',
     'stream',
+    'tipsy',
 ]
 
 class _frontend_container:


https://bitbucket.org/yt_analysis/yt/commits/c440d1a51b62/
Changeset:   c440d1a51b62
Branch:      yt
User:        brittonsmith
Date:        2014-10-23 00:00:09+00:00
Summary:     Fixing a number of imports.
Affected #:  5 files

diff -r 241faffb6a9baa80b8ec1800335a0461183a95d7 -r c440d1a51b62a09c31e3585d35ee759be4175af8 yt/__init__.py
--- a/yt/__init__.py
+++ b/yt/__init__.py
@@ -131,9 +131,9 @@
     hexahedral_connectivity
 
 # For backwards compatibility
-GadgetDataset = frontends.sph.GadgetDataset
+GadgetDataset = frontends.gadget.GadgetDataset
 GadgetStaticOutput = deprecated_class(GadgetDataset)
-TipsyDataset = frontends.sph.TipsyDataset
+TipsyDataset = frontends.tipsy.TipsyDataset
 TipsyStaticOutput = deprecated_class(TipsyDataset)
 
 # Now individual component imports from the visualization API

diff -r 241faffb6a9baa80b8ec1800335a0461183a95d7 -r c440d1a51b62a09c31e3585d35ee759be4175af8 yt/frontends/halo_catalogs/owls_subfind/data_structures.py
--- a/yt/frontends/halo_catalogs/owls_subfind/data_structures.py
+++ b/yt/frontends/halo_catalogs/owls_subfind/data_structures.py
@@ -37,7 +37,7 @@
 from yt.data_objects.static_output import \
     Dataset, \
     ParticleFile
-from yt.frontends.sph.gadget.data_structures import \
+from yt.frontends.gadget.data_structures import \
     _fix_unit_ordering
 import yt.utilities.fortran_utils as fpu
 from yt.units.yt_array import \

diff -r 241faffb6a9baa80b8ec1800335a0461183a95d7 -r c440d1a51b62a09c31e3585d35ee759be4175af8 yt/frontends/owls/tests/test_owls.py
--- a/yt/frontends/owls/tests/test_owls.py
+++ b/yt/frontends/owls/tests/test_owls.py
@@ -23,7 +23,7 @@
     PixelizedProjectionValuesTest, \
     FieldValuesTest, \
     create_obj
-from yt.frontends.sph.api import OWLSDataset
+from yt.frontends.owls.api import OWLSDataset
 
 _fields = (("deposit", "all_density"), ("deposit", "all_count"),
            ("deposit", "PartType0_density"),

diff -r 241faffb6a9baa80b8ec1800335a0461183a95d7 -r c440d1a51b62a09c31e3585d35ee759be4175af8 yt/frontends/tipsy/api.py
--- a/yt/frontends/tipsy/api.py
+++ b/yt/frontends/tipsy/api.py
@@ -15,6 +15,9 @@
 #-----------------------------------------------------------------------------
 
 from .data_structures import \
-     TipsyDataset
+    TipsyDataset
 
+from .io import \
+    IOHandlerTipsyBinary
+     
 from . import tests

diff -r 241faffb6a9baa80b8ec1800335a0461183a95d7 -r c440d1a51b62a09c31e3585d35ee759be4175af8 yt/frontends/tipsy/io.py
--- a/yt/frontends/tipsy/io.py
+++ b/yt/frontends/tipsy/io.py
@@ -16,6 +16,7 @@
 
 import glob
 import numpy as np
+import os
 
 from yt.geometry.oct_container import \
     _ORDER_MAX
@@ -23,7 +24,9 @@
     BaseIOHandler
 from yt.utilities.lib.geometry_utils import \
     compute_morton
-
+from yt.utilities.logger import ytLogger as \
+    mylog
+    
 CHUNKSIZE = 10000000
 
 class IOHandlerTipsyBinary(BaseIOHandler):


https://bitbucket.org/yt_analysis/yt/commits/1fbb2d098ecb/
Changeset:   1fbb2d098ecb
Branch:      yt
User:        brittonsmith
Date:        2014-10-23 00:12:03+00:00
Summary:     Fixing frontend imports.
Affected #:  5 files

diff -r c440d1a51b62a09c31e3585d35ee759be4175af8 -r 1fbb2d098ecb072ca63a64eec9a6288432660204 yt/frontends/eagle/api.py
--- a/yt/frontends/eagle/api.py
+++ b/yt/frontends/eagle/api.py
@@ -17,3 +17,9 @@
 from .data_structures import \
     EagleDataset, \
     EagleNetworkDataset
+
+from .fields import \
+    EagleNetworkFieldInfo
+
+from .io import \
+    IOHandlerEagleNetwork

diff -r c440d1a51b62a09c31e3585d35ee759be4175af8 -r 1fbb2d098ecb072ca63a64eec9a6288432660204 yt/frontends/gadget/api.py
--- a/yt/frontends/gadget/api.py
+++ b/yt/frontends/gadget/api.py
@@ -15,5 +15,8 @@
 #-----------------------------------------------------------------------------
 
 from .data_structures import \
-      GadgetDataset, \
-      GadgetHDF5Dataset
+    GadgetDataset, \
+    GadgetHDF5Dataset
+
+from .io import \
+    IOHandlerGadgetBinary

diff -r c440d1a51b62a09c31e3585d35ee759be4175af8 -r 1fbb2d098ecb072ca63a64eec9a6288432660204 yt/frontends/http_stream/api.py
--- a/yt/frontends/http_stream/api.py
+++ b/yt/frontends/http_stream/api.py
@@ -16,3 +16,6 @@
 
 from .data_structures import \
     HTTPStreamDataset
+
+from .io import \
+    IOHandlerHTTPStream

diff -r c440d1a51b62a09c31e3585d35ee759be4175af8 -r 1fbb2d098ecb072ca63a64eec9a6288432660204 yt/frontends/owls/api.py
--- a/yt/frontends/owls/api.py
+++ b/yt/frontends/owls/api.py
@@ -17,4 +17,10 @@
 from .data_structures import \
     OWLSDataset
 
+from .fields import \
+    OWLSFieldInfo
+
+from .io import \
+    IOHandlerOWLS
+    
 from . import tests

diff -r c440d1a51b62a09c31e3585d35ee759be4175af8 -r 1fbb2d098ecb072ca63a64eec9a6288432660204 yt/frontends/tipsy/api.py
--- a/yt/frontends/tipsy/api.py
+++ b/yt/frontends/tipsy/api.py
@@ -17,6 +17,9 @@
 from .data_structures import \
     TipsyDataset
 
+from .fields import \
+    TipsyFieldInfo
+    
 from .io import \
     IOHandlerTipsyBinary
      


https://bitbucket.org/yt_analysis/yt/commits/cf0fe81c9055/
Changeset:   cf0fe81c9055
Branch:      yt
User:        brittonsmith
Date:        2014-10-23 00:15:56+00:00
Summary:     Moving HaloCatalog frontend to main frontend directory.
Affected #:  10 files

diff -r 1fbb2d098ecb072ca63a64eec9a6288432660204 -r cf0fe81c90554a4483415e5b9e36ed484f674b69 yt/frontends/halo_catalog/__init__.py
--- /dev/null
+++ b/yt/frontends/halo_catalog/__init__.py
@@ -0,0 +1,15 @@
+"""
+API for HaloCatalog frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------

diff -r 1fbb2d098ecb072ca63a64eec9a6288432660204 -r cf0fe81c90554a4483415e5b9e36ed484f674b69 yt/frontends/halo_catalog/api.py
--- /dev/null
+++ b/yt/frontends/halo_catalog/api.py
@@ -0,0 +1,24 @@
+"""
+API for HaloCatalog frontend
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from .data_structures import \
+     HaloCatalogDataset
+
+from .io import \
+     IOHandlerHaloCatalogHDF5
+
+from .fields import \
+     HaloCatalogFieldInfo

diff -r 1fbb2d098ecb072ca63a64eec9a6288432660204 -r cf0fe81c90554a4483415e5b9e36ed484f674b69 yt/frontends/halo_catalog/data_structures.py
--- /dev/null
+++ b/yt/frontends/halo_catalog/data_structures.py
@@ -0,0 +1,96 @@
+"""
+Data structures for HaloCatalog frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+import numpy as np
+import stat
+import weakref
+import struct
+import glob
+import time
+import os
+
+from .fields import \
+    HaloCatalogFieldInfo
+
+from yt.utilities.cosmology import Cosmology
+from yt.geometry.particle_geometry_handler import \
+    ParticleIndex
+from yt.data_objects.static_output import \
+    Dataset, \
+    ParticleFile
+import yt.utilities.fortran_utils as fpu
+from yt.units.yt_array import \
+    YTArray, \
+    YTQuantity
+    
+class HaloCatalogHDF5File(ParticleFile):
+    def __init__(self, ds, io, filename, file_id):
+        with h5py.File(filename, "r") as f:
+            self.header = dict((field, f.attrs[field]) \
+                               for field in f.attrs.keys())
+
+        super(HaloCatalogHDF5File, self).__init__(ds, io, filename, file_id)
+    
+class HaloCatalogDataset(Dataset):
+    _index_class = ParticleIndex
+    _file_class = HaloCatalogHDF5File
+    _field_info_class = HaloCatalogFieldInfo
+    _suffix = ".h5"
+
+    def __init__(self, filename, dataset_type="halocatalog_hdf5",
+                 n_ref = 16, over_refine_factor = 1):
+        self.n_ref = n_ref
+        self.over_refine_factor = over_refine_factor
+        super(HaloCatalogDataset, self).__init__(filename, dataset_type)
+
+    def _parse_parameter_file(self):
+        with h5py.File(self.parameter_filename, "r") as f:
+            hvals = dict((key, f.attrs[key]) for key in f.attrs.keys())
+        self.dimensionality = 3
+        self.refine_by = 2
+        self.unique_identifier = \
+            int(os.stat(self.parameter_filename)[stat.ST_CTIME])
+        prefix = ".".join(self.parameter_filename.rsplit(".", 2)[:-2])
+        self.filename_template = "%s.%%(num)s%s" % (prefix, self._suffix)
+        self.file_count = len(glob.glob(prefix + "*" + self._suffix))
+
+        for attr in ["cosmological_simulation", "current_time", "current_redshift",
+                     "hubble_constant", "omega_matter", "omega_lambda",
+                     "domain_left_edge", "domain_right_edge"]:
+            setattr(self, attr, hvals[attr])
+        self.periodicity = (True, True, True)
+        self.particle_types = ("halos",)
+        self.particle_types_raw = ("halos",)
+
+        nz = 1 << self.over_refine_factor
+        self.domain_dimensions = np.ones(3, "int32") * nz
+        self.parameters.update(hvals)
+
+    def _set_code_unit_attributes(self):
+        self.length_unit = self.quan(1.0, "cm")
+        self.mass_unit = self.quan(1.0, "g")
+        self.velocity_unit = self.quan(1.0, "cm / s")
+        self.time_unit = self.quan(1.0, "s")
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        if not args[0].endswith(".h5"): return False
+        with h5py.File(args[0], "r") as f:
+            if "data_type" in f.attrs and \
+              f.attrs["data_type"] == "halo_catalog":
+                return True
+        return False
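
_is_valid above accepts any .h5 file whose root attributes carry
data_type == "halo_catalog", and _parse_parameter_file then reads the
remaining header attributes. A minimal sketch of a file that would pass
both checks (the filename and attribute values are illustrative):

    import h5py

    with h5py.File("catalog.0.h5", "w") as f:
        f.attrs["data_type"] = "halo_catalog"
        f.attrs["num_halos"] = 0
        f.attrs["domain_left_edge"] = [0.0, 0.0, 0.0]
        f.attrs["domain_right_edge"] = [1.0, 1.0, 1.0]
        for attr, val in [("cosmological_simulation", 1),
                          ("current_time", 0.0), ("current_redshift", 0.0),
                          ("hubble_constant", 0.7), ("omega_matter", 0.3),
                          ("omega_lambda", 0.7)]:
            f.attrs[attr] = val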

diff -r 1fbb2d098ecb072ca63a64eec9a6288432660204 -r cf0fe81c90554a4483415e5b9e36ed484f674b69 yt/frontends/halo_catalog/fields.py
--- /dev/null
+++ b/yt/frontends/halo_catalog/fields.py
@@ -0,0 +1,48 @@
+"""
+HaloCatalog-specific fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+
+from yt.funcs import mylog
+from yt.fields.field_info_container import \
+    FieldInfoContainer
+from yt.units.yt_array import \
+    YTArray
+
+from yt.utilities.physical_constants import \
+    mh, \
+    mass_sun_cgs
+
+m_units = "g"
+p_units = "cm"
+v_units = "cm / s"
+r_units = "cm"
+
+class HaloCatalogFieldInfo(FieldInfoContainer):
+    known_other_fields = (
+    )
+
+    known_particle_fields = (
+        ("particle_identifier", ("", [], None)),
+        ("particle_position_x", (p_units, [], None)),
+        ("particle_position_y", (p_units, [], None)),
+        ("particle_position_z", (p_units, [], None)),
+        ("particle_velocity_x", (v_units, [], None)),
+        ("particle_velocity_y", (v_units, [], None)),
+        ("particle_velocity_z", (v_units, [], None)),
+        ("particle_mass", (m_units, [], "Virial Mass")),
+        ("virial_radius", (r_units, [], "Virial Radius")),
+)
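
Each entry above follows yt's FieldInfoContainer convention: an on-disk
name maps to a (units, aliases, display_name) tuple. Unpacking one row as
an example:

    name, (units, aliases, display_name) = \
        ("particle_mass", ("g", [], "Virial Mass"))
    # units -> "g", aliases -> [] (no universal-field aliases),
    # display_name -> "Virial Mass" (used for plot labels)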

diff -r 1fbb2d098ecb072ca63a64eec9a6288432660204 -r cf0fe81c90554a4483415e5b9e36ed484f674b69 yt/frontends/halo_catalog/io.py
--- /dev/null
+++ b/yt/frontends/halo_catalog/io.py
@@ -0,0 +1,119 @@
+"""
+HaloCatalog data-file handling function
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+import numpy as np
+
+from yt.utilities.exceptions import *
+from yt.funcs import mylog
+
+from yt.utilities.io_handler import \
+    BaseIOHandler
+
+from yt.utilities.lib.geometry_utils import compute_morton
+
+from yt.geometry.oct_container import _ORDER_MAX
+
+class IOHandlerHaloCatalogHDF5(BaseIOHandler):
+    _dataset_type = "halocatalog_hdf5"
+
+    def _read_fluid_selection(self, chunks, selector, fields, size):
+        raise NotImplementedError
+
+    def _read_particle_coords(self, chunks, ptf):
+        # This will read chunks and yield the results.
+        chunks = list(chunks)
+        data_files = set([])
+        # Only support halo reading for now.
+        assert(len(ptf) == 1)
+        assert(ptf.keys()[0] == "halos")
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in sorted(data_files):
+            pcount = data_file.header['num_halos']
+            with h5py.File(data_file.filename, "r") as f:
+                x = f['particle_position_x'].value.astype("float64")
+                y = f['particle_position_y'].value.astype("float64")
+                z = f['particle_position_z'].value.astype("float64")
+                yield "halos", (x, y, z)
+
+    def _read_particle_fields(self, chunks, ptf, selector):
+        # Now we have all the sizes, and we can allocate
+        chunks = list(chunks)
+        data_files = set([])
+        # Only support halo reading for now.
+        assert(len(ptf) == 1)
+        assert(ptf.keys()[0] == "halos")
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in sorted(data_files):
+            pcount = data_file.header['num_halos']
+            with h5py.File(data_file.filename, "r") as f:
+                for ptype, field_list in sorted(ptf.items()):
+                    x = f['particle_position_x'].value.astype("float64")
+                    y = f['particle_position_y'].value.astype("float64")
+                    z = f['particle_position_z'].value.astype("float64")
+                    mask = selector.select_points(x, y, z, 0.0)
+                    del x, y, z
+                    if mask is None: continue
+                    for field in field_list:
+                        data = f[field][mask].astype("float64")
+                        yield (ptype, field), data
+
+    def _initialize_index(self, data_file, regions):
+        pcount = data_file.header["num_halos"]
+        morton = np.empty(pcount, dtype='uint64')
+        mylog.debug("Initializing index % 5i (% 7i particles)",
+                    data_file.file_id, pcount)
+        ind = 0
+        with h5py.File(data_file.filename, "r") as f:
+            if not f.keys(): return None
+            pos = np.empty((pcount, 3), dtype="float64")
+            pos = data_file.ds.arr(pos, "code_length")
+            dx = np.finfo(f['particle_position_x'].dtype).eps
+            dx = 2.0*self.ds.quan(dx, "code_length")
+            pos[:,0] = f["particle_position_x"].value
+            pos[:,1] = f["particle_position_y"].value
+            pos[:,2] = f["particle_position_z"].value
+            # These are 32-bit numbers, so we give a little leeway.
+            # Otherwise, for big sets of particles, we will often bump into the
+            # domain edges.  This helps alleviate that.
+            np.clip(pos, self.ds.domain_left_edge + dx,
+                         self.ds.domain_right_edge - dx, pos)
+            if np.any(pos.min(axis=0) < self.ds.domain_left_edge) or \
+               np.any(pos.max(axis=0) > self.ds.domain_right_edge):
+                raise YTDomainOverflow(pos.min(axis=0),
+                                       pos.max(axis=0),
+                                       self.ds.domain_left_edge,
+                                       self.ds.domain_right_edge)
+            regions.add_data_file(pos, data_file.file_id)
+            morton[ind:ind+pos.shape[0]] = compute_morton(
+                pos[:,0], pos[:,1], pos[:,2],
+                data_file.ds.domain_left_edge,
+                data_file.ds.domain_right_edge)
+        return morton
+
+    def _count_particles(self, data_file):
+        return {'halos': data_file.header['num_halos']}
+
+    def _identify_fields(self, data_file):
+        with h5py.File(data_file.filename, "r") as f:
+            fields = [("halos", field) for field in f]
+            units = dict([(("halos", field), 
+                           f[field].attrs["units"]) for field in f])
+        return fields, units
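
One detail in _initialize_index worth calling out: because the on-disk
positions are 32-bit, a particle can round to exactly the domain edge and
trip the overflow check, so positions are first clipped inward by a couple
of machine epsilons. The same trick in isolation (pure NumPy, values
illustrative):

    import numpy as np

    left, right = np.zeros(3), np.ones(3)         # domain edges
    pos = np.array([[1.0, 0.5, 0.0]])             # point exactly on the edge
    dx = 2.0 * np.finfo(np.float32).eps           # a little leeway
    np.clip(pos, left + dx, right - dx, out=pos)  # nudge strictly inside
    assert (pos > left).all() and (pos < right).all()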

diff -r 1fbb2d098ecb072ca63a64eec9a6288432660204 -r cf0fe81c90554a4483415e5b9e36ed484f674b69 yt/frontends/halo_catalogs/halo_catalog/__init__.py
--- a/yt/frontends/halo_catalogs/halo_catalog/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-"""
-API for HaloCatalog frontend.
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------

diff -r 1fbb2d098ecb072ca63a64eec9a6288432660204 -r cf0fe81c90554a4483415e5b9e36ed484f674b69 yt/frontends/halo_catalogs/halo_catalog/api.py
--- a/yt/frontends/halo_catalogs/halo_catalog/api.py
+++ /dev/null
@@ -1,24 +0,0 @@
-"""
-API for HaloCatalog frontend
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2014, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from .data_structures import \
-     HaloCatalogDataset
-
-from .io import \
-     IOHandlerHaloCatalogHDF5
-
-from .fields import \
-     HaloCatalogFieldInfo

diff -r 1fbb2d098ecb072ca63a64eec9a6288432660204 -r cf0fe81c90554a4483415e5b9e36ed484f674b69 yt/frontends/halo_catalogs/halo_catalog/data_structures.py
--- a/yt/frontends/halo_catalogs/halo_catalog/data_structures.py
+++ /dev/null
@@ -1,96 +0,0 @@
-"""
-Data structures for HaloCatalog frontend.
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-import h5py
-import numpy as np
-import stat
-import weakref
-import struct
-import glob
-import time
-import os
-
-from .fields import \
-    HaloCatalogFieldInfo
-
-from yt.utilities.cosmology import Cosmology
-from yt.geometry.particle_geometry_handler import \
-    ParticleIndex
-from yt.data_objects.static_output import \
-    Dataset, \
-    ParticleFile
-import yt.utilities.fortran_utils as fpu
-from yt.units.yt_array import \
-    YTArray, \
-    YTQuantity
-    
-class HaloCatalogHDF5File(ParticleFile):
-    def __init__(self, ds, io, filename, file_id):
-        with h5py.File(filename, "r") as f:
-            self.header = dict((field, f.attrs[field]) \
-                               for field in f.attrs.keys())
-
-        super(HaloCatalogHDF5File, self).__init__(ds, io, filename, file_id)
-    
-class HaloCatalogDataset(Dataset):
-    _index_class = ParticleIndex
-    _file_class = HaloCatalogHDF5File
-    _field_info_class = HaloCatalogFieldInfo
-    _suffix = ".h5"
-
-    def __init__(self, filename, dataset_type="halocatalog_hdf5",
-                 n_ref = 16, over_refine_factor = 1):
-        self.n_ref = n_ref
-        self.over_refine_factor = over_refine_factor
-        super(HaloCatalogDataset, self).__init__(filename, dataset_type)
-
-    def _parse_parameter_file(self):
-        with h5py.File(self.parameter_filename, "r") as f:
-            hvals = dict((key, f.attrs[key]) for key in f.attrs.keys())
-        self.dimensionality = 3
-        self.refine_by = 2
-        self.unique_identifier = \
-            int(os.stat(self.parameter_filename)[stat.ST_CTIME])
-        prefix = ".".join(self.parameter_filename.rsplit(".", 2)[:-2])
-        self.filename_template = "%s.%%(num)s%s" % (prefix, self._suffix)
-        self.file_count = len(glob.glob(prefix + "*" + self._suffix))
-
-        for attr in ["cosmological_simulation", "current_time", "current_redshift",
-                     "hubble_constant", "omega_matter", "omega_lambda",
-                     "domain_left_edge", "domain_right_edge"]:
-            setattr(self, attr, hvals[attr])
-        self.periodicity = (True, True, True)
-        self.particle_types = ("halos")
-        self.particle_types_raw = ("halos")
-
-        nz = 1 << self.over_refine_factor
-        self.domain_dimensions = np.ones(3, "int32") * nz
-        self.parameters.update(hvals)
-
-    def _set_code_unit_attributes(self):
-        self.length_unit = self.quan(1.0, "cm")
-        self.mass_unit = self.quan(1.0, "g")
-        self.velocity_unit = self.quan(1.0, "cm / s")
-        self.time_unit = self.quan(1.0, "s")
-
-    @classmethod
-    def _is_valid(self, *args, **kwargs):
-        if not args[0].endswith(".h5"): return False
-        with h5py.File(args[0], "r") as f:
-            if "data_type" in f.attrs and \
-              f.attrs["data_type"] == "halo_catalog":
-                return True
-        return False

diff -r 1fbb2d098ecb072ca63a64eec9a6288432660204 -r cf0fe81c90554a4483415e5b9e36ed484f674b69 yt/frontends/halo_catalogs/halo_catalog/fields.py
--- a/yt/frontends/halo_catalogs/halo_catalog/fields.py
+++ /dev/null
@@ -1,48 +0,0 @@
-"""
-HaloCatalog-specific fields
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-import numpy as np
-
-from yt.funcs import mylog
-from yt.fields.field_info_container import \
-    FieldInfoContainer
-from yt.units.yt_array import \
-    YTArray
-
-from yt.utilities.physical_constants import \
-    mh, \
-    mass_sun_cgs
-
-m_units = "g"
-p_units = "cm"
-v_units = "cm / s"
-r_units = "cm"
-
-class HaloCatalogFieldInfo(FieldInfoContainer):
-    known_other_fields = (
-    )
-
-    known_particle_fields = (
-        ("particle_identifier", ("", [], None)),
-        ("particle_position_x", (p_units, [], None)),
-        ("particle_position_y", (p_units, [], None)),
-        ("particle_position_z", (p_units, [], None)),
-        ("particle_velocity_x", (v_units, [], None)),
-        ("particle_velocity_y", (v_units, [], None)),
-        ("particle_velocity_z", (v_units, [], None)),
-        ("particle_mass", (m_units, [], "Virial Mass")),
-        ("virial_radius", (r_units, [], "Virial Radius")),
-)

diff -r 1fbb2d098ecb072ca63a64eec9a6288432660204 -r cf0fe81c90554a4483415e5b9e36ed484f674b69 yt/frontends/halo_catalogs/halo_catalog/io.py
--- a/yt/frontends/halo_catalogs/halo_catalog/io.py
+++ /dev/null
@@ -1,119 +0,0 @@
-"""
-HaloCatalog data-file handling function
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-import h5py
-import numpy as np
-
-from yt.utilities.exceptions import *
-from yt.funcs import mylog
-
-from yt.utilities.io_handler import \
-    BaseIOHandler
-
-from yt.utilities.lib.geometry_utils import compute_morton
-
-from yt.geometry.oct_container import _ORDER_MAX
-
-class IOHandlerHaloCatalogHDF5(BaseIOHandler):
-    _dataset_type = "halocatalog_hdf5"
-
-    def _read_fluid_selection(self, chunks, selector, fields, size):
-        raise NotImplementedError
-
-    def _read_particle_coords(self, chunks, ptf):
-        # This will read chunks and yield the results.
-        chunks = list(chunks)
-        data_files = set([])
-        # Only support halo reading for now.
-        assert(len(ptf) == 1)
-        assert(ptf.keys()[0] == "halos")
-        for chunk in chunks:
-            for obj in chunk.objs:
-                data_files.update(obj.data_files)
-        for data_file in sorted(data_files):
-            pcount = data_file.header['num_halos']
-            with h5py.File(data_file.filename, "r") as f:
-                x = f['particle_position_x'].value.astype("float64")
-                y = f['particle_position_y'].value.astype("float64")
-                z = f['particle_position_z'].value.astype("float64")
-                yield "halos", (x, y, z)
-
-    def _read_particle_fields(self, chunks, ptf, selector):
-        # Now we have all the sizes, and we can allocate
-        chunks = list(chunks)
-        data_files = set([])
-        # Only support halo reading for now.
-        assert(len(ptf) == 1)
-        assert(ptf.keys()[0] == "halos")
-        for chunk in chunks:
-            for obj in chunk.objs:
-                data_files.update(obj.data_files)
-        for data_file in sorted(data_files):
-            pcount = data_file.header['num_halos']
-            with h5py.File(data_file.filename, "r") as f:
-                for ptype, field_list in sorted(ptf.items()):
-                    x = f['particle_position_x'].value.astype("float64")
-                    y = f['particle_position_y'].value.astype("float64")
-                    z = f['particle_position_z'].value.astype("float64")
-                    mask = selector.select_points(x, y, z, 0.0)
-                    del x, y, z
-                    if mask is None: continue
-                    for field in field_list:
-                        data = f[field][mask].astype("float64")
-                        yield (ptype, field), data
-
-    def _initialize_index(self, data_file, regions):
-        pcount = data_file.header["num_halos"]
-        morton = np.empty(pcount, dtype='uint64')
-        mylog.debug("Initializing index % 5i (% 7i particles)",
-                    data_file.file_id, pcount)
-        ind = 0
-        with h5py.File(data_file.filename, "r") as f:
-            if not f.keys(): return None
-            pos = np.empty((pcount, 3), dtype="float64")
-            pos = data_file.ds.arr(pos, "code_length")
-            dx = np.finfo(f['particle_position_x'].dtype).eps
-            dx = 2.0*self.ds.quan(dx, "code_length")
-            pos[:,0] = f["particle_position_x"].value
-            pos[:,1] = f["particle_position_y"].value
-            pos[:,2] = f["particle_position_z"].value
-            # These are 32 bit numbers, so we give a little lee-way.
-            # Otherwise, for big sets of particles, we often will bump into the
-            # domain edges.  This helps alleviate that.
-            np.clip(pos, self.ds.domain_left_edge + dx,
-                         self.ds.domain_right_edge - dx, pos)
-            if np.any(pos.min(axis=0) < self.ds.domain_left_edge) or \
-               np.any(pos.max(axis=0) > self.ds.domain_right_edge):
-                raise YTDomainOverflow(pos.min(axis=0),
-                                       pos.max(axis=0),
-                                       self.ds.domain_left_edge,
-                                       self.ds.domain_right_edge)
-            regions.add_data_file(pos, data_file.file_id)
-            morton[ind:ind+pos.shape[0]] = compute_morton(
-                pos[:,0], pos[:,1], pos[:,2],
-                data_file.ds.domain_left_edge,
-                data_file.ds.domain_right_edge)
-        return morton
-
-    def _count_particles(self, data_file):
-        return {'halos': data_file.header['num_halos']}
-
-    def _identify_fields(self, data_file):
-        with h5py.File(data_file.filename, "r") as f:
-            fields = [("halos", field) for field in f]
-            units = dict([(("halos", field), 
-                           f[field].attrs["units"]) for field in f])
-        return fields, units


https://bitbucket.org/yt_analysis/yt/commits/4221e22b2c23/
Changeset:   4221e22b2c23
Branch:      yt
User:        brittonsmith
Date:        2014-10-23 00:16:53+00:00
Summary:     Moving OWLSSubfind frontend to main frontend directory.
Affected #:  14 files
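
As with the previous changeset, consumers only see an import-path change;
a minimal before/after sketch (paths from the diffs below):

    # old location, removed in this changeset:
    # from yt.frontends.halo_catalogs.owls_subfind.api import OWLSSubfindDataset
    # new location:
    from yt.frontends.owls_subfind.api import OWLSSubfindDataset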

diff -r cf0fe81c90554a4483415e5b9e36ed484f674b69 -r 4221e22b2c232b4ea56a45ea84bd0d3c64cbdedf yt/frontends/halo_catalogs/owls_subfind/__init__.py
--- a/yt/frontends/halo_catalogs/owls_subfind/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-"""
-API for HaloCatalog frontend.
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------

diff -r cf0fe81c90554a4483415e5b9e36ed484f674b69 -r 4221e22b2c232b4ea56a45ea84bd0d3c64cbdedf yt/frontends/halo_catalogs/owls_subfind/api.py
--- a/yt/frontends/halo_catalogs/owls_subfind/api.py
+++ /dev/null
@@ -1,24 +0,0 @@
-"""
-API for OWLSSubfind frontend
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2014, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from .data_structures import \
-     OWLSSubfindDataset
-
-from .io import \
-     IOHandlerOWLSSubfindHDF5
-
-from .fields import \
-     OWLSSubfindFieldInfo

diff -r cf0fe81c90554a4483415e5b9e36ed484f674b69 -r 4221e22b2c232b4ea56a45ea84bd0d3c64cbdedf yt/frontends/halo_catalogs/owls_subfind/data_structures.py
--- a/yt/frontends/halo_catalogs/owls_subfind/data_structures.py
+++ /dev/null
@@ -1,222 +0,0 @@
-"""
-Data structures for OWLSSubfind frontend.
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from collections import defaultdict
-import h5py
-import numpy as np
-import stat
-import weakref
-import struct
-import glob
-import time
-import os
-
-from .fields import \
-    OWLSSubfindFieldInfo
-
-from yt.utilities.cosmology import Cosmology
-from yt.utilities.definitions import \
-    mpc_conversion, sec_conversion
-from yt.utilities.exceptions import \
-     YTException
-from yt.geometry.particle_geometry_handler import \
-    ParticleIndex
-from yt.data_objects.static_output import \
-    Dataset, \
-    ParticleFile
-from yt.frontends.gadget.data_structures import \
-    _fix_unit_ordering
-import yt.utilities.fortran_utils as fpu
-from yt.units.yt_array import \
-    YTArray, \
-    YTQuantity
-
-class OWLSSubfindParticleIndex(ParticleIndex):
-    def __init__(self, ds, dataset_type):
-        super(OWLSSubfindParticleIndex, self).__init__(ds, dataset_type)
-
-    def _calculate_particle_index_starts(self):
-        # Halo indices are not saved in the file, so we must count by hand.
-        # File 0 has halos 0 to N_0 - 1, file 1 has halos N_0 to N_0 + N_1 - 1, etc.
-        particle_count = defaultdict(int)
-        offset_count = 0
-        for data_file in self.data_files:
-            data_file.index_start = dict([(ptype, particle_count[ptype]) for
-                                           ptype in data_file.total_particles])
-            data_file.offset_start = offset_count
-            for ptype in data_file.total_particles:
-                particle_count[ptype] += data_file.total_particles[ptype]
-            offset_count += data_file.total_offset
-
-    def _calculate_file_offset_map(self):
-        # After the FOF  is performed, a load-balancing step redistributes halos 
-        # and then writes more fields.  Here, for each file, we create a list of 
-        # files which contain the rest of the redistributed particles.
-        ifof = np.array([data_file.total_particles["FOF"]
-                         for data_file in self.data_files])
-        isub = np.array([data_file.total_offset
-                         for data_file in self.data_files])
-        subend = isub.cumsum()
-        fofend = ifof.cumsum()
-        istart = np.digitize(fofend - ifof, subend - isub) - 1
-        iend = np.clip(np.digitize(fofend, subend), 0, ifof.size - 2)
-        for i, data_file in enumerate(self.data_files):
-            data_file.offset_files = self.data_files[istart[i]: iend[i] + 1]
-
-    def _detect_output_fields(self):
-        # TODO: Add additional fields
-        dsl = []
-        units = {}
-        for dom in self.data_files:
-            fl, _units = self.io._identify_fields(dom)
-            units.update(_units)
-            dom._calculate_offsets(fl)
-            for f in fl:
-                if f not in dsl: dsl.append(f)
-        self.field_list = dsl
-        ds = self.dataset
-        ds.particle_types = tuple(set(pt for pt, ds in dsl))
-        # This is an attribute that means these particle types *actually*
-        # exist.  As in, they are real, in the dataset.
-        ds.field_units.update(units)
-        ds.particle_types_raw = ds.particle_types
-            
-    def _setup_geometry(self):
-        super(OWLSSubfindParticleIndex, self)._setup_geometry()
-        self._calculate_particle_index_starts()
-        self._calculate_file_offset_map()
-    
-class OWLSSubfindHDF5File(ParticleFile):
-    def __init__(self, ds, io, filename, file_id):
-        super(OWLSSubfindHDF5File, self).__init__(ds, io, filename, file_id)
-        with h5py.File(filename, "r") as f:
-            self.header = dict((field, f.attrs[field]) \
-                               for field in f.attrs.keys())
-    
-class OWLSSubfindDataset(Dataset):
-    _index_class = OWLSSubfindParticleIndex
-    _file_class = OWLSSubfindHDF5File
-    _field_info_class = OWLSSubfindFieldInfo
-    _suffix = ".hdf5"
-
-    def __init__(self, filename, dataset_type="subfind_hdf5",
-                 n_ref = 16, over_refine_factor = 1):
-        self.n_ref = n_ref
-        self.over_refine_factor = over_refine_factor
-        super(OWLSSubfindDataset, self).__init__(filename, dataset_type)
-
-    def _parse_parameter_file(self):
-        handle = h5py.File(self.parameter_filename, mode="r")
-        hvals = {}
-        hvals.update((str(k), v) for k, v in handle["/Header"].attrs.items())
-        hvals["NumFiles"] = hvals["NumFilesPerSnapshot"]
-        hvals["Massarr"] = hvals["MassTable"]
-
-        self.dimensionality = 3
-        self.refine_by = 2
-        self.unique_identifier = \
-            int(os.stat(self.parameter_filename)[stat.ST_CTIME])
-
-        # Set standard values
-        self.current_time = self.quan(hvals["Time_GYR"] * sec_conversion["Gyr"], "s")
-        self.domain_left_edge = np.zeros(3, "float64")
-        self.domain_right_edge = np.ones(3, "float64") * hvals["BoxSize"]
-        nz = 1 << self.over_refine_factor
-        self.domain_dimensions = np.ones(3, "int32") * nz
-        self.cosmological_simulation = 1
-        self.periodicity = (True, True, True)
-        self.current_redshift = hvals["Redshift"]
-        self.omega_lambda = hvals["OmegaLambda"]
-        self.omega_matter = hvals["Omega0"]
-        self.hubble_constant = hvals["HubbleParam"]
-        self.parameters = hvals
-        prefix = os.path.abspath(
-            os.path.join(os.path.dirname(self.parameter_filename), 
-                         os.path.basename(self.parameter_filename).split(".", 1)[0]))
-        
-        suffix = self.parameter_filename.rsplit(".", 1)[-1]
-        self.filename_template = "%s.%%(num)i.%s" % (prefix, suffix)
-        self.file_count = len(glob.glob(prefix + "*" + self._suffix))
-        if self.file_count == 0:
-            raise YTException(message="No data files found.", ds=self)
-        self.particle_types = ("FOF", "SUBFIND")
-        self.particle_types_raw = ("FOF", "SUBFIND")
-        
-        # To avoid having to open files twice
-        self._unit_base = {}
-        self._unit_base.update(
-            (str(k), v) for k, v in handle["/Units"].attrs.items())
-        handle.close()
-
-    def _set_code_unit_attributes(self):
-        # Set a sane default for cosmological simulations.
-        if self._unit_base is None and self.cosmological_simulation == 1:
-            mylog.info("Assuming length units are in Mpc/h (comoving)")
-            self._unit_base = dict(length = (1.0, "Mpccm/h"))
-        # The other same defaults we will use from the standard Gadget
-        # defaults.
-        unit_base = self._unit_base or {}
-        if "length" in unit_base:
-            length_unit = unit_base["length"]
-        elif "UnitLength_in_cm" in unit_base:
-            if self.cosmological_simulation == 0:
-                length_unit = (unit_base["UnitLength_in_cm"], "cm")
-            else:
-                length_unit = (unit_base["UnitLength_in_cm"], "cmcm/h")
-        else:
-            raise RuntimeError
-        length_unit = _fix_unit_ordering(length_unit)
-        self.length_unit = self.quan(length_unit[0], length_unit[1])
-
-        unit_base = self._unit_base or {}
-        if "velocity" in unit_base:
-            velocity_unit = unit_base["velocity"]
-        elif "UnitVelocity_in_cm_per_s" in unit_base:
-            velocity_unit = (unit_base["UnitVelocity_in_cm_per_s"], "cm/s")
-        else:
-            velocity_unit = (1e5, "cm/s")
-        velocity_unit = _fix_unit_ordering(velocity_unit)
-        self.velocity_unit = self.quan(velocity_unit[0], velocity_unit[1])
-        # We set hubble_constant = 1.0 for non-cosmology, so this is safe.
-        # Default to 1e10 Msun/h if mass is not specified.
-        if "mass" in unit_base:
-            mass_unit = unit_base["mass"]
-        elif "UnitMass_in_g" in unit_base:
-            if self.cosmological_simulation == 0:
-                mass_unit = (unit_base["UnitMass_in_g"], "g")
-            else:
-                mass_unit = (unit_base["UnitMass_in_g"], "g/h")
-        else:
-            # Sane default
-            mass_unit = (1.0, "1e10*Msun/h")
-        mass_unit = _fix_unit_ordering(mass_unit)
-        self.mass_unit = self.quan(mass_unit[0], mass_unit[1])
-        self.time_unit = self.quan(unit_base["UnitTime_in_s"], "s")
-
-    @classmethod
-    def _is_valid(self, *args, **kwargs):
-        need_groups = ['Constants', 'Header', 'Parameters', 'Units', 'FOF']
-        veto_groups = []
-        valid = True
-        try:
-            fh = h5py.File(args[0], mode='r')
-            valid = all(ng in fh["/"] for ng in need_groups) and \
-              not any(vg in fh["/"] for vg in veto_groups)
-            fh.close()
-        except:
-            valid = False
-            pass
-        return valid

diff -r cf0fe81c90554a4483415e5b9e36ed484f674b69 -r 4221e22b2c232b4ea56a45ea84bd0d3c64cbdedf yt/frontends/halo_catalogs/owls_subfind/fields.py
--- a/yt/frontends/halo_catalogs/owls_subfind/fields.py
+++ /dev/null
@@ -1,58 +0,0 @@
-"""
-OWLSSubfind-specific fields
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from yt.funcs import mylog
-from yt.fields.field_info_container import \
-    FieldInfoContainer
-from yt.units.yt_array import \
-    YTArray
-
-m_units = "code_mass"
-mdot_units = "code_mass / code_time"
-p_units = "Mpccm/h"
-v_units = "1e5 * cmcm / s"
-
-class OWLSSubfindFieldInfo(FieldInfoContainer):
-    known_other_fields = (
-    )
-
-    known_particle_fields = (
-        ("CenterOfMass_0", (p_units, ["particle_position_x"], None)),
-        ("CenterOfMass_1", (p_units, ["particle_position_y"], None)),
-        ("CenterOfMass_2", (p_units, ["particle_position_z"], None)),
-        ("CenterOfMassVelocity_0", (v_units, ["particle_velocity_x"], None)),
-        ("CenterOfMassVelocity_1", (v_units, ["particle_velocity_y"], None)),
-        ("CenterOfMassVelocity_2", (v_units, ["particle_velocity_z"], None)),
-        ("Mass", (m_units, ["particle_mass"], None)),
-        ("Halo_M_Crit200", (m_units, ["Virial Mass"], None)),
-        ("Halo_M_Crit2500", (m_units, [], None)),
-        ("Halo_M_Crit500", (m_units, [], None)),
-        ("Halo_M_Mean200", (m_units, [], None)),
-        ("Halo_M_Mean2500", (m_units, [], None)),
-        ("Halo_M_Mean500", (m_units, [], None)),
-        ("Halo_M_TopHat200", (m_units, [], None)),
-        ("Halo_R_Crit200", (p_units, ["Virial Radius"], None)),
-        ("Halo_R_Crit2500", (p_units, [], None)),
-        ("Halo_R_Crit500", (p_units, [], None)),
-        ("Halo_R_Mean200", (p_units, [], None)),
-        ("Halo_R_Mean2500", (p_units, [], None)),
-        ("Halo_R_Mean500", (p_units, [], None)),
-        ("Halo_R_TopHat200", (p_units, [], None)),
-        ("BH_Mass", (m_units, [], None)),
-        ("Stars/Mass", (m_units, [], None)),
-        ("BH_Mdot", (mdot_units, [], None)),
-        ("StarFormationRate", (mdot_units, [], None)),
-)

diff -r cf0fe81c90554a4483415e5b9e36ed484f674b69 -r 4221e22b2c232b4ea56a45ea84bd0d3c64cbdedf yt/frontends/halo_catalogs/owls_subfind/io.py
--- a/yt/frontends/halo_catalogs/owls_subfind/io.py
+++ /dev/null
@@ -1,217 +0,0 @@
-"""
-OWLSSubfind data-file handling function
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-import h5py
-import numpy as np
-
-from yt.utilities.exceptions import *
-from yt.funcs import mylog
-
-from yt.utilities.io_handler import \
-    BaseIOHandler
-
-from yt.utilities.lib.geometry_utils import compute_morton
-
-from yt.geometry.oct_container import _ORDER_MAX
-
-class IOHandlerOWLSSubfindHDF5(BaseIOHandler):
-    _dataset_type = "subfind_hdf5"
-
-    def __init__(self, ds):
-        super(IOHandlerOWLSSubfindHDF5, self).__init__(ds)
-        self.offset_fields = set([])
-
-    def _read_fluid_selection(self, chunks, selector, fields, size):
-        raise NotImplementedError
-
-    def _read_particle_coords(self, chunks, ptf):
-        # This will read chunks and yield the results.
-        chunks = list(chunks)
-        data_files = set([])
-        for chunk in chunks:
-            for obj in chunk.objs:
-                data_files.update(obj.data_files)
-        for data_file in sorted(data_files):
-            with h5py.File(data_file.filename, "r") as f:
-                for ptype, field_list in sorted(ptf.items()):
-                    pcount = data_file.total_particles[ptype]
-                    coords = f[ptype]["CenterOfMass"].value.astype("float64")
-                    coords = np.resize(coords, (pcount, 3))
-                    x = coords[:, 0]
-                    y = coords[:, 1]
-                    z = coords[:, 2]
-                    yield ptype, (x, y, z)
-
-    def _read_offset_particle_field(self, field, data_file, fh):
-        field_data = np.empty(data_file.total_particles["FOF"], dtype="float64")
-        fofindex = np.arange(data_file.total_particles["FOF"]) + data_file.index_start["FOF"]
-        for offset_file in data_file.offset_files:
-            if fh.filename == offset_file.filename:
-                ofh = fh
-            else:
-                ofh = h5py.File(offset_file.filename, "r")
-            subindex = np.arange(offset_file.total_offset) + offset_file.offset_start
-            substart = max(fofindex[0] - subindex[0], 0)
-            subend = min(fofindex[-1] - subindex[0], subindex.size - 1)
-            fofstart = substart + subindex[0] - fofindex[0]
-            fofend = subend + subindex[0] - fofindex[0]
-            field_data[fofstart:fofend + 1] = ofh["SUBFIND"][field][substart:subend + 1]
-        return field_data
-                    
-    def _read_particle_fields(self, chunks, ptf, selector):
-        # Now we have all the sizes, and we can allocate
-        chunks = list(chunks)
-        data_files = set([])
-        for chunk in chunks:
-            for obj in chunk.objs:
-                data_files.update(obj.data_files)
-        for data_file in sorted(data_files):
-            with h5py.File(data_file.filename, "r") as f:
-                for ptype, field_list in sorted(ptf.items()):
-                    pcount = data_file.total_particles[ptype]
-                    if pcount == 0: continue
-                    coords = f[ptype]["CenterOfMass"].value.astype("float64")
-                    coords = np.resize(coords, (pcount, 3))
-                    x = coords[:, 0]
-                    y = coords[:, 1]
-                    z = coords[:, 2]
-                    mask = selector.select_points(x, y, z, 0.0)
-                    del x, y, z
-                    if mask is None: continue
-                    for field in field_list:
-                        if field in self.offset_fields:
-                            field_data = \
-                              self._read_offset_particle_field(field, data_file, f)
-                        else:
-                            if field == "particle_identifier":
-                                field_data = \
-                                  np.arange(data_file.total_particles[ptype]) + \
-                                  data_file.index_start[ptype]
-                            elif field in f[ptype]:
-                                field_data = f[ptype][field].value.astype("float64")
-                            else:
-                                fname = field[:field.rfind("_")]
-                                field_data = f[ptype][fname].value.astype("float64")
-                                my_div = field_data.size / pcount
-                                if my_div > 1:
-                                    field_data = np.resize(field_data, (pcount, my_div))
-                                    findex = int(field[field.rfind("_") + 1:])
-                                    field_data = field_data[:, findex]
-                        data = field_data[mask]
-                        yield (ptype, field), data
-
-    def _initialize_index(self, data_file, regions):
-        pcount = sum(data_file.total_particles.values())
-        morton = np.empty(pcount, dtype='uint64')
-        if pcount == 0: return morton
-        mylog.debug("Initializing index % 5i (% 7i particles)",
-                    data_file.file_id, pcount)
-        ind = 0
-        with h5py.File(data_file.filename, "r") as f:
-            if not f.keys(): return None
-            dx = np.finfo(f["FOF"]['CenterOfMass'].dtype).eps
-            dx = 2.0*self.ds.quan(dx, "code_length")
-
-            for ptype in data_file.ds.particle_types_raw:
-                if data_file.total_particles[ptype] == 0: continue
-                pos = f[ptype]["CenterOfMass"].value.astype("float64")
-                pos = np.resize(pos, (data_file.total_particles[ptype], 3))
-                pos = data_file.ds.arr(pos, "code_length")
-                
-                # These are 32 bit numbers, so we give a little lee-way.
-                # Otherwise, for big sets of particles, we often will bump into the
-                # domain edges.  This helps alleviate that.
-                np.clip(pos, self.ds.domain_left_edge + dx,
-                             self.ds.domain_right_edge - dx, pos)
-                if np.any(pos.min(axis=0) < self.ds.domain_left_edge) or \
-                   np.any(pos.max(axis=0) > self.ds.domain_right_edge):
-                    raise YTDomainOverflow(pos.min(axis=0),
-                                           pos.max(axis=0),
-                                           self.ds.domain_left_edge,
-                                           self.ds.domain_right_edge)
-                regions.add_data_file(pos, data_file.file_id)
-                morton[ind:ind+pos.shape[0]] = compute_morton(
-                    pos[:,0], pos[:,1], pos[:,2],
-                    data_file.ds.domain_left_edge,
-                    data_file.ds.domain_right_edge)
-                ind += pos.shape[0]
-        return morton
-
-    def _count_particles(self, data_file):
-        with h5py.File(data_file.filename, "r") as f:
-            pcount = {"FOF": f["FOF"].attrs["Number_of_groups"]}
-            if "SUBFIND" in f:
-                # We need this to figure out where the offset fields are stored.
-                data_file.total_offset = f["SUBFIND"].attrs["Number_of_groups"]
-                pcount["SUBFIND"] = f["FOF"].attrs["Number_of_subgroups"]
-            else:
-                data_file.total_offset = 0
-                pcount["SUBFIND"] = 0
-            return pcount
-
-    def _identify_fields(self, data_file):
-        fields = []
-        pcount = data_file.total_particles
-        if sum(pcount.values()) == 0: return fields, {}
-        with h5py.File(data_file.filename, "r") as f:
-            for ptype in self.ds.particle_types_raw:
-                if data_file.total_particles[ptype] == 0: continue
-                fields.append((ptype, "particle_identifier"))
-                my_fields, my_offset_fields = \
-                  subfind_field_list(f[ptype], ptype, data_file.total_particles)
-                fields.extend(my_fields)
-                self.offset_fields = self.offset_fields.union(set(my_offset_fields))
-        return fields, {}
-
-def subfind_field_list(fh, ptype, pcount):
-    fields = []
-    offset_fields = []
-    for field in fh.keys():
-        if "PartType" in field:
-            # These are halo member particles
-            continue
-        elif isinstance(fh[field], h5py.Group):
-            my_fields, my_offset_fields = \
-              subfind_field_list(fh[field], ptype, pcount)
-            fields.extend(my_fields)
-            my_offset_fields.extend(offset_fields)
-        else:
-            if not fh[field].size % pcount[ptype]:
-                my_div = fh[field].size / pcount[ptype]
-                fname = fh[field].name[fh[field].name.find(ptype) + len(ptype) + 1:]
-                if my_div > 1:
-                    for i in range(my_div):
-                        fields.append((ptype, "%s_%d" % (fname, i)))
-                else:
-                    fields.append((ptype, fname))
-            elif ptype == "SUBFIND" and \
-              not fh[field].size % fh["/SUBFIND"].attrs["Number_of_groups"]:
-                # These are actually FOF fields, but they were written after 
-                # a load balancing step moved halos around and thus they do not
-                # correspond to the halos stored in the FOF group.
-                my_div = fh[field].size / fh["/SUBFIND"].attrs["Number_of_groups"]
-                fname = fh[field].name[fh[field].name.find(ptype) + len(ptype) + 1:]
-                if my_div > 1:
-                    for i in range(my_div):
-                        fields.append(("FOF", "%s_%d" % (fname, i)))
-                else:
-                    fields.append(("FOF", fname))
-                offset_fields.append(fname)
-            else:
-                mylog.warn("Cannot add field (%s, %s) with size %d." % \
-                           (ptype, fh[field].name, fh[field].size))
-                continue
-    return fields, offset_fields

diff -r cf0fe81c90554a4483415e5b9e36ed484f674b69 -r 4221e22b2c232b4ea56a45ea84bd0d3c64cbdedf yt/frontends/halo_catalogs/owls_subfind/tests/test_outputs.py
--- a/yt/frontends/halo_catalogs/owls_subfind/tests/test_outputs.py
+++ /dev/null
@@ -1,40 +0,0 @@
-"""
-OWLSSubfind frontend tests using owls_fof_halos datasets
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from yt.testing import *
-from yt.utilities.answer_testing.framework import \
-    FieldValuesTest, \
-    requires_ds, \
-    data_dir_load
-
-_fields = ("particle_position_x", "particle_position_y",
-           "particle_position_z", "particle_mass")
-
-g8 = "owls_fof_halos/groups_008/group_008.0.hdf5"
- at requires_ds(g8)
-def test_fields_g8():
-    ds = data_dir_load(g8)
-    yield assert_equal, str(ds), "group_008.0.hdf5"
-    for field in _fields:
-        yield FieldValuesTest(g8, field)
-
-# a dataset with empty files
-g3 = "owls_fof_halos/groups_003/group_003.0.hdf5"
- at requires_ds(g3)
-def test_fields_g3():
-    ds = data_dir_load(g3)
-    yield assert_equal, str(ds), "group_003.0.hdf5"
-    for field in _fields:
-        yield FieldValuesTest(g3, field)

diff -r cf0fe81c90554a4483415e5b9e36ed484f674b69 -r 4221e22b2c232b4ea56a45ea84bd0d3c64cbdedf yt/frontends/owls_subfind/__init__.py
--- /dev/null
+++ b/yt/frontends/owls_subfind/__init__.py
@@ -0,0 +1,15 @@
+"""
+API for OWLSSubfind frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------

diff -r cf0fe81c90554a4483415e5b9e36ed484f674b69 -r 4221e22b2c232b4ea56a45ea84bd0d3c64cbdedf yt/frontends/owls_subfind/api.py
--- /dev/null
+++ b/yt/frontends/owls_subfind/api.py
@@ -0,0 +1,24 @@
+"""
+API for OWLSSubfind frontend
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from .data_structures import \
+     OWLSSubfindDataset
+
+from .io import \
+     IOHandlerOWLSSubfindHDF5
+
+from .fields import \
+     OWLSSubfindFieldInfo

diff -r cf0fe81c90554a4483415e5b9e36ed484f674b69 -r 4221e22b2c232b4ea56a45ea84bd0d3c64cbdedf yt/frontends/owls_subfind/data_structures.py
--- /dev/null
+++ b/yt/frontends/owls_subfind/data_structures.py
@@ -0,0 +1,222 @@
+"""
+Data structures for OWLSSubfind frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from collections import defaultdict
+import h5py
+import numpy as np
+import stat
+import weakref
+import struct
+import glob
+import time
+import os
+
+from .fields import \
+    OWLSSubfindFieldInfo
+
+from yt.utilities.cosmology import Cosmology
+from yt.utilities.definitions import \
+    mpc_conversion, sec_conversion
+from yt.utilities.exceptions import \
+     YTException
+from yt.geometry.particle_geometry_handler import \
+    ParticleIndex
+from yt.data_objects.static_output import \
+    Dataset, \
+    ParticleFile
+from yt.frontends.gadget.data_structures import \
+    _fix_unit_ordering
+import yt.utilities.fortran_utils as fpu
+from yt.units.yt_array import \
+    YTArray, \
+    YTQuantity
+
+class OWLSSubfindParticleIndex(ParticleIndex):
+    def __init__(self, ds, dataset_type):
+        super(OWLSSubfindParticleIndex, self).__init__(ds, dataset_type)
+
+    def _calculate_particle_index_starts(self):
+        # Halo indices are not saved in the file, so we must count by hand.
+        # File 0 has halos 0 to N_0 - 1, file 1 has halos N_0 to N_0 + N_1 - 1, etc.
+        particle_count = defaultdict(int)
+        offset_count = 0
+        for data_file in self.data_files:
+            data_file.index_start = dict([(ptype, particle_count[ptype]) for
+                                           ptype in data_file.total_particles])
+            data_file.offset_start = offset_count
+            for ptype in data_file.total_particles:
+                particle_count[ptype] += data_file.total_particles[ptype]
+            offset_count += data_file.total_offset
+
+    def _calculate_file_offset_map(self):
+        # After the FOF is performed, a load-balancing step redistributes halos
+        # and then writes more fields.  Here, for each file, we create a list of
+        # files that contain the rest of the redistributed particles.
+        ifof = np.array([data_file.total_particles["FOF"]
+                         for data_file in self.data_files])
+        isub = np.array([data_file.total_offset
+                         for data_file in self.data_files])
+        subend = isub.cumsum()
+        fofend = ifof.cumsum()
+        istart = np.digitize(fofend - ifof, subend - isub) - 1
+        iend = np.clip(np.digitize(fofend, subend), 0, ifof.size - 2)
+        for i, data_file in enumerate(self.data_files):
+            data_file.offset_files = self.data_files[istart[i]: iend[i] + 1]
+
+    def _detect_output_fields(self):
+        # TODO: Add additional fields
+        dsl = []
+        units = {}
+        for dom in self.data_files:
+            fl, _units = self.io._identify_fields(dom)
+            units.update(_units)
+            dom._calculate_offsets(fl)
+            for f in fl:
+                if f not in dsl: dsl.append(f)
+        self.field_list = dsl
+        ds = self.dataset
+        ds.particle_types = tuple(set(pt for pt, ds in dsl))
+        # This is an attribute that means these particle types *actually*
+        # exist.  As in, they are real, in the dataset.
+        ds.field_units.update(units)
+        ds.particle_types_raw = ds.particle_types
+            
+    def _setup_geometry(self):
+        super(OWLSSubfindParticleIndex, self)._setup_geometry()
+        self._calculate_particle_index_starts()
+        self._calculate_file_offset_map()
+    
+class OWLSSubfindHDF5File(ParticleFile):
+    def __init__(self, ds, io, filename, file_id):
+        super(OWLSSubfindHDF5File, self).__init__(ds, io, filename, file_id)
+        with h5py.File(filename, "r") as f:
+            self.header = dict((field, f.attrs[field]) \
+                               for field in f.attrs.keys())
+    
+class OWLSSubfindDataset(Dataset):
+    _index_class = OWLSSubfindParticleIndex
+    _file_class = OWLSSubfindHDF5File
+    _field_info_class = OWLSSubfindFieldInfo
+    _suffix = ".hdf5"
+
+    def __init__(self, filename, dataset_type="subfind_hdf5",
+                 n_ref = 16, over_refine_factor = 1):
+        self.n_ref = n_ref
+        self.over_refine_factor = over_refine_factor
+        super(OWLSSubfindDataset, self).__init__(filename, dataset_type)
+
+    def _parse_parameter_file(self):
+        handle = h5py.File(self.parameter_filename, mode="r")
+        hvals = {}
+        hvals.update((str(k), v) for k, v in handle["/Header"].attrs.items())
+        hvals["NumFiles"] = hvals["NumFilesPerSnapshot"]
+        hvals["Massarr"] = hvals["MassTable"]
+
+        self.dimensionality = 3
+        self.refine_by = 2
+        self.unique_identifier = \
+            int(os.stat(self.parameter_filename)[stat.ST_CTIME])
+
+        # Set standard values
+        self.current_time = self.quan(hvals["Time_GYR"] * sec_conversion["Gyr"], "s")
+        self.domain_left_edge = np.zeros(3, "float64")
+        self.domain_right_edge = np.ones(3, "float64") * hvals["BoxSize"]
+        nz = 1 << self.over_refine_factor
+        self.domain_dimensions = np.ones(3, "int32") * nz
+        self.cosmological_simulation = 1
+        self.periodicity = (True, True, True)
+        self.current_redshift = hvals["Redshift"]
+        self.omega_lambda = hvals["OmegaLambda"]
+        self.omega_matter = hvals["Omega0"]
+        self.hubble_constant = hvals["HubbleParam"]
+        self.parameters = hvals
+        prefix = os.path.abspath(
+            os.path.join(os.path.dirname(self.parameter_filename), 
+                         os.path.basename(self.parameter_filename).split(".", 1)[0]))
+        
+        suffix = self.parameter_filename.rsplit(".", 1)[-1]
+        self.filename_template = "%s.%%(num)i.%s" % (prefix, suffix)
+        self.file_count = len(glob.glob(prefix + "*" + self._suffix))
+        if self.file_count == 0:
+            raise YTException(message="No data files found.", ds=self)
+        self.particle_types = ("FOF", "SUBFIND")
+        self.particle_types_raw = ("FOF", "SUBFIND")
+        
+        # To avoid having to open files twice
+        self._unit_base = {}
+        self._unit_base.update(
+            (str(k), v) for k, v in handle["/Units"].attrs.items())
+        handle.close()
+
+    def _set_code_unit_attributes(self):
+        # Set a sane default for cosmological simulations.
+        if self._unit_base is None and self.cosmological_simulation == 1:
+            mylog.info("Assuming length units are in Mpc/h (comoving)")
+            self._unit_base = dict(length = (1.0, "Mpccm/h"))
+        # The remaining values fall back on the standard Gadget
+        # defaults.
+        unit_base = self._unit_base or {}
+        if "length" in unit_base:
+            length_unit = unit_base["length"]
+        elif "UnitLength_in_cm" in unit_base:
+            if self.cosmological_simulation == 0:
+                length_unit = (unit_base["UnitLength_in_cm"], "cm")
+            else:
+                length_unit = (unit_base["UnitLength_in_cm"], "cmcm/h")
+        else:
+            raise RuntimeError
+        length_unit = _fix_unit_ordering(length_unit)
+        self.length_unit = self.quan(length_unit[0], length_unit[1])
+
+        unit_base = self._unit_base or {}
+        if "velocity" in unit_base:
+            velocity_unit = unit_base["velocity"]
+        elif "UnitVelocity_in_cm_per_s" in unit_base:
+            velocity_unit = (unit_base["UnitVelocity_in_cm_per_s"], "cm/s")
+        else:
+            velocity_unit = (1e5, "cm/s")
+        velocity_unit = _fix_unit_ordering(velocity_unit)
+        self.velocity_unit = self.quan(velocity_unit[0], velocity_unit[1])
+        # We set hubble_constant = 1.0 for non-cosmology, so this is safe.
+        # Default to 1e10 Msun/h if mass is not specified.
+        if "mass" in unit_base:
+            mass_unit = unit_base["mass"]
+        elif "UnitMass_in_g" in unit_base:
+            if self.cosmological_simulation == 0:
+                mass_unit = (unit_base["UnitMass_in_g"], "g")
+            else:
+                mass_unit = (unit_base["UnitMass_in_g"], "g/h")
+        else:
+            # Sane default
+            mass_unit = (1.0, "1e10*Msun/h")
+        mass_unit = _fix_unit_ordering(mass_unit)
+        self.mass_unit = self.quan(mass_unit[0], mass_unit[1])
+        self.time_unit = self.quan(unit_base["UnitTime_in_s"], "s")
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        need_groups = ['Constants', 'Header', 'Parameters', 'Units', 'FOF']
+        veto_groups = []
+        valid = True
+        try:
+            fh = h5py.File(args[0], mode='r')
+            valid = all(ng in fh["/"] for ng in need_groups) and \
+              not any(vg in fh["/"] for vg in veto_groups)
+            fh.close()
+        except Exception:
+            valid = False
+        return valid
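
The unit resolution in _set_code_unit_attributes above follows a fixed
precedence: an explicit yt-style entry in unit_base wins, then the raw
Gadget CGS parameter (read as comoving/h for cosmological runs), and
finally a hard failure for length or a built-in default for velocity and
mass.  A minimal standalone sketch of the length branch, using an
illustrative unit_base dict rather than the frontend's real attributes:

    def resolve_length_unit(unit_base, cosmological):
        # An explicit yt-style specification wins outright.
        if "length" in unit_base:
            return unit_base["length"]
        # Otherwise fall back to the raw Gadget CGS parameter;
        # cosmological runs get comoving/h units.
        if "UnitLength_in_cm" in unit_base:
            if cosmological:
                return (unit_base["UnitLength_in_cm"], "cmcm/h")
            return (unit_base["UnitLength_in_cm"], "cm")
        # Unlike velocity and mass, length has no sane default.
        raise RuntimeError("No length unit found in unit_base.")

    print(resolve_length_unit({"UnitLength_in_cm": 3.085678e24}, True))
    # -> (3.085678e+24, 'cmcm/h')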

diff -r cf0fe81c90554a4483415e5b9e36ed484f674b69 -r 4221e22b2c232b4ea56a45ea84bd0d3c64cbdedf yt/frontends/owls_subfind/fields.py
--- /dev/null
+++ b/yt/frontends/owls_subfind/fields.py
@@ -0,0 +1,58 @@
+"""
+OWLSSubfind-specific fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.funcs import mylog
+from yt.fields.field_info_container import \
+    FieldInfoContainer
+from yt.units.yt_array import \
+    YTArray
+
+m_units = "code_mass"
+mdot_units = "code_mass / code_time"
+p_units = "Mpccm/h"
+v_units = "1e5 * cmcm / s"
+
+class OWLSSubfindFieldInfo(FieldInfoContainer):
+    known_other_fields = (
+    )
+
+    known_particle_fields = (
+        ("CenterOfMass_0", (p_units, ["particle_position_x"], None)),
+        ("CenterOfMass_1", (p_units, ["particle_position_y"], None)),
+        ("CenterOfMass_2", (p_units, ["particle_position_z"], None)),
+        ("CenterOfMassVelocity_0", (v_units, ["particle_velocity_x"], None)),
+        ("CenterOfMassVelocity_1", (v_units, ["particle_velocity_y"], None)),
+        ("CenterOfMassVelocity_2", (v_units, ["particle_velocity_z"], None)),
+        ("Mass", (m_units, ["particle_mass"], None)),
+        ("Halo_M_Crit200", (m_units, ["Virial Mass"], None)),
+        ("Halo_M_Crit2500", (m_units, [], None)),
+        ("Halo_M_Crit500", (m_units, [], None)),
+        ("Halo_M_Mean200", (m_units, [], None)),
+        ("Halo_M_Mean2500", (m_units, [], None)),
+        ("Halo_M_Mean500", (m_units, [], None)),
+        ("Halo_M_TopHat200", (m_units, [], None)),
+        ("Halo_R_Crit200", (p_units, ["Virial Radius"], None)),
+        ("Halo_R_Crit2500", (p_units, [], None)),
+        ("Halo_R_Crit500", (p_units, [], None)),
+        ("Halo_R_Mean200", (p_units, [], None)),
+        ("Halo_R_Mean2500", (p_units, [], None)),
+        ("Halo_R_Mean500", (p_units, [], None)),
+        ("Halo_R_TopHat200", (p_units, [], None)),
+        ("BH_Mass", (m_units, [], None)),
+        ("Stars/Mass", (m_units, [], None)),
+        ("BH_Mdot", (mdot_units, [], None)),
+        ("StarFormationRate", (mdot_units, [], None)),
+)
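
Each entry in known_particle_fields above follows the FieldInfoContainer
convention of mapping an on-disk field name to a (units, aliases,
display_name) triple.  A small sketch of how such a table is read (the
loop is illustrative, not the container's actual registration code):

    m_units = "code_mass"
    p_units = "Mpccm/h"

    known_particle_fields = (
        ("Mass", (m_units, ["particle_mass"], None)),
        ("Halo_R_Crit200", (p_units, ["Virial Radius"], None)),
    )

    for name, (units, aliases, display_name) in known_particle_fields:
        # Each on-disk name gets units plus zero or more yt aliases.
        print("%s -> units=%r, aliases=%r" % (name, units, aliases))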

diff -r cf0fe81c90554a4483415e5b9e36ed484f674b69 -r 4221e22b2c232b4ea56a45ea84bd0d3c64cbdedf yt/frontends/owls_subfind/io.py
--- /dev/null
+++ b/yt/frontends/owls_subfind/io.py
@@ -0,0 +1,217 @@
+"""
+OWLSSubfind data-file handling function
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+import numpy as np
+
+from yt.utilities.exceptions import *
+from yt.funcs import mylog
+
+from yt.utilities.io_handler import \
+    BaseIOHandler
+
+from yt.utilities.lib.geometry_utils import compute_morton
+
+from yt.geometry.oct_container import _ORDER_MAX
+
+class IOHandlerOWLSSubfindHDF5(BaseIOHandler):
+    _dataset_type = "subfind_hdf5"
+
+    def __init__(self, ds):
+        super(IOHandlerOWLSSubfindHDF5, self).__init__(ds)
+        self.offset_fields = set([])
+
+    def _read_fluid_selection(self, chunks, selector, fields, size):
+        raise NotImplementedError
+
+    def _read_particle_coords(self, chunks, ptf):
+        # This will read chunks and yield the results.
+        chunks = list(chunks)
+        data_files = set([])
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in sorted(data_files):
+            with h5py.File(data_file.filename, "r") as f:
+                for ptype, field_list in sorted(ptf.items()):
+                    pcount = data_file.total_particles[ptype]
+                    coords = f[ptype]["CenterOfMass"].value.astype("float64")
+                    coords = np.resize(coords, (pcount, 3))
+                    x = coords[:, 0]
+                    y = coords[:, 1]
+                    z = coords[:, 2]
+                    yield ptype, (x, y, z)
+
+    def _read_offset_particle_field(self, field, data_file, fh):
+        field_data = np.empty(data_file.total_particles["FOF"], dtype="float64")
+        fofindex = np.arange(data_file.total_particles["FOF"]) + data_file.index_start["FOF"]
+        for offset_file in data_file.offset_files:
+            if fh.filename == offset_file.filename:
+                ofh = fh
+            else:
+                ofh = h5py.File(offset_file.filename, "r")
+            subindex = np.arange(offset_file.total_offset) + offset_file.offset_start
+            substart = max(fofindex[0] - subindex[0], 0)
+            subend = min(fofindex[-1] - subindex[0], subindex.size - 1)
+            fofstart = substart + subindex[0] - fofindex[0]
+            fofend = subend + subindex[0] - fofindex[0]
+            field_data[fofstart:fofend + 1] = ofh["SUBFIND"][field][substart:subend + 1]
+        return field_data
+                    
+    def _read_particle_fields(self, chunks, ptf, selector):
+        # Now we have all the sizes, and we can allocate
+        chunks = list(chunks)
+        data_files = set([])
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in sorted(data_files):
+            with h5py.File(data_file.filename, "r") as f:
+                for ptype, field_list in sorted(ptf.items()):
+                    pcount = data_file.total_particles[ptype]
+                    if pcount == 0: continue
+                    coords = f[ptype]["CenterOfMass"].value.astype("float64")
+                    coords = np.resize(coords, (pcount, 3))
+                    x = coords[:, 0]
+                    y = coords[:, 1]
+                    z = coords[:, 2]
+                    mask = selector.select_points(x, y, z, 0.0)
+                    del x, y, z
+                    if mask is None: continue
+                    for field in field_list:
+                        if field in self.offset_fields:
+                            field_data = \
+                              self._read_offset_particle_field(field, data_file, f)
+                        else:
+                            if field == "particle_identifier":
+                                field_data = \
+                                  np.arange(data_file.total_particles[ptype]) + \
+                                  data_file.index_start[ptype]
+                            elif field in f[ptype]:
+                                field_data = f[ptype][field].value.astype("float64")
+                            else:
+                                fname = field[:field.rfind("_")]
+                                field_data = f[ptype][fname].value.astype("float64")
+                                my_div = field_data.size / pcount
+                                if my_div > 1:
+                                    field_data = np.resize(field_data, (pcount, my_div))
+                                    findex = int(field[field.rfind("_") + 1:])
+                                    field_data = field_data[:, findex]
+                        data = field_data[mask]
+                        yield (ptype, field), data
+
+    def _initialize_index(self, data_file, regions):
+        pcount = sum(data_file.total_particles.values())
+        morton = np.empty(pcount, dtype='uint64')
+        if pcount == 0: return morton
+        mylog.debug("Initializing index % 5i (% 7i particles)",
+                    data_file.file_id, pcount)
+        ind = 0
+        with h5py.File(data_file.filename, "r") as f:
+            if not f.keys(): return None
+            dx = np.finfo(f["FOF"]['CenterOfMass'].dtype).eps
+            dx = 2.0*self.ds.quan(dx, "code_length")
+
+            for ptype in data_file.ds.particle_types_raw:
+                if data_file.total_particles[ptype] == 0: continue
+                pos = f[ptype]["CenterOfMass"].value.astype("float64")
+                pos = np.resize(pos, (data_file.total_particles[ptype], 3))
+                pos = data_file.ds.arr(pos, "code_length")
+                
+                # These are 32 bit numbers, so we give a little leeway.
+                # Otherwise, for big sets of particles, we will often bump
+                # into the domain edges.  This clipping helps alleviate that.
+                np.clip(pos, self.ds.domain_left_edge + dx,
+                             self.ds.domain_right_edge - dx, pos)
+                if np.any(pos.min(axis=0) < self.ds.domain_left_edge) or \
+                   np.any(pos.max(axis=0) > self.ds.domain_right_edge):
+                    raise YTDomainOverflow(pos.min(axis=0),
+                                           pos.max(axis=0),
+                                           self.ds.domain_left_edge,
+                                           self.ds.domain_right_edge)
+                regions.add_data_file(pos, data_file.file_id)
+                morton[ind:ind+pos.shape[0]] = compute_morton(
+                    pos[:,0], pos[:,1], pos[:,2],
+                    data_file.ds.domain_left_edge,
+                    data_file.ds.domain_right_edge)
+                ind += pos.shape[0]
+        return morton
+
+    def _count_particles(self, data_file):
+        with h5py.File(data_file.filename, "r") as f:
+            pcount = {"FOF": f["FOF"].attrs["Number_of_groups"]}
+            if "SUBFIND" in f:
+                # We need this to figure out where the offset fields are stored.
+                data_file.total_offset = f["SUBFIND"].attrs["Number_of_groups"]
+                pcount["SUBFIND"] = f["FOF"].attrs["Number_of_subgroups"]
+            else:
+                data_file.total_offset = 0
+                pcount["SUBFIND"] = 0
+            return pcount
+
+    def _identify_fields(self, data_file):
+        fields = []
+        pcount = data_file.total_particles
+        if sum(pcount.values()) == 0: return fields, {}
+        with h5py.File(data_file.filename, "r") as f:
+            for ptype in self.ds.particle_types_raw:
+                if data_file.total_particles[ptype] == 0: continue
+                fields.append((ptype, "particle_identifier"))
+                my_fields, my_offset_fields = \
+                  subfind_field_list(f[ptype], ptype, data_file.total_particles)
+                fields.extend(my_fields)
+                self.offset_fields = self.offset_fields.union(set(my_offset_fields))
+        return fields, {}
+
+def subfind_field_list(fh, ptype, pcount):
+    fields = []
+    offset_fields = []
+    for field in fh.keys():
+        if "PartType" in field:
+            # These are halo member particles
+            continue
+        elif isinstance(fh[field], h5py.Group):
+            my_fields, my_offset_fields = \
+              subfind_field_list(fh[field], ptype, pcount)
+            fields.extend(my_fields)
+            offset_fields.extend(my_offset_fields)
+        else:
+            if not fh[field].size % pcount[ptype]:
+                my_div = fh[field].size / pcount[ptype]
+                fname = fh[field].name[fh[field].name.find(ptype) + len(ptype) + 1:]
+                if my_div > 1:
+                    for i in range(my_div):
+                        fields.append((ptype, "%s_%d" % (fname, i)))
+                else:
+                    fields.append((ptype, fname))
+            elif ptype == "SUBFIND" and \
+              not fh[field].size % fh["/SUBFIND"].attrs["Number_of_groups"]:
+                # These are actually FOF fields, but they were written after 
+                # a load balancing step moved halos around and thus they do not
+                # correspond to the halos stored in the FOF group.
+                my_div = fh[field].size / fh["/SUBFIND"].attrs["Number_of_groups"]
+                fname = fh[field].name[fh[field].name.find(ptype) + len(ptype) + 1:]
+                if my_div > 1:
+                    for i in range(my_div):
+                        fields.append(("FOF", "%s_%d" % (fname, i)))
+                else:
+                    fields.append(("FOF", fname))
+                offset_fields.append(fname)
+            else:
+                mylog.warn("Cannot add field (%s, %s) with size %d." % \
+                           (ptype, fh[field].name, fh[field].size))
+                continue
+    return fields, offset_fields
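
The window arithmetic in _read_offset_particle_field above intersects the
FOF index range covered by the current file with the SUBFIND index range
covered by each offset file.  A worked example with hypothetical numbers,
a data file holding FOF groups 10..19 against an offset file holding
SUBFIND rows 5..14:

    import numpy as np

    fofindex = np.arange(10) + 10  # FOF groups 10..19 in this file
    subindex = np.arange(10) + 5   # SUBFIND rows 5..14 in the offset file

    substart = max(fofindex[0] - subindex[0], 0)                 # 5
    subend = min(fofindex[-1] - subindex[0], subindex.size - 1)  # 9
    fofstart = substart + subindex[0] - fofindex[0]              # 0
    fofend = subend + subindex[0] - fofindex[0]                  # 4

    # Offset rows 5..9 fill slots 0..4 of this file's field data:
    # field_data[0:5] = ofh["SUBFIND"][field][5:10]
    print(substart, subend, fofstart, fofend)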

diff -r cf0fe81c90554a4483415e5b9e36ed484f674b69 -r 4221e22b2c232b4ea56a45ea84bd0d3c64cbdedf yt/frontends/owls_subfind/tests/test_outputs.py
--- /dev/null
+++ b/yt/frontends/owls_subfind/tests/test_outputs.py
@@ -0,0 +1,40 @@
+"""
+OWLSSubfind frontend tests using owls_fof_halos datasets
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.testing import *
+from yt.utilities.answer_testing.framework import \
+    FieldValuesTest, \
+    requires_ds, \
+    data_dir_load
+
+_fields = ("particle_position_x", "particle_position_y",
+           "particle_position_z", "particle_mass")
+
+g8 = "owls_fof_halos/groups_008/group_008.0.hdf5"
+@requires_ds(g8)
+def test_fields_g8():
+    ds = data_dir_load(g8)
+    yield assert_equal, str(ds), "group_008.0.hdf5"
+    for field in _fields:
+        yield FieldValuesTest(g8, field)
+
+# a dataset with empty files
+g3 = "owls_fof_halos/groups_003/group_003.0.hdf5"
+@requires_ds(g3)
+def test_fields_g3():
+    ds = data_dir_load(g3)
+    yield assert_equal, str(ds), "group_003.0.hdf5"
+    for field in _fields:
+        yield FieldValuesTest(g3, field)


https://bitbucket.org/yt_analysis/yt/commits/17d76c2972fb/
Changeset:   17d76c2972fb
Branch:      yt
User:        brittonsmith
Date:        2014-10-23 00:18:53+00:00
Summary:     Moving rockstar frontend to main frontend directory.
Affected #:  12 files

diff -r 4221e22b2c232b4ea56a45ea84bd0d3c64cbdedf -r 17d76c2972fb78113017c00c26277cc1b44cd217 yt/frontends/halo_catalogs/rockstar/__init__.py
--- a/yt/frontends/halo_catalogs/rockstar/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-"""
-API for Rockstar frontend.
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------

diff -r 4221e22b2c232b4ea56a45ea84bd0d3c64cbdedf -r 17d76c2972fb78113017c00c26277cc1b44cd217 yt/frontends/halo_catalogs/rockstar/api.py
--- a/yt/frontends/halo_catalogs/rockstar/api.py
+++ /dev/null
@@ -1,24 +0,0 @@
-"""
-API for Rockstar frontend
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2014, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from .data_structures import \
-     RockstarDataset
-
-from .io import \
-     IOHandlerRockstarBinary
-
-from .fields import \
-     RockstarFieldInfo

diff -r 4221e22b2c232b4ea56a45ea84bd0d3c64cbdedf -r 17d76c2972fb78113017c00c26277cc1b44cd217 yt/frontends/halo_catalogs/rockstar/data_structures.py
--- a/yt/frontends/halo_catalogs/rockstar/data_structures.py
+++ /dev/null
@@ -1,110 +0,0 @@
-"""
-Data structures for Rockstar frontend.
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-import numpy as np
-import stat
-import weakref
-import struct
-import glob
-import time
-import os
-
-from .fields import \
-    RockstarFieldInfo
-
-from yt.utilities.cosmology import Cosmology
-from yt.geometry.particle_geometry_handler import \
-    ParticleIndex
-from yt.data_objects.static_output import \
-    Dataset, \
-    ParticleFile
-import yt.utilities.fortran_utils as fpu
-from yt.units.yt_array import \
-    YTArray, \
-    YTQuantity
-
-from .definitions import \
-    header_dt
-
-class RockstarBinaryFile(ParticleFile):
-    def __init__(self, ds, io, filename, file_id):
-        with open(filename, "rb") as f:
-            self.header = fpu.read_cattrs(f, header_dt, "=")
-            self._position_offset = f.tell()
-            f.seek(0, os.SEEK_END)
-            self._file_size = f.tell()
-
-        super(RockstarBinaryFile, self).__init__(ds, io, filename, file_id)
-
-class RockstarDataset(Dataset):
-    _index_class = ParticleIndex
-    _file_class = RockstarBinaryFile
-    _field_info_class = RockstarFieldInfo
-    _suffix = ".bin"
-
-    def __init__(self, filename, dataset_type="rockstar_binary",
-                 n_ref = 16, over_refine_factor = 1):
-        self.n_ref = n_ref
-        self.over_refine_factor = over_refine_factor
-        super(RockstarDataset, self).__init__(filename, dataset_type)
-
-    def _parse_parameter_file(self):
-        with open(self.parameter_filename, "rb") as f:
-            hvals = fpu.read_cattrs(f, header_dt)
-            hvals.pop("unused")
-        self.dimensionality = 3
-        self.refine_by = 2
-        self.unique_identifier = \
-            int(os.stat(self.parameter_filename)[stat.ST_CTIME])
-        prefix = ".".join(self.parameter_filename.rsplit(".", 2)[:-2])
-        self.filename_template = "%s.%%(num)s%s" % (prefix, self._suffix)
-        self.file_count = len(glob.glob(prefix + "*" + self._suffix))
-        
-        # Now we can set up things we already know.
-        self.cosmological_simulation = 1
-        self.current_redshift = (1.0 / hvals['scale']) - 1.0
-        self.hubble_constant = hvals['h0']
-        self.omega_lambda = hvals['Ol']
-        self.omega_matter = hvals['Om']
-        cosmo = Cosmology(self.hubble_constant,
-                          self.omega_matter, self.omega_lambda)
-        self.current_time = cosmo.hubble_time(self.current_redshift).in_units("s")
-        self.periodicity = (True, True, True)
-        self.particle_types = ("halos")
-        self.particle_types_raw = ("halos")
-
-        self.domain_left_edge = np.array([0.0,0.0,0.0])
-        self.domain_right_edge = np.array([hvals['box_size']] * 3)
-
-        nz = 1 << self.over_refine_factor
-        self.domain_dimensions = np.ones(3, "int32") * nz
-        self.parameters.update(hvals)
-
-    def _set_code_unit_attributes(self):
-        z = self.current_redshift
-        self.length_unit = self.quan(1.0 / (1.0+z), "Mpc / h")
-        self.mass_unit = self.quan(1.0, "Msun / h")
-        self.velocity_unit = self.quan(1.0, "km / s")
-        self.time_unit = self.length_unit / self.velocity_unit
-
-    @classmethod
-    def _is_valid(self, *args, **kwargs):
-        if not args[0].endswith(".bin"): return False
-        with open(args[0], "rb") as f:
-            header = fpu.read_cattrs(f, header_dt)
-            if header['magic'] == 18077126535843729616:
-                return True
-        return False

diff -r 4221e22b2c232b4ea56a45ea84bd0d3c64cbdedf -r 17d76c2972fb78113017c00c26277cc1b44cd217 yt/frontends/halo_catalogs/rockstar/definitions.py
--- a/yt/frontends/halo_catalogs/rockstar/definitions.py
+++ /dev/null
@@ -1,127 +0,0 @@
-"""
-Data structures for Rockstar
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-import numpy as np
-
-BINARY_HEADER_SIZE=256
-header_dt = (
-    ("magic", 1, "Q"),
-    ("snap", 1, "q"),
-    ("chunk", 1, "q"),
-    ("scale", 1, "f"),
-    ("Om", 1, "f"),
-    ("Ol", 1, "f"),
-    ("h0", 1, "f"),
-    ("bounds", 6, "f"),
-    ("num_halos", 1, "q"),
-    ("num_particles", 1, "q"),
-    ("box_size", 1, "f"),
-    ("particle_mass", 1, "f"),
-    ("particle_type", 1, "q"),
-    ("format_revision", 1, "i"),
-    ("version", 12, "c"),
-    ("unused", BINARY_HEADER_SIZE - 4*12 - 4 - 8*6 - 12, "c")
-)
-
-# Note the final field here, which is a field for min/max format revision in
-# which the field appears.
-
-KNOWN_REVISIONS=[0, 1]
-
-halo_dt = [
-    ('particle_identifier', np.int64),
-    ('particle_position_x', np.float32),
-    ('particle_position_y', np.float32),
-    ('particle_position_z', np.float32),
-    ('particle_mposition_x', np.float32, (0, 0)),
-    ('particle_mposition_y', np.float32, (0, 0)),
-    ('particle_mposition_z', np.float32, (0, 0)),
-    ('particle_velocity_x', np.float32),
-    ('particle_velocity_y', np.float32),
-    ('particle_velocity_z', np.float32),
-    ('particle_corevel_x', np.float32, (1, 100)),
-    ('particle_corevel_y', np.float32, (1, 100)),
-    ('particle_corevel_z', np.float32, (1, 100)),
-    ('particle_bulkvel_x', np.float32),
-    ('particle_bulkvel_y', np.float32),
-    ('particle_bulkvel_z', np.float32),
-    ('particle_mass', np.float32),
-    ('virial_radius', np.float32),
-    ('child_r', np.float32),
-    ('vmax_r', np.float32),
-    ('mgrav', np.float32),
-    ('vmax', np.float32),
-    ('rvmax', np.float32),
-    ('rs', np.float32),
-    ('klypin_rs', np.float32),
-    ('vrms', np.float32),
-    ('Jx', np.float32),
-    ('Jy', np.float32),
-    ('Jz', np.float32),
-    ('energy', np.float32),
-    ('spin', np.float32),
-    ('alt_m1', np.float32),
-    ('alt_m2', np.float32),
-    ('alt_m3', np.float32),
-    ('alt_m4', np.float32),
-    ('Xoff', np.float32),
-    ('Voff', np.float32),
-    ('b_to_a', np.float32),
-    ('c_to_a', np.float32),
-    ('Ax', np.float32),
-    ('Ay', np.float32),
-    ('Az', np.float32),
-    ('b_to_a2', np.float32, (1, 100)),
-    ('c_to_a2', np.float32, (1, 100)),
-    ('A2x', np.float32, (1, 100)),
-    ('A2y', np.float32, (1, 100)),
-    ('A2z', np.float32, (1, 100)),
-    ('bullock_spin', np.float32),
-    ('kin_to_pot', np.float32),
-    ('m_pe_b', np.float32, (1, 100)),
-    ('m_pe_d', np.float32, (1, 100)),
-    ('num_p', np.int64),
-    ('num_child_particles', np.int64),
-    ('p_start', np.int64),
-    ('desc', np.int64),
-    ('flags', np.int64),
-    ('n_core', np.int64),
-    ('min_pos_err', np.float32),
-    ('min_vel_err', np.float32),
-    ('min_bulkvel_err', np.float32),
-]
-
-halo_dts = {}
-
-for rev in KNOWN_REVISIONS:
-    halo_dts[rev] = []
-    for item in halo_dt:
-        if len(item) == 2:
-            halo_dts[rev].append(item)
-        else:
-            mi, ma = item[2]
-            if (mi <= rev) and (rev <= ma):
-                halo_dts[rev].append(item[:2])
-    halo_dts[rev] = np.dtype(halo_dts[rev], align=True)
-
-particle_dt = np.dtype([
-    ('particle_identifier', np.int64),
-    ('particle_position_x', np.float32),
-    ('particle_position_y', np.float32),
-    ('particle_position_z', np.float32),
-    ('particle_velocity_x', np.float32),
-    ('particle_velocity_y', np.float32),
-    ('particle_velocity_z', np.float32),
-])

diff -r 4221e22b2c232b4ea56a45ea84bd0d3c64cbdedf -r 17d76c2972fb78113017c00c26277cc1b44cd217 yt/frontends/halo_catalogs/rockstar/fields.py
--- a/yt/frontends/halo_catalogs/rockstar/fields.py
+++ /dev/null
@@ -1,97 +0,0 @@
-"""
-Rockstar-specific fields
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-import numpy as np
-
-from yt.funcs import mylog
-from yt.fields.field_info_container import \
-    FieldInfoContainer
-from yt.units.yt_array import \
-    YTArray
-
-from yt.utilities.physical_constants import \
-    mh, \
-    mass_sun_cgs
-
-m_units = "Msun / h"                # Msun / h
-p_units = "Mpccm / h"               # Mpc / h comoving
-v_units = "km / s"                  # km /s phys, peculiar
-r_units = "kpccm / h"               # kpc / h comoving
-
-class RockstarFieldInfo(FieldInfoContainer):
-    known_other_fields = (
-    )
-
-    known_particle_fields = (
-        ("particle_identifier", ("", [], None)),
-        ("particle_position_x", (p_units, [], None)),
-        ("particle_position_y", (p_units, [], None)),
-        ("particle_position_z", (p_units, [], None)),
-        ("particle_velocity_x", (v_units, [], None)),
-        ("particle_velocity_y", (v_units, [], None)),
-        ("particle_velocity_z", (v_units, [], None)),
-        ("particle_corevel_x", (v_units, [], None)),
-        ("particle_corevel_y", (v_units, [], None)),
-        ("particle_corevel_z", (v_units, [], None)),
-        ("particle_bulkvel_x", (v_units, [], None)),
-        ("particle_bulkvel_y", (v_units, [], None)),
-        ("particle_bulkvel_z", (v_units, [], None)),
-        ("particle_mass", (m_units, [], "Mass")),
-        ("virial_radius", (r_units, [], "Radius")),
-        ("child_r", (r_units, [], None)),
-        ("vmax_r", (v_units, [], None)),
-    # These fields I don't have good definitions for yet.
-    ('mgrav', ("", [], None)),
-    ('vmax', (v_units, [], "V_{max}")),
-    ('rvmax', (v_units, [], None)),
-    ('rs', (r_units, [], "R_s")),
-    ('klypin_rs', (r_units, [], "Klypin R_s")),
-    ('vrms', (v_units, [], "V_{rms}")),
-    ('Jx', ("", [], "J_x")),
-    ('Jy', ("", [], "J_y")),
-    ('Jz', ("", [], "J_z")),
-    ('energy', ("", [], None)),
-    ('spin', ("", [], "Spin Parameter")),
-    ('alt_m1', (m_units, [], None)),
-    ('alt_m2', (m_units, [], None)),
-    ('alt_m3', (m_units, [], None)),
-    ('alt_m4', (m_units, [], None)),
-    ('Xoff', ("", [], None)),
-    ('Voff', ("", [], None)),
-    ('b_to_a', ("", [], "Ellipsoidal b to a")),
-    ('c_to_a', ("", [], "Ellipsoidal c to a")),
-    ('Ax', ("", [], "A_x")),
-    ('Ay', ("", [], "A_y")),
-    ('Az', ("", [], "A_z")),
-    ('b_to_a2', ("", [], None)),
-    ('c_to_a2', ("", [], None)),
-    ('A2x', ("", [], "A2_x")),
-    ('A2y', ("", [], "A2_y")),
-    ('A2z', ("", [], "A2_z")),
-    ('bullock_spin', ("", [], "Bullock Spin Parameter")),
-    ('kin_to_pot', ("", [], "Kinetic to Potential")),
-    ('m_pe_b', ("", [], None)),
-    ('m_pe_d', ("", [], None)),
-    ('num_p', ("", [], "Number of Particles")),
-    ('num_child_particles', ("", [], "Number of Child Particles")),
-    ('p_start', ("", [], None)),
-    ('desc', ("", [], None)),
-    ('flags', ("", [], None)),
-    ('n_core', ("", [], None)),
-    ('min_pos_err', ("", [], None)),
-    ('min_vel_err', ("", [], None)),
-    ('min_bulkvel_err', ("", [], None)),
-)

diff -r 4221e22b2c232b4ea56a45ea84bd0d3c64cbdedf -r 17d76c2972fb78113017c00c26277cc1b44cd217 yt/frontends/halo_catalogs/rockstar/io.py
--- a/yt/frontends/halo_catalogs/rockstar/io.py
+++ /dev/null
@@ -1,131 +0,0 @@
-"""
-Rockstar data-file handling function
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-import h5py
-import numpy as np
-
-from yt.utilities.exceptions import *
-from yt.funcs import mylog
-
-from yt.utilities.io_handler import \
-    BaseIOHandler
-
-import yt.utilities.fortran_utils as fpu
-from .definitions import halo_dts
-from yt.utilities.lib.geometry_utils import compute_morton
-
-from yt.geometry.oct_container import _ORDER_MAX
-
-class IOHandlerRockstarBinary(BaseIOHandler):
-    _dataset_type = "rockstar_binary"
-
-    def __init__(self, *args, **kwargs):
-        super(IOHandlerRockstarBinary, self).__init__(*args, **kwargs)
-        self._halo_dt = halo_dts[self.ds.parameters['format_revision']]
-
-    def _read_fluid_selection(self, chunks, selector, fields, size):
-        raise NotImplementedError
-
-    def _read_particle_coords(self, chunks, ptf):
-        # This will read chunks and yield the results.
-        chunks = list(chunks)
-        data_files = set([])
-        # Only support halo reading for now.
-        assert(len(ptf) == 1)
-        assert(ptf.keys()[0] == "halos")
-        for chunk in chunks:
-            for obj in chunk.objs:
-                data_files.update(obj.data_files)
-        
-        for data_file in sorted(data_files):
-            pcount = data_file.header['num_halos']
-            with open(data_file.filename, "rb") as f:
-                f.seek(data_file._position_offset, os.SEEK_SET)
-                halos = np.fromfile(f, dtype=self._halo_dt, count = pcount)
-                x = halos['particle_position_x'].astype("float64")
-                y = halos['particle_position_y'].astype("float64")
-                z = halos['particle_position_z'].astype("float64")
-                yield "halos", (x, y, z)
-
-    def _read_particle_fields(self, chunks, ptf, selector):
-        # Now we have all the sizes, and we can allocate
-        chunks = list(chunks)
-        data_files = set([])
-        # Only support halo reading for now.
-        assert(len(ptf) == 1)
-        assert(ptf.keys()[0] == "halos")
-        for chunk in chunks:
-            for obj in chunk.objs:
-                data_files.update(obj.data_files)
-        for data_file in sorted(data_files):
-            pcount = data_file.header['num_halos']
-            with open(data_file.filename, "rb") as f:
-                for ptype, field_list in sorted(ptf.items()):
-                    f.seek(data_file._position_offset, os.SEEK_SET)
-                    halos = np.fromfile(f, dtype=self._halo_dt, count = pcount)
-                    x = halos['particle_position_x'].astype("float64")
-                    y = halos['particle_position_y'].astype("float64")
-                    z = halos['particle_position_z'].astype("float64")
-                    mask = selector.select_points(x, y, z, 0.0)
-                    del x, y, z
-                    if mask is None: continue
-                    for field in field_list:
-                        data = halos[field][mask].astype("float64")
-                        yield (ptype, field), data
-
-    def _initialize_index(self, data_file, regions):
-        pcount = data_file.header["num_halos"]
-        morton = np.empty(pcount, dtype='uint64')
-        mylog.debug("Initializing index % 5i (% 7i particles)",
-                    data_file.file_id, pcount)
-        ind = 0
-        with open(data_file.filename, "rb") as f:
-            f.seek(data_file._position_offset, os.SEEK_SET)
-            halos = np.fromfile(f, dtype=self._halo_dt, count = pcount)
-            pos = np.empty((halos.size, 3), dtype="float64")
-            # These positions are in Mpc, *not* "code" units
-            pos = data_file.ds.arr(pos, "code_length")
-            dx = np.finfo(halos['particle_position_x'].dtype).eps
-            dx = 2.0*self.ds.quan(dx, "code_length")
-            pos[:,0] = halos["particle_position_x"]
-            pos[:,1] = halos["particle_position_y"]
-            pos[:,2] = halos["particle_position_z"]
-            # These are 32 bit numbers, so we give a little lee-way.
-            # Otherwise, for big sets of particles, we often will bump into the
-            # domain edges.  This helps alleviate that.
-            np.clip(pos, self.ds.domain_left_edge + dx,
-                         self.ds.domain_right_edge - dx, pos)
-            #del halos
-            if np.any(pos.min(axis=0) < self.ds.domain_left_edge) or \
-               np.any(pos.max(axis=0) > self.ds.domain_right_edge):
-                raise YTDomainOverflow(pos.min(axis=0),
-                                       pos.max(axis=0),
-                                       self.ds.domain_left_edge,
-                                       self.ds.domain_right_edge)
-            regions.add_data_file(pos, data_file.file_id)
-            morton[ind:ind+pos.shape[0]] = compute_morton(
-                pos[:,0], pos[:,1], pos[:,2],
-                data_file.ds.domain_left_edge,
-                data_file.ds.domain_right_edge)
-        return morton
-
-    def _count_particles(self, data_file):
-        return {'halos': data_file.header['num_halos']}
-
-    def _identify_fields(self, data_file):
-        fields = [("halos", f) for f in self._halo_dt.fields if
-                  "padding" not in f]
-        return fields, {}

diff -r 4221e22b2c232b4ea56a45ea84bd0d3c64cbdedf -r 17d76c2972fb78113017c00c26277cc1b44cd217 yt/frontends/rockstar/__init__.py
--- /dev/null
+++ b/yt/frontends/rockstar/__init__.py
@@ -0,0 +1,15 @@
+"""
+API for Rockstar frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------

diff -r 4221e22b2c232b4ea56a45ea84bd0d3c64cbdedf -r 17d76c2972fb78113017c00c26277cc1b44cd217 yt/frontends/rockstar/api.py
--- /dev/null
+++ b/yt/frontends/rockstar/api.py
@@ -0,0 +1,24 @@
+"""
+API for Rockstar frontend
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from .data_structures import \
+     RockstarDataset
+
+from .io import \
+     IOHandlerRockstarBinary
+
+from .fields import \
+     RockstarFieldInfo

diff -r 4221e22b2c232b4ea56a45ea84bd0d3c64cbdedf -r 17d76c2972fb78113017c00c26277cc1b44cd217 yt/frontends/rockstar/data_structures.py
--- /dev/null
+++ b/yt/frontends/rockstar/data_structures.py
@@ -0,0 +1,110 @@
+"""
+Data structures for Rockstar frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+import stat
+import weakref
+import struct
+import glob
+import time
+import os
+
+from .fields import \
+    RockstarFieldInfo
+
+from yt.utilities.cosmology import Cosmology
+from yt.geometry.particle_geometry_handler import \
+    ParticleIndex
+from yt.data_objects.static_output import \
+    Dataset, \
+    ParticleFile
+import yt.utilities.fortran_utils as fpu
+from yt.units.yt_array import \
+    YTArray, \
+    YTQuantity
+
+from .definitions import \
+    header_dt
+
+class RockstarBinaryFile(ParticleFile):
+    def __init__(self, ds, io, filename, file_id):
+        with open(filename, "rb") as f:
+            self.header = fpu.read_cattrs(f, header_dt, "=")
+            self._position_offset = f.tell()
+            f.seek(0, os.SEEK_END)
+            self._file_size = f.tell()
+
+        super(RockstarBinaryFile, self).__init__(ds, io, filename, file_id)
+
+class RockstarDataset(Dataset):
+    _index_class = ParticleIndex
+    _file_class = RockstarBinaryFile
+    _field_info_class = RockstarFieldInfo
+    _suffix = ".bin"
+
+    def __init__(self, filename, dataset_type="rockstar_binary",
+                 n_ref = 16, over_refine_factor = 1):
+        self.n_ref = n_ref
+        self.over_refine_factor = over_refine_factor
+        super(RockstarDataset, self).__init__(filename, dataset_type)
+
+    def _parse_parameter_file(self):
+        with open(self.parameter_filename, "rb") as f:
+            hvals = fpu.read_cattrs(f, header_dt)
+            hvals.pop("unused")
+        self.dimensionality = 3
+        self.refine_by = 2
+        self.unique_identifier = \
+            int(os.stat(self.parameter_filename)[stat.ST_CTIME])
+        prefix = ".".join(self.parameter_filename.rsplit(".", 2)[:-2])
+        self.filename_template = "%s.%%(num)s%s" % (prefix, self._suffix)
+        self.file_count = len(glob.glob(prefix + "*" + self._suffix))
+        
+        # Now we can set up things we already know.
+        self.cosmological_simulation = 1
+        self.current_redshift = (1.0 / hvals['scale']) - 1.0
+        self.hubble_constant = hvals['h0']
+        self.omega_lambda = hvals['Ol']
+        self.omega_matter = hvals['Om']
+        cosmo = Cosmology(self.hubble_constant,
+                          self.omega_matter, self.omega_lambda)
+        self.current_time = cosmo.hubble_time(self.current_redshift).in_units("s")
+        self.periodicity = (True, True, True)
+        self.particle_types = ("halos",)
+        self.particle_types_raw = ("halos",)
+
+        self.domain_left_edge = np.array([0.0,0.0,0.0])
+        self.domain_right_edge = np.array([hvals['box_size']] * 3)
+
+        nz = 1 << self.over_refine_factor
+        self.domain_dimensions = np.ones(3, "int32") * nz
+        self.parameters.update(hvals)
+
+    def _set_code_unit_attributes(self):
+        z = self.current_redshift
+        self.length_unit = self.quan(1.0 / (1.0+z), "Mpc / h")
+        self.mass_unit = self.quan(1.0, "Msun / h")
+        self.velocity_unit = self.quan(1.0, "km / s")
+        self.time_unit = self.length_unit / self.velocity_unit
+
+    @classmethod
+    def _is_valid(cls, *args, **kwargs):
+        if not args[0].endswith(".bin"): return False
+        with open(args[0], "rb") as f:
+            header = fpu.read_cattrs(f, header_dt)
+            if header['magic'] == 18077126535843729616:
+                return True
+        return False
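
_is_valid above recognizes a Rockstar catalog from the .bin suffix plus
the 64-bit magic number that opens the 256-byte binary header.  A minimal
standalone sketch of the same check, assuming native byte order as
read_cattrs uses here:

    import struct

    ROCKSTAR_MAGIC = 18077126535843729616  # first uint64 of the header

    def looks_like_rockstar(filename):
        if not filename.endswith(".bin"):
            return False
        with open(filename, "rb") as f:
            raw = f.read(8)
        # Native-order unsigned 64-bit, as in read_cattrs(f, header_dt, "=")
        return len(raw) == 8 and struct.unpack("=Q", raw)[0] == ROCKSTAR_MAGIC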

diff -r 4221e22b2c232b4ea56a45ea84bd0d3c64cbdedf -r 17d76c2972fb78113017c00c26277cc1b44cd217 yt/frontends/rockstar/definitions.py
--- /dev/null
+++ b/yt/frontends/rockstar/definitions.py
@@ -0,0 +1,127 @@
+"""
+Data structures for Rockstar
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+
+BINARY_HEADER_SIZE=256
+header_dt = (
+    ("magic", 1, "Q"),
+    ("snap", 1, "q"),
+    ("chunk", 1, "q"),
+    ("scale", 1, "f"),
+    ("Om", 1, "f"),
+    ("Ol", 1, "f"),
+    ("h0", 1, "f"),
+    ("bounds", 6, "f"),
+    ("num_halos", 1, "q"),
+    ("num_particles", 1, "q"),
+    ("box_size", 1, "f"),
+    ("particle_mass", 1, "f"),
+    ("particle_type", 1, "q"),
+    ("format_revision", 1, "i"),
+    ("version", 12, "c"),
+    ("unused", BINARY_HEADER_SIZE - 4*12 - 4 - 8*6 - 12, "c")
+)
+
+# Note the final field here, which is a field for min/max format revision in
+# which the field appears.
+
+KNOWN_REVISIONS=[0, 1]
+
+halo_dt = [
+    ('particle_identifier', np.int64),
+    ('particle_position_x', np.float32),
+    ('particle_position_y', np.float32),
+    ('particle_position_z', np.float32),
+    ('particle_mposition_x', np.float32, (0, 0)),
+    ('particle_mposition_y', np.float32, (0, 0)),
+    ('particle_mposition_z', np.float32, (0, 0)),
+    ('particle_velocity_x', np.float32),
+    ('particle_velocity_y', np.float32),
+    ('particle_velocity_z', np.float32),
+    ('particle_corevel_x', np.float32, (1, 100)),
+    ('particle_corevel_y', np.float32, (1, 100)),
+    ('particle_corevel_z', np.float32, (1, 100)),
+    ('particle_bulkvel_x', np.float32),
+    ('particle_bulkvel_y', np.float32),
+    ('particle_bulkvel_z', np.float32),
+    ('particle_mass', np.float32),
+    ('virial_radius', np.float32),
+    ('child_r', np.float32),
+    ('vmax_r', np.float32),
+    ('mgrav', np.float32),
+    ('vmax', np.float32),
+    ('rvmax', np.float32),
+    ('rs', np.float32),
+    ('klypin_rs', np.float32),
+    ('vrms', np.float32),
+    ('Jx', np.float32),
+    ('Jy', np.float32),
+    ('Jz', np.float32),
+    ('energy', np.float32),
+    ('spin', np.float32),
+    ('alt_m1', np.float32),
+    ('alt_m2', np.float32),
+    ('alt_m3', np.float32),
+    ('alt_m4', np.float32),
+    ('Xoff', np.float32),
+    ('Voff', np.float32),
+    ('b_to_a', np.float32),
+    ('c_to_a', np.float32),
+    ('Ax', np.float32),
+    ('Ay', np.float32),
+    ('Az', np.float32),
+    ('b_to_a2', np.float32, (1, 100)),
+    ('c_to_a2', np.float32, (1, 100)),
+    ('A2x', np.float32, (1, 100)),
+    ('A2y', np.float32, (1, 100)),
+    ('A2z', np.float32, (1, 100)),
+    ('bullock_spin', np.float32),
+    ('kin_to_pot', np.float32),
+    ('m_pe_b', np.float32, (1, 100)),
+    ('m_pe_d', np.float32, (1, 100)),
+    ('num_p', np.int64),
+    ('num_child_particles', np.int64),
+    ('p_start', np.int64),
+    ('desc', np.int64),
+    ('flags', np.int64),
+    ('n_core', np.int64),
+    ('min_pos_err', np.float32),
+    ('min_vel_err', np.float32),
+    ('min_bulkvel_err', np.float32),
+]
+
+halo_dts = {}
+
+for rev in KNOWN_REVISIONS:
+    halo_dts[rev] = []
+    for item in halo_dt:
+        if len(item) == 2:
+            halo_dts[rev].append(item)
+        else:
+            mi, ma = item[2]
+            if (mi <= rev) and (rev <= ma):
+                halo_dts[rev].append(item[:2])
+    halo_dts[rev] = np.dtype(halo_dts[rev], align=True)
+
+particle_dt = np.dtype([
+    ('particle_identifier', np.int64),
+    ('particle_position_x', np.float32),
+    ('particle_position_y', np.float32),
+    ('particle_position_z', np.float32),
+    ('particle_velocity_x', np.float32),
+    ('particle_velocity_y', np.float32),
+    ('particle_velocity_z', np.float32),
+])
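
The three-element entries in halo_dt carry a (min, max) window of format
revisions in which the field exists; the loop above keeps a field only
when the requested revision falls inside that window.  A toy reduction of
that filter over an illustrative field subset:

    import numpy as np

    fields = [
        ("particle_identifier", np.int64),
        ("particle_mposition_x", np.float32, (0, 0)),  # revision 0 only
        ("particle_corevel_x", np.float32, (1, 100)),  # revisions 1..100
    ]

    def dtype_for_revision(rev):
        kept = []
        for item in fields:
            if len(item) == 2:
                kept.append(item)  # unconditional field
            else:
                lo, hi = item[2]
                if lo <= rev <= hi:
                    kept.append(item[:2])
        return np.dtype(kept, align=True)

    print(dtype_for_revision(0).names)  # identifier and mposition_x
    print(dtype_for_revision(1).names)  # identifier and corevel_x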

diff -r 4221e22b2c232b4ea56a45ea84bd0d3c64cbdedf -r 17d76c2972fb78113017c00c26277cc1b44cd217 yt/frontends/rockstar/fields.py
--- /dev/null
+++ b/yt/frontends/rockstar/fields.py
@@ -0,0 +1,97 @@
+"""
+Rockstar-specific fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+
+from yt.funcs import mylog
+from yt.fields.field_info_container import \
+    FieldInfoContainer
+from yt.units.yt_array import \
+    YTArray
+
+from yt.utilities.physical_constants import \
+    mh, \
+    mass_sun_cgs
+
+m_units = "Msun / h"                # Msun / h
+p_units = "Mpccm / h"               # Mpc / h comoving
+v_units = "km / s"                  # km /s phys, peculiar
+r_units = "kpccm / h"               # kpc / h comoving
+
+class RockstarFieldInfo(FieldInfoContainer):
+    known_other_fields = (
+    )
+
+    known_particle_fields = (
+        ("particle_identifier", ("", [], None)),
+        ("particle_position_x", (p_units, [], None)),
+        ("particle_position_y", (p_units, [], None)),
+        ("particle_position_z", (p_units, [], None)),
+        ("particle_velocity_x", (v_units, [], None)),
+        ("particle_velocity_y", (v_units, [], None)),
+        ("particle_velocity_z", (v_units, [], None)),
+        ("particle_corevel_x", (v_units, [], None)),
+        ("particle_corevel_y", (v_units, [], None)),
+        ("particle_corevel_z", (v_units, [], None)),
+        ("particle_bulkvel_x", (v_units, [], None)),
+        ("particle_bulkvel_y", (v_units, [], None)),
+        ("particle_bulkvel_z", (v_units, [], None)),
+        ("particle_mass", (m_units, [], "Mass")),
+        ("virial_radius", (r_units, [], "Radius")),
+        ("child_r", (r_units, [], None)),
+        ("vmax_r", (v_units, [], None)),
+    # I don't have good definitions for these fields yet.
+    ('mgrav', ("", [], None)),
+    ('vmax', (v_units, [], "V_{max}")),
+    ('rvmax', (v_units, [], None)),
+    ('rs', (r_units, [], "R_s")),
+    ('klypin_rs', (r_units, [], "Klypin R_s")),
+    ('vrms', (v_units, [], "V_{rms}")),
+    ('Jx', ("", [], "J_x")),
+    ('Jy', ("", [], "J_y")),
+    ('Jz', ("", [], "J_z")),
+    ('energy', ("", [], None)),
+    ('spin', ("", [], "Spin Parameter")),
+    ('alt_m1', (m_units, [], None)),
+    ('alt_m2', (m_units, [], None)),
+    ('alt_m3', (m_units, [], None)),
+    ('alt_m4', (m_units, [], None)),
+    ('Xoff', ("", [], None)),
+    ('Voff', ("", [], None)),
+    ('b_to_a', ("", [], "Ellipsoidal b to a")),
+    ('c_to_a', ("", [], "Ellipsoidal c to a")),
+    ('Ax', ("", [], "A_x")),
+    ('Ay', ("", [], "A_y")),
+    ('Az', ("", [], "A_z")),
+    ('b_to_a2', ("", [], None)),
+    ('c_to_a2', ("", [], None)),
+    ('A2x', ("", [], "A2_x")),
+    ('A2y', ("", [], "A2_y")),
+    ('A2z', ("", [], "A2_z")),
+    ('bullock_spin', ("", [], "Bullock Spin Parameter")),
+    ('kin_to_pot', ("", [], "Kinetic to Potential")),
+    ('m_pe_b', ("", [], None)),
+    ('m_pe_d', ("", [], None)),
+    ('num_p', ("", [], "Number of Particles")),
+    ('num_child_particles', ("", [], "Number of Child Particles")),
+    ('p_start', ("", [], None)),
+    ('desc', ("", [], None)),
+    ('flags', ("", [], None)),
+    ('n_core', ("", [], None)),
+    ('min_pos_err', ("", [], None)),
+    ('min_vel_err', ("", [], None)),
+    ('min_bulkvel_err', ("", [], None)),
+)

diff -r 4221e22b2c232b4ea56a45ea84bd0d3c64cbdedf -r 17d76c2972fb78113017c00c26277cc1b44cd217 yt/frontends/rockstar/io.py
--- /dev/null
+++ b/yt/frontends/rockstar/io.py
@@ -0,0 +1,131 @@
+"""
+Rockstar data-file handling function
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+import numpy as np
+import os
+
+from yt.utilities.exceptions import *
+from yt.funcs import mylog
+
+from yt.utilities.io_handler import \
+    BaseIOHandler
+
+import yt.utilities.fortran_utils as fpu
+from .definitions import halo_dts
+from yt.utilities.lib.geometry_utils import compute_morton
+
+from yt.geometry.oct_container import _ORDER_MAX
+
+class IOHandlerRockstarBinary(BaseIOHandler):
+    _dataset_type = "rockstar_binary"
+
+    def __init__(self, *args, **kwargs):
+        super(IOHandlerRockstarBinary, self).__init__(*args, **kwargs)
+        self._halo_dt = halo_dts[self.ds.parameters['format_revision']]
+
+    def _read_fluid_selection(self, chunks, selector, fields, size):
+        raise NotImplementedError
+
+    def _read_particle_coords(self, chunks, ptf):
+        # This will read chunks and yield the results.
+        chunks = list(chunks)
+        data_files = set([])
+        # Only support halo reading for now.
+        assert(len(ptf) == 1)
+        assert(ptf.keys()[0] == "halos")
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        
+        for data_file in sorted(data_files):
+            pcount = data_file.header['num_halos']
+            with open(data_file.filename, "rb") as f:
+                f.seek(data_file._position_offset, os.SEEK_SET)
+                halos = np.fromfile(f, dtype=self._halo_dt, count = pcount)
+                x = halos['particle_position_x'].astype("float64")
+                y = halos['particle_position_y'].astype("float64")
+                z = halos['particle_position_z'].astype("float64")
+                yield "halos", (x, y, z)
+
+    def _read_particle_fields(self, chunks, ptf, selector):
+        # Now we have all the sizes, and we can allocate
+        chunks = list(chunks)
+        data_files = set([])
+        # Only support halo reading for now.
+        assert(len(ptf) == 1)
+        assert(ptf.keys()[0] == "halos")
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in sorted(data_files):
+            pcount = data_file.header['num_halos']
+            with open(data_file.filename, "rb") as f:
+                for ptype, field_list in sorted(ptf.items()):
+                    f.seek(data_file._position_offset, os.SEEK_SET)
+                    halos = np.fromfile(f, dtype=self._halo_dt, count = pcount)
+                    x = halos['particle_position_x'].astype("float64")
+                    y = halos['particle_position_y'].astype("float64")
+                    z = halos['particle_position_z'].astype("float64")
+                    mask = selector.select_points(x, y, z, 0.0)
+                    del x, y, z
+                    if mask is None: continue
+                    for field in field_list:
+                        data = halos[field][mask].astype("float64")
+                        yield (ptype, field), data
+
+    def _initialize_index(self, data_file, regions):
+        pcount = data_file.header["num_halos"]
+        morton = np.empty(pcount, dtype='uint64')
+        mylog.debug("Initializing index % 5i (% 7i particles)",
+                    data_file.file_id, pcount)
+        ind = 0
+        with open(data_file.filename, "rb") as f:
+            f.seek(data_file._position_offset, os.SEEK_SET)
+            halos = np.fromfile(f, dtype=self._halo_dt, count = pcount)
+            pos = np.empty((halos.size, 3), dtype="float64")
+            # These positions are in Mpc, *not* "code" units
+            pos = data_file.ds.arr(pos, "code_length")
+            dx = np.finfo(halos['particle_position_x'].dtype).eps
+            dx = 2.0*self.ds.quan(dx, "code_length")
+            pos[:,0] = halos["particle_position_x"]
+            pos[:,1] = halos["particle_position_y"]
+            pos[:,2] = halos["particle_position_z"]
+            # These are 32 bit numbers, so we give a little leeway.
+            # Otherwise, for big sets of particles, we will often bump
+            # into the domain edges.  This clipping helps alleviate that.
+            np.clip(pos, self.ds.domain_left_edge + dx,
+                         self.ds.domain_right_edge - dx, pos)
+            #del halos
+            if np.any(pos.min(axis=0) < self.ds.domain_left_edge) or \
+               np.any(pos.max(axis=0) > self.ds.domain_right_edge):
+                raise YTDomainOverflow(pos.min(axis=0),
+                                       pos.max(axis=0),
+                                       self.ds.domain_left_edge,
+                                       self.ds.domain_right_edge)
+            regions.add_data_file(pos, data_file.file_id)
+            morton[ind:ind+pos.shape[0]] = compute_morton(
+                pos[:,0], pos[:,1], pos[:,2],
+                data_file.ds.domain_left_edge,
+                data_file.ds.domain_right_edge)
+        return morton
+
+    def _count_particles(self, data_file):
+        return {'halos': data_file.header['num_halos']}
+
+    def _identify_fields(self, data_file):
+        fields = [("halos", f) for f in self._halo_dt.fields if
+                  "padding" not in f]
+        return fields, {}
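
Since every halo record shares one structured dtype, the handler above
reads a file's whole halo table with a single np.fromfile call after
seeking past the header.  A condensed sketch, with a hypothetical
two-field dtype standing in for halo_dts[format_revision]:

    import numpy as np

    halo_dt = np.dtype([("particle_identifier", np.int64),
                        ("particle_mass", np.float32)], align=True)

    def read_halos(filename, position_offset, num_halos):
        # Seek past the binary header, then pull the whole halo
        # table in one structured read.
        with open(filename, "rb") as f:
            f.seek(position_offset)
            return np.fromfile(f, dtype=halo_dt, count=num_halos)

    # e.g. halos = read_halos("halos_0.0.bin", 256, header["num_halos"])
    # halos["particle_mass"] then behaves as a plain float32 array.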


https://bitbucket.org/yt_analysis/yt/commits/57d37bfcc6cd/
Changeset:   57d37bfcc6cd
Branch:      yt
User:        brittonsmith
Date:        2014-10-23 00:21:44+00:00
Summary:     Removing halo_catalogs directory.
Affected #:  4 files

diff -r 17d76c2972fb78113017c00c26277cc1b44cd217 -r 57d37bfcc6cd50d63c4d3ec6e34b15c807a51a59 yt/frontends/halo_catalogs/__init__.py
--- a/yt/frontends/halo_catalogs/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-"""
-API for halo catalog frontends.
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------

diff -r 17d76c2972fb78113017c00c26277cc1b44cd217 -r 57d37bfcc6cd50d63c4d3ec6e34b15c807a51a59 yt/frontends/halo_catalogs/api.py
--- a/yt/frontends/halo_catalogs/api.py
+++ /dev/null
@@ -1,30 +0,0 @@
-"""
-API for yt.frontends.halo_catalogs
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from .halo_catalog.api import \
-     HaloCatalogDataset, \
-     IOHandlerHaloCatalogHDF5, \
-     HaloCatalogFieldInfo
-
-from .rockstar.api import \
-      RockstarDataset, \
-      IOHandlerRockstarBinary, \
-      RockstarFieldInfo
-
-from .owls_subfind.api import \
-     OWLSSubfindDataset, \
-     IOHandlerOWLSSubfindHDF5, \
-     OWLSSubfindFieldInfo

diff -r 17d76c2972fb78113017c00c26277cc1b44cd217 -r 57d37bfcc6cd50d63c4d3ec6e34b15c807a51a59 yt/frontends/halo_catalogs/setup.py
--- a/yt/frontends/halo_catalogs/setup.py
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env python
-from numpy.distutils.misc_util import Configuration
-
-
-def configuration(parent_package='', top_path=None):
-    config = Configuration('halo_catalogs', parent_package, top_path)
-    config.add_subpackage("halo_catalog")
-    config.add_subpackage("owls_subfind")
-    config.add_subpackage("rockstar")
-    config.make_config_py()
-    return config


https://bitbucket.org/yt_analysis/yt/commits/487d3cbe0aaa/
Changeset:   487d3cbe0aaa
Branch:      yt
User:        brittonsmith
Date:        2014-10-23 00:59:10+00:00
Summary:     Cleaning up setup.py.
Affected #:  1 file

diff -r 57d37bfcc6cd50d63c4d3ec6e34b15c807a51a59 -r 487d3cbe0aaa7be098488a5fc8ae177642065723 yt/frontends/setup.py
--- a/yt/frontends/setup.py
+++ b/yt/frontends/setup.py
@@ -8,26 +8,40 @@
     config.make_config_py()  # installs __config__.py
     #config.make_svn_version_py()
     config.add_subpackage("art")
+    config.add_subpackage("artio")
     config.add_subpackage("athena")
     config.add_subpackage("boxlib")
     config.add_subpackage("chombo")
+    config.add_subpackage("eagle")
     config.add_subpackage("enzo")
     config.add_subpackage("fits")
     config.add_subpackage("flash")
+    config.add_subpackage("gadget")
     config.add_subpackage("gdf")
-    config.add_subpackage("halo_catalogs")
+    config.add_subpackage("halo_catalog")
+    config.add_subpackage("http_stream")
     config.add_subpackage("moab")
-    config.add_subpackage("moab/tests")
-    config.add_subpackage("artio")
-#    config.add_subpackage("artio2")
+    config.add_subpackage("owls")
+    config.add_subpackage("owls_subfind")
     config.add_subpackage("pluto")
     config.add_subpackage("ramses")
+    config.add_subpackage("rockstar")
     config.add_subpackage("sdf")
     config.add_subpackage("sph")
     config.add_subpackage("stream")
+    config.add_subpackage("tipsy")
+    config.add_subpackage("art/tests")
+    config.add_subpackage("artio/tests")
+    config.add_subpackage("athena/tests")
     config.add_subpackage("boxlib/tests")
+    config.add_subpackage("chombo/tests")
+    config.add_subpackage("enzo/tests")
+    config.add_subpackage("fits/tests")
     config.add_subpackage("flash/tests")
-    config.add_subpackage("enzo/tests")
+    config.add_subpackage("moab/tests")
+    config.add_subpackage("owls/tests")
+    config.add_subpackage("owls_subfind/tests")
+    config.add_subpackage("ramses/tests")
     config.add_subpackage("stream/tests")
-    config.add_subpackage("chombo/tests")
+    config.add_subpackage("tipsy/tests")
     return config


https://bitbucket.org/yt_analysis/yt/commits/794e3cef4b86/
Changeset:   794e3cef4b86
Branch:      yt
User:        brittonsmith
Date:        2014-10-23 01:15:45+00:00
Summary:     Adding setup.py files.
Affected #:  8 files

diff -r 487d3cbe0aaa7be098488a5fc8ae177642065723 -r 794e3cef4b863c0fbead86223d7e548862ea135e yt/frontends/eagle/setup.py
--- /dev/null
+++ b/yt/frontends/eagle/setup.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+import setuptools
+import os
+import sys
+import os.path
+
+
+def configuration(parent_package='', top_path=None):
+    from numpy.distutils.misc_util import Configuration
+    config = Configuration('eagle', parent_package, top_path)
+    config.make_config_py()  # installs __config__.py
+    #config.make_svn_version_py()
+    return config

diff -r 487d3cbe0aaa7be098488a5fc8ae177642065723 -r 794e3cef4b863c0fbead86223d7e548862ea135e yt/frontends/gadget/setup.py
--- /dev/null
+++ b/yt/frontends/gadget/setup.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+import setuptools
+import os
+import sys
+import os.path
+
+
+def configuration(parent_package='', top_path=None):
+    from numpy.distutils.misc_util import Configuration
+    config = Configuration('gadget', parent_package, top_path)
+    config.make_config_py()  # installs __config__.py
+    #config.make_svn_version_py()
+    return config

diff -r 487d3cbe0aaa7be098488a5fc8ae177642065723 -r 794e3cef4b863c0fbead86223d7e548862ea135e yt/frontends/halo_catalog/setup.py
--- /dev/null
+++ b/yt/frontends/halo_catalog/setup.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+import setuptools
+import os
+import sys
+import os.path
+
+
+def configuration(parent_package='', top_path=None):
+    from numpy.distutils.misc_util import Configuration
+    config = Configuration('halo_catalog', parent_package, top_path)
+    config.make_config_py()  # installs __config__.py
+    #config.make_svn_version_py()
+    return config

diff -r 487d3cbe0aaa7be098488a5fc8ae177642065723 -r 794e3cef4b863c0fbead86223d7e548862ea135e yt/frontends/http_stream/setup.py
--- /dev/null
+++ b/yt/frontends/http_stream/setup.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+import setuptools
+import os
+import sys
+import os.path
+
+
+def configuration(parent_package='', top_path=None):
+    from numpy.distutils.misc_util import Configuration
+    config = Configuration('http_stream', parent_package, top_path)
+    config.make_config_py()  # installs __config__.py
+    #config.make_svn_version_py()
+    return config

diff -r 487d3cbe0aaa7be098488a5fc8ae177642065723 -r 794e3cef4b863c0fbead86223d7e548862ea135e yt/frontends/owls/setup.py
--- /dev/null
+++ b/yt/frontends/owls/setup.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+import setuptools
+import os
+import sys
+import os.path
+
+
+def configuration(parent_package='', top_path=None):
+    from numpy.distutils.misc_util import Configuration
+    config = Configuration('owls', parent_package, top_path)
+    config.make_config_py()  # installs __config__.py
+    #config.make_svn_version_py()
+    return config

diff -r 487d3cbe0aaa7be098488a5fc8ae177642065723 -r 794e3cef4b863c0fbead86223d7e548862ea135e yt/frontends/owls_subfind/setup.py
--- /dev/null
+++ b/yt/frontends/owls_subfind/setup.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+import setuptools
+import os
+import sys
+import os.path
+
+
+def configuration(parent_package='', top_path=None):
+    from numpy.distutils.misc_util import Configuration
+    config = Configuration('owls_subfind', parent_package, top_path)
+    config.make_config_py()  # installs __config__.py
+    #config.make_svn_version_py()
+    return config

diff -r 487d3cbe0aaa7be098488a5fc8ae177642065723 -r 794e3cef4b863c0fbead86223d7e548862ea135e yt/frontends/rockstar/setup.py
--- /dev/null
+++ b/yt/frontends/rockstar/setup.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+import setuptools
+import os
+import sys
+import os.path
+
+
+def configuration(parent_package='', top_path=None):
+    from numpy.distutils.misc_util import Configuration
+    config = Configuration('rockstar', parent_package, top_path)
+    config.make_config_py()  # installs __config__.py
+    #config.make_svn_version_py()
+    return config

diff -r 487d3cbe0aaa7be098488a5fc8ae177642065723 -r 794e3cef4b863c0fbead86223d7e548862ea135e yt/frontends/tipsy/setup.py
--- /dev/null
+++ b/yt/frontends/tipsy/setup.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+import setuptools
+import os
+import sys
+import os.path
+
+
+def configuration(parent_package='', top_path=None):
+    from numpy.distutils.misc_util import Configuration
+    config = Configuration('tipsy', parent_package, top_path)
+    config.make_config_py()  # installs __config__.py
+    #config.make_svn_version_py()
+    return config

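The eight setup.py files above are identical apart from the package name
passed to Configuration. A hypothetical helper (not part of this commit;
make_configuration does not exist in yt) could generate them from that
single parameter:

    def make_configuration(pkg_name):
        # Build the same numpy.distutils Configuration each file defines,
        # parameterized on the package name.
        def configuration(parent_package='', top_path=None):
            from numpy.distutils.misc_util import Configuration
            config = Configuration(pkg_name, parent_package, top_path)
            config.make_config_py()  # installs __config__.py
            return config
        return configuration

    configuration = make_configuration('gadget')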

https://bitbucket.org/yt_analysis/yt/commits/1ab59a5061c8/
Changeset:   1ab59a5061c8
Branch:      yt
User:        brittonsmith
Date:        2014-10-23 01:17:47+00:00
Summary:     Adding tests import.
Affected #:  1 file

diff -r 794e3cef4b863c0fbead86223d7e548862ea135e -r 1ab59a5061c8d3e8fffb5f7212824e3ebe1af99d yt/frontends/owls_subfind/api.py
--- a/yt/frontends/owls_subfind/api.py
+++ b/yt/frontends/owls_subfind/api.py
@@ -22,3 +22,5 @@
 
 from .fields import \
      OWLSSubfindFieldInfo
+
+from . import tests


https://bitbucket.org/yt_analysis/yt/commits/2533e9747a1f/
Changeset:   2533e9747a1f
Branch:      yt
User:        brittonsmith
Date:        2014-10-23 15:07:39+00:00
Summary:     Fixing more imports.
Affected #:  4 files

diff -r 1ab59a5061c8d3e8fffb5f7212824e3ebe1af99d -r 2533e9747a1f7c008125ce2a4e64e5dbbb7925ae yt/analysis_modules/halo_analysis/halo_finding_methods.py
--- a/yt/analysis_modules/halo_analysis/halo_finding_methods.py
+++ b/yt/analysis_modules/halo_analysis/halo_finding_methods.py
@@ -17,7 +17,7 @@
 
 from yt.analysis_modules.halo_finding.halo_objects import \
     FOFHaloFinder, HOPHaloFinder
-from yt.frontends.halo_catalogs.halo_catalog.data_structures import \
+from yt.frontends.halo_catalog.data_structures import \
     HaloCatalogDataset
 from yt.frontends.stream.data_structures import \
     load_particles
@@ -70,7 +70,7 @@
     Run the Rockstar halo finding method.
     """
 
-    from yt.frontends.halo_catalogs.rockstar.data_structures import \
+    from yt.frontends.rockstar.data_structures import \
      RockstarDataset
     from yt.analysis_modules.halo_finding.rockstar.api import \
      RockstarHaloFinder

diff -r 1ab59a5061c8d3e8fffb5f7212824e3ebe1af99d -r 2533e9747a1f7c008125ce2a4e64e5dbbb7925ae yt/frontends/api.py
--- a/yt/frontends/api.py
+++ b/yt/frontends/api.py
@@ -27,7 +27,7 @@
     'flash',
     'gadget',
     'gdf',
-    'halo_catalogs',
+    'halo_catalog',
     'http_stream',
     'moab',
     'owls',

diff -r 1ab59a5061c8d3e8fffb5f7212824e3ebe1af99d -r 2533e9747a1f7c008125ce2a4e64e5dbbb7925ae yt/frontends/eagle/api.py
--- a/yt/frontends/eagle/api.py
+++ b/yt/frontends/eagle/api.py
@@ -22,4 +22,4 @@
     EagleNetworkFieldInfo
 
 from .io import \
-    IOHandlerTipsyBinary
+    IOHandlerEagleNetwork

diff -r 1ab59a5061c8d3e8fffb5f7212824e3ebe1af99d -r 2533e9747a1f7c008125ce2a4e64e5dbbb7925ae yt/frontends/owls/io.py
--- a/yt/frontends/owls/io.py
+++ b/yt/frontends/owls/io.py
@@ -22,7 +22,7 @@
 from yt.utilities.lib.geometry_utils import \
     compute_morton
 
-.definitions import \
+from .definitions import \
     ghdf5_ptypes
 
 CHUNKSIZE = 10000000


https://bitbucket.org/yt_analysis/yt/commits/d669c0633634/
Changeset:   d669c0633634
Branch:      yt
User:        brittonsmith
Date:        2014-10-23 18:59:51+00:00
Summary:     Restoring gadget hdf5 io handler.
Affected #:  2 files

diff -r 2533e9747a1f7c008125ce2a4e64e5dbbb7925ae -r d669c06336343abad8158fa23510893f3aad68d8 yt/frontends/gadget/data_structures.py
--- a/yt/frontends/gadget/data_structures.py
+++ b/yt/frontends/gadget/data_structures.py
@@ -239,7 +239,6 @@
         # We do not allow load() of these files.
         return False
 
-
 class GadgetHDF5Dataset(GadgetDataset):
     _file_class = ParticleFile
     _field_info_class = SPHFieldInfo

diff -r 2533e9747a1f7c008125ce2a4e64e5dbbb7925ae -r d669c06336343abad8158fa23510893f3aad68d8 yt/frontends/gadget/io.py
--- a/yt/frontends/gadget/io.py
+++ b/yt/frontends/gadget/io.py
@@ -24,6 +24,11 @@
 from yt.utilities.lib.geometry_utils import \
     compute_morton
 
+class IOHandlerGadgetHDF5(IOHandlerOWLS):
+    _dataset_type = "gadget_hdf5"
+
+ZeroMass = object()
+    
 class IOHandlerGadgetBinary(BaseIOHandler):
     _dataset_type = "gadget_binary"
     _vector_fields = ("Coordinates", "Velocity", "Velocities")


https://bitbucket.org/yt_analysis/yt/commits/e909d96fb88f/
Changeset:   e909d96fb88f
Branch:      yt
User:        brittonsmith
Date:        2014-10-23 19:56:18+00:00
Summary:     Forgot to import the stupid thing.
Affected #:  1 file

diff -r d669c06336343abad8158fa23510893f3aad68d8 -r e909d96fb88f4c5e14f4193d08e7afc020430b6c yt/frontends/gadget/api.py
--- a/yt/frontends/gadget/api.py
+++ b/yt/frontends/gadget/api.py
@@ -19,4 +19,5 @@
     GadgetHDF5Dataset
 
 from .io import \
-    IOHandlerGadgetBinary
+    IOHandlerGadgetBinary, \
+    IOHandlerGadgetHDF5

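With this import added, both IO handlers resolve through the frontend's
public api module; a usage sketch:

    # Both handlers are now re-exported by the frontend API:
    from yt.frontends.gadget.api import IOHandlerGadgetBinary, \
        IOHandlerGadgetHDF5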

https://bitbucket.org/yt_analysis/yt/commits/e3e987f781cc/
Changeset:   e3e987f781cc
Branch:      yt
User:        brittonsmith
Date:        2014-10-23 20:32:48+00:00
Summary:     There's another one.
Affected #:  1 file

diff -r e909d96fb88f4c5e14f4193d08e7afc020430b6c -r e3e987f781cce48b46c3477323641eadb4a280aa yt/frontends/gadget/io.py
--- a/yt/frontends/gadget/io.py
+++ b/yt/frontends/gadget/io.py
@@ -17,6 +17,8 @@
 import h5py
 import numpy as np
 
+from yt.frontends.owls.io import \
+    IOHandlerOWLS
 from yt.geometry.oct_container import \
     _ORDER_MAX
 from yt.utilities.io_handler import \


https://bitbucket.org/yt_analysis/yt/commits/8ba544dc06bf/
Changeset:   8ba544dc06bf
Branch:      yt
User:        brittonsmith
Date:        2014-10-23 23:35:55+00:00
Summary:     Fix another import.
Affected #:  1 file

diff -r e3e987f781cce48b46c3477323641eadb4a280aa -r 8ba544dc06bf2ebd5c1d31cbedf16960c200c08e yt/frontends/tipsy/tests/test_tipsy.py
--- a/yt/frontends/tipsy/tests/test_tipsy.py
+++ b/yt/frontends/tipsy/tests/test_tipsy.py
@@ -23,7 +23,7 @@
     PixelizedProjectionValuesTest, \
     FieldValuesTest, \
     create_obj
-from yt.frontends.sph.api import TipsyDataset
+from yt.frontends.tipsy.api import TipsyDataset
 
 _fields = (("deposit", "all_density"),
            ("deposit", "all_count"),


https://bitbucket.org/yt_analysis/yt/commits/7f47f6c6f96f/
Changeset:   7f47f6c6f96f
Branch:      yt
User:        brittonsmith
Date:        2014-10-23 23:37:32+00:00
Summary:     Changing names to match convention.
Affected #:  4 files

diff -r 8ba544dc06bf2ebd5c1d31cbedf16960c200c08e -r 7f47f6c6f96f4e4f82b1519d4ce788eab3f966f1 yt/frontends/owls/tests/test_outputs.py
--- /dev/null
+++ b/yt/frontends/owls/tests/test_outputs.py
@@ -0,0 +1,60 @@
+"""
+OWLS tests using the OWLS HDF5-Gadget dataset
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.testing import *
+from yt.utilities.answer_testing.framework import \
+    requires_ds, \
+    small_patch_amr, \
+    big_patch_amr, \
+    data_dir_load, \
+    PixelizedProjectionValuesTest, \
+    FieldValuesTest, \
+    create_obj
+from yt.frontends.owls.api import OWLSDataset
+
+_fields = (("deposit", "all_density"), ("deposit", "all_count"),
+           ("deposit", "PartType0_density"),
+           ("deposit", "PartType4_density"))
+
+os33 = "snapshot_033/snap_033.0.hdf5"
+@requires_ds(os33)
+def test_snapshot_033():
+    ds = data_dir_load(os33)
+    yield assert_equal, str(ds), "snap_033"
+    dso = [ None, ("sphere", ("c", (0.1, 'unitary')))]
+    dd = ds.all_data()
+    yield assert_equal, dd["particle_position"].shape[0], 2*(128*128*128)
+    yield assert_equal, dd["particle_position"].shape[1], 3
+    tot = sum(dd[ptype,"particle_position"].shape[0]
+              for ptype in ds.particle_types if ptype != "all")
+    yield assert_equal, tot, (2*128*128*128)
+    for dobj_name in dso:
+        for field in _fields:
+            for axis in [0, 1, 2]:
+                for weight_field in [None, "density"]:
+                    yield PixelizedProjectionValuesTest(
+                        os33, axis, field, weight_field,
+                        dobj_name)
+            yield FieldValuesTest(os33, field, dobj_name)
+        dobj = create_obj(ds, dobj_name)
+        s1 = dobj["ones"].sum()
+        s2 = sum(mask.sum() for block, mask in dobj.blocks)
+        yield assert_equal, s1, s2
+
+
+@requires_file(os33)
+def test_OWLSDataset():
+    assert isinstance(data_dir_load(os33), OWLSDataset)

diff -r 8ba544dc06bf2ebd5c1d31cbedf16960c200c08e -r 7f47f6c6f96f4e4f82b1519d4ce788eab3f966f1 yt/frontends/owls/tests/test_owls.py
--- a/yt/frontends/owls/tests/test_owls.py
+++ /dev/null
@@ -1,60 +0,0 @@
-"""
-Tipsy tests using the OWLS HDF5-Gadget dataset
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from yt.testing import *
-from yt.utilities.answer_testing.framework import \
-    requires_ds, \
-    small_patch_amr, \
-    big_patch_amr, \
-    data_dir_load, \
-    PixelizedProjectionValuesTest, \
-    FieldValuesTest, \
-    create_obj
-from yt.frontends.owls.api import OWLSDataset
-
-_fields = (("deposit", "all_density"), ("deposit", "all_count"),
-           ("deposit", "PartType0_density"),
-           ("deposit", "PartType4_density"))
-
-os33 = "snapshot_033/snap_033.0.hdf5"
-@requires_ds(os33)
-def test_snapshot_033():
-    ds = data_dir_load(os33)
-    yield assert_equal, str(ds), "snap_033"
-    dso = [ None, ("sphere", ("c", (0.1, 'unitary')))]
-    dd = ds.all_data()
-    yield assert_equal, dd["particle_position"].shape[0], 2*(128*128*128)
-    yield assert_equal, dd["particle_position"].shape[1], 3
-    tot = sum(dd[ptype,"particle_position"].shape[0]
-              for ptype in ds.particle_types if ptype != "all")
-    yield assert_equal, tot, (2*128*128*128)
-    for dobj_name in dso:
-        for field in _fields:
-            for axis in [0, 1, 2]:
-                for weight_field in [None, "density"]:
-                    yield PixelizedProjectionValuesTest(
-                        os33, axis, field, weight_field,
-                        dobj_name)
-            yield FieldValuesTest(os33, field, dobj_name)
-        dobj = create_obj(ds, dobj_name)
-        s1 = dobj["ones"].sum()
-        s2 = sum(mask.sum() for block, mask in dobj.blocks)
-        yield assert_equal, s1, s2
-
-
-@requires_file(os33)
-def test_OWLSDataset():
-    assert isinstance(data_dir_load(os33), OWLSDataset)

diff -r 8ba544dc06bf2ebd5c1d31cbedf16960c200c08e -r 7f47f6c6f96f4e4f82b1519d4ce788eab3f966f1 yt/frontends/tipsy/tests/test_outputs.py
--- /dev/null
+++ b/yt/frontends/tipsy/tests/test_outputs.py
@@ -0,0 +1,99 @@
+"""
+Tipsy tests using the AGORA dataset
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.testing import *
+from yt.utilities.answer_testing.framework import \
+    requires_ds, \
+    small_patch_amr, \
+    big_patch_amr, \
+    data_dir_load, \
+    PixelizedProjectionValuesTest, \
+    FieldValuesTest, \
+    create_obj
+from yt.frontends.tipsy.api import TipsyDataset
+
+_fields = (("deposit", "all_density"),
+           ("deposit", "all_count"),
+           ("deposit", "DarkMatter_density"),
+)
+
+pkdgrav = "halo1e11_run1.00400/halo1e11_run1.00400"
+@requires_ds(pkdgrav, file_check = True)
+def test_pkdgrav():
+    cosmology_parameters = dict(current_redshift = 0.0,
+                                omega_lambda = 0.728,
+                                omega_matter = 0.272,
+                                hubble_constant = 0.702)
+    kwargs = dict(field_dtypes = {"Coordinates": "d"},
+                  cosmology_parameters = cosmology_parameters,
+                  unit_base = {'length': (1.0/60.0, "Mpccm/h")},
+                  n_ref = 64)
+    ds = data_dir_load(pkdgrav, TipsyDataset, (), kwargs)
+    yield assert_equal, str(ds), "halo1e11_run1.00400"
+    dso = [ None, ("sphere", ("c", (0.3, 'unitary')))]
+    dd = ds.all_data()
+    yield assert_equal, dd["Coordinates"].shape, (26847360, 3)
+    tot = sum(dd[ptype,"Coordinates"].shape[0]
+              for ptype in ds.particle_types if ptype != "all")
+    yield assert_equal, tot, 26847360
+    for dobj_name in dso:
+        for field in _fields:
+            for axis in [0, 1, 2]:
+                for weight_field in [None, "density"]:
+                    yield PixelizedProjectionValuesTest(
+                        ds, axis, field, weight_field,
+                        dobj_name)
+            yield FieldValuesTest(ds, field, dobj_name)
+        dobj = create_obj(ds, dobj_name)
+        s1 = dobj["ones"].sum()
+        s2 = sum(mask.sum() for block, mask in dobj.blocks)
+        yield assert_equal, s1, s2
+
+gasoline = "agora_1e11.00400/agora_1e11.00400"
+@requires_ds(gasoline, file_check = True)
+def test_gasoline():
+    cosmology_parameters = dict(current_redshift = 0.0,
+                                omega_lambda = 0.728,
+                                omega_matter = 0.272,
+                                hubble_constant = 0.702)
+    kwargs = dict(cosmology_parameters = cosmology_parameters,
+                  unit_base = {'length': (1.0/60.0, "Mpccm/h")},
+                  n_ref = 64)
+    ds = data_dir_load(gasoline, TipsyDataset, (), kwargs)
+    yield assert_equal, str(ds), "agora_1e11.00400"
+    dso = [ None, ("sphere", ("c", (0.3, 'unitary')))]
+    dd = ds.all_data()
+    yield assert_equal, dd["Coordinates"].shape, (10550576, 3)
+    tot = sum(dd[ptype,"Coordinates"].shape[0]
+              for ptype in ds.particle_types if ptype != "all")
+    yield assert_equal, tot, 10550576
+    for dobj_name in dso:
+        for field in _fields:
+            for axis in [0, 1, 2]:
+                for weight_field in [None, "density"]:
+                    yield PixelizedProjectionValuesTest(
+                        ds, axis, field, weight_field,
+                        dobj_name)
+            yield FieldValuesTest(ds, field, dobj_name)
+        dobj = create_obj(ds, dobj_name)
+        s1 = dobj["ones"].sum()
+        s2 = sum(mask.sum() for block, mask in dobj.blocks)
+        yield assert_equal, s1, s2
+
+
+@requires_file(pkdgrav)
+def test_TipsyDataset():
+    assert isinstance(data_dir_load(pkdgrav), TipsyDataset)

diff -r 8ba544dc06bf2ebd5c1d31cbedf16960c200c08e -r 7f47f6c6f96f4e4f82b1519d4ce788eab3f966f1 yt/frontends/tipsy/tests/test_tipsy.py
--- a/yt/frontends/tipsy/tests/test_tipsy.py
+++ /dev/null
@@ -1,99 +0,0 @@
-"""
-Tipsy tests using the AGORA dataset
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from yt.testing import *
-from yt.utilities.answer_testing.framework import \
-    requires_ds, \
-    small_patch_amr, \
-    big_patch_amr, \
-    data_dir_load, \
-    PixelizedProjectionValuesTest, \
-    FieldValuesTest, \
-    create_obj
-from yt.frontends.tipsy.api import TipsyDataset
-
-_fields = (("deposit", "all_density"),
-           ("deposit", "all_count"),
-           ("deposit", "DarkMatter_density"),
-)
-
-pkdgrav = "halo1e11_run1.00400/halo1e11_run1.00400"
-@requires_ds(pkdgrav, file_check = True)
-def test_pkdgrav():
-    cosmology_parameters = dict(current_redshift = 0.0,
-                                omega_lambda = 0.728,
-                                omega_matter = 0.272,
-                                hubble_constant = 0.702)
-    kwargs = dict(field_dtypes = {"Coordinates": "d"},
-                  cosmology_parameters = cosmology_parameters,
-                  unit_base = {'length': (1.0/60.0, "Mpccm/h")},
-                  n_ref = 64)
-    ds = data_dir_load(pkdgrav, TipsyDataset, (), kwargs)
-    yield assert_equal, str(ds), "halo1e11_run1.00400"
-    dso = [ None, ("sphere", ("c", (0.3, 'unitary')))]
-    dd = ds.all_data()
-    yield assert_equal, dd["Coordinates"].shape, (26847360, 3)
-    tot = sum(dd[ptype,"Coordinates"].shape[0]
-              for ptype in ds.particle_types if ptype != "all")
-    yield assert_equal, tot, 26847360
-    for dobj_name in dso:
-        for field in _fields:
-            for axis in [0, 1, 2]:
-                for weight_field in [None, "density"]:
-                    yield PixelizedProjectionValuesTest(
-                        ds, axis, field, weight_field,
-                        dobj_name)
-            yield FieldValuesTest(ds, field, dobj_name)
-        dobj = create_obj(ds, dobj_name)
-        s1 = dobj["ones"].sum()
-        s2 = sum(mask.sum() for block, mask in dobj.blocks)
-        yield assert_equal, s1, s2
-
-gasoline = "agora_1e11.00400/agora_1e11.00400"
-@requires_ds(gasoline, file_check = True)
-def test_gasoline():
-    cosmology_parameters = dict(current_redshift = 0.0,
-                                omega_lambda = 0.728,
-                                omega_matter = 0.272,
-                                hubble_constant = 0.702)
-    kwargs = dict(cosmology_parameters = cosmology_parameters,
-                  unit_base = {'length': (1.0/60.0, "Mpccm/h")},
-                  n_ref = 64)
-    ds = data_dir_load(gasoline, TipsyDataset, (), kwargs)
-    yield assert_equal, str(ds), "agora_1e11.00400"
-    dso = [ None, ("sphere", ("c", (0.3, 'unitary')))]
-    dd = ds.all_data()
-    yield assert_equal, dd["Coordinates"].shape, (10550576, 3)
-    tot = sum(dd[ptype,"Coordinates"].shape[0]
-              for ptype in ds.particle_types if ptype != "all")
-    yield assert_equal, tot, 10550576
-    for dobj_name in dso:
-        for field in _fields:
-            for axis in [0, 1, 2]:
-                for weight_field in [None, "density"]:
-                    yield PixelizedProjectionValuesTest(
-                        ds, axis, field, weight_field,
-                        dobj_name)
-            yield FieldValuesTest(ds, field, dobj_name)
-        dobj = create_obj(ds, dobj_name)
-        s1 = dobj["ones"].sum()
-        s2 = sum(mask.sum() for block, mask in dobj.blocks)
-        yield assert_equal, s1, s2
-
-
-@requires_file(pkdgrav)
-def test_TipsyDataset():
-    assert isinstance(data_dir_load(pkdgrav), TipsyDataset)

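The renamed modules keep the nose generator-test style used throughout
yt's answer testing: a test function yields (callable, args...) tuples
and nose runs each one as a separate case. A minimal self-contained
sketch of that protocol:

    from numpy.testing import assert_equal

    def test_pairs():
        # nose invokes each yielded tuple as callable(*args),
        # reporting one pass/fail per yield.
        for a, b in [(1, 1), (2, 2)]:
            yield assert_equal, a, b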

https://bitbucket.org/yt_analysis/yt/commits/a4dd22f8b07e/
Changeset:   a4dd22f8b07e
Branch:      yt
User:        brittonsmith
Date:        2014-10-27 22:38:06+00:00
Summary:     Marking some tests as requiring big data.
Affected #:  2 files

diff -r 7f47f6c6f96f4e4f82b1519d4ce788eab3f966f1 -r a4dd22f8b07eb80bfe52816e400b42164f9d6c2a yt/frontends/owls/tests/test_outputs.py
--- a/yt/frontends/owls/tests/test_outputs.py
+++ b/yt/frontends/owls/tests/test_outputs.py
@@ -30,7 +30,7 @@
            ("deposit", "PartType4_density"))
 
 os33 = "snapshot_033/snap_033.0.hdf5"
-@requires_ds(os33)
+@requires_ds(os33, big_data=True)
 def test_snapshot_033():
     ds = data_dir_load(os33)
     yield assert_equal, str(ds), "snap_033"

diff -r 7f47f6c6f96f4e4f82b1519d4ce788eab3f966f1 -r a4dd22f8b07eb80bfe52816e400b42164f9d6c2a yt/frontends/tipsy/tests/test_outputs.py
--- a/yt/frontends/tipsy/tests/test_outputs.py
+++ b/yt/frontends/tipsy/tests/test_outputs.py
@@ -31,7 +31,7 @@
 )
 
 pkdgrav = "halo1e11_run1.00400/halo1e11_run1.00400"
-@requires_ds(pkdgrav, file_check = True)
+@requires_ds(pkdgrav, big_data = True, file_check = True)
 def test_pkdgrav():
     cosmology_parameters = dict(current_redshift = 0.0,
                                 omega_lambda = 0.728,
@@ -63,7 +63,7 @@
         yield assert_equal, s1, s2
 
 gasoline = "agora_1e11.00400/agora_1e11.00400"
-@requires_ds(gasoline, file_check = True)
+@requires_ds(gasoline, big_data = True, file_check = True)
 def test_gasoline():
     cosmology_parameters = dict(current_redshift = 0.0,
                                 omega_lambda = 0.728,


https://bitbucket.org/yt_analysis/yt/commits/8551e1a70223/
Changeset:   8551e1a70223
Branch:      yt
User:        brittonsmith
Date:        2014-10-28 18:25:03+00:00
Summary:     Porting over some of John Zuhone's changes by hand.
Affected #:  3 files

diff -r a4dd22f8b07eb80bfe52816e400b42164f9d6c2a -r 8551e1a70223633c9f83bcade6f8be5b71e03da2 yt/frontends/halo_catalog/data_structures.py
--- a/yt/frontends/halo_catalog/data_structures.py
+++ b/yt/frontends/halo_catalog/data_structures.py
@@ -52,10 +52,11 @@
     _suffix = ".h5"
 
     def __init__(self, filename, dataset_type="halocatalog_hdf5",
-                 n_ref = 16, over_refine_factor = 1):
+                 n_ref = 16, over_refine_factor = 1, units_override=None):
         self.n_ref = n_ref
         self.over_refine_factor = over_refine_factor
-        super(HaloCatalogDataset, self).__init__(filename, dataset_type)
+        super(HaloCatalogDataset, self).__init__(filename, dataset_type,
+                                                 units_override=units_override)
 
     def _parse_parameter_file(self):
         with h5py.File(self.parameter_filename, "r") as f:

diff -r a4dd22f8b07eb80bfe52816e400b42164f9d6c2a -r 8551e1a70223633c9f83bcade6f8be5b71e03da2 yt/frontends/owls_subfind/data_structures.py
--- a/yt/frontends/owls_subfind/data_structures.py
+++ b/yt/frontends/owls_subfind/data_structures.py
@@ -113,10 +113,11 @@
     _suffix = ".hdf5"
 
     def __init__(self, filename, dataset_type="subfind_hdf5",
-                 n_ref = 16, over_refine_factor = 1):
+                 n_ref = 16, over_refine_factor = 1, units_override=None):
         self.n_ref = n_ref
         self.over_refine_factor = over_refine_factor
-        super(OWLSSubfindDataset, self).__init__(filename, dataset_type)
+        super(OWLSSubfindDataset, self).__init__(filename, dataset_type,
+                                                 units_override=units_override)
 
     def _parse_parameter_file(self):
         handle = h5py.File(self.parameter_filename, mode="r")

diff -r a4dd22f8b07eb80bfe52816e400b42164f9d6c2a -r 8551e1a70223633c9f83bcade6f8be5b71e03da2 yt/frontends/rockstar/data_structures.py
--- a/yt/frontends/rockstar/data_structures.py
+++ b/yt/frontends/rockstar/data_structures.py
@@ -56,10 +56,12 @@
     _suffix = ".bin"
 
     def __init__(self, filename, dataset_type="rockstar_binary",
-                 n_ref = 16, over_refine_factor = 1):
+                 n_ref = 16, over_refine_factor = 1,
+                 units_override=None):
         self.n_ref = n_ref
         self.over_refine_factor = over_refine_factor
-        super(RockstarDataset, self).__init__(filename, dataset_type)
+        super(RockstarDataset, self).__init__(filename, dataset_type,
+                                              units_override=units_override)
 
     def _parse_parameter_file(self):
         with open(self.parameter_filename, "rb") as f:

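With these signatures the halo datasets accept units_override through
load; a usage sketch (the catalog file name here is hypothetical):

    import yt

    ds = yt.load("halo_catalog_0050.0.h5",
                 units_override={"length_unit": (1.0, "Mpc")})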

https://bitbucket.org/yt_analysis/yt/commits/458c362aa2bc/
Changeset:   458c362aa2bc
Branch:      yt
User:        brittonsmith
Date:        2014-10-28 18:31:10+00:00
Summary:     Adding some more of John Zuhone's changes by hand to ease merging.
Affected #:  2 files

diff -r 8551e1a70223633c9f83bcade6f8be5b71e03da2 -r 458c362aa2bca3ae1975a95f675a209c4899dd77 yt/frontends/gadget/data_structures.py
--- a/yt/frontends/gadget/data_structures.py
+++ b/yt/frontends/gadget/data_structures.py
@@ -76,7 +76,8 @@
                  bounding_box = None,
                  header_spec = "default",
                  field_spec = "default",
-                 ptype_spec = "default"):
+                 ptype_spec = "default",
+                 units_override=None):
         if self._instantiated: return
         self._header_spec = self._setup_binary_spec(
             header_spec, gadget_header_specs)
@@ -100,6 +101,9 @@
             self.domain_right_edge = bbox[:,1]
         else:
             self.domain_left_edge = self.domain_right_edge = None
+        if units_override is not None:
+            raise RuntimeError("units_override is not supported for GadgetDataset. "+
+                               "Use unit_base instead.")
         super(GadgetDataset, self).__init__(filename, dataset_type)
 
     def _setup_binary_spec(self, spec, spec_dict):
@@ -248,9 +252,13 @@
     def __init__(self, filename, dataset_type="gadget_hdf5", 
                  unit_base = None, n_ref=64,
                  over_refine_factor=1,
-                 bounding_box = None):
+                 bounding_box = None,
+                 units_override=None):
         self.storage_filename = None
         filename = os.path.abspath(filename)
+        if units_override is not None:
+            raise RuntimeError("units_override is not supported for GadgetHDF5Dataset. "+
+                               "Use unit_base instead.")
         super(GadgetHDF5Dataset, self).__init__(
             filename, dataset_type, unit_base=unit_base, n_ref=n_ref,
             over_refine_factor=over_refine_factor,

diff -r 8551e1a70223633c9f83bcade6f8be5b71e03da2 -r 458c362aa2bca3ae1975a95f675a209c4899dd77 yt/frontends/tipsy/data_structures.py
--- a/yt/frontends/tipsy/data_structures.py
+++ b/yt/frontends/tipsy/data_structures.py
@@ -66,7 +66,8 @@
                  unit_base=None,
                  parameter_file=None,
                  cosmology_parameters=None,
-                 n_ref=64, over_refine_factor=1):
+                 n_ref=64, over_refine_factor=1,
+                 units_override=None):
         self.n_ref = n_ref
         self.over_refine_factor = over_refine_factor
         if field_dtypes is None:
@@ -95,6 +96,9 @@
             parameter_file = os.path.abspath(parameter_file)
         self._param_file = parameter_file
         filename = os.path.abspath(filename)
+        if units_override is not None:
+            raise RuntimeError("units_override is not supported for TipsyDataset. "+
+                               "Use unit_base instead.")
         super(TipsyDataset, self).__init__(filename, dataset_type)
 
     def __repr__(self):

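For the particle frontends above, units_override is rejected outright and
unit_base remains the supported mechanism; a sketch (the snapshot name is
hypothetical):

    import yt

    # Raises RuntimeError per the guards added above:
    #   yt.load("snapshot_010", units_override={"length_unit": (1.0, "kpc")})

    # Supported for Gadget/Tipsy instead:
    ds = yt.load("snapshot_010", unit_base={"length": (1.0, "kpc")})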

https://bitbucket.org/yt_analysis/yt/commits/4676d04cfba0/
Changeset:   4676d04cfba0
Branch:      yt
User:        brittonsmith
Date:        2014-10-28 18:41:53+00:00
Summary:     Merging.
Affected #:  45 files

diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f doc/source/analyzing/units/3)_Comoving_units_and_code_units.ipynb
--- a/doc/source/analyzing/units/3)_Comoving_units_and_code_units.ipynb
+++ b/doc/source/analyzing/units/3)_Comoving_units_and_code_units.ipynb
@@ -1,7 +1,7 @@
 {
  "metadata": {
   "name": "",
-  "signature": "sha256:8ba193cc3867e2185133bbf3952bd5834e6c63993208635c71cf55fa6f27b491"
+  "signature": "sha256:67eb4b2a3d1017bac09209ebc939e8c1fe154660fa15f76862019dfc8652ec32"
  },
  "nbformat": 3,
  "nbformat_minor": 0,
@@ -305,9 +305,95 @@
      "language": "python",
      "metadata": {},
      "outputs": []
+    },
+    {
+     "cell_type": "heading",
+     "level": 3,
+     "metadata": {},
+     "source": [
+      "Overriding Code Unit Definitions"
+     ]
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "On occasion, you might have a dataset for a supported frontend that does not have the conversions to code units accessible (for example, Athena data) or you may want to change them outright. `yt` provides a mechanism so that one may provide their own code unit definitions to `load`, which override the default rules for a given frontend for defining code units. This is provided through the `units_override` dictionary. We'll use an example of an Athena dataset. First, a call to `load` without `units_override`:"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "ds1 = yt.load(\"MHDSloshing/virgo_low_res.0054.vtk\")\n",
+      "print ds1.length_unit\n",
+      "print ds1.mass_unit\n",
+      "print ds1.time_unit\n",
+      "sp1 = ds1.sphere(\"c\",(0.1,\"unitary\"))\n",
+      "print sp1[\"density\"]"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "This is a galaxy cluster dataset, so it is not likely that the units of density are correct. We happen to know that the unit definitions are different, so we can override the units:"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "units_override = {\"length_unit\":(1.0,\"Mpc\"),\n",
+      "                  \"time_unit\":(1.0,\"Myr\"),\n",
+      "                  \"mass_unit\":(1.0e14,\"Msun\")}"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "`units_override` can take the following keys:\n",
+      "\n",
+      "* `length_unit`\n",
+      "* `time_unit`\n",
+      "* `mass_unit`\n",
+      "* `magnetic_unit`\n",
+      "* `temperature_unit`\n",
+      "\n",
+      "and the associated values can be (value, unit) tuples, `YTQuantities`, or floats (in the latter case they are assumed to have the corresponding cgs unit). "
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "ds2 = yt.load(\"MHDSloshing/virgo_low_res.0054.vtk\", units_override=units_override)\n",
+      "print ds2.length_unit\n",
+      "print ds2.mass_unit\n",
+      "print ds2.time_unit\n",
+      "sp2 = ds2.sphere(\"c\",(0.1,\"unitary\"))\n",
+      "print sp2[\"density\"]"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "This option should be used very carefully, and *only* if you know that the dataset does not provide units or that the unit definitions generated are incorrect for some reason. "
+     ]
     }
    ],
    "metadata": {}
   }
  ]
-}
+}
\ No newline at end of file

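The notebook text above lists the three value forms units_override
accepts; a compact sketch of all of them (YTQuantity lives in
yt.units.yt_array in this version; 1.989e47 g is 1.0e14 Msun in cgs):

    from yt.units.yt_array import YTQuantity

    units_override = {
        "length_unit": (1.0, "Mpc"),           # (value, unit) tuple
        "time_unit": YTQuantity(1.0, "Myr"),   # YTQuantity
        "mass_unit": 1.989e47,                 # bare float, assumed cgs (g)
    }
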
diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f doc/source/examining/loading_data.rst
--- a/doc/source/examining/loading_data.rst
+++ b/doc/source/examining/loading_data.rst
@@ -113,29 +113,56 @@
 
 yt works in cgs ("Gaussian") units by default, but Athena data is not
 normally stored in these units. If you would like to convert data to
-cgs units, you may supply conversions for length, time, and mass to ``load``:
+cgs units, you may supply conversions for length, time, and mass to ``load`` using
+the ``units_override`` functionality:
 
 .. code-block:: python
 
    import yt
-   ds = yt.load("id0/cluster_merger.0250.vtk",
-                parameters={"length_unit":(1.0,"Mpc"),
-                            "time_unit"(1.0,"Myr"),
-                            "mass_unit":(1.0e14,"Msun")})
+
+   units_override = {"length_unit":(1.0,"Mpc"),
+                     "time_unit":(1.0,"Myr"),
+                     "mass_unit":(1.0e14,"Msun")}
+
+   ds = yt.load("id0/cluster_merger.0250.vtk", units_override=units_override)
 
 This means that the yt fields, e.g. ``("gas","density")``, ``("gas","x-velocity")``,
 ``("gas","magnetic_field_x")``, will be in cgs units, but the Athena fields, e.g.,
 ``("athena","density")``, ``("athena","velocity_x")``, ``("athena","cell_centered_B_x")``, will be
 in code units.
 
+Alternative values for the following simulation parameters may be specified using a ``parameters``
+dict, accepting the following keys:
+
+* ``Gamma``: ratio of specific heats, Type: Float
+* ``geometry``: Geometry type, currently accepts ``"cartesian"`` or ``"cylindrical"``
+* ``periodicity``: Is the domain periodic? Type: Tuple of boolean values corresponding to each dimension
+
+.. code-block:: python
+
+   import yt
+
+   parameters = {"gamma":4./3., "geometry":"cylindrical", "periodicity":(False,False,False)}
+
+   ds = yt.load("relativistic_jet_0000.vtk", parameters=parameters)
+
 .. rubric:: Caveats
 
 * yt primarily works with primitive variables. If the Athena
   dataset contains conservative variables, the yt primitive fields will be generated from the
   conserved variables on disk.
+* Special relativistic datasets may be loaded, but are not fully supported. In particular, the relationships between
+  quantities such as pressure and thermal energy will be incorrect, as it is currently assumed that their relationship
+  is that of an ideal :math:`\gamma`-law equation of state.
 * Domains may be visualized assuming periodicity.
 * Particle list data is currently unsupported.
 
+.. note::
+
+   The old behavior of supplying unit conversions using a ``parameters``
+   dict supplied to ``load`` for Athena datasets is still supported, but is being deprecated in
+   favor of ``units_override``, which provides the same functionality.
+
 .. _loading-orion-data:
 
 BoxLib Data

diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f doc/source/reference/configuration.rst
--- a/doc/source/reference/configuration.rst
+++ b/doc/source/reference/configuration.rst
@@ -40,14 +40,15 @@
 
 .. code-block:: python
 
-   from yt.config import ytcfg
-   ytcfg["yt", "loglevel"] = "1"
+   import yt
+   yt.funcs.mylog.setLevel(1)
 
-   from yt.mods import *
-   ds = load("my_data0001")
+   ds = yt.load("my_data0001")
    ds.print_stats()
 
-This has the same effect as setting ``loglevel = 1`` in the configuration file.
+This has the same effect as setting ``loglevel = 1`` in the configuration
+file. Note that a log level of 1 means that all log messages are printed to
+stdout.  To disable logging, set the log level to 50.
 
 Setting Configuration On the Command Line
 -----------------------------------------
@@ -95,3 +96,5 @@
   quiet.
 * ``stdoutStreamLogging`` (default: ``'False'``): If true, logging is directed
   to stdout rather than stderr
+* ``skip_dataset_cache`` (default: ``'False'``): If true, automatic caching of datasets
+  is turned off.
\ No newline at end of file

diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/config.py
--- a/yt/config.py
+++ b/yt/config.py
@@ -39,6 +39,7 @@
     storeparameterfiles = 'False',
     parameterfilestore = 'parameter_files.csv',
     maximumstoreddatasets = '500',
+    skip_dataset_cache = 'False',
     loadfieldplugins = 'True',
     pluginfilename = 'my_plugins.py',
     parallel_traceback = 'False',
@@ -97,6 +98,8 @@
 class YTConfigParser(ConfigParser.ConfigParser):
     def __setitem__(self, key, val):
         self.set(key[0], key[1], val)
+    def __getitem__(self, key):
+        self.get(key[0], key[1])
 
 if os.path.exists(os.path.expanduser("~/.yt/config")):
     ytcfg = YTConfigParser(ytcfg_defaults)

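Note that the new YTConfigParser.__getitem__ discards the value from
self.get(...), so as committed it always returns None; a corrected
sketch:

    def __getitem__(self, key):
        # Return the looked-up value; the version in the diff above
        # omits this return.
        return self.get(key[0], key[1])
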
diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -134,7 +134,9 @@
             return obj
         apath = os.path.abspath(filename)
         #if not os.path.exists(apath): raise IOError(filename)
-        if apath not in _cached_datasets:
+        if ytcfg.getboolean("yt","skip_dataset_cache"):
+            obj = object.__new__(cls)
+        elif apath not in _cached_datasets:
             obj = object.__new__(cls)
             if obj._skip_cache is False:
                 _cached_datasets[apath] = obj
@@ -142,7 +144,7 @@
             obj = _cached_datasets[apath]
         return obj
 
-    def __init__(self, filename, dataset_type=None, file_style=None):
+    def __init__(self, filename, dataset_type=None, file_style=None, units_override=None):
         """
         Base class for generating new output types.  Principally consists of
         a *filename* and a *dataset_type* which will be passed on to children.
@@ -157,6 +159,9 @@
         self.known_filters = self.known_filters or {}
         self.particle_unions = self.particle_unions or {}
         self.field_units = self.field_units or {}
+        if units_override is None:
+            units_override = {}
+        self.units_override = units_override
 
         # path stuff
         self.parameter_filename = str(filename)
@@ -667,6 +672,8 @@
 
     def set_code_units(self):
         self._set_code_unit_attributes()
+        # here we override units, if overrides have been provided.
+        self._override_code_units()
         self.unit_registry.modify("code_length", self.length_unit)
         self.unit_registry.modify("code_mass", self.mass_unit)
         self.unit_registry.modify("code_time", self.time_unit)
@@ -679,6 +686,24 @@
             self.unit_registry.add("unitary", float(DW.max() * DW.units.cgs_value),
                                    DW.units.dimensions)
 
+    def _override_code_units(self):
+        if len(self.units_override) == 0:
+            return
+        mylog.warning("Overriding code units. This is an experimental and potentially "+
+                      "dangerous option that may yield inconsistent results, and must be used "+
+                      "very carefully, and only if you know what you want from it.")
+        for unit, cgs in [("length", "cm"), ("time", "s"), ("mass", "g"),
+                          ("velocity","cm/s"), ("magnetic","gauss"), ("temperature","K")]:
+            val = self.units_override.get("%s_unit" % unit, None)
+            if val is not None:
+                if isinstance(val, YTQuantity):
+                    val = (val.v, str(val.units))
+                elif not isinstance(val, tuple):
+                    val = (val, cgs)
+                u = getattr(self, "%s_unit" % unit)
+                mylog.info("Overriding %s_unit: %g %s -> %g %s.", unit, u.v, u.units, val[0], val[1])
+                setattr(self, "%s_unit" % unit, self.quan(val[0], val[1]))
+
     _arr = None
     @property
     def arr(self):

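The normalization in _override_code_units reads cleanly in isolation:
tuples pass through, YTQuantity-like values are split into a (value,
unit-string) pair, and bare numbers pick up the cgs default. A standalone
sketch (normalize is a hypothetical name):

    def normalize(val, cgs_default):
        if hasattr(val, "units"):        # YTQuantity-like
            return (float(val), str(val.units))
        if not isinstance(val, tuple):   # bare number -> cgs default
            return (val, cgs_default)
        return val                       # already a (value, unit) tuple

    print(normalize(5.0, "cm"))           # (5.0, 'cm')
    print(normalize((2.0, "Mpc"), "cm"))  # (2.0, 'Mpc')
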
diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/fields/magnetic_field.py
--- a/yt/fields/magnetic_field.py
+++ b/yt/fields/magnetic_field.py
@@ -55,7 +55,7 @@
     def _plasma_beta(field,data):
         """This assumes that your front end has provided Bx, By, Bz in
         units of Gauss. If you use MKS, make sure to write your own
-        PlasmaBeta field to deal with non-unitary \mu_0.
+        plasma_beta field to deal with non-unitary \mu_0.
         """
         return data[ftype,'pressure']/data[ftype,'magnetic_energy']
     registry.add_field((ftype, "plasma_beta"),
@@ -69,6 +69,10 @@
              units="erg / cm**3")
 
     def _magnetic_field_strength(field,data):
+        """This assumes that your front end has provided Bx, By, Bz in
+        units of Gauss. If you use MKS, make sure to write your own
+        magnetic_field_strength field to deal with non-unitary \mu_0.
+        """
         return np.sqrt(8.*np.pi*data[ftype,"magnetic_energy"])
     registry.add_field((ftype,"magnetic_field_strength"),
                        function=_magnetic_field_strength,
@@ -110,3 +114,17 @@
              units="gauss",
              validators=[ValidateParameter("normal")])
 
+    def _alfven_speed(field,data):
+        """This assumes that your front end has provided Bx, By, Bz in
+        units of Gauss. If you use MKS, make sure to write your own
+        alfven_speed field to deal with non-unitary \mu_0.
+        """
+        return data[ftype,'magnetic_field_strength']/np.sqrt(4.*np.pi*data[ftype,'density'])
+    registry.add_field((ftype, "alfven_speed"), function=_alfven_speed,
+                       units="cm/s")
+
+    def _mach_alfven(field,data):
+        return data[ftype,'velocity_magnitude']/data[ftype,'alfven_speed']
+    registry.add_field((ftype, "mach_alfven"), function=_mach_alfven,
+                       units="dimensionless")
+

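For reference, the new fields compute the Gaussian-units Alfven speed and
the corresponding Mach number:

    v_A = \frac{B}{\sqrt{4 \pi \rho}}, \qquad
    \mathcal{M}_A = \frac{|v|}{v_A}
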
diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/frontends/_skeleton/data_structures.py
--- a/yt/frontends/_skeleton/data_structures.py
+++ b/yt/frontends/_skeleton/data_structures.py
@@ -85,9 +85,12 @@
     _index_class = SkeletonHierarchy
     _field_info_class = SkeletonFieldInfo
     
-    def __init__(self, filename, dataset_type='skeleton'):
+    def __init__(self, filename, dataset_type='skeleton',
+                 storage_filename=None,
+                 units_override=None):
         self.fluid_types += ('skeleton',)
-        Dataset.__init__(self, filename, dataset_type)
+        Dataset.__init__(self, filename, dataset_type,
+                         units_override=units_override)
         self.storage_filename = storage_filename
 
     def _set_code_unit_attributes(self):

diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/frontends/art/data_structures.py
--- a/yt/frontends/art/data_structures.py
+++ b/yt/frontends/art/data_structures.py
@@ -166,7 +166,8 @@
                  skip_particles=False, skip_stars=False,
                  limit_level=None, spread_age=True,
                  force_max_level=None, file_particle_header=None,
-                 file_particle_data=None, file_particle_stars=None):
+                 file_particle_data=None, file_particle_stars=None,
+                 units_override=None):
         self.fluid_types += ("art", )
         if fields is None:
             fields = fluid_fields
@@ -186,7 +187,8 @@
         self.spread_age = spread_age
         self.domain_left_edge = np.zeros(3, dtype='float')
         self.domain_right_edge = np.zeros(3, dtype='float')+1.0
-        Dataset.__init__(self, filename, dataset_type)
+        Dataset.__init__(self, filename, dataset_type,
+                         units_override=units_override)
         self.storage_filename = storage_filename
 
     def _find_files(self, file_amr):

diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/frontends/art/tests/test_outputs.py
--- a/yt/frontends/art/tests/test_outputs.py
+++ b/yt/frontends/art/tests/test_outputs.py
@@ -16,7 +16,8 @@
 
 from yt.testing import \
     requires_file, \
-    assert_equal
+    assert_equal, \
+    units_override_check
 from yt.utilities.answer_testing.framework import \
     requires_ds, \
     big_patch_amr, \
@@ -48,3 +49,9 @@
 @requires_file(d9p)
 def test_ARTDataset():
     assert isinstance(data_dir_load(d9p), ARTDataset)
+
+@requires_file(d9p)
+def test_units_override():
+    for test in units_override_check(d9p):
+        yield test
+

diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/frontends/artio/data_structures.py
--- a/yt/frontends/artio/data_structures.py
+++ b/yt/frontends/artio/data_structures.py
@@ -314,7 +314,8 @@
     _field_info_class = ARTIOFieldInfo
 
     def __init__(self, filename, dataset_type='artio',
-                 storage_filename=None, max_range = 1024):
+                 storage_filename=None, max_range = 1024,
+                 units_override=None):
         if self._handle is not None:
             return
         self.max_range = max_range
@@ -324,7 +325,8 @@
         self._handle = artio_fileset(self._fileset_prefix)
         self.artio_parameters = self._handle.parameters
         # Here we want to initiate a traceback, if the reader is not built.
-        Dataset.__init__(self, filename, dataset_type)
+        Dataset.__init__(self, filename, dataset_type,
+                         units_override=units_override)
         self.storage_filename = storage_filename
 
     def _set_code_unit_attributes(self):

diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/frontends/artio/tests/test_outputs.py
--- a/yt/frontends/artio/tests/test_outputs.py
+++ b/yt/frontends/artio/tests/test_outputs.py
@@ -50,3 +50,8 @@
 @requires_file(sizmbhloz)
 def test_ARTIODataset():
     assert isinstance(data_dir_load(sizmbhloz), ARTIODataset)
+
+@requires_file(sizmbhloz)
+def test_units_override():
+    for test in units_override_check(sizmbhloz):
+        yield test

diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/frontends/athena/data_structures.py
--- a/yt/frontends/athena/data_structures.py
+++ b/yt/frontends/athena/data_structures.py
@@ -285,7 +285,8 @@
 
         # Need to reset the units in the dataset based on the correct
         # domain left/right/dimensions.
-        self.dataset._set_code_unit_attributes()
+        # DEV: Is this really necessary?
+        #self.dataset._set_code_unit_attributes()
 
         if self.dataset.dimensionality <= 2 :
             self.dataset.domain_dimensions[2] = np.int(1)
@@ -352,12 +353,24 @@
     _dataset_type = "athena"
 
     def __init__(self, filename, dataset_type='athena',
-                 storage_filename=None, parameters=None):
+                 storage_filename=None, parameters=None,
+                 units_override=None):
         self.fluid_types += ("athena",)
         if parameters is None:
             parameters = {}
         self.specified_parameters = parameters
-        Dataset.__init__(self, filename, dataset_type)
+        if units_override is None:
+            units_override = {}
+        # This is for backwards-compatibility
+        already_warned = False
+        for k,v in self.specified_parameters.items():
+            if k.endswith("_unit") and k not in units_override:
+                if not already_warned:
+                    mylog.warning("Supplying unit conversions from the parameters dict is deprecated, "+
+                                  "and will be removed in a future release. Use units_override instead.")
+                    already_warned = True
+                units_override[k] = self.specified_parameters.pop(k)
+        Dataset.__init__(self, filename, dataset_type, units_override=units_override)
         self.filename = filename
         if storage_filename is None:
             storage_filename = '%s.yt' % filename.split('/')[-1]
@@ -372,23 +385,21 @@
         """
         Generates the conversion to various physical _units based on the parameter file
         """
+        if "length_unit" not in self.units_override:
+            self.no_cgs_equiv_length = True
         for unit, cgs in [("length", "cm"), ("time", "s"), ("mass", "g")]:
-            val = self.specified_parameters.get("%s_unit" % unit, None)
-            if val is None:
-                if unit == "length": self.no_cgs_equiv_length = True
-                mylog.warning("No %s conversion to cgs provided.  " +
-                              "Assuming 1.0 = 1.0 %s", unit, cgs)
-                val = 1.0
-            if not isinstance(val, tuple):
-                val = (val, cgs)
-            setattr(self, "%s_unit" % unit, self.quan(val[0], val[1]))
-        self.velocity_unit = self.length_unit/self.time_unit
-        self.magnetic_unit = np.sqrt(4*np.pi * self.mass_unit /
-                                  (self.time_unit**2 * self.length_unit))
-        self.magnetic_unit.convert_to_units("gauss")
+            # We set these to cgs for now, but they may be overridden later.
+            mylog.warning("Assuming 1.0 = 1.0 %s", cgs)
+            setattr(self, "%s_unit" % unit, self.quan(1.0, cgs))
 
     def set_code_units(self):
         super(AthenaDataset, self).set_code_units()
+        mag_unit = getattr(self, "magnetic_unit", None)
+        if mag_unit is None:
+            self.magnetic_unit = np.sqrt(4*np.pi * self.mass_unit /
+                                         (self.time_unit**2 * self.length_unit))
+        self.magnetic_unit.convert_to_units("gauss")
+
         self.unit_registry.modify("code_magnetic", self.magnetic_unit)
 
     def _parse_parameter_file(self):

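The backwards-compatibility shim above lifts *_unit keys out of the
parameters dict, so existing scripts keep working while warning once; a
sketch using the path from the docs diff:

    import yt

    # Deprecated style (warns once, per the shim above):
    #   ds = yt.load("id0/cluster_merger.0250.vtk",
    #                parameters={"length_unit": (1.0, "Mpc")})

    # Preferred style:
    ds = yt.load("id0/cluster_merger.0250.vtk",
                 units_override={"length_unit": (1.0, "Mpc")})
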
diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/frontends/athena/tests/test_outputs.py
--- a/yt/frontends/athena/tests/test_outputs.py
+++ b/yt/frontends/athena/tests/test_outputs.py
@@ -43,16 +43,16 @@
         test_blast.__name__ = test.description
         yield test
 
-parameters_stripping = {"time_unit":3.086e14,
-                        "length_unit":8.0236e22,
-                        "mass_unit":9.999e-30*8.0236e22**3}
+uo_stripping = {"time_unit":3.086e14,
+                "length_unit":8.0236e22,
+                "mass_unit":9.999e-30*8.0236e22**3}
 
 _fields_stripping = ("temperature", "density", "specific_scalar[0]")
 
 stripping = "RamPressureStripping/id0/rps.0062.vtk"
 @requires_ds(stripping, big_data=True)
 def test_stripping():
-    ds = data_dir_load(stripping, kwargs={"parameters":parameters_stripping})
+    ds = data_dir_load(stripping, kwargs={"units_override":uo_stripping})
     yield assert_equal, str(ds), "rps.0062"
     for test in small_patch_amr(stripping, _fields_stripping):
         test_stripping.__name__ = test.description

diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/frontends/boxlib/data_structures.py
--- a/yt/frontends/boxlib/data_structures.py
+++ b/yt/frontends/boxlib/data_structures.py
@@ -366,7 +366,8 @@
                  cparam_filename="inputs",
                  fparam_filename="probin",
                  dataset_type='boxlib_native',
-                 storage_filename=None):
+                 storage_filename=None,
+                 units_override=None):
         """
         The paramfile is usually called "inputs"
         and there may be a fortran inputs file usually called "probin"
@@ -380,7 +381,8 @@
         self.fparam_filename = self._localize_check(fparam_filename)
         self.storage_filename = storage_filename
 
-        Dataset.__init__(self, output_dir, dataset_type)
+        Dataset.__init__(self, output_dir, dataset_type,
+                         units_override=units_override)
 
         # These are still used in a few places.
         if "HydroMethod" not in self.parameters.keys():
@@ -721,10 +723,12 @@
                  cparam_filename="inputs",
                  fparam_filename="probin",
                  dataset_type='orion_native',
-                 storage_filename=None):
+                 storage_filename=None,
+                 units_override=None):
 
         BoxlibDataset.__init__(self, output_dir,
-                               cparam_filename, fparam_filename, dataset_type)
+                               cparam_filename, fparam_filename,
+                               dataset_type, units_override=units_override)
 
     @classmethod
     def _is_valid(cls, *args, **kwargs):

diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/frontends/boxlib/tests/test_orion.py
--- a/yt/frontends/boxlib/tests/test_orion.py
+++ b/yt/frontends/boxlib/tests/test_orion.py
@@ -47,3 +47,9 @@
 @requires_file(rt)
 def test_OrionDataset():
     assert isinstance(data_dir_load(rt), OrionDataset)
+
+@requires_file(rt)
+def test_units_override():
+    for test in units_override_check(rt):
+        yield test
+

diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/frontends/chombo/data_structures.py
--- a/yt/frontends/chombo/data_structures.py
+++ b/yt/frontends/chombo/data_structures.py
@@ -250,7 +250,8 @@
     _field_info_class = ChomboFieldInfo
 
     def __init__(self, filename, dataset_type='chombo_hdf5',
-                 storage_filename = None, ini_filename = None):
+                 storage_filename = None, ini_filename = None,
+                 units_override=None):
         self.fluid_types += ("chombo",)
         self._handle = HDF5FileHandler(filename)
         self.dataset_type = dataset_type
@@ -265,7 +266,8 @@
         self.geometry = "cartesian"
         self.ini_filename = ini_filename
         self.fullplotdir = os.path.abspath(filename)
-        Dataset.__init__(self,filename, self.dataset_type)
+        Dataset.__init__(self,filename, self.dataset_type,
+                         units_override=units_override)
         self.storage_filename = storage_filename
         self.cosmological_simulation = False
 
@@ -454,10 +456,12 @@
     _field_info_class = PlutoFieldInfo
 
     def __init__(self, filename, dataset_type='pluto_chombo_native',
-                 storage_filename = None, ini_filename = None):
+                 storage_filename = None, ini_filename = None,
+                 units_override=None):
 
         ChomboDataset.__init__(self, filename, dataset_type, 
-                    storage_filename, ini_filename)
+                               storage_filename, ini_filename,
+                               units_override=units_override)
 
     def _parse_parameter_file(self):
         """
@@ -586,10 +590,12 @@
     _field_info_class = Orion2FieldInfo
 
     def __init__(self, filename, dataset_type='orion_chombo_native',
-                 storage_filename = None, ini_filename = None):
+                 storage_filename = None, ini_filename = None,
+                 units_override=None):
 
         ChomboDataset.__init__(self, filename, dataset_type,
-                    storage_filename, ini_filename)
+                               storage_filename, ini_filename,
+                               units_override=units_override)
 
     def _parse_parameter_file(self):
         """
@@ -672,10 +678,12 @@
     _field_info_class = ChomboPICFieldInfo3D
 
     def __init__(self, filename, dataset_type='chombo_hdf5',
-                 storage_filename=None, ini_filename=None):
+                 storage_filename=None, ini_filename=None,
+                 units_override=None):
 
         ChomboDataset.__init__(self, filename, dataset_type,
-                               storage_filename, ini_filename)
+                               storage_filename, ini_filename,
+                               units_override=units_override)
 
         if self.dimensionality == 1:
             self._field_info_class = ChomboPICFieldInfo1D

diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/frontends/chombo/tests/test_outputs.py
--- a/yt/frontends/chombo/tests/test_outputs.py
+++ b/yt/frontends/chombo/tests/test_outputs.py
@@ -15,7 +15,8 @@
 
 from yt.testing import \
     requires_file, \
-    assert_equal
+    assert_equal, \
+    units_override_check
 from yt.utilities.answer_testing.framework import \
     requires_ds, \
     small_patch_amr, \
@@ -80,3 +81,18 @@
 @requires_file(kho)
 def test_PlutoDataset():
     assert isinstance(data_dir_load(kho), PlutoDataset)
+
+@requires_file(zp)
+def test_units_override_zp():
+    for test in units_override_check(zp):
+        yield test
+
+@requires_file(gc)
+def test_units_override_gc():
+    for test in units_override_check(gc):
+        yield test
+
+@requires_file(kho)
+def test_units_override_kho():
+    for test in units_override_check(kho):
+        yield test
\ No newline at end of file

diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/frontends/enzo/data_structures.py
--- a/yt/frontends/enzo/data_structures.py
+++ b/yt/frontends/enzo/data_structures.py
@@ -444,8 +444,7 @@
                 try:
                     gf = self.io._read_field_names(grid)
                 except self.io._read_exception:
-                    mylog.debug("Grid %s is a bit funky?", grid.id)
-                    continue
+                    raise IOError("Grid %s is a bit funky?" % grid.id)
                 mylog.debug("Grid %s has: %s", grid.id, gf)
                 field_list = field_list.union(gf)
             if "AppendActiveParticleType" in self.dataset.parameters:
@@ -665,7 +664,8 @@
                  file_style = None,
                  parameter_override = None,
                  conversion_override = None,
-                 storage_filename = None):
+                 storage_filename = None,
+                 units_override=None):
         """
         This class is a stripped down class that simply reads and parses
         *filename* without looking at the index.  *dataset_type* gets passed
@@ -682,8 +682,8 @@
         if conversion_override is None: conversion_override = {}
         self._conversion_override = conversion_override
         self.storage_filename = storage_filename
-
-        Dataset.__init__(self, filename, dataset_type, file_style=file_style)
+        Dataset.__init__(self, filename, dataset_type, file_style=file_style,
+                         units_override=units_override)
 
     def _setup_1d(self):
         self._index_class = EnzoHierarchy1D
@@ -926,6 +926,8 @@
         magnetic_unit = np.float64(magnetic_unit.in_cgs())
         self.magnetic_unit = self.quan(magnetic_unit, "gauss")
 
+        self._override_code_units()
+
         self.unit_registry.modify("code_magnetic", self.magnetic_unit)
         self.unit_registry.modify("code_length", self.length_unit)
         self.unit_registry.modify("code_mass", self.mass_unit)

diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/frontends/enzo/fields.py
--- a/yt/frontends/enzo/fields.py
+++ b/yt/frontends/enzo/fields.py
@@ -203,11 +203,13 @@
                 units="code_velocity**2")
             # Subtract off B-field energy
             def _sub_b(field, data):
-                return data[te_name] - 0.5*(
-                    data["x-velocity"]**2.0
-                    + data["y-velocity"]**2.0
-                    + data["z-velocity"]**2.0 ) \
-                    - data["MagneticEnergy"]/data["Density"]
+                ret = data[te_name] - 0.5*data["x-velocity"]**2.0
+                if data.ds.dimensionality > 1:
+                    ret -= 0.5*data["y-velocity"]**2.0
+                if data.ds.dimensionality > 2:
+                    ret -= 0.5*data["z-velocity"]**2.0
+                ret -= data["MagneticEnergy"]/data["Density"]
+                return ret
             self.add_field(
                 ("gas", "thermal_energy"),
                 function=_sub_b, units = "erg/g")
@@ -217,10 +219,12 @@
                 units = "code_velocity**2")
             self.alias(("gas", "total_energy"), ("enzo", te_name))
             def _tot_minus_kin(field, data):
-                return data[te_name] - 0.5*(
-                    data["x-velocity"]**2.0
-                    + data["y-velocity"]**2.0
-                    + data["z-velocity"]**2.0 )
+                ret = data[te_name] - 0.5*data["x-velocity"]**2.0
+                if data.ds.dimensionality > 1:
+                    ret -= 0.5*data["y-velocity"]**2.0
+                if data.ds.dimensionality > 2:
+                    ret -= 0.5*data["z-velocity"]**2.0
+                return ret
             self.add_field(
                 ("gas", "thermal_energy"),
                 function = _tot_minus_kin,

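The rewritten closures subtract the kinetic term axis by axis so that 1D and 2D Enzo datasets, which carry no y- or z-velocity fields, no longer fail. The same logic as a standalone sketch (field names as in the diff; data stands for any yt data object):

    def total_minus_kinetic(data, te_name, dimensionality):
        # Remove specific kinetic energy from the total energy field,
        # touching only the velocity components that actually exist.
        ret = data[te_name] - 0.5 * data["x-velocity"]**2.0
        if dimensionality > 1:
            ret -= 0.5 * data["y-velocity"]**2.0
        if dimensionality > 2:
            ret -= 0.5 * data["z-velocity"]**2.0
        return ret
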
diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/frontends/enzo/io.py
--- a/yt/frontends/enzo/io.py
+++ b/yt/frontends/enzo/io.py
@@ -36,7 +36,10 @@
     def _read_field_names(self, grid):
         if grid.filename is None: return []
         f = h5py.File(grid.filename, "r")
-        group = f["/Grid%08i" % grid.id]
+        try:
+            group = f["/Grid%08i" % grid.id]
+        except KeyError:
+            group = f
         fields = []
         add_io = "io" in grid.ds.particle_types
         for name, v in group.iteritems():
@@ -366,6 +369,8 @@
                     #print "Opening (count) %s" % g.filename
                     f = h5py.File(g.filename, "r")
                 gds = f.get("/Grid%08i" % g.id)
+                if gds is None:
+                    gds = f
                 for field in fields:
                     ftype, fname = field
                     ds = np.atleast_3d(gds.get(fname).value.transpose())

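Both hunks cover Enzo HDF5 files whose datasets live at the file root instead of under a /Grid%08i group. A minimal sketch of the lookup-with-fallback, assuming an open h5py file handle:

    import h5py

    def grid_group(f, grid_id):
        # Packed files keep each grid under /GridXXXXXXXX; single-grid
        # files put the datasets at the root, so fall back to the file.
        try:
            return f["/Grid%08i" % grid_id]
        except KeyError:
            return f
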
diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/frontends/enzo/tests/test_outputs.py
--- a/yt/frontends/enzo/tests/test_outputs.py
+++ b/yt/frontends/enzo/tests/test_outputs.py
@@ -91,6 +91,11 @@
     # Now we test our species fields
     yield check_color_conservation(ds)
 
+@requires_file(enzotiny)
+def test_units_override():
+    for test in units_override_check(enzotiny):
+        yield test
+
 @requires_ds(ecp, big_data=True)
 def test_nuclei_density_fields():
     ds = data_dir_load(ecp)

diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/frontends/fits/data_structures.py
--- a/yt/frontends/fits/data_structures.py
+++ b/yt/frontends/fits/data_structures.py
@@ -313,17 +313,18 @@
     _handle = None
 
     def __init__(self, filename,
-                 dataset_type = 'fits',
-                 auxiliary_files = [],
-                 nprocs = None,
-                 storage_filename = None,
-                 nan_mask = None,
-                 spectral_factor = 1.0,
-                 z_axis_decomp = False,
-                 line_database = None,
-                 line_width = None,
-                 suppress_astropy_warnings = True,
-                 parameters = None):
+                 dataset_type='fits',
+                 auxiliary_files=[],
+                 nprocs=None,
+                 storage_filename=None,
+                 nan_mask=None,
+                 spectral_factor=1.0,
+                 z_axis_decomp=False,
+                 line_database=None,
+                 line_width=None,
+                 suppress_astropy_warnings=True,
+                 parameters=None,
+                 units_override=None):
 
         if parameters is None:
             parameters = {}
@@ -434,7 +435,7 @@
 
         self.refine_by = 2
 
-        Dataset.__init__(self, fn, dataset_type)
+        Dataset.__init__(self, fn, dataset_type, units_override=units_override)
         self.storage_filename = storage_filename
 
     def _set_code_unit_attributes(self):

diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/frontends/fits/tests/test_outputs.py
--- a/yt/frontends/fits/tests/test_outputs.py
+++ b/yt/frontends/fits/tests/test_outputs.py
@@ -41,3 +41,9 @@
     for test in small_patch_amr(vf, _fields_vels, input_center="c", input_weight="ones"):
         test_velocity_field.__name__ = test.description
         yield test
+
+@requires_file(vf)
+def test_units_override():
+    for test in units_override_check(vf):
+        yield test
+

diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/frontends/flash/data_structures.py
--- a/yt/frontends/flash/data_structures.py
+++ b/yt/frontends/flash/data_structures.py
@@ -73,7 +73,7 @@
     def _detect_output_fields(self):
         ncomp = self._handle["/unknown names"].shape[0]
         self.field_list = [("flash", s) for s in self._handle["/unknown names"][:].flat]
-        if ("/particle names" in self._particle_handle) :
+        if ("/particle names" in self._particle_handle):
             self.field_list += [("io", "particle_" + s[0].strip()) for s
                                 in self._particle_handle["/particle names"][:]]
     
@@ -113,10 +113,10 @@
         except KeyError:
             self.grid_particle_count[:] = 0.0
         self._particle_indices = np.zeros(self.num_grids + 1, dtype='int64')
-        if self.num_grids > 1 :
+        if self.num_grids > 1:
             np.add.accumulate(self.grid_particle_count.squeeze(),
                               out=self._particle_indices[1:])
-        else :
+        else:
             self._particle_indices[1] = self.grid_particle_count.squeeze()
         # This will become redundant, as _prepare_grid will reset it to its
         # current value.  Note that FLASH uses 1-based indexing for refinement
@@ -191,29 +191,27 @@
     def __init__(self, filename, dataset_type='flash_hdf5',
                  storage_filename = None,
                  particle_filename = None, 
-                 conversion_override = None):
+                 units_override = None):
 
         self.fluid_types += ("flash",)
         if self._handle is not None: return
         self._handle = HDF5FileHandler(filename)
-        if conversion_override is None: conversion_override = {}
-        self._conversion_override = conversion_override
-        
+
         self.particle_filename = particle_filename
 
-        if self.particle_filename is None :
+        if self.particle_filename is None:
             self._particle_handle = self._handle
-        else :
+        else:
             try:
                 self._particle_handle = HDF5FileHandler(self.particle_filename)
-            except :
+            except:
                 raise IOError(self.particle_filename)
         # These should be explicitly obtained from the file, but for now that
         # will wait until a reorganization of the source tree and better
         # generalization.
         self.refine_by = 2
 
-        Dataset.__init__(self, filename, dataset_type)
+        Dataset.__init__(self, filename, dataset_type, units_override=units_override)
         self.storage_filename = storage_filename
 
         self.parameters["HydroMethod"] = 'flash' # always PPM DE
@@ -311,9 +309,9 @@
                     zipover = zip(self._handle[hn][:,'name'],self._handle[hn][:,'value'])
                 for varname, val in zipover:
                     vn = varname.strip()
-                    if hn.startswith("string") :
+                    if hn.startswith("string"):
                         pval = val.strip()
-                    else :
+                    else:
                         pval = val
                     if vn in self.parameters and self.parameters[vn] != pval:
                         mylog.info("{0} {1} overwrites a simulation "
@@ -327,7 +325,7 @@
             nzb = self.parameters["nzb"]
         except KeyError:
             nxb, nyb, nzb = [int(self._handle["/simulation parameters"]['n%sb' % ax])
-                              for ax in 'xyz'] # FLASH2 only!
+                             for ax in 'xyz'] # FLASH2 only!
         
         # Determine dimensionality
         try:
@@ -343,18 +341,18 @@
 
         self.geometry = self.parameters["geometry"]
         # Determine base grid parameters
-        if 'lrefine_min' in self.parameters.keys() : # PARAMESH
+        if 'lrefine_min' in self.parameters.keys(): # PARAMESH
             nblockx = self.parameters["nblockx"]
             nblocky = self.parameters["nblocky"]
             nblockz = self.parameters["nblockz"]
-        else : # Uniform Grid
+        else: # Uniform Grid
             nblockx = self.parameters["iprocs"]
             nblocky = self.parameters["jprocs"]
             nblockz = self.parameters["kprocs"]
 
         # In case the user wasn't careful
-        if self.dimensionality <= 2 : nblockz = 1
-        if self.dimensionality == 1 : nblocky = 1
+        if self.dimensionality <= 2: nblockz = 1
+        if self.dimensionality == 1: nblocky = 1
 
         # Determine domain boundaries
         self.domain_left_edge = np.array(

diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/frontends/flash/tests/test_outputs.py
--- a/yt/frontends/flash/tests/test_outputs.py
+++ b/yt/frontends/flash/tests/test_outputs.py
@@ -47,3 +47,8 @@
 @requires_file(wt)
 def test_FLASHDataset():
     assert isinstance(data_dir_load(wt), FLASHDataset)
+
+@requires_file(sloshing)
+def test_units_override():
+    for test in units_override_check(sloshing):
+        yield test

diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/frontends/gdf/data_structures.py
--- a/yt/frontends/gdf/data_structures.py
+++ b/yt/frontends/gdf/data_structures.py
@@ -172,10 +172,11 @@
     _field_info_class = GDFFieldInfo
 
     def __init__(self, filename, dataset_type='grid_data_format',
-                 storage_filename=None, geometry=None):
+                 storage_filename=None, geometry=None,
+                 units_override=None):
         self.geometry = geometry
         self.fluid_types += ("gdf",)
-        Dataset.__init__(self, filename, dataset_type)
+        Dataset.__init__(self, filename, dataset_type, units_override=units_override)
         self.storage_filename = storage_filename
         self.filename = filename
 

diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/frontends/moab/data_structures.py
--- a/yt/frontends/moab/data_structures.py
+++ b/yt/frontends/moab/data_structures.py
@@ -28,6 +28,7 @@
     io_registry
 from yt.utilities.definitions import \
     mpc_conversion, sec_conversion
+from yt.utilities.file_handler import HDF5FileHandler
 
 from .fields import MoabFieldInfo, PyneFieldInfo
 
@@ -69,12 +70,13 @@
     periodicity = (False, False, False)
 
     def __init__(self, filename, dataset_type='moab_hex8',
-                 storage_filename = None):
+                 storage_filename = None, units_override=None):
         self.fluid_types += ("moab",)
-        Dataset.__init__(self, filename, dataset_type)
+        Dataset.__init__(self, filename, dataset_type,
+                         units_override=units_override)
         self.storage_filename = storage_filename
         self.filename = filename
-        self._handle = h5py.File(self.parameter_filename, "r")
+        self._handle = HDF5FileHandler(filename)
 
     def _set_code_unit_attributes(self):
         # Almost everything is regarded as dimensionless in MOAB, so these will
@@ -147,11 +149,12 @@
     periodicity = (False, False, False)
 
     def __init__(self, pyne_mesh, dataset_type='moab_hex8_pyne',
-                 storage_filename = None):
+                 storage_filename = None, units_override=None):
         self.fluid_types += ("pyne",)
         filename = "pyne_mesh_" + str(id(pyne_mesh))
         self.pyne_mesh = pyne_mesh
-        Dataset.__init__(self, str(filename), dataset_type)
+        Dataset.__init__(self, str(filename), dataset_type,
+                         units_override=units_override)
         self.storage_filename = storage_filename
         self.filename = filename
 

diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/frontends/moab/tests/test_c5.py
--- a/yt/frontends/moab/tests/test_c5.py
+++ b/yt/frontends/moab/tests/test_c5.py
@@ -60,3 +60,8 @@
 @requires_file(c5)
 def test_MoabHex8Dataset():
     assert isinstance(data_dir_load(c5), MoabHex8Dataset)
+
+@requires_file(c5)
+def test_units_override():
+    for test in units_override_check(c5):
+        yield test

diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/frontends/ramses/data_structures.py
--- a/yt/frontends/ramses/data_structures.py
+++ b/yt/frontends/ramses/data_structures.py
@@ -331,7 +331,6 @@
 class RAMSESIndex(OctreeIndex):
 
     def __init__(self, ds, dataset_type='ramses'):
-        self._ds = ds # TODO: Figure out the class composition better!
         self.fluid_field_list = ds._fields_in_file
         self.dataset_type = dataset_type
         self.dataset = weakref.proxy(ds)
@@ -370,12 +369,12 @@
         
 
         # TODO: copy/pasted from DomainFile; needs refactoring!
-        num = os.path.basename(self._ds.parameter_filename).split("."
+        num = os.path.basename(self.dataset.parameter_filename).split("."
                 )[0].split("_")[1]
         testdomain = 1 # Just pick the first domain file to read
         basename = "%s/%%s_%s.out%05i" % (
             os.path.abspath(
-              os.path.dirname(self._ds.parameter_filename)),
+              os.path.dirname(self.dataset.parameter_filename)),
             num, testdomain)
         hydro_fn = basename % "hydro"
         # Do we have a hydro file?
@@ -462,7 +461,8 @@
     gamma = 1.4 # This will get replaced on hydro_fn open
     
     def __init__(self, filename, dataset_type='ramses',
-                 fields = None, storage_filename = None):
+                 fields = None, storage_filename = None,
+                 units_override=None):
         # Here we want to initiate a traceback, if the reader is not built.
         if isinstance(fields, types.StringTypes):
             fields = field_aliases[fields]
@@ -472,7 +472,7 @@
         '''
         self.fluid_types += ("ramses",)
         self._fields_in_file = fields
-        Dataset.__init__(self, filename, dataset_type)
+        Dataset.__init__(self, filename, dataset_type, units_override=units_override)
         self.storage_filename = storage_filename
 
     def __repr__(self):
@@ -552,6 +552,7 @@
         self.omega_matter = rheader["omega_m"]
         self.hubble_constant = rheader["H0"] / 100.0 # This is H100
         self.max_level = rheader['levelmax'] - self.min_level
+        f.close()
 
     @classmethod
     def _is_valid(self, *args, **kwargs):

diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/frontends/ramses/tests/test_outputs.py
--- a/yt/frontends/ramses/tests/test_outputs.py
+++ b/yt/frontends/ramses/tests/test_outputs.py
@@ -49,3 +49,8 @@
 @requires_file(output_00080)
 def test_RAMSESDataset():
     assert isinstance(data_dir_load(output_00080), RAMSESDataset)
+
+@requires_file(output_00080)
+def test_units_override():
+    for test in units_override_check(output_00080):
+        yield test

diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/frontends/sdf/data_structures.py
--- a/yt/frontends/sdf/data_structures.py
+++ b/yt/frontends/sdf/data_structures.py
@@ -77,7 +77,8 @@
                  midx_filename = None,
                  midx_header = None,
                  midx_level = None,
-                 field_map = None):
+                 field_map = None,
+                 units_override=None):
         self.n_ref = n_ref
         self.over_refine_factor = over_refine_factor
         if bounding_box is not None:
@@ -102,7 +103,8 @@
         if filename.startswith("http"):
             prefix += 'http_'
         dataset_type = prefix + 'sdf_particles'
-        super(SDFDataset, self).__init__(filename, dataset_type)
+        super(SDFDataset, self).__init__(filename, dataset_type,
+                                         units_override=units_override)
 
     def _parse_parameter_file(self):
         if self.parameter_filename.startswith("http"):

diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/frontends/stream/data_structures.py
--- a/yt/frontends/stream/data_structures.py
+++ b/yt/frontends/stream/data_structures.py
@@ -292,8 +292,8 @@
     _field_info_class = StreamFieldInfo
     _dataset_type = 'stream'
 
-    def __init__(self, stream_handler, storage_filename = None,
-                 geometry = "cartesian"):
+    def __init__(self, stream_handler, storage_filename=None,
+                 geometry="cartesian"):
         #if parameter_override is None: parameter_override = {}
         #self._parameter_override = parameter_override
         #if conversion_override is None: conversion_override = {}

diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/testing.py
--- a/yt/testing.py
+++ b/yt/testing.py
@@ -26,6 +26,7 @@
     assert_allclose, assert_raises
 from yt.units.yt_array import uconcatenate
 import yt.fields.api as field_api
+from yt.convenience import load
 
 def assert_rel_equal(a1, a2, decimals, err_msg='', verbose=True):
     # We have nan checks in here because occasionally we have fields that get
@@ -321,7 +322,30 @@
             return ftrue
         else:
             return ffalse
-                                        
+
+def units_override_check(fn):
+    ytcfg["yt","skip_dataset_cache"] = "True"
+    units_list = ["length","time","mass","velocity",
+                  "magnetic","temperature"]
+    ds1 = load(fn)
+    units_override = {}
+    attrs1 = []
+    attrs2 = []
+    for u in units_list:
+        unit_attr = getattr(ds1, "%s_unit" % u, None)
+        if unit_attr is not None:
+            attrs1.append(unit_attr)
+            units_override["%s_unit" % u] = (unit_attr.v, str(unit_attr.units))
+    del ds1
+    ds2 = load(fn, units_override=units_override)
+    ytcfg["yt","skip_dataset_cache"] = "False"
+    assert(len(ds2.units_override) > 0)
+    for u in units_list:
+        unit_attr = getattr(ds2, "%s_unit" % u, None)
+        if unit_attr is not None:
+            attrs2.append(unit_attr)
+    yield assert_equal, attrs1, attrs2
+
 # This is an export of the 40 grids in IsolatedGalaxy that are of level 4 or
 # lower.  It's just designed to give a sample AMR index to deal with.
 _amr_grid_index = [

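units_override_check loads the dataset twice, first plainly to harvest its unit attributes and then with those same values fed back through units_override, and yields an assertion that the two sets of attributes agree. Frontend test modules drive it as a generator, following the pattern used throughout this changeset (path placeholder):

    from yt.testing import requires_file, units_override_check

    @requires_file("MyFrontend/data0001")
    def test_units_override():
        for test in units_override_check("MyFrontend/data0001"):
            yield test
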
diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/units/unit_symbols.py
--- a/yt/units/unit_symbols.py
+++ b/yt/units/unit_symbols.py
@@ -85,7 +85,7 @@
 # Kelvin
 #
 
-degree_kelvin = Kelvin = quan(1.0, "K")
+degree_kelvin = Kelvin = K = quan(1.0, "K")
 
 #
 # Misc CGS

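With K added as an alias, temperatures can be written with the bare symbol; a quick sketch using only names defined in this file:

    from yt.units.unit_symbols import K, degree_kelvin

    T = 1.0e7 * K            # a YTQuantity in Kelvin
    assert T == 1.0e7 * degree_kelvin
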
diff -r 458c362aa2bca3ae1975a95f675a209c4899dd77 -r 4676d04cfba066a8c7c608aa2fa49d3e8df2619f yt/utilities/tests/test_particle_generator.py
--- a/yt/utilities/tests/test_particle_generator.py
+++ b/yt/utilities/tests/test_particle_generator.py
@@ -5,7 +5,6 @@
 from yt.frontends.stream.api import load_uniform_grid, refine_amr
 import yt.utilities.initial_conditions as ic
 import yt.utilities.flagging_methods as fm
-from IPython import embed
 from yt.units.yt_array import uconcatenate
 
 def setup() :


https://bitbucket.org/yt_analysis/yt/commits/b4e9e508e031/
Changeset:   b4e9e508e031
Branch:      yt
User:        jzuhone
Date:        2014-11-06 21:40:37+00:00
Summary:     Merged in brittonsmith/yt (pull request #1281)

Moving SPH frontends into their own directories.
Affected #:  93 files

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/__init__.py
--- a/yt/__init__.py
+++ b/yt/__init__.py
@@ -136,9 +136,9 @@
     hexahedral_connectivity
 
 # For backwards compatibility
-GadgetDataset = frontends.sph.GadgetDataset
+GadgetDataset = frontends.gadget.GadgetDataset
 GadgetStaticOutput = deprecated_class(GadgetDataset)
-TipsyDataset = frontends.sph.TipsyDataset
+TipsyDataset = frontends.tipsy.TipsyDataset
 TipsyStaticOutput = deprecated_class(TipsyDataset)
 
 # Now individual component imports from the visualization API

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/analysis_modules/halo_analysis/halo_finding_methods.py
--- a/yt/analysis_modules/halo_analysis/halo_finding_methods.py
+++ b/yt/analysis_modules/halo_analysis/halo_finding_methods.py
@@ -17,7 +17,7 @@
 
 from yt.analysis_modules.halo_finding.halo_objects import \
     FOFHaloFinder, HOPHaloFinder
-from yt.frontends.halo_catalogs.halo_catalog.data_structures import \
+from yt.frontends.halo_catalog.data_structures import \
     HaloCatalogDataset
 from yt.frontends.stream.data_structures import \
     load_particles
@@ -70,7 +70,7 @@
     Run the Rockstar halo finding method.
     """
 
-    from yt.frontends.halo_catalogs.rockstar.data_structures import \
+    from yt.frontends.rockstar.data_structures import \
      RockstarDataset
     from yt.analysis_modules.halo_finding.rockstar.api import \
      RockstarHaloFinder

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/api.py
--- a/yt/frontends/api.py
+++ b/yt/frontends/api.py
@@ -21,17 +21,21 @@
     'athena',
     'boxlib',
     'chombo',
+    'eagle',
     'enzo',
     'fits',
     'flash',
+    'gadget',
     'gdf',
-    'halo_catalogs',
+    'halo_catalog',
+    'http_stream',
     'moab',
+    'owls',
     #'pluto',
     'ramses',
     'sdf',
-    'sph',
     'stream',
+    'tipsy',
 ]
 
 class _frontend_container:

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/eagle/api.py
--- /dev/null
+++ b/yt/frontends/eagle/api.py
@@ -0,0 +1,25 @@
+"""
+API for EAGLE frontend
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from .data_structures import \
+    EagleDataset, \
+    EagleNetworkDataset
+
+from .fields import \
+    EagleNetworkFieldInfo
+
+from .io import \
+    IOHandlerEagleNetwork

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/eagle/data_structures.py
--- /dev/null
+++ b/yt/frontends/eagle/data_structures.py
@@ -0,0 +1,98 @@
+"""
+Data structures for EAGLE frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+import numpy as np
+import types
+
+from yt.frontends.gadget.data_structures import \
+    GadgetHDF5Dataset
+from yt.frontends.owls.fields import \
+    OWLSFieldInfo
+import yt.units
+
+from .fields import \
+    EagleNetworkFieldInfo
+
+class EagleDataset(GadgetHDF5Dataset):
+    _particle_mass_name = "Mass"
+    _field_info_class = OWLSFieldInfo
+    _time_readin = 'Time'
+
+    def _parse_parameter_file(self):
+
+        # read values from header
+        hvals = self._get_hvals()
+        self.parameters = hvals
+
+        # set features common to OWLS and Eagle
+        self._set_owls_eagle()
+
+        # Set time from analytic solution for flat LCDM universe
+        a = hvals['ExpansionFactor']
+        H0 = hvals['H(z)'] / hvals['E(z)']
+        a_eq = ( self.omega_matter / self.omega_lambda )**(1./3)
+        t1 = 2.0 / ( 3.0 * np.sqrt( self.omega_lambda ) )
+        t2 = (a/a_eq)**(3./2)
+        t3 = np.sqrt( 1.0 + (a/a_eq)**3 )
+        t = t1 * np.log( t2 + t3 ) / H0
+        self.current_time = t * yt.units.s
+
+    def _set_code_unit_attributes(self):
+        self._set_owls_eagle_units()
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        need_groups = ['Config', 'Constants', 'HashTable', 'Header', 
+                       'Parameters', 'RuntimePars', 'Units']
+        veto_groups = ['SUBFIND',
+                       'PartType0/ChemistryAbundances', 
+                       'PartType0/ChemicalAbundances']
+        valid = True
+        try:
+            fileh = h5py.File(args[0], mode='r')
+            for ng in need_groups:
+                if ng not in fileh["/"]:
+                    valid = False
+            for vg in veto_groups:
+                if vg in fileh["/"]:
+                    valid = False                    
+            fileh.close()
+        except:
+            valid = False
+            pass
+        return valid
+
+class EagleNetworkDataset(EagleDataset):
+    _particle_mass_name = "Mass"
+    _field_info_class = EagleNetworkFieldInfo
+    _time_readin = 'Time'
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        try:
+            fileh = h5py.File(args[0], mode='r')
+            if "Constants" in fileh["/"].keys() and \
+               "Header" in fileh["/"].keys() and \
+               "SUBFIND" not in fileh["/"].keys() and \
+               ("ChemistryAbundances" in fileh["PartType0"].keys()
+                or "ChemicalAbundances" in fileh["PartType0"].keys()):
+                fileh.close()
+                return True
+            fileh.close()
+        except:
+            pass
+        return False

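The current_time computation above is the closed-form age of a flat LCDM universe: with a_eq = (Omega_m / Omega_Lambda)**(1/3) and H0 recovered from the header as H(z)/E(z), t(a) = 2 / (3 * sqrt(Omega_Lambda) * H0) * ln((a/a_eq)**1.5 + sqrt(1 + (a/a_eq)**3)). A self-contained sketch with illustrative (non-EAGLE) numbers:

    import numpy as np

    def flat_lcdm_age(a, H0, omega_m, omega_l):
        # Closed-form age at expansion factor a; H0 in 1/s.
        a_eq = (omega_m / omega_l)**(1.0 / 3.0)
        t1 = 2.0 / (3.0 * np.sqrt(omega_l))
        t2 = (a / a_eq)**1.5
        t3 = np.sqrt(1.0 + (a / a_eq)**3)
        return t1 * np.log(t2 + t3) / H0

    H0 = 70.0 * 1.0e5 / 3.0857e24             # 70 km/s/Mpc in 1/s
    age = flat_lcdm_age(1.0, H0, 0.3, 0.7)
    print(age / 3.156e16)                     # ~13.5 Gyr
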
diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/eagle/definitions.py
--- /dev/null
+++ b/yt/frontends/eagle/definitions.py
@@ -0,0 +1,35 @@
+"""
+EAGLE definitions
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+eaglenetwork_ions = \
+    ('electron', 'H1', 'H2', 'H_m', 'He1', 'He2','He3', 'C1',\
+     'C2', 'C3', 'C4', 'C5', 'C6', 'C7', 'C_m', 'N1', 'N2', \
+     'N3', 'N4', 'N5', 'N6', 'N7', 'N8', 'O1', 'O2', 'O3', \
+     'O4', 'O5', 'O6', 'O7', 'O8', 'O9', 'O_m', 'Ne1', 'Ne2',\
+     'Ne3', 'Ne4', 'Ne5', 'Ne6', 'Ne7', 'Ne8', 'Ne9', 'Ne10',\
+     'Ne11', 'Mg1', 'Mg2', 'Mg3', 'Mg4', 'Mg5', 'Mg6', 'Mg7',\
+     'Mg8', 'Mg9', 'Mg10', 'Mg11', 'Mg12', 'Mg13', 'Si1', 'Si2',\
+     'Si3', 'Si4', 'Si5', 'Si6', 'Si7', 'Si8', 'Si9', 'Si10',\
+     'Si11', 'Si12', 'Si13', 'Si14', 'Si15', 'Si16', 'Si17',\
+     'Ca1', 'Ca2', 'Ca3', 'Ca4', 'Ca5', 'Ca6', 'Ca7', 'Ca8',\
+     'Ca9', 'Ca10', 'Ca11', 'Ca12', 'Ca13', 'Ca14', 'Ca15',\
+     'Ca16', 'Ca17', 'Ca18', 'Ca19', 'Ca20', 'Ca21', 'Fe1',\
+     'Fe2', 'Fe3', 'Fe4', 'Fe5', 'Fe6', 'Fe7', 'Fe8', 'Fe9',\
+     'Fe10', 'Fe11', 'Fe12', 'Fe13', 'Fe14', 'Fe15', 'Fe16',\
+     'Fe17', 'Fe18', 'Fe19', 'Fe20', 'Fe21', 'Fe22', 'Fe23',\
+     'Fe24', 'Fe25', 'Fe26', 'Fe27',)
+
+eaglenetwork_ion_lookup = {ion:index for index, ion in enumerate(eaglenetwork_ions)}

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/eagle/fields.py
--- /dev/null
+++ b/yt/frontends/eagle/fields.py
@@ -0,0 +1,73 @@
+"""
+EAGLE fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.frontends.owls.fields import \
+    OWLSFieldInfo
+from yt.units.yt_array import YTQuantity
+from yt.utilities.periodic_table import periodic_table
+
+from .definitions import \
+    eaglenetwork_ion_lookup
+
+class EagleNetworkFieldInfo(OWLSFieldInfo):
+
+    _ions = \
+        ('H1', 'H2', 'He1', 'He2','He3', 'C1',\
+         'C2', 'C3', 'C4', 'C5', 'C6', 'C7', 'N1', 'N2', \
+         'N3', 'N4', 'N5', 'N6', 'N7', 'N8', 'O1', 'O2', 'O3', \
+         'O4', 'O5', 'O6', 'O7', 'O8', 'O9', 'Ne1', 'Ne2',\
+         'Ne3', 'Ne4', 'Ne5', 'Ne6', 'Ne7', 'Ne8', 'Ne9', 'Ne10',\
+         'Ne11', 'Mg1', 'Mg2', 'Mg3', 'Mg4', 'Mg5', 'Mg6', 'Mg7',\
+         'Mg8', 'Mg9', 'Mg10', 'Mg11', 'Mg12', 'Mg13', 'Si1', 'Si2',\
+         'Si3', 'Si4', 'Si5', 'Si6', 'Si7', 'Si8', 'Si9', 'Si10',\
+         'Si11', 'Si12', 'Si13', 'Si14', 'Si15', 'Si16', 'Si17',\
+         'Ca1', 'Ca2', 'Ca3', 'Ca4', 'Ca5', 'Ca6', 'Ca7', 'Ca8',\
+         'Ca9', 'Ca10', 'Ca11', 'Ca12', 'Ca13', 'Ca14', 'Ca15',\
+         'Ca16', 'Ca17', 'Ca18', 'Ca19', 'Ca20', 'Ca21', 'Fe1',\
+         'Fe2', 'Fe3', 'Fe4', 'Fe5', 'Fe6', 'Fe7', 'Fe8', 'Fe9',\
+         'Fe10', 'Fe11', 'Fe12', 'Fe13', 'Fe14', 'Fe15', 'Fe16',\
+         'Fe17', 'Fe18', 'Fe19', 'Fe20', 'Fe21', 'Fe22', 'Fe23',\
+         'Fe24', 'Fe25', 'Fe26', 'Fe27',)
+
+    def __init__(self, *args, **kwargs):
+        
+        super(EagleNetworkFieldInfo,self).__init__( *args, **kwargs )
+        
+    def _create_ion_density_func( self, ftype, ion ):
+        """ returns a function that calculates the ion density of a particle. 
+        """ 
+
+        def _ion_density(field, data):
+
+            # Lookup the index of the ion 
+            index = eaglenetwork_ion_lookup[ion] 
+
+            # Ion to hydrogen number density ratio
+            ion_chem = data[ftype, "Chemistry_%03i"%index]
+
+            # Mass of a single ion
+            if ion[0:2].isalpha():
+                symbol = ion[0:2].capitalize()
+            else:
+                symbol = ion[0:1].capitalize()
+            m_ion = YTQuantity(periodic_table.elements_by_symbol[symbol].weight, 'amu')
+
+            # hydrogen number density 
+            n_H = data["PartType0", "H_number_density"] 
+
+            return m_ion*ion_chem*n_H 
+        
+        return _ion_density

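The generated field multiplies the on-disk ion-to-hydrogen ratio (Chemistry_%03i) by the ion mass and the hydrogen number density. The symbol handling in the middle simply strips the ionization level off the ion name; that step in isolation:

    def ion_symbol(ion):
        # 'He2' -> 'He', 'C4' -> 'C', 'Ne10' -> 'Ne'
        if ion[0:2].isalpha():
            return ion[0:2].capitalize()
        return ion[0:1].capitalize()

    assert ion_symbol("Ne10") == "Ne"
    assert ion_symbol("C4") == "C"
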
diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/eagle/io.py
--- /dev/null
+++ b/yt/frontends/eagle/io.py
@@ -0,0 +1,21 @@
+"""
+EAGLE data-file handling function
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.frontends.owls.io import \
+    IOHandlerOWLS
+
+class IOHandlerEagleNetwork(IOHandlerOWLS):
+    _dataset_type = "eagle_network"

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/eagle/setup.py
--- /dev/null
+++ b/yt/frontends/eagle/setup.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+import setuptools
+import os
+import sys
+import os.path
+
+
+def configuration(parent_package='', top_path=None):
+    from numpy.distutils.misc_util import Configuration
+    config = Configuration('eagle', parent_package, top_path)
+    config.make_config_py()  # installs __config__.py
+    #config.make_svn_version_py()
+    return config

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/gadget/api.py
--- /dev/null
+++ b/yt/frontends/gadget/api.py
@@ -0,0 +1,23 @@
+"""
+API for Gadget frontend
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from .data_structures import \
+    GadgetDataset, \
+    GadgetHDF5Dataset
+
+from .io import \
+    IOHandlerGadgetBinary, \
+    IOHandlerGadgetHDF5

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/gadget/data_structures.py
--- /dev/null
+++ b/yt/frontends/gadget/data_structures.py
@@ -0,0 +1,337 @@
+"""
+Data structures for Gadget frontend
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+import numpy as np
+import stat
+import os
+import types
+
+from yt.data_objects.static_output import \
+    ParticleFile
+from yt.frontends.sph.data_structures import \
+    ParticleDataset
+from yt.frontends.sph.fields import \
+    SPHFieldInfo
+from yt.geometry.particle_geometry_handler import \
+    ParticleIndex
+from yt.utilities.cosmology import \
+    Cosmology
+from yt.utilities.definitions import \
+    sec_conversion
+from yt.utilities.fortran_utils import read_record
+from yt.utilities.logger import ytLogger as mylog
+
+from .definitions import \
+    gadget_header_specs, \
+    gadget_field_specs, \
+    gadget_ptype_specs
+
+def _fix_unit_ordering(unit):
+    if isinstance(unit[0], types.StringTypes):
+        unit = unit[1], unit[0]
+    return unit
+
+class GadgetBinaryFile(ParticleFile):
+    def __init__(self, ds, io, filename, file_id):
+        with open(filename, "rb") as f:
+            self.header = read_record(f, ds._header_spec)
+            self._position_offset = f.tell()
+            f.seek(0, os.SEEK_END)
+            self._file_size = f.tell()
+
+        super(GadgetBinaryFile, self).__init__(ds, io, filename, file_id)
+
+    def _calculate_offsets(self, field_list):
+        self.field_offsets = self.io._calculate_field_offsets(
+            field_list, self.total_particles,
+            self._position_offset, self._file_size)
+
+class GadgetDataset(ParticleDataset):
+    _index_class = ParticleIndex
+    _file_class = GadgetBinaryFile
+    _field_info_class = SPHFieldInfo
+    _particle_mass_name = "Mass"
+    _particle_coordinates_name = "Coordinates"
+    _particle_velocity_name = "Velocities"
+    _suffix = ""
+
+    def __init__(self, filename, dataset_type="gadget_binary",
+                 additional_fields=(),
+                 unit_base=None, n_ref=64,
+                 over_refine_factor=1,
+                 bounding_box = None,
+                 header_spec = "default",
+                 field_spec = "default",
+                 ptype_spec = "default",
+                 units_override=None):
+        if self._instantiated: return
+        self._header_spec = self._setup_binary_spec(
+            header_spec, gadget_header_specs)
+        self._field_spec = self._setup_binary_spec(
+            field_spec, gadget_field_specs)
+        self._ptype_spec = self._setup_binary_spec(
+            ptype_spec, gadget_ptype_specs)
+        self.n_ref = n_ref
+        self.over_refine_factor = over_refine_factor
+        self.storage_filename = None
+        if unit_base is not None and "UnitLength_in_cm" in unit_base:
+            # We assume this is comoving, because in the absence of comoving
+            # integration the redshift will be zero.
+            unit_base['cmcm'] = 1.0 / unit_base["UnitLength_in_cm"]
+        self._unit_base = unit_base
+        if bounding_box is not None:
+            bbox = np.array(bounding_box, dtype="float64")
+            if bbox.shape == (2, 3):
+                bbox = bbox.transpose()
+            self.domain_left_edge = bbox[:,0]
+            self.domain_right_edge = bbox[:,1]
+        else:
+            self.domain_left_edge = self.domain_right_edge = None
+        if units_override is not None:
+            raise RuntimeError("units_override is not supported for GadgetDataset. "+
+                               "Use unit_base instead.")
+        super(GadgetDataset, self).__init__(filename, dataset_type)
+
+    def _setup_binary_spec(self, spec, spec_dict):
+        if isinstance(spec, types.StringTypes):
+            _hs = ()
+            for hs in spec.split("+"):
+                _hs += spec_dict[hs]
+            spec = _hs
+        return spec
+
+    def __repr__(self):
+        return os.path.basename(self.parameter_filename).split(".")[0]
+
+    def _get_hvals(self):
+        # The entries in this header are capitalized and named to match Table 4
+        # in the GADGET-2 user guide.
+
+        f = open(self.parameter_filename)
+        hvals = read_record(f, self._header_spec)
+        for i in hvals:
+            if len(hvals[i]) == 1:
+                hvals[i] = hvals[i][0]
+        return hvals
+
+    def _parse_parameter_file(self):
+
+        hvals = self._get_hvals()
+
+        self.dimensionality = 3
+        self.refine_by = 2
+        self.parameters["HydroMethod"] = "sph"
+        self.unique_identifier = \
+            int(os.stat(self.parameter_filename)[stat.ST_CTIME])
+        # Set standard values
+
+        # We may have an overridden bounding box.
+        if self.domain_left_edge is None:
+            self.domain_left_edge = np.zeros(3, "float64")
+            self.domain_right_edge = np.ones(3, "float64") * hvals["BoxSize"]
+        nz = 1 << self.over_refine_factor
+        self.domain_dimensions = np.ones(3, "int32") * nz
+        self.periodicity = (True, True, True)
+
+        self.cosmological_simulation = 1
+
+        self.current_redshift = hvals["Redshift"]
+        self.omega_lambda = hvals["OmegaLambda"]
+        self.omega_matter = hvals["Omega0"]
+        self.hubble_constant = hvals["HubbleParam"]
+        # According to the Gadget manual, OmegaLambda will be zero for
+        # non-cosmological datasets.  However, it may be the case that
+        # individuals are running cosmological simulations *without* Lambda, in
+        # which case we may be doing something incorrect here.
+        # It may be possible to deduce whether ComovingIntegration is on
+        # somehow, but opinions on this vary.
+        if self.omega_lambda == 0.0:
+            mylog.info("Omega Lambda is 0.0, so we are turning off Cosmology.")
+            self.hubble_constant = 1.0  # So that scaling comes out correct
+            self.cosmological_simulation = 0
+            self.current_redshift = 0.0
+            # This may not be correct.
+            self.current_time = hvals["Time"] * sec_conversion["Gyr"]
+        else:
+            # Now we calculate our time based on the cosmology, because in
+            # ComovingIntegration hvals["Time"] will in fact be the expansion
+            # factor, not the actual integration time, so we re-calculate
+            # global time from our Cosmology.
+            cosmo = Cosmology(self.hubble_constant,
+                              self.omega_matter, self.omega_lambda)
+            self.current_time = cosmo.hubble_time(self.current_redshift)
+            mylog.info("Calculating time from %0.3e to be %0.3e seconds",
+                       hvals["Time"], self.current_time)
+        self.parameters = hvals
+
+        prefix = self.parameter_filename.split(".", 1)[0]
+
+        if hvals["NumFiles"] > 1:
+            self.filename_template = "%s.%%(num)s%s" % (prefix, self._suffix)
+        else:
+            self.filename_template = self.parameter_filename
+
+        self.file_count = hvals["NumFiles"]
+
+    def _set_code_unit_attributes(self):
+        # If no units passed in by user, set a sane default (Gadget-2 users guide).
+        if self._unit_base is None:
+            if self.cosmological_simulation == 1:
+                mylog.info("Assuming length units are in kpc/h (comoving)")
+                self._unit_base = dict(length = (1.0, "kpccm/h"))
+            else:
+                mylog.info("Assuming length units are in kpc (physical)")
+                self._unit_base = dict(length = (1.0, "kpc"))
+                
+        # If units passed in by user, decide what to do about
+        # co-moving and factors of h
+        unit_base = self._unit_base or {}
+        if "length" in unit_base:
+            length_unit = unit_base["length"]
+        elif "UnitLength_in_cm" in unit_base:
+            if self.cosmological_simulation == 0:
+                length_unit = (unit_base["UnitLength_in_cm"], "cm")
+            else:
+                length_unit = (unit_base["UnitLength_in_cm"], "cmcm/h")
+        else:
+            raise RuntimeError
+        length_unit = _fix_unit_ordering(length_unit)
+        self.length_unit = self.quan(length_unit[0], length_unit[1])
+
+        unit_base = self._unit_base or {}
+        if "velocity" in unit_base:
+            velocity_unit = unit_base["velocity"]
+        elif "UnitVelocity_in_cm_per_s" in unit_base:
+            velocity_unit = (unit_base["UnitVelocity_in_cm_per_s"], "cm/s")
+        else:
+            velocity_unit = (1e5, "cm/s")
+        velocity_unit = _fix_unit_ordering(velocity_unit)
+        self.velocity_unit = self.quan(velocity_unit[0], velocity_unit[1])
+
+        # We set hubble_constant = 1.0 for non-cosmology, so this is safe.
+        # Default to 1e10 Msun/h if mass is not specified.
+        if "mass" in unit_base:
+            mass_unit = unit_base["mass"]
+        elif "UnitMass_in_g" in unit_base:
+            if self.cosmological_simulation == 0:
+                mass_unit = (unit_base["UnitMass_in_g"], "g")
+            else:
+                mass_unit = (unit_base["UnitMass_in_g"], "g/h")
+        else:
+            # Sane default
+            mass_unit = (1.0, "1e10*Msun/h")
+        mass_unit = _fix_unit_ordering(mass_unit)
+        self.mass_unit = self.quan(mass_unit[0], mass_unit[1])
+        self.time_unit = self.length_unit / self.velocity_unit
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        # We do not allow load() of these files.
+        return False
+
+class GadgetHDF5Dataset(GadgetDataset):
+    _file_class = ParticleFile
+    _field_info_class = SPHFieldInfo
+    _particle_mass_name = "Masses"
+    _suffix = ".hdf5"
+
+    def __init__(self, filename, dataset_type="gadget_hdf5", 
+                 unit_base = None, n_ref=64,
+                 over_refine_factor=1,
+                 bounding_box = None,
+                 units_override=None):
+        self.storage_filename = None
+        filename = os.path.abspath(filename)
+        if units_override is not None:
+            raise RuntimeError("units_override is not supported for GadgetHDF5Dataset. "+
+                               "Use unit_base instead.")
+        super(GadgetHDF5Dataset, self).__init__(
+            filename, dataset_type, unit_base=unit_base, n_ref=n_ref,
+            over_refine_factor=over_refine_factor,
+            bounding_box = bounding_box)
+
+    def _get_hvals(self):
+        handle = h5py.File(self.parameter_filename, mode="r")
+        hvals = {}
+        hvals.update((str(k), v) for k, v in handle["/Header"].attrs.items())
+        # Compat reasons.
+        hvals["NumFiles"] = hvals["NumFilesPerSnapshot"]
+        hvals["Massarr"] = hvals["MassTable"]
+        handle.close()
+        return hvals
+
+    def _get_uvals(self):
+        handle = h5py.File(self.parameter_filename, mode="r")
+        uvals = {}
+        uvals.update((str(k), v) for k, v in handle["/Units"].attrs.items())
+        handle.close()
+        return uvals
+
+
+
+    def _set_owls_eagle(self):
+
+        self.dimensionality = 3
+        self.refine_by = 2
+        self.parameters["HydroMethod"] = "sph"
+        self.unique_identifier = \
+            int(os.stat(self.parameter_filename)[stat.ST_CTIME])
+
+        self._unit_base = self._get_uvals()
+        self._unit_base['cmcm'] = 1.0 / self._unit_base["UnitLength_in_cm"]
+
+        self.current_redshift = self.parameters["Redshift"]
+        self.omega_lambda = self.parameters["OmegaLambda"]
+        self.omega_matter = self.parameters["Omega0"]
+        self.hubble_constant = self.parameters["HubbleParam"]
+
+        if self.domain_left_edge is None:
+            self.domain_left_edge = np.zeros(3, "float64")
+            self.domain_right_edge = np.ones(3, "float64") * self.parameters["BoxSize"]
+
+        nz = 1 << self.over_refine_factor
+        self.domain_dimensions = np.ones(3, "int32") * nz
+
+        self.cosmological_simulation = 1
+        self.periodicity = (True, True, True)
+
+        prefix = os.path.abspath(self.parameter_filename.split(".", 1)[0])
+        suffix = self.parameter_filename.rsplit(".", 1)[-1]
+        self.filename_template = "%s.%%(num)i.%s" % (prefix, suffix)
+        self.file_count = self.parameters["NumFilesPerSnapshot"]
+
+    def _set_owls_eagle_units(self):
+
+        # note the contents of the HDF5 Units group are in _unit_base 
+        # note the velocity stored on disk is sqrt(a) dx/dt 
+        self.length_unit = self.quan(self._unit_base["UnitLength_in_cm"], 'cmcm/h')
+        self.mass_unit = self.quan(self._unit_base["UnitMass_in_g"], 'g/h')
+        self.velocity_unit = self.quan(self._unit_base["UnitVelocity_in_cm_per_s"], 'cm/s')
+        self.time_unit = self.quan(self._unit_base["UnitTime_in_s"], 's/h')
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        try:
+            fileh = h5py.File(args[0], mode='r')
+            if "Constants" not in fileh["/"].keys() and \
+               "Header" in fileh["/"].keys():
+                fileh.close()
+                return True
+            fileh.close()
+        except:
+            pass
+        return False

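Note that both Gadget classes explicitly reject units_override: binary Gadget snapshots carry no unit metadata, so units are supplied through unit_base instead. A sketch of a typical load, with a placeholder snapshot name and illustrative cgs values:

    import yt

    unit_base = {"UnitLength_in_cm": 3.085678e21,     # 1 kpc
                 "UnitMass_in_g": 1.989e43,           # 1e10 Msun
                 "UnitVelocity_in_cm_per_s": 1.0e5}   # 1 km/s
    ds = yt.load("snapshot_000", unit_base=unit_base)
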
diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/gadget/definitions.py
--- /dev/null
+++ b/yt/frontends/gadget/definitions.py
@@ -0,0 +1,69 @@
+"""
+Gadget definitions
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+gadget_header_specs = dict(
+    default      = (('Npart', 6, 'i'),
+                    ('Massarr', 6, 'd'),
+                    ('Time', 1, 'd'),
+                    ('Redshift', 1, 'd'),
+                    ('FlagSfr', 1, 'i'),
+                    ('FlagFeedback', 1, 'i'),
+                    ('Nall', 6, 'i'),
+                    ('FlagCooling', 1, 'i'),
+                    ('NumFiles', 1, 'i'),
+                    ('BoxSize', 1, 'd'),
+                    ('Omega0', 1, 'd'),
+                    ('OmegaLambda', 1, 'd'),
+                    ('HubbleParam', 1, 'd'),
+                    ('FlagAge', 1, 'i'),
+                    ('FlagMetals', 1, 'i'),
+                    ('NallHW', 6, 'i'),
+                    ('unused', 16, 'i')),
+    pad32       = (('empty',  32, 'c'),),
+    pad64       = (('empty',  64, 'c'),),
+    pad128      = (('empty', 128, 'c'),),
+    pad256      = (('empty', 256, 'c'),),
+)
+
+gadget_ptype_specs = dict(
+    default = ( "Gas",
+                "Halo",
+                "Disk",
+                "Bulge",
+                "Stars",
+                "Bndry" )
+)
+
+gadget_field_specs = dict(
+    default = ( "Coordinates",
+                "Velocities",
+                "ParticleIDs",
+                "Mass",
+                ("InternalEnergy", "Gas"),
+                ("Density", "Gas"),
+                ("SmoothingLength", "Gas"),
+    ),
+    agora_unlv = ( "Coordinates",
+                   "Velocities",
+                   "ParticleIDs",
+                   "Mass",
+                   ("InternalEnergy", "Gas"),
+                   ("Density", "Gas"),
+                   ("Electron_Number_Density", "Gas"),
+                   ("HI_NumberDensity", "Gas"),
+                   ("SmoothingLength", "Gas"),
+    )
+)

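_setup_binary_spec in data_structures.py above assembles these tuples by splitting a spec string on '+', so a padded header can be requested as, for example, 'default+pad128'. The composition step in isolation:

    from yt.frontends.gadget.definitions import gadget_header_specs

    def compose_spec(spec, spec_dict):
        # "default+pad128" -> default header fields plus 128 bytes
        # of padding, exactly as _setup_binary_spec composes them.
        fields = ()
        for name in spec.split("+"):
            fields += spec_dict[name]
        return fields

    header_spec = compose_spec("default+pad128", gadget_header_specs)
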
diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/gadget/io.py
--- /dev/null
+++ b/yt/frontends/gadget/io.py
@@ -0,0 +1,210 @@
+"""
+Gadget data-file handling functions
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+import numpy as np
+
+from yt.frontends.owls.io import \
+    IOHandlerOWLS
+from yt.geometry.oct_container import \
+    _ORDER_MAX
+from yt.utilities.io_handler import \
+    BaseIOHandler
+from yt.utilities.lib.geometry_utils import \
+    compute_morton
+
+class IOHandlerGadgetHDF5(IOHandlerOWLS):
+    _dataset_type = "gadget_hdf5"
+
+ZeroMass = object()
+    
+class IOHandlerGadgetBinary(BaseIOHandler):
+    _dataset_type = "gadget_binary"
+    _vector_fields = ("Coordinates", "Velocity", "Velocities")
+
+    # Particle types (Table 3 in GADGET-2 user guide)
+    #
+    # Blocks in the file:
+    #   HEAD
+    #   POS
+    #   VEL
+    #   ID
+    #   MASS    (variable mass only)
+    #   U       (gas only)
+    #   RHO     (gas only)
+    #   HSML    (gas only)
+    #   POT     (only if enabled in makefile)
+    #   ACCE    (only if enabled in makefile)
+    #   ENDT    (only if enabled in makefile)
+    #   TSTP    (only if enabled in makefile)
+
+    _var_mass = None
+
+    def __init__(self, ds, *args, **kwargs):
+        self._fields = ds._field_spec
+        self._ptypes = ds._ptype_spec
+        super(IOHandlerGadgetBinary, self).__init__(ds, *args, **kwargs)
+
+    @property
+    def var_mass(self):
+        if self._var_mass is None:
+            vm = []
+            for i, v in enumerate(self.ds["Massarr"]):
+                if v == 0:
+                    vm.append(self._ptypes[i])
+            self._var_mass = tuple(vm)
+        return self._var_mass
+
+    def _read_fluid_selection(self, chunks, selector, fields, size):
+        raise NotImplementedError
+
+    def _read_particle_coords(self, chunks, ptf):
+        data_files = set([])
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in sorted(data_files):
+            poff = data_file.field_offsets
+            tp = data_file.total_particles
+            f = open(data_file.filename, "rb")
+            for ptype in ptf:
+                # This is where we could implement sub-chunking
+                f.seek(poff[ptype, "Coordinates"], os.SEEK_SET)
+                pos = self._read_field_from_file(f,
+                            tp[ptype], "Coordinates")
+                yield ptype, (pos[:,0], pos[:,1], pos[:,2])
+            f.close()
+
+    def _read_particle_fields(self, chunks, ptf, selector):
+        data_files = set([])
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in sorted(data_files):
+            poff = data_file.field_offsets
+            tp = data_file.total_particles
+            f = open(data_file.filename, "rb")
+            for ptype, field_list in sorted(ptf.items()):
+                f.seek(poff[ptype, "Coordinates"], os.SEEK_SET)
+                pos = self._read_field_from_file(f,
+                            tp[ptype], "Coordinates")
+                mask = selector.select_points(
+                    pos[:,0], pos[:,1], pos[:,2], 0.0)
+                del pos
+                if mask is None: continue
+                for field in field_list:
+                    if field == "Mass" and ptype not in self.var_mass:
+                        data = np.empty(mask.sum(), dtype="float64")
+                        m = self.ds.parameters["Massarr"][
+                            self._ptypes.index(ptype)]
+                        data[:] = m
+                        yield (ptype, field), data
+                        continue
+                    f.seek(poff[ptype, field], os.SEEK_SET)
+                    data = self._read_field_from_file(f, tp[ptype], field)
+                    data = data[mask,...]
+                    yield (ptype, field), data
+            f.close()
+
+    def _read_field_from_file(self, f, count, name):
+        if count == 0: return
+        if name == "ParticleIDs":
+            dt = "uint32"
+        else:
+            dt = "float32"
+        if name in self._vector_fields:
+            count *= 3
+        arr = np.fromfile(f, dtype=dt, count = count)
+        if name in self._vector_fields:
+            arr = arr.reshape((count/3, 3), order="C")
+        return arr.astype("float64")
+
+    def _initialize_index(self, data_file, regions):
+        count = sum(data_file.total_particles.values())
+        DLE = data_file.ds.domain_left_edge
+        DRE = data_file.ds.domain_right_edge
+        dx = (DRE - DLE) / 2**_ORDER_MAX
+        with open(data_file.filename, "rb") as f:
+            # We add an additional 4 bytes for the first record marker.
+            f.seek(data_file._position_offset + 4)
+            # The first total_particles * 3 values are positions
+            pp = np.fromfile(f, dtype = 'float32', count = count*3)
+            pp.shape = (count, 3)
+        regions.add_data_file(pp, data_file.file_id, data_file.ds.filter_bbox)
+        morton = compute_morton(pp[:,0], pp[:,1], pp[:,2], DLE, DRE,
+                                data_file.ds.filter_bbox)
+        return morton
+
+    def _count_particles(self, data_file):
+        npart = dict((self._ptypes[i], v)
+            for i, v in enumerate(data_file.header["Npart"]))
+        return npart
+
+    # The header record is 256 bytes, bracketed by 4-byte record-size
+    # ints at the beginning and end; each field value is 4 bytes.
+    _field_size = 4
+    def _calculate_field_offsets(self, field_list, pcount,
+                                 offset, file_size = None):
+        # field_list is (ftype, fname) but the blocks are ordered
+        # (fname, ftype) in the file.
+        pos = offset
+        fs = self._field_size
+        offsets = {}
+        for field in self._fields:
+            if not isinstance(field, types.StringTypes):
+                field = field[0]
+            if not any( (ptype, field) in field_list
+                        for ptype in self._ptypes):
+                continue
+            pos += 4
+            any_ptypes = False
+            for ptype in self._ptypes:
+                if field == "Mass" and ptype not in self.var_mass:
+                    continue
+                if (ptype, field) not in field_list:
+                    continue
+                offsets[(ptype, field)] = pos
+                any_ptypes = True
+                if field in self._vector_fields:
+                    pos += 3 * pcount[ptype] * fs
+                else:
+                    pos += pcount[ptype] * fs
+            pos += 4
+            if not any_ptypes: pos -= 8
+        if file_size is not None:
+            if file_size != pos:
+                mylog.warning("Your Gadget-2 file may have extra " +
+                              "columns or different precision!" +
+                              " (%s file vs %s computed)",
+                              file_size, pos)
+        return offsets
+
+    def _identify_fields(self, domain):
+        # We can just look at the particle counts.
+        field_list = []
+        tp = domain.total_particles
+        for i, ptype in enumerate(self._ptypes):
+            count = tp[ptype]
+            if count == 0: continue
+            m = domain.header["Massarr"][i]
+            for field in self._fields:
+                if isinstance(field, types.TupleType):
+                    field, req = field
+                    if req is ZeroMass:
+                        if m > 0.0: continue
+                    elif req != ptype:
+                        continue
+                field_list.append((ptype, field))
+        return field_list, {}
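
The offset bookkeeping in _calculate_field_offsets amounts to walking Fortran records: each block's payload is bracketed by 4-byte size markers, and each per-particle value is 4 bytes (12 for vector fields). A toy version of that walk, with made-up particle counts:

    # Hypothetical layout: 10 gas + 5 halo particles, default field order.
    field_size = 4
    pcount = {"Gas": 10, "Halo": 5}
    fields = ("Coordinates", "Velocities", "ParticleIDs")
    vector_fields = ("Coordinates", "Velocities")

    pos = 256 + 8  # header payload plus its two record markers
    offsets = {}
    for field in fields:
        pos += 4  # leading record marker
        for ptype in ("Gas", "Halo"):
            offsets[(ptype, field)] = pos
            width = 3 * field_size if field in vector_fields else field_size
            pos += pcount[ptype] * width
        pos += 4  # trailing record marker
    print(offsets[("Gas", "Coordinates")])  # 268, i.e. _position_offset + 4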

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/gadget/setup.py
--- /dev/null
+++ b/yt/frontends/gadget/setup.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+import setuptools
+import os
+import sys
+import os.path
+
+
+def configuration(parent_package='', top_path=None):
+    from numpy.distutils.misc_util import Configuration
+    config = Configuration('gadget', parent_package, top_path)
+    config.make_config_py()  # installs __config__.py
+    #config.make_svn_version_py()
+    return config
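
These per-frontend configuration functions follow the numpy.distutils convention: a parent package's setup.py collects them with add_subpackage. A hypothetical parent sketch (the package names are illustrative):

    from numpy.distutils.misc_util import Configuration

    def configuration(parent_package='', top_path=None):
        config = Configuration('frontends', parent_package, top_path)
        config.add_subpackage('gadget')
        config.add_subpackage('halo_catalog')
        return config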

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/halo_catalog/__init__.py
--- /dev/null
+++ b/yt/frontends/halo_catalog/__init__.py
@@ -0,0 +1,15 @@
+"""
+API for HaloCatalog frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/halo_catalog/api.py
--- /dev/null
+++ b/yt/frontends/halo_catalog/api.py
@@ -0,0 +1,24 @@
+"""
+API for HaloCatalog frontend
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from .data_structures import \
+     HaloCatalogDataset
+
+from .io import \
+     IOHandlerHaloCatalogHDF5
+
+from .fields import \
+     HaloCatalogFieldInfo

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/halo_catalog/data_structures.py
--- /dev/null
+++ b/yt/frontends/halo_catalog/data_structures.py
@@ -0,0 +1,97 @@
+"""
+Data structures for HaloCatalog frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+import numpy as np
+import stat
+import weakref
+import struct
+import glob
+import time
+import os
+
+from .fields import \
+    HaloCatalogFieldInfo
+
+from yt.utilities.cosmology import Cosmology
+from yt.geometry.particle_geometry_handler import \
+    ParticleIndex
+from yt.data_objects.static_output import \
+    Dataset, \
+    ParticleFile
+import yt.utilities.fortran_utils as fpu
+from yt.units.yt_array import \
+    YTArray, \
+    YTQuantity
+    
+class HaloCatalogHDF5File(ParticleFile):
+    def __init__(self, ds, io, filename, file_id):
+        with h5py.File(filename, "r") as f:
+            self.header = dict((field, f.attrs[field]) \
+                               for field in f.attrs.keys())
+
+        super(HaloCatalogHDF5File, self).__init__(ds, io, filename, file_id)
+    
+class HaloCatalogDataset(Dataset):
+    _index_class = ParticleIndex
+    _file_class = HaloCatalogHDF5File
+    _field_info_class = HaloCatalogFieldInfo
+    _suffix = ".h5"
+
+    def __init__(self, filename, dataset_type="halocatalog_hdf5",
+                 n_ref = 16, over_refine_factor = 1, units_override=None):
+        self.n_ref = n_ref
+        self.over_refine_factor = over_refine_factor
+        super(HaloCatalogDataset, self).__init__(filename, dataset_type,
+                                                 units_override=units_override)
+
+    def _parse_parameter_file(self):
+        with h5py.File(self.parameter_filename, "r") as f:
+            hvals = dict((key, f.attrs[key]) for key in f.attrs.keys())
+        self.dimensionality = 3
+        self.refine_by = 2
+        self.unique_identifier = \
+            int(os.stat(self.parameter_filename)[stat.ST_CTIME])
+        prefix = ".".join(self.parameter_filename.rsplit(".", 2)[:-2])
+        self.filename_template = "%s.%%(num)s%s" % (prefix, self._suffix)
+        self.file_count = len(glob.glob(prefix + "*" + self._suffix))
+
+        for attr in ["cosmological_simulation", "current_time", "current_redshift",
+                     "hubble_constant", "omega_matter", "omega_lambda",
+                     "domain_left_edge", "domain_right_edge"]:
+            setattr(self, attr, hvals[attr])
+        self.periodicity = (True, True, True)
+        self.particle_types = ("halos",)
+        self.particle_types_raw = ("halos",)
+
+        nz = 1 << self.over_refine_factor
+        self.domain_dimensions = np.ones(3, "int32") * nz
+        self.parameters.update(hvals)
+
+    def _set_code_unit_attributes(self):
+        self.length_unit = self.quan(1.0, "cm")
+        self.mass_unit = self.quan(1.0, "g")
+        self.velocity_unit = self.quan(1.0, "cm / s")
+        self.time_unit = self.quan(1.0, "s")
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        if not args[0].endswith(".h5"): return False
+        with h5py.File(args[0], "r") as f:
+            if "data_type" in f.attrs and \
+              f.attrs["data_type"] == "halo_catalog":
+                return True
+        return False
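
Everything _parse_parameter_file reads comes from the root attributes of the .h5 file, and _is_valid keys on the data_type attribute alone. A sketch of a minimal file that would pass both checks; all values here are placeholders, not a real catalog:

    import h5py
    import numpy as np

    with h5py.File("halos_0.0.h5", "w") as f:
        f.attrs["data_type"] = "halo_catalog"
        f.attrs["num_halos"] = 0  # read later by the IO handler
        f.attrs["cosmological_simulation"] = 1
        f.attrs["current_time"] = 0.0
        f.attrs["current_redshift"] = 0.0
        f.attrs["hubble_constant"] = 0.7
        f.attrs["omega_matter"] = 0.3
        f.attrs["omega_lambda"] = 0.7
        f.attrs["domain_left_edge"] = np.zeros(3)
        f.attrs["domain_right_edge"] = np.ones(3)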

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/halo_catalog/fields.py
--- /dev/null
+++ b/yt/frontends/halo_catalog/fields.py
@@ -0,0 +1,48 @@
+"""
+HaloCatalog-specific fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+
+from yt.funcs import mylog
+from yt.fields.field_info_container import \
+    FieldInfoContainer
+from yt.units.yt_array import \
+    YTArray
+
+from yt.utilities.physical_constants import \
+    mh, \
+    mass_sun_cgs
+
+m_units = "g"
+p_units = "cm"
+v_units = "cm / s"
+r_units = "cm"
+
+class HaloCatalogFieldInfo(FieldInfoContainer):
+    known_other_fields = (
+    )
+
+    known_particle_fields = (
+        ("particle_identifier", ("", [], None)),
+        ("particle_position_x", (p_units, [], None)),
+        ("particle_position_y", (p_units, [], None)),
+        ("particle_position_z", (p_units, [], None)),
+        ("particle_velocity_x", (v_units, [], None)),
+        ("particle_velocity_y", (v_units, [], None)),
+        ("particle_velocity_z", (v_units, [], None)),
+        ("particle_mass", (m_units, [], "Virial Mass")),
+        ("virial_radius", (r_units, [], "Virial Radius")),
+    )
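
Each entry in known_particle_fields unpacks as (name, (units, aliases, display_name)), where an empty units string marks a dimensionless field and display_name feeds plot labels. A quick illustration of the shape, using two entries from the table above:

    fields = (
        ("particle_mass", ("g", [], "Virial Mass")),
        ("particle_identifier", ("", [], None)),
    )
    for name, (units, aliases, display_name) in fields:
        print((name, units, display_name))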

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/halo_catalog/io.py
--- /dev/null
+++ b/yt/frontends/halo_catalog/io.py
@@ -0,0 +1,119 @@
+"""
+HaloCatalog data-file handling functions
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+import numpy as np
+
+from yt.utilities.exceptions import YTDomainOverflow
+from yt.funcs import mylog
+
+from yt.utilities.io_handler import \
+    BaseIOHandler
+
+from yt.utilities.lib.geometry_utils import compute_morton
+
+from yt.geometry.oct_container import _ORDER_MAX
+
+class IOHandlerHaloCatalogHDF5(BaseIOHandler):
+    _dataset_type = "halocatalog_hdf5"
+
+    def _read_fluid_selection(self, chunks, selector, fields, size):
+        raise NotImplementedError
+
+    def _read_particle_coords(self, chunks, ptf):
+        # This will read chunks and yield the results.
+        chunks = list(chunks)
+        data_files = set([])
+        # Only support halo reading for now.
+        assert(len(ptf) == 1)
+        assert(ptf.keys()[0] == "halos")
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in sorted(data_files):
+            pcount = data_file.header['num_halos']
+            with h5py.File(data_file.filename, "r") as f:
+                x = f['particle_position_x'].value.astype("float64")
+                y = f['particle_position_y'].value.astype("float64")
+                z = f['particle_position_z'].value.astype("float64")
+                yield "halos", (x, y, z)
+
+    def _read_particle_fields(self, chunks, ptf, selector):
+        # Now we have all the sizes, and we can allocate
+        chunks = list(chunks)
+        data_files = set([])
+        # Only support halo reading for now.
+        assert(len(ptf) == 1)
+        assert(ptf.keys()[0] == "halos")
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in sorted(data_files):
+            pcount = data_file.header['num_halos']
+            with h5py.File(data_file.filename, "r") as f:
+                for ptype, field_list in sorted(ptf.items()):
+                    x = f['particle_position_x'].value.astype("float64")
+                    y = f['particle_position_y'].value.astype("float64")
+                    z = f['particle_position_z'].value.astype("float64")
+                    mask = selector.select_points(x, y, z, 0.0)
+                    del x, y, z
+                    if mask is None: continue
+                    for field in field_list:
+                        data = f[field][mask].astype("float64")
+                        yield (ptype, field), data
+
+    def _initialize_index(self, data_file, regions):
+        pcount = data_file.header["num_halos"]
+        morton = np.empty(pcount, dtype='uint64')
+        mylog.debug("Initializing index % 5i (% 7i particles)",
+                    data_file.file_id, pcount)
+        ind = 0
+        with h5py.File(data_file.filename, "r") as f:
+            if not f.keys(): return None
+            pos = np.empty((pcount, 3), dtype="float64")
+            pos = data_file.ds.arr(pos, "code_length")
+            dx = np.finfo(f['particle_position_x'].dtype).eps
+            dx = 2.0*self.ds.quan(dx, "code_length")
+            pos[:,0] = f["particle_position_x"].value
+            pos[:,1] = f["particle_position_y"].value
+            pos[:,2] = f["particle_position_z"].value
+            # These are 32 bit numbers, so we give a little lee-way.
+            # Otherwise, for big sets of particles, we often will bump into the
+            # domain edges.  This helps alleviate that.
+            np.clip(pos, self.ds.domain_left_edge + dx,
+                         self.ds.domain_right_edge - dx, pos)
+            if np.any(pos.min(axis=0) < self.ds.domain_left_edge) or \
+               np.any(pos.max(axis=0) > self.ds.domain_right_edge):
+                raise YTDomainOverflow(pos.min(axis=0),
+                                       pos.max(axis=0),
+                                       self.ds.domain_left_edge,
+                                       self.ds.domain_right_edge)
+            regions.add_data_file(pos, data_file.file_id)
+            morton[ind:ind+pos.shape[0]] = compute_morton(
+                pos[:,0], pos[:,1], pos[:,2],
+                data_file.ds.domain_left_edge,
+                data_file.ds.domain_right_edge)
+        return morton
+
+    def _count_particles(self, data_file):
+        return {'halos': data_file.header['num_halos']}
+
+    def _identify_fields(self, data_file):
+        with h5py.File(data_file.filename, "r") as f:
+            fields = [("halos", field) for field in f]
+            units = dict([(("halos", field), 
+                           f[field].attrs["units"]) for field in f])
+        return fields, units
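
_identify_fields expects every dataset in the file to carry a "units" attribute, and the readers expect 1D per-halo datasets named like the fields in HaloCatalogFieldInfo. A sketch of adding a single halo to the placeholder file from the data_structures example, following those conventions:

    import h5py
    import numpy as np

    with h5py.File("halos_0.0.h5", "a") as f:
        f.attrs["num_halos"] = 1
        for ax in "xyz":
            d = f.create_dataset("particle_position_" + ax,
                                 data=np.array([0.5]))
            d.attrs["units"] = "cm"
        d = f.create_dataset("particle_mass", data=np.array([1.0e14]))
        d.attrs["units"] = "g"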

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/halo_catalog/setup.py
--- /dev/null
+++ b/yt/frontends/halo_catalog/setup.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+import setuptools
+import os
+import sys
+import os.path
+
+
+def configuration(parent_package='', top_path=None):
+    from numpy.distutils.misc_util import Configuration
+    config = Configuration('halo_catalog', parent_package, top_path)
+    config.make_config_py()  # installs __config__.py
+    #config.make_svn_version_py()
+    return config

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/halo_catalogs/__init__.py
--- a/yt/frontends/halo_catalogs/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-"""
-API for halo catalog frontends.
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/halo_catalogs/api.py
--- a/yt/frontends/halo_catalogs/api.py
+++ /dev/null
@@ -1,30 +0,0 @@
-"""
-API for yt.frontends.halo_catalogs
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from .halo_catalog.api import \
-     HaloCatalogDataset, \
-     IOHandlerHaloCatalogHDF5, \
-     HaloCatalogFieldInfo
-
-from .rockstar.api import \
-      RockstarDataset, \
-      IOHandlerRockstarBinary, \
-      RockstarFieldInfo
-
-from .owls_subfind.api import \
-     OWLSSubfindDataset, \
-     IOHandlerOWLSSubfindHDF5, \
-     OWLSSubfindFieldInfo

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/halo_catalogs/halo_catalog/__init__.py
--- a/yt/frontends/halo_catalogs/halo_catalog/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-"""
-API for HaloCatalog frontend.
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/halo_catalogs/halo_catalog/api.py
--- a/yt/frontends/halo_catalogs/halo_catalog/api.py
+++ /dev/null
@@ -1,24 +0,0 @@
-"""
-API for HaloCatalog frontend
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2014, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from .data_structures import \
-     HaloCatalogDataset
-
-from .io import \
-     IOHandlerHaloCatalogHDF5
-
-from .fields import \
-     HaloCatalogFieldInfo

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/halo_catalogs/halo_catalog/data_structures.py
--- a/yt/frontends/halo_catalogs/halo_catalog/data_structures.py
+++ /dev/null
@@ -1,97 +0,0 @@
-"""
-Data structures for HaloCatalog frontend.
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-import h5py
-import numpy as np
-import stat
-import weakref
-import struct
-import glob
-import time
-import os
-
-from .fields import \
-    HaloCatalogFieldInfo
-
-from yt.utilities.cosmology import Cosmology
-from yt.geometry.particle_geometry_handler import \
-    ParticleIndex
-from yt.data_objects.static_output import \
-    Dataset, \
-    ParticleFile
-import yt.utilities.fortran_utils as fpu
-from yt.units.yt_array import \
-    YTArray, \
-    YTQuantity
-    
-class HaloCatalogHDF5File(ParticleFile):
-    def __init__(self, ds, io, filename, file_id):
-        with h5py.File(filename, "r") as f:
-            self.header = dict((field, f.attrs[field]) \
-                               for field in f.attrs.keys())
-
-        super(HaloCatalogHDF5File, self).__init__(ds, io, filename, file_id)
-    
-class HaloCatalogDataset(Dataset):
-    _index_class = ParticleIndex
-    _file_class = HaloCatalogHDF5File
-    _field_info_class = HaloCatalogFieldInfo
-    _suffix = ".h5"
-
-    def __init__(self, filename, dataset_type="halocatalog_hdf5",
-                 n_ref = 16, over_refine_factor = 1, units_override=None):
-        self.n_ref = n_ref
-        self.over_refine_factor = over_refine_factor
-        super(HaloCatalogDataset, self).__init__(filename, dataset_type,
-                                                 units_override=units_override)
-
-    def _parse_parameter_file(self):
-        with h5py.File(self.parameter_filename, "r") as f:
-            hvals = dict((key, f.attrs[key]) for key in f.attrs.keys())
-        self.dimensionality = 3
-        self.refine_by = 2
-        self.unique_identifier = \
-            int(os.stat(self.parameter_filename)[stat.ST_CTIME])
-        prefix = ".".join(self.parameter_filename.rsplit(".", 2)[:-2])
-        self.filename_template = "%s.%%(num)s%s" % (prefix, self._suffix)
-        self.file_count = len(glob.glob(prefix + "*" + self._suffix))
-
-        for attr in ["cosmological_simulation", "current_time", "current_redshift",
-                     "hubble_constant", "omega_matter", "omega_lambda",
-                     "domain_left_edge", "domain_right_edge"]:
-            setattr(self, attr, hvals[attr])
-        self.periodicity = (True, True, True)
-        self.particle_types = ("halos")
-        self.particle_types_raw = ("halos")
-
-        nz = 1 << self.over_refine_factor
-        self.domain_dimensions = np.ones(3, "int32") * nz
-        self.parameters.update(hvals)
-
-    def _set_code_unit_attributes(self):
-        self.length_unit = self.quan(1.0, "cm")
-        self.mass_unit = self.quan(1.0, "g")
-        self.velocity_unit = self.quan(1.0, "cm / s")
-        self.time_unit = self.quan(1.0, "s")
-
-    @classmethod
-    def _is_valid(self, *args, **kwargs):
-        if not args[0].endswith(".h5"): return False
-        with h5py.File(args[0], "r") as f:
-            if "data_type" in f.attrs and \
-              f.attrs["data_type"] == "halo_catalog":
-                return True
-        return False

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/halo_catalogs/halo_catalog/fields.py
--- a/yt/frontends/halo_catalogs/halo_catalog/fields.py
+++ /dev/null
@@ -1,48 +0,0 @@
-"""
-HaloCatalog-specific fields
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-import numpy as np
-
-from yt.funcs import mylog
-from yt.fields.field_info_container import \
-    FieldInfoContainer
-from yt.units.yt_array import \
-    YTArray
-
-from yt.utilities.physical_constants import \
-    mh, \
-    mass_sun_cgs
-
-m_units = "g"
-p_units = "cm"
-v_units = "cm / s"
-r_units = "cm"
-
-class HaloCatalogFieldInfo(FieldInfoContainer):
-    known_other_fields = (
-    )
-
-    known_particle_fields = (
-        ("particle_identifier", ("", [], None)),
-        ("particle_position_x", (p_units, [], None)),
-        ("particle_position_y", (p_units, [], None)),
-        ("particle_position_z", (p_units, [], None)),
-        ("particle_velocity_x", (v_units, [], None)),
-        ("particle_velocity_y", (v_units, [], None)),
-        ("particle_velocity_z", (v_units, [], None)),
-        ("particle_mass", (m_units, [], "Virial Mass")),
-        ("virial_radius", (r_units, [], "Virial Radius")),
-)

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/halo_catalogs/halo_catalog/io.py
--- a/yt/frontends/halo_catalogs/halo_catalog/io.py
+++ /dev/null
@@ -1,119 +0,0 @@
-"""
-HaloCatalog data-file handling function
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-import h5py
-import numpy as np
-
-from yt.utilities.exceptions import *
-from yt.funcs import mylog
-
-from yt.utilities.io_handler import \
-    BaseIOHandler
-
-from yt.utilities.lib.geometry_utils import compute_morton
-
-from yt.geometry.oct_container import _ORDER_MAX
-
-class IOHandlerHaloCatalogHDF5(BaseIOHandler):
-    _dataset_type = "halocatalog_hdf5"
-
-    def _read_fluid_selection(self, chunks, selector, fields, size):
-        raise NotImplementedError
-
-    def _read_particle_coords(self, chunks, ptf):
-        # This will read chunks and yield the results.
-        chunks = list(chunks)
-        data_files = set([])
-        # Only support halo reading for now.
-        assert(len(ptf) == 1)
-        assert(ptf.keys()[0] == "halos")
-        for chunk in chunks:
-            for obj in chunk.objs:
-                data_files.update(obj.data_files)
-        for data_file in sorted(data_files):
-            pcount = data_file.header['num_halos']
-            with h5py.File(data_file.filename, "r") as f:
-                x = f['particle_position_x'].value.astype("float64")
-                y = f['particle_position_y'].value.astype("float64")
-                z = f['particle_position_z'].value.astype("float64")
-                yield "halos", (x, y, z)
-
-    def _read_particle_fields(self, chunks, ptf, selector):
-        # Now we have all the sizes, and we can allocate
-        chunks = list(chunks)
-        data_files = set([])
-        # Only support halo reading for now.
-        assert(len(ptf) == 1)
-        assert(ptf.keys()[0] == "halos")
-        for chunk in chunks:
-            for obj in chunk.objs:
-                data_files.update(obj.data_files)
-        for data_file in sorted(data_files):
-            pcount = data_file.header['num_halos']
-            with h5py.File(data_file.filename, "r") as f:
-                for ptype, field_list in sorted(ptf.items()):
-                    x = f['particle_position_x'].value.astype("float64")
-                    y = f['particle_position_y'].value.astype("float64")
-                    z = f['particle_position_z'].value.astype("float64")
-                    mask = selector.select_points(x, y, z, 0.0)
-                    del x, y, z
-                    if mask is None: continue
-                    for field in field_list:
-                        data = f[field][mask].astype("float64")
-                        yield (ptype, field), data
-
-    def _initialize_index(self, data_file, regions):
-        pcount = data_file.header["num_halos"]
-        morton = np.empty(pcount, dtype='uint64')
-        mylog.debug("Initializing index % 5i (% 7i particles)",
-                    data_file.file_id, pcount)
-        ind = 0
-        with h5py.File(data_file.filename, "r") as f:
-            if not f.keys(): return None
-            pos = np.empty((pcount, 3), dtype="float64")
-            pos = data_file.ds.arr(pos, "code_length")
-            dx = np.finfo(f['particle_position_x'].dtype).eps
-            dx = 2.0*self.ds.quan(dx, "code_length")
-            pos[:,0] = f["particle_position_x"].value
-            pos[:,1] = f["particle_position_y"].value
-            pos[:,2] = f["particle_position_z"].value
-            # These are 32 bit numbers, so we give a little lee-way.
-            # Otherwise, for big sets of particles, we often will bump into the
-            # domain edges.  This helps alleviate that.
-            np.clip(pos, self.ds.domain_left_edge + dx,
-                         self.ds.domain_right_edge - dx, pos)
-            if np.any(pos.min(axis=0) < self.ds.domain_left_edge) or \
-               np.any(pos.max(axis=0) > self.ds.domain_right_edge):
-                raise YTDomainOverflow(pos.min(axis=0),
-                                       pos.max(axis=0),
-                                       self.ds.domain_left_edge,
-                                       self.ds.domain_right_edge)
-            regions.add_data_file(pos, data_file.file_id)
-            morton[ind:ind+pos.shape[0]] = compute_morton(
-                pos[:,0], pos[:,1], pos[:,2],
-                data_file.ds.domain_left_edge,
-                data_file.ds.domain_right_edge)
-        return morton
-
-    def _count_particles(self, data_file):
-        return {'halos': data_file.header['num_halos']}
-
-    def _identify_fields(self, data_file):
-        with h5py.File(data_file.filename, "r") as f:
-            fields = [("halos", field) for field in f]
-            units = dict([(("halos", field), 
-                           f[field].attrs["units"]) for field in f])
-        return fields, units

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/halo_catalogs/owls_subfind/__init__.py
--- a/yt/frontends/halo_catalogs/owls_subfind/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-"""
-API for HaloCatalog frontend.
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------

diff -r 4c67e2a36f2c6f008a8360b7f7a3a4dd1dc04e28 -r b4e9e508e03145fd0788ff6514c1ab9c6f029fae yt/frontends/halo_catalogs/owls_subfind/api.py
--- a/yt/frontends/halo_catalogs/owls_subfind/api.py
+++ /dev/null
@@ -1,24 +0,0 @@
-"""
-API for OWLSSubfind frontend
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2014, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from .data_structures import \
-     OWLSSubfindDataset
-
-from .io import \
-     IOHandlerOWLSSubfindHDF5
-
-from .fields import \
-     OWLSSubfindFieldInfo

This diff is so big that we needed to truncate the remainder.

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.



More information about the yt-svn mailing list