[yt-svn] commit/yt: 29 new changesets

commits-noreply at bitbucket.org
Thu May 19 10:43:51 PDT 2016


29 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/ef3ad4d78214/
Changeset:   ef3ad4d78214
Branch:      yt
User:        brittonsmith
Date:        2016-04-11 16:17:52+00:00
Summary:     Initial import.
Affected #:  3 files

diff -r 248ccc5ef8b46d83682344d249c8de1c964759ab -r ef3ad4d78214ad3b5479bff80928e2f88ebcc788 yt_gizmo/__init__.py
--- /dev/null
+++ b/yt_gizmo/__init__.py
@@ -0,0 +1,26 @@
+"""
+API for gadget_fof_plus frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2016, Britton Smith.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.funcs import \
+    mylog
+
+mylog.warn("GizmoDataset dataset overrides GadgetHDF5Dataset.")
+
+from .data_structures import \
+    GizmoDataset
+
+from .fields import \
+    GizmoFieldInfo

diff -r 248ccc5ef8b46d83682344d249c8de1c964759ab -r ef3ad4d78214ad3b5479bff80928e2f88ebcc788 yt_gizmo/data_structures.py
--- /dev/null
+++ b/yt_gizmo/data_structures.py
@@ -0,0 +1,29 @@
+"""
+Data structures for gizmo frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2016, Britton Smith.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.data_objects.static_output import \
+    ParticleFile
+from yt.frontends.gadget.data_structures import \
+    GadgetHDF5Dataset
+
+from .fields import \
+    GizmoFieldInfo
+
+class GizmoDataset(GadgetHDF5Dataset):
+    _file_class = ParticleFile
+    _field_info_class = GizmoFieldInfo
+    _particle_mass_name = "Masses"
+    _suffix = ".hdf5"

diff -r 248ccc5ef8b46d83682344d249c8de1c964759ab -r ef3ad4d78214ad3b5479bff80928e2f88ebcc788 yt_gizmo/fields.py
--- /dev/null
+++ b/yt_gizmo/fields.py
@@ -0,0 +1,28 @@
+"""
+Gizmo-specific fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2016, Britton Smith.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.fields.species_fields import \
+    add_species_field_by_fraction
+from yt.frontends.gadget.fields import \
+    GadgetFieldInfo
+
+class GizmoFieldInfo(GadgetFieldInfo):
+    def setup_gas_particle_fields(self, ptype):
+        super(GizmoFieldInfo, self).setup_gas_particle_fields(ptype)
+        self.alias((ptype, "temperature"), (ptype, "Temperature"))
+
+        self.alias((ptype, "H_fraction"), (ptype, "NeutralHydrogenAbundance"))
+        add_species_field_by_fraction(self, ptype, "H", particle_type=True)


https://bitbucket.org/yt_analysis/yt/commits/e911b118b8af/
Changeset:   e911b118b8af
Branch:      yt
User:        brittonsmith
Date:        2016-04-11 16:35:26+00:00
Summary:     Adding HI and HII density fields.
Affected #:  1 file

diff -r ef3ad4d78214ad3b5479bff80928e2f88ebcc788 -r e911b118b8af5ed38636b703b27717473cb7452f yt_gizmo/fields.py
--- a/yt_gizmo/fields.py
+++ b/yt_gizmo/fields.py
@@ -15,7 +15,7 @@
 #-----------------------------------------------------------------------------
 
 from yt.fields.species_fields import \
-    add_species_field_by_fraction
+    add_species_field_by_density
 from yt.frontends.gadget.fields import \
     GadgetFieldInfo
 
@@ -24,5 +24,28 @@
         super(GizmoFieldInfo, self).setup_gas_particle_fields(ptype)
         self.alias((ptype, "temperature"), (ptype, "Temperature"))
 
-        self.alias((ptype, "H_fraction"), (ptype, "NeutralHydrogenAbundance"))
-        add_species_field_by_fraction(self, ptype, "H", particle_type=True)
+        def _h_density(field, data):
+            x_H = 1.0 - data[(ptype, "He_fraction")] - \
+              data[(ptype, "metallicity")]
+            return x_H * data[(ptype, "density")] * \
+              data[(ptype, "NeutralHydrogenAbundance")]
+
+        self.add_field(
+            (ptype, "H_density"),
+            function=_h_density,
+            particle_type=True,
+            units=self.ds.unit_system["density"])
+        add_species_field_by_density(self, ptype, "H", particle_type=True)
+
+        def _h_p1_density(field, data):
+            x_H = 1.0 - data[(ptype, "He_fraction")] - \
+              data[(ptype, "metallicity")]
+            return x_H * data[(ptype, "density")] * \
+              (1.0 - data[(ptype, "NeutralHydrogenAbundance")])
+
+        self.add_field(
+            (ptype, "H_p1_density"),
+            function=_h_p1_density,
+            particle_type=True,
+            units=self.ds.unit_system["density"])
+        add_species_field_by_density(self, ptype, "H_p1", particle_type=True)

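The two derived fields above compute neutral ("H_density") and ionized ("H_p1_density") hydrogen mass densities from the helium mass fraction, the total metallicity, and NeutralHydrogenAbundance. A minimal standalone sketch of the same arithmetic, using hypothetical NumPy arrays in place of yt data objects:

    import numpy as np

    # Hypothetical per-particle arrays standing in for the yt fields used above.
    he_fraction = np.array([0.27, 0.28])      # (ptype, "He_fraction")
    metallicity = np.array([0.01, 0.02])      # (ptype, "metallicity")
    density = np.array([1.0e-24, 2.0e-24])    # (ptype, "density"), in g/cm**3
    neutral = np.array([0.9, 0.1])            # (ptype, "NeutralHydrogenAbundance")

    x_H = 1.0 - he_fraction - metallicity              # hydrogen mass fraction
    h_p0_density = x_H * density * neutral             # neutral hydrogen ("H_density")
    h_p1_density = x_H * density * (1.0 - neutral)     # ionized hydrogen ("H_p1_density")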

https://bitbucket.org/yt_analysis/yt/commits/5c3acafec7cb/
Changeset:   5c3acafec7cb
Branch:      yt
User:        brittonsmith
Date:        2016-04-12 14:41:51+00:00
Summary:     Renaming as FIRE frontend.
Affected #:  6 files

diff -r e911b118b8af5ed38636b703b27717473cb7452f -r 5c3acafec7cb7062fa7052182c4341bc6418852e yt_fire/__init__.py
--- /dev/null
+++ b/yt_fire/__init__.py
@@ -0,0 +1,24 @@
+"""
+API for FIRE frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2016, Britton Smith.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.funcs import \
+    mylog
+
+from .data_structures import \
+    FIREDataset
+
+from .fields import \
+    FIREFieldInfo

diff -r e911b118b8af5ed38636b703b27717473cb7452f -r 5c3acafec7cb7062fa7052182c4341bc6418852e yt_fire/data_structures.py
--- /dev/null
+++ b/yt_fire/data_structures.py
@@ -0,0 +1,29 @@
+"""
+Data structures for FIRE frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2016, Britton Smith.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.data_objects.static_output import \
+    ParticleFile
+from yt.frontends.gadget.data_structures import \
+    GadgetHDF5Dataset
+
+from .fields import \
+    FIREFieldInfo
+
+class FIREDataset(GadgetHDF5Dataset):
+    _file_class = ParticleFile
+    _field_info_class = FIREFieldInfo
+    _particle_mass_name = "Masses"
+    _suffix = ".hdf5"

diff -r e911b118b8af5ed38636b703b27717473cb7452f -r 5c3acafec7cb7062fa7052182c4341bc6418852e yt_fire/fields.py
--- /dev/null
+++ b/yt_fire/fields.py
@@ -0,0 +1,51 @@
+"""
+FIRE-specific fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2016, Britton Smith.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.fields.species_fields import \
+    add_species_field_by_density
+from yt.frontends.gadget.fields import \
+    GadgetFieldInfo
+
+class FIREFieldInfo(GadgetFieldInfo):
+    def setup_gas_particle_fields(self, ptype):
+        super(FIREFieldInfo, self).setup_gas_particle_fields(ptype)
+        self.alias((ptype, "temperature"), (ptype, "Temperature"))
+
+        def _h_density(field, data):
+            x_H = 1.0 - data[(ptype, "He_fraction")] - \
+              data[(ptype, "metallicity")]
+            return x_H * data[(ptype, "density")] * \
+              data[(ptype, "NeutralHydrogenAbundance")]
+
+        self.add_field(
+            (ptype, "H_density"),
+            function=_h_density,
+            particle_type=True,
+            units=self.ds.unit_system["density"])
+        add_species_field_by_density(self, ptype, "H", particle_type=True)
+
+        def _h_p1_density(field, data):
+            x_H = 1.0 - data[(ptype, "He_fraction")] - \
+              data[(ptype, "metallicity")]
+            return x_H * data[(ptype, "density")] * \
+              (1.0 - data[(ptype, "NeutralHydrogenAbundance")])
+
+        self.add_field(
+            (ptype, "H_p1_density"),
+            function=_h_p1_density,
+            particle_type=True,
+            units=self.ds.unit_system["density"])
+        add_species_field_by_density(self, ptype, "H_p1", particle_type=True)

diff -r e911b118b8af5ed38636b703b27717473cb7452f -r 5c3acafec7cb7062fa7052182c4341bc6418852e yt_gizmo/__init__.py
--- a/yt_gizmo/__init__.py
+++ /dev/null
@@ -1,26 +0,0 @@
-"""
-API for gadget_fof_plus frontend.
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2016, Britton Smith.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from yt.funcs import \
-    mylog
-
-mylog.warn("GizmoDataset dataset overrides GadgetHDF5Dataset.")
-
-from .data_structures import \
-    GizmoDataset
-
-from .fields import \
-    GizmoFieldInfo

diff -r e911b118b8af5ed38636b703b27717473cb7452f -r 5c3acafec7cb7062fa7052182c4341bc6418852e yt_gizmo/data_structures.py
--- a/yt_gizmo/data_structures.py
+++ /dev/null
@@ -1,29 +0,0 @@
-"""
-Data structures for gizmo frontend.
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2016, Britton Smith.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from yt.data_objects.static_output import \
-    ParticleFile
-from yt.frontends.gadget.data_structures import \
-    GadgetHDF5Dataset
-
-from .fields import \
-    GizmoFieldInfo
-
-class GizmoDataset(GadgetHDF5Dataset):
-    _file_class = ParticleFile
-    _field_info_class = GizmoFieldInfo
-    _particle_mass_name = "Masses"
-    _suffix = ".hdf5"

diff -r e911b118b8af5ed38636b703b27717473cb7452f -r 5c3acafec7cb7062fa7052182c4341bc6418852e yt_gizmo/fields.py
--- a/yt_gizmo/fields.py
+++ /dev/null
@@ -1,51 +0,0 @@
-"""
-Gizmo-specific fields
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2016, Britton Smith.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from yt.fields.species_fields import \
-    add_species_field_by_density
-from yt.frontends.gadget.fields import \
-    GadgetFieldInfo
-
-class GizmoFieldInfo(GadgetFieldInfo):
-    def setup_gas_particle_fields(self, ptype):
-        super(GizmoFieldInfo, self).setup_gas_particle_fields(ptype)
-        self.alias((ptype, "temperature"), (ptype, "Temperature"))
-
-        def _h_density(field, data):
-            x_H = 1.0 - data[(ptype, "He_fraction")] - \
-              data[(ptype, "metallicity")]
-            return x_H * data[(ptype, "density")] * \
-              data[(ptype, "NeutralHydrogenAbundance")]
-
-        self.add_field(
-            (ptype, "H_density"),
-            function=_h_density,
-            particle_type=True,
-            units=self.ds.unit_system["density"])
-        add_species_field_by_density(self, ptype, "H", particle_type=True)
-
-        def _h_p1_density(field, data):
-            x_H = 1.0 - data[(ptype, "He_fraction")] - \
-              data[(ptype, "metallicity")]
-            return x_H * data[(ptype, "density")] * \
-              (1.0 - data[(ptype, "NeutralHydrogenAbundance")])
-
-        self.add_field(
-            (ptype, "H_p1_density"),
-            function=_h_p1_density,
-            particle_type=True,
-            units=self.ds.unit_system["density"])
-        add_species_field_by_density(self, ptype, "H_p1", particle_type=True)


https://bitbucket.org/yt_analysis/yt/commits/f5341734cf6a/
Changeset:   f5341734cf6a
Branch:      yt
User:        brittonsmith
Date:        2016-04-12 15:03:53+00:00
Summary:     Adding p0 aliases.
Affected #:  1 file

diff -r 5c3acafec7cb7062fa7052182c4341bc6418852e -r f5341734cf6abda3ccad9fab032c74329f8e0f3c yt_fire/fields.py
--- a/yt_fire/fields.py
+++ b/yt_fire/fields.py
@@ -36,6 +36,8 @@
             particle_type=True,
             units=self.ds.unit_system["density"])
         add_species_field_by_density(self, ptype, "H", particle_type=True)
+        for suffix in ["density", "fraction", "mass", "number_density"]:
+            self.alias((ptype, "H_%s" % suffix), (ptype, "H_p0_%s" % suffix))
 
         def _h_p1_density(field, data):
             x_H = 1.0 - data[(ptype, "He_fraction")] - \

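As the later "Fix alias call" changeset suggests, the intent of this hunk is for the H_p0_* names to resolve to the H_* fields defined just above. A toy sketch of that mapping (not the yt implementation; "PartType0" is the usual gas particle type here):

    # Toy alias table: each H_p0_* name points back at the corresponding H_* field.
    aliases = {}
    for suffix in ["density", "fraction", "mass", "number_density"]:
        aliases[("PartType0", "H_p0_%s" % suffix)] = ("PartType0", "H_%s" % suffix)

    print(aliases[("PartType0", "H_p0_density")])  # ('PartType0', 'H_density')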

https://bitbucket.org/yt_analysis/yt/commits/8ecf0cf5aa90/
Changeset:   8ecf0cf5aa90
Branch:      yt
User:        brittonsmith
Date:        2016-04-12 15:23:08+00:00
Summary:     Adding is_valid for FIRE.
Affected #:  1 file

diff -r f5341734cf6abda3ccad9fab032c74329f8e0f3c -r 8ecf0cf5aa90e80660ad319a264943b60a45ffb9 yt_fire/data_structures.py
--- a/yt_fire/data_structures.py
+++ b/yt_fire/data_structures.py
@@ -14,6 +14,8 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
+from yt.utilities.on_demand_imports import _h5py as h5py
+
 from yt.data_objects.static_output import \
     ParticleFile
 from yt.frontends.gadget.data_structures import \
@@ -27,3 +29,21 @@
     _field_info_class = FIREFieldInfo
     _particle_mass_name = "Masses"
     _suffix = ".hdf5"
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        need_groups = ['Header']
+        veto_groups = ['FOF', 'Group', 'Subhalo']
+        valid = True
+        try:
+            fh = h5py.File(args[0], mode='r')
+            valid = all(ng in fh["/"] for ng in need_groups) and \
+              not any(vg in fh["/"] for vg in veto_groups)
+            dmetal = "/PartType0/Metallicity"
+            if dmetal not in fh or fh[dmetal].shape[1] not in (11, 17):
+                valid = False
+            fh.close()
+        except:
+            valid = False
+            pass
+        return valid

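The _is_valid check opens the snapshot with h5py, requires a Header group, vetoes the FOF/Group/Subhalo groups used by halo catalogs, and only accepts files whose /PartType0/Metallicity dataset has 11 or 17 columns. A standalone restatement of that logic, assuming "filename" is a hypothetical path to a Gadget-style HDF5 snapshot:

    import h5py

    def looks_like_fire(filename):
        # Hypothetical helper mirroring the _is_valid logic in the hunk above.
        need_groups = ["Header"]
        veto_groups = ["FOF", "Group", "Subhalo"]
        try:
            with h5py.File(filename, mode="r") as fh:
                if not all(g in fh["/"] for g in need_groups):
                    return False
                if any(g in fh["/"] for g in veto_groups):
                    return False
                dmetal = "/PartType0/Metallicity"
                return dmetal in fh and fh[dmetal].shape[1] in (11, 17)
        except (IOError, OSError):
            return False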

https://bitbucket.org/yt_analysis/yt/commits/8cb09ffccb65/
Changeset:   8cb09ffccb65
Branch:      yt
User:        brittonsmith
Date:        2016-04-12 16:17:59+00:00
Summary:     Removing attributes already inherited from GadgetHDF5Dataset.
Affected #:  1 file

diff -r 8ecf0cf5aa90e80660ad319a264943b60a45ffb9 -r 8cb09ffccb65afe79d78e045c8ff224bac1ec87f yt_fire/data_structures.py
--- a/yt_fire/data_structures.py
+++ b/yt_fire/data_structures.py
@@ -16,8 +16,6 @@
 
 from yt.utilities.on_demand_imports import _h5py as h5py
 
-from yt.data_objects.static_output import \
-    ParticleFile
 from yt.frontends.gadget.data_structures import \
     GadgetHDF5Dataset
 
@@ -25,10 +23,7 @@
     FIREFieldInfo
 
 class FIREDataset(GadgetHDF5Dataset):
-    _file_class = ParticleFile
     _field_info_class = FIREFieldInfo
-    _particle_mass_name = "Masses"
-    _suffix = ".hdf5"
 
     @classmethod
     def _is_valid(self, *args, **kwargs):


https://bitbucket.org/yt_analysis/yt/commits/654472b8f929/
Changeset:   654472b8f929
Branch:      yt
User:        brittonsmith
Date:        2016-04-12 16:50:18+00:00
Summary:     Adding nuclei fraction and density fields for metals.
Affected #:  1 file

diff -r 8cb09ffccb65afe79d78e045c8ff224bac1ec87f -r 654472b8f929fd4616108badf40faed2a37701df yt_fire/fields.py
--- a/yt_fire/fields.py
+++ b/yt_fire/fields.py
@@ -18,14 +18,53 @@
     add_species_field_by_density
 from yt.frontends.gadget.fields import \
     GadgetFieldInfo
+from yt.frontends.sph.fields import \
+    SPHFieldInfo
 
 class FIREFieldInfo(GadgetFieldInfo):
+    known_particle_fields = (
+        ("Mass", ("code_mass", ["particle_mass"], None)),
+        ("Masses", ("code_mass", ["particle_mass"], None)),
+        ("Coordinates", ("code_length", ["particle_position"], None)),
+        ("Velocity", ("code_velocity", ["particle_velocity"], None)),
+        ("Velocities", ("code_velocity", ["particle_velocity"], None)),
+        ("ParticleIDs", ("", ["particle_index"], None)),
+        ("InternalEnergy", ("code_velocity ** 2", ["thermal_energy"], None)),
+        ("SmoothingLength", ("code_length", ["smoothing_length"], None)),
+        ("Density", ("code_mass / code_length**3", ["density"], None)),
+        ("MaximumTemperature", ("K", [], None)),
+        ("Temperature", ("K", ["temperature"], None)),
+        ("Epsilon", ("code_length", [], None)),
+        ("Metals", ("code_metallicity", ["metallicity"], None)),
+        ("Metallicity", ("code_metallicity", ["metallicity"], None)),
+        ("Phi", ("code_length", [], None)),
+        ("StarFormationRate", ("Msun / yr", [], None)),
+        ("FormationTime", ("code_time", ["creation_time"], None)),
+        ("Metallicity_00", ("", ["metallicity"], None)),
+        ("Metallicity_01", ("", ["He_nuclei_fraction"], None)),
+        ("Metallicity_02", ("", ["C_nuclei_fraction"], None)),
+        ("Metallicity_03", ("", ["N_nuclei_fraction"], None)),
+        ("Metallicity_04", ("", ["O_nuclei_fraction"], None)),
+        ("Metallicity_05", ("", ["Ne_nuclei_fraction"], None)),
+        ("Metallicity_06", ("", ["Mg_nuclei_fraction"], None)),
+        ("Metallicity_07", ("", ["Si_nuclei_fraction"], None)),
+        ("Metallicity_08", ("", ["S_nuclei_fraction"], None)),
+        ("Metallicity_09", ("", ["Ca_nuclei_fraction"], None)),
+        ("Metallicity_10", ("", ["Fe_nuclei_fraction"], None)),
+    )
+
+    def __init__(self, *args, **kwargs):
+        super(SPHFieldInfo, self).__init__(*args, **kwargs)
+        if ("PartType0", "Metallicity_00") in self.field_list:
+            self.nuclei_names = ["He", "C", "N", "O", "Ne", "Mg", "Si", "S",
+                                 "Ca", "Fe"]
+
     def setup_gas_particle_fields(self, ptype):
         super(FIREFieldInfo, self).setup_gas_particle_fields(ptype)
         self.alias((ptype, "temperature"), (ptype, "Temperature"))
 
         def _h_density(field, data):
-            x_H = 1.0 - data[(ptype, "He_fraction")] - \
+            x_H = 1.0 - data[(ptype, "He_nuclei_fraction")] - \
               data[(ptype, "metallicity")]
             return x_H * data[(ptype, "density")] * \
               data[(ptype, "NeutralHydrogenAbundance")]
@@ -40,7 +79,7 @@
             self.alias((ptype, "H_%s" % suffix), (ptype, "H_p0_%s" % suffix))
 
         def _h_p1_density(field, data):
-            x_H = 1.0 - data[(ptype, "He_fraction")] - \
+            x_H = 1.0 - data[(ptype, "He_nuclei_fraction")] - \
               data[(ptype, "metallicity")]
             return x_H * data[(ptype, "density")] * \
               (1.0 - data[(ptype, "NeutralHydrogenAbundance")])
@@ -51,3 +90,14 @@
             particle_type=True,
             units=self.ds.unit_system["density"])
         add_species_field_by_density(self, ptype, "H_p1", particle_type=True)
+
+        for species in self.nuclei_names:
+            def _nuclei_density_field(field, data):
+                return data[ptype, "density"] * \
+                  data[ptype, "%s_nuclei_fraction" % species]
+
+            self.add_field(
+                (ptype, "%s_nuclei_density" % species),
+                function=_h_p1_density,
+                particle_type=True,
+                units=self.ds.unit_system["density"])

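Two details of the loop above are worth noting: the inner _nuclei_density_field closes over the loop variable species, which Python binds late, and the add_field call actually passes _h_p1_density. The next changeset resolves both by defining a single function that derives the species from the field name. A short illustration of the late-binding behaviour and of the field-name approach (simplified, not the yt API):

    # Late binding: every closure created in the loop sees the final value of "species".
    getters = [lambda: species for species in ["He", "C", "N"]]
    print([g() for g in getters])  # ['N', 'N', 'N']

    # Deriving the species from the field name (as the next changeset does) avoids
    # the problem, because the name is fixed when the field is registered.
    def nuclei_mass_density(field_name, density, metallicity_of):
        species = field_name[:field_name.find("_")]   # "He_nuclei_mass_density" -> "He"
        return density * metallicity_of[species]

    print(nuclei_mass_density("He_nuclei_mass_density", 1.0e-24, {"He": 0.27}))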

https://bitbucket.org/yt_analysis/yt/commits/60b4ae456bd2/
Changeset:   60b4ae456bd2
Branch:      yt
User:        brittonsmith
Date:        2016-04-12 19:18:50+00:00
Summary:     Fixing nuclei mass density fields and changing nuclei fractions to metallicities.
Affected #:  1 file

diff -r 654472b8f929fd4616108badf40faed2a37701df -r 60b4ae456bd2d4fb081ba025c87c394fb8b5e02d yt_fire/fields.py
--- a/yt_fire/fields.py
+++ b/yt_fire/fields.py
@@ -41,16 +41,16 @@
         ("StarFormationRate", ("Msun / yr", [], None)),
         ("FormationTime", ("code_time", ["creation_time"], None)),
         ("Metallicity_00", ("", ["metallicity"], None)),
-        ("Metallicity_01", ("", ["He_nuclei_fraction"], None)),
-        ("Metallicity_02", ("", ["C_nuclei_fraction"], None)),
-        ("Metallicity_03", ("", ["N_nuclei_fraction"], None)),
-        ("Metallicity_04", ("", ["O_nuclei_fraction"], None)),
-        ("Metallicity_05", ("", ["Ne_nuclei_fraction"], None)),
-        ("Metallicity_06", ("", ["Mg_nuclei_fraction"], None)),
-        ("Metallicity_07", ("", ["Si_nuclei_fraction"], None)),
-        ("Metallicity_08", ("", ["S_nuclei_fraction"], None)),
-        ("Metallicity_09", ("", ["Ca_nuclei_fraction"], None)),
-        ("Metallicity_10", ("", ["Fe_nuclei_fraction"], None)),
+        ("Metallicity_01", ("", ["He_metallicity"], None)),
+        ("Metallicity_02", ("", ["C_metallicity"], None)),
+        ("Metallicity_03", ("", ["N_metallicity"], None)),
+        ("Metallicity_04", ("", ["O_metallicity"], None)),
+        ("Metallicity_05", ("", ["Ne_metallicity"], None)),
+        ("Metallicity_06", ("", ["Mg_metallicity"], None)),
+        ("Metallicity_07", ("", ["Si_metallicity"], None)),
+        ("Metallicity_08", ("", ["S_metallicity"], None)),
+        ("Metallicity_09", ("", ["Ca_metallicity"], None)),
+        ("Metallicity_10", ("", ["Fe_metallicity"], None)),
     )
 
     def __init__(self, *args, **kwargs):
@@ -64,7 +64,7 @@
         self.alias((ptype, "temperature"), (ptype, "Temperature"))
 
         def _h_density(field, data):
-            x_H = 1.0 - data[(ptype, "He_nuclei_fraction")] - \
+            x_H = 1.0 - data[(ptype, "He_metallicity")] - \
               data[(ptype, "metallicity")]
             return x_H * data[(ptype, "density")] * \
               data[(ptype, "NeutralHydrogenAbundance")]
@@ -79,7 +79,7 @@
             self.alias((ptype, "H_%s" % suffix), (ptype, "H_p0_%s" % suffix))
 
         def _h_p1_density(field, data):
-            x_H = 1.0 - data[(ptype, "He_nuclei_fraction")] - \
+            x_H = 1.0 - data[(ptype, "He_metallicity")] - \
               data[(ptype, "metallicity")]
             return x_H * data[(ptype, "density")] * \
               (1.0 - data[(ptype, "NeutralHydrogenAbundance")])
@@ -91,13 +91,14 @@
             units=self.ds.unit_system["density"])
         add_species_field_by_density(self, ptype, "H_p1", particle_type=True)
 
+        def _nuclei_mass_density_field(field, data):
+            species = field.name[1][:field.name[1].find("_")]
+            return data[ptype, "density"] * \
+              data[ptype, "%s_metallicity" % species]
+
         for species in self.nuclei_names:
-            def _nuclei_density_field(field, data):
-                return data[ptype, "density"] * \
-                  data[ptype, "%s_nuclei_fraction" % species]
-
             self.add_field(
-                (ptype, "%s_nuclei_density" % species),
-                function=_h_p1_density,
+                (ptype, "%s_nuclei_mass_density" % species),
+                function=_nuclei_mass_density_field,
                 particle_type=True,
                 units=self.ds.unit_system["density"])


https://bitbucket.org/yt_analysis/yt/commits/5ac8198b96b5/
Changeset:   5ac8198b96b5
Branch:      yt
User:        brittonsmith
Date:        2016-04-13 10:59:26+00:00
Summary:     Fix alias call.
Affected #:  1 file

diff -r 60b4ae456bd2d4fb081ba025c87c394fb8b5e02d -r 5ac8198b96b5ff1372198687f850b305a951e5f4 yt_fire/fields.py
--- a/yt_fire/fields.py
+++ b/yt_fire/fields.py
@@ -76,7 +76,7 @@
             units=self.ds.unit_system["density"])
         add_species_field_by_density(self, ptype, "H", particle_type=True)
         for suffix in ["density", "fraction", "mass", "number_density"]:
-            self.alias((ptype, "H_%s" % suffix), (ptype, "H_p0_%s" % suffix))
+            self.alias((ptype, "H_p0_%s" % suffix), (ptype, "H_%s" % suffix))
 
         def _h_p1_density(field, data):
             x_H = 1.0 - data[(ptype, "He_metallicity")] - \


https://bitbucket.org/yt_analysis/yt/commits/aecdcc23193d/
Changeset:   aecdcc23193d
Branch:      yt
User:        brittonsmith
Date:        2016-04-21 10:39:52+00:00
Summary:     Moving files into the correct place.
Affected #:  7 files

diff -r 5ac8198b96b5ff1372198687f850b305a951e5f4 -r aecdcc23193d7d8966b9cb61b3afcf84c8c97887 yt/frontends/fire/api.py
--- /dev/null
+++ b/yt/frontends/fire/api.py
@@ -0,0 +1,21 @@
+"""
+API for FIRE frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2016, yt Development Team
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from .data_structures import \
+    FIREDataset
+
+from .fields import \
+    FIREFieldInfo

diff -r 5ac8198b96b5ff1372198687f850b305a951e5f4 -r aecdcc23193d7d8966b9cb61b3afcf84c8c97887 yt/frontends/fire/data_structures.py
--- /dev/null
+++ b/yt/frontends/fire/data_structures.py
@@ -0,0 +1,44 @@
+"""
+Data structures for FIRE frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2016, Britton Smith.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.utilities.on_demand_imports import _h5py as h5py
+
+from yt.frontends.gadget.data_structures import \
+    GadgetHDF5Dataset
+
+from .fields import \
+    FIREFieldInfo
+
+class FIREDataset(GadgetHDF5Dataset):
+    _field_info_class = FIREFieldInfo
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        need_groups = ['Header']
+        veto_groups = ['FOF', 'Group', 'Subhalo']
+        valid = True
+        try:
+            fh = h5py.File(args[0], mode='r')
+            valid = all(ng in fh["/"] for ng in need_groups) and \
+              not any(vg in fh["/"] for vg in veto_groups)
+            dmetal = "/PartType0/Metallicity"
+            if dmetal not in fh or fh[dmetal].shape[1] not in (11, 17):
+                valid = False
+            fh.close()
+        except:
+            valid = False
+            pass
+        return valid

diff -r 5ac8198b96b5ff1372198687f850b305a951e5f4 -r aecdcc23193d7d8966b9cb61b3afcf84c8c97887 yt/frontends/fire/fields.py
--- /dev/null
+++ b/yt/frontends/fire/fields.py
@@ -0,0 +1,104 @@
+"""
+FIRE-specific fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2016, Britton Smith.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.fields.species_fields import \
+    add_species_field_by_density
+from yt.frontends.gadget.fields import \
+    GadgetFieldInfo
+from yt.frontends.sph.fields import \
+    SPHFieldInfo
+
+class FIREFieldInfo(GadgetFieldInfo):
+    known_particle_fields = (
+        ("Mass", ("code_mass", ["particle_mass"], None)),
+        ("Masses", ("code_mass", ["particle_mass"], None)),
+        ("Coordinates", ("code_length", ["particle_position"], None)),
+        ("Velocity", ("code_velocity", ["particle_velocity"], None)),
+        ("Velocities", ("code_velocity", ["particle_velocity"], None)),
+        ("ParticleIDs", ("", ["particle_index"], None)),
+        ("InternalEnergy", ("code_velocity ** 2", ["thermal_energy"], None)),
+        ("SmoothingLength", ("code_length", ["smoothing_length"], None)),
+        ("Density", ("code_mass / code_length**3", ["density"], None)),
+        ("MaximumTemperature", ("K", [], None)),
+        ("Temperature", ("K", ["temperature"], None)),
+        ("Epsilon", ("code_length", [], None)),
+        ("Metals", ("code_metallicity", ["metallicity"], None)),
+        ("Metallicity", ("code_metallicity", ["metallicity"], None)),
+        ("Phi", ("code_length", [], None)),
+        ("StarFormationRate", ("Msun / yr", [], None)),
+        ("FormationTime", ("code_time", ["creation_time"], None)),
+        ("Metallicity_00", ("", ["metallicity"], None)),
+        ("Metallicity_01", ("", ["He_metallicity"], None)),
+        ("Metallicity_02", ("", ["C_metallicity"], None)),
+        ("Metallicity_03", ("", ["N_metallicity"], None)),
+        ("Metallicity_04", ("", ["O_metallicity"], None)),
+        ("Metallicity_05", ("", ["Ne_metallicity"], None)),
+        ("Metallicity_06", ("", ["Mg_metallicity"], None)),
+        ("Metallicity_07", ("", ["Si_metallicity"], None)),
+        ("Metallicity_08", ("", ["S_metallicity"], None)),
+        ("Metallicity_09", ("", ["Ca_metallicity"], None)),
+        ("Metallicity_10", ("", ["Fe_metallicity"], None)),
+    )
+
+    def __init__(self, *args, **kwargs):
+        super(SPHFieldInfo, self).__init__(*args, **kwargs)
+        if ("PartType0", "Metallicity_00") in self.field_list:
+            self.nuclei_names = ["He", "C", "N", "O", "Ne", "Mg", "Si", "S",
+                                 "Ca", "Fe"]
+
+    def setup_gas_particle_fields(self, ptype):
+        super(FIREFieldInfo, self).setup_gas_particle_fields(ptype)
+        self.alias((ptype, "temperature"), (ptype, "Temperature"))
+
+        def _h_density(field, data):
+            x_H = 1.0 - data[(ptype, "He_metallicity")] - \
+              data[(ptype, "metallicity")]
+            return x_H * data[(ptype, "density")] * \
+              data[(ptype, "NeutralHydrogenAbundance")]
+
+        self.add_field(
+            (ptype, "H_density"),
+            function=_h_density,
+            particle_type=True,
+            units=self.ds.unit_system["density"])
+        add_species_field_by_density(self, ptype, "H", particle_type=True)
+        for suffix in ["density", "fraction", "mass", "number_density"]:
+            self.alias((ptype, "H_p0_%s" % suffix), (ptype, "H_%s" % suffix))
+
+        def _h_p1_density(field, data):
+            x_H = 1.0 - data[(ptype, "He_metallicity")] - \
+              data[(ptype, "metallicity")]
+            return x_H * data[(ptype, "density")] * \
+              (1.0 - data[(ptype, "NeutralHydrogenAbundance")])
+
+        self.add_field(
+            (ptype, "H_p1_density"),
+            function=_h_p1_density,
+            particle_type=True,
+            units=self.ds.unit_system["density"])
+        add_species_field_by_density(self, ptype, "H_p1", particle_type=True)
+
+        def _nuclei_mass_density_field(field, data):
+            species = field.name[1][:field.name[1].find("_")]
+            return data[ptype, "density"] * \
+              data[ptype, "%s_metallicity" % species]
+
+        for species in self.nuclei_names:
+            self.add_field(
+                (ptype, "%s_nuclei_mass_density" % species),
+                function=_nuclei_mass_density_field,
+                particle_type=True,
+                units=self.ds.unit_system["density"])

diff -r 5ac8198b96b5ff1372198687f850b305a951e5f4 -r aecdcc23193d7d8966b9cb61b3afcf84c8c97887 yt_fire/__init__.py
--- a/yt_fire/__init__.py
+++ /dev/null
@@ -1,24 +0,0 @@
-"""
-API for FIRE frontend.
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2016, Britton Smith.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from yt.funcs import \
-    mylog
-
-from .data_structures import \
-    FIREDataset
-
-from .fields import \
-    FIREFieldInfo

diff -r 5ac8198b96b5ff1372198687f850b305a951e5f4 -r aecdcc23193d7d8966b9cb61b3afcf84c8c97887 yt_fire/data_structures.py
--- a/yt_fire/data_structures.py
+++ /dev/null
@@ -1,44 +0,0 @@
-"""
-Data structures for FIRE frontend.
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2016, Britton Smith.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from yt.utilities.on_demand_imports import _h5py as h5py
-
-from yt.frontends.gadget.data_structures import \
-    GadgetHDF5Dataset
-
-from .fields import \
-    FIREFieldInfo
-
-class FIREDataset(GadgetHDF5Dataset):
-    _field_info_class = FIREFieldInfo
-
-    @classmethod
-    def _is_valid(self, *args, **kwargs):
-        need_groups = ['Header']
-        veto_groups = ['FOF', 'Group', 'Subhalo']
-        valid = True
-        try:
-            fh = h5py.File(args[0], mode='r')
-            valid = all(ng in fh["/"] for ng in need_groups) and \
-              not any(vg in fh["/"] for vg in veto_groups)
-            dmetal = "/PartType0/Metallicity"
-            if dmetal not in fh or fh[dmetal].shape[1] not in (11, 17):
-                valid = False
-            fh.close()
-        except:
-            valid = False
-            pass
-        return valid

diff -r 5ac8198b96b5ff1372198687f850b305a951e5f4 -r aecdcc23193d7d8966b9cb61b3afcf84c8c97887 yt_fire/fields.py
--- a/yt_fire/fields.py
+++ /dev/null
@@ -1,104 +0,0 @@
-"""
-FIRE-specific fields
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2016, Britton Smith.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from yt.fields.species_fields import \
-    add_species_field_by_density
-from yt.frontends.gadget.fields import \
-    GadgetFieldInfo
-from yt.frontends.sph.fields import \
-    SPHFieldInfo
-
-class FIREFieldInfo(GadgetFieldInfo):
-    known_particle_fields = (
-        ("Mass", ("code_mass", ["particle_mass"], None)),
-        ("Masses", ("code_mass", ["particle_mass"], None)),
-        ("Coordinates", ("code_length", ["particle_position"], None)),
-        ("Velocity", ("code_velocity", ["particle_velocity"], None)),
-        ("Velocities", ("code_velocity", ["particle_velocity"], None)),
-        ("ParticleIDs", ("", ["particle_index"], None)),
-        ("InternalEnergy", ("code_velocity ** 2", ["thermal_energy"], None)),
-        ("SmoothingLength", ("code_length", ["smoothing_length"], None)),
-        ("Density", ("code_mass / code_length**3", ["density"], None)),
-        ("MaximumTemperature", ("K", [], None)),
-        ("Temperature", ("K", ["temperature"], None)),
-        ("Epsilon", ("code_length", [], None)),
-        ("Metals", ("code_metallicity", ["metallicity"], None)),
-        ("Metallicity", ("code_metallicity", ["metallicity"], None)),
-        ("Phi", ("code_length", [], None)),
-        ("StarFormationRate", ("Msun / yr", [], None)),
-        ("FormationTime", ("code_time", ["creation_time"], None)),
-        ("Metallicity_00", ("", ["metallicity"], None)),
-        ("Metallicity_01", ("", ["He_metallicity"], None)),
-        ("Metallicity_02", ("", ["C_metallicity"], None)),
-        ("Metallicity_03", ("", ["N_metallicity"], None)),
-        ("Metallicity_04", ("", ["O_metallicity"], None)),
-        ("Metallicity_05", ("", ["Ne_metallicity"], None)),
-        ("Metallicity_06", ("", ["Mg_metallicity"], None)),
-        ("Metallicity_07", ("", ["Si_metallicity"], None)),
-        ("Metallicity_08", ("", ["S_metallicity"], None)),
-        ("Metallicity_09", ("", ["Ca_metallicity"], None)),
-        ("Metallicity_10", ("", ["Fe_metallicity"], None)),
-    )
-
-    def __init__(self, *args, **kwargs):
-        super(SPHFieldInfo, self).__init__(*args, **kwargs)
-        if ("PartType0", "Metallicity_00") in self.field_list:
-            self.nuclei_names = ["He", "C", "N", "O", "Ne", "Mg", "Si", "S",
-                                 "Ca", "Fe"]
-
-    def setup_gas_particle_fields(self, ptype):
-        super(FIREFieldInfo, self).setup_gas_particle_fields(ptype)
-        self.alias((ptype, "temperature"), (ptype, "Temperature"))
-
-        def _h_density(field, data):
-            x_H = 1.0 - data[(ptype, "He_metallicity")] - \
-              data[(ptype, "metallicity")]
-            return x_H * data[(ptype, "density")] * \
-              data[(ptype, "NeutralHydrogenAbundance")]
-
-        self.add_field(
-            (ptype, "H_density"),
-            function=_h_density,
-            particle_type=True,
-            units=self.ds.unit_system["density"])
-        add_species_field_by_density(self, ptype, "H", particle_type=True)
-        for suffix in ["density", "fraction", "mass", "number_density"]:
-            self.alias((ptype, "H_p0_%s" % suffix), (ptype, "H_%s" % suffix))
-
-        def _h_p1_density(field, data):
-            x_H = 1.0 - data[(ptype, "He_metallicity")] - \
-              data[(ptype, "metallicity")]
-            return x_H * data[(ptype, "density")] * \
-              (1.0 - data[(ptype, "NeutralHydrogenAbundance")])
-
-        self.add_field(
-            (ptype, "H_p1_density"),
-            function=_h_p1_density,
-            particle_type=True,
-            units=self.ds.unit_system["density"])
-        add_species_field_by_density(self, ptype, "H_p1", particle_type=True)
-
-        def _nuclei_mass_density_field(field, data):
-            species = field.name[1][:field.name[1].find("_")]
-            return data[ptype, "density"] * \
-              data[ptype, "%s_metallicity" % species]
-
-        for species in self.nuclei_names:
-            self.add_field(
-                (ptype, "%s_nuclei_mass_density" % species),
-                function=_nuclei_mass_density_field,
-                particle_type=True,
-                units=self.ds.unit_system["density"])


https://bitbucket.org/yt_analysis/yt/commits/b0016b5ad2c0/
Changeset:   b0016b5ad2c0
Branch:      yt
User:        brittonsmith
Date:        2016-04-21 10:44:23+00:00
Summary:     Adding import to frontends api.
Affected #:  1 file

diff -r aecdcc23193d7d8966b9cb61b3afcf84c8c97887 -r b0016b5ad2c0815c68f5fdccf9d775d647bb41ed yt/frontends/api.py
--- a/yt/frontends/api.py
+++ b/yt/frontends/api.py
@@ -25,6 +25,7 @@
     'eagle',
     'enzo',
     'exodus_ii',
+    'fire',
     'fits',
     'flash',
     'gadget',


https://bitbucket.org/yt_analysis/yt/commits/6b45307ae021/
Changeset:   6b45307ae021
Branch:      yt
User:        brittonsmith
Date:        2016-04-21 10:44:34+00:00
Summary:     Updating headers.
Affected #:  2 files

diff -r b0016b5ad2c0815c68f5fdccf9d775d647bb41ed -r 6b45307ae021f9bc7c202e6b16849b1232dfcc47 yt/frontends/fire/data_structures.py
--- a/yt/frontends/fire/data_structures.py
+++ b/yt/frontends/fire/data_structures.py
@@ -7,7 +7,7 @@
 """
 
 #-----------------------------------------------------------------------------
-# Copyright (c) 2016, Britton Smith.
+# Copyright (c) 2016, yt Development Team
 #
 # Distributed under the terms of the Modified BSD License.
 #

diff -r b0016b5ad2c0815c68f5fdccf9d775d647bb41ed -r 6b45307ae021f9bc7c202e6b16849b1232dfcc47 yt/frontends/fire/fields.py
--- a/yt/frontends/fire/fields.py
+++ b/yt/frontends/fire/fields.py
@@ -7,7 +7,7 @@
 """
 
 #-----------------------------------------------------------------------------
-# Copyright (c) 2016, Britton Smith.
+# Copyright (c) 2016, yt Development Team
 #
 # Distributed under the terms of the Modified BSD License.
 #


https://bitbucket.org/yt_analysis/yt/commits/cdbafa074d6f/
Changeset:   cdbafa074d6f
Branch:      yt
User:        brittonsmith
Date:        2016-04-21 10:49:06+00:00
Summary:     Removing old FIRE fields.
Affected #:  1 file

diff -r 6b45307ae021f9bc7c202e6b16849b1232dfcc47 -r cdbafa074d6f3c1e10ca64bd3c037b9ca4f04135 yt/frontends/sph/fields.py
--- a/yt/frontends/sph/fields.py
+++ b/yt/frontends/sph/fields.py
@@ -41,27 +41,8 @@
         ("Phi", ("code_length", [], None)),
         ("StarFormationRate", ("Msun / yr", [], None)),
         ("FormationTime", ("code_time", ["creation_time"], None)),
-        # These are metallicity fields that get discovered for FIRE simulations
-        ("Metallicity_00", ("", ["metallicity"], None)),
-        ("Metallicity_01", ("", ["He_fraction"], None)),
-        ("Metallicity_02", ("", ["C_fraction"], None)),
-        ("Metallicity_03", ("", ["N_fraction"], None)),
-        ("Metallicity_04", ("", ["O_fraction"], None)),
-        ("Metallicity_05", ("", ["Ne_fraction"], None)),
-        ("Metallicity_06", ("", ["Mg_fraction"], None)),
-        ("Metallicity_07", ("", ["Si_fraction"], None)),
-        ("Metallicity_08", ("", ["S_fraction"], None)),
-        ("Metallicity_09", ("", ["Ca_fraction"], None)),
-        ("Metallicity_10", ("", ["Fe_fraction"], None)),
     )
 
-    def __init__(self, *args, **kwargs):
-        super(SPHFieldInfo, self).__init__(*args, **kwargs)
-        # Special case for FIRE
-        if ("PartType0", "Metallicity_00") in self.field_list:
-            self.species_names += ["He", "C", "N", "O", "Ne", "Mg", "Si", "S",
-                "Ca", "Fe"]
-
     def setup_particle_fields(self, ptype, *args, **kwargs):
         super(SPHFieldInfo, self).setup_particle_fields(ptype, *args, **kwargs)
         setup_species_fields(self, ptype)


https://bitbucket.org/yt_analysis/yt/commits/a0fcda2c243f/
Changeset:   a0fcda2c243f
Branch:      yt
User:        chummels
Date:        2016-05-11 17:10:04+00:00
Summary:     Adding docs for loading Gizmo and FIRE data
Affected #:  1 file

diff -r cdbafa074d6f3c1e10ca64bd3c037b9ca4f04135 -r a0fcda2c243f357d141296212368b6dff114a116 doc/source/examining/loading_data.rst
--- a/doc/source/examining/loading_data.rst
+++ b/doc/source/examining/loading_data.rst
@@ -1285,6 +1285,31 @@
 ``bbox``
        The bounding box for the particle positions.
 
+.. _loading-gizmo-data:
+
+Gizmo Data
+----------
+
+Gizmo datasets, including FIRE outputs, can be loaded into yt in the usual 
+manner.  Like other SPH data formats, yt loads Gizmo data as particle fields 
+and then uses smoothing kernels to deposit those fields to an underlying 
+grid structure as spatial fields as described in :ref:`loading-gadget-data`.  
+To load Gizmo datasets using the standard HDF5 output format::
+
+.. code-block:: python
+
+   import yt
+   ds = yt.load("snapshot_600.hdf5")
+
+Because the Gizmo output format is similar to the Gadget format, yt
+may load Gizmo datasets as Gadget depending on the circumstances, but this
+should not pose a problem in most situations.  FIRE outputs will be loaded 
+accordingly due to the number of metallicity fields found (11 or 17).  
+
+For Gizmo outputs written as raw binary outputs, you may have to specify
+a bounding box, field specification, and units as are done for standard 
+Gadget outputs.  See :ref:`loading-gadget-data` for more information.
+
 .. _loading-pyne-data:
 
 Halo Catalog Data

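Once the frontend from the preceding changesets is installed, the derived fields it defines behave like any other particle fields. A hedged usage sketch, assuming a FIRE/Gizmo HDF5 snapshot at a hypothetical path and the field names introduced in the diffs above:

    import yt

    ds = yt.load("snapshot_600.hdf5")   # hypothetical Gizmo/FIRE snapshot path
    ad = ds.all_data()

    print(ad["PartType0", "temperature"])            # aliased from "Temperature"
    print(ad["PartType0", "H_p0_density"])           # neutral hydrogen mass density
    print(ad["PartType0", "He_metallicity"])         # from Metallicity_01
    print(ad["PartType0", "O_nuclei_mass_density"])  # density * O_metallicity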

https://bitbucket.org/yt_analysis/yt/commits/681b490ea35c/
Changeset:   681b490ea35c
Branch:      yt
User:        chummels
Date:        2016-05-11 17:13:34+00:00
Summary:     Adding FIRE and Gizmo to supported codes.
Affected #:  1 file

diff -r a0fcda2c243f357d141296212368b6dff114a116 -r 681b490ea35c278a18f945d27e91160a14fae655 doc/source/reference/code_support.rst
--- a/doc/source/reference/code_support.rst
+++ b/doc/source/reference/code_support.rst
@@ -28,14 +28,18 @@
 +-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
 | Enzo                  |     Y      |     Y     |      Y     |   Y   |    Y     |    Y     |     Y      |   Full   |
 +-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
-| FLASH                 |     Y      |     Y     |      Y     |   Y   |    Y     |    Y     |     Y      |   Full   |
+| FIRE                  |     Y      |     Y     |      Y     |   Y   | Y [#f2]_ |    Y     |     Y      |   Full   |
 +-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
 | FITS                  |     Y      |    N/A    |      Y     |   Y   |    Y     |    Y     |     Y      |   Full   |
-+-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
+++-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
+| FLASH                 |     Y      |     Y     |      Y     |   Y   |    Y     |    Y     |     Y      |   Full   |
+-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
 | Gadget                |     Y      |     Y     |      Y     |   Y   | Y [#f2]_ |    Y     |     Y      |   Full   |
 +-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
 | Gasoline              |     Y      |     Y     |      Y     |   Y   | Y [#f2]_ |    Y     |     Y      |   Full   |
 +-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
+| Gizmo                 |     Y      |     Y     |      Y     |   Y   | Y [#f2]_ |    Y     |     Y      |   Full   |
++-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
 | Grid Data Format (GDF)|     Y      |    N/A    |      Y     |   Y   |    Y     |    Y     |     Y      |   Full   |
 +-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
 | Maestro               |   Y [#f1]_ |     N     |      Y     |   Y   |    Y     |    Y     |     N      | Partial  |


https://bitbucket.org/yt_analysis/yt/commits/63ec9ddb4b76/
Changeset:   63ec9ddb4b76
Branch:      yt
User:        chummels
Date:        2016-05-11 19:00:29+00:00
Summary:     Adding tests for FIRE frontend.
Affected #:  2 files

diff -r 681b490ea35c278a18f945d27e91160a14fae655 -r 63ec9ddb4b7610cdb0b9245dea59aa255dd19b96 yt/frontends/fire/tests/test_outputs.py
--- /dev/null
+++ b/yt/frontends/fire/tests/test_outputs.py
@@ -0,0 +1,48 @@
+"""
+Gadget frontend tests
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2015, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from collections import OrderedDict
+
+from yt.utilities.answer_testing.framework import \
+    data_dir_load, \
+    requires_ds, \
+    sph_answer
+from yt.frontends.fire.api import FIREDataset
+
+FIRE_m12i = 'FIRE_M12i_ref11/snapshot_600.hdf5'
+
+# This maps from field names to weight field names to use for projections
+fields = OrderedDict(
+    [
+        (("gas", "density"), None),
+        (("gas", "temperature"), None),
+        (("gas", "temperature"), ('gas', 'density')),
+        (('gas', 'velocity_magnitude'), None),
+        (("deposit", "all_density"), None),
+        (("deposit", "all_count"), None),
+        (("deposit", "all_cic"), None),
+        (("deposit", "PartType0_density"), None),
+    ]
+)
+
+@requires_ds(FIRE_m12i)
+def test_FireDataset():
+    ds = data_dir_load(FIRE_m12i)
+    assert isinstance(ds, FIREDataset)
+    for test in sph_answer(ds, 'FIRE_m12i', 4786950, fields):
+        test_FireDataset.__name__ = test.description
+        yield test
+    assert False

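One quirk of the fields mapping above: the ("gas", "temperature") key appears twice, and because dict keys are unique only the last, density-weighted entry survives. A quick check of that behaviour:

    from collections import OrderedDict

    fields = OrderedDict([
        (("gas", "temperature"), None),
        (("gas", "temperature"), ("gas", "density")),
    ])
    print(len(fields))                     # 1
    print(fields[("gas", "temperature")])  # ('gas', 'density')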

https://bitbucket.org/yt_analysis/yt/commits/3ce6fdcf725a/
Changeset:   3ce6fdcf725a
Branch:      yt
User:        chummels
Date:        2016-05-11 19:04:08+00:00
Summary:     Moving FIRE frontend to Gizmo frontend
Affected #:  12 files

diff -r 63ec9ddb4b7610cdb0b9245dea59aa255dd19b96 -r 3ce6fdcf725aab87fd7004359cbc69e97cc8902a yt/frontends/fire/api.py
--- a/yt/frontends/fire/api.py
+++ /dev/null
@@ -1,21 +0,0 @@
-"""
-API for FIRE frontend.
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2016, yt Development Team
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from .data_structures import \
-    FIREDataset
-
-from .fields import \
-    FIREFieldInfo

diff -r 63ec9ddb4b7610cdb0b9245dea59aa255dd19b96 -r 3ce6fdcf725aab87fd7004359cbc69e97cc8902a yt/frontends/fire/data_structures.py
--- a/yt/frontends/fire/data_structures.py
+++ /dev/null
@@ -1,44 +0,0 @@
-"""
-Data structures for FIRE frontend.
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2016, yt Development Team
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from yt.utilities.on_demand_imports import _h5py as h5py
-
-from yt.frontends.gadget.data_structures import \
-    GadgetHDF5Dataset
-
-from .fields import \
-    FIREFieldInfo
-
-class FIREDataset(GadgetHDF5Dataset):
-    _field_info_class = FIREFieldInfo
-
-    @classmethod
-    def _is_valid(self, *args, **kwargs):
-        need_groups = ['Header']
-        veto_groups = ['FOF', 'Group', 'Subhalo']
-        valid = True
-        try:
-            fh = h5py.File(args[0], mode='r')
-            valid = all(ng in fh["/"] for ng in need_groups) and \
-              not any(vg in fh["/"] for vg in veto_groups)
-            dmetal = "/PartType0/Metallicity"
-            if dmetal not in fh or fh[dmetal].shape[1] not in (11, 17):
-                valid = False
-            fh.close()
-        except:
-            valid = False
-            pass
-        return valid

diff -r 63ec9ddb4b7610cdb0b9245dea59aa255dd19b96 -r 3ce6fdcf725aab87fd7004359cbc69e97cc8902a yt/frontends/fire/fields.py
--- a/yt/frontends/fire/fields.py
+++ /dev/null
@@ -1,104 +0,0 @@
-"""
-FIRE-specific fields
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2016, yt Development Team
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from yt.fields.species_fields import \
-    add_species_field_by_density
-from yt.frontends.gadget.fields import \
-    GadgetFieldInfo
-from yt.frontends.sph.fields import \
-    SPHFieldInfo
-
-class FIREFieldInfo(GadgetFieldInfo):
-    known_particle_fields = (
-        ("Mass", ("code_mass", ["particle_mass"], None)),
-        ("Masses", ("code_mass", ["particle_mass"], None)),
-        ("Coordinates", ("code_length", ["particle_position"], None)),
-        ("Velocity", ("code_velocity", ["particle_velocity"], None)),
-        ("Velocities", ("code_velocity", ["particle_velocity"], None)),
-        ("ParticleIDs", ("", ["particle_index"], None)),
-        ("InternalEnergy", ("code_velocity ** 2", ["thermal_energy"], None)),
-        ("SmoothingLength", ("code_length", ["smoothing_length"], None)),
-        ("Density", ("code_mass / code_length**3", ["density"], None)),
-        ("MaximumTemperature", ("K", [], None)),
-        ("Temperature", ("K", ["temperature"], None)),
-        ("Epsilon", ("code_length", [], None)),
-        ("Metals", ("code_metallicity", ["metallicity"], None)),
-        ("Metallicity", ("code_metallicity", ["metallicity"], None)),
-        ("Phi", ("code_length", [], None)),
-        ("StarFormationRate", ("Msun / yr", [], None)),
-        ("FormationTime", ("code_time", ["creation_time"], None)),
-        ("Metallicity_00", ("", ["metallicity"], None)),
-        ("Metallicity_01", ("", ["He_metallicity"], None)),
-        ("Metallicity_02", ("", ["C_metallicity"], None)),
-        ("Metallicity_03", ("", ["N_metallicity"], None)),
-        ("Metallicity_04", ("", ["O_metallicity"], None)),
-        ("Metallicity_05", ("", ["Ne_metallicity"], None)),
-        ("Metallicity_06", ("", ["Mg_metallicity"], None)),
-        ("Metallicity_07", ("", ["Si_metallicity"], None)),
-        ("Metallicity_08", ("", ["S_metallicity"], None)),
-        ("Metallicity_09", ("", ["Ca_metallicity"], None)),
-        ("Metallicity_10", ("", ["Fe_metallicity"], None)),
-    )
-
-    def __init__(self, *args, **kwargs):
-        super(SPHFieldInfo, self).__init__(*args, **kwargs)
-        if ("PartType0", "Metallicity_00") in self.field_list:
-            self.nuclei_names = ["He", "C", "N", "O", "Ne", "Mg", "Si", "S",
-                                 "Ca", "Fe"]
-
-    def setup_gas_particle_fields(self, ptype):
-        super(FIREFieldInfo, self).setup_gas_particle_fields(ptype)
-        self.alias((ptype, "temperature"), (ptype, "Temperature"))
-
-        def _h_density(field, data):
-            x_H = 1.0 - data[(ptype, "He_metallicity")] - \
-              data[(ptype, "metallicity")]
-            return x_H * data[(ptype, "density")] * \
-              data[(ptype, "NeutralHydrogenAbundance")]
-
-        self.add_field(
-            (ptype, "H_density"),
-            function=_h_density,
-            particle_type=True,
-            units=self.ds.unit_system["density"])
-        add_species_field_by_density(self, ptype, "H", particle_type=True)
-        for suffix in ["density", "fraction", "mass", "number_density"]:
-            self.alias((ptype, "H_p0_%s" % suffix), (ptype, "H_%s" % suffix))
-
-        def _h_p1_density(field, data):
-            x_H = 1.0 - data[(ptype, "He_metallicity")] - \
-              data[(ptype, "metallicity")]
-            return x_H * data[(ptype, "density")] * \
-              (1.0 - data[(ptype, "NeutralHydrogenAbundance")])
-
-        self.add_field(
-            (ptype, "H_p1_density"),
-            function=_h_p1_density,
-            particle_type=True,
-            units=self.ds.unit_system["density"])
-        add_species_field_by_density(self, ptype, "H_p1", particle_type=True)
-
-        def _nuclei_mass_density_field(field, data):
-            species = field.name[1][:field.name[1].find("_")]
-            return data[ptype, "density"] * \
-              data[ptype, "%s_metallicity" % species]
-
-        for species in self.nuclei_names:
-            self.add_field(
-                (ptype, "%s_nuclei_mass_density" % species),
-                function=_nuclei_mass_density_field,
-                particle_type=True,
-                units=self.ds.unit_system["density"])

diff -r 63ec9ddb4b7610cdb0b9245dea59aa255dd19b96 -r 3ce6fdcf725aab87fd7004359cbc69e97cc8902a yt/frontends/fire/tests/test_outputs.py
--- a/yt/frontends/fire/tests/test_outputs.py
+++ /dev/null
@@ -1,48 +0,0 @@
-"""
-Gadget frontend tests
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2015, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from collections import OrderedDict
-
-from yt.utilities.answer_testing.framework import \
-    data_dir_load, \
-    requires_ds, \
-    sph_answer
-from yt.frontends.fire.api import FIREDataset
-
-FIRE_m12i = 'FIRE_M12i_ref11/snapshot_600.hdf5'
-
-# This maps from field names to weight field names to use for projections
-fields = OrderedDict(
-    [
-        (("gas", "density"), None),
-        (("gas", "temperature"), None),
-        (("gas", "temperature"), ('gas', 'density')),
-        (('gas', 'velocity_magnitude'), None),
-        (("deposit", "all_density"), None),
-        (("deposit", "all_count"), None),
-        (("deposit", "all_cic"), None),
-        (("deposit", "PartType0_density"), None),
-    ]
-)
-
- at requires_ds(FIRE_m12i)
-def test_FireDataset():
-    ds = data_dir_load(FIRE_m12i)
-    assert isinstance(ds, FIREDataset)
-    for test in sph_answer(ds, 'FIRE_m12i', 4786950, fields):
-        test_FireDataset.__name__ = test.description
-        yield test
-    assert False

diff -r 63ec9ddb4b7610cdb0b9245dea59aa255dd19b96 -r 3ce6fdcf725aab87fd7004359cbc69e97cc8902a yt/frontends/gizmo/api.py
--- /dev/null
+++ b/yt/frontends/gizmo/api.py
@@ -0,0 +1,21 @@
+"""
+API for Gizmo frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2016, yt Development Team
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from .data_structures import \
+    GizmoDataset
+
+from .fields import \
+    GizmoFieldInfo
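
For reference, the api module above simply re-exports the frontend's public classes; the resulting import path (the same one used by the tests later in this changeset) is:

.. code-block:: python

   # public entry points exposed by yt/frontends/gizmo/api.py
   from yt.frontends.gizmo.api import GizmoDataset, GizmoFieldInfo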

diff -r 63ec9ddb4b7610cdb0b9245dea59aa255dd19b96 -r 3ce6fdcf725aab87fd7004359cbc69e97cc8902a yt/frontends/gizmo/data_structures.py
--- /dev/null
+++ b/yt/frontends/gizmo/data_structures.py
@@ -0,0 +1,44 @@
+"""
+Data structures for Gizmo frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2016, yt Development Team
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.utilities.on_demand_imports import _h5py as h5py
+
+from yt.frontends.gadget.data_structures import \
+    GadgetHDF5Dataset
+
+from .fields import \
+    GizmoFieldInfo
+
+class GizmoDataset(GadgetHDF5Dataset):
+    _field_info_class = GizmoFieldInfo
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        need_groups = ['Header']
+        veto_groups = ['FOF', 'Group', 'Subhalo']
+        valid = True
+        try:
+            fh = h5py.File(args[0], mode='r')
+            valid = all(ng in fh["/"] for ng in need_groups) and \
+              not any(vg in fh["/"] for vg in veto_groups)
+            dmetal = "/PartType0/Metallicity"
+            if dmetal not in fh or fh[dmetal].shape[1] not in (11, 17):
+                valid = False
+            fh.close()
+        except:
+            valid = False
+            pass
+        return valid
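
A hedged usage sketch, not part of the changeset, of how ``_is_valid`` above gets exercised: ``yt.load`` probes the file and selects ``GizmoDataset`` when the checks pass. The snapshot path is hypothetical.

.. code-block:: python

   import yt

   # _is_valid accepts HDF5 snapshots whose /PartType0/Metallicity dataset
   # has 11 or 17 columns and which contain no FOF/Group/Subhalo groups
   ds = yt.load("snapshot_600.hdf5")  # hypothetical Gizmo snapshot
   print(type(ds))  # expected to be GizmoDataset once this frontend is merged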

diff -r 63ec9ddb4b7610cdb0b9245dea59aa255dd19b96 -r 3ce6fdcf725aab87fd7004359cbc69e97cc8902a yt/frontends/gizmo/fields.py
--- /dev/null
+++ b/yt/frontends/gizmo/fields.py
@@ -0,0 +1,104 @@
+"""
+Gizmo-specific fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2016, yt Development Team
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.fields.species_fields import \
+    add_species_field_by_density
+from yt.frontends.gadget.fields import \
+    GadgetFieldInfo
+from yt.frontends.sph.fields import \
+    SPHFieldInfo
+
+class GizmoFieldInfo(GadgetFieldInfo):
+    known_particle_fields = (
+        ("Mass", ("code_mass", ["particle_mass"], None)),
+        ("Masses", ("code_mass", ["particle_mass"], None)),
+        ("Coordinates", ("code_length", ["particle_position"], None)),
+        ("Velocity", ("code_velocity", ["particle_velocity"], None)),
+        ("Velocities", ("code_velocity", ["particle_velocity"], None)),
+        ("ParticleIDs", ("", ["particle_index"], None)),
+        ("InternalEnergy", ("code_velocity ** 2", ["thermal_energy"], None)),
+        ("SmoothingLength", ("code_length", ["smoothing_length"], None)),
+        ("Density", ("code_mass / code_length**3", ["density"], None)),
+        ("MaximumTemperature", ("K", [], None)),
+        ("Temperature", ("K", ["temperature"], None)),
+        ("Epsilon", ("code_length", [], None)),
+        ("Metals", ("code_metallicity", ["metallicity"], None)),
+        ("Metallicity", ("code_metallicity", ["metallicity"], None)),
+        ("Phi", ("code_length", [], None)),
+        ("StarFormationRate", ("Msun / yr", [], None)),
+        ("FormationTime", ("code_time", ["creation_time"], None)),
+        ("Metallicity_00", ("", ["metallicity"], None)),
+        ("Metallicity_01", ("", ["He_metallicity"], None)),
+        ("Metallicity_02", ("", ["C_metallicity"], None)),
+        ("Metallicity_03", ("", ["N_metallicity"], None)),
+        ("Metallicity_04", ("", ["O_metallicity"], None)),
+        ("Metallicity_05", ("", ["Ne_metallicity"], None)),
+        ("Metallicity_06", ("", ["Mg_metallicity"], None)),
+        ("Metallicity_07", ("", ["Si_metallicity"], None)),
+        ("Metallicity_08", ("", ["S_metallicity"], None)),
+        ("Metallicity_09", ("", ["Ca_metallicity"], None)),
+        ("Metallicity_10", ("", ["Fe_metallicity"], None)),
+    )
+
+    def __init__(self, *args, **kwargs):
+        super(SPHFieldInfo, self).__init__(*args, **kwargs)
+        if ("PartType0", "Metallicity_00") in self.field_list:
+            self.nuclei_names = ["He", "C", "N", "O", "Ne", "Mg", "Si", "S",
+                                 "Ca", "Fe"]
+
+    def setup_gas_particle_fields(self, ptype):
+        super(GizmoFieldInfo, self).setup_gas_particle_fields(ptype)
+        self.alias((ptype, "temperature"), (ptype, "Temperature"))
+
+        def _h_density(field, data):
+            x_H = 1.0 - data[(ptype, "He_metallicity")] - \
+              data[(ptype, "metallicity")]
+            return x_H * data[(ptype, "density")] * \
+              data[(ptype, "NeutralHydrogenAbundance")]
+
+        self.add_field(
+            (ptype, "H_density"),
+            function=_h_density,
+            particle_type=True,
+            units=self.ds.unit_system["density"])
+        add_species_field_by_density(self, ptype, "H", particle_type=True)
+        for suffix in ["density", "fraction", "mass", "number_density"]:
+            self.alias((ptype, "H_p0_%s" % suffix), (ptype, "H_%s" % suffix))
+
+        def _h_p1_density(field, data):
+            x_H = 1.0 - data[(ptype, "He_metallicity")] - \
+              data[(ptype, "metallicity")]
+            return x_H * data[(ptype, "density")] * \
+              (1.0 - data[(ptype, "NeutralHydrogenAbundance")])
+
+        self.add_field(
+            (ptype, "H_p1_density"),
+            function=_h_p1_density,
+            particle_type=True,
+            units=self.ds.unit_system["density"])
+        add_species_field_by_density(self, ptype, "H_p1", particle_type=True)
+
+        def _nuclei_mass_density_field(field, data):
+            species = field.name[1][:field.name[1].find("_")]
+            return data[ptype, "density"] * \
+              data[ptype, "%s_metallicity" % species]
+
+        for species in self.nuclei_names:
+            self.add_field(
+                (ptype, "%s_nuclei_mass_density" % species),
+                function=_nuclei_mass_density_field,
+                particle_type=True,
+                units=self.ds.unit_system["density"])
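
A short sketch, again not part of the changeset, of the species fields that ``setup_gas_particle_fields`` above defines, assuming a Gizmo snapshot is already loaded as ``ds``:

.. code-block:: python

   ad = ds.all_data()

   # neutral and ionized hydrogen mass densities defined above
   print(ad["PartType0", "H_p0_density"])
   print(ad["PartType0", "H_p1_density"])

   # per-species nuclei mass densities, e.g. iron (density * Fe_metallicity)
   print(ad["PartType0", "Fe_nuclei_mass_density"])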

diff -r 63ec9ddb4b7610cdb0b9245dea59aa255dd19b96 -r 3ce6fdcf725aab87fd7004359cbc69e97cc8902a yt/frontends/gizmo/tests/test_outputs.py
--- /dev/null
+++ b/yt/frontends/gizmo/tests/test_outputs.py
@@ -0,0 +1,48 @@
+"""
+Gizmo frontend tests
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2015, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from collections import OrderedDict
+
+from yt.utilities.answer_testing.framework import \
+    data_dir_load, \
+    requires_ds, \
+    sph_answer
+from yt.frontends.gizmo.api import GizmoDataset
+
+FIRE_m12i = 'FIRE_M12i_ref11/snapshot_600.hdf5'
+
+# This maps from field names to weight field names to use for projections
+fields = OrderedDict(
+    [
+        (("gas", "density"), None),
+        (("gas", "temperature"), None),
+        (("gas", "temperature"), ('gas', 'density')),
+        (('gas', 'velocity_magnitude'), None),
+        (("deposit", "all_density"), None),
+        (("deposit", "all_count"), None),
+        (("deposit", "all_cic"), None),
+        (("deposit", "PartType0_density"), None),
+    ]
+)
+
+ at requires_ds(FIRE_m12i)
+def test_GizmoDataset():
+    ds = data_dir_load(FIRE_m12i)
+    assert isinstance(ds, GizmoDataset)
+    for test in sph_answer(ds, 'FIRE_m12i', 4786950, fields):
+        test_GizmoDataset.__name__ = test.description
+        yield test


https://bitbucket.org/yt_analysis/yt/commits/e7f00268f51b/
Changeset:   e7f00268f51b
Branch:      yt
User:        chummels
Date:        2016-05-11 19:07:33+00:00
Summary:     Updating docs removing fire entry.
Affected #:  1 file

diff -r 3ce6fdcf725aab87fd7004359cbc69e97cc8902a -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c doc/source/reference/code_support.rst
--- a/doc/source/reference/code_support.rst
+++ b/doc/source/reference/code_support.rst
@@ -28,8 +28,6 @@
 +-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
 | Enzo                  |     Y      |     Y     |      Y     |   Y   |    Y     |    Y     |     Y      |   Full   |
 +-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
-| FIRE                  |     Y      |     Y     |      Y     |   Y   | Y [#f2]_ |    Y     |     Y      |   Full   |
-+-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
 | FITS                  |     Y      |    N/A    |      Y     |   Y   |    Y     |    Y     |     Y      |   Full   |
 +-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
 | FLASH                 |     Y      |     Y     |      Y     |   Y   |    Y     |    Y     |     Y      |   Full   |


https://bitbucket.org/yt_analysis/yt/commits/a33c31184bc1/
Changeset:   a33c31184bc1
Branch:      yt
User:        chummels
Date:        2016-05-11 19:08:21+00:00
Summary:     Merging with tip
Affected #:  73 files

diff -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c -r a33c31184bc1fe0da3a42fe62b7d8583a730293e doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -994,6 +994,17 @@
         cd ..
     fi
 
+    ( ${DEST_DIR}/bin/python -c "import _ssl" 2>&1 ) 1>> ${LOG_FILE}
+    RESULT=$?
+    if  [ $RESULT -ne 0 ]
+    then
+        echo "Unable to import the python SSL bindings."
+        echo "This means that OpenSSL is not installed or your system's OpenSSL"
+        echo "installation is out of date."
+        echo "Please install OpenSSL or set INST_CONDA=1"
+        do_exit
+    fi
+
     if [ $INST_PY3 -eq 1 ]
     then
         if [ ! -e $PYTHON3/done ]

diff -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c -r a33c31184bc1fe0da3a42fe62b7d8583a730293e doc/source/analyzing/analysis_modules/photon_simulator.rst
--- a/doc/source/analyzing/analysis_modules/photon_simulator.rst
+++ b/doc/source/analyzing/analysis_modules/photon_simulator.rst
@@ -478,7 +478,8 @@
 
    import yt
    import numpy as np
-   from yt.utilities.physical_constants import cm_per_kpc, K_per_keV, mp
+   from yt.utilities.physical_ratios import cm_per_kpc, K_per_keV
+   from yt.units import mp
    from yt.utilities.cosmology import Cosmology
    from yt.analysis_modules.photon_simulator.api import *
    import aplpy

diff -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c -r a33c31184bc1fe0da3a42fe62b7d8583a730293e doc/source/analyzing/analysis_modules/xray_emission_fields.rst
--- a/doc/source/analyzing/analysis_modules/xray_emission_fields.rst
+++ b/doc/source/analyzing/analysis_modules/xray_emission_fields.rst
@@ -32,7 +32,7 @@
   from yt.analysis_modules.spectral_integrator.api import \
        add_xray_emissivity_field
 
-  xray_fields = add_xray_emissivity_field(0.5, 7.0)
+  xray_fields = add_xray_emissivity_field(ds, 0.5, 7.0)
 
 Additional keyword arguments are:
 
@@ -49,7 +49,8 @@
 
  * **constant_metallicity** (*float*): If specified, assume a constant
    metallicity for the emission from metals.  The *with_metals* keyword
-   must be set to False to use this.  Default: None.
+   must be set to False to use this. It should be given in units of solar metallicity.
+   Default: None.
 
 The resulting fields can be used like all normal fields. The function will return the names of
 the created fields in a Python list.
@@ -60,7 +61,7 @@
   from yt.analysis_modules.spectral_integrator.api import \
        add_xray_emissivity_field
 
-  xray_fields = add_xray_emissivity_field(0.5, 7.0, filename="apec_emissivity.h5")
+  xray_fields = add_xray_emissivity_field(ds, 0.5, 7.0, filename="apec_emissivity.h5")
 
   ds = yt.load("enzo_tiny_cosmology/DD0046/DD0046")
   plot = yt.SlicePlot(ds, 'x', 'xray_luminosity_0.5_7.0_keV')
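
As a follow-up to the ``constant_metallicity`` note added above, a hedged sketch of the documented call, assuming ``ds`` is already loaded; the value 0.3 is purely illustrative:

.. code-block:: python

   from yt.analysis_modules.spectral_integrator.api import \
        add_xray_emissivity_field

   # with_metals must be False when a constant (solar) metallicity is supplied
   xray_fields = add_xray_emissivity_field(ds, 0.5, 7.0,
                                           with_metals=False,
                                           constant_metallicity=0.3)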

diff -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c -r a33c31184bc1fe0da3a42fe62b7d8583a730293e doc/source/analyzing/units/1)_Symbolic_Units.ipynb
--- a/doc/source/analyzing/units/1)_Symbolic_Units.ipynb
+++ b/doc/source/analyzing/units/1)_Symbolic_Units.ipynb
@@ -155,7 +155,7 @@
    "outputs": [],
    "source": [
     "from yt.units.yt_array import YTQuantity\n",
-    "from yt.utilities.physical_constants import kboltz\n",
+    "from yt.units import kboltz\n",
     "from numpy.random import random\n",
     "import numpy as np\n",
     "\n",
@@ -446,7 +446,7 @@
    },
    "outputs": [],
    "source": [
-    "from yt.utilities.physical_constants import G, kboltz\n",
+    "from yt.units import G, kboltz\n",
     "\n",
     "print (\"Newton's constant: \", G)\n",
     "print (\"Newton's constant in MKS: \", G.in_mks(), \"\\n\")\n",

diff -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c -r a33c31184bc1fe0da3a42fe62b7d8583a730293e doc/source/analyzing/units/2)_Fields_and_unit_conversion.ipynb
--- a/doc/source/analyzing/units/2)_Fields_and_unit_conversion.ipynb
+++ b/doc/source/analyzing/units/2)_Fields_and_unit_conversion.ipynb
@@ -467,7 +467,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "from yt.utilities.physical_constants import kboltz\n",
+    "from yt.units import kboltz\n",
     "kb = kboltz.to_astropy()"
    ]
   },

diff -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c -r a33c31184bc1fe0da3a42fe62b7d8583a730293e doc/source/analyzing/units/6)_Unit_Equivalencies.ipynb
--- a/doc/source/analyzing/units/6)_Unit_Equivalencies.ipynb
+++ b/doc/source/analyzing/units/6)_Unit_Equivalencies.ipynb
@@ -41,7 +41,7 @@
     "print (dd[\"temperature\"].to_equivalent(\"eV\", \"thermal\"))\n",
     "\n",
     "# Rest energy of the proton\n",
-    "from yt.utilities.physical_constants import mp\n",
+    "from yt.units import mp\n",
     "E_p = mp.to_equivalent(\"GeV\", \"mass_energy\")\n",
     "print (E_p)"
    ]
@@ -61,7 +61,7 @@
    },
    "outputs": [],
    "source": [
-    "from yt.utilities.physical_constants import clight\n",
+    "from yt.units import clight\n",
     "v = 0.1*clight\n",
     "g = v.to_equivalent(\"dimensionless\", \"lorentz\")\n",
     "print (g)\n",
@@ -166,7 +166,7 @@
    },
    "outputs": [],
    "source": [
-    "from yt.utilities.physical_constants import qp # the elementary charge in esu\n",
+    "from yt.units import qp # the elementary charge in esu\n",
     "qp_SI = qp.to_equivalent(\"C\",\"SI\") # convert to Coulombs\n",
     "print (qp)\n",
     "print (qp_SI)"

diff -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c -r a33c31184bc1fe0da3a42fe62b7d8583a730293e doc/source/analyzing/units/7)_Unit_Systems.ipynb
--- a/doc/source/analyzing/units/7)_Unit_Systems.ipynb
+++ b/doc/source/analyzing/units/7)_Unit_Systems.ipynb
@@ -324,7 +324,7 @@
    },
    "outputs": [],
    "source": [
-    "from yt.utilities.physical_constants import G\n",
+    "from yt.units import G\n",
     "print (G.in_base(\"mks\"))"
    ]
   },

diff -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c -r a33c31184bc1fe0da3a42fe62b7d8583a730293e doc/source/cookbook/tests/test_cookbook.py
--- a/doc/source/cookbook/tests/test_cookbook.py
+++ b/doc/source/cookbook/tests/test_cookbook.py
@@ -15,6 +15,26 @@
 import subprocess
 
 
+def run_with_capture(*args, **kwargs):
+    sp = subprocess.Popen(*args,
+                          stdout=subprocess.PIPE,
+                          stderr=subprocess.PIPE,
+                          **kwargs)
+    out, err = sp.communicate()
+    if out:
+        sys.stdout.write(out.decode("UTF-8"))
+    if err:
+        sys.stderr.write(err.decode("UTF-8"))
+
+    if sp.returncode != 0:
+        retstderr = " ".join(args[0])
+        retstderr += "\n\nTHIS IS THE REAL CAUSE OF THE FAILURE:\n" 
+        retstderr += err.decode("UTF-8") + "\n"
+        raise subprocess.CalledProcessError(sp.returncode, retstderr)
+
+    return sp.returncode
+
+
 PARALLEL_TEST = {"rockstar_nest.py": "3"}
 BLACKLIST = ["opengl_ipython.py", "opengl_vr.py"]
 
@@ -37,10 +57,16 @@
 
 def check_recipe(cmd):
     '''Run single recipe'''
-    try:
-        subprocess.check_call(cmd)
-        result = True
-    except subprocess.CalledProcessError as e:
-        print(("Stdout output:\n", e.output))
-        result = False
-    assert result
+    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
+                            stderr=subprocess.PIPE)
+    out, err = proc.communicate()
+    if out:
+        sys.stdout.write(out.decode("utf8"))
+    if err:
+        sys.stderr.write(err.decode("utf8"))
+
+    if proc.returncode != 0:
+        retstderr = " ".join(cmd)
+        retstderr += "\n\nTHIS IS THE REAL CAUSE OF THE FAILURE:\n" 
+        retstderr += err.decode("UTF-8") + "\n"
+        raise subprocess.CalledProcessError(proc.returncode, retstderr)

diff -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c -r a33c31184bc1fe0da3a42fe62b7d8583a730293e doc/source/developing/creating_derived_fields.rst
--- a/doc/source/developing/creating_derived_fields.rst
+++ b/doc/source/developing/creating_derived_fields.rst
@@ -71,9 +71,9 @@
 a dimensionless float or array.
 
 If your field definition includes physical constants rather than defining a
-constant as a float, you can import it from ``yt.utilities.physical_constants``
+constant as a float, you can import it from ``yt.units``
 to get a predefined version of the constant with the correct units. If you know
-the units your data is supposed to have ahead of time, you can import unit
+the units your data is supposed to have ahead of time, you can also import unit
 symbols like ``g`` or ``cm`` from the ``yt.units`` namespace and multiply the
 return value of your field function by the appropriate combination of unit
 symbols for your field's units. You can also convert floats or NumPy arrays into
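
To illustrate the paragraph above, a minimal sketch of a derived field that uses a constant imported from ``yt.units`` instead of a bare float; the field name, definition, and sample dataset are illustrative only:

.. code-block:: python

   import yt
   from yt.units import kboltz  # predefined constant with correct units

   def _thermal_pressure(field, data):
       # n * k_B * T carries pressure units because kboltz is unitful
       return data["gas", "number_density"] * kboltz * data["gas", "temperature"]

   ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")  # sample dataset
   ds.add_field(("gas", "thermal_pressure"), function=_thermal_pressure,
                units="dyne/cm**2")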

diff -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c -r a33c31184bc1fe0da3a42fe62b7d8583a730293e doc/source/developing/creating_frontend.rst
--- a/doc/source/developing/creating_frontend.rst
+++ b/doc/source/developing/creating_frontend.rst
@@ -34,7 +34,8 @@
 `yt-dev <http://lists.spacepope.org/listinfo.cgi/yt-dev-spacepope.org>`_!
 
 To get started, make a new directory in ``yt/frontends`` with the name
-of your code.  Copying the contents of the ``yt/frontends/_skeleton``
+of your code and add the name into ``yt/frontends/api.py``.
+Copying the contents of the ``yt/frontends/_skeleton``
 directory will add a lot of boilerplate for the required classes and
 methods that are needed.  In particular, you'll have to create a
 subclass of ``Dataset`` in the data_structures.py file. This subclass

diff -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c -r a33c31184bc1fe0da3a42fe62b7d8583a730293e doc/source/developing/testing.rst
--- a/doc/source/developing/testing.rst
+++ b/doc/source/developing/testing.rst
@@ -245,6 +245,12 @@
 * ``IsothermalCollapse/snap_505.hdf5``
 * ``GadgetDiskGalaxy/snapshot_200.hdf5``
 
+GAMER
+~~~~~~
+
+* ``InteractingJets/jet_000002``
+* ``WaveDarkMatter/psiDM_000020``
+
 Halo Catalog
 ~~~~~~~~~~~~
 
@@ -532,7 +538,13 @@
 
       local_pw_000:
 
-would regenerate answers for OWLS frontend.
+would regenerate answers for OWLS frontend. 
+
+When adding tests to an existing set of answers (like ``local_owls_000`` or ``local_varia_000``), 
+it is considered best practice to first submit a pull request adding the tests WITHOUT incrementing 
+the version number. Then, allow the tests to run (resulting in "no old answer" errors for the missing
+answers). If no other failures are present, you can then increment the version number to regenerate
+the answers. This way, we can avoid accidentally covering up test breakages.
 
 Adding New Answer Tests
 ~~~~~~~~~~~~~~~~~~~~~~~

diff -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c -r a33c31184bc1fe0da3a42fe62b7d8583a730293e doc/source/examining/loading_data.rst
--- a/doc/source/examining/loading_data.rst
+++ b/doc/source/examining/loading_data.rst
@@ -1021,6 +1021,34 @@
 
 yt will utilize length, mass and time to set up all other units.
 
+GAMER Data
+----------
+
+GAMER HDF5 data is supported and cared for by Hsi-Yu Schive. You can load the data like this:
+
+.. code-block:: python
+
+   import yt
+   ds = yt.load("InteractingJets/jet_000002")
+
+Currently GAMER does not assume any units for non-cosmological simulations. To specify the units for yt,
+you need to supply conversions for length, time, and mass to ``load`` using the ``units_override`` functionality:
+
+.. code-block:: python
+
+   import yt
+   code_units = { "length_unit":(1.0,"kpc"),
+                  "time_unit"  :(3.08567758096e+13,"s"),
+                  "mass_unit"  :(1.4690033e+36,"g") }
+   ds = yt.load("InteractingJets/jet_000002", units_override=code_units)
+
+This means that the yt fields, e.g., ``("gas","density")``, will be in cgs units, but the GAMER fields,
+e.g., ``("gamer","Dens")``, will be in code units.
+
+.. rubric:: Caveats
+
+* GAMER data in raw binary format (i.e., OPT__OUTPUT_TOTAL = C-binary) is not supported.
+
 .. _loading-amr-data:
 
 Generic AMR Data
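
Following up on the GAMER ``units_override`` paragraph added above, a hedged sketch of the distinction it describes, assuming ``ds`` was loaded with the ``code_units`` dictionary from that example:

.. code-block:: python

   ad = ds.all_data()
   print(ad["gas", "density"].units)   # converted to cgs via units_override
   print(ad["gamer", "Dens"].units)    # native GAMER field, kept in code units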

diff -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c -r a33c31184bc1fe0da3a42fe62b7d8583a730293e doc/source/installing.rst
--- a/doc/source/installing.rst
+++ b/doc/source/installing.rst
@@ -18,26 +18,27 @@
 
 * If you do not have root access on your computer, are not comfortable managing
   python packages, or are working on a supercomputer or cluster computer, you
-  will probably want to use the bash all-in-one installation script.  This builds
-  Python, NumPy, Matplotlib, and yt from source to set up an isolated scientific
-  python environment inside of a single folder in your home directory. See
-  :ref:`install-script` for more details.
+  will probably want to use the bash all-in-one installation script.  This
+  creates a python environment using the `miniconda python
+  distribution <http://conda.pydata.org/miniconda.html>`_ and the
+  `conda <http://conda.pydata.org/docs/>`_ package manager inside of a single
+  folder in your home directory. See :ref:`install-script` for more details.
 
 * If you use the `Anaconda <https://store.continuum.io/cshop/anaconda/>`_ python
-  distribution see :ref:`anaconda-installation` for details on how to install
-  yt using the ``conda`` package manager.  Source-based installation from the
-  mercurial repository or via ``pip`` should also work under Anaconda. Note that
-  this is currently the only supported installation mechanism on Windows.
+  distribution and already have ``conda`` installed, see
+  :ref:`anaconda-installation` for details on how to install yt using the
+  ``conda`` package manager. Note that this is currently the only supported
+  installation mechanism on Windows.
 
-* If you already have a scientific python software stack installed on your
-  computer and are comfortable installing python packages,
+* If you want to build a development version of yt or are comfortable with
+  compilers and know your way around python packaging,
   :ref:`source-installation` will probably be the best choice. If you have set
   up python using a source-based package manager like `Homebrew
   <http://brew.sh>`_ or `MacPorts <http://www.macports.org/>`_ this choice will
-  let you install yt using the python installed by the package manager. Similarly
-  for python environments set up via Linux package managers so long as you
-  have the necessary compilers installed (e.g. the ``build-essentials``
-  package on Debian and Ubuntu).
+  let you install yt using the python installed by the package
+  manager. Similarly, this will also work for python environments set up via
+  Linux package managers so long as you have the necessary compilers installed
+  (e.g. the ``build-essential`` package on Debian and Ubuntu).
 
 .. note::
   See `Parallel Computation
@@ -53,19 +54,21 @@
 Before you install yt, you must decide which branch (i.e. version) of the code
 you prefer to use:
 
-* ``yt`` -- The most up-to-date *development* version with the most current features but sometimes unstable (yt-3.x)
-* ``stable`` -- The latest stable release of yt-3.x
-* ``yt-2.x`` -- The latest stable release of yt-2.x
+* ``yt`` -- The most up-to-date *development* version with the most current
+  features but sometimes unstable (the development version of the next ``yt-3.x``
+  release).
+* ``stable`` -- The latest stable release of ``yt-3.x``.
+* ``yt-2.x`` -- The last stable release of ``yt-2.x``.
 
-If this is your first time using the code, we recommend using ``stable``,
-unless you specifically need some piece of brand-new functionality only
-available in ``yt`` or need to run an old script developed for ``yt-2.x``.
-There were major API and functionality changes made in yt after version 2.7
-in moving to version 3.0.  For a detailed description of the changes
-between versions 2.x (e.g. branch ``yt-2.x``) and 3.x (e.g. branches ``yt`` and
-``stable``) see :ref:`yt3differences`.  Lastly, don't feel like you're locked
-into one branch when you install yt, because you can easily change the active
-branch by following the instructions in :ref:`switching-between-yt-versions`.
+If this is your first time using the code, we recommend using ``stable``, unless
+you specifically need some piece of brand-new functionality only available in
+``yt`` or need to run an old script developed for ``yt-2.x``.  There were major
+API and functionality changes made in yt for version 3.0.  For a detailed
+description of the changes between versions 2.x (e.g. branch ``yt-2.x``) and 3.x
+(e.g. branches ``yt`` and ``stable``) see :ref:`yt3differences`.  Lastly, don't
+feel like you're locked into one branch when you install yt, because you can
+easily change the active branch by following the instructions in
+:ref:`switching-between-yt-versions`.
 
 .. _install-script:
 
@@ -74,9 +77,8 @@
 
 Because installation of all of the interlocking parts necessary to install yt
 itself can be time-consuming, yt provides an all-in-one installation script
-which downloads and builds a fully-isolated Python + NumPy + Matplotlib + HDF5 +
-Mercurial installation. Since the install script compiles yt's dependencies from
-source, you must have C, C++, and optionally Fortran compilers installed.
+which downloads and builds a fully-isolated installation of Python that includes
+NumPy, Matplotlib, H5py, Mercurial, and yt.
 
 The install script supports UNIX-like systems, including Linux, OS X, and most
 supercomputer and cluster environments. It is particularly suited for deployment
@@ -94,30 +96,62 @@
 ^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 To get the installation script for the ``stable`` branch of the code,
-download it from:
+download it using the following command:
 
 .. code-block:: bash
 
-  wget http://bitbucket.org/yt_analysis/yt/raw/stable/doc/install_script.sh
+  $ wget http://bitbucket.org/yt_analysis/yt/raw/stable/doc/install_script.sh
 
-If you wish to install a different version of yt (see
-:ref:`above <branches-of-yt>`), replace ``stable`` with the appropriate
-branch name (e.g. ``yt``, ``yt-2.x``) in the path above to get the correct
-install script.
-
-By default, the bash install script will install an array of items, but there
-are additional packages that can be downloaded and installed (e.g. SciPy, enzo,
-etc.). The script has all of these options at the top of the file. You should be
-able to open it and edit it without any knowledge of bash syntax.  To execute
-it, run:
+If you do not have ``wget``, the following should also work:
 
 .. code-block:: bash
 
-  bash install_script.sh
+  $ curl -OL http://bitbucket.org/yt_analysis/yt/raw/stable/doc/install_script.sh
+
+If you wish to install a different version of yt (see :ref:`branches-of-yt`),
+replace ``stable`` with the appropriate branch name (e.g. ``yt``, ``yt-2.x``) in
+the path above to get the correct install script.
+
+By default, the bash install script will create a python environment based on
+the `miniconda python distribution <http://conda.pydata.org/miniconda.html>`_,
+and will install yt's dependencies using the `conda
+<http://conda.pydata.org/docs/>`_ package manager. To avoid needing a
+compilation environment to run the install script, yt itself will also be
+installed using `conda`.
+
+If you would like to customize your yt installation, you can edit the values of
+several variables that are defined at the top of the script.
+
+If you would like to build yt from source, you will need to edit the install
+script and set ``INST_YT_SOURCE=1`` near the top. This will clone a copy of the
+yt mercurial repository and build yt from source. The default is
+``INST_YT_SOURCE=0``, which installs yt from a binary conda package.
+
+The install script can also build python and all yt dependencies from source. To
+switch to this mode, set ``INST_CONDA=0`` at the top of the install script. If
+you choose this mode, you must also set ``INST_YT_SOURCE=1``.
+
+In addition, you can tell the install script to download and install some
+additional packages --- currently these include
+`PyX <http://pyx.sourceforge.net/>`_, the `Rockstar halo
+finder <http://arxiv.org/abs/1110.4372>`_, `SciPy <https://www.scipy.org/>`_,
+`Astropy <http://www.astropy.org/>`_, and the necessary dependencies for
+:ref:`unstructured mesh rendering <unstructured_mesh_rendering>`. The script has
+all of the options for installing optional packages near the top of the
+file. You should be able to open it and edit it without any knowledge of bash
+syntax. For example, to install scipy, change ``INST_SCIPY=0`` to
+``INST_SCIPY=1``.
+
+To execute the install script, run:
+
+.. code-block:: bash
+
+  $ bash install_script.sh
 
 Because the installer is downloading and building a variety of packages from
-source, this will likely take a while (e.g. 20 minutes), but you will get
-updates of its status at the command line throughout.
+source, this will likely take a few minutes, especially if you have a slow
+internet connection or have ``INST_CONDA=0`` set. You will get updates of its
+status at the command prompt throughout.
 
 If you receive errors during this process, the installer will provide you
 with a large amount of information to assist in debugging your problems.  The
@@ -127,26 +161,63 @@
 potentially figure out what went wrong.  If you have problems, though, do not
 hesitate to :ref:`contact us <asking-for-help>` for assistance.
 
+If the install script errors out with a message about being unable to import the
+python SSL bindings, this means that the Python built by the install script was
+unable to link against the OpenSSL library. This likely means that you installed
+with ``INST_CONDA=0`` on a recent version of OSX, or on a cluster that has a
+very out of date installation of OpenSSL. In both of these cases you will either
+need to install OpenSSL yourself from the system package manager or consider
+using ``INST_CONDA=1``, since conda-based installs can install the conda package
+for OpenSSL.
+
 .. _activating-yt:
 
 Activating Your Installation
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 Once the installation has completed, there will be instructions on how to set up
-your shell environment to use yt by executing the activate script.  You must
-run this script in order to have yt properly recognized by your system.  You can
-either add it to your login script, or you must execute it in each shell session
-prior to working with yt.
+your shell environment to use yt.  
+
+Activating Conda-based installs (``INST_CONDA=1``)
+""""""""""""""""""""""""""""""""""""""""""""""""""
+
+For conda-based installs, you will need to ensure that the installation's
+``yt-conda/bin`` directory is prepended to your ``PATH`` environment variable.
+
+For Bash-style shells, you can use the following command in a terminal session
+to temporarily activate the yt installation:
 
 .. code-block:: bash
 
-  source <yt installation directory>/bin/activate
+  $ export PATH=/path/to/yt-conda/bin:$PATH
+
+and on csh-style shells:
+
+.. code-block:: csh
+
+  $ setenv PATH /path/to/yt-conda/bin:$PATH
+
+If you would like to permanently activate yt, you can also update the init file
+appropriate for your shell and OS (e.g. .bashrc, .bash_profile, .cshrc, .zshrc)
+to include the same command.
+
+Activating source-based installs (``INST_CONDA=0``)
+"""""""""""""""""""""""""""""""""""""""""""""""""""
+
+For this installation method, you must run an ``activate`` script to activate
+the yt environment in a terminal session. You must run this script in order to
+have yt properly recognized by your system.  You can either add it to your login
+script, or you must execute it in each shell session prior to working with yt.
+
+.. code-block:: bash
+
+  $ source <yt installation directory>/bin/activate
 
 If you use csh or tcsh as your shell, activate that version of the script:
 
 .. code-block:: bash
 
-  source <yt installation directory>/bin/activate.csh
+  $ source <yt installation directory>/bin/activate.csh
 
 If you don't like executing outside scripts on your computer, you can set
 the shell variables manually.  ``YT_DEST`` needs to point to the root of the
@@ -166,14 +237,21 @@
 
 .. code-block:: bash
 
-  yt update
+  $ yt update
 
-Additionally, if you want to make sure you have the latest dependencies
-associated with yt and update the codebase simultaneously, type this:
+Additionally, if you ran the install script with ``INST_CONDA=0`` and want to
+make sure you have the latest dependencies associated with yt and update the
+codebase simultaneously, type this:
 
 .. code-block:: bash
 
-  yt update --all
+  $ yt update --all
+
+If you ran the install script with ``INST_CONDA=1`` and want to update your dependencies, run:
+
+.. code-block:: bash
+
+  $ conda update --all
 
 .. _removing-yt:
 
@@ -192,35 +270,26 @@
 Installing yt Using Anaconda
 ++++++++++++++++++++++++++++
 
-Perhaps the quickest way to get yt up and running is to install it using the
-`Anaconda Python Distribution <https://store.continuum.io/cshop/anaconda/>`_,
-which will provide you with a easy-to-use environment for installing Python
-packages.
-
-If you do not want to install the full anaconda python distribution, you can
-install a bare-bones Python installation using miniconda.  To install miniconda,
-visit http://repo.continuum.io/miniconda/ and download ``Miniconda-latest-...``
-script for your platform and system architecture. Next, run the script, e.g.:
-
-.. code-block:: bash
-
-  bash Miniconda-latest-Linux-x86_64.sh
-
 For both the Anaconda and Miniconda installations, make sure that the Anaconda
 ``bin`` directory is in your path, and then issue:
 
 .. code-block:: bash
 
-  conda install yt
+  $ conda install yt
 
 which will install stable branch of yt along with all of its dependencies.
 
+.. _nightly-conda-builds:
+
+Nightly Conda Builds
+^^^^^^^^^^^^^^^^^^^^
+
 If you would like to install latest development version of yt, you can download
 it from our custom anaconda channel:
 
 .. code-block:: bash
 
-  conda install -c http://use.yt/with_conda/ yt
+  $ conda install -c http://use.yt/with_conda/ yt
 
 New packages for development branch are built after every pull request is
 merged. In order to make sure you are running latest version, it's recommended
@@ -228,28 +297,26 @@
 
 .. code-block:: bash
 
-  conda update -c http://use.yt/with_conda/ yt
+  $ conda update -c http://use.yt/with_conda/ yt
 
 Location of our channel can be added to ``.condarc`` to avoid retyping it during
 each *conda* invocation. Please refer to `Conda Manual
 <http://conda.pydata.org/docs/config.html#channel-locations-channels>`_ for
 detailed instructions.
 
+.. _conda-source-build:
 
-Obtaining Source Code
-^^^^^^^^^^^^^^^^^^^^^
+Building yt from Source For Conda-based Installs
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-There are two ways to get the yt source code when using an Anaconda
-installation.
-
-Option 1:
-
-Ensure that you have all build dependencies installed in your current
+First, ensure that you have all build dependencies installed in your current
 conda environment:
 
 .. code-block:: bash
 
-  conda install cython mercurial sympy ipython h5py matplotlib
+  $ conda install cython mercurial sympy ipython matplotlib
+
+In addition, you will need a C compiler installed.
 
 .. note::
   
@@ -260,87 +327,124 @@
 
   .. code-block:: bash
 
-     export CONDA_DIR=$(python -c 'import sys; print(sys.executable.split("/bin/python")[0])')
-     conda create -y -n py27 python=2.7 mercurial
-     ln -s ${CONDA_DIR}/envs/py27/bin/hg ${CONDA_DIR}/bin
+   $ export CONDA_DIR=$(python -c 'import sys; print(sys.executable.split("/bin/python")[0])')
+   $ conda create -y -n py27 python=2.7 mercurial
+   $ ln -s ${CONDA_DIR}/envs/py27/bin/hg ${CONDA_DIR}/bin
 
 Clone the yt repository with:
 
 .. code-block:: bash
 
-  hg clone https://bitbucket.org/yt_analysis/yt
+  $ hg clone https://bitbucket.org/yt_analysis/yt
 
 Once inside the yt directory, update to the appropriate branch and
-run ``setup.py``. For example, the following commands will allow you
+run ``setup.py develop``. For example, the following commands will allow you
 to see the tip of the development branch.
 
 .. code-block:: bash
 
-  hg up yt
-  python setup.py develop
+  $ hg pull
+  $ hg update yt
+  $ python setup.py develop
 
 This will make sure you are running a version of yt corresponding to the
 most up-to-date source code.
 
-Option 2:
+.. _rockstar-conda:
 
-Recipes to build conda packages for yt are available at
-https://github.com/conda/conda-recipes.  To build the yt conda recipe, first
-clone the conda-recipes repository
+Rockstar Halo Finder for Conda-based installations
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The easiest way to set rockstar up in a conda-based python environment is to run
+the install script with both ``INST_CONDA=1`` and ``INST_ROCKSTAR=1``.
+
+If you want to do this manually, you will need to follow these
+instructions. First, clone Matt Turk's fork of rockstar and compile it:
 
 .. code-block:: bash
 
-  git clone https://github.com/conda/conda-recipes
+  $ hg clone https://bitbucket.org/MatthewTurk/rockstar
+  $ cd rockstar
+  $ make lib
 
-Then navigate to the repository root and invoke ``conda build``:
+Next, copy `librockstar.so` into the `lib` folder of your anaconda installation:
 
 .. code-block:: bash
 
-  cd conda-recipes
-  conda build ./yt/
+  $ cp librockstar.so /path/to/anaconda/lib
 
-Note that building a yt conda package requires a C compiler.
+Finally, you will need to recompile yt to enable the rockstar interface. Clone a
+copy of the yt mercurial repository (see :ref:`conda-source-build`), or navigate
+to a clone that you have already made, and do the following:
+
+.. code-block:: bash
+
+  $ cd /path/to/yt-hg
+  $ ./clean.sh
+  $ echo /path/to/rockstar > rockstar.cfg
+  $ python setup.py develop
+
+Here ``/path/to/yt-hg`` is the path to your clone of the yt mercurial repository
+and ``/path/to/rockstar`` is the path to your clone of Matt Turk's fork of
+rockstar.
+
+Finally, to actually use rockstar, you will need to ensure the folder containing
+`librockstar.so` is in your LD_LIBRARY_PATH:
+
+.. code-block:: bash
+
+  $ export LD_LIBRARY_PATH=/path/to/anaconda/lib
+
+You should now be able to enter a python session and import the rockstar
+interface:
+
+.. code-block:: python
+
+  >>> from yt.analysis_modules.halo_finding.rockstar import rockstar_interface
+
+If this python import fails, then you have not installed rockstar and yt's
+rockstar interface correctly.
 
 .. _windows-installation:
 
 Installing yt on Windows
 ^^^^^^^^^^^^^^^^^^^^^^^^
 
-Installation on 64-bit Microsoft Windows platforms is supported using Anaconda (see
-:ref:`anaconda-installation`). Also see :ref:`windows-developing` for details on how to build yt
-from source in Windows.
+Installation on 64-bit Microsoft Windows platforms is supported using Anaconda
+(see :ref:`anaconda-installation`). Also see :ref:`windows-developing` for
+details on how to build yt from source in Windows.
 
 .. _source-installation:
 
-Installing yt Using pip or from Source
-++++++++++++++++++++++++++++++++++++++
+Installing yt Using ``pip`` or From Source
+++++++++++++++++++++++++++++++++++++++++++
+
+.. note::
+
+  If you wish to install yt from source in a conda-based installation of yt,
+  see :ref:`conda-source-build`.
 
 To install yt from source, you must make sure you have yt's dependencies
-installed on your system.
+installed on your system. Right now, the dependencies to build yt from
+source include:
 
-If you use a Linux OS, use your distro's package manager to install these yt
-dependencies on your system:
+- ``mercurial``
+- A C compiler such as ``gcc`` or ``clang``
+- ``Python 2.7``, ``Python 3.4``, or ``Python 3.5``
 
-- ``HDF5``
-- ``zeromq``
-- ``sqlite``
-- ``mercurial``
-
-Then install the required Python packages with ``pip``:
+In addition, building yt from source requires several python packages
+which can be installed with ``pip``:
 
 .. code-block:: bash
 
-  $ pip install numpy matplotlib cython h5py nose sympy
+  $ pip install numpy matplotlib cython sympy
 
-If you're using IPython notebooks, you can install its dependencies
-with ``pip`` as well:
+You may also want to install some of yt's optional dependencies, including
+``jupyter``, ``h5py`` (which in turn depends on the HDF5 library), ``scipy``, or
+``astropy``.
 
-.. code-block:: bash
-
-  $ pip install ipython[notebook]
-
-From here, you can use ``pip`` (which comes with ``Python``) to install the latest
-stable version of yt:
+From here, you can use ``pip`` (which comes with ``Python``) to install the
+latest stable version of yt:
 
 .. code-block:: bash
 
@@ -353,46 +457,30 @@
 
 .. code-block:: bash
 
-  hg clone https://bitbucket.org/yt_analysis/yt
-  cd yt
-  hg update yt
-  python setup.py install --user --prefix=
+  $ hg clone https://bitbucket.org/yt_analysis/yt
+  $ cd yt
+  $ hg update yt
+  $ python setup.py install --user --prefix=
 
 .. note::
 
-  If you maintain your own user-level python installation separate from the OS-level python
-  installation, you can leave off ``--user --prefix=``, although you might need
-  ``sudo`` depending on where python is installed. See `This StackOverflow
-  discussion
+  If you maintain your own user-level python installation separate from the
+  OS-level python installation, you can leave off ``--user --prefix=``, although
+  you might need ``sudo`` depending on where python is installed. See `This
+  StackOverflow discussion
   <http://stackoverflow.com/questions/4495120/combine-user-with-prefix-error-with-setup-py-install>`_
   if you are curious why ``--prefix=`` is necessary on some systems.
 
-.. note::
-
-   yt requires version 18.0 or higher of ``setuptools``. If you see
-   error messages about this package, you may need to update it. For
-   example, with pip via
-
-   .. code-block:: bash
-
-      pip install --upgrade setuptools
-
-   or your preferred method. If you have ``distribute`` installed, you
-   may also see error messages for it if it's out of date. You can
-   update with pip via
-
-   .. code-block:: bash
-
-      pip install --upgrade distribute
-
-   or via your preferred method.
-   
-
 This will install yt into a folder in your home directory
 (``$HOME/.local/lib64/python2.7/site-packages`` on Linux,
 ``$HOME/Library/Python/2.7/lib/python/site-packages/`` on OSX). Please refer to
 the ``setuptools`` documentation for the additional options.
 
+If you are unable to locate the ``yt`` executable (i.e. ``yt version`` fails),
+then you likely need to add ``$HOME/.local/bin`` (or the equivalent on your
+OS) to your ``PATH``. Some Linux distributions do not include this directory
+in the default search path.
+
 If you choose this installation method, you do not need to run any activation
 script since this will install yt into your global python environment.
 
@@ -401,15 +489,35 @@
 
 .. code-block:: bash
 
-  hg clone https://bitbucket.org/yt_analysis/yt
-  cd yt
-  hg update yt
-  python setup.py develop --user --prefix=
+  $ hg clone https://bitbucket.org/yt_analysis/yt
+  $ cd yt
+  $ hg update yt
+  $ python setup.py develop --user --prefix=
 
 As above, you can leave off ``--user --prefix=`` if you want to install yt into the default
 package install path.  If you do not have write access for this location, you
 might need to use ``sudo``.
 
+Build errors with ``setuptools`` or ``distribute``
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Building yt requires version 18.0 or higher of ``setuptools``. If you see error
+messages about this package, you may need to update it. For example, with pip
+via
+
+.. code-block:: bash
+
+  $ pip install --upgrade setuptools
+
+or your preferred method. If you have ``distribute`` installed, you may also see
+error messages for it if it's out of date. You can update with pip via
+
+.. code-block:: bash
+
+  $ pip install --upgrade distribute
+
+or via your preferred method.   
+
 Keeping yt Updated via Mercurial
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
@@ -424,7 +532,7 @@
 
 .. code-block:: bash
 
-  yt update
+  $ yt update
 
 This will detect that you have installed yt from the mercurial repository, pull
 any changes from Bitbucket, and then recompile yt if necessary.
@@ -439,7 +547,7 @@
 
 .. code-block:: bash
 
-  yt --help
+  $ yt --help
 
 If this works, you should get a list of the various command-line options for
 yt, which means you have successfully installed yt.  Congratulations!
@@ -453,21 +561,57 @@
 
 .. _switching-between-yt-versions:
 
-Switching versions of yt: yt-2.x, yt-3.x, stable, and dev
----------------------------------------------------------
+Switching versions of yt: ``yt-2.x``, ``stable``, and ``yt`` branches
+---------------------------------------------------------------------
 
-With the release of version 3.0 of yt, development of the legacy yt 2.x series
-has been relegated to bugfixes.  That said, we will continue supporting the 2.x
-series for the foreseeable future.  This makes it easy to use scripts written
-for older versions of yt without substantially updating them to support the
-new field naming or unit systems in yt version 3.
+Here we explain how to switch between different development branches of yt. 
 
-Currently, the yt-2.x codebase is contained in a named branch in the yt
-mercurial repository.  Thus, depending on the method you used to install
-yt, there are different instructions for switching versions.
+If You Installed yt Using the Bash Install Script
++++++++++++++++++++++++++++++++++++++++++++++++++
 
-If You Installed yt Using the Installer Script
-++++++++++++++++++++++++++++++++++++++++++++++
+The instructions for how to switch between branches depend on whether you ran
+the install script with ``INST_YT_SOURCE=0`` (the default) or
+``INST_YT_SOURCE=1``. You can determine which option you used by inspecting the
+output:
+
+.. code-block:: bash
+
+  $ yt version 
+
+If the output from this command looks like:
+
+.. code-block:: none
+
+  The current version and changeset for the code is:
+
+  ---
+  Version = 3.2.3
+  ---
+
+i.e. it does not refer to a specific changeset hash, then you originally chose
+``INST_YT_SOURCE=0``.
+
+On the other hand, if the output from ``yt version`` looks like:
+
+.. code-block:: none
+
+  The current version and changeset for the code is:
+
+  ---
+  Version = 3.3-dev
+  Changeset = d8eec89b2c86 (yt) tip
+  ---
+
+i.e. it refers to a specific changeset in the yt mercurial repository, then
+you installed using ``INST_YT_SOURCE=1``.
+
+Conda-based installs (``INST_YT_SOURCE=0``)
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+In this case you can either install one of the nightly conda builds (see :ref:`nightly-conda-builds`), or you can follow the instructions above to build yt from source under conda (see :ref:`conda-source-build`).
+
+Source-based installs (``INST_YT_SOURCE=1``)
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 You already have the mercurial repository, so you simply need to switch
 which version you're using.  Navigate to the root of the yt mercurial
@@ -476,9 +620,9 @@
 
 .. code-block:: bash
 
-  cd yt-<machine>/src/yt-hg
-  hg update <desired-version>
-  python setup.py develop
+  $ cd yt-<machine>/src/yt-hg
+  $ hg update <desired-version>
+  $ python setup.py develop
 
 Valid versions to jump to are described in :ref:`branches-of-yt`.
 
@@ -494,8 +638,8 @@
 
 .. code-block:: bash
 
-  pip uninstall yt
-  hg clone https://bitbucket.org/yt_analysis/yt
+  $ pip uninstall yt
+  $ hg clone https://bitbucket.org/yt_analysis/yt
 
 Now, to switch between versions, you need to navigate to the root of
 the mercurial yt repository. Use mercurial to
@@ -503,9 +647,9 @@
 
 .. code-block:: bash
 
-  cd yt
-  hg update <desired-version>
-  python setup.py install --user --prefix=
+  $ cd yt
+  $ hg update <desired-version>
+  $ python setup.py install --user --prefix=
 
 Valid versions to jump to are described in :ref:`branches-of-yt`.
 

diff -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c -r a33c31184bc1fe0da3a42fe62b7d8583a730293e doc/source/reference/code_support.rst
--- a/doc/source/reference/code_support.rst
+++ b/doc/source/reference/code_support.rst
@@ -34,6 +34,8 @@
 +-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
 | Gadget                |     Y      |     Y     |      Y     |   Y   | Y [#f2]_ |    Y     |     Y      |   Full   |
 +-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
+| GAMER                 |     Y      |     N     |      Y     |   Y   |    Y     |    Y     |     Y      |   Full   |
++-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
 | Gasoline              |     Y      |     Y     |      Y     |   Y   | Y [#f2]_ |    Y     |     Y      |   Full   |
 +-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
 | Gizmo                 |     Y      |     Y     |      Y     |   Y   | Y [#f2]_ |    Y     |     Y      |   Full   |

diff -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c -r a33c31184bc1fe0da3a42fe62b7d8583a730293e doc/source/visualizing/sketchfab.rst
--- a/doc/source/visualizing/sketchfab.rst
+++ b/doc/source/visualizing/sketchfab.rst
@@ -55,7 +55,7 @@
 .. code-block:: python
 
    import yt
-   ds = yt.load("/data/workshop2012/IsolatedGalaxy/galaxy0030/galaxy0030")
+   ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
    sphere = ds.sphere("max", (1.0, "Mpc"))
    surface = ds.surface(sphere, "density", 1e-27)
 
@@ -113,24 +113,23 @@
 
 .. code-block:: python
 
-   import yt
-   ds = yt.load("redshift0058")
-   dd = ds.sphere("max", (200, "kpc"))
-   rho = 5e-27
+    import yt
+    from yt.units import kpc
+    ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
+    dd = ds.sphere(ds.domain_center, (500, "kpc"))
+    rho = 1e-28
 
-   bounds = [(dd.center[i] - 100.0/ds['kpc'],
-              dd.center[i] + 100.0/ds['kpc']) for i in range(3)]
+    bounds = [[dd.center[i] - 250*kpc, dd.center[i] + 250*kpc] for i in range(3)]
 
-   surf = ds.surface(dd, "density", rho)
+    surf = ds.surface(dd, "density", rho)
 
-   upload_id = surf.export_sketchfab(
-       title = "RD0058 - 5e-27",
-       description = "Extraction of Density (colored by Temperature) at 5e-27 " \
-                   + "g/cc from a galaxy formation simulation by Ryan Joung."
-       color_field = "temperature",
-       color_map = "hot",
-       color_log = True,
-       bounds = bounds
+    upload_id = surf.export_sketchfab(
+        title="galaxy0030 - 1e-28",
+        description="Extraction of Density (colored by temperature) at 1e-28 g/cc",
+        color_field="temperature",
+        color_map="hot",
+        color_log=True,
+        bounds=bounds
    )
 
 and yt will extract a surface, convert to a format that Sketchfab.com
@@ -141,15 +140,13 @@
 
 .. raw:: html
 
-   <iframe frameborder="0" height="480" width="854" allowFullScreen
-   webkitallowfullscreen="true" mozallowfullscreen="true"
-   src="http://skfb.ly/l4jh2edcba?autostart=0&transparent=0&autospin=0&controls=1&watermark=1"></iframe>
+     <iframe width="640" height="480" src="https://sketchfab.com/models/ff59dacd55824110ad5bcc292371a514/embed" frameborder="0" allowfullscreen mozallowfullscreen="true" webkitallowfullscreen="true" onmousewheel=""></iframe>
 
 As a note, Sketchfab has a maximum model size of 50MB for the free account.
-50MB is pretty hefty, though, so it shouldn't be a problem for most needs.
-We're working on a way to optionally upload links to the Sketchfab models on
-the `yt Hub <https://hub.yt-project.org/>`_, but for now, if you want to share
-a cool model we'd love to see it!
+50MB is pretty hefty, though, so it shouldn't be a problem for most
+needs. Additionally, if you have an eligible e-mail address associated with a
+school or university, you can request a free professional account, which allows
+models up to 200MB. See https://sketchfab.com/education for details.
 
 OBJ and MTL Files
 -----------------
@@ -167,7 +164,7 @@
 
    import yt
 
-   ds = yt.load("/data/workshop2012/IsolatedGalaxy/galaxy0030/galaxy0030")
+   ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
    rho = [2e-27, 1e-27]
    trans = [1.0, 0.5]
    filename = './surfaces'
@@ -239,7 +236,7 @@
 
    import yt
 
-   ds = yt.load("/data/workshop2012/IsolatedGalaxy/galaxy0030/galaxy0030")
+   ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
    rho = [2e-27, 1e-27]
    trans = [1.0, 0.5]
    filename = './surfaces'

diff -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c -r a33c31184bc1fe0da3a42fe62b7d8583a730293e setup.cfg
--- a/setup.cfg
+++ b/setup.cfg
@@ -10,10 +10,10 @@
 
 [flake8]
 # we exclude:
-#      api.py and __init__.py files to avoid spurious unused import errors
-#      _mpl_imports.py for the same reason
+#      api.py, mods.py, _mpl_imports.py, and __init__.py files to avoid spurious 
+#      unused import errors
 #      autogenerated __config__.py files
 #      vendored libraries
-exclude = */api.py,*/__init__.py,*/__config__.py,yt/visualization/_mpl_imports.py,yt/utilities/lodgeit.py,yt/utilities/poster/*,yt/extern/*,yt/mods.py
+exclude = */api.py,*/__init__.py,*/__config__.py,yt/visualization/_mpl_imports.py,yt/utilities/lodgeit.py,yt/utilities/lru_cache.py,yt/utilities/poster/*,yt/extern/*,yt/mods.py
 max-line-length=999
 ignore = E111,E121,E122,E123,E124,E125,E126,E127,E128,E129,E131,E201,E202,E211,E221,E222,E227,E228,E241,E301,E203,E225,E226,E231,E251,E261,E262,E265,E266,E302,E303,E402,E502,E701,E703,E731,W291,W292,W293,W391,W503
\ No newline at end of file

diff -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c -r a33c31184bc1fe0da3a42fe62b7d8583a730293e setupext.py
--- a/setupext.py
+++ b/setupext.py
@@ -1,10 +1,11 @@
 import os
 from pkg_resources import resource_filename
 import shutil
-import subprocess
+from subprocess import Popen, PIPE
 import sys
 import tempfile
 
+
 def check_for_openmp():
     """Returns True if local setup supports OpenMP, False otherwise"""
 
@@ -37,13 +38,21 @@
             "}"
         )
         file.flush()
-        with open(os.devnull, 'w') as fnull:
-            exit_code = subprocess.call(compiler + ['-fopenmp', filename],
-                                        stdout=fnull, stderr=fnull)
+        p = Popen(compiler + ['-fopenmp', filename],
+                  stdin=PIPE, stdout=PIPE, stderr=PIPE)
+        output, err = p.communicate()
+        exit_code = p.returncode
+        
+        if exit_code != 0:
+            print("Compilation of OpenMP test code failed with the error: ")
+            print(err)
+            print("Disabling OpenMP support. ")
 
         # Clean up
         file.close()
     except OSError:
+        print("check_for_openmp() could not find your C compiler. "
+              "Attempted to use '%s'. " % compiler)
         return False
     finally:
         os.chdir(curdir)
@@ -82,12 +91,11 @@
         except IOError:
             rd = '/usr/local'
 
-    fail_msg = ("Pyembree is installed, but I could not compile Embree test code. \n"
-               "I attempted to find Embree headers in %s. \n"
+    fail_msg = ("I attempted to find Embree headers in %s. \n"
                "If this is not correct, please set your correct embree location \n"
                "using EMBREE_DIR environment variable or your embree.cfg file. \n"
                "Please see http://yt-project.org/docs/dev/visualizing/unstructured_mesh_rendering.html "
-                "for more information." % rd)
+                "for more information. \n" % rd)
 
     # Create a temporary directory
     tmpdir = tempfile.mkdtemp()
@@ -110,23 +118,29 @@
             '}'
         )
         file.flush()
-        with open(os.devnull, 'w') as fnull:
-            exit_code = subprocess.call(compiler + ['-I%s/include/' % rd, filename],
-                             stdout=fnull, stderr=fnull)
+        p = Popen(compiler + ['-I%s/include/' % rd, filename], 
+                  stdin=PIPE, stdout=PIPE, stderr=PIPE)
+        output, err = p.communicate()
+        exit_code = p.returncode
+
+        if exit_code != 0:
+            print("Pyembree is installed, but I could not compile Embree test code.")
+            print("The error message was: ")
+            print(err)
+            print(fail_msg)
 
         # Clean up
         file.close()
 
     except OSError:
-        print(fail_msg)
+        print("read_embree_location() could not find your C compiler. "
+              "Attempted to use '%s'. " % compiler)
+        return False
 
     finally:
         os.chdir(curdir)
         shutil.rmtree(tmpdir)
 
-    if exit_code != 0:
-        print(fail_msg)
-
     return rd
 
 

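For reference, the compile-and-report pattern the setupext.py changes above switch to (run the compiler on a small test file via Popen, capture stderr, and print it when compilation fails) boils down to the following sketch; the helper name and test source are illustrative only.

.. code-block:: python

   import tempfile
   from subprocess import PIPE, Popen

   def try_compile(compiler, source, extra_flags):
       """Compile a small test program; report the compiler's stderr on failure."""
       with tempfile.NamedTemporaryFile("w", suffix=".c") as f:
           f.write(source)
           f.flush()
           p = Popen(compiler + extra_flags + [f.name],
                     stdin=PIPE, stdout=PIPE, stderr=PIPE)
           _, err = p.communicate()
       if p.returncode != 0:
           print("Test compilation failed with the error:")
           print(err.decode(errors="replace"))
       # The real checks chdir to a temporary directory first so the compiled
       # output is cleaned up afterwards.
       return p.returncode == 0

   # e.g. try_compile(["cc"], "int main(void) { return 0; }\n", ["-fopenmp"])
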
diff -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c -r a33c31184bc1fe0da3a42fe62b7d8583a730293e tests/tests.yaml
--- a/tests/tests.yaml
+++ b/tests/tests.yaml
@@ -20,6 +20,9 @@
   local_gadget_000:
     - yt/frontends/gadget/tests/test_outputs.py
 
+  local_gamer_000:
+    - yt/frontends/gamer/tests/test_outputs.py
+
   local_gdf_000:
     - yt/frontends/gdf/tests/test_outputs.py
 

diff -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c -r a33c31184bc1fe0da3a42fe62b7d8583a730293e yt/analysis_modules/absorption_spectrum/absorption_spectrum.py
--- a/yt/analysis_modules/absorption_spectrum/absorption_spectrum.py
+++ b/yt/analysis_modules/absorption_spectrum/absorption_spectrum.py
@@ -394,7 +394,8 @@
             #    10; this will assure we don't get spikes in the deposited
             #    spectra from uneven numbers of vbins per bin
             resolution = thermal_width / self.bin_width
-            n_vbins_per_bin = 10**(np.ceil(np.log10(subgrid_resolution/resolution)).clip(0, np.inf))
+            n_vbins_per_bin = (10 ** (np.ceil( np.log10( subgrid_resolution / 
+                               resolution) ).clip(0, np.inf) ) ).astype('int')
             vbin_width = self.bin_width.d / n_vbins_per_bin
 
             # a note to the user about which lines components are unresolved

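A quick illustration of why the absorption_spectrum change above adds the astype('int') cast: the clipped power of ten is otherwise a float array, which cannot be used directly as a count of virtual bins. The resolution values below are made up.

.. code-block:: python

   import numpy as np

   subgrid_resolution = 10
   # Illustrative values of thermal_width / bin_width for three lines.
   resolution = np.array([3.0, 40.0, 400.0])
   n_vbins_per_bin = (10 ** np.ceil(np.log10(
       subgrid_resolution / resolution)).clip(0, np.inf)).astype("int")
   print(n_vbins_per_bin)  # [10  1  1] -- integers, usable as a bin count
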
diff -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c -r a33c31184bc1fe0da3a42fe62b7d8583a730293e yt/analysis_modules/spectral_integrator/spectral_frequency_integrator.py
--- a/yt/analysis_modules/spectral_integrator/spectral_frequency_integrator.py
+++ b/yt/analysis_modules/spectral_integrator/spectral_frequency_integrator.py
@@ -176,7 +176,7 @@
     constant_metallicity: float, optional
         If specified, assume a constant metallicity for the emission 
         from metals.  The *with_metals* keyword must be set to False 
-        to use this.
+        to use this. It should be given in units of solar metallicity.
         Default: None.
 
     This will create three fields:
@@ -245,7 +245,7 @@
 
     emiss_name = "xray_emissivity_%s_%s_keV" % (e_min, e_max)
     ds.add_field(("gas", emiss_name), function=_emissivity_field,
-                 display_name=r"\epsilon_{X}\ (%s-%s\ keV)" % (e_min, e_max),
+                 display_name=r"\epsilon_{X} (%s-%s keV)" % (e_min, e_max),
                  units="erg/cm**3/s")
 
     def _luminosity_field(field, data):
@@ -253,7 +253,7 @@
 
     lum_name = "xray_luminosity_%s_%s_keV" % (e_min, e_max)
     ds.add_field(("gas", lum_name), function=_luminosity_field,
-                 display_name=r"\rm{L}_{X}\ (%s-%s\ keV)" % (e_min, e_max),
+                 display_name=r"\rm{L}_{X} (%s-%s keV)" % (e_min, e_max),
                  units="erg/s")
 
     def _photon_emissivity_field(field, data):
@@ -273,7 +273,7 @@
 
     phot_name = "xray_photon_emissivity_%s_%s_keV" % (e_min, e_max)
     ds.add_field(("gas", phot_name), function=_photon_emissivity_field,
-                 display_name=r"\epsilon_{X}\ (%s-%s\ keV)" % (e_min, e_max),
+                 display_name=r"\epsilon_{X} (%s-%s keV)" % (e_min, e_max),
                  units="photons/cm**3/s")
 
     return emiss_name, lum_name, phot_name

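The docstring change above notes that *constant_metallicity* is given in units of solar metallicity and requires *with_metals=False*. As a hedged usage sketch, where the helper name ``add_xray_emissivity_field`` and its import path are assumptions not shown in this hunk:

.. code-block:: python

   import yt
   from yt.analysis_modules.spectral_integrator.api import \
       add_xray_emissivity_field  # import path assumed, not shown in this hunk

   ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
   # constant_metallicity is in units of solar metallicity and requires
   # with_metals=False, per the docstring above; 0.3 Zsun is illustrative.
   emiss, lum, phot = add_xray_emissivity_field(
       ds, 0.5, 7.0, with_metals=False, constant_metallicity=0.3)
   ad = ds.all_data()
   print(ad["gas", emiss].sum())
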
diff -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c -r a33c31184bc1fe0da3a42fe62b7d8583a730293e yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -597,7 +597,8 @@
             ftype = self._last_freq[0] or ftype
         field = (ftype, fname)
         if field == self._last_freq:
-            return self._last_finfo
+            if field not in self.field_info.field_aliases.values():
+                return self._last_finfo
         if field in self.field_info:
             self._last_freq = field
             self._last_finfo = self.field_info[(ftype, fname)]

diff -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c -r a33c31184bc1fe0da3a42fe62b7d8583a730293e yt/extern/functools32.py
--- a/yt/extern/functools32.py
+++ /dev/null
@@ -1,423 +0,0 @@
-"""functools.py - Tools for working with functions and callable objects
-"""
-# Python module wrapper for _functools C module
-# to allow utilities written in Python to be added
-# to the functools module.
-# Written by Nick Coghlan <ncoghlan at gmail.com>
-# and Raymond Hettinger <python at rcn.com>
-#   Copyright (C) 2006-2010 Python Software Foundation.
-# See C source code for _functools credits/copyright
-
-__all__ = ['update_wrapper', 'wraps', 'WRAPPER_ASSIGNMENTS', 'WRAPPER_UPDATES',
-           'total_ordering', 'cmp_to_key', 'lru_cache', 'reduce', 'partial']
-
-from _functools import partial, reduce
-from collections import MutableMapping, namedtuple
-from .reprlib32 import recursive_repr as _recursive_repr
-from weakref import proxy as _proxy
-import sys as _sys
-try:
-    from _thread import allocate_lock as Lock
-except:
-    from ._dummy_thread32 import allocate_lock as Lock
-
-################################################################################
-### OrderedDict
-################################################################################
-
-class _Link(object):
-    __slots__ = 'prev', 'next', 'key', '__weakref__'
-
-class OrderedDict(dict):
-    'Dictionary that remembers insertion order'
-    # An inherited dict maps keys to values.
-    # The inherited dict provides __getitem__, __len__, __contains__, and get.
-    # The remaining methods are order-aware.
-    # Big-O running times for all methods are the same as regular dictionaries.
-
-    # The internal self.__map dict maps keys to links in a doubly linked list.
-    # The circular doubly linked list starts and ends with a sentinel element.
-    # The sentinel element never gets deleted (this simplifies the algorithm).
-    # The sentinel is in self.__hardroot with a weakref proxy in self.__root.
-    # The prev links are weakref proxies (to prevent circular references).
-    # Individual links are kept alive by the hard reference in self.__map.
-    # Those hard references disappear when a key is deleted from an OrderedDict.
-
-    def __init__(self, *args, **kwds):
-        '''Initialize an ordered dictionary.  The signature is the same as
-        regular dictionaries, but keyword arguments are not recommended because
-        their insertion order is arbitrary.
-
-        '''
-        if len(args) > 1:
-            raise TypeError('expected at most 1 arguments, got %d' % len(args))
-        try:
-            self.__root
-        except AttributeError:
-            self.__hardroot = _Link()
-            self.__root = root = _proxy(self.__hardroot)
-            root.prev = root.next = root
-            self.__map = {}
-        self.__update(*args, **kwds)
-
-    def __setitem__(self, key, value,
-                    dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link):
-        'od.__setitem__(i, y) <==> od[i]=y'
-        # Setting a new item creates a new link at the end of the linked list,
-        # and the inherited dictionary is updated with the new key/value pair.
-        if key not in self:
-            self.__map[key] = link = Link()
-            root = self.__root
-            last = root.prev
-            link.prev, link.next, link.key = last, root, key
-            last.next = link
-            root.prev = proxy(link)
-        dict_setitem(self, key, value)
-
-    def __delitem__(self, key, dict_delitem=dict.__delitem__):
-        'od.__delitem__(y) <==> del od[y]'
-        # Deleting an existing item uses self.__map to find the link which gets
-        # removed by updating the links in the predecessor and successor nodes.
-        dict_delitem(self, key)
-        link = self.__map.pop(key)
-        link_prev = link.prev
-        link_next = link.next
-        link_prev.next = link_next
-        link_next.prev = link_prev
-
-    def __iter__(self):
-        'od.__iter__() <==> iter(od)'
-        # Traverse the linked list in order.
-        root = self.__root
-        curr = root.next
-        while curr is not root:
-            yield curr.key
-            curr = curr.next
-
-    def __reversed__(self):
-        'od.__reversed__() <==> reversed(od)'
-        # Traverse the linked list in reverse order.
-        root = self.__root
-        curr = root.prev
-        while curr is not root:
-            yield curr.key
-            curr = curr.prev
-
-    def clear(self):
-        'od.clear() -> None.  Remove all items from od.'
-        root = self.__root
-        root.prev = root.next = root
-        self.__map.clear()
-        dict.clear(self)
-
-    def popitem(self, last=True):
-        '''od.popitem() -> (k, v), return and remove a (key, value) pair.
-        Pairs are returned in LIFO order if last is true or FIFO order if false.
-
-        '''
-        if not self:
-            raise KeyError('dictionary is empty')
-        root = self.__root
-        if last:
-            link = root.prev
-            link_prev = link.prev
-            link_prev.next = root
-            root.prev = link_prev
-        else:
-            link = root.next
-            link_next = link.next
-            root.next = link_next
-            link_next.prev = root
-        key = link.key
-        del self.__map[key]
-        value = dict.pop(self, key)
-        return key, value
-
-    def move_to_end(self, key, last=True):
-        '''Move an existing element to the end (or beginning if last==False).
-
-        Raises KeyError if the element does not exist.
-        When last=True, acts like a fast version of self[key]=self.pop(key).
-
-        '''
-        link = self.__map[key]
-        link_prev = link.prev
-        link_next = link.next
-        link_prev.next = link_next
-        link_next.prev = link_prev
-        root = self.__root
-        if last:
-            last = root.prev
-            link.prev = last
-            link.next = root
-            last.next = root.prev = link
-        else:
-            first = root.next
-            link.prev = root
-            link.next = first
-            root.next = first.prev = link
-
-    def __sizeof__(self):
-        sizeof = _sys.getsizeof
-        n = len(self) + 1                       # number of links including root
-        size = sizeof(self.__dict__)            # instance dictionary
-        size += sizeof(self.__map) * 2          # internal dict and inherited dict
-        size += sizeof(self.__hardroot) * n     # link objects
-        size += sizeof(self.__root) * n         # proxy objects
-        return size
-
-    update = __update = MutableMapping.update
-    keys = MutableMapping.keys
-    values = MutableMapping.values
-    items = MutableMapping.items
-    __ne__ = MutableMapping.__ne__
-
-    __marker = object()
-
-    def pop(self, key, default=__marker):
-        '''od.pop(k[,d]) -> v, remove specified key and return the corresponding
-        value.  If key is not found, d is returned if given, otherwise KeyError
-        is raised.
-
-        '''
-        if key in self:
-            result = self[key]
-            del self[key]
-            return result
-        if default is self.__marker:
-            raise KeyError(key)
-        return default
-
-    def setdefault(self, key, default=None):
-        'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
-        if key in self:
-            return self[key]
-        self[key] = default
-        return default
-
-    @_recursive_repr()
-    def __repr__(self):
-        'od.__repr__() <==> repr(od)'
-        if not self:
-            return '%s()' % (self.__class__.__name__,)
-        return '%s(%r)' % (self.__class__.__name__, list(self.items()))
-
-    def __reduce__(self):
-        'Return state information for pickling'
-        items = [[k, self[k]] for k in self]
-        inst_dict = vars(self).copy()
-        for k in vars(OrderedDict()):
-            inst_dict.pop(k, None)
-        if inst_dict:
-            return (self.__class__, (items,), inst_dict)
-        return self.__class__, (items,)
-
-    def copy(self):
-        'od.copy() -> a shallow copy of od'
-        return self.__class__(self)
-
-    @classmethod
-    def fromkeys(cls, iterable, value=None):
-        '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S.
-        If not specified, the value defaults to None.
-
-        '''
-        self = cls()
-        for key in iterable:
-            self[key] = value
-        return self
-
-    def __eq__(self, other):
-        '''od.__eq__(y) <==> od==y.  Comparison to another OD is order-sensitive
-        while comparison to a regular mapping is order-insensitive.
-
-        '''
-        if isinstance(other, OrderedDict):
-            return len(self)==len(other) and \
-                   all(p==q for p, q in zip(self.items(), other.items()))
-        return dict.__eq__(self, other)
-
-# update_wrapper() and wraps() are tools to help write
-# wrapper functions that can handle naive introspection
-
-WRAPPER_ASSIGNMENTS = ('__module__', '__name__', '__doc__')
-WRAPPER_UPDATES = ('__dict__',)
-def update_wrapper(wrapper,
-                   wrapped,
-                   assigned = WRAPPER_ASSIGNMENTS,
-                   updated = WRAPPER_UPDATES):
-    """Update a wrapper function to look like the wrapped function
-
-       wrapper is the function to be updated
-       wrapped is the original function
-       assigned is a tuple naming the attributes assigned directly
-       from the wrapped function to the wrapper function (defaults to
-       functools.WRAPPER_ASSIGNMENTS)
-       updated is a tuple naming the attributes of the wrapper that
-       are updated with the corresponding attribute from the wrapped
-       function (defaults to functools.WRAPPER_UPDATES)
-    """
-    wrapper.__wrapped__ = wrapped
-    for attr in assigned:
-        try:
-            value = getattr(wrapped, attr)
-        except AttributeError:
-            pass
-        else:
-            setattr(wrapper, attr, value)
-    for attr in updated:
-        getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
-    # Return the wrapper so this can be used as a decorator via partial()
-    return wrapper
-
-def wraps(wrapped,
-          assigned = WRAPPER_ASSIGNMENTS,
-          updated = WRAPPER_UPDATES):
-    """Decorator factory to apply update_wrapper() to a wrapper function
-
-       Returns a decorator that invokes update_wrapper() with the decorated
-       function as the wrapper argument and the arguments to wraps() as the
-       remaining arguments. Default arguments are as for update_wrapper().
-       This is a convenience function to simplify applying partial() to
-       update_wrapper().
-    """
-    return partial(update_wrapper, wrapped=wrapped,
-                   assigned=assigned, updated=updated)
-
-def total_ordering(cls):
-    """Class decorator that fills in missing ordering methods"""
-    convert = {
-        '__lt__': [('__gt__', lambda self, other: not (self < other or self == other)),
-                   ('__le__', lambda self, other: self < other or self == other),
-                   ('__ge__', lambda self, other: not self < other)],
-        '__le__': [('__ge__', lambda self, other: not self <= other or self == other),
-                   ('__lt__', lambda self, other: self <= other and not self == other),
-                   ('__gt__', lambda self, other: not self <= other)],
-        '__gt__': [('__lt__', lambda self, other: not (self > other or self == other)),
-                   ('__ge__', lambda self, other: self > other or self == other),
-                   ('__le__', lambda self, other: not self > other)],
-        '__ge__': [('__le__', lambda self, other: (not self >= other) or self == other),
-                   ('__gt__', lambda self, other: self >= other and not self == other),
-                   ('__lt__', lambda self, other: not self >= other)]
-    }
-    roots = set(dir(cls)) & set(convert)
-    if not roots:
-        raise ValueError('must define at least one ordering operation: < ><= >=')
-    root = max(roots)       # prefer __lt__ to __le__ to __gt__ to __ge__
-    for opname, opfunc in convert[root]:
-        if opname not in roots:
-            opfunc.__name__ = opname
-            opfunc.__doc__ = getattr(int, opname).__doc__
-            setattr(cls, opname, opfunc)
-    return cls
-
-def cmp_to_key(mycmp):
-    """Convert a cmp= function into a key= function"""
-    class K(object):
-        __slots__ = ['obj']
-        def __init__(self, obj):
-            self.obj = obj
-        def __lt__(self, other):
-            return mycmp(self.obj, other.obj) < 0
-        def __gt__(self, other):
-            return mycmp(self.obj, other.obj) > 0
-        def __eq__(self, other):
-            return mycmp(self.obj, other.obj) == 0
-        def __le__(self, other):
-            return mycmp(self.obj, other.obj) <= 0
-        def __ge__(self, other):
-            return mycmp(self.obj, other.obj) >= 0
-        def __ne__(self, other):
-            return mycmp(self.obj, other.obj) != 0
-        __hash__ = None
-    return K
-
-_CacheInfo = namedtuple("CacheInfo", "hits misses maxsize currsize")
-
-def lru_cache(maxsize=100):
-    """Least-recently-used cache decorator.
-
-    If *maxsize* is set to None, the LRU features are disabled and the cache
-    can grow without bound.
-
-    Arguments to the cached function must be hashable.
-
-    View the cache statistics named tuple (hits, misses, maxsize, currsize) with
-    f.cache_info().  Clear the cache and statistics with f.cache_clear().
-    Access the underlying function with f.__wrapped__.
-
-    See:  http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
-
-    """
-    # Users should only access the lru_cache through its public API:
-    #       cache_info, cache_clear, and f.__wrapped__
-    # The internals of the lru_cache are encapsulated for thread safety and
-    # to allow the implementation to change (including a possible C version).
-
-    def decorating_function(user_function,
-                tuple=tuple, sorted=sorted, len=len, KeyError=KeyError):
-
-        hits, misses = [0], [0]
-        kwd_mark = (object(),)          # separates positional and keyword args
-        lock = Lock()                   # needed because OrderedDict isn't threadsafe
-
-        if maxsize is None:
-            cache = dict()              # simple cache without ordering or size limit
-
-            @wraps(user_function)
-            def wrapper(*args, **kwds):
-                key = args
-                if kwds:
-                    key += kwd_mark + tuple(sorted(kwds.items()))
-                try:
-                    result = cache[key]
-                    hits[0] += 1
-                    return result
-                except KeyError:
-                    pass
-                result = user_function(*args, **kwds)
-                cache[key] = result
-                misses[0] += 1
-                return result
-        else:
-            cache = OrderedDict()           # ordered least recent to most recent
-            cache_popitem = cache.popitem
-            cache_renew = cache.move_to_end
-
-            @wraps(user_function)
-            def wrapper(*args, **kwds):
-                key = args
-                if kwds:
-                    key += kwd_mark + tuple(sorted(kwds.items()))
-                with lock:
-                    try:
-                        result = cache[key]
-                        cache_renew(key)    # record recent use of this key
-                        hits[0] += 1
-                        return result
-                    except KeyError:
-                        pass
-                result = user_function(*args, **kwds)
-                with lock:
-                    cache[key] = result     # record recent use of this key
-                    misses[0] += 1
-                    if len(cache) > maxsize:
-                        cache_popitem(0)    # purge least recently used cache entry
-                return result
-
-        def cache_info():
-            """Report cache statistics"""
-            with lock:
-                return _CacheInfo(hits[0], misses[0], maxsize, len(cache))
-
-        def cache_clear():
-            """Clear the cache and cache statistics"""
-            with lock:
-                cache.clear()
-                hits[0] = misses[0] = 0
-
-        wrapper.cache_info = cache_info
-        wrapper.cache_clear = cache_clear
-        return wrapper
-
-    return decorating_function

diff -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c -r a33c31184bc1fe0da3a42fe62b7d8583a730293e yt/fields/field_aliases.py
--- a/yt/fields/field_aliases.py
+++ b/yt/fields/field_aliases.py
@@ -79,8 +79,8 @@
     ("TangentialVelocity",               "tangential_velocity"),
     ("CuttingPlaneVelocityX",            "cutting_plane_velocity_x"),
     ("CuttingPlaneVelocityY",            "cutting_plane_velocity_y"),
-    ("CuttingPlaneBX",                   "cutting_plane_bx"),
-    ("CuttingPlaneBy",                   "cutting_plane_by"),
+    ("CuttingPlaneBX",                   "cutting_plane_magnetic_field_x"),
+    ("CuttingPlaneBy",                   "cutting_plane_magnetic_field_y"),
     ("MeanMolecularWeight",              "mean_molecular_weight"),
     ("particle_density",                 "particle_density"),
     ("ThermalEnergy",                    "thermal_energy"),

diff -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c -r a33c31184bc1fe0da3a42fe62b7d8583a730293e yt/fields/field_info_container.py
--- a/yt/fields/field_info_container.py
+++ b/yt/fields/field_info_container.py
@@ -253,7 +253,17 @@
                 self[name] = DerivedField(name, f, **kwargs)
                 return f
             return create_function
-        self[name] = DerivedField(name, function, **kwargs)
+        ptype = kwargs.get("particle_type", False)
+        if ptype:
+            ftype = 'all'
+        else:
+            ftype = self.ds.default_fluid_type
+        if not isinstance(name, tuple) and (ftype, name) not in self:
+            tuple_name = (ftype, name)
+            self[tuple_name] = DerivedField(tuple_name, function, **kwargs)
+            self.alias(name, tuple_name)
+        else:
+            self[name] = DerivedField(name, function, **kwargs)
 
     def load_all_plugins(self, ftype="gas"):
         loaded = []

diff -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c -r a33c31184bc1fe0da3a42fe62b7d8583a730293e yt/fields/fluid_fields.py
--- a/yt/fields/fluid_fields.py
+++ b/yt/fields/fluid_fields.py
@@ -54,6 +54,7 @@
     unit_system = registry.ds.unit_system
 
     create_vector_fields(registry, "velocity", unit_system["velocity"], ftype, slice_info)
+    create_vector_fields(registry, "magnetic_field", unit_system["magnetic_field"], ftype, slice_info)
 
     def _cell_mass(field, data):
         return data[ftype, "density"] * data[ftype, "cell_volume"]

diff -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c -r a33c31184bc1fe0da3a42fe62b7d8583a730293e yt/fields/local_fields.py
--- a/yt/fields/local_fields.py
+++ b/yt/fields/local_fields.py
@@ -25,7 +25,10 @@
 class LocalFieldInfoContainer(FieldInfoContainer):
     def add_field(self, name, function=None, **kwargs):
         if not isinstance(name, tuple):
-            name = ('gas', name)
+            if kwargs.setdefault('particle_type', False):
+                name = ('all', name)
+            else:
+                name = ('gas', name)
         override = kwargs.get("force_override", False)
         # Handle the case where the field has already been added.
         if not override and name in self:

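With the local_fields change above, a string field name registered through the local field registry is promoted to ('all', name) when particle_type=True instead of ('gas', name). A minimal sketch of that path, with a purely illustrative field name:

.. code-block:: python

   import yt

   def _pmass_msun(field, data):
       return data["particle_mass"].in_units("Msun")

   # With particle_type=True the string name now lands under ('all', ...)
   # rather than ('gas', ...).
   yt.add_field("particle_mass_msun", function=_pmass_msun,
                units="Msun", particle_type=True)
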
diff -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c -r a33c31184bc1fe0da3a42fe62b7d8583a730293e yt/fields/particle_fields.py
--- a/yt/fields/particle_fields.py
+++ b/yt/fields/particle_fields.py
@@ -783,7 +783,8 @@
         mass = data[ptype, mass_name].in_base(unit_system.name)
         dens = data[ptype, density_name].in_base(unit_system.name)
         quan = data[ptype, smoothed_field]
-        quan = quan.convert_to_units(field_units)
+        if hasattr(quan, "units"):
+            quan = quan.convert_to_units(field_units)
 
         if smoothing_length_name is None:
             hsml = np.zeros(quan.shape, dtype='float64') - 1

diff -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c -r a33c31184bc1fe0da3a42fe62b7d8583a730293e yt/fields/tests/test_fields.py
--- a/yt/fields/tests/test_fields.py
+++ b/yt/fields/tests/test_fields.py
@@ -4,6 +4,7 @@
     load
 from yt.testing import \
     fake_random_ds, \
+    fake_particle_ds, \
     assert_almost_equal, \
     assert_equal, \
     assert_array_almost_equal_nulp, \
@@ -313,6 +314,28 @@
     ad['density_alias']
     assert ds.derived_field_list[0] == 'density_alias'
 
+def test_add_field_string_aliasing():
+    ds = fake_random_ds(16)
+
+    def density_alias(field, data):
+        return data['density']
+
+    ds.add_field('density_alias', function=density_alias, units='g/cm**3')
+
+    ds.field_info['density_alias']
+    ds.field_info['gas', 'density_alias']
+
+    ds = fake_particle_ds()
+    
+    def pmass_alias(field, data):
+        return data['particle_mass']
+        
+    ds.add_field('particle_mass_alias', function=pmass_alias, 
+                 units='g', particle_type=True)
+
+    ds.field_info['particle_mass_alias']
+    ds.field_info['all', 'particle_mass_alias']
+    
 
 if __name__ == "__main__":
     setup()

diff -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c -r a33c31184bc1fe0da3a42fe62b7d8583a730293e yt/fields/vector_operations.py
--- a/yt/fields/vector_operations.py
+++ b/yt/fields/vector_operations.py
@@ -183,13 +183,15 @@
     def _cp_vectors(ax):
         def _cp_val(field, data):
             vec = data.get_field_parameter("cp_%s_vec" % (ax))
-            bv = data.get_field_parameter("bulk_%s" % basename)
-            if bv is None: bv = np.zeros(3)
-            tr  = (data[xn] - bv[0]) * vec[0]
-            tr += (data[yn] - bv[1]) * vec[1]
-            tr += (data[zn] - bv[2]) * vec[2]
+            bv = data.get_field_parameter("bulk_%s" % basename, None)
+            if bv is None:
+                bv = data.ds.arr(np.zeros(3), data[xn].units)
+            tr  = (data[xn] - bv[0]) * vec.d[0]
+            tr += (data[yn] - bv[1]) * vec.d[1]
+            tr += (data[zn] - bv[2]) * vec.d[2]
             return tr
         return _cp_val
+
     registry.add_field((ftype, "cutting_plane_%s_x" % basename),
                        function=_cp_vectors('x'),
                        units=field_units)

diff -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c -r a33c31184bc1fe0da3a42fe62b7d8583a730293e yt/frontends/_skeleton/data_structures.py
--- a/yt/frontends/_skeleton/data_structures.py
+++ b/yt/frontends/_skeleton/data_structures.py
@@ -14,6 +14,8 @@
 #-----------------------------------------------------------------------------
 
 import os
+import numpy as np
+import weakref
 
 from yt.data_objects.grid_patch import \
     AMRGridPatch
@@ -25,15 +27,12 @@
 
 class SkeletonGrid(AMRGridPatch):
     _id_offset = 0
-    def __init__(self, id, index, level, start, dimensions):
+    def __init__(self, id, index, level):
         AMRGridPatch.__init__(self, id, filename=index.index_filename,
                               index=index)
-        self.Parent = []
+        self.Parent = None
         self.Children = []
         self.Level = level
-        self.start_index = start.copy()
-        self.stop_index = self.start_index + dimensions
-        self.ActiveDimensions = dimensions.copy()
 
     def __repr__(self):
         return "SkeletonGrid_%04i (%s)" % (self.id, self.ActiveDimensions)
@@ -43,14 +42,17 @@
 
     def __init__(self, ds, dataset_type='skeleton'):
         self.dataset_type = dataset_type
+        self.dataset = weakref.proxy(ds)
         # for now, the index file is the dataset!
         self.index_filename = self.dataset.parameter_filename
         self.directory = os.path.dirname(self.index_filename)
+        # float type for the simulation edges; it must be float64 for now
+        self.float_type = np.float64
         GridIndex.__init__(self, ds, dataset_type)
 
     def _detect_output_fields(self):
         # This needs to set a self.field_list that contains all the available,
-        # on-disk fields.
+        # on-disk fields. No derived fields should be defined here.
         # NOTE: Each should be a tuple, where the first element is the on-disk
         # fluid type or particle type.  Convention suggests that the on-disk
         # fluid type is usually the dataset_type and the on-disk particle type
@@ -69,7 +71,7 @@
         #   self.grid_particle_count    (N, 1) <= int
         #   self.grid_levels            (N, 1) <= int
         #   self.grids                  (N, 1) <= grid objects
-        #
+        #   self.max_level = self.grid_levels.max()
         pass
 
     def _populate_grid_objects(self):
@@ -94,6 +96,8 @@
         Dataset.__init__(self, filename, dataset_type,
                          units_override=units_override)
         self.storage_filename = storage_filename
+        # refinement factor between a grid and its subgrid
+        # self.refine_by = 2
 
     def _set_code_unit_attributes(self):
         # This is where quantities are created that represent the various
@@ -114,10 +118,11 @@
     def _parse_parameter_file(self):
         # This needs to set up the following items.  Note that these are all
         # assumed to be in code units; domain_left_edge and domain_right_edge
-        # will be updated to be in code units at a later time.  This includes
-        # the cosmological parameters.
+        # will be converted to YTArray automatically at a later time.
+        # This includes the cosmological parameters.
         #
-        #   self.unique_identifier
+        #   self.unique_identifier      <= unique identifier for the dataset
+        #                                  being read (e.g., UUID or ST_CTIME)
         #   self.parameters             <= full of code-specific items of use
         #   self.domain_left_edge       <= array of float64
         #   self.domain_right_edge      <= array of float64

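To make the skeleton comments above concrete, here is a hedged sketch of a minimal _parse_parameter_file body for a frontend dataset class. Only the attributes visible in this hunk (plus refine_by, mentioned earlier in the file) are set, the values are placeholders, and the full list of required attributes continues beyond the context shown.

.. code-block:: python

   import os
   import numpy as np

   def _parse_parameter_file(self):
       # Placeholder values; a real frontend reads these from its output file.
       self.unique_identifier = int(os.stat(self.parameter_filename).st_ctime)
       self.parameters = {}
       self.domain_left_edge = np.zeros(3, dtype="float64")
       self.domain_right_edge = np.ones(3, dtype="float64")
       # refinement factor between a grid and its subgrid (see above)
       self.refine_by = 2
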
diff -r e7f00268f51b3c78b0013dffdbb00c87c4e6842c -r a33c31184bc1fe0da3a42fe62b7d8583a730293e yt/frontends/_skeleton/fields.py
--- a/yt/frontends/_skeleton/fields.py
+++ b/yt/frontends/_skeleton/fields.py
@@ -31,13 +31,14 @@
         # ( "name", ("units", ["fields", "to", "alias"], # "display_name")),
     )
 
-    def __init__(self, ds):
-        super(SkeletonFieldInfo, self).__init__(ds)
+    def __init__(self, ds, field_list):
+        super(SkeletonFieldInfo, self).__init__(ds, field_list)
         # If you want, you can check self.field_list
 
     def setup_fluid_fields(self):
         # Here we do anything that might need info about the dataset.
-        # You can use self.alias, self.add_output_field and self.add_field .
+        # You can use self.alias, self.add_output_field (for on-disk fields)
+        # and self.add_field (for derived fields).
         pass
 
     def setup_particle_fields(self, ptype):

This diff is so big that we needed to truncate the remainder.

https://bitbucket.org/yt_analysis/yt/commits/e936e8f5c538/
Changeset:   e936e8f5c538
Branch:      yt
User:        chummels
Date:        2016-05-11 19:42:34+00:00
Summary:     Adding answer test entry for gizmo
Affected #:  1 file

diff -r a33c31184bc1fe0da3a42fe62b7d8583a730293e -r e936e8f5c538cb3a1bc51974c5a3952645ea7397 tests/tests.yaml
--- a/tests/tests.yaml
+++ b/tests/tests.yaml
@@ -26,6 +26,9 @@
   local_gdf_000:
     - yt/frontends/gdf/tests/test_outputs.py
 
+  local_gizmo_000:
+    - yt/frontends/gizmo/tests/test_outputs.py
+
   local_halos_000:
     - yt/analysis_modules/halo_analysis/tests/test_halo_finders.py  # [py2]
     - yt/analysis_modules/halo_finding/tests/test_rockstar.py  # [py2]


https://bitbucket.org/yt_analysis/yt/commits/9eb20a27b1bd/
Changeset:   9eb20a27b1bd
Branch:      yt
User:        brittonsmith
Date:        2016-05-12 06:48:59+00:00
Summary:     Fixing color on the bikeshed.
Affected #:  2 files

diff -r e936e8f5c538cb3a1bc51974c5a3952645ea7397 -r 9eb20a27b1bdcb344ba338d4a1290511b820c5e6 yt/frontends/api.py
--- a/yt/frontends/api.py
+++ b/yt/frontends/api.py
@@ -25,13 +25,13 @@
     'eagle',
     'enzo',
     'exodus_ii',
-    'fire',
     'fits',
     'flash',
     'gadget',
     'gadget_fof',
     'gamer',
     'gdf',
+    'gizmo',
     'halo_catalog',
     'http_stream',
     'moab',

diff -r e936e8f5c538cb3a1bc51974c5a3952645ea7397 -r 9eb20a27b1bdcb344ba338d4a1290511b820c5e6 yt/frontends/gizmo/api.py
--- a/yt/frontends/gizmo/api.py
+++ b/yt/frontends/gizmo/api.py
@@ -1,5 +1,5 @@
 """
-API for FIRE frontend.
+API for Gizmo frontend.
 
 
 


https://bitbucket.org/yt_analysis/yt/commits/5fa56c085d67/
Changeset:   5fa56c085d67
Branch:      yt
User:        brittonsmith
Date:        2016-05-12 07:07:30+00:00
Summary:     Adding deposited versions of metal fields and gas aliases.
Affected #:  1 file

diff -r 9eb20a27b1bdcb344ba338d4a1290511b820c5e6 -r 5fa56c085d67436ce563b68ebd6cdae7147c7bfe yt/frontends/gizmo/fields.py
--- a/yt/frontends/gizmo/fields.py
+++ b/yt/frontends/gizmo/fields.py
@@ -14,6 +14,8 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
+from yt.fields.particle_fields import \
+    add_volume_weighted_smoothed_field
 from yt.fields.species_fields import \
     add_species_field_by_density
 from yt.frontends.gadget.fields import \
@@ -96,9 +98,19 @@
             return data[ptype, "density"] * \
               data[ptype, "%s_metallicity" % species]
 
+        num_neighbors = 64
         for species in self.nuclei_names:
             self.add_field(
                 (ptype, "%s_nuclei_mass_density" % species),
                 function=_nuclei_mass_density_field,
                 particle_type=True,
                 units=self.ds.unit_system["density"])
+
+            for suf in ["_nuclei_mass_density", "_metallicity"]:
+                field = "%s%s" % (species, suf)
+                fn = add_volume_weighted_smoothed_field(
+                    ptype, "particle_position", "particle_mass",
+                    "smoothing_length", "density", field,
+                    self, num_neighbors)
+
+                self.alias(("gas", field), fn[0])


https://bitbucket.org/yt_analysis/yt/commits/ee3d232be4d7/
Changeset:   ee3d232be4d7
Branch:      yt
User:        brittonsmith
Date:        2016-05-12 15:52:04+00:00
Summary:     Returning metallicity to field list.
Affected #:  1 file

diff -r 5fa56c085d67436ce563b68ebd6cdae7147c7bfe -r ee3d232be4d7d3fb014ca89e717cef312d6207d6 yt/frontends/sph/fields.py
--- a/yt/frontends/sph/fields.py
+++ b/yt/frontends/sph/fields.py
@@ -41,6 +41,7 @@
         ("Phi", ("code_length", [], None)),
         ("StarFormationRate", ("Msun / yr", [], None)),
         ("FormationTime", ("code_time", ["creation_time"], None)),
+        ("Metallicity_00", ("", ["metallicity"], None)),
     )
 
     def setup_particle_fields(self, ptype, *args, **kwargs):


https://bitbucket.org/yt_analysis/yt/commits/e092b580d094/
Changeset:   e092b580d094
Branch:      yt
User:        brittonsmith
Date:        2016-05-12 15:53:15+00:00
Summary:     Change one more FIRE to Gizmo.
Affected #:  1 file

diff -r ee3d232be4d7d3fb014ca89e717cef312d6207d6 -r e092b580d09453c5eb894167a208761d47b594f2 yt/frontends/gizmo/tests/test_outputs.py
--- a/yt/frontends/gizmo/tests/test_outputs.py
+++ b/yt/frontends/gizmo/tests/test_outputs.py
@@ -41,7 +41,7 @@
 @requires_ds(FIRE_m12i)
 def test_GizmoDataset():
     ds = data_dir_load(FIRE_m12i)
-    assert isinstance(ds, FIREDataset)
+    assert isinstance(ds, GizmoDataset)
     for test in sph_answer(ds, 'FIRE_m12i', 4786950, fields):
         test_GizmoDataset.__name__ = test.description
         yield test


https://bitbucket.org/yt_analysis/yt/commits/09a8105d77d1/
Changeset:   09a8105d77d1
Branch:      yt
User:        brittonsmith
Date:        2016-05-13 09:45:38+00:00
Summary:     Wrong name.
Affected #:  1 file

diff -r e092b580d09453c5eb894167a208761d47b594f2 -r 09a8105d77d18f2aacf63da054f78305fe50b395 yt/frontends/gizmo/tests/test_outputs.py
--- a/yt/frontends/gizmo/tests/test_outputs.py
+++ b/yt/frontends/gizmo/tests/test_outputs.py
@@ -42,7 +42,7 @@
 def test_GizmoDataset():
     ds = data_dir_load(FIRE_m12i)
     assert isinstance(ds, GizmoDataset)
-    for test in sph_answer(ds, 'FIRE_m12i', 4786950, fields):
+    for test in sph_answer(ds, 'snapshot_600', 4786950, fields):
         test_GizmoDataset.__name__ = test.description
         yield test
     assert False


https://bitbucket.org/yt_analysis/yt/commits/52544f4f9834/
Changeset:   52544f4f9834
Branch:      yt
User:        brittonsmith
Date:        2016-05-13 09:48:59+00:00
Summary:     Changing tests a bit.
Affected #:  1 file

diff -r 09a8105d77d18f2aacf63da054f78305fe50b395 -r 52544f4f9834012fbd67f28ba93b6b2149def44c yt/frontends/gizmo/tests/test_outputs.py
--- a/yt/frontends/gizmo/tests/test_outputs.py
+++ b/yt/frontends/gizmo/tests/test_outputs.py
@@ -28,13 +28,12 @@
 fields = OrderedDict(
     [
         (("gas", "density"), None),
-        (("gas", "temperature"), None),
         (("gas", "temperature"), ('gas', 'density')),
+        (("gas", "metallicity"), ('gas', 'density')),
+        (("gas", "O_metallicity"), ('gas', 'density')),
         (('gas', 'velocity_magnitude'), None),
-        (("deposit", "all_density"), None),
         (("deposit", "all_count"), None),
         (("deposit", "all_cic"), None),
-        (("deposit", "PartType0_density"), None),
     ]
 )
 


https://bitbucket.org/yt_analysis/yt/commits/bbf532bc0680/
Changeset:   bbf532bc0680
Branch:      yt
User:        brittonsmith
Date:        2016-05-19 09:50:12+00:00
Summary:     Fixing table.
Affected #:  1 file

diff -r 52544f4f9834012fbd67f28ba93b6b2149def44c -r bbf532bc0680d70497571a89beafa69aecbcc655 doc/source/reference/code_support.rst
--- a/doc/source/reference/code_support.rst
+++ b/doc/source/reference/code_support.rst
@@ -29,9 +29,9 @@
 | Enzo                  |     Y      |     Y     |      Y     |   Y   |    Y     |    Y     |     Y      |   Full   |
 +-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
 | FITS                  |     Y      |    N/A    |      Y     |   Y   |    Y     |    Y     |     Y      |   Full   |
-++-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
++-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
 | FLASH                 |     Y      |     Y     |      Y     |   Y   |    Y     |    Y     |     Y      |   Full   |
------------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
++-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
 | Gadget                |     Y      |     Y     |      Y     |   Y   | Y [#f2]_ |    Y     |     Y      |   Full   |
 +-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
 | GAMER                 |     Y      |     N     |      Y     |   Y   |    Y     |    Y     |     Y      |   Full   |


https://bitbucket.org/yt_analysis/yt/commits/436b6de403e8/
Changeset:   436b6de403e8
Branch:      yt
User:        brittonsmith
Date:        2016-05-19 15:51:14+00:00
Summary:     Removing explicit test failure.
Affected #:  1 file

diff -r bbf532bc0680d70497571a89beafa69aecbcc655 -r 436b6de403e814e6d2bed77f66511ad4f1dff740 yt/frontends/gizmo/tests/test_outputs.py
--- a/yt/frontends/gizmo/tests/test_outputs.py
+++ b/yt/frontends/gizmo/tests/test_outputs.py
@@ -44,4 +44,3 @@
     for test in sph_answer(ds, 'snapshot_600', 4786950, fields):
         test_GizmoDataset.__name__ = test.description
         yield test
-    assert False


https://bitbucket.org/yt_analysis/yt/commits/b3c23ff06040/
Changeset:   b3c23ff06040
Branch:      yt
User:        ngoldbaum
Date:        2016-05-19 17:43:36+00:00
Summary:     Merged in brittonsmith/yt (pull request #2136)

Adding Gizmo frontend
Affected #:  11 files

diff -r de4aaa4344b2ffd78d9e4411074b9b398f5e6ad4 -r b3c23ff06040ef967b7c9ac041d5ad3dc4edb79a doc/source/examining/loading_data.rst
--- a/doc/source/examining/loading_data.rst
+++ b/doc/source/examining/loading_data.rst
@@ -1319,6 +1319,31 @@
 ``bbox``
        The bounding box for the particle positions.
 
+.. _loading-gizmo-data:
+
+Gizmo Data
+----------
+
+Gizmo datasets, including FIRE outputs, can be loaded into yt in the usual
+manner.  Like other SPH data formats, yt loads Gizmo data as particle fields
+and then uses smoothing kernels to deposit those fields onto an underlying
+grid structure as spatial fields, as described in :ref:`loading-gadget-data`.
+To load a Gizmo dataset written in the standard HDF5 output format:
+
+.. code-block:: python
+
+   import yt
+   ds = yt.load("snapshot_600.hdf5")
+
+Because the Gizmo output format is similar to the Gadget format, yt
+may load Gizmo datasets as Gadget depending on the circumstances, but this
+should not pose a problem in most situations.  FIRE outputs are identified
+as Gizmo data based on the number of metallicity fields present (11 or 17).
+
+For Gizmo outputs written in the raw binary format, you may have to specify
+a bounding box, field specification, and units, as is done for standard
+Gadget outputs.  See :ref:`loading-gadget-data` for more information.
+
 .. _loading-pyne-data:
 
 Halo Catalog Data

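To complement the loading_data.rst text above, a hedged sketch of the raw-binary case, following the standard Gadget loading pattern; the snapshot name, unit system, and bounding box are placeholders.

.. code-block:: python

   import yt

   # Placeholder unit system and bounding box; adjust to the simulation.
   unit_base = {"length": (1.0, "kpc"),
                "velocity": (1.0, "km/s"),
                "mass": (1.0, "1e10*Msun")}
   bbox = [[-600.0, 600.0], [-600.0, 600.0], [-600.0, 600.0]]
   ds = yt.load("snapshot_600", unit_base=unit_base, bounding_box=bbox)
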
diff -r de4aaa4344b2ffd78d9e4411074b9b398f5e6ad4 -r b3c23ff06040ef967b7c9ac041d5ad3dc4edb79a doc/source/reference/code_support.rst
--- a/doc/source/reference/code_support.rst
+++ b/doc/source/reference/code_support.rst
@@ -28,16 +28,18 @@
 +-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
 | Enzo                  |     Y      |     Y     |      Y     |   Y   |    Y     |    Y     |     Y      |   Full   |
 +-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
+| FITS                  |     Y      |    N/A    |      Y     |   Y   |    Y     |    Y     |     Y      |   Full   |
++-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
 | FLASH                 |     Y      |     Y     |      Y     |   Y   |    Y     |    Y     |     Y      |   Full   |
 +-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
-| FITS                  |     Y      |    N/A    |      Y     |   Y   |    Y     |    Y     |     Y      |   Full   |
-+-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
 | Gadget                |     Y      |     Y     |      Y     |   Y   | Y [#f2]_ |    Y     |     Y      |   Full   |
 +-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
 | GAMER                 |     Y      |     N     |      Y     |   Y   |    Y     |    Y     |     Y      |   Full   |
 +-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
 | Gasoline              |     Y      |     Y     |      Y     |   Y   | Y [#f2]_ |    Y     |     Y      |   Full   |
 +-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
+| Gizmo                 |     Y      |     Y     |      Y     |   Y   | Y [#f2]_ |    Y     |     Y      |   Full   |
++-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
 | Grid Data Format (GDF)|     Y      |    N/A    |      Y     |   Y   |    Y     |    Y     |     Y      |   Full   |
 +-----------------------+------------+-----------+------------+-------+----------+----------+------------+----------+
 | Maestro               |   Y [#f1]_ |     N     |      Y     |   Y   |    Y     |    Y     |     N      | Partial  |

diff -r de4aaa4344b2ffd78d9e4411074b9b398f5e6ad4 -r b3c23ff06040ef967b7c9ac041d5ad3dc4edb79a tests/tests.yaml
--- a/tests/tests.yaml
+++ b/tests/tests.yaml
@@ -26,6 +26,9 @@
   local_gdf_000:
     - yt/frontends/gdf/tests/test_outputs.py
 
+  local_gizmo_000:
+    - yt/frontends/gizmo/tests/test_outputs.py
+
   local_halos_000:
     - yt/analysis_modules/halo_analysis/tests/test_halo_finders.py  # [py2]
     - yt/analysis_modules/halo_finding/tests/test_rockstar.py  # [py2]

diff -r de4aaa4344b2ffd78d9e4411074b9b398f5e6ad4 -r b3c23ff06040ef967b7c9ac041d5ad3dc4edb79a yt/frontends/api.py
--- a/yt/frontends/api.py
+++ b/yt/frontends/api.py
@@ -31,6 +31,7 @@
     'gadget_fof',
     'gamer',
     'gdf',
+    'gizmo',
     'halo_catalog',
     'http_stream',
     'moab',

diff -r de4aaa4344b2ffd78d9e4411074b9b398f5e6ad4 -r b3c23ff06040ef967b7c9ac041d5ad3dc4edb79a yt/frontends/gizmo/api.py
--- /dev/null
+++ b/yt/frontends/gizmo/api.py
@@ -0,0 +1,21 @@
+"""
+API for Gizmo frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2016, yt Development Team
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from .data_structures import \
+    GizmoDataset
+
+from .fields import \
+    GizmoFieldInfo

diff -r de4aaa4344b2ffd78d9e4411074b9b398f5e6ad4 -r b3c23ff06040ef967b7c9ac041d5ad3dc4edb79a yt/frontends/gizmo/data_structures.py
--- /dev/null
+++ b/yt/frontends/gizmo/data_structures.py
@@ -0,0 +1,44 @@
+"""
+Data structures for Gizmo frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2016, yt Development Team
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.utilities.on_demand_imports import _h5py as h5py
+
+from yt.frontends.gadget.data_structures import \
+    GadgetHDF5Dataset
+
+from .fields import \
+    GizmoFieldInfo
+
+class GizmoDataset(GadgetHDF5Dataset):
+    _field_info_class = GizmoFieldInfo
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        need_groups = ['Header']
+        veto_groups = ['FOF', 'Group', 'Subhalo']
+        valid = True
+        try:
+            fh = h5py.File(args[0], mode='r')
+            valid = all(ng in fh["/"] for ng in need_groups) and \
+              not any(vg in fh["/"] for vg in veto_groups)
+            dmetal = "/PartType0/Metallicity"
+            if dmetal not in fh or fh[dmetal].shape[1] not in (11, 17):
+                valid = False
+            fh.close()
+        except:
+            valid = False
+            pass
+        return valid

diff -r de4aaa4344b2ffd78d9e4411074b9b398f5e6ad4 -r b3c23ff06040ef967b7c9ac041d5ad3dc4edb79a yt/frontends/gizmo/fields.py
--- /dev/null
+++ b/yt/frontends/gizmo/fields.py
@@ -0,0 +1,116 @@
+"""
+Gizmo-specific fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2016, yt Development Team
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.fields.particle_fields import \
+    add_volume_weighted_smoothed_field
+from yt.fields.species_fields import \
+    add_species_field_by_density
+from yt.frontends.gadget.fields import \
+    GadgetFieldInfo
+from yt.frontends.sph.fields import \
+    SPHFieldInfo
+
+class GizmoFieldInfo(GadgetFieldInfo):
+    known_particle_fields = (
+        ("Mass", ("code_mass", ["particle_mass"], None)),
+        ("Masses", ("code_mass", ["particle_mass"], None)),
+        ("Coordinates", ("code_length", ["particle_position"], None)),
+        ("Velocity", ("code_velocity", ["particle_velocity"], None)),
+        ("Velocities", ("code_velocity", ["particle_velocity"], None)),
+        ("ParticleIDs", ("", ["particle_index"], None)),
+        ("InternalEnergy", ("code_velocity ** 2", ["thermal_energy"], None)),
+        ("SmoothingLength", ("code_length", ["smoothing_length"], None)),
+        ("Density", ("code_mass / code_length**3", ["density"], None)),
+        ("MaximumTemperature", ("K", [], None)),
+        ("Temperature", ("K", ["temperature"], None)),
+        ("Epsilon", ("code_length", [], None)),
+        ("Metals", ("code_metallicity", ["metallicity"], None)),
+        ("Metallicity", ("code_metallicity", ["metallicity"], None)),
+        ("Phi", ("code_length", [], None)),
+        ("StarFormationRate", ("Msun / yr", [], None)),
+        ("FormationTime", ("code_time", ["creation_time"], None)),
+        ("Metallicity_00", ("", ["metallicity"], None)),
+        ("Metallicity_01", ("", ["He_metallicity"], None)),
+        ("Metallicity_02", ("", ["C_metallicity"], None)),
+        ("Metallicity_03", ("", ["N_metallicity"], None)),
+        ("Metallicity_04", ("", ["O_metallicity"], None)),
+        ("Metallicity_05", ("", ["Ne_metallicity"], None)),
+        ("Metallicity_06", ("", ["Mg_metallicity"], None)),
+        ("Metallicity_07", ("", ["Si_metallicity"], None)),
+        ("Metallicity_08", ("", ["S_metallicity"], None)),
+        ("Metallicity_09", ("", ["Ca_metallicity"], None)),
+        ("Metallicity_10", ("", ["Fe_metallicity"], None)),
+    )
+
+    def __init__(self, *args, **kwargs):
+        super(SPHFieldInfo, self).__init__(*args, **kwargs)
+        if ("PartType0", "Metallicity_00") in self.field_list:
+            self.nuclei_names = ["He", "C", "N", "O", "Ne", "Mg", "Si", "S",
+                                 "Ca", "Fe"]
+
+    def setup_gas_particle_fields(self, ptype):
+        super(GizmoFieldInfo, self).setup_gas_particle_fields(ptype)
+        self.alias((ptype, "temperature"), (ptype, "Temperature"))
+
+        def _h_density(field, data):
+            x_H = 1.0 - data[(ptype, "He_metallicity")] - \
+              data[(ptype, "metallicity")]
+            return x_H * data[(ptype, "density")] * \
+              data[(ptype, "NeutralHydrogenAbundance")]
+
+        self.add_field(
+            (ptype, "H_density"),
+            function=_h_density,
+            particle_type=True,
+            units=self.ds.unit_system["density"])
+        add_species_field_by_density(self, ptype, "H", particle_type=True)
+        for suffix in ["density", "fraction", "mass", "number_density"]:
+            self.alias((ptype, "H_p0_%s" % suffix), (ptype, "H_%s" % suffix))
+
+        def _h_p1_density(field, data):
+            x_H = 1.0 - data[(ptype, "He_metallicity")] - \
+              data[(ptype, "metallicity")]
+            return x_H * data[(ptype, "density")] * \
+              (1.0 - data[(ptype, "NeutralHydrogenAbundance")])
+
+        self.add_field(
+            (ptype, "H_p1_density"),
+            function=_h_p1_density,
+            particle_type=True,
+            units=self.ds.unit_system["density"])
+        add_species_field_by_density(self, ptype, "H_p1", particle_type=True)
+
+        def _nuclei_mass_density_field(field, data):
+            species = field.name[1][:field.name[1].find("_")]
+            return data[ptype, "density"] * \
+              data[ptype, "%s_metallicity" % species]
+
+        num_neighbors = 64
+        for species in self.nuclei_names:
+            self.add_field(
+                (ptype, "%s_nuclei_mass_density" % species),
+                function=_nuclei_mass_density_field,
+                particle_type=True,
+                units=self.ds.unit_system["density"])
+
+            for suf in ["_nuclei_mass_density", "_metallicity"]:
+                field = "%s%s" % (species, suf)
+                fn = add_volume_weighted_smoothed_field(
+                    ptype, "particle_position", "particle_mass",
+                    "smoothing_length", "density", field,
+                    self, num_neighbors)
+
+                self.alias(("gas", field), fn[0])

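Once a Gizmo snapshot is loaded, the fields defined in GizmoFieldInfo above can be accessed like any other; a short hedged sketch, with the snapshot name as a placeholder:

.. code-block:: python

   import yt

   ds = yt.load("snapshot_600.hdf5")
   ad = ds.all_data()
   # Particle-carried neutral and ionized hydrogen densities...
   print(ad["PartType0", "H_p0_density"])
   print(ad["PartType0", "H_p1_density"])
   # ...and the smoothed, grid-deposited metal fields aliased to "gas".
   print(ad["gas", "O_metallicity"])
   print(ad["gas", "Fe_nuclei_mass_density"])
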
diff -r de4aaa4344b2ffd78d9e4411074b9b398f5e6ad4 -r b3c23ff06040ef967b7c9ac041d5ad3dc4edb79a yt/frontends/gizmo/tests/test_outputs.py
--- /dev/null
+++ b/yt/frontends/gizmo/tests/test_outputs.py
@@ -0,0 +1,46 @@
+"""
+Gizmo frontend tests
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2015, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from collections import OrderedDict
+
+from yt.utilities.answer_testing.framework import \
+    data_dir_load, \
+    requires_ds, \
+    sph_answer
+from yt.frontends.gizmo.api import GizmoDataset
+
+FIRE_m12i = 'FIRE_M12i_ref11/snapshot_600.hdf5'
+
+# This maps from field names to weight field names to use for projections
+fields = OrderedDict(
+    [
+        (("gas", "density"), None),
+        (("gas", "temperature"), ('gas', 'density')),
+        (("gas", "metallicity"), ('gas', 'density')),
+        (("gas", "O_metallicity"), ('gas', 'density')),
+        (('gas', 'velocity_magnitude'), None),
+        (("deposit", "all_count"), None),
+        (("deposit", "all_cic"), None),
+    ]
+)
+
+@requires_ds(FIRE_m12i)
+def test_GizmoDataset():
+    ds = data_dir_load(FIRE_m12i)
+    assert isinstance(ds, GizmoDataset)
+    for test in sph_answer(ds, 'snapshot_600', 4786950, fields):
+        test_GizmoDataset.__name__ = test.description
+        yield test

diff -r de4aaa4344b2ffd78d9e4411074b9b398f5e6ad4 -r b3c23ff06040ef967b7c9ac041d5ad3dc4edb79a yt/frontends/sph/fields.py
--- a/yt/frontends/sph/fields.py
+++ b/yt/frontends/sph/fields.py
@@ -41,27 +41,9 @@
         ("Phi", ("code_length", [], None)),
         ("StarFormationRate", ("Msun / yr", [], None)),
         ("FormationTime", ("code_time", ["creation_time"], None)),
-        # These are metallicity fields that get discovered for FIRE simulations
         ("Metallicity_00", ("", ["metallicity"], None)),
-        ("Metallicity_01", ("", ["He_fraction"], None)),
-        ("Metallicity_02", ("", ["C_fraction"], None)),
-        ("Metallicity_03", ("", ["N_fraction"], None)),
-        ("Metallicity_04", ("", ["O_fraction"], None)),
-        ("Metallicity_05", ("", ["Ne_fraction"], None)),
-        ("Metallicity_06", ("", ["Mg_fraction"], None)),
-        ("Metallicity_07", ("", ["Si_fraction"], None)),
-        ("Metallicity_08", ("", ["S_fraction"], None)),
-        ("Metallicity_09", ("", ["Ca_fraction"], None)),
-        ("Metallicity_10", ("", ["Fe_fraction"], None)),
     )
 
-    def __init__(self, *args, **kwargs):
-        super(SPHFieldInfo, self).__init__(*args, **kwargs)
-        # Special case for FIRE
-        if ("PartType0", "Metallicity_00") in self.field_list:
-            self.species_names += ["He", "C", "N", "O", "Ne", "Mg", "Si", "S",
-                "Ca", "Fe"]
-
     def setup_particle_fields(self, ptype, *args, **kwargs):
         super(SPHFieldInfo, self).setup_particle_fields(ptype, *args, **kwargs)
         setup_species_fields(self, ptype)

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.