[yt-svn] commit/yt: 7 new changesets

commits-noreply at bitbucket.org
Wed Mar 19 05:59:49 PDT 2014


7 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/d89feebcb971/
Changeset:   d89feebcb971
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-03-18 19:15:25
Summary:     Able to import GDF frontend.
Affected #:  3 files

diff -r 9a10e88b06c14acc7d2b569c085dbec5865ec830 -r d89feebcb971029b630aa0d8f38e87cf75fadada yt/frontends/gdf/api.py
--- a/yt/frontends/gdf/api.py
+++ b/yt/frontends/gdf/api.py
@@ -18,9 +18,8 @@
       GDFDataset
 
 from .fields import \
-      GDFFieldInfo, \
-      KnownGDFFields, \
-      add_gdf_field
+      GDFFieldInfo
+add_gdf_field = GDFFieldInfo.add_field
 
 from .io import \
       IOHandlerGDFHDF5

diff -r 9a10e88b06c14acc7d2b569c085dbec5865ec830 -r d89feebcb971029b630aa0d8f38e87cf75fadada yt/frontends/gdf/data_structures.py
--- a/yt/frontends/gdf/data_structures.py
+++ b/yt/frontends/gdf/data_structures.py
@@ -27,18 +27,10 @@
     Dataset
 from yt.utilities.lib.misc_utilities import \
     get_box_grids_level
-from yt.utilities.definitions import \
-    mpc_conversion, sec_conversion
+from yt.units.yt_array import \
+    uconcatenate, YTArray
 
-from .fields import GDFFieldInfo, KnownGDFFields
-from yt.data_objects.field_info_container import \
-    NullFunc
-
-def _get_convert(fname):
-    def _conv(data):
-        return 1.0  # data.convert(fname) FIXME
-    return _conv
-
+from .fields import GDFFieldInfo
 
 class GDFGrid(AMRGridPatch):
     _id_offset = 0
@@ -75,7 +67,6 @@
 class GDFHierarchy(GridIndex):
 
     grid = GDFGrid
-    filtered_particle_types = []
 
     def __init__(self, pf, dataset_type='grid_data_format'):
         self.parameter_file = weakref.proxy(pf)
@@ -88,12 +79,9 @@
         self.directory = os.path.dirname(self.index_filename)
         h5f.close()
 
-    def _initialize_data_storage(self):
-        pass
-
-    def _detect_fields(self):
+    def _detect_output_fields(self):
         h5f = h5py.File(self.index_filename, 'r')
-        self.field_list = h5f['field_types'].keys()
+        self.field_list = [("gdf", str(f)) for f in h5f['field_types'].keys()]
         h5f.close()
 
     def _count_grids(self):
@@ -121,8 +109,8 @@
                   self.parameter_file.domain_left_edge) / \
                 self.parameter_file.domain_dimensions
             dx[active_dims] /= self.parameter_file.refine_by ** levels[i]
-            dxs.append(dx)
-        dx = np.array(dxs)
+            dxs.append(dx.in_units("code_length"))
+        dx = self.parameter_file.arr(dxs, input_units="code_length")
         self.grid_left_edge = self.parameter_file.domain_left_edge + dx * glis
         self.grid_dimensions = gdims.astype("int32")
         self.grid_right_edge = self.grid_left_edge + dx * self.grid_dimensions
@@ -152,9 +140,6 @@
                 g.OverlappingSiblings = siblings.tolist()
         self.max_level = self.grid_levels.max()
 
-    def _setup_derived_fields(self):
-        super(GDFHierarchy, self)._setup_derived_fields()
-
     def _get_box_grids(self, left_edge, right_edge):
         """
         Gets back all the grids between a left edge and right edge
@@ -174,8 +159,7 @@
 
 class GDFDataset(Dataset):
     _index_class = GDFHierarchy
-    _fieldinfo_fallback = GDFFieldInfo
-    _fieldinfo_known = KnownGDFFields
+    _field_info_class = GDFFieldInfo
 
     def __init__(self, filename, dataset_type='grid_data_format',
                  storage_filename=None):
@@ -183,33 +167,18 @@
         self.storage_filename = storage_filename
         self.filename = filename
 
-    def _set_units(self):
+    def _set_code_unit_attributes(self):
         """
         Generates the conversion to various physical _units
         based on the parameter file
         """
-        self.units = {}
-        self.time_units = {}
-        if len(self.parameters) == 0:
-            self._parse_parameter_file()
-        self.time_units['1'] = 1
-        self.units['1'] = 1.0
-        self.units['cm'] = 1.0
-        self.units['unitary'] = 1.0 / (self.domain_right_edge -
-                                       self.domain_left_edge).max()
-        for unit in mpc_conversion.keys():
-            self.units[unit] = mpc_conversion[unit] / mpc_conversion["cm"]
-        for unit in sec_conversion.keys():
-            self.time_units[unit] = 1.0 / sec_conversion[unit]
 
         # This should be improved.
         h5f = h5py.File(self.parameter_filename, "r")
         for field_name in h5f["/field_types"]:
             current_field = h5f["/field_types/%s" % field_name]
             if 'field_to_cgs' in current_field.attrs:
-                self.units[field_name] = current_field.attrs['field_to_cgs']
-            else:
-                self.units[field_name] = 1.0
+                self.field_units[field_name] = current_field.attrs['field_to_cgs']
             if 'field_units' in current_field.attrs:
                 if type(current_field.attrs['field_units']) == str:
                     current_fields_unit = current_field.attrs['field_units']
@@ -218,11 +187,10 @@
                         just_one(current_field.attrs['field_units'])
             else:
                 current_fields_unit = ""
-            self._fieldinfo_known.add_field(
-                field_name, function=NullFunc, take_log=False,
-                units=current_fields_unit, projected_units="")
-
         h5f.close()
+        self.length_unit = self.quan(1.0, "cm")
+        self.mass_unit = self.quan(1.0, "g")
+        self.time_unit = self.quan(1.0, "s")
 
     def _parse_parameter_file(self):
         self._handle = h5py.File(self.parameter_filename, "r")

diff -r 9a10e88b06c14acc7d2b569c085dbec5865ec830 -r d89feebcb971029b630aa0d8f38e87cf75fadada yt/frontends/gdf/fields.py
--- a/yt/frontends/gdf/fields.py
+++ b/yt/frontends/gdf/fields.py
@@ -13,66 +13,25 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
+import numpy as np
+
+from yt.funcs import mylog
 from yt.fields.field_info_container import \
-    FieldInfoContainer, \
-    FieldInfo, \
-    ValidateParameter, \
-    ValidateDataField, \
-    ValidateProperty, \
-    ValidateSpatial, \
-    ValidateGridType, \
-    NullFunc, \
-    TranslationFunc
+    FieldInfoContainer
 
-log_translation_dict = {"Density": "density",
-                        "Pressure": "pressure"}
+# The nice thing about GDF is that for the most part, everything is in CGS,
+# with potentially a scalar modification.
 
-translation_dict = {"x-velocity": "velocity_x",
-                    "y-velocity": "velocity_y",
-                    "z-velocity": "velocity_z"}
-                    
-# translation_dict = {"mag_field_x": "cell_centered_B_x ",
-#                     "mag_field_y": "cell_centered_B_y ",
-#                     "mag_field_z": "cell_centered_B_z "}
-
-GDFFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
-add_field = GDFFieldInfo.add_field
-
-KnownGDFFields = FieldInfoContainer()
-add_gdf_field = KnownGDFFields.add_field
-
-add_gdf_field("density", function=NullFunc, take_log=True, units="g/cm**3")
-
-add_gdf_field("specific_energy", function=NullFunc, take_log=True,
-          units="erg / g")
-
-add_gdf_field("pressure", function=NullFunc, take_log=True,
-          units="erg/g")
-
-add_gdf_field("velocity_x", function=NullFunc, take_log=False,
-          units="cm/s")
-
-add_gdf_field("velocity_y", function=NullFunc, take_log=False,
-          units="cm/s")
-
-add_gdf_field("velocity_z", function=NullFunc, take_log=False,
-          units="cm / s")
-
-add_gdf_field("mag_field_x", function=NullFunc, take_log=False,
-          units="cm / s")
-
-add_gdf_field("mag_field_y", function=NullFunc, take_log=False,
-          units="cm / s")
-
-add_gdf_field("mag_field_z", function=NullFunc, take_log=False,
-          units="cm / s")
-
-for f,v in log_translation_dict.items():
-    add_field(f, TranslationFunc(v), take_log=True,
-              units=KnownGDFFields[v].get_units(),
-              projected_units=KnownGDFFields[v].get_projected_units())
-
-for f,v in translation_dict.items():
-    add_field(f, TranslationFunc(v), take_log=False,
-              units=KnownGDFFields[v].get_units(),
-              projected_units=KnownGDFFields[v].get_projected_units())
+class GDFFieldInfo(FieldInfoContainer):
+    known_other_fields = (
+        ("density", ("g/cm**3", [], None)),
+        ("specific_energy", ("erg / g", ["thermal_energy"], None)),
+        ("pressure", ("erg/g", [], None)),
+        ("velocity_x", ("cm/s", [], None)),
+        ("velocity_y", ("cm/s", [], None)),
+        ("velocity_z", ("cm/s", [], None)),
+        ("mag_field_x", ("gauss", ["magnetic_field_x"], None)),
+        ("mag_field_y", ("gauss", ["magnetic_field_y"], None)),
+        ("mag_field_z", ("gauss", ["magnetic_field_z"], None)),
+    )
+    known_particle_fields = ()
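
As a point of reference, here is a minimal standalone sketch of what the new
_detect_output_fields logic amounts to (the filename below is hypothetical):
every entry under the file's /field_types group becomes a ("gdf", name) field
tuple.

import h5py

def list_gdf_fields(filename):
    # Mirrors _detect_output_fields above: field names live in the
    # /field_types group of a GDF file, and each one is tagged with the
    # "gdf" fluid type.
    with h5py.File(filename, "r") as h5f:
        return [("gdf", str(name)) for name in h5f["field_types"].keys()]

# print(list_gdf_fields("example.gdf"))  # hypothetical file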


https://bitbucket.org/yt_analysis/yt/commits/70bc0e869d33/
Changeset:   70bc0e869d33
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-03-18 19:24:41
Summary:     Fixing fluid type definition, enabling units for GDF.
Affected #:  2 files

diff -r d89feebcb971029b630aa0d8f38e87cf75fadada -r 70bc0e869d33f1688d0486bbc9b76eff0046eca2 yt/fields/field_info_container.py
--- a/yt/fields/field_info_container.py
+++ b/yt/fields/field_info_container.py
@@ -16,6 +16,7 @@
 #-----------------------------------------------------------------------------
 
 import numpy as np
+import types
 
 from yt.funcs import mylog, only_on_root
 from yt.units.unit_object import Unit
@@ -104,6 +105,12 @@
             args = known_other_fields.get(
                 field[1], ("", [], None))
             units, aliases, display_name = args
+            # We allow field_units to override this.  First we check if the
+            # field *name* is in there, then the field *tuple*.
+            units = self.pf.field_units.get(field[1], units)
+            units = self.pf.field_units.get(field, units)
+            if not isinstance(units, types.StringTypes):
+                units = "((%s)*%s)" % (args[0], units)
             self.add_output_field(field, units = units,
                                   display_name = display_name)
             for alias in aliases:

diff -r d89feebcb971029b630aa0d8f38e87cf75fadada -r 70bc0e869d33f1688d0486bbc9b76eff0046eca2 yt/frontends/gdf/data_structures.py
--- a/yt/frontends/gdf/data_structures.py
+++ b/yt/frontends/gdf/data_structures.py
@@ -14,6 +14,7 @@
 #-----------------------------------------------------------------------------
 
 import h5py
+import types
 import numpy as np
 import weakref
 import os
@@ -163,6 +164,7 @@
 
     def __init__(self, filename, dataset_type='grid_data_format',
                  storage_filename=None):
+        self.fluid_types += ("gdf",)
         Dataset.__init__(self, filename, dataset_type)
         self.storage_filename = storage_filename
         self.filename = filename
@@ -178,13 +180,16 @@
         for field_name in h5f["/field_types"]:
             current_field = h5f["/field_types/%s" % field_name]
             if 'field_to_cgs' in current_field.attrs:
-                self.field_units[field_name] = current_field.attrs['field_to_cgs']
-            if 'field_units' in current_field.attrs:
-                if type(current_field.attrs['field_units']) == str:
+                field_conv = current_field.attrs['field_to_cgs']
+                self.field_units[field_name] = just_one(field_conv)
+            elif 'field_units' in current_field.attrs:
+                field_units = current_field.attrs['field_units']
+                if isinstance(field_units, types.StringTypes):
                     current_fields_unit = current_field.attrs['field_units']
                 else:
                     current_fields_unit = \
                         just_one(current_field.attrs['field_units'])
+                self.field_units[field_name] = current_field_units
             else:
                 current_fields_unit = ""
         h5f.close()
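
A standalone sketch of the override order this adds to setup_fluid_fields,
assuming field is a (ftype, fname) tuple and field_units is the dataset-level
dict filled in from the /field_types attributes (str is used here in place of
types.StringTypes for brevity): the field name is checked first, then the full
field tuple, and a non-string value is folded into the frontend's default unit
string as a scalar conversion.

def resolve_units(field, default_units, field_units):
    # Name first, then tuple, so ("gdf", "density") can be overridden by
    # either "density" or the tuple itself.
    units = field_units.get(field[1], default_units)
    units = field_units.get(field, units)
    if not isinstance(units, str):
        # A numeric entry is a conversion factor on top of the default.
        units = "((%s)*%s)" % (default_units, units)
    return units

# resolve_units(("gdf", "density"), "g/cm**3", {"density": 0.01})
#   -> "((g/cm**3)*0.01)"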


https://bitbucket.org/yt_analysis/yt/commits/51a08874d19c/
Changeset:   51a08874d19c
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-03-18 19:28:30
Summary:     Fixing the IO portion.  Removed a few things, too.
Affected #:  1 file

diff -r 70bc0e869d33f1688d0486bbc9b76eff0046eca2 -r 51a08874d19c326958726ed053720c6f1a2593be yt/frontends/gdf/io.py
--- a/yt/frontends/gdf/io.py
+++ b/yt/frontends/gdf/io.py
@@ -36,28 +36,6 @@
     _offset_string = 'data:offsets=0'
     _data_string = 'data:datatype=0'
 
-    def _read_field_names(self, grid):
-        if grid.filename is None:
-            return []
-        print 'grid.filename = %', grid.filename
-        h5f = h5py.File(grid.filename, mode="r")
-        group = h5f[_grid_dname(grid.id)]
-        fields = []
-        for name, v in group.iteritems():
-            # NOTE: This won't work with 1D datasets.
-            if not hasattr(v, "shape"):
-                continue
-            elif len(v.dims) == 1:
-                fields.append(("io", str(name)))
-            else:
-                fields.append(("gas", str(name)))
-        h5f.close()
-        return fields
-
-    @property
-    def _read_exception(self):
-        return (exceptions.KeyError, )
-
     def _read_fluid_selection(self, chunks, selector, fields, size):
         rv = {}
         chunks = list(chunks)
@@ -79,7 +57,7 @@
             size = sum((grid.count(selector) for chunk in chunks
                         for grid in chunk.objs))
 
-        if any((ftype != "gas" for ftype, fname in fields)):
+        if any((ftype != "gdf" for ftype, fname in fields)):
             raise NotImplementedError
 
         for field in fields:
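
A small sketch of the fluid-type guard introduced here, assuming fields is a
list of (ftype, fname) tuples as elsewhere in yt-3.0: the GDF reader now
serves only the "gdf" fluid type and rejects anything else up front.

def check_gdf_fields(fields):
    # Same test as the new guard in _read_fluid_selection.
    if any(ftype != "gdf" for ftype, fname in fields):
        raise NotImplementedError("GDF IO only handles ('gdf', ...) fields")

# check_gdf_fields([("gdf", "density"), ("gdf", "velocity_x")])  # passes
# check_gdf_fields([("gas", "density")])                         # raises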


https://bitbucket.org/yt_analysis/yt/commits/19117c35e9b8/
Changeset:   19117c35e9b8
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-03-18 19:29:17
Summary:     Re-enabling GDF in mods.py.
Affected #:  1 file

diff -r 51a08874d19c326958726ed053720c6f1a2593be -r 19117c35e9b8c95c0faaebad2b304cba663e7f3f yt/mods.py
--- a/yt/mods.py
+++ b/yt/mods.py
@@ -105,8 +105,8 @@
 #from yt.frontends.chombo.api import \
 #    ChomboDataset, ChomboFieldInfo, add_chombo_field
 
-#from yt.frontends.gdf.api import \
-#    GDFDataset, GDFFieldInfo, add_gdf_field
+from yt.frontends.gdf.api import \
+    GDFDataset, GDFFieldInfo, add_gdf_field
 
 from yt.frontends.moab.api import \
     MoabHex8Dataset, MoabFieldInfo, \
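
A usage sketch under the assumption of a local GDF file named example.gdf
(hypothetical name): with the import uncommented, the GDF classes are
importable from yt.mods again.

from yt.mods import GDFDataset

# Hypothetical file; GDFDataset takes the path directly, matching the
# __init__ signature shown in the data_structures.py diff above.
pf = GDFDataset("example.gdf")
# pf.field_list should now report ("gdf", name) tuples, per the
# _detect_output_fields change earlier in this series.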


https://bitbucket.org/yt_analysis/yt/commits/60f8175086cd/
Changeset:   60f8175086cd
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-03-18 19:39:53
Summary:     Fixing aliases for density
Affected #:  1 file

diff -r 19117c35e9b8c95c0faaebad2b304cba663e7f3f -r 60f8175086cd1db33813fe1d3d6adf86df28deb6 yt/frontends/gdf/fields.py
--- a/yt/frontends/gdf/fields.py
+++ b/yt/frontends/gdf/fields.py
@@ -24,12 +24,12 @@
 
 class GDFFieldInfo(FieldInfoContainer):
     known_other_fields = (
-        ("density", ("g/cm**3", [], None)),
+        ("density", ("g/cm**3", ["density"], None)),
         ("specific_energy", ("erg / g", ["thermal_energy"], None)),
-        ("pressure", ("erg/g", [], None)),
-        ("velocity_x", ("cm/s", [], None)),
-        ("velocity_y", ("cm/s", [], None)),
-        ("velocity_z", ("cm/s", [], None)),
+        ("pressure", ("", ["pressure"], None)),
+        ("velocity_x", ("cm/s", ["velocity_x"], None)),
+        ("velocity_y", ("cm/s", ["velocity_y"], None)),
+        ("velocity_z", ("cm/s", ["velocity_z"], None)),
         ("mag_field_x", ("gauss", ["magnetic_field_x"], None)),
         ("mag_field_y", ("gauss", ["magnetic_field_y"], None)),
         ("mag_field_z", ("gauss", ["magnetic_field_z"], None)),


https://bitbucket.org/yt_analysis/yt/commits/1fbf412555a9/
Changeset:   1fbf412555a9
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-03-18 20:54:36
Summary:     Clean up the unit setting slightly.
Affected #:  1 file

diff -r 60f8175086cd1db33813fe1d3d6adf86df28deb6 -r 1fbf412555a9464171d61221a9ab7bd1ec387615 yt/fields/field_info_container.py
--- a/yt/fields/field_info_container.py
+++ b/yt/fields/field_info_container.py
@@ -17,6 +17,7 @@
 
 import numpy as np
 import types
+from numbers import Number as numeric_type
 
 from yt.funcs import mylog, only_on_root
 from yt.units.unit_object import Unit
@@ -109,8 +110,15 @@
             # field *name* is in there, then the field *tuple*.
             units = self.pf.field_units.get(field[1], units)
             units = self.pf.field_units.get(field, units)
-            if not isinstance(units, types.StringTypes):
+            if not isinstance(units, types.StringTypes) and args[0] != "":
                 units = "((%s)*%s)" % (args[0], units)
+            if isinstance(units, (numeric_type, np.number, np.ndarray)) and \
+                args[0] == "" and units != 1.0:
+                mylog.warning("Cannot interpret units: %s * %s, " +
+                              "setting to dimensionless.", units, args[0])
+                units = ""
+            elif units == 1.0:
+                units = ""
             self.add_output_field(field, units = units,
                                   display_name = display_name)
             for alias in aliases:
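
A standalone sketch of the full branch logic after this cleanup, assuming
scalar conversion factors: a numeric conversion paired with an empty default
unit string cannot be interpreted and falls back to dimensionless (with a
warning in the real code), and a conversion of exactly 1.0 is likewise
dropped.

import numpy as np
from numbers import Number

def resolve_units(field, default_units, field_units):
    units = field_units.get(field[1], default_units)
    units = field_units.get(field, units)
    if not isinstance(units, str) and default_units != "":
        # Numeric conversion on top of a known default unit string.
        units = "((%s)*%s)" % (default_units, units)
    if isinstance(units, (Number, np.number, np.ndarray)) and \
            default_units == "" and units != 1.0:
        # No default to attach the factor to; fall back to dimensionless.
        units = ""
    elif units == 1.0:
        units = ""
    return units

# resolve_units(("gdf", "pressure"), "", {"pressure": 0.1})       -> ""
# resolve_units(("gdf", "density"), "g/cm**3", {"density": 1.0})
#   -> "((g/cm**3)*1.0)"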


https://bitbucket.org/yt_analysis/yt/commits/f099b2d7fe94/
Changeset:   f099b2d7fe94
Branch:      yt-3.0
User:        xarthisius
Date:        2014-03-19 13:59:42
Summary:     Merged in MatthewTurk/yt/yt-3.0 (pull request #731)

Updating GDF
Affected #:  6 files

diff -r b67bd51073b20a9a851e73e3e53eb2b2fbb5cc63 -r f099b2d7fe9448b0e0579c28d0c941dcb2ba04f9 yt/fields/field_info_container.py
--- a/yt/fields/field_info_container.py
+++ b/yt/fields/field_info_container.py
@@ -16,6 +16,8 @@
 #-----------------------------------------------------------------------------
 
 import numpy as np
+import types
+from numbers import Number as numeric_type
 
 from yt.funcs import mylog, only_on_root
 from yt.units.unit_object import Unit
@@ -104,6 +106,19 @@
             args = known_other_fields.get(
                 field[1], ("", [], None))
             units, aliases, display_name = args
+            # We allow field_units to override this.  First we check if the
+            # field *name* is in there, then the field *tuple*.
+            units = self.pf.field_units.get(field[1], units)
+            units = self.pf.field_units.get(field, units)
+            if not isinstance(units, types.StringTypes) and args[0] != "":
+                units = "((%s)*%s)" % (args[0], units)
+            if isinstance(units, (numeric_type, np.number, np.ndarray)) and \
+                args[0] == "" and units != 1.0:
+                mylog.warning("Cannot interpret units: %s * %s, " +
+                              "setting to dimensionless.", units, args[0])
+                units = ""
+            elif units == 1.0:
+                units = ""
             self.add_output_field(field, units = units,
                                   display_name = display_name)
             for alias in aliases:

diff -r b67bd51073b20a9a851e73e3e53eb2b2fbb5cc63 -r f099b2d7fe9448b0e0579c28d0c941dcb2ba04f9 yt/frontends/gdf/api.py
--- a/yt/frontends/gdf/api.py
+++ b/yt/frontends/gdf/api.py
@@ -18,9 +18,8 @@
       GDFDataset
 
 from .fields import \
-      GDFFieldInfo, \
-      KnownGDFFields, \
-      add_gdf_field
+      GDFFieldInfo
+add_gdf_field = GDFFieldInfo.add_field
 
 from .io import \
       IOHandlerGDFHDF5

diff -r b67bd51073b20a9a851e73e3e53eb2b2fbb5cc63 -r f099b2d7fe9448b0e0579c28d0c941dcb2ba04f9 yt/frontends/gdf/data_structures.py
--- a/yt/frontends/gdf/data_structures.py
+++ b/yt/frontends/gdf/data_structures.py
@@ -14,6 +14,7 @@
 #-----------------------------------------------------------------------------
 
 import h5py
+import types
 import numpy as np
 import weakref
 import os
@@ -27,18 +28,10 @@
     Dataset
 from yt.utilities.lib.misc_utilities import \
     get_box_grids_level
-from yt.utilities.definitions import \
-    mpc_conversion, sec_conversion
+from yt.units.yt_array import \
+    uconcatenate, YTArray
 
-from .fields import GDFFieldInfo, KnownGDFFields
-from yt.data_objects.field_info_container import \
-    NullFunc
-
-def _get_convert(fname):
-    def _conv(data):
-        return 1.0  # data.convert(fname) FIXME
-    return _conv
-
+from .fields import GDFFieldInfo
 
 class GDFGrid(AMRGridPatch):
     _id_offset = 0
@@ -75,7 +68,6 @@
 class GDFHierarchy(GridIndex):
 
     grid = GDFGrid
-    filtered_particle_types = []
 
     def __init__(self, pf, dataset_type='grid_data_format'):
         self.parameter_file = weakref.proxy(pf)
@@ -88,12 +80,9 @@
         self.directory = os.path.dirname(self.index_filename)
         h5f.close()
 
-    def _initialize_data_storage(self):
-        pass
-
-    def _detect_fields(self):
+    def _detect_output_fields(self):
         h5f = h5py.File(self.index_filename, 'r')
-        self.field_list = h5f['field_types'].keys()
+        self.field_list = [("gdf", str(f)) for f in h5f['field_types'].keys()]
         h5f.close()
 
     def _count_grids(self):
@@ -121,8 +110,8 @@
                   self.parameter_file.domain_left_edge) / \
                 self.parameter_file.domain_dimensions
             dx[active_dims] /= self.parameter_file.refine_by ** levels[i]
-            dxs.append(dx)
-        dx = np.array(dxs)
+            dxs.append(dx.in_units("code_length"))
+        dx = self.parameter_file.arr(dxs, input_units="code_length")
         self.grid_left_edge = self.parameter_file.domain_left_edge + dx * glis
         self.grid_dimensions = gdims.astype("int32")
         self.grid_right_edge = self.grid_left_edge + dx * self.grid_dimensions
@@ -152,9 +141,6 @@
                 g.OverlappingSiblings = siblings.tolist()
         self.max_level = self.grid_levels.max()
 
-    def _setup_derived_fields(self):
-        super(GDFHierarchy, self)._setup_derived_fields()
-
     def _get_box_grids(self, left_edge, right_edge):
         """
         Gets back all the grids between a left edge and right edge
@@ -174,55 +160,42 @@
 
 class GDFDataset(Dataset):
     _index_class = GDFHierarchy
-    _fieldinfo_fallback = GDFFieldInfo
-    _fieldinfo_known = KnownGDFFields
+    _field_info_class = GDFFieldInfo
 
     def __init__(self, filename, dataset_type='grid_data_format',
                  storage_filename=None):
+        self.fluid_types += ("gdf",)
         Dataset.__init__(self, filename, dataset_type)
         self.storage_filename = storage_filename
         self.filename = filename
 
-    def _set_units(self):
+    def _set_code_unit_attributes(self):
         """
         Generates the conversion to various physical _units
         based on the parameter file
         """
-        self.units = {}
-        self.time_units = {}
-        if len(self.parameters) == 0:
-            self._parse_parameter_file()
-        self.time_units['1'] = 1
-        self.units['1'] = 1.0
-        self.units['cm'] = 1.0
-        self.units['unitary'] = 1.0 / (self.domain_right_edge -
-                                       self.domain_left_edge).max()
-        for unit in mpc_conversion.keys():
-            self.units[unit] = mpc_conversion[unit] / mpc_conversion["cm"]
-        for unit in sec_conversion.keys():
-            self.time_units[unit] = 1.0 / sec_conversion[unit]
 
         # This should be improved.
         h5f = h5py.File(self.parameter_filename, "r")
         for field_name in h5f["/field_types"]:
             current_field = h5f["/field_types/%s" % field_name]
             if 'field_to_cgs' in current_field.attrs:
-                self.units[field_name] = current_field.attrs['field_to_cgs']
-            else:
-                self.units[field_name] = 1.0
-            if 'field_units' in current_field.attrs:
-                if type(current_field.attrs['field_units']) == str:
+                field_conv = current_field.attrs['field_to_cgs']
+                self.field_units[field_name] = just_one(field_conv)
+            elif 'field_units' in current_field.attrs:
+                field_units = current_field.attrs['field_units']
+                if isinstance(field_units, types.StringTypes):
                     current_fields_unit = current_field.attrs['field_units']
                 else:
                     current_fields_unit = \
                         just_one(current_field.attrs['field_units'])
+                self.field_units[field_name] = current_field_units
             else:
                 current_fields_unit = ""
-            self._fieldinfo_known.add_field(
-                field_name, function=NullFunc, take_log=False,
-                units=current_fields_unit, projected_units="")
-
         h5f.close()
+        self.length_unit = self.quan(1.0, "cm")
+        self.mass_unit = self.quan(1.0, "g")
+        self.time_unit = self.quan(1.0, "s")
 
     def _parse_parameter_file(self):
         self._handle = h5py.File(self.parameter_filename, "r")

diff -r b67bd51073b20a9a851e73e3e53eb2b2fbb5cc63 -r f099b2d7fe9448b0e0579c28d0c941dcb2ba04f9 yt/frontends/gdf/fields.py
--- a/yt/frontends/gdf/fields.py
+++ b/yt/frontends/gdf/fields.py
@@ -13,66 +13,25 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
+import numpy as np
+
+from yt.funcs import mylog
 from yt.fields.field_info_container import \
-    FieldInfoContainer, \
-    FieldInfo, \
-    ValidateParameter, \
-    ValidateDataField, \
-    ValidateProperty, \
-    ValidateSpatial, \
-    ValidateGridType, \
-    NullFunc, \
-    TranslationFunc
+    FieldInfoContainer
 
-log_translation_dict = {"Density": "density",
-                        "Pressure": "pressure"}
+# The nice thing about GDF is that for the most part, everything is in CGS,
+# with potentially a scalar modification.
 
-translation_dict = {"x-velocity": "velocity_x",
-                    "y-velocity": "velocity_y",
-                    "z-velocity": "velocity_z"}
-                    
-# translation_dict = {"mag_field_x": "cell_centered_B_x ",
-#                     "mag_field_y": "cell_centered_B_y ",
-#                     "mag_field_z": "cell_centered_B_z "}
-
-GDFFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
-add_field = GDFFieldInfo.add_field
-
-KnownGDFFields = FieldInfoContainer()
-add_gdf_field = KnownGDFFields.add_field
-
-add_gdf_field("density", function=NullFunc, take_log=True, units="g/cm**3")
-
-add_gdf_field("specific_energy", function=NullFunc, take_log=True,
-          units="erg / g")
-
-add_gdf_field("pressure", function=NullFunc, take_log=True,
-          units="erg/g")
-
-add_gdf_field("velocity_x", function=NullFunc, take_log=False,
-          units="cm/s")
-
-add_gdf_field("velocity_y", function=NullFunc, take_log=False,
-          units="cm/s")
-
-add_gdf_field("velocity_z", function=NullFunc, take_log=False,
-          units="cm / s")
-
-add_gdf_field("mag_field_x", function=NullFunc, take_log=False,
-          units="cm / s")
-
-add_gdf_field("mag_field_y", function=NullFunc, take_log=False,
-          units="cm / s")
-
-add_gdf_field("mag_field_z", function=NullFunc, take_log=False,
-          units="cm / s")
-
-for f,v in log_translation_dict.items():
-    add_field(f, TranslationFunc(v), take_log=True,
-              units=KnownGDFFields[v].get_units(),
-              projected_units=KnownGDFFields[v].get_projected_units())
-
-for f,v in translation_dict.items():
-    add_field(f, TranslationFunc(v), take_log=False,
-              units=KnownGDFFields[v].get_units(),
-              projected_units=KnownGDFFields[v].get_projected_units())
+class GDFFieldInfo(FieldInfoContainer):
+    known_other_fields = (
+        ("density", ("g/cm**3", ["density"], None)),
+        ("specific_energy", ("erg / g", ["thermal_energy"], None)),
+        ("pressure", ("", ["pressure"], None)),
+        ("velocity_x", ("cm/s", ["velocity_x"], None)),
+        ("velocity_y", ("cm/s", ["velocity_y"], None)),
+        ("velocity_z", ("cm/s", ["velocity_z"], None)),
+        ("mag_field_x", ("gauss", ["magnetic_field_x"], None)),
+        ("mag_field_y", ("gauss", ["magnetic_field_y"], None)),
+        ("mag_field_z", ("gauss", ["magnetic_field_z"], None)),
+    )
+    known_particle_fields = ()

diff -r b67bd51073b20a9a851e73e3e53eb2b2fbb5cc63 -r f099b2d7fe9448b0e0579c28d0c941dcb2ba04f9 yt/frontends/gdf/io.py
--- a/yt/frontends/gdf/io.py
+++ b/yt/frontends/gdf/io.py
@@ -36,28 +36,6 @@
     _offset_string = 'data:offsets=0'
     _data_string = 'data:datatype=0'
 
-    def _read_field_names(self, grid):
-        if grid.filename is None:
-            return []
-        print 'grid.filename = %', grid.filename
-        h5f = h5py.File(grid.filename, mode="r")
-        group = h5f[_grid_dname(grid.id)]
-        fields = []
-        for name, v in group.iteritems():
-            # NOTE: This won't work with 1D datasets.
-            if not hasattr(v, "shape"):
-                continue
-            elif len(v.dims) == 1:
-                fields.append(("io", str(name)))
-            else:
-                fields.append(("gas", str(name)))
-        h5f.close()
-        return fields
-
-    @property
-    def _read_exception(self):
-        return (exceptions.KeyError, )
-
     def _read_fluid_selection(self, chunks, selector, fields, size):
         rv = {}
         chunks = list(chunks)
@@ -79,7 +57,7 @@
             size = sum((grid.count(selector) for chunk in chunks
                         for grid in chunk.objs))
 
-        if any((ftype != "gas" for ftype, fname in fields)):
+        if any((ftype != "gdf" for ftype, fname in fields)):
             raise NotImplementedError
 
         for field in fields:

diff -r b67bd51073b20a9a851e73e3e53eb2b2fbb5cc63 -r f099b2d7fe9448b0e0579c28d0c941dcb2ba04f9 yt/mods.py
--- a/yt/mods.py
+++ b/yt/mods.py
@@ -105,8 +105,8 @@
 #from yt.frontends.chombo.api import \
 #    ChomboDataset, ChomboFieldInfo, add_chombo_field
 
-#from yt.frontends.gdf.api import \
-#    GDFDataset, GDFFieldInfo, add_gdf_field
+from yt.frontends.gdf.api import \
+    GDFDataset, GDFFieldInfo, add_gdf_field
 
 from yt.frontends.moab.api import \
     MoabHex8Dataset, MoabFieldInfo, \

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.


