[yt-svn] commit/yt: 8 new changesets

commits-noreply at bitbucket.org
Sat Sep 24 14:15:22 PDT 2016


8 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/e31085039fd8/
Changeset:   e31085039fd8
Branch:      yt
User:        brittonsmith
Date:        2016-09-04 18:50:24+00:00
Summary:     Ensure all halo quantities are converted to the correct unit system after the action loop.
Affected #:  1 file

diff -r 58f60e6a6799a2ee44bd18ebd5619814307a4c99 -r e31085039fd86b833f6e85537b04434aa82cb4fe yt/analysis_modules/halo_analysis/halo_catalog.py
--- a/yt/analysis_modules/halo_analysis/halo_catalog.py
+++ b/yt/analysis_modules/halo_analysis/halo_catalog.py
@@ -431,13 +431,16 @@
                     key, quantity = action
                     if quantity in self.halos_ds.field_info:
                         new_halo.quantities[key] = \
-                          self.data_source[quantity][int(i)].in_cgs()
+                          self.data_source[quantity][int(i)]
                     elif callable(quantity):
                         new_halo.quantities[key] = quantity(new_halo)
                 else:
-                    raise RuntimeError("Action must be a callback, filter, or quantity.")
+                    raise RuntimeError(
+                        "Action must be a callback, filter, or quantity.")
 
             if halo_filter:
+                for quantity in new_halo.quantities.values():
+                    quantity.convert_to_base()
                 self.catalog.append(new_halo.quantities)
 
             if save_halos and halo_filter:
@@ -465,7 +468,7 @@
                      "omega_matter", "hubble_constant"]:
             out_file.attrs[attr] = getattr(self.halos_ds, attr)
         for attr in ["domain_left_edge", "domain_right_edge"]:
-            out_file.attrs[attr] = getattr(self.halos_ds, attr).in_cgs()
+            out_file.attrs[attr] = getattr(self.halos_ds, attr).in_base()
         out_file.attrs["data_type"] = "halo_catalog"
         out_file.attrs["num_halos"] = n_halos
         if n_halos > 0:
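
A minimal sketch (not from the changeset) of the two unit-system conversions involved here: in_base() returns a converted copy, while convert_to_base() converts a YTArray in place, which is what now happens to each halo quantity after the action loop.

    from yt.units.yt_array import YTArray

    # Standalone array purely for illustration; the halo quantities above are
    # YTArrays tied to halos_ds and its unit system.
    radius = YTArray([0.5, 1.0], "Mpc")

    print(radius.in_base())    # returns a converted copy in the base unit system (cgs by default)
    radius.convert_to_base()   # converts in place, as now done for each halo quantity
    print(radius)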


https://bitbucket.org/yt_analysis/yt/commits/9591dfcbfc32/
Changeset:   9591dfcbfc32
Branch:      yt
User:        brittonsmith
Date:        2016-09-04 19:19:13+00:00
Summary:     Refactoring to use save_as_dataset.
Affected #:  1 file

diff -r e31085039fd86b833f6e85537b04434aa82cb4fe -r 9591dfcbfc322b6b72bf28fcbcdeb5665bf1a537 yt/analysis_modules/halo_analysis/halo_catalog.py
--- a/yt/analysis_modules/halo_analysis/halo_catalog.py
+++ b/yt/analysis_modules/halo_analysis/halo_catalog.py
@@ -13,10 +13,11 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from yt.utilities.on_demand_imports import _h5py as h5py
 import numpy as np
 import os
 
+from yt.frontends.ytdata.utilities import \
+    save_as_dataset
 from yt.funcs import \
     ensure_dir, \
     mylog
@@ -461,27 +462,18 @@
         mylog.info("Saving halo catalog (%d halos) to %s." %
                    (n_halos, os.path.join(self.output_dir,
                                          self.output_prefix)))
-        out_file = h5py.File(filename, 'w')
-        for attr in ["current_redshift", "current_time",
-                     "domain_dimensions",
-                     "cosmological_simulation", "omega_lambda",
-                     "omega_matter", "hubble_constant"]:
-            out_file.attrs[attr] = getattr(self.halos_ds, attr)
-        for attr in ["domain_left_edge", "domain_right_edge"]:
-            out_file.attrs[attr] = getattr(self.halos_ds, attr).in_base()
-        out_file.attrs["data_type"] = "halo_catalog"
-        out_file.attrs["num_halos"] = n_halos
+        extra_attrs = {"data_type": "halo_catalog",
+                       "num_halos": n_halos}
+        data = {}
+        ftypes = {}
         if n_halos > 0:
-            field_data = np.empty(n_halos)
             for key in self.quantities:
-                units = ""
-                if hasattr(self.catalog[0][key], "units"):
-                    units = str(self.catalog[0][key].units)
-                for i in range(n_halos):
-                    field_data[i] = self.catalog[i][key]
-                dataset = out_file.create_dataset(str(key), data=field_data)
-                dataset.attrs["units"] = units
-        out_file.close()
+                ftypes[key] = "."
+                data[key] = self.halos_ds.arr(
+                    [halo[key] for halo in self.catalog])
+
+        save_as_dataset(self.halos_ds, filename, data,
+                        field_types=ftypes, extra_attrs=extra_attrs)
 
     def add_default_quantities(self, field_type='halos'):
         self.add_quantity("particle_identifier", field_type=field_type,prepend=True)
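
A hedged round-trip sketch (filename illustrative): a catalog written through save_as_dataset should load back with yt.load and be recognized by the halo_catalog frontend, which is what the test added in the next changeset asserts.

    import yt

    # Hypothetical output file produced by HaloCatalog.create(); the ".0.h5"
    # suffix matches the pattern used in the test below.
    ds = yt.load("halo_catalogs/catalog/catalog.0.h5")
    ad = ds.all_data()

    # particle_identifier is one of the defaults added by add_default_quantities().
    print(ad["halos", "particle_identifier"])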


https://bitbucket.org/yt_analysis/yt/commits/35739ab2b1eb/
Changeset:   35739ab2b1eb
Branch:      yt
User:        brittonsmith
Date:        2016-09-04 19:24:17+00:00
Summary:     Adding test to check dataset type.
Affected #:  1 file

diff -r 9591dfcbfc322b6b72bf28fcbcdeb5665bf1a537 -r 35739ab2b1eb936f75137f361c7e92f33eb3a34d yt/analysis_modules/halo_analysis/tests/test_halo_finders.py
--- a/yt/analysis_modules/halo_analysis/tests/test_halo_finders.py
+++ b/yt/analysis_modules/halo_analysis/tests/test_halo_finders.py
@@ -2,6 +2,8 @@
 import sys
 
 from yt.convenience import load
+from yt.frontends.halo_catalog.data_structures import \
+    HaloCatalogDataset
 from yt.utilities.answer_testing.framework import \
     FieldValuesTest, \
     requires_ds
@@ -30,6 +32,7 @@
                           "halo_catalogs", method,
                           "%s.0.h5" % method)
         ds = load(fn)
+        assert isinstance(ds, HaloCatalogDataset)
         for field in _fields:
             yield FieldValuesTest(ds, field, particle_type=True,
                                   decimals=decimals[method])


https://bitbucket.org/yt_analysis/yt/commits/f230baf205e9/
Changeset:   f230baf205e9
Branch:      yt
User:        brittonsmith
Date:        2016-09-05 19:50:47+00:00
Summary:     Don't try to read fields if pcount is 0.
Affected #:  1 file

diff -r 35739ab2b1eb936f75137f361c7e92f33eb3a34d -r f230baf205e974fef056eadd9fa991a6371b8117 yt/frontends/halo_catalog/io.py
--- a/yt/frontends/halo_catalog/io.py
+++ b/yt/frontends/halo_catalog/io.py
@@ -78,6 +78,7 @@
         mylog.debug("Initializing index % 5i (% 7i particles)",
                     data_file.file_id, pcount)
         ind = 0
+        if pcount == 0: return None
         with h5py.File(data_file.filename, "r") as f:
             if not f.keys(): return None
             pos = np.empty((pcount, 3), dtype="float64")


https://bitbucket.org/yt_analysis/yt/commits/d2fd951ce3fa/
Changeset:   d2fd951ce3fa
Branch:      yt
User:        brittonsmith
Date:        2016-09-05 20:02:51+00:00
Summary:     Python3 fix.
Affected #:  1 file

diff -r f230baf205e974fef056eadd9fa991a6371b8117 -r d2fd951ce3fa39d073465a028dc13b229723967e yt/frontends/halo_catalog/data_structures.py
--- a/yt/frontends/halo_catalog/data_structures.py
+++ b/yt/frontends/halo_catalog/data_structures.py
@@ -88,6 +88,6 @@
         if not args[0].endswith(".h5"): return False
         with h5py.File(args[0], "r") as f:
             if "data_type" in f.attrs and \
-              f.attrs["data_type"] == "halo_catalog":
+              f.attrs["data_type"].decode("utf-8") == "halo_catalog":
                 return True
         return False
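
A minimal illustration (not from the changeset) of why the decode is needed: under Python 3, comparing bytes against a str literal is always False, so an undecoded data_type attribute would never match "halo_catalog".

    # Pure-Python sketch; depending on how the file was written and on the h5py
    # version, f.attrs["data_type"] may be returned as bytes under Python 3.
    val = b"halo_catalog"
    print(val == "halo_catalog")                  # False: bytes never equal str in Python 3
    print(val.decode("utf-8") == "halo_catalog")  # True after decoding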


https://bitbucket.org/yt_analysis/yt/commits/dfece2663e3f/
Changeset:   dfece2663e3f
Branch:      yt
User:        brittonsmith
Date:        2016-09-06 10:57:23+00:00
Summary:     Use a python3-safe function for getting hdf5 attributes.
Affected #:  3 files

diff -r d2fd951ce3fa39d073465a028dc13b229723967e -r dfece2663e3f3fed6c2ecab3fdda9382ea318003 yt/frontends/halo_catalog/data_structures.py
--- a/yt/frontends/halo_catalog/data_structures.py
+++ b/yt/frontends/halo_catalog/data_structures.py
@@ -24,6 +24,7 @@
     HaloCatalogFieldInfo
 
 from yt.funcs import \
+    parse_h5_attr, \
     setdefaultattr
 from yt.geometry.particle_geometry_handler import \
     ParticleIndex
@@ -34,7 +35,7 @@
 class HaloCatalogHDF5File(ParticleFile):
     def __init__(self, ds, io, filename, file_id):
         with h5py.File(filename, "r") as f:
-            self.header = dict((field, f.attrs[field]) \
+            self.header = dict((field, parse_h5_attr(f, field)) \
                                for field in f.attrs.keys())
 
         super(HaloCatalogHDF5File, self).__init__(ds, io, filename, file_id)
@@ -56,7 +57,7 @@
 
     def _parse_parameter_file(self):
         with h5py.File(self.parameter_filename, "r") as f:
-            hvals = dict((key, f.attrs[key]) for key in f.attrs.keys())
+            hvals = dict((key, parse_h5_attr(f, key)) for key in f.attrs.keys())
         self.dimensionality = 3
         self.refine_by = 2
         self.unique_identifier = \
@@ -88,6 +89,6 @@
         if not args[0].endswith(".h5"): return False
         with h5py.File(args[0], "r") as f:
             if "data_type" in f.attrs and \
-              f.attrs["data_type"].decode("utf-8") == "halo_catalog":
+              parse_h5_attr(f, "data_type") == "halo_catalog":
                 return True
         return False

diff -r d2fd951ce3fa39d073465a028dc13b229723967e -r dfece2663e3f3fed6c2ecab3fdda9382ea318003 yt/frontends/ytdata/data_structures.py
--- a/yt/frontends/ytdata/data_structures.py
+++ b/yt/frontends/ytdata/data_structures.py
@@ -41,7 +41,8 @@
 from yt.extern.six import \
     string_types
 from yt.funcs import \
-    is_root
+    is_root, \
+    parse_h5_attr
 from yt.geometry.grid_geometry_handler import \
     GridIndex
 from yt.geometry.particle_geometry_handler import \
@@ -65,13 +66,6 @@
                          "covering_grid",
                          "smoothed_covering_grid"]
 
-def parse_h5_attr(f, attr):
-    val = f.attrs.get(attr, None)
-    if isinstance(val, bytes):
-        return val.decode('utf8')
-    else:
-        return val
-
 class YTDataset(Dataset):
     """Base dataset class for all ytdata datasets."""
     def _parse_parameter_file(self):

diff -r d2fd951ce3fa39d073465a028dc13b229723967e -r dfece2663e3f3fed6c2ecab3fdda9382ea318003 yt/funcs.py
--- a/yt/funcs.py
+++ b/yt/funcs.py
@@ -1013,3 +1013,14 @@
     if not hasattr(obj, name):
         setattr(obj, name, value)
     return getattr(obj, name)
+
+def parse_h5_attr(f, attr):
+    """A Python3-safe function for getting hdf5 attributes.
+
+    If an attribute is supposed to be a string, this will return it as such.
+    """
+    val = f.attrs.get(attr, None)
+    if isinstance(val, bytes):
+        return val.decode('utf8')
+    else:
+        return val
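
A brief usage sketch of the new helper (filename hypothetical): parse_h5_attr returns string attributes as str whether h5py hands back bytes or str, and returns None if the attribute is missing.

    import h5py
    from yt.funcs import parse_h5_attr

    # Any HDF5 file written by the code above would do; this name is illustrative.
    with h5py.File("catalog.0.h5", "r") as f:
        data_type = parse_h5_attr(f, "data_type")   # str (decoded if bytes) or None
        if data_type == "halo_catalog":
            print("recognized as a halo_catalog file")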


https://bitbucket.org/yt_analysis/yt/commits/b098e372d197/
Changeset:   b098e372d197
Branch:      yt
User:        brittonsmith
Date:        2016-09-08 14:50:53+00:00
Summary:     Adding comment.
Affected #:  1 file

diff -r dfece2663e3f3fed6c2ecab3fdda9382ea318003 -r b098e372d19752e6db2556a7bc0f91f2783326b9 yt/analysis_modules/halo_analysis/halo_catalog.py
--- a/yt/analysis_modules/halo_analysis/halo_catalog.py
+++ b/yt/analysis_modules/halo_analysis/halo_catalog.py
@@ -468,6 +468,8 @@
         ftypes = {}
         if n_halos > 0:
             for key in self.quantities:
+                # This sets each field to be saved in the root hdf5 group,
+                # as per the HaloCatalog format.
                 ftypes[key] = "."
                 data[key] = self.halos_ds.arr(
                     [halo[key] for halo in self.catalog])


https://bitbucket.org/yt_analysis/yt/commits/6d05ffb05f3f/
Changeset:   6d05ffb05f3f
Branch:      yt
User:        xarthisius
Date:        2016-09-24 21:14:53+00:00
Summary:     Merged in brittonsmith/yt (pull request #2360)

Refactor HaloCatalog saving
Affected #:  6 files

diff -r af34b428166a02df3f069fa33f5df3ca8cf58545 -r 6d05ffb05f3fbe2295f0fcce5db0b82837b03401 yt/analysis_modules/halo_analysis/halo_catalog.py
--- a/yt/analysis_modules/halo_analysis/halo_catalog.py
+++ b/yt/analysis_modules/halo_analysis/halo_catalog.py
@@ -13,10 +13,11 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from yt.utilities.on_demand_imports import _h5py as h5py
 import numpy as np
 import os
 
+from yt.frontends.ytdata.utilities import \
+    save_as_dataset
 from yt.funcs import \
     ensure_dir, \
     mylog
@@ -431,13 +432,16 @@
                     key, quantity = action
                     if quantity in self.halos_ds.field_info:
                         new_halo.quantities[key] = \
-                          self.data_source[quantity][int(i)].in_cgs()
+                          self.data_source[quantity][int(i)]
                     elif callable(quantity):
                         new_halo.quantities[key] = quantity(new_halo)
                 else:
-                    raise RuntimeError("Action must be a callback, filter, or quantity.")
+                    raise RuntimeError(
+                        "Action must be a callback, filter, or quantity.")
 
             if halo_filter:
+                for quantity in new_halo.quantities.values():
+                    quantity.convert_to_base()
                 self.catalog.append(new_halo.quantities)
 
             if save_halos and halo_filter:
@@ -458,27 +462,20 @@
         mylog.info("Saving halo catalog (%d halos) to %s." %
                    (n_halos, os.path.join(self.output_dir,
                                          self.output_prefix)))
-        out_file = h5py.File(filename, 'w')
-        for attr in ["current_redshift", "current_time",
-                     "domain_dimensions",
-                     "cosmological_simulation", "omega_lambda",
-                     "omega_matter", "hubble_constant"]:
-            out_file.attrs[attr] = getattr(self.halos_ds, attr)
-        for attr in ["domain_left_edge", "domain_right_edge"]:
-            out_file.attrs[attr] = getattr(self.halos_ds, attr).in_cgs()
-        out_file.attrs["data_type"] = "halo_catalog"
-        out_file.attrs["num_halos"] = n_halos
+        extra_attrs = {"data_type": "halo_catalog",
+                       "num_halos": n_halos}
+        data = {}
+        ftypes = {}
         if n_halos > 0:
-            field_data = np.empty(n_halos)
             for key in self.quantities:
-                units = ""
-                if hasattr(self.catalog[0][key], "units"):
-                    units = str(self.catalog[0][key].units)
-                for i in range(n_halos):
-                    field_data[i] = self.catalog[i][key]
-                dataset = out_file.create_dataset(str(key), data=field_data)
-                dataset.attrs["units"] = units
-        out_file.close()
+                # This sets each field to be saved in the root hdf5 group,
+                # as per the HaloCatalog format.
+                ftypes[key] = "."
+                data[key] = self.halos_ds.arr(
+                    [halo[key] for halo in self.catalog])
+
+        save_as_dataset(self.halos_ds, filename, data,
+                        field_types=ftypes, extra_attrs=extra_attrs)
 
     def add_default_quantities(self, field_type='halos'):
         self.add_quantity("particle_identifier", field_type=field_type,prepend=True)

diff -r af34b428166a02df3f069fa33f5df3ca8cf58545 -r 6d05ffb05f3fbe2295f0fcce5db0b82837b03401 yt/analysis_modules/halo_analysis/tests/test_halo_finders.py
--- a/yt/analysis_modules/halo_analysis/tests/test_halo_finders.py
+++ b/yt/analysis_modules/halo_analysis/tests/test_halo_finders.py
@@ -2,6 +2,8 @@
 import sys
 
 from yt.convenience import load
+from yt.frontends.halo_catalog.data_structures import \
+    HaloCatalogDataset
 from yt.utilities.answer_testing.framework import \
     FieldValuesTest, \
     requires_ds
@@ -30,6 +32,7 @@
                           "halo_catalogs", method,
                           "%s.0.h5" % method)
         ds = load(fn)
+        assert isinstance(ds, HaloCatalogDataset)
         for field in _fields:
             yield FieldValuesTest(ds, field, particle_type=True,
                                   decimals=decimals[method])

diff -r af34b428166a02df3f069fa33f5df3ca8cf58545 -r 6d05ffb05f3fbe2295f0fcce5db0b82837b03401 yt/frontends/halo_catalog/data_structures.py
--- a/yt/frontends/halo_catalog/data_structures.py
+++ b/yt/frontends/halo_catalog/data_structures.py
@@ -24,6 +24,7 @@
     HaloCatalogFieldInfo
 
 from yt.funcs import \
+    parse_h5_attr, \
     setdefaultattr
 from yt.geometry.particle_geometry_handler import \
     ParticleIndex
@@ -34,7 +35,7 @@
 class HaloCatalogHDF5File(ParticleFile):
     def __init__(self, ds, io, filename, file_id):
         with h5py.File(filename, "r") as f:
-            self.header = dict((field, f.attrs[field]) \
+            self.header = dict((field, parse_h5_attr(f, field)) \
                                for field in f.attrs.keys())
 
         super(HaloCatalogHDF5File, self).__init__(ds, io, filename, file_id)
@@ -56,7 +57,7 @@
 
     def _parse_parameter_file(self):
         with h5py.File(self.parameter_filename, "r") as f:
-            hvals = dict((key, f.attrs[key]) for key in f.attrs.keys())
+            hvals = dict((key, parse_h5_attr(f, key)) for key in f.attrs.keys())
         self.dimensionality = 3
         self.refine_by = 2
         self.unique_identifier = \
@@ -88,6 +89,6 @@
         if not args[0].endswith(".h5"): return False
         with h5py.File(args[0], "r") as f:
             if "data_type" in f.attrs and \
-              f.attrs["data_type"] == "halo_catalog":
+              parse_h5_attr(f, "data_type") == "halo_catalog":
                 return True
         return False

diff -r af34b428166a02df3f069fa33f5df3ca8cf58545 -r 6d05ffb05f3fbe2295f0fcce5db0b82837b03401 yt/frontends/halo_catalog/io.py
--- a/yt/frontends/halo_catalog/io.py
+++ b/yt/frontends/halo_catalog/io.py
@@ -78,6 +78,7 @@
         mylog.debug("Initializing index % 5i (% 7i particles)",
                     data_file.file_id, pcount)
         ind = 0
+        if pcount == 0: return None
         with h5py.File(data_file.filename, "r") as f:
             if not f.keys(): return None
             pos = np.empty((pcount, 3), dtype="float64")

diff -r af34b428166a02df3f069fa33f5df3ca8cf58545 -r 6d05ffb05f3fbe2295f0fcce5db0b82837b03401 yt/frontends/ytdata/data_structures.py
--- a/yt/frontends/ytdata/data_structures.py
+++ b/yt/frontends/ytdata/data_structures.py
@@ -41,7 +41,8 @@
 from yt.extern.six import \
     string_types
 from yt.funcs import \
-    is_root
+    is_root, \
+    parse_h5_attr
 from yt.geometry.grid_geometry_handler import \
     GridIndex
 from yt.geometry.particle_geometry_handler import \
@@ -67,13 +68,6 @@
                          "covering_grid",
                          "smoothed_covering_grid"]
 
-def parse_h5_attr(f, attr):
-    val = f.attrs.get(attr, None)
-    if isinstance(val, bytes):
-        return val.decode('utf8')
-    else:
-        return val
-
 class YTDataset(Dataset):
     """Base dataset class for all ytdata datasets."""
     def _parse_parameter_file(self):

diff -r af34b428166a02df3f069fa33f5df3ca8cf58545 -r 6d05ffb05f3fbe2295f0fcce5db0b82837b03401 yt/funcs.py
--- a/yt/funcs.py
+++ b/yt/funcs.py
@@ -1024,3 +1024,14 @@
     if not hasattr(obj, name):
         setattr(obj, name, value)
     return getattr(obj, name)
+
+def parse_h5_attr(f, attr):
+    """A Python3-safe function for getting hdf5 attributes.
+
+    If an attribute is supposed to be a string, this will return it as such.
+    """
+    val = f.attrs.get(attr, None)
+    if isinstance(val, bytes):
+        return val.decode('utf8')
+    else:
+        return val

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.


