[yt-svn] commit/yt: ngoldbaum: Merged in brittonsmith/yt (pull request #1247)

commits-noreply@bitbucket.org
Tue Oct 14 18:07:12 PDT 2014


1 new commit in yt:

https://bitbucket.org/yt_analysis/yt/commits/91e843865764/
Changeset:   91e843865764
Branch:      yt
User:        ngoldbaum
Date:        2014-10-15 01:07:03+00:00
Summary:     Merged in brittonsmith/yt (pull request #1247)

[MINOR ENHANCEMENT] Expanding the OWLSSubfind frontend to work with catalogs containing only FOF groups
Affected #:  5 files
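
For context: an OWLS halo catalog produced without a SUBFIND run carries a
top-level FOF group but no SUBFIND group, so the previous validator (which
required SUBFIND) rejected such files outright. A minimal h5py sketch of the
distinction this commit keys on (the filename here is illustrative):

    import h5py

    # A FOF-only catalog has the FOF group but no SUBFIND group at the root.
    with h5py.File("group_003.0.hdf5", mode="r") as fh:
        print("FOF" in fh["/"])      # True
        print("SUBFIND" in fh["/"])  # False for a FOF-only catalog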

diff -r 1504f1c3ccb64c005743134c59213fc0d561ada8 -r 91e84386576488097a7f4cf7950cf35d43301954 yt/frontends/halo_catalogs/owls_subfind/data_structures.py
--- a/yt/frontends/halo_catalogs/owls_subfind/data_structures.py
+++ b/yt/frontends/halo_catalogs/owls_subfind/data_structures.py
@@ -80,7 +80,7 @@
         # TODO: Add additional fields
         dsl = []
         units = {}
-        for dom in self.data_files[:1]:
+        for dom in self.data_files:
             fl, _units = self.io._identify_fields(dom)
             units.update(_units)
             dom._calculate_offsets(fl)
@@ -208,14 +208,15 @@
 
     @classmethod
     def _is_valid(self, *args, **kwargs):
+        need_groups = ['Constants', 'Header', 'Parameters', 'Units', 'FOF']
+        veto_groups = []
+        valid = True
         try:
-            fileh = h5py.File(args[0], mode='r')
-            if "Constants" in fileh["/"].keys() and \
-               "Header" in fileh["/"].keys() and \
-               "SUBFIND" in fileh["/"].keys():
-                fileh.close()
-                return True
-            fileh.close()
+            fh = h5py.File(args[0], mode='r')
+            valid = all(ng in fh["/"] for ng in need_groups) and \
+              not any(vg in fh["/"] for vg in veto_groups)
+            fh.close()
         except:
+            valid = False
             pass
-        return False
+        return valid
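
The rewritten _is_valid reduces to a need/veto membership test over the file's
top-level groups. For reference, the same pattern as a standalone sketch (the
function name is illustrative; the group lists mirror the committed code):

    import h5py

    def looks_like_owls_subfind(filename):
        need_groups = ["Constants", "Header", "Parameters", "Units", "FOF"]
        veto_groups = []
        # Valid only if every required group exists and no vetoed group does.
        try:
            with h5py.File(filename, mode="r") as fh:
                return (all(g in fh["/"] for g in need_groups) and
                        not any(g in fh["/"] for g in veto_groups))
        except (IOError, OSError):
            # Unreadable or non-HDF5 files are simply not this format.
            return False

Opening the file as a context manager also avoids the explicit close() and the
bare except of the committed version.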

diff -r 1504f1c3ccb64c005743134c59213fc0d561ada8 -r 91e84386576488097a7f4cf7950cf35d43301954 yt/frontends/halo_catalogs/owls_subfind/io.py
--- a/yt/frontends/halo_catalogs/owls_subfind/io.py
+++ b/yt/frontends/halo_catalogs/owls_subfind/io.py
@@ -82,6 +82,7 @@
             with h5py.File(data_file.filename, "r") as f:
                 for ptype, field_list in sorted(ptf.items()):
                     pcount = data_file.total_particles[ptype]
+                    if pcount == 0: continue
                     coords = f[ptype]["CenterOfMass"].value.astype("float64")
                     coords = np.resize(coords, (pcount, 3))
                     x = coords[:, 0]
@@ -113,8 +114,9 @@
                         yield (ptype, field), data
 
     def _initialize_index(self, data_file, regions):
-        pcount = sum(self._count_particles(data_file).values())
+        pcount = sum(data_file.total_particles.values())
         morton = np.empty(pcount, dtype='uint64')
+        if pcount == 0: return morton
         mylog.debug("Initializing index % 5i (% 7i particles)",
                     data_file.file_id, pcount)
         ind = 0
@@ -122,12 +124,11 @@
             if not f.keys(): return None
             dx = np.finfo(f["FOF"]['CenterOfMass'].dtype).eps
             dx = 2.0*self.ds.quan(dx, "code_length")
-            
-            for ptype, pattr in zip(["FOF", "SUBFIND"],
-                                    ["Number_of_groups", "Number_of_subgroups"]):
-                my_pcount = f[ptype].attrs[pattr]
+
+            for ptype in data_file.ds.particle_types_raw:
+                if data_file.total_particles[ptype] == 0: continue
                 pos = f[ptype]["CenterOfMass"].value.astype("float64")
-                pos = np.resize(pos, (my_pcount, 3))
+                pos = np.resize(pos, (data_file.total_particles[ptype], 3))
                 pos = data_file.ds.arr(pos, "code_length")
                 
                 # These are 32 bit numbers, so we give a little lee-way.
@@ -151,17 +152,24 @@
 
     def _count_particles(self, data_file):
         with h5py.File(data_file.filename, "r") as f:
-            # We need this to figure out where the offset fields are stored.
-            data_file.total_offset = f["SUBFIND"].attrs["Number_of_groups"]
-            return {"FOF": f["FOF"].attrs["Number_of_groups"],
-                    "SUBFIND": f["FOF"].attrs["Number_of_subgroups"]}
+            pcount = {"FOF": f["FOF"].attrs["Number_of_groups"]}
+            if "SUBFIND" in f:
+                # We need this to figure out where the offset fields are stored.
+                data_file.total_offset = f["SUBFIND"].attrs["Number_of_groups"]
+                pcount["SUBFIND"] = f["FOF"].attrs["Number_of_subgroups"]
+            else:
+                data_file.total_offset = 0
+                pcount["SUBFIND"] = 0
+            return pcount
 
     def _identify_fields(self, data_file):
-        fields = [(ptype, "particle_identifier")
-                  for ptype in self.ds.particle_types_raw]
+        fields = []
         pcount = data_file.total_particles
+        if sum(pcount.values()) == 0: return fields, {}
         with h5py.File(data_file.filename, "r") as f:
             for ptype in self.ds.particle_types_raw:
+                if data_file.total_particles[ptype] == 0: continue
+                fields.append((ptype, "particle_identifier"))
                 my_fields, my_offset_fields = \
                   subfind_field_list(f[ptype], ptype, data_file.total_particles)
                 fields.extend(my_fields)
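
The io.py changes follow one rule: treat SUBFIND as optional and short-circuit
wherever a file holds zero particles of a type. The new counting logic in
isolation looks roughly like this (group and attribute names are those used in
the diff; the helper name is illustrative):

    import h5py

    def count_halo_particles(filename):
        # FOF groups are always present; SUBFIND subgroups exist only when
        # the catalog includes a SUBFIND run.
        with h5py.File(filename, "r") as f:
            pcount = {"FOF": int(f["FOF"].attrs["Number_of_groups"])}
            if "SUBFIND" in f:
                pcount["SUBFIND"] = int(f["FOF"].attrs["Number_of_subgroups"])
            else:
                pcount["SUBFIND"] = 0
        return pcount

With zero counts recorded instead of a KeyError being raised, _initialize_index
and _identify_fields can skip empty files and particle types rather than crash
on missing datasets.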

diff -r 1504f1c3ccb64c005743134c59213fc0d561ada8 -r 91e84386576488097a7f4cf7950cf35d43301954 yt/frontends/halo_catalogs/owls_subfind/tests/test_outputs.py
--- /dev/null
+++ b/yt/frontends/halo_catalogs/owls_subfind/tests/test_outputs.py
@@ -0,0 +1,40 @@
+"""
+OWLSSubfind frontend tests using owls_fof_halos datasets
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.testing import *
+from yt.utilities.answer_testing.framework import \
+    FieldValuesTest, \
+    requires_ds, \
+    data_dir_load
+
+_fields = ("particle_position_x", "particle_position_y",
+           "particle_position_z", "particle_mass")
+
+g8 = "owls_fof_halos/groups_008/group_008.0.hdf5"
+@requires_ds(g8)
+def test_fields_g8():
+    ds = data_dir_load(g8)
+    yield assert_equal, str(ds), "group_008.0.hdf5"
+    for field in _fields:
+        yield FieldValuesTest(g8, field)
+
+# a dataset with empty files
+g3 = "owls_fof_halos/groups_003/group_003.0.hdf5"
+@requires_ds(g3)
+def test_fields_g3():
+    ds = data_dir_load(g3)
+    yield assert_equal, str(ds), "group_003.0.hdf5"
+    for field in _fields:
+        yield FieldValuesTest(g3, field)

diff -r 1504f1c3ccb64c005743134c59213fc0d561ada8 -r 91e84386576488097a7f4cf7950cf35d43301954 yt/frontends/sph/data_structures.py
--- a/yt/frontends/sph/data_structures.py
+++ b/yt/frontends/sph/data_structures.py
@@ -379,7 +379,7 @@
     @classmethod
     def _is_valid(self, *args, **kwargs):
         need_groups = ['Constants', 'Header', 'Parameters', 'Units']
-        veto_groups = ['SUBFIND',
+        veto_groups = ['SUBFIND', 'FOF',
                        'PartType0/ChemistryAbundances', 
                        'PartType0/ChemicalAbundances',
                        'RuntimePars', 'HashTable']
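
Adding 'FOF' to the snapshot frontend's veto list keeps the two validators
mutually exclusive: a FOF-only halo catalog now fails the snapshot check and
passes the OWLSSubfind check above, so yt routes it to the correct frontend. A
toy illustration with trimmed group lists (the helper names are hypothetical):

    def matches_snapshot(groups):
        need = {"Constants", "Header", "Parameters", "Units"}
        veto = {"SUBFIND", "FOF", "RuntimePars", "HashTable"}
        return need <= set(groups) and not (veto & set(groups))

    def matches_subfind(groups):
        need = {"Constants", "Header", "Parameters", "Units", "FOF"}
        return need <= set(groups)

    fof_only = ["Constants", "Header", "Parameters", "Units", "FOF"]
    print(matches_snapshot(fof_only), matches_subfind(fof_only))  # False True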

Repository URL: https://bitbucket.org/yt_analysis/yt/
