[yt-svn] commit/yt: 6 new changesets

commits-noreply at bitbucket.org commits-noreply at bitbucket.org
Tue Oct 14 18:07:10 PDT 2014


6 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/4a9ddc2ebdbb/
Changeset:   4a9ddc2ebdbb
Branch:      yt
User:        brittonsmith
Date:        2014-10-09 21:20:22+00:00
Summary:     Adding FOF to veto groups to distinguish OWLS from OWLSSubfind with only FOF groups.
Affected #:  1 file

diff -r 047649b328179cc900ab38f3400390e440530ac1 -r 4a9ddc2ebdbb63dee8fc93b133ca4937d5a96649 yt/frontends/sph/data_structures.py
--- a/yt/frontends/sph/data_structures.py
+++ b/yt/frontends/sph/data_structures.py
@@ -379,7 +379,7 @@
     @classmethod
     def _is_valid(self, *args, **kwargs):
         need_groups = ['Constants', 'Header', 'Parameters', 'Units']
-        veto_groups = ['SUBFIND',
+        veto_groups = ['SUBFIND', 'FOF',
                        'PartType0/ChemistryAbundances', 
                        'PartType0/ChemicalAbundances',
                        'RuntimePars', 'HashTable']


https://bitbucket.org/yt_analysis/yt/commits/049569cf7f27/
Changeset:   049569cf7f27
Branch:      yt
User:        brittonsmith
Date:        2014-10-09 21:21:58+00:00
Summary:     Allowing OWLSSubfindDataset to work when no SUBFIND groups are present (only FOF).
Affected #:  2 files

diff -r 4a9ddc2ebdbb63dee8fc93b133ca4937d5a96649 -r 049569cf7f277396c76a6456873f89dbfff77c0c yt/frontends/halo_catalogs/owls_subfind/data_structures.py
--- a/yt/frontends/halo_catalogs/owls_subfind/data_structures.py
+++ b/yt/frontends/halo_catalogs/owls_subfind/data_structures.py
@@ -208,14 +208,19 @@
 
     @classmethod
     def _is_valid(self, *args, **kwargs):
+        need_groups = ['Constants', 'Header', 'Parameters', 'Units', 'FOF']
+        veto_groups = []
+        valid = True
         try:
             fileh = h5py.File(args[0], mode='r')
-            if "Constants" in fileh["/"].keys() and \
-               "Header" in fileh["/"].keys() and \
-               "SUBFIND" in fileh["/"].keys():
-                fileh.close()
-                return True
+            for ng in need_groups:
+                if ng not in fileh["/"]:
+                    valid = False
+            for vg in veto_groups:
+                if vg in fileh["/"]:
+                    valid = False                    
             fileh.close()
         except:
+            valid = False
             pass
-        return False
+        return valid

diff -r 4a9ddc2ebdbb63dee8fc93b133ca4937d5a96649 -r 049569cf7f277396c76a6456873f89dbfff77c0c yt/frontends/halo_catalogs/owls_subfind/io.py
--- a/yt/frontends/halo_catalogs/owls_subfind/io.py
+++ b/yt/frontends/halo_catalogs/owls_subfind/io.py
@@ -113,7 +113,7 @@
                         yield (ptype, field), data
 
     def _initialize_index(self, data_file, regions):
-        pcount = sum(self._count_particles(data_file).values())
+        pcount = sum(data_file.total_particles.values())
         morton = np.empty(pcount, dtype='uint64')
         mylog.debug("Initializing index % 5i (% 7i particles)",
                     data_file.file_id, pcount)
@@ -122,12 +122,11 @@
             if not f.keys(): return None
             dx = np.finfo(f["FOF"]['CenterOfMass'].dtype).eps
             dx = 2.0*self.ds.quan(dx, "code_length")
-            
-            for ptype, pattr in zip(["FOF", "SUBFIND"],
-                                    ["Number_of_groups", "Number_of_subgroups"]):
-                my_pcount = f[ptype].attrs[pattr]
+
+            for ptype in data_file.ds.particle_types_raw:
+                if data_file.total_particles[ptype] == 0: continue
                 pos = f[ptype]["CenterOfMass"].value.astype("float64")
-                pos = np.resize(pos, (my_pcount, 3))
+                pos = np.resize(pos, (data_file.total_particles[ptype], 3))
                 pos = data_file.ds.arr(pos, "code_length")
                 
                 # These are 32 bit numbers, so we give a little lee-way.
@@ -151,17 +150,23 @@
 
     def _count_particles(self, data_file):
         with h5py.File(data_file.filename, "r") as f:
-            # We need this to figure out where the offset fields are stored.
-            data_file.total_offset = f["SUBFIND"].attrs["Number_of_groups"]
-            return {"FOF": f["FOF"].attrs["Number_of_groups"],
-                    "SUBFIND": f["FOF"].attrs["Number_of_subgroups"]}
+            pcount = {"FOF": f["FOF"].attrs["Number_of_groups"]}
+            if "SUBFIND" in f:
+                # We need this to figure out where the offset fields are stored.
+                data_file.total_offset = f["SUBFIND"].attrs["Number_of_groups"]
+                pcount["SUBFIND"] = f["FOF"].attrs["Number_of_subgroups"]
+            else:
+                data_file.total_offset = 0
+                pcount["SUBFIND"] = 0
+            return pcount
 
     def _identify_fields(self, data_file):
-        fields = [(ptype, "particle_identifier")
-                  for ptype in self.ds.particle_types_raw]
+        fields = []
         pcount = data_file.total_particles
         with h5py.File(data_file.filename, "r") as f:
             for ptype in self.ds.particle_types_raw:
+                if data_file.total_particles[ptype] == 0: continue
+                fields.append((ptype, "particle_identifier"))
                 my_fields, my_offset_fields = \
                   subfind_field_list(f[ptype], ptype, data_file.total_particles)
                 fields.extend(my_fields)


https://bitbucket.org/yt_analysis/yt/commits/db9253725afa/
Changeset:   db9253725afa
Branch:      yt
User:        brittonsmith
Date:        2014-10-10 18:03:01+00:00
Summary:     Field querying now succeeds even if not all data files have particles.
Affected #:  2 files

diff -r 049569cf7f277396c76a6456873f89dbfff77c0c -r db9253725afa31be20b0d003a138d9ee6944b601 yt/frontends/halo_catalogs/owls_subfind/data_structures.py
--- a/yt/frontends/halo_catalogs/owls_subfind/data_structures.py
+++ b/yt/frontends/halo_catalogs/owls_subfind/data_structures.py
@@ -80,7 +80,7 @@
         # TODO: Add additional fields
         dsl = []
         units = {}
-        for dom in self.data_files[:1]:
+        for dom in self.data_files:
             fl, _units = self.io._identify_fields(dom)
             units.update(_units)
             dom._calculate_offsets(fl)

diff -r 049569cf7f277396c76a6456873f89dbfff77c0c -r db9253725afa31be20b0d003a138d9ee6944b601 yt/frontends/halo_catalogs/owls_subfind/io.py
--- a/yt/frontends/halo_catalogs/owls_subfind/io.py
+++ b/yt/frontends/halo_catalogs/owls_subfind/io.py
@@ -82,6 +82,7 @@
             with h5py.File(data_file.filename, "r") as f:
                 for ptype, field_list in sorted(ptf.items()):
                     pcount = data_file.total_particles[ptype]
+                    if pcount == 0: continue
                     coords = f[ptype]["CenterOfMass"].value.astype("float64")
                     coords = np.resize(coords, (pcount, 3))
                     x = coords[:, 0]
@@ -115,6 +116,7 @@
     def _initialize_index(self, data_file, regions):
         pcount = sum(data_file.total_particles.values())
         morton = np.empty(pcount, dtype='uint64')
+        if pcount == 0: return morton
         mylog.debug("Initializing index % 5i (% 7i particles)",
                     data_file.file_id, pcount)
         ind = 0
@@ -163,6 +165,7 @@
     def _identify_fields(self, data_file):
         fields = []
         pcount = data_file.total_particles
+        if sum(pcount.values()) == 0: return fields, {}
         with h5py.File(data_file.filename, "r") as f:
             for ptype in self.ds.particle_types_raw:
                 if data_file.total_particles[ptype] == 0: continue


https://bitbucket.org/yt_analysis/yt/commits/83164a430e4d/
Changeset:   83164a430e4d
Branch:      yt
User:        brittonsmith
Date:        2014-10-10 18:43:14+00:00
Summary:     Adding answer testing for OWLSSubfind frontend.
Affected #:  2 files

diff -r db9253725afa31be20b0d003a138d9ee6944b601 -r 83164a430e4d0ee0c5b1149ae3ac22c2c2f319cc yt/frontends/halo_catalogs/owls_subfind/tests/test_outputs.py
--- /dev/null
+++ b/yt/frontends/halo_catalogs/owls_subfind/tests/test_outputs.py
@@ -0,0 +1,40 @@
+"""
+OWLSSubfind frontend tests using owls_fof_halos datasets
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.testing import *
+from yt.utilities.answer_testing.framework import \
+    FieldValuesTest, \
+    requires_ds, \
+    data_dir_load
+
+_fields = ("particle_position_x", "particle_position_y",
+           "particle_position_z", "particle_mass")
+
+g8 = "owls_fof_halos/groups_008/group_008.0.hdf5"
+@requires_ds(g8)
+def test_fields_g8():
+    ds = data_dir_load(g8)
+    yield assert_equal, str(ds), "group_008.0.hdf5"
+    for field in _fields:
+        yield FieldValuesTest(g8, field)
+
+# a dataset with empty files
+g3 = "owls_fof_halos/groups_003/group_003.0.hdf5"
+@requires_ds(g3)
+def test_fields_g3():
+    ds = data_dir_load(g3)
+    yield assert_equal, str(ds), "group_003.0.hdf5"
+    for field in _fields:
+        yield FieldValuesTest(g3, field)


https://bitbucket.org/yt_analysis/yt/commits/25424756ebf1/
Changeset:   25424756ebf1
Branch:      yt
User:        brittonsmith
Date:        2014-10-14 22:33:14+00:00
Summary:     Making syntax slightly nicer.
Affected #:  1 file

diff -r 83164a430e4d0ee0c5b1149ae3ac22c2c2f319cc -r 25424756ebf1a8efd397a79d0fa02b20c4b121f7 yt/frontends/halo_catalogs/owls_subfind/data_structures.py
--- a/yt/frontends/halo_catalogs/owls_subfind/data_structures.py
+++ b/yt/frontends/halo_catalogs/owls_subfind/data_structures.py
@@ -212,14 +212,10 @@
         veto_groups = []
         valid = True
         try:
-            fileh = h5py.File(args[0], mode='r')
-            for ng in need_groups:
-                if ng not in fileh["/"]:
-                    valid = False
-            for vg in veto_groups:
-                if vg in fileh["/"]:
-                    valid = False                    
-            fileh.close()
+            fh = h5py.File(args[0], mode='r')
+            valid = all(ng in fh["/"] for ng in need_groups) and \
+              not any(vg in fh["/"] for vg in veto_groups)
+            fh.close()
         except:
             valid = False
             pass


https://bitbucket.org/yt_analysis/yt/commits/91e843865764/
Changeset:   91e843865764
Branch:      yt
User:        ngoldbaum
Date:        2014-10-15 01:07:03+00:00
Summary:     Merged in brittonsmith/yt (pull request #1247)

[MINOR ENHANCEMENT] Expanding OWLSSubfind frontend to work with only FOF groups
Affected #:  5 files

diff -r 1504f1c3ccb64c005743134c59213fc0d561ada8 -r 91e84386576488097a7f4cf7950cf35d43301954 yt/frontends/halo_catalogs/owls_subfind/data_structures.py
--- a/yt/frontends/halo_catalogs/owls_subfind/data_structures.py
+++ b/yt/frontends/halo_catalogs/owls_subfind/data_structures.py
@@ -80,7 +80,7 @@
         # TODO: Add additional fields
         dsl = []
         units = {}
-        for dom in self.data_files[:1]:
+        for dom in self.data_files:
             fl, _units = self.io._identify_fields(dom)
             units.update(_units)
             dom._calculate_offsets(fl)
@@ -208,14 +208,15 @@
 
     @classmethod
     def _is_valid(self, *args, **kwargs):
+        need_groups = ['Constants', 'Header', 'Parameters', 'Units', 'FOF']
+        veto_groups = []
+        valid = True
         try:
-            fileh = h5py.File(args[0], mode='r')
-            if "Constants" in fileh["/"].keys() and \
-               "Header" in fileh["/"].keys() and \
-               "SUBFIND" in fileh["/"].keys():
-                fileh.close()
-                return True
-            fileh.close()
+            fh = h5py.File(args[0], mode='r')
+            valid = all(ng in fh["/"] for ng in need_groups) and \
+              not any(vg in fh["/"] for vg in veto_groups)
+            fh.close()
         except:
+            valid = False
             pass
-        return False
+        return valid

diff -r 1504f1c3ccb64c005743134c59213fc0d561ada8 -r 91e84386576488097a7f4cf7950cf35d43301954 yt/frontends/halo_catalogs/owls_subfind/io.py
--- a/yt/frontends/halo_catalogs/owls_subfind/io.py
+++ b/yt/frontends/halo_catalogs/owls_subfind/io.py
@@ -82,6 +82,7 @@
             with h5py.File(data_file.filename, "r") as f:
                 for ptype, field_list in sorted(ptf.items()):
                     pcount = data_file.total_particles[ptype]
+                    if pcount == 0: continue
                     coords = f[ptype]["CenterOfMass"].value.astype("float64")
                     coords = np.resize(coords, (pcount, 3))
                     x = coords[:, 0]
@@ -113,8 +114,9 @@
                         yield (ptype, field), data
 
     def _initialize_index(self, data_file, regions):
-        pcount = sum(self._count_particles(data_file).values())
+        pcount = sum(data_file.total_particles.values())
         morton = np.empty(pcount, dtype='uint64')
+        if pcount == 0: return morton
         mylog.debug("Initializing index % 5i (% 7i particles)",
                     data_file.file_id, pcount)
         ind = 0
@@ -122,12 +124,11 @@
             if not f.keys(): return None
             dx = np.finfo(f["FOF"]['CenterOfMass'].dtype).eps
             dx = 2.0*self.ds.quan(dx, "code_length")
-            
-            for ptype, pattr in zip(["FOF", "SUBFIND"],
-                                    ["Number_of_groups", "Number_of_subgroups"]):
-                my_pcount = f[ptype].attrs[pattr]
+
+            for ptype in data_file.ds.particle_types_raw:
+                if data_file.total_particles[ptype] == 0: continue
                 pos = f[ptype]["CenterOfMass"].value.astype("float64")
-                pos = np.resize(pos, (my_pcount, 3))
+                pos = np.resize(pos, (data_file.total_particles[ptype], 3))
                 pos = data_file.ds.arr(pos, "code_length")
                 
                 # These are 32 bit numbers, so we give a little lee-way.
@@ -151,17 +152,24 @@
 
     def _count_particles(self, data_file):
         with h5py.File(data_file.filename, "r") as f:
-            # We need this to figure out where the offset fields are stored.
-            data_file.total_offset = f["SUBFIND"].attrs["Number_of_groups"]
-            return {"FOF": f["FOF"].attrs["Number_of_groups"],
-                    "SUBFIND": f["FOF"].attrs["Number_of_subgroups"]}
+            pcount = {"FOF": f["FOF"].attrs["Number_of_groups"]}
+            if "SUBFIND" in f:
+                # We need this to figure out where the offset fields are stored.
+                data_file.total_offset = f["SUBFIND"].attrs["Number_of_groups"]
+                pcount["SUBFIND"] = f["FOF"].attrs["Number_of_subgroups"]
+            else:
+                data_file.total_offset = 0
+                pcount["SUBFIND"] = 0
+            return pcount
 
     def _identify_fields(self, data_file):
-        fields = [(ptype, "particle_identifier")
-                  for ptype in self.ds.particle_types_raw]
+        fields = []
         pcount = data_file.total_particles
+        if sum(pcount.values()) == 0: return fields, {}
         with h5py.File(data_file.filename, "r") as f:
             for ptype in self.ds.particle_types_raw:
+                if data_file.total_particles[ptype] == 0: continue
+                fields.append((ptype, "particle_identifier"))
                 my_fields, my_offset_fields = \
                   subfind_field_list(f[ptype], ptype, data_file.total_particles)
                 fields.extend(my_fields)

diff -r 1504f1c3ccb64c005743134c59213fc0d561ada8 -r 91e84386576488097a7f4cf7950cf35d43301954 yt/frontends/halo_catalogs/owls_subfind/tests/test_outputs.py
--- /dev/null
+++ b/yt/frontends/halo_catalogs/owls_subfind/tests/test_outputs.py
@@ -0,0 +1,40 @@
+"""
+OWLSSubfind frontend tests using owls_fof_halos datasets
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.testing import *
+from yt.utilities.answer_testing.framework import \
+    FieldValuesTest, \
+    requires_ds, \
+    data_dir_load
+
+_fields = ("particle_position_x", "particle_position_y",
+           "particle_position_z", "particle_mass")
+
+g8 = "owls_fof_halos/groups_008/group_008.0.hdf5"
+@requires_ds(g8)
+def test_fields_g8():
+    ds = data_dir_load(g8)
+    yield assert_equal, str(ds), "group_008.0.hdf5"
+    for field in _fields:
+        yield FieldValuesTest(g8, field)
+
+# a dataset with empty files
+g3 = "owls_fof_halos/groups_003/group_003.0.hdf5"
+@requires_ds(g3)
+def test_fields_g3():
+    ds = data_dir_load(g3)
+    yield assert_equal, str(ds), "group_003.0.hdf5"
+    for field in _fields:
+        yield FieldValuesTest(g3, field)

diff -r 1504f1c3ccb64c005743134c59213fc0d561ada8 -r 91e84386576488097a7f4cf7950cf35d43301954 yt/frontends/sph/data_structures.py
--- a/yt/frontends/sph/data_structures.py
+++ b/yt/frontends/sph/data_structures.py
@@ -379,7 +379,7 @@
     @classmethod
     def _is_valid(self, *args, **kwargs):
         need_groups = ['Constants', 'Header', 'Parameters', 'Units']
-        veto_groups = ['SUBFIND',
+        veto_groups = ['SUBFIND', 'FOF',
                        'PartType0/ChemistryAbundances', 
                        'PartType0/ChemicalAbundances',
                        'RuntimePars', 'HashTable']

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.



More information about the yt-svn mailing list