[yt-svn] commit/yt: samskillman: Some fixes to the gdf. Still issues with field detection.

Bitbucket commits-noreply at bitbucket.org
Wed Jan 4 09:18:01 PST 2012


1 new commit in yt:


https://bitbucket.org/yt_analysis/yt/changeset/57483ef32927/
changeset:   57483ef32927
branch:      yt
user:        samskillman
date:        2012-01-04 16:50:59
summary:     Some fixes to the gdf.  Still issues with field detection.
affected #:  4 files

diff -r 75de61bbd2b98c71d1993f6260ef91f4dd16ff74 -r 57483ef329272546cb43a455ff08f7429f92ded8 yt/frontends/gdf/api.py
--- a/yt/frontends/gdf/api.py
+++ b/yt/frontends/gdf/api.py
@@ -1,13 +1,14 @@
 """
-API for yt.frontends.chombo
+API for yt.frontends.gdf
 
+Author: Samuel W. Skillman <samskillman at gmail.com>
+Affiliation: University of Colorado at Boulder
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: UCSD
 Author: J.S. Oishi <jsoishi at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.Chombotools.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


diff -r 75de61bbd2b98c71d1993f6260ef91f4dd16ff74 -r 57483ef329272546cb43a455ff08f7429f92ded8 yt/frontends/gdf/data_structures.py
--- a/yt/frontends/gdf/data_structures.py
+++ b/yt/frontends/gdf/data_structures.py
@@ -1,12 +1,15 @@
 """
-Data structures for Chombo.
+Data structures for GDF.
 
+Author: Samuel W. Skillman <samskillman at gmail.com>
+Affiliation: University of Colorado at Boulder
 Author: Matthew Turk <matthewturk at gmail.com>
 Author: J. S. Oishi <jsoishi at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
 Homepage: http://yt-project.org/
 License:
-  Copyright (C) 2008-2011 Matthew Turk, J. S. Oishi.  All Rights Reserved.
+  Copyright (C) 2008-2011 Samuel W. Skillman, Matthew Turk, J. S. Oishi.  
+  All Rights Reserved.
 
   This file is part of yt.
 
@@ -76,7 +79,7 @@
         # for now, the hierarchy file is the parameter file!
         self.hierarchy_filename = self.parameter_file.parameter_filename
         self.directory = os.path.dirname(self.hierarchy_filename)
-        self._fhandle = h5py.File(self.hierarchy_filename)
+        self._fhandle = h5py.File(self.hierarchy_filename,'r')
         AMRHierarchy.__init__(self,pf,data_style)
 
         self._fhandle.close()
@@ -94,31 +97,33 @@
 
     def _count_grids(self):
         self.num_grids = self._fhandle['/grid_parent_id'].shape[0]
-        
+       
     def _parse_hierarchy(self):
         f = self._fhandle 
-        
-        # this relies on the first Group in the H5 file being
-        # 'Chombo_global'
+    
         levels = f.listnames()[1:]
         dxs=[]
         self.grids = na.empty(self.num_grids, dtype='object')
-        for i, grid in enumerate(f['data'].keys()):
-            self.grids[i] = self.grid(i, self, f['grid_level'][i],
-                                      f['grid_left_index'][i],
-                                      f['grid_dimensions'][i])
-            self.grids[i]._level_id = f['grid_level'][i]
+        levels = (f['grid_level'][:]).copy()
+        glis = (f['grid_left_index'][:]).copy()
+        gdims = (f['grid_dimensions'][:]).copy()
+        for i in range(levels.shape[0]):
+            self.grids[i] = self.grid(i, self, levels[i],
+                                      glis[i],
+                                      gdims[i])
+            self.grids[i]._level_id = levels[i]
 
             dx = (self.parameter_file.domain_right_edge-
                   self.parameter_file.domain_left_edge)/self.parameter_file.domain_dimensions
-            dx = dx/self.parameter_file.refine_by**(f['grid_level'][i])
+            dx = dx/self.parameter_file.refine_by**(levels[i])
             dxs.append(dx)
         dx = na.array(dxs)
-        self.grid_left_edge = self.parameter_file.domain_left_edge + dx*f['grid_left_index'][:]
-        self.grid_dimensions = f['grid_dimensions'][:].astype("int32")
+        self.grid_left_edge = self.parameter_file.domain_left_edge + dx*glis
+        self.grid_dimensions = gdims.astype("int32")
         self.grid_right_edge = self.grid_left_edge + dx*self.grid_dimensions
         self.grid_particle_count = f['grid_particle_count'][:]
-
+        del levels, glis, gdims
+ 
     def _populate_grid_objects(self):
         for g in self.grids:
             g._prepare_grid()
@@ -171,7 +176,11 @@
         # This should be improved.
         self._handle = h5py.File(self.parameter_filename, "r")
         for field_name in self._handle["/field_types"]:
-            self.units[field_name] = self._handle["/field_types/%s" % field_name].attrs['field_to_cgs']
+            try:
+                self.units[field_name] = self._handle["/field_types/%s" % field_name].attrs['field_to_cgs']
+            except:
+                self.units[field_name] = 1.0
+
         self._handle.close()
         del self._handle
         
@@ -181,7 +190,9 @@
         self.domain_left_edge = sp["domain_left_edge"][:]
         self.domain_right_edge = sp["domain_right_edge"][:]
         self.domain_dimensions = sp["domain_dimensions"][:]
-        self.refine_by = sp["refine_by"]
+        refine_by = sp["refine_by"]
+        if refine_by is None: refine_by = 2
+        self.refine_by = refine_by 
         self.dimensionality = sp["dimensionality"]
         self.current_time = sp["current_time"]
         self.unique_identifier = sp["unique_identifier"]
@@ -198,6 +209,7 @@
         else:
             self.current_redshift = self.omega_lambda = self.omega_matter = \
                 self.hubble_constant = self.cosmological_simulation = 0.0
+        self.parameters['Time'] = 1.0 # Hardcode time conversion for now.
         self.parameters["HydroMethod"] = 0 # Hardcode for now until field staggering is supported.
         self._handle.close()
         del self._handle
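
The hierarchy changes above read /grid_level, /grid_left_index and
/grid_dimensions into in-memory arrays once, instead of indexing the HDF5
datasets inside the loop, and then derive the grid edges from them.  The
edge computation reduces to the following standalone sketch (plain NumPy,
with hypothetical example values standing in for the data read from the
GDF file):

    import numpy as na  # same numpy alias the frontend uses

    # Hypothetical inputs; in the frontend these come from the parameter
    # file and the /grid_* datasets in the GDF file.
    domain_left_edge  = na.array([0.0, 0.0, 0.0])
    domain_right_edge = na.array([1.0, 1.0, 1.0])
    domain_dimensions = na.array([32, 32, 32])
    refine_by = 2

    levels = na.array([0, 1])                 # /grid_level
    glis   = na.array([[0, 0, 0],             # /grid_left_index
                       [16, 16, 16]])
    gdims  = na.array([[32, 32, 32],          # /grid_dimensions
                       [32, 32, 32]])

    # Per-grid cell width: root-grid spacing divided by refine_by**level.
    dx = (domain_right_edge - domain_left_edge) / domain_dimensions
    dx = dx[na.newaxis, :] / refine_by ** levels[:, na.newaxis]

    grid_left_edge  = domain_left_edge + dx * glis
    grid_right_edge = grid_left_edge + dx * gdims.astype("int32")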


diff -r 75de61bbd2b98c71d1993f6260ef91f4dd16ff74 -r 57483ef329272546cb43a455ff08f7429f92ded8 yt/frontends/gdf/fields.py
--- a/yt/frontends/gdf/fields.py
+++ b/yt/frontends/gdf/fields.py
@@ -1,11 +1,14 @@
 """
 GDF-specific fields
 
+Author: Samuel W. Skillman <samskillman at gmail.com>
+Affiliation: University of Colorado at Boulder
 Author: J. S. Oishi <jsoishi at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
 Homepage: http://yt-project.org/
 License:
-  Copyright (C) 2009-2011 J. S. Oishi, Matthew Turk.  All Rights Reserved.
+  Copyright (C) 2008-2011 Samuel W. Skillman, Matthew Turk, J. S. Oishi.  
+  All Rights Reserved.
 
   This file is part of yt.
 
@@ -53,40 +56,31 @@
 add_gdf_field = KnownGDFFields.add_field
 
 add_gdf_field("density", function=NullFunc, take_log=True,
-          validators = [ValidateDataField("density")],
           units=r"\rm{g}/\rm{cm}^3",
           projected_units =r"\rm{g}/\rm{cm}^2")
 
 add_gdf_field("specific_energy", function=NullFunc, take_log=True,
-          validators = [ValidateDataField("specific_energy")],
           units=r"\rm{erg}/\rm{g}")
 
 add_gdf_field("pressure", function=NullFunc, take_log=True,
-          validators = [ValidateDataField("pressure")],
           units=r"\rm{erg}/\rm{g}")
 
-add_gdf_field("velocity_x", function=NullFunc, take_log=True,
-          validators = [ValidateDataField("velocity_x")],
+add_gdf_field("velocity_x", function=NullFunc, take_log=False,
           units=r"\rm{cm}/\rm{s}")
 
 add_gdf_field("velocity_y", function=NullFunc, take_log=False,
-          validators = [ValidateDataField("velocity_y")],
           units=r"\rm{cm}/\rm{s}")
 
 add_gdf_field("velocity_z", function=NullFunc, take_log=False,
-          validators = [ValidateDataField("velocity_z")],
           units=r"\rm{cm}/\rm{s}")
 
 add_gdf_field("mag_field_x", function=NullFunc, take_log=False,
-          validators = [ValidateDataField("mag_field_x")],
           units=r"\rm{cm}/\rm{s}")
 
 add_gdf_field("mag_field_y", function=NullFunc, take_log=False,
-          validators = [ValidateDataField("mag_field_y")],
           units=r"\rm{cm}/\rm{s}")
 
 add_gdf_field("mag_field_z", function=NullFunc, take_log=False,
-          validators = [ValidateDataField("mag_field_z")],
           units=r"\rm{cm}/\rm{s}")
 
 for f,v in log_translation_dict.items():
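
The field registrations above drop the per-field ValidateDataField
validators and keep only NullFunc, take_log and the CGS unit strings.  The
repeated add_gdf_field calls could equally be driven from a small table; a
minimal sketch of that idea follows (the dictionary only restates the units
already declared in this diff, density's projected_units is omitted for
brevity, and the table itself is not part of the commit; add_gdf_field and
NullFunc are the names defined earlier in fields.py):

    # field name -> (take_log, CGS unit string), restating the diff above
    _known_gdf_fields = {
        "density":         (True,  r"\rm{g}/\rm{cm}^3"),
        "specific_energy": (True,  r"\rm{erg}/\rm{g}"),
        "pressure":        (True,  r"\rm{erg}/\rm{g}"),
        "velocity_x":      (False, r"\rm{cm}/\rm{s}"),
        "velocity_y":      (False, r"\rm{cm}/\rm{s}"),
        "velocity_z":      (False, r"\rm{cm}/\rm{s}"),
        "mag_field_x":     (False, r"\rm{cm}/\rm{s}"),
        "mag_field_y":     (False, r"\rm{cm}/\rm{s}"),
        "mag_field_z":     (False, r"\rm{cm}/\rm{s}"),
    }

    for name, (take_log, units) in _known_gdf_fields.items():
        add_gdf_field(name, function=NullFunc, take_log=take_log, units=units)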


diff -r 75de61bbd2b98c71d1993f6260ef91f4dd16ff74 -r 57483ef329272546cb43a455ff08f7429f92ded8 yt/frontends/gdf/io.py
--- a/yt/frontends/gdf/io.py
+++ b/yt/frontends/gdf/io.py
@@ -1,6 +1,8 @@
 """
 The data-file handling functions
 
+Author: Samuel W. Skillman <samskillman at gmail.com>
+Affiliation: University of Colorado at Boulder
 Author: Matthew Turk <matthewturk at gmail.com>
 Author: J. S. Oishi <jsoishi at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
@@ -35,38 +37,25 @@
     def _field_dict(self,fhandle):
         keys = fhandle['field_types'].keys()
         val = fhandle['field_types'].keys()
-        # ncomp = int(fhandle['/'].attrs['num_components'])
-        # temp =  fhandle['/'].attrs.listitems()[-ncomp:]
-        # val, keys = zip(*temp)
-        # val = [int(re.match('component_(\d+)',v).groups()[0]) for v in val]
         return dict(zip(keys,val))
         
     def _read_field_names(self,grid):
         fhandle = h5py.File(grid.filename,'r')
-        return fhandle['field_types'].keys()
+        names = fhandle['field_types'].keys()
+        fhandle.close()
+        return names
     
     def _read_data_set(self,grid,field):
         fhandle = h5py.File(grid.hierarchy.hierarchy_filename,'r')
-        return fhandle['/data/grid_%010i/'%grid.id+field][:]
-        # field_dict = self._field_dict(fhandle)
-        # lstring = 'level_%i' % grid.Level
-        # lev = fhandle[lstring]
-        # dims = grid.ActiveDimensions
-        # boxsize = dims.prod()
-        
-        # grid_offset = lev[self._offset_string][grid._level_id]
-        # start = grid_offset+field_dict[field]*boxsize
-        # stop = start + boxsize
-        # data = lev[self._data_string][start:stop]
-
-        # return data.reshape(dims, order='F')
-                                          
+        data = (fhandle['/data/grid_%010i/'%grid.id+field][:]).copy()
+        fhandle.close()
+        return data
 
     def _read_data_slice(self, grid, field, axis, coord):
         sl = [slice(None), slice(None), slice(None)]
         sl[axis] = slice(coord, coord + 1)
         fhandle = h5py.File(grid.hierarchy.hierarchy_filename,'r')
-        return fhandle['/data/grid_%010i/'%grid.id+field][:][sl]
+        data = (fhandle['/data/grid_%010i/'%grid.id+field][:][sl]).copy()
+        fhandle.close()
+        return data
 
-    # return self._read_data_set(grid,field)[sl]
-
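
The io.py changes all follow one pattern: open the HDF5 file read-only,
copy the requested data into memory, and close the handle before returning,
so no returned array keeps a reference to an open file.  An equivalent
sketch of that pattern using h5py's context manager (not the commit's
literal code; the grid object here is hypothetical):

    import h5py

    def read_grid_field(grid, field):
        # The with-block closes the file on exit; slicing with [:] already
        # yields an in-memory NumPy copy of the dataset.
        with h5py.File(grid.hierarchy.hierarchy_filename, "r") as fhandle:
            return fhandle["/data/grid_%010i/%s" % (grid.id, field)][:]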

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this message because the commit notification service is enabled and
addresses you as the recipient of this email.


