[Yt-svn] yt-commit r1571 - in trunk: . yt/lagos

joishi at wrangler.dreamhost.com
Fri Jan 8 11:33:05 PST 2010


Author: joishi
Date: Fri Jan  8 11:33:02 2010
New Revision: 1571
URL: http://yt.enzotools.org/changeset/1571

Log:
Upgraded Orion readers for the hierarchy/grid refactor.

Modified:
   trunk/setup.py
   trunk/yt/lagos/BaseGridType.py
   trunk/yt/lagos/DataReadingFuncs.py
   trunk/yt/lagos/HierarchyType.py
   trunk/yt/lagos/OrionDefs.py
   trunk/yt/lagos/OutputTypes.py

Modified: trunk/setup.py
==============================================================================
--- trunk/setup.py	(original)
+++ trunk/setup.py	Fri Jan  8 11:33:02 2010
@@ -52,7 +52,7 @@
                         "Topic :: Scientific/Engineering :: Physics",
                         "Topic :: Scientific/Engineering :: Visualization", ],
         keywords='astronomy astrophysics visualization amr adaptivemeshrefinement',
-        install_requires = ['matplotlib', 'numpy','ipython'],
+        #install_requires = ['matplotlib', 'numpy','ipython'],
         extras_require = { 'GUI' : ['wxPython'],
                            'storage' : ['h5py'], 
                            'pdf' : ['pypdf']},

Modified: trunk/yt/lagos/BaseGridType.py
==============================================================================
--- trunk/yt/lagos/BaseGridType.py	(original)
+++ trunk/yt/lagos/BaseGridType.py	Fri Jan  8 11:33:02 2010
@@ -520,23 +520,23 @@
 
 class OrionGrid(AMRGridPatch):
     _id_offset = 0
-    def __init__(self, LeftEdge, RightEdge, index, level, filename, offset, dimensions,start,stop,paranoia=False):
-        AMRGridPatch.__init__(self, index)
+    def __init__(self, LeftEdge, RightEdge, index, level, filename, offset, dimensions,start,stop,paranoia=False,**kwargs):
+        AMRGridPatch.__init__(self, index,**kwargs)
         self.filename = filename
         self._offset = offset
         self._paranoid = paranoia
         
         # should error check this
         self.ActiveDimensions = (dimensions.copy()).astype('int32')#.transpose()
-        self.start = start.copy()#.transpose()
-        self.stop = stop.copy()#.transpose()
+        self.start_index = start.copy()#.transpose()
+        self.stop_index = stop.copy()#.transpose()
         self.LeftEdge  = LeftEdge.copy()
         self.RightEdge = RightEdge.copy()
         self.index = index
         self.Level = level
 
     def get_global_startindex(self):
-        return self.start
+        return self.start_index
 
     def _prepare_grid(self):
         """
@@ -546,19 +546,34 @@
         # Now we give it pointers to all of its attributes
         # Note that to keep in line with Enzo, we have broken PEP-8
         h = self.hierarchy # cache it
-        self.StartIndices = h.gridStartIndices[self.id]
-        self.EndIndices = h.gridEndIndices[self.id]
-        h.gridLevels[self.id,0] = self.Level
+        #self.StartIndices = h.gridStartIndices[self.id]
+        #self.EndIndices = h.gridEndIndices[self.id]
+        h.grid_levels[self.id,0] = self.Level
         h.grid_left_edge[self.id,:] = self.LeftEdge[:]
         h.grid_right_edge[self.id,:] = self.RightEdge[:]
-        self.Time = h.gridTimes[self.id,0]
-        self.NumberOfParticles = h.gridNumberOfParticles[self.id,0]
+        #self.Time = h.gridTimes[self.id,0]
+        #self.NumberOfParticles = h.gridNumberOfParticles[self.id,0]
+        self.field_indexes = h.field_indexes
         self.Children = h.gridTree[self.id]
         pIDs = h.gridReverseTree[self.id]
         if len(pIDs) > 0:
             self.Parent = [weakref.proxy(h.grids[pID]) for pID in pIDs]
         else:
-            self.Parent = []
+            self.Parent = None
+
+    def _setup_dx(self):
+        # So first we figure out what the index is.  We don't assume
+        # that dx=dy=dz, at least here.  We probably do elsewhere.
+        id = self.id - self._id_offset
+        if self.Parent is not None:
+            self.dds = self.Parent[0].dds / self.pf["RefineBy"]
+        else:
+            LE, RE = self.hierarchy.grid_left_edge[id,:], \
+                     self.hierarchy.grid_right_edge[id,:]
+            self.dds = na.array((RE-LE)/self.ActiveDimensions)
+        if self.pf["TopGridRank"] < 2: self.dds[1] = 1.0
+        if self.pf["TopGridRank"] < 3: self.dds[2] = 1.0
+        self.data['dx'], self.data['dy'], self.data['dz'] = self.dds
 
     def __repr__(self):
         return "OrionGrid_%04i" % (self.id)
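
The new _setup_dx computes each grid's cell widths (dds) on the grid object itself: a child grid takes its parent's dds divided by pf["RefineBy"], while a root grid derives dds from its edges and ActiveDimensions. A minimal standalone sketch of that logic (the function name and the sample numbers below are illustrative, not part of yt):

    import numpy as na   # yt/lagos imports numpy under the alias `na`

    def compute_dds(left_edge, right_edge, active_dims,
                    parent_dds=None, refine_by=2, rank=3):
        """Sketch of OrionGrid._setup_dx: children inherit parent dds /
        RefineBy; root grids use (RightEdge - LeftEdge) / ActiveDimensions."""
        if parent_dds is not None:
            dds = na.array(parent_dds, dtype='float64') / refine_by
        else:
            dds = (na.array(right_edge, dtype='float64') -
                   na.array(left_edge, dtype='float64')) / na.array(active_dims)
        if rank < 2: dds[1] = 1.0   # pad unused dimensions, as TopGridRank does
        if rank < 3: dds[2] = 1.0
        return dds

    # e.g. a root grid covering a unit box with 32^3 cells:
    # compute_dds([0.,0.,0.], [1.,1.,1.], [32,32,32]) -> [0.03125, 0.03125, 0.03125]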

Modified: trunk/yt/lagos/DataReadingFuncs.py
==============================================================================
--- trunk/yt/lagos/DataReadingFuncs.py	(original)
+++ trunk/yt/lagos/DataReadingFuncs.py	Fri Jan  8 11:33:02 2010
@@ -296,12 +296,97 @@
 
     _data_style = "orion_native"
 
-    def _read_data_set(self, grid, field):
-        return readDataNative(grid, field)
-
     def modify(self, field):
         return field.swapaxes(0,2)
 
+    def _read_data_set(self,grid,field):
+        """
+        reads packed multiFABs output by BoxLib in "NATIVE" format.
+
+        """
+        filen = os.path.expanduser(grid.filename[field])
+        off = grid._offset[field]
+        inFile = open(filen,'rb')
+        inFile.seek(off)
+        header = inFile.readline()
+        header.strip()
+
+        if grid._paranoid:
+            mylog.warn("Orion Native reader: Paranoid read mode.")
+            headerRe = re.compile(orion_FAB_header_pattern)
+            bytesPerReal,endian,start,stop,centerType,nComponents = headerRe.search(header).groups()
+
+            # we will build up a dtype string, starting with endian
+            # check endianness (this code is ugly. fix?)
+            bytesPerReal = int(bytesPerReal)
+            if bytesPerReal == int(endian[0]):
+                dtype = '<'
+            elif bytesPerReal == int(endian[-1]):
+                dtype = '>'
+            else:
+                raise ValueError("FAB header is neither big nor little endian. Perhaps the file is corrupt?")
+
+            dtype += ('f%i'% bytesPerReal) #always a floating point
+
+            # determine size of FAB
+            start = na.array(map(int,start.split(',')))
+            stop = na.array(map(int,stop.split(',')))
+
+            gridSize = stop - start + 1
+
+            error_count = 0
+            if (start != grid.start_index).any():
+                print "Paranoia Error: Cell_H and %s do not agree on grid start." %grid.filename
+                error_count += 1
+            if (stop != grid.stop_index).any():
+                print "Paranoia Error: Cell_H and %s do not agree on grid stop." %grid.filename
+                error_count += 1
+            if (gridSize != grid.ActiveDimensions).any():
+                print "Paranoia Error: Cell_H and %s do not agree on grid dimensions." %grid.filename
+                error_count += 1
+            if bytesPerReal != grid.hierarchy._bytesPerReal:
+                print "Paranoia Error: Cell_H and %s do not agree on bytes per real number." %grid.filename
+                error_count += 1
+            if (bytesPerReal == grid.hierarchy._bytesPerReal and dtype != grid.hierarchy._dtype):
+                print "Paranoia Error: Cell_H and %s do not agree on endianness." %grid.filename
+                error_count += 1
+
+            if error_count > 0:
+                raise RuntimeError("Paranoia unveiled %i differences between Cell_H and %s." % (error_count, grid.filename))
+
+        else:
+            start = grid.start_index
+            stop = grid.stop_index
+            dtype = grid.hierarchy._dtype
+            bytesPerReal = grid.hierarchy._bytesPerReal
+
+        nElements = grid.ActiveDimensions.prod()
+
+        # one field has nElements*bytesPerReal bytes and is located
+        # nElements*bytesPerReal*field_index from the offset location
+        if yt2orionFieldsDict.has_key(field):
+            fieldname = yt2orionFieldsDict[field]
+        else:
+            fieldname = field
+        field_index = grid.field_indexes[fieldname]
+        inFile.seek(int(nElements*bytesPerReal*field_index),1)
+        field = na.fromfile(inFile,count=nElements,dtype=dtype)
+        field = field.reshape(grid.ActiveDimensions[::-1]).swapaxes(0,2)
+
+        # we can/should also check against the max and min in the header file
+
+        inFile.close()
+        return field
+
+    def _read_data_slice(self, grid, field, axis, coord):
+        """wishful thinking?
+        """
+        sl = [slice(None), slice(None), slice(None)]
+        sl[axis] = slice(coord, coord + 1)
+        #sl = tuple(reversed(sl))
+        return self._read_data_set(grid,field)[sl]
+
+
 class IOHandlerPacked2D(IOHandlerPackedHDF5):
 
     _data_style = "enzo_packed_2d"
@@ -338,92 +423,4 @@
 #
 # BoxLib/Orion data readers follow
 #
-def readDataNative(self,field):
-    """
-    reads packed multiFABs output by BoxLib in "NATIVE" format.
-
-    """
-    filen = os.path.expanduser(self.filename[field])
-    off = self._offset[field]
-    inFile = open(filen,'rb')
-    inFile.seek(off)
-    header = inFile.readline()
-    header.strip()
-
-    if self._paranoid:
-        mylog.warn("Orion Native reader: Paranoid read mode.")
-        headerRe = re.compile(orion_FAB_header_pattern)
-        bytesPerReal,endian,start,stop,centerType,nComponents = headerRe.search(header).groups()
-
-        # we will build up a dtype string, starting with endian
-        # check endianness (this code is ugly. fix?)
-        bytesPerReal = int(bytesPerReal)
-        if bytesPerReal == int(endian[0]):
-            dtype = '<'
-        elif bytesPerReal == int(endian[-1]):
-            dtype = '>'
-        else:
-            raise ValueError("FAB header is neither big nor little endian. Perhaps the file is corrupt?")
-
-        dtype += ('f%i'% bytesPerReal) #always a floating point
-
-        # determine size of FAB
-        start = na.array(map(int,start.split(',')))
-        stop = na.array(map(int,stop.split(',')))
-
-        gridSize = stop - start + 1
-
-        error_count = 0
-        if (start != self.start).any():
-            print "Paranoia Error: Cell_H and %s do not agree on grid start." %self.filename
-            error_count += 1
-        if (stop != self.stop).any():
-            print "Paranoia Error: Cell_H and %s do not agree on grid stop." %self.filename
-            error_count += 1
-        if (gridSize != self.ActiveDimensions).any():
-            print "Paranoia Error: Cell_H and %s do not agree on grid dimensions." %self.filename
-            error_count += 1
-        if bytesPerReal != self.hierarchy._bytesPerReal:
-            print "Paranoia Error: Cell_H and %s do not agree on bytes per real number." %self.filename
-            error_count += 1
-        if (bytesPerReal == self.hierarchy._bytesPerReal and dtype != self.hierarchy._dtype):
-            print "Paranoia Error: Cell_H and %s do not agree on endianness." %self.filename
-            error_count += 1
-
-        if error_count > 0:
-            raise RunTimeError("Paranoia unveiled %i differences between Cell_H and %s." % (error_count, self.filename))
-
-    else:
-        start = self.start
-        stop = self.stop
-        dtype = self.hierarchy._dtype
-        bytesPerReal = self.hierarchy._bytesPerReal
-        
-    nElements = self.ActiveDimensions.prod()
-
-    # one field has nElements*bytesPerReal bytes and is located
-    # nElements*bytesPerReal*field_index from the offset location
-    if yt2orionFieldsDict.has_key(field):
-        fieldname = yt2orionFieldsDict[field]
-    else:
-        fieldname = field
-    field_index = self.field_indexes[fieldname]
-    inFile.seek(int(nElements*bytesPerReal*field_index),1)
-    field = na.fromfile(inFile,count=nElements,dtype=dtype)
-    field = field.reshape(self.ActiveDimensions[::-1]).swapaxes(0,2)
-
-    # we can/should also check against the max and min in the header file
     
-    inFile.close()
-    return field
-    
-def readAllDataNative():
-    pass
-
-def readDataSliceNative(self, grid, field, axis, coord):
-    """wishful thinking?
-    """
-    sl = [slice(None), slice(None), slice(None)]
-    sl[axis] = slice(coord, coord + 1)
-    #sl = tuple(reversed(sl))
-    return grid.readDataFast(field)[sl]

Modified: trunk/yt/lagos/HierarchyType.py
==============================================================================
--- trunk/yt/lagos/HierarchyType.py	(original)
+++ trunk/yt/lagos/HierarchyType.py	Fri Jan  8 11:33:02 2010
@@ -61,7 +61,6 @@
         self._setup_data_io()
 
         mylog.debug("Detecting fields.")
-        self.field_list = []
         self._detect_fields()
 
         mylog.debug("Adding unknown detected fields")
@@ -520,6 +519,7 @@
         # We don't do any of the logic here, we just check if the data file
         # is open...
         if self._data_file is None: return
+        if self._data_mode == 'r': return
         if self.data_style != "enzo_packed_3d": return
         mylog.info("Storing the binary hierarchy")
         f = h5py.File(self.hierarchy_filename[:-9] + "harrays", "w")
@@ -576,6 +576,7 @@
         self.max_level = self.grid_levels.max()
 
     def _detect_fields(self):
+        self.field_list = []
         # Do this only on the root processor to save disk work.
         if self._mpi_get_rank() == 0 or self._mpi_get_rank() == None:
             field_list = self.get_data("/", "DataFields")
@@ -849,6 +850,7 @@
     yield buf  # First line.
 
 class OrionHierarchy(AMRHierarchy):
+    grid = OrionGrid
     def __init__(self, pf, data_style='orion_native'):
         self.field_info = OrionFieldContainer()
         self.field_indexes = {}
@@ -856,13 +858,15 @@
         header_filename = os.path.join(pf.fullplotdir,'Header')
         self.directory = pf.fullpath
         self.data_style = data_style
-        self._setup_classes()
+        #self._setup_classes()
+
         self.readGlobalHeader(header_filename,self.parameter_file.paranoid_read) # also sets up the grid objects
         self.__cache_endianness(self.levels[-1].grids[-1])
-        AMRHierarchy.__init__(self,pf)
+        AMRHierarchy.__init__(self,pf, self.data_style)
         self._setup_data_io()
         self._setup_field_list()
-
+        self._populate_hierarchy()
+        
     def readGlobalHeader(self,filename,paranoid_read):
         """
         read the global header file for an Orion plotfile output.
@@ -992,7 +996,7 @@
                 lo = na.array([xlo,ylo,zlo])
                 hi = na.array([xhi,yhi,zhi])
                 dims,start,stop = self.__calculate_grid_dimensions(start_stop_index[grid])
-                self.levels[-1].grids.append(self.grid(lo,hi,grid_counter,level,gfn, gfo, dims,start,stop,paranoia=paranoid_read))
+                self.levels[-1].grids.append(self.grid(lo,hi,grid_counter,level,gfn, gfo, dims,start,stop,paranoia=paranoid_read,hierarchy=self))
                 grid_counter += 1 # this is global, and shouldn't be reset
                                   # for each level
 
@@ -1039,30 +1043,22 @@
         dimension = stop - start + 1
         return dimension,start,stop
         
-
-    def _initialize_grids(self):
-        mylog.debug("Allocating memory for %s grids", self.num_grids)
-        self.gridDimensions = na.zeros((self.num_grids,3), 'int32')
-        self.gridStartIndices = na.zeros((self.num_grids,3), 'int32')
-        self.gridEndIndices = na.zeros((self.num_grids,3), 'int32')
-        self.gridTimes = na.zeros((self.num_grids,1), 'float64')
-        self.gridNumberOfParticles = na.zeros((self.num_grids,1))
-        mylog.debug("Done allocating")
+    def _populate_grid_objects(self):
         mylog.debug("Creating grid objects")
         self.grids = na.concatenate([level.grids for level in self.levels])
-        self.gridLevels = na.concatenate([level.ngrids*[level.level] for level in self.levels])
-        self.gridLevels = self.gridLevels.reshape((self.num_grids,1))
-        gridDcs = na.concatenate([level.ngrids*[self.dx[level.level]] for level in self.levels],axis=0)
-        self.gridDxs = gridDcs[:,0].reshape((self.num_grids,1))
-        self.gridDys = gridDcs[:,1].reshape((self.num_grids,1))
-        self.gridDzs = gridDcs[:,2].reshape((self.num_grids,1))
+        self.grid_levels = na.concatenate([level.ngrids*[level.level] for level in self.levels])
+        self.grid_levels = self.grid_levels.reshape((self.num_grids,1))
+        grid_dcs = na.concatenate([level.ngrids*[self.dx[level.level]] for level in self.levels],axis=0)
+        self.grid_dxs = grid_dcs[:,0].reshape((self.num_grids,1))
+        self.grid_dys = grid_dcs[:,1].reshape((self.num_grids,1))
+        self.grid_dzs = grid_dcs[:,2].reshape((self.num_grids,1))
         left_edges = []
         right_edges = []
         for level in self.levels:
             left_edges += [g.LeftEdge for g in level.grids]
             right_edges += [g.RightEdge for g in level.grids]
-        self.gridLeftEdge = na.array(left_edges)
-        self.gridRightEdge = na.array(right_edges)
+        self.grid_left_edge = na.array(left_edges)
+        self.grid_right_edge = na.array(right_edges)
         self.gridReverseTree = [] * self.num_grids
         self.gridReverseTree = [ [] for i in range(self.num_grids)]
         self.gridTree = [ [] for i in range(self.num_grids)]
@@ -1070,7 +1066,7 @@
 
     def _populate_hierarchy(self):
         self.__setup_grid_tree()
-        self._setup_grid_corners()
+        #self._setup_grid_corners()
         for i, grid in enumerate(self.grids):
             if (i%1e4) == 0: mylog.debug("Prepared % 7i / % 7i grids", i, self.num_grids)
             grid._prepare_grid()
@@ -1087,14 +1083,14 @@
         dd = self._get_data_reader_dict()
         dd["field_indexes"] = self.field_indexes
         AMRHierarchy._setup_classes(self, dd)
-        self._add_object_class('grid', "OrionGrid", OrionGridBase, dd)
+        #self._add_object_class('grid', "OrionGrid", OrionGridBase, dd)
         self.object_types.sort()
 
     def _get_grid_children(self, grid):
         mask = na.zeros(self.num_grids, dtype='bool')
         grids, grid_ind = self.get_box_grids(grid.LeftEdge, grid.RightEdge)
         mask[grid_ind] = True
-        mask = na.logical_and(mask, (self.gridLevels == (grid.Level+1)).flat)
+        mask = na.logical_and(mask, (self.grid_levels == (grid.Level+1)).flat)
         return self.grids[mask]
 
     def _setup_field_list(self):
@@ -1110,6 +1106,41 @@
             if field not in self.derived_field_list:
                 self.derived_field_list.append(field)
 
+    def _count_grids(self):
+        """this is already provided in 
+
+        """
+        pass
+
+    def _initialize_grid_arrays(self):
+        mylog.debug("Allocating arrays for %s grids", self.num_grids)
+        self.grid_dimensions = na.ones((self.num_grids,3), 'int32')
+        self.grid_left_edge = na.zeros((self.num_grids,3), self.float_type)
+        self.grid_right_edge = na.ones((self.num_grids,3), self.float_type)
+        self.grid_levels = na.zeros((self.num_grids,1), 'int32')
+        self.grid_particle_count = na.zeros((self.num_grids,1), 'int32')
+
+    def _parse_hierarchy(self):
+        pass
+    
+    def _detect_fields(self):
+        pass
+
+    def _setup_unknown_fields(self):
+        pass
+
+    def _setup_derived_fields(self):
+        pass
+
+    def _initialize_state_variables(self):
+        """override to not re-initialize num_grids in AMRHierarchy.__init__
+
+        """
+        self._parallel_locking = False
+        self._data_file = None
+        self._data_mode = None
+        self._max_locations = {}
+    
 class OrionLevel:
     def __init__(self,level,ngrids):
         self.level = level

Modified: trunk/yt/lagos/OrionDefs.py
==============================================================================
--- trunk/yt/lagos/OrionDefs.py	(original)
+++ trunk/yt/lagos/OrionDefs.py	Fri Jan  8 11:33:02 2010
@@ -31,7 +31,8 @@
 # converts the Orion inputs file name to the Enzo/yt name expected
 # throughout the code. key is Orion name, value is Enzo/yt equivalent
 orion2enzoDict = {"amr.n_cell": "TopGridRank",
-                  "materials.gamma": "Gamma"
+                  "materials.gamma": "Gamma",
+                  "amr.ref_ratio": "RefineBy"
                   }
 
 yt2orionFieldsDict = {}
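
orion2enzoDict renames Orion inputs-file keys to the Enzo-style parameter names used throughout yt; the new amr.ref_ratio entry is what makes pf["RefineBy"] available to OrionGrid._setup_dx. A small sketch of how such a translation might be applied while scanning an inputs file (the parsing loop is only an illustration; the dictionary itself is the part taken from this file):

    orion2enzoDict = {"amr.n_cell": "TopGridRank",
                      "materials.gamma": "Gamma",
                      "amr.ref_ratio": "RefineBy"}

    def translate_orion_parameters(lines):
        """Map 'key = value' pairs from an Orion inputs file onto yt names."""
        parameters = {}
        for line in lines:
            if "=" not in line:
                continue
            key, value = [part.strip() for part in line.split("=", 1)]
            parameters[orion2enzoDict.get(key, key)] = value.split()
        return parameters

    # translate_orion_parameters(["amr.ref_ratio = 2 2 2"])
    #   -> {'RefineBy': ['2', '2', '2']}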

Modified: trunk/yt/lagos/OutputTypes.py
==============================================================================
--- trunk/yt/lagos/OutputTypes.py	(original)
+++ trunk/yt/lagos/OutputTypes.py	Fri Jan  8 11:33:02 2010
@@ -494,7 +494,7 @@
         self.parameters["EOSType"] = -1 # default
         if self.fparameters.has_key("mu"):
             self.parameters["mu"] = self.fparameters["mu"]
-
+        self.parameters["RefineBy"] = self.parameters["RefineBy"][0]
     def _localize(self, f, default):
         if f is None:
             return os.path.join(self.directory, default)
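
Because amr.ref_ratio can list one ratio per level, the translated "RefineBy" value arrives as a sequence; the added line keeps only the leading entry so that the scalar division in OrionGrid._setup_dx works. For example (the numbers are illustrative):

    parameters = {"RefineBy": [2, 2, 2]}                 # per-level ratios
    parameters["RefineBy"] = parameters["RefineBy"][0]   # -> 2
    # a child grid's cell width is then Parent[0].dds / pf["RefineBy"]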


