[Yt-svn] commit/yt: MatthewTurk: Attempting to close file handles when they have been opened.
Bitbucket
commits-noreply at bitbucket.org
Thu Nov 3 07:05:56 PDT 2011
1 new commit in yt:
https://bitbucket.org/yt_analysis/yt/changeset/280982fcf8ab/
changeset: 280982fcf8ab
branch: yt
user: MatthewTurk
date: 2011-11-03 15:05:29
summary: Attempting to close file handles when they have been opened.
affected #: 14 files
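Most of the hunks below apply a single pattern: any handle a function
opens itself is closed again before control leaves that function. As a
minimal sketch of the pattern (a hypothetical reader, not code from
this changeset), with try/finally also covering the exceptional exit:

import h5py

def read_num_components(filename):
    # Open, use, close: the finally block runs on the normal return
    # and also when the attribute read itself raises.
    fhandle = h5py.File(filename, 'r')
    try:
        return int(fhandle['/'].attrs['num_components'])
    finally:
        fhandle.close()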
diff -r 0b853a9d693b36d2ea7bb8047e2eeeb37ced41f0 -r 280982fcf8ab02fcdc5163240084b0b269941837 yt/analysis_modules/halo_finding/halo_objects.py
--- a/yt/analysis_modules/halo_finding/halo_objects.py
+++ b/yt/analysis_modules/halo_finding/halo_objects.py
@@ -1752,6 +1752,7 @@
for halo in self._groups:
if not self.comm.is_mine(halo): continue
halo.write_particle_list(f)
+ f.close()
def dump(self, basename="HopAnalysis"):
r"""Save the full halo data to disk.
diff -r 0b853a9d693b36d2ea7bb8047e2eeeb37ced41f0 -r 280982fcf8ab02fcdc5163240084b0b269941837 yt/analysis_modules/halo_merger_tree/enzofof_merger_tree.py
--- a/yt/analysis_modules/halo_merger_tree/enzofof_merger_tree.py
+++ b/yt/analysis_modules/halo_merger_tree/enzofof_merger_tree.py
@@ -105,6 +105,9 @@
self.parse_halo_catalog()
if cache: self.cache = dict()#MaxLengthDict()
+ def __del__(self):
+ self.particle_file.close()
+
def parse_halo_catalog(self):
hp = []
for line in open("FOF/groups_%05i.dat" % self.output_id):
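The cleanup above is tied to __del__, which only runs once the garbage
collector reclaims the object. A deterministic alternative is
contextlib.closing around the owning object; a sketch using a
hypothetical stand-in class, not part of this changeset:

import contextlib

class ParticleReader(object):
    # Hypothetical stand-in for an object that owns an open file.
    def __init__(self, filename):
        self.particle_file = open(filename)
    def close(self):
        self.particle_file.close()

# closing() invokes reader.close() when the block exits, even on an
# exception, instead of waiting for __del__ at collection time.
with contextlib.closing(ParticleReader('FOF/groups_00000.dat')) as reader:
    for line in reader.particle_file:
        pass  # parse each catalog line here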
diff -r 0b853a9d693b36d2ea7bb8047e2eeeb37ced41f0 -r 280982fcf8ab02fcdc5163240084b0b269941837 yt/convenience.py
--- a/yt/convenience.py
+++ b/yt/convenience.py
@@ -131,6 +131,7 @@
proj[new_name] = b[f][:]
proj.axis = axis
proj.pf = pf
+ f.close()
return proj
def _chunk(arrlike, chunksize = 800000):
diff -r 0b853a9d693b36d2ea7bb8047e2eeeb37ced41f0 -r 280982fcf8ab02fcdc5163240084b0b269941837 yt/data_objects/hierarchy.py
--- a/yt/data_objects/hierarchy.py
+++ b/yt/data_objects/hierarchy.py
@@ -88,6 +88,10 @@
mylog.debug("Re-examining hierarchy")
self._initialize_level_stats()
+ def __del__(self):
+ if self._data_file is not None:
+ self._data_file.close()
+
def _get_parameters(self):
return self.parameter_file.parameters
parameters=property(_get_parameters)
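The None guard keeps the destructor safe when no data file was ever
opened. Resetting the attribute after closing also makes an explicit
close followed by collection harmless; a hypothetical sketch:

class HierarchySketch(object):
    # Hypothetical stand-in; _data_file stays None until a file opens.
    def __init__(self):
        self._data_file = None
    def __del__(self):
        if self._data_file is not None:
            self._data_file.close()
            self._data_file = None  # a later close attempt is a no-op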
diff -r 0b853a9d693b36d2ea7bb8047e2eeeb37ced41f0 -r 280982fcf8ab02fcdc5163240084b0b269941837 yt/frontends/chombo/data_structures.py
--- a/yt/frontends/chombo/data_structures.py
+++ b/yt/frontends/chombo/data_structures.py
@@ -302,7 +302,9 @@
def _is_valid(self, *args, **kwargs):
try:
fileh = h5py.File(args[0],'r')
- return "Chombo_global" in fileh["/"]
+ valid = "Chombo_global" in fileh["/"]
+ fileh.close()
+ return valid
except:
pass
return False
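The handle above is still leaked if the membership test itself raises,
since control jumps to the bare except. A sketch of the same check with
the close guaranteed on every path, using the names from the hunk:

import h5py

def _is_valid(path):
    # A file that cannot be opened at all is not a Chombo file.
    try:
        fileh = h5py.File(path, 'r')
    except Exception:
        return False
    # finally runs after either return, so the handle always closes.
    try:
        return 'Chombo_global' in fileh['/']
    except Exception:
        return False
    finally:
        fileh.close()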
diff -r 0b853a9d693b36d2ea7bb8047e2eeeb37ced41f0 -r 280982fcf8ab02fcdc5163240084b0b269941837 yt/frontends/chombo/io.py
--- a/yt/frontends/chombo/io.py
+++ b/yt/frontends/chombo/io.py
@@ -45,7 +45,9 @@
fhandle = h5py.File(grid.filename,'r')
ncomp = int(fhandle['/'].attrs['num_components'])
- return [c[1] for c in f['/'].attrs.listitems()[-ncomp:]]
+ fns = [c[1] for c in fhandle['/'].attrs.listitems()[-ncomp:]]
+ fhandle.close()
+ return fns
def _read_data_set(self,grid,field):
fhandle = h5py.File(grid.hierarchy.hierarchy_filename,'r')
@@ -61,6 +62,7 @@
stop = start + boxsize
data = lev[self._data_string][start:stop]
+ fhandle.close()
return data.reshape(dims, order='F')
diff -r 0b853a9d693b36d2ea7bb8047e2eeeb37ced41f0 -r 280982fcf8ab02fcdc5163240084b0b269941837 yt/frontends/enzo/data_structures.py
--- a/yt/frontends/enzo/data_structures.py
+++ b/yt/frontends/enzo/data_structures.py
@@ -146,6 +146,7 @@
try:
harray_fp = h5py.File(harray_fn)
self.num_grids = harray_fp["/Level"].len()
+ harray_fp.close()
except IOError:
pass
elif os.path.getsize(self.hierarchy_filename) == 0:
diff -r 0b853a9d693b36d2ea7bb8047e2eeeb37ced41f0 -r 280982fcf8ab02fcdc5163240084b0b269941837 yt/frontends/flash/data_structures.py
--- a/yt/frontends/flash/data_structures.py
+++ b/yt/frontends/flash/data_structures.py
@@ -126,7 +126,8 @@
except KeyError:
self.grid_particle_count[:] = 0.0
self._particle_indices = na.zeros(self.num_grids + 1, dtype='int64')
- na.add.accumulate(self.grid_particle_count, out=self._particle_indices[1:])
+ na.add.accumulate(self.grid_particle_count.squeeze(),
+ out=self._particle_indices[1:])
# This will become redundant, as _prepare_grid will reset it to its
# current value. Note that FLASH uses 1-based indexing for refinement
# levels, but we do not, so we reduce the level by 1.
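The new .squeeze() matters because grid_particle_count is stored as an
(N, 1) column; na.add.accumulate preserves that shape, and the result
cannot be written into the flat out slice of shape (N,). A worked
example with made-up counts:

import numpy as na  # yt aliased numpy as "na" at the time

num_grids = 4
grid_particle_count = na.array([[3], [0], [5], [2]], dtype='int64')
particle_indices = na.zeros(num_grids + 1, dtype='int64')
# Without .squeeze() the accumulated result keeps shape (4, 1) and
# cannot be stored into the (4,)-shaped view particle_indices[1:].
na.add.accumulate(grid_particle_count.squeeze(),
                  out=particle_indices[1:])
print particle_indices  # [ 0  3  3  8 10]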
@@ -274,14 +275,19 @@
def _find_parameter(self, ptype, pname, scalar = False, handle = None):
# We're going to implement handle caching eventually
- if handle is None: handle = self._handle
if handle is None:
+ close = False
+ handle = self._handle
+ if handle is None:
+ close = True
handle = h5py.File(self.parameter_filename, "r")
nn = "/%s %s" % (ptype,
{False: "runtime parameters", True: "scalars"}[scalar])
for tpname, pval in handle[nn][:]:
if tpname.strip() == pname:
+ if close: handle.close()
return pval
+ if close: handle.close()
raise KeyError(pname)
def _parse_parameter_file(self):
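Closing before each exit has to be remembered at every return and every
raise; a try/finally form centralizes it. A sketch of the same method
under the same assumptions (h5py imported at module level, as in this
file):

def _find_parameter(self, ptype, pname, scalar=False, handle=None):
    # Only close a handle this method opened itself.
    close = False
    if handle is None:
        handle = self._handle
    if handle is None:
        close = True
        handle = h5py.File(self.parameter_filename, 'r')
    try:
        nn = "/%s %s" % (ptype,
            {False: "runtime parameters", True: "scalars"}[scalar])
        for tpname, pval in handle[nn][:]:
            if tpname.strip() == pname:
                return pval
        raise KeyError(pname)
    finally:
        # Runs on the found-value return, on the KeyError, and on any
        # unexpected exception alike.
        if close:
            handle.close()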
diff -r 0b853a9d693b36d2ea7bb8047e2eeeb37ced41f0 -r 280982fcf8ab02fcdc5163240084b0b269941837 yt/frontends/flash/io.py
--- a/yt/frontends/flash/io.py
+++ b/yt/frontends/flash/io.py
@@ -46,6 +46,9 @@
enumerate(particle_fields)])
except KeyError:
self._particle_fields = {}
+
+ def __del__(self):
+ self._handle.close()
def _read_particles(self, fields_to_read, type, args, grid_list,
count_list, conv_factors):
diff -r 0b853a9d693b36d2ea7bb8047e2eeeb37ced41f0 -r 280982fcf8ab02fcdc5163240084b0b269941837 yt/frontends/gadget/data_structures.py
--- a/yt/frontends/gadget/data_structures.py
+++ b/yt/frontends/gadget/data_structures.py
@@ -194,7 +194,9 @@
if add1 in fileh['/'].items():
if add2 in fileh['/'+add1].attrs.keys():
if fileh['/'+add1].attrs[add2] == format:
+ fileh.close()
return True
- except h5py.h5e.LowLevelIOError:
+ fileh.close()
+ except:
pass
return False
diff -r 0b853a9d693b36d2ea7bb8047e2eeeb37ced41f0 -r 280982fcf8ab02fcdc5163240084b0b269941837 yt/frontends/gadget/io.py
--- a/yt/frontends/gadget/io.py
+++ b/yt/frontends/gadget/io.py
@@ -45,6 +45,7 @@
adr = grid.Address
fh = h5py.File(grid.filename,mode='r')
rets = cPickle.loads(fh['/root'].attrs['fieldnames'])
+ fh.close()
return rets
def _read_data_slice(self,grid, field, axis, coord):
diff -r 0b853a9d693b36d2ea7bb8047e2eeeb37ced41f0 -r 280982fcf8ab02fcdc5163240084b0b269941837 yt/frontends/gdf/io.py
--- a/yt/frontends/gdf/io.py
+++ b/yt/frontends/gdf/io.py
@@ -41,8 +41,9 @@
def _read_field_names(self,grid):
fhandle = h5py.File(grid.filename,'r')
ncomp = int(fhandle['/'].attrs['num_components'])
-
- return [c[1] for c in f['/'].attrs.listitems()[-ncomp:]]
+ field_names = [c[1] for c in fhandle['/'].attrs.listitems()[-ncomp:]]
+ fhandle.close()
+ return field_names
def _read_data_set(self,grid,field):
fhandle = h5py.File(grid.hierarchy.hierarchy_filename,'r')
@@ -57,7 +58,7 @@
start = grid_offset+field_dict[field]*boxsize
stop = start + boxsize
data = lev[self._data_string][start:stop]
-
+ fhandle.close()
return data.reshape(dims, order='F')
diff -r 0b853a9d693b36d2ea7bb8047e2eeeb37ced41f0 -r 280982fcf8ab02fcdc5163240084b0b269941837 yt/visualization/volume_rendering/grid_partitioner.py
--- a/yt/visualization/volume_rendering/grid_partitioner.py
+++ b/yt/visualization/volume_rendering/grid_partitioner.py
@@ -177,6 +177,7 @@
self.brick_dimensions[i,:],
))
self.bricks = na.array(bricks, dtype='object')
+ f.close()
def reset_cast(self):
pass
diff -r 0b853a9d693b36d2ea7bb8047e2eeeb37ced41f0 -r 280982fcf8ab02fcdc5163240084b0b269941837 yt/visualization/volume_rendering/image_handling.py
--- a/yt/visualization/volume_rendering/image_handling.py
+++ b/yt/visualization/volume_rendering/image_handling.py
@@ -71,6 +71,7 @@
g = f['G'].value
b = f['B'].value
a = f['A'].value
+ f.close()
else:
print 'No support for fits import.'
return na.array([r,g,b,a]).swapaxes(0,2).swapaxes(0,1)
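If the installed h5py is recent enough for File to act as a context
manager, the close becomes automatic; a sketch of the channel read
above in that style (the function name is hypothetical):

import h5py
import numpy as na

def import_rgba_sketch(filename):
    # The with block closes the handle even if a channel is missing.
    with h5py.File(filename, 'r') as f:
        r = f['R'].value
        g = f['G'].value
        b = f['B'].value
        a = f['A'].value
    return na.array([r, g, b, a]).swapaxes(0, 2).swapaxes(0, 1)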
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving it
because you have the commit notification service enabled for this
repository.