[yt-svn] commit/yt: MatthewTurk: Merged in ngoldbaum/yt/yt-3.0 (pull request #947)
commits-noreply at bitbucket.org
commits-noreply at bitbucket.org
Wed Jun 18 17:10:48 PDT 2014
1 new commit in yt:
https://bitbucket.org/yt_analysis/yt/commits/88bcdc5ae09c/
Changeset: 88bcdc5ae09c
Branch: yt-3.0
User: MatthewTurk
Date: 2014-06-19 02:10:40
Summary: Merged in ngoldbaum/yt/yt-3.0 (pull request #947)
Ensure Dataset and Index instances do not participate in reference cycles
Affected #: 9 files
diff -r 5a31577825d925d3795e8587a3106771d5b61dae -r 88bcdc5ae09cfa02cca341c77bf10e457c3dccb9 yt/data_objects/grid_patch.py
--- a/yt/data_objects/grid_patch.py
+++ b/yt/data_objects/grid_patch.py
@@ -56,7 +56,7 @@
self.id = id
self._child_mask = self._child_indices = self._child_index_mask = None
self.pf = index.parameter_file
- self._index = index
+ self._index = weakref.proxy(index)
self.start_index = None
self.filename = filename
self._last_mask = None
diff -r 5a31577825d925d3795e8587a3106771d5b61dae -r 88bcdc5ae09cfa02cca341c77bf10e457c3dccb9 yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -498,7 +498,7 @@
continue
cname = cls.__name__
if cname.endswith("Base"): cname = cname[:-4]
- self._add_object_class(name, cname, cls, {'pf':self})
+ self._add_object_class(name, cname, cls, {'pf':weakref.proxy(self)})
if self.refine_by != 2 and hasattr(self, 'proj') and \
hasattr(self, 'overlap_proj'):
mylog.warning("Refine by something other than two: reverting to"
diff -r 5a31577825d925d3795e8587a3106771d5b61dae -r 88bcdc5ae09cfa02cca341c77bf10e457c3dccb9 yt/frontends/chombo/data_structures.py
--- a/yt/frontends/chombo/data_structures.py
+++ b/yt/frontends/chombo/data_structures.py
@@ -33,6 +33,8 @@
Dataset
from yt.utilities.definitions import \
mpc_conversion, sec_conversion
+from yt.utilities.file_handler import \
+ HDF5FileHandler
from yt.utilities.parallel_tools.parallel_analysis_interface import \
parallel_root_only
from yt.utilities.lib.misc_utilities import \
@@ -241,7 +243,7 @@
def __init__(self, filename, dataset_type='chombo_hdf5',
storage_filename = None, ini_filename = None):
self.fluid_types += ("chombo",)
- self._handle = h5py.File(filename, 'r')
+ self._handle = HDF5FileHandler(filename)
# look up the dimensionality of the dataset
D = self._handle['Chombo_global/'].attrs['SpaceDim']
@@ -271,9 +273,6 @@
self.parameters["DualEnergyFormalism"] = 0
self.parameters["EOSType"] = -1 # default
- def __del__(self):
- self._handle.close()
-
def _set_code_unit_attributes(self):
self.length_unit = YTQuantity(1.0, "cm")
self.mass_unit = YTQuantity(1.0, "g")
@@ -316,11 +315,10 @@
return LE
def _calc_right_edge(self):
- fileh = h5py.File(self.parameter_filename,'r')
+ fileh = self._handle
dx0 = fileh['/level_0'].attrs['dx']
D = self.dimensionality
RE = dx0*((np.array(list(fileh['/level_0'].attrs['prob_domain'])))[D:] + 1)
- fileh.close()
return RE
def _calc_domain_dimensions(self):
diff -r 5a31577825d925d3795e8587a3106771d5b61dae -r 88bcdc5ae09cfa02cca341c77bf10e457c3dccb9 yt/frontends/fits/data_structures.py
--- a/yt/frontends/fits/data_structures.py
+++ b/yt/frontends/fits/data_structures.py
@@ -29,6 +29,8 @@
YTDataChunk
from yt.data_objects.static_output import \
Dataset
+from yt.utilities.file_handler import \
+ FITSFileHandler
from yt.utilities.io_handler import \
io_registry
from .fields import FITSFieldInfo
@@ -153,7 +155,7 @@
naxis4 = self.parameter_file.primary_header["naxis4"]
else:
naxis4 = 1
- for i, fits_file in enumerate(self.parameter_file._fits_files):
+ for i, fits_file in enumerate(self.parameter_file._handle._fits_files):
for j, hdu in enumerate(fits_file):
if self._ensure_same_dims(hdu):
units = self._determine_image_units(hdu.header, known_units)
@@ -355,15 +357,12 @@
elif isinstance(nan_mask, dict):
self.nan_mask = nan_mask
if isinstance(self.filenames[0], _astropy.pyfits.PrimaryHDU):
- self._handle = _astropy.pyfits.HDUList(self.filenames[0])
+ self._handle = FITSFileHandler(self.filenames[0])
fn = "InMemoryFITSImage_%s" % (uuid.uuid4().hex)
else:
- self._handle = _astropy.pyfits.open(self.filenames[0],
- memmap=True,
- do_not_scale_image_data=True,
- ignore_blank=True)
+ self._handle = FITSFileHandler(self.filenames[0])
fn = self.filenames[0]
- self._fits_files = [self._handle]
+ self._handle._fits_files = [self._handle]
if self.num_files > 1:
for fits_file in auxiliary_files:
if os.path.exists(fits_file):
@@ -373,7 +372,7 @@
f = _astropy.pyfits.open(fn, memmap=True,
do_not_scale_image_data=True,
ignore_blank=True)
- self._fits_files.append(f)
+ self._handle._fits_files.append(f)
if len(self._handle) > 1 and self._handle[1].name == "EVENTS":
self.events_data = True
@@ -648,13 +647,6 @@
return self.arr((pv.v-self._p0)*self._dz+self._z0,
self.spec_unit)
- def __del__(self):
- for f in self._fits_files:
- f.close()
- del file
- self._handle.close()
- del self._handle
-
@classmethod
def _is_valid(cls, *args, **kwargs):
ext = args[0].rsplit(".", 1)[-1]
diff -r 5a31577825d925d3795e8587a3106771d5b61dae -r 88bcdc5ae09cfa02cca341c77bf10e457c3dccb9 yt/frontends/flash/data_structures.py
--- a/yt/frontends/flash/data_structures.py
+++ b/yt/frontends/flash/data_structures.py
@@ -30,6 +30,8 @@
Dataset
from yt.utilities.definitions import \
mpc_conversion, sec_conversion
+from yt.utilities.file_handler import \
+ HDF5FileHandler
from yt.utilities.io_handler import \
io_registry
from yt.utilities.physical_constants import cm_per_mpc
@@ -183,7 +185,7 @@
self.fluid_types += ("flash",)
if self._handle is not None: return
- self._handle = h5py.File(filename, "r")
+ self._handle = HDF5FileHandler(filename)
if conversion_override is None: conversion_override = {}
self._conversion_override = conversion_override
@@ -192,15 +194,15 @@
if self.particle_filename is None :
self._particle_handle = self._handle
else :
- try :
- self._particle_handle = h5py.File(self.particle_filename, "r")
+ try:
+ self._particle_handle = HDF5FileHandler(self.particle_filename)
except :
raise IOError(self.particle_filename)
# These should be explicitly obtained from the file, but for now that
# will wait until a reorganization of the source tree and better
# generalization.
self.refine_by = 2
-
+
Dataset.__init__(self, filename, dataset_type)
self.storage_filename = storage_filename
@@ -384,19 +386,12 @@
self.current_redshift = self.omega_lambda = self.omega_matter = \
self.hubble_constant = self.cosmological_simulation = 0.0
- def __del__(self):
- if self._handle is not self._particle_handle:
- self._particle_handle.close()
- self._handle.close()
-
@classmethod
def _is_valid(self, *args, **kwargs):
try:
- fileh = h5py.File(args[0],'r')
+ fileh = HDF5FileHandler(args[0])
if "bounding box" in fileh["/"].keys():
- fileh.close()
return True
- fileh.close()
except:
pass
return False
diff -r 5a31577825d925d3795e8587a3106771d5b61dae -r 88bcdc5ae09cfa02cca341c77bf10e457c3dccb9 yt/frontends/pluto/data_structures.py
--- a/yt/frontends/pluto/data_structures.py
+++ b/yt/frontends/pluto/data_structures.py
@@ -13,7 +13,6 @@
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
-import h5py
import re
import os
import weakref
@@ -38,6 +37,8 @@
Dataset
from yt.utilities.definitions import \
mpc_conversion, sec_conversion
+from yt.utilities.file_handler import \
+ HDF5FileHandler
from yt.utilities.parallel_tools.parallel_analysis_interface import \
parallel_root_only
from yt.utilities.io_handler import \
@@ -167,7 +168,7 @@
def __init__(self, filename, dataset_type='pluto_hdf5',
storage_filename = None, ini_filename = None):
- self._handle = h5py.File(filename,'r')
+ self._handle = HDF5FileHandler(filename)
self.current_time = self._handle.attrs['time']
self.ini_filename = ini_filename
self.fullplotdir = os.path.abspath(filename)
@@ -180,9 +181,6 @@
self.parameters["DualEnergyFormalism"] = 0
self.parameters["EOSType"] = -1 # default
- def __del__(self):
- self._handle.close()
-
def _set_units(self):
"""
Generates the conversion to various physical _units based on the parameter file
diff -r 5a31577825d925d3795e8587a3106771d5b61dae -r 88bcdc5ae09cfa02cca341c77bf10e457c3dccb9 yt/utilities/file_handler.py
--- /dev/null
+++ b/yt/utilities/file_handler.py
@@ -0,0 +1,57 @@
+"""
+A wrapper class for h5py file objects.
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+
class HDF5FileHandler(object):
    """Thin ownership wrapper around an open ``h5py.File``.

    Centralizes file lifetime so frontend Dataset classes no longer need
    their own ``__del__`` methods (which previously kept handles alive via
    reference cycles).  Item access, ``in`` tests, ``len`` and ``attrs``
    all delegate to the underlying h5py file object.
    """

    def __init__(self, filename):
        # 'handle' is the only piece of state; all protocol methods
        # delegate to it.
        self.handle = h5py.File(filename, 'r')

    def close(self):
        """Close the underlying file.  Safe to call more than once."""
        # getattr guard: __init__ may have raised before 'handle' was
        # bound, and close() may already have run.
        handle = getattr(self, 'handle', None)
        if handle is not None:
            handle.close()
            self.handle = None

    def __del__(self):
        # Best-effort cleanup at garbage collection; explicit close()
        # is preferred but not required.
        self.close()

    def __getitem__(self, key):
        return self.handle[key]

    def __contains__(self, item):
        return item in self.handle

    def __len__(self):
        return len(self.handle)

    @property
    def attrs(self):
        # Expose the file's HDF5 attributes mapping directly.
        return self.handle.attrs

    @property
    def keys(self):
        # NOTE: deliberately returns the *bound method* so callers can
        # write fh.keys(), mirroring the h5py.File API.
        return self.handle.keys
+
class FITSFileHandler(HDF5FileHandler):
    """File handler wrapping an astropy ``HDUList``.

    Reuses HDF5FileHandler's delegation protocol; only construction and
    teardown differ.  Auxiliary FITS files opened later by the frontend
    are tracked in ``_fits_files`` so teardown can close them.
    """

    def __init__(self, filename):
        from yt.utilities.on_demand_imports import _astropy
        if isinstance(filename, _astropy.pyfits.PrimaryHDU):
            # Already an in-memory HDU: wrap it rather than re-opening.
            self.handle = _astropy.pyfits.HDUList(filename)
        else:
            self.handle = _astropy.pyfits.open(
                filename, memmap=True, do_not_scale_image_data=True,
                ignore_blank=True)
        # Initialize eagerly: previously this attribute was only set by
        # the caller, so __del__ raised AttributeError whenever no
        # auxiliary files had been attached.
        self._fits_files = []

    def __del__(self):
        for aux in getattr(self, '_fits_files', []):
            # The caller may seed this list with the handler itself; the
            # primary handle is closed by the base class, so skip it here
            # to avoid calling close() on an object that lacks one.
            if aux is not self:
                aux.close()
        super(FITSFileHandler, self).__del__()
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
this email because the commit-notification service is enabled for this
repository and lists you as a recipient.
More information about the yt-svn
mailing list