[yt-svn] commit/yt: 12 new changesets
commits-noreply at bitbucket.org
commits-noreply at bitbucket.org
Wed Jun 18 17:10:47 PDT 2014
12 new commits in yt:
https://bitbucket.org/yt_analysis/yt/commits/c77dac9d2652/
Changeset: c77dac9d2652
Branch: yt-3.0
User: ngoldbaum
Date: 2014-06-12 07:04:50
Summary: Making FLASHDataset indirectly keep track of the h5py file handle.
Affected #: 3 files
diff -r d1de2160a4a81ac968d3bc382a778ef956a260a1 -r c77dac9d265296d9c0eda391e6170464e83a31cb yt/frontends/flash/data_structures.py
--- a/yt/frontends/flash/data_structures.py
+++ b/yt/frontends/flash/data_structures.py
@@ -30,6 +30,8 @@
Dataset
from yt.utilities.definitions import \
mpc_conversion, sec_conversion
+from yt.utilities.file_handler import \
+ FileHandler
from yt.utilities.io_handler import \
io_registry
from yt.utilities.physical_constants import cm_per_mpc
@@ -69,18 +71,18 @@
pass
def _detect_output_fields(self):
- ncomp = self._handle["/unknown names"].shape[0]
- self.field_list = [("flash", s) for s in self._handle["/unknown names"][:].flat]
- if ("/particle names" in self._particle_handle) :
+ ncomp = self._handle.handle["/unknown names"].shape[0]
+ self.field_list = [("flash", s) for s in self._handle.handle["/unknown names"][:].flat]
+ if ("/particle names" in self._particle_handle.handle) :
self.field_list += [("io", "particle_" + s[0].strip()) for s
- in self._particle_handle["/particle names"][:]]
+ in self._particle_handle.handle["/particle names"][:]]
def _count_grids(self):
try:
self.num_grids = self.parameter_file._find_parameter(
"integer", "globalnumblocks", True)
except KeyError:
- self.num_grids = self._handle["/simulation parameters"][0][0]
+ self.num_grids = self._handle.handle["/simulation parameters"][0][0]
def _parse_index(self):
f = self._handle # shortcut
@@ -95,19 +97,19 @@
self.grid_left_edge[:,i] = DLE[i]
self.grid_right_edge[:,i] = DRE[i]
# We only go up to ND for 2D datasets
- self.grid_left_edge[:,:ND] = f["/bounding box"][:,:ND,0]
- self.grid_right_edge[:,:ND] = f["/bounding box"][:,:ND,1]
+ self.grid_left_edge[:,:ND] = f.handle["/bounding box"][:,:ND,0]
+ self.grid_right_edge[:,:ND] = f.handle["/bounding box"][:,:ND,1]
# Move this to the parameter file
try:
nxb = pf.parameters['nxb']
nyb = pf.parameters['nyb']
nzb = pf.parameters['nzb']
except KeyError:
- nxb, nyb, nzb = [int(f["/simulation parameters"]['n%sb' % ax])
+ nxb, nyb, nzb = [int(f.handle["/simulation parameters"]['n%sb' % ax])
for ax in 'xyz']
self.grid_dimensions[:] *= (nxb, nyb, nzb)
try:
- self.grid_particle_count[:] = f_part["/localnp"][:][:,None]
+ self.grid_particle_count[:] = f_part.handle["/localnp"][:][:,None]
except KeyError:
self.grid_particle_count[:] = 0.0
self._particle_indices = np.zeros(self.num_grids + 1, dtype='int64')
@@ -119,7 +121,7 @@
# This will become redundant, as _prepare_grid will reset it to its
# current value. Note that FLASH uses 1-based indexing for refinement
# levels, but we do not, so we reduce the level by 1.
- self.grid_levels.flat[:] = f["/refine level"][:][:] - 1
+ self.grid_levels.flat[:] = f.handle["/refine level"][:][:] - 1
self.grids = np.empty(self.num_grids, dtype='object')
for i in xrange(self.num_grids):
self.grids[i] = self.grid(i+1, self, self.grid_levels[i,0])
@@ -149,7 +151,7 @@
offset = 7
ii = np.argsort(self.grid_levels.flat)
- gid = self._handle["/gid"][:]
+ gid = self._handle.handle["/gid"][:]
first_ind = -(self.parameter_file.refine_by**self.parameter_file.dimensionality)
for g in self.grids[ii].flat:
gi = g.id - g._id_offset
@@ -183,7 +185,7 @@
self.fluid_types += ("flash",)
if self._handle is not None: return
- self._handle = h5py.File(filename, "r")
+ self._handle = FileHandler(filename)
if conversion_override is None: conversion_override = {}
self._conversion_override = conversion_override
@@ -192,15 +194,15 @@
if self.particle_filename is None :
self._particle_handle = self._handle
else :
- try :
- self._particle_handle = h5py.File(self.particle_filename, "r")
+ try:
+ self._particle_handle = FileHandler(self.particle_filename)
except :
raise IOError(self.particle_filename)
# These should be explicitly obtained from the file, but for now that
# will wait until a reorganization of the source tree and better
# generalization.
self.refine_by = 2
-
+
Dataset.__init__(self, filename, dataset_type)
self.storage_filename = storage_filename
@@ -241,9 +243,9 @@
def _find_parameter(self, ptype, pname, scalar = False):
nn = "/%s %s" % (ptype,
{False: "runtime parameters", True: "scalars"}[scalar])
- if nn not in self._handle: raise KeyError(nn)
- for tpname, pval in zip(self._handle[nn][:,'name'],
- self._handle[nn][:,'value']):
+ if nn not in self._handle.handle: raise KeyError(nn)
+ for tpname, pval in zip(self._handle.handle[nn][:,'name'],
+ self._handle.handle[nn][:,'value']):
if tpname.strip() == pname:
if ptype == "string" :
return pval.strip()
@@ -254,12 +256,12 @@
def _parse_parameter_file(self):
self.unique_identifier = \
int(os.stat(self.parameter_filename)[stat.ST_CTIME])
- if "file format version" in self._handle:
+ if "file format version" in self._handle.handle:
self._flash_version = int(
- self._handle["file format version"][:])
- elif "sim info" in self._handle:
+ self._handle.handle["file format version"][:])
+ elif "sim info" in self._handle.handle:
self._flash_version = int(
- self._handle["sim info"][:]["file format version"])
+ self._handle.handle["sim info"][:]["file format version"])
else:
raise RuntimeError("Can't figure out FLASH file version.")
# First we load all of the parameters
@@ -271,10 +273,10 @@
hns.append("%s %s" % (vtype, ptype))
if self._flash_version > 7:
for hn in hns:
- if hn not in self._handle:
+ if hn not in self._handle.handle:
continue
- for varname, val in zip(self._handle[hn][:,'name'],
- self._handle[hn][:,'value']):
+ for varname, val in zip(self._handle.handle[hn][:,'name'],
+ self._handle.handle[hn][:,'value']):
vn = varname.strip()
if hn.startswith("string") :
pval = val.strip()
@@ -285,12 +287,12 @@
self.parameters[vn] = pval
if self._flash_version == 7:
for hn in hns:
- if hn not in self._handle:
+ if hn not in self._handle.handle:
continue
if hn is 'simulation parameters':
- zipover = zip(self._handle[hn].dtype.names,self._handle[hn][0])
+ zipover = zip(self._handle.handle[hn].dtype.names,self._handle.handle[hn][0])
else:
- zipover = zip(self._handle[hn][:,'name'],self._handle[hn][:,'value'])
+ zipover = zip(self._handle.handle[hn][:,'name'],self._handle.handle[hn][:,'value'])
for varname, val in zipover:
vn = varname.strip()
if hn.startswith("string") :
@@ -307,7 +309,7 @@
nyb = self.parameters["nyb"]
nzb = self.parameters["nzb"]
except KeyError:
- nxb, nyb, nzb = [int(self._handle["/simulation parameters"]['n%sb' % ax])
+ nxb, nyb, nzb = [int(self._handle.handle["/simulation parameters"]['n%sb' % ax])
for ax in 'xyz'] # FLASH2 only!
# Determine dimensionality
@@ -384,19 +386,12 @@
self.current_redshift = self.omega_lambda = self.omega_matter = \
self.hubble_constant = self.cosmological_simulation = 0.0
- def __del__(self):
- if self._handle is not self._particle_handle:
- self._particle_handle.close()
- self._handle.close()
-
@classmethod
def _is_valid(self, *args, **kwargs):
try:
- fileh = h5py.File(args[0],'r')
- if "bounding box" in fileh["/"].keys():
- fileh.close()
+ fileh = FileHandler(args[0])
+ if "bounding box" in fileh.handle["/"].keys():
return True
- fileh.close()
except:
pass
return False
diff -r d1de2160a4a81ac968d3bc382a778ef956a260a1 -r c77dac9d265296d9c0eda391e6170464e83a31cb yt/frontends/flash/io.py
--- a/yt/frontends/flash/io.py
+++ b/yt/frontends/flash/io.py
@@ -42,8 +42,8 @@
def __init__(self, pf):
super(IOHandlerFLASH, self).__init__(pf)
# Now we cache the particle fields
- self._handle = pf._handle
- self._particle_handle = pf._particle_handle
+ self._handle = pf._handle.handle
+ self._particle_handle = pf._particle_handle.handle
try :
particle_fields = [s[0].strip() for s in
diff -r d1de2160a4a81ac968d3bc382a778ef956a260a1 -r c77dac9d265296d9c0eda391e6170464e83a31cb yt/utilities/file_handler.py
--- /dev/null
+++ b/yt/utilities/file_handler.py
@@ -0,0 +1,24 @@
+"""
+A wrapper class for h5py file objects.
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+
+class FileHandler(object):
+ def __init__(self, filename):
+ self.handle = h5py.File(filename, 'r')
+
+ def __del__(self):
+ self.handle.close()
+
https://bitbucket.org/yt_analysis/yt/commits/8e973fe46c9d/
Changeset: 8e973fe46c9d
Branch: yt-3.0
User: ngoldbaum
Date: 2014-06-12 07:06:17
Summary: Adding weakrefs to help avoid reference cycles.
Affected #: 3 files
diff -r c77dac9d265296d9c0eda391e6170464e83a31cb -r 8e973fe46c9d95ef425dc203c6a36f5064a779c8 yt/data_objects/grid_patch.py
--- a/yt/data_objects/grid_patch.py
+++ b/yt/data_objects/grid_patch.py
@@ -56,7 +56,7 @@
self.id = id
self._child_mask = self._child_indices = self._child_index_mask = None
self.pf = index.parameter_file
- self._index = index
+ self._index = weakref.proxy(index)
self.start_index = None
self.filename = filename
self._last_mask = None
diff -r c77dac9d265296d9c0eda391e6170464e83a31cb -r 8e973fe46c9d95ef425dc203c6a36f5064a779c8 yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -498,7 +498,7 @@
continue
cname = cls.__name__
if cname.endswith("Base"): cname = cname[:-4]
- self._add_object_class(name, cname, cls, {'pf':self})
+ self._add_object_class(name, cname, cls, {'pf':weakref.proxy(self)})
if self.refine_by != 2 and hasattr(self, 'proj') and \
hasattr(self, 'overlap_proj'):
mylog.warning("Refine by something other than two: reverting to"
diff -r c77dac9d265296d9c0eda391e6170464e83a31cb -r 8e973fe46c9d95ef425dc203c6a36f5064a779c8 yt/fields/field_detector.py
--- a/yt/fields/field_detector.py
+++ b/yt/fields/field_detector.py
@@ -58,7 +58,7 @@
pf.domain_right_edge = np.ones(3, 'float64')
pf.dimensionality = 3
pf.periodicity = (True, True, True)
- self.pf = pf
+ self.pf = weakref.proxy(pf)
class fake_index(object):
class fake_io(object):
https://bitbucket.org/yt_analysis/yt/commits/b6525013f9e3/
Changeset: b6525013f9e3
Branch: yt-3.0
User: ngoldbaum
Date: 2014-06-12 07:19:27
Summary: Making FileHandler a bit fancier to avoid rewriting a lot of code.
Affected #: 3 files
diff -r c77dac9d265296d9c0eda391e6170464e83a31cb -r b6525013f9e3fd72ab3f77e0746cfc146fd239df yt/frontends/flash/data_structures.py
--- a/yt/frontends/flash/data_structures.py
+++ b/yt/frontends/flash/data_structures.py
@@ -71,18 +71,18 @@
pass
def _detect_output_fields(self):
- ncomp = self._handle.handle["/unknown names"].shape[0]
- self.field_list = [("flash", s) for s in self._handle.handle["/unknown names"][:].flat]
- if ("/particle names" in self._particle_handle.handle) :
+ ncomp = self._handle["/unknown names"].shape[0]
+ self.field_list = [("flash", s) for s in self._handle["/unknown names"][:].flat]
+ if ("/particle names" in self._particle_handle) :
self.field_list += [("io", "particle_" + s[0].strip()) for s
- in self._particle_handle.handle["/particle names"][:]]
+ in self._particle_handle["/particle names"][:]]
def _count_grids(self):
try:
self.num_grids = self.parameter_file._find_parameter(
"integer", "globalnumblocks", True)
except KeyError:
- self.num_grids = self._handle.handle["/simulation parameters"][0][0]
+ self.num_grids = self._handle["/simulation parameters"][0][0]
def _parse_index(self):
f = self._handle # shortcut
@@ -151,7 +151,7 @@
offset = 7
ii = np.argsort(self.grid_levels.flat)
- gid = self._handle.handle["/gid"][:]
+ gid = self._handle["/gid"][:]
first_ind = -(self.parameter_file.refine_by**self.parameter_file.dimensionality)
for g in self.grids[ii].flat:
gi = g.id - g._id_offset
@@ -243,9 +243,9 @@
def _find_parameter(self, ptype, pname, scalar = False):
nn = "/%s %s" % (ptype,
{False: "runtime parameters", True: "scalars"}[scalar])
- if nn not in self._handle.handle: raise KeyError(nn)
- for tpname, pval in zip(self._handle.handle[nn][:,'name'],
- self._handle.handle[nn][:,'value']):
+ if nn not in self._handle: raise KeyError(nn)
+ for tpname, pval in zip(self._handle[nn][:,'name'],
+ self._handle[nn][:,'value']):
if tpname.strip() == pname:
if ptype == "string" :
return pval.strip()
@@ -256,12 +256,12 @@
def _parse_parameter_file(self):
self.unique_identifier = \
int(os.stat(self.parameter_filename)[stat.ST_CTIME])
- if "file format version" in self._handle.handle:
+ if "file format version" in self._handle:
self._flash_version = int(
- self._handle.handle["file format version"][:])
- elif "sim info" in self._handle.handle:
+ self._handle["file format version"][:])
+ elif "sim info" in self._handle:
self._flash_version = int(
- self._handle.handle["sim info"][:]["file format version"])
+ self._handle["sim info"][:]["file format version"])
else:
raise RuntimeError("Can't figure out FLASH file version.")
# First we load all of the parameters
@@ -273,10 +273,10 @@
hns.append("%s %s" % (vtype, ptype))
if self._flash_version > 7:
for hn in hns:
- if hn not in self._handle.handle:
+ if hn not in self._handle:
continue
- for varname, val in zip(self._handle.handle[hn][:,'name'],
- self._handle.handle[hn][:,'value']):
+ for varname, val in zip(self._handle[hn][:,'name'],
+ self._handle[hn][:,'value']):
vn = varname.strip()
if hn.startswith("string") :
pval = val.strip()
@@ -287,12 +287,12 @@
self.parameters[vn] = pval
if self._flash_version == 7:
for hn in hns:
- if hn not in self._handle.handle:
+ if hn not in self._handle:
continue
if hn is 'simulation parameters':
- zipover = zip(self._handle.handle[hn].dtype.names,self._handle.handle[hn][0])
+ zipover = zip(self._handle[hn].dtype.names,self._handle[hn][0])
else:
- zipover = zip(self._handle.handle[hn][:,'name'],self._handle.handle[hn][:,'value'])
+ zipover = zip(self._handle[hn][:,'name'],self._handle[hn][:,'value'])
for varname, val in zipover:
vn = varname.strip()
if hn.startswith("string") :
@@ -309,7 +309,7 @@
nyb = self.parameters["nyb"]
nzb = self.parameters["nzb"]
except KeyError:
- nxb, nyb, nzb = [int(self._handle.handle["/simulation parameters"]['n%sb' % ax])
+ nxb, nyb, nzb = [int(self._handle["/simulation parameters"]['n%sb' % ax])
for ax in 'xyz'] # FLASH2 only!
# Determine dimensionality
diff -r c77dac9d265296d9c0eda391e6170464e83a31cb -r b6525013f9e3fd72ab3f77e0746cfc146fd239df yt/frontends/flash/io.py
--- a/yt/frontends/flash/io.py
+++ b/yt/frontends/flash/io.py
@@ -42,8 +42,8 @@
def __init__(self, pf):
super(IOHandlerFLASH, self).__init__(pf)
# Now we cache the particle fields
- self._handle = pf._handle.handle
- self._particle_handle = pf._particle_handle.handle
+ self._handle = pf._handle
+ self._particle_handle = pf._particle_handle
try :
particle_fields = [s[0].strip() for s in
diff -r c77dac9d265296d9c0eda391e6170464e83a31cb -r b6525013f9e3fd72ab3f77e0746cfc146fd239df yt/utilities/file_handler.py
--- a/yt/utilities/file_handler.py
+++ b/yt/utilities/file_handler.py
@@ -22,3 +22,8 @@
def __del__(self):
self.handle.close()
+ def __getitem__(self, key):
+ return self.handle[key]
+
+ def __contains__(self, item):
+ return item in self.handle
https://bitbucket.org/yt_analysis/yt/commits/d34375f2069d/
Changeset: d34375f2069d
Branch: yt-3.0
User: ngoldbaum
Date: 2014-06-12 07:19:49
Summary: Merging.
Affected #: 3 files
diff -r b6525013f9e3fd72ab3f77e0746cfc146fd239df -r d34375f2069d16dfd5551be50dcb368d4b4317e6 yt/data_objects/grid_patch.py
--- a/yt/data_objects/grid_patch.py
+++ b/yt/data_objects/grid_patch.py
@@ -56,7 +56,7 @@
self.id = id
self._child_mask = self._child_indices = self._child_index_mask = None
self.pf = index.parameter_file
- self._index = index
+ self._index = weakref.proxy(index)
self.start_index = None
self.filename = filename
self._last_mask = None
diff -r b6525013f9e3fd72ab3f77e0746cfc146fd239df -r d34375f2069d16dfd5551be50dcb368d4b4317e6 yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -498,7 +498,7 @@
continue
cname = cls.__name__
if cname.endswith("Base"): cname = cname[:-4]
- self._add_object_class(name, cname, cls, {'pf':self})
+ self._add_object_class(name, cname, cls, {'pf':weakref.proxy(self)})
if self.refine_by != 2 and hasattr(self, 'proj') and \
hasattr(self, 'overlap_proj'):
mylog.warning("Refine by something other than two: reverting to"
diff -r b6525013f9e3fd72ab3f77e0746cfc146fd239df -r d34375f2069d16dfd5551be50dcb368d4b4317e6 yt/fields/field_detector.py
--- a/yt/fields/field_detector.py
+++ b/yt/fields/field_detector.py
@@ -58,7 +58,7 @@
pf.domain_right_edge = np.ones(3, 'float64')
pf.dimensionality = 3
pf.periodicity = (True, True, True)
- self.pf = pf
+ self.pf = weakref.proxy(pf)
class fake_index(object):
class fake_io(object):
https://bitbucket.org/yt_analysis/yt/commits/f1bc20761bd4/
Changeset: f1bc20761bd4
Branch: yt-3.0
User: ngoldbaum
Date: 2014-06-12 07:23:32
Summary: Updating chombo to use FileHandler
Affected #: 1 file
diff -r d34375f2069d16dfd5551be50dcb368d4b4317e6 -r f1bc20761bd4e84225685b55cf3a39c9af403769 yt/frontends/chombo/data_structures.py
--- a/yt/frontends/chombo/data_structures.py
+++ b/yt/frontends/chombo/data_structures.py
@@ -33,6 +33,8 @@
Dataset
from yt.utilities.definitions import \
mpc_conversion, sec_conversion
+from yt.utilities.file_handler import \
+ FileHandler
from yt.utilities.parallel_tools.parallel_analysis_interface import \
parallel_root_only
from yt.utilities.lib.misc_utilities import \
@@ -241,7 +243,7 @@
def __init__(self, filename, dataset_type='chombo_hdf5',
storage_filename = None, ini_filename = None):
self.fluid_types += ("chombo",)
- self._handle = h5py.File(filename, 'r')
+ self._handle = FileHandler(filename)
# look up the dimensionality of the dataset
D = self._handle['Chombo_global/'].attrs['SpaceDim']
@@ -271,9 +273,6 @@
self.parameters["DualEnergyFormalism"] = 0
self.parameters["EOSType"] = -1 # default
- def __del__(self):
- self._handle.close()
-
def _set_code_unit_attributes(self):
self.length_unit = YTQuantity(1.0, "cm")
self.mass_unit = YTQuantity(1.0, "g")
@@ -316,11 +315,10 @@
return LE
def _calc_right_edge(self):
- fileh = h5py.File(self.parameter_filename,'r')
+ fileh = self._handle
dx0 = fileh['/level_0'].attrs['dx']
D = self.dimensionality
RE = dx0*((np.array(list(fileh['/level_0'].attrs['prob_domain'])))[D:] + 1)
- fileh.close()
return RE
def _calc_domain_dimensions(self):
https://bitbucket.org/yt_analysis/yt/commits/b85583c53d56/
Changeset: b85583c53d56
Branch: yt-3.0
User: ngoldbaum
Date: 2014-06-12 07:35:02
Summary: Updating FITS
Affected #: 2 files
diff -r f1bc20761bd4e84225685b55cf3a39c9af403769 -r b85583c53d563cdd8d261656924b362b1c74cf6b yt/frontends/fits/data_structures.py
--- a/yt/frontends/fits/data_structures.py
+++ b/yt/frontends/fits/data_structures.py
@@ -29,6 +29,8 @@
YTDataChunk
from yt.data_objects.static_output import \
Dataset
+from yt.utilities.file_handler import \
+ FITSFileHandler
from yt.utilities.io_handler import \
io_registry
from .fields import FITSFieldInfo
@@ -153,7 +155,7 @@
naxis4 = self.parameter_file.primary_header["naxis4"]
else:
naxis4 = 1
- for i, fits_file in enumerate(self.parameter_file._fits_files):
+ for i, fits_file in enumerate(self.parameter_file._handle._fits_files):
for j, hdu in enumerate(fits_file):
if self._ensure_same_dims(hdu):
units = self._determine_image_units(hdu.header, known_units)
@@ -355,15 +357,12 @@
elif isinstance(nan_mask, dict):
self.nan_mask = nan_mask
if isinstance(self.filenames[0], _astropy.pyfits.PrimaryHDU):
- self._handle = _astropy.pyfits.HDUList(self.filenames[0])
+ self._handle = FITSFileHandler(self.filenames[0])
fn = "InMemoryFITSImage_%s" % (uuid.uuid4().hex)
else:
- self._handle = _astropy.pyfits.open(self.filenames[0],
- memmap=True,
- do_not_scale_image_data=True,
- ignore_blank=True)
+ self._handle = FITSFileHandler(self.filenames[0])
fn = self.filenames[0]
- self._fits_files = [self._handle]
+ self._handle._fits_files = [self._handle]
if self.num_files > 1:
for fits_file in auxiliary_files:
if os.path.exists(fits_file):
@@ -373,7 +372,7 @@
f = _astropy.pyfits.open(fn, memmap=True,
do_not_scale_image_data=True,
ignore_blank=True)
- self._fits_files.append(f)
+ self._handle._fits_files.append(f)
if len(self._handle) > 1 and self._handle[1].name == "EVENTS":
self.events_data = True
@@ -648,13 +647,6 @@
return self.arr((pv.v-self._p0)*self._dz+self._z0,
self.spec_unit)
- def __del__(self):
- for f in self._fits_files:
- f.close()
- del file
- self._handle.close()
- del self._handle
-
@classmethod
def _is_valid(cls, *args, **kwargs):
ext = args[0].rsplit(".", 1)[-1]
diff -r f1bc20761bd4e84225685b55cf3a39c9af403769 -r b85583c53d563cdd8d261656924b362b1c74cf6b yt/utilities/file_handler.py
--- a/yt/utilities/file_handler.py
+++ b/yt/utilities/file_handler.py
@@ -27,3 +27,19 @@
def __contains__(self, item):
return item in self.handle
+
+class FITSFileHandler(FileHandler):
+ from yt.utilities.on_demand_imports import _astropy
+ def __init__(self, filename):
+ if isinstance(filename, _astropy.pyfits.PrimaryHDU):
+ self.handle = _astropy.pyfits.HDUList(filename)
+ else:
+ self.handle = _astropy.pyfits.open(
+ filename, memmap=True, do_not_scale_image_data=True,
+ ignore_blank=True)
+
+ def __del__(self):
+ for f in self._fits_files:
+ f.close()
+ del f
+ super(FITSFileHandler, self).__del__()
https://bitbucket.org/yt_analysis/yt/commits/f96339dcc6a2/
Changeset: f96339dcc6a2
Branch: yt-3.0
User: ngoldbaum
Date: 2014-06-12 07:36:42
Summary: Updating PLUTO
Affected #: 1 file
diff -r b85583c53d563cdd8d261656924b362b1c74cf6b -r f96339dcc6a2d334759d5d4bbef7a06907c3d887 yt/frontends/pluto/data_structures.py
--- a/yt/frontends/pluto/data_structures.py
+++ b/yt/frontends/pluto/data_structures.py
@@ -13,7 +13,6 @@
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
-import h5py
import re
import os
import weakref
@@ -38,6 +37,8 @@
Dataset
from yt.utilities.definitions import \
mpc_conversion, sec_conversion
+from yt.utilities.file_handler import \
+ FileHandler
from yt.utilities.parallel_tools.parallel_analysis_interface import \
parallel_root_only
from yt.utilities.io_handler import \
@@ -167,7 +168,7 @@
def __init__(self, filename, dataset_type='pluto_hdf5',
storage_filename = None, ini_filename = None):
- self._handle = h5py.File(filename,'r')
+ self._handle = FileHandler(filename)
self.current_time = self._handle.attrs['time']
self.ini_filename = ini_filename
self.fullplotdir = os.path.abspath(filename)
@@ -180,9 +181,6 @@
self.parameters["DualEnergyFormalism"] = 0
self.parameters["EOSType"] = -1 # default
- def __del__(self):
- self._handle.close()
-
def _set_units(self):
"""
Generates the conversion to various physical _units based on the parameter file
https://bitbucket.org/yt_analysis/yt/commits/342fa1eabe63/
Changeset: 342fa1eabe63
Branch: yt-3.0
User: ngoldbaum
Date: 2014-06-12 08:02:27
Summary: Reverting some unnecessary changes relative to mainline.
Affected #: 1 file
diff -r f96339dcc6a2d334759d5d4bbef7a06907c3d887 -r 342fa1eabe63ab6e085804bc00d95f2bf0632d55 yt/frontends/flash/data_structures.py
--- a/yt/frontends/flash/data_structures.py
+++ b/yt/frontends/flash/data_structures.py
@@ -97,19 +97,19 @@
self.grid_left_edge[:,i] = DLE[i]
self.grid_right_edge[:,i] = DRE[i]
# We only go up to ND for 2D datasets
- self.grid_left_edge[:,:ND] = f.handle["/bounding box"][:,:ND,0]
- self.grid_right_edge[:,:ND] = f.handle["/bounding box"][:,:ND,1]
+ self.grid_left_edge[:,:ND] = f["/bounding box"][:,:ND,0]
+ self.grid_right_edge[:,:ND] = f["/bounding box"][:,:ND,1]
# Move this to the parameter file
try:
nxb = pf.parameters['nxb']
nyb = pf.parameters['nyb']
nzb = pf.parameters['nzb']
except KeyError:
- nxb, nyb, nzb = [int(f.handle["/simulation parameters"]['n%sb' % ax])
+ nxb, nyb, nzb = [int(f["/simulation parameters"]['n%sb' % ax])
for ax in 'xyz']
self.grid_dimensions[:] *= (nxb, nyb, nzb)
try:
- self.grid_particle_count[:] = f_part.handle["/localnp"][:][:,None]
+ self.grid_particle_count[:] = f_part["/localnp"][:][:,None]
except KeyError:
self.grid_particle_count[:] = 0.0
self._particle_indices = np.zeros(self.num_grids + 1, dtype='int64')
@@ -121,7 +121,7 @@
# This will become redundant, as _prepare_grid will reset it to its
# current value. Note that FLASH uses 1-based indexing for refinement
# levels, but we do not, so we reduce the level by 1.
- self.grid_levels.flat[:] = f.handle["/refine level"][:][:] - 1
+ self.grid_levels.flat[:] = f["/refine level"][:][:] - 1
self.grids = np.empty(self.num_grids, dtype='object')
for i in xrange(self.num_grids):
self.grids[i] = self.grid(i+1, self, self.grid_levels[i,0])
@@ -390,7 +390,7 @@
def _is_valid(self, *args, **kwargs):
try:
fileh = FileHandler(args[0])
- if "bounding box" in fileh.handle["/"].keys():
+ if "bounding box" in fileh["/"].keys():
return True
except:
pass
https://bitbucket.org/yt_analysis/yt/commits/3cb81b53b1d6/
Changeset: 3cb81b53b1d6
Branch: yt-3.0
User: ngoldbaum
Date: 2014-06-12 08:13:12
Summary: Fix test failures.
Affected #: 1 file
diff -r 342fa1eabe63ab6e085804bc00d95f2bf0632d55 -r 3cb81b53b1d678ff4d1876254c2f365ab30515f4 yt/fields/field_detector.py
--- a/yt/fields/field_detector.py
+++ b/yt/fields/field_detector.py
@@ -58,7 +58,7 @@
pf.domain_right_edge = np.ones(3, 'float64')
pf.dimensionality = 3
pf.periodicity = (True, True, True)
- self.pf = weakref.proxy(pf)
+ self.pf = pf
class fake_index(object):
class fake_io(object):
https://bitbucket.org/yt_analysis/yt/commits/b96e0aadaf8b/
Changeset: b96e0aadaf8b
Branch: yt-3.0
User: ngoldbaum
Date: 2014-06-12 09:01:02
Summary: More test fixes.
Affected #: 1 file
diff -r 3cb81b53b1d678ff4d1876254c2f365ab30515f4 -r b96e0aadaf8b1c54370ecf6ef7537a36908a3103 yt/utilities/file_handler.py
--- a/yt/utilities/file_handler.py
+++ b/yt/utilities/file_handler.py
@@ -20,7 +20,8 @@
self.handle = h5py.File(filename, 'r')
def __del__(self):
- self.handle.close()
+ if hasattr(self, 'handle'):
+ self.handle.close()
def __getitem__(self, key):
return self.handle[key]
@@ -28,9 +29,20 @@
def __contains__(self, item):
return item in self.handle
+ def __len__(self):
+ return len(self.handle)
+
+ @property
+ def attrs(self):
+ return self.handle.attrs
+
+ @property
+ def keys(self):
+ return self.handle.keys
+
class FITSFileHandler(FileHandler):
- from yt.utilities.on_demand_imports import _astropy
def __init__(self, filename):
+ from yt.utilities.on_demand_imports import _astropy
if isinstance(filename, _astropy.pyfits.PrimaryHDU):
self.handle = _astropy.pyfits.HDUList(filename)
else:
https://bitbucket.org/yt_analysis/yt/commits/893fcf8cc1dc/
Changeset: 893fcf8cc1dc
Branch: yt-3.0
User: ngoldbaum
Date: 2014-06-17 21:57:28
Summary: Renaming the base FileHandler class to HDF5FileHandler
Affected #: 4 files
diff -r b96e0aadaf8b1c54370ecf6ef7537a36908a3103 -r 893fcf8cc1dce07c0d7847964c01b9ede8205ab9 yt/frontends/chombo/data_structures.py
--- a/yt/frontends/chombo/data_structures.py
+++ b/yt/frontends/chombo/data_structures.py
@@ -34,7 +34,7 @@
from yt.utilities.definitions import \
mpc_conversion, sec_conversion
from yt.utilities.file_handler import \
- FileHandler
+ HDF5FileHandler
from yt.utilities.parallel_tools.parallel_analysis_interface import \
parallel_root_only
from yt.utilities.lib.misc_utilities import \
@@ -243,7 +243,7 @@
def __init__(self, filename, dataset_type='chombo_hdf5',
storage_filename = None, ini_filename = None):
self.fluid_types += ("chombo",)
- self._handle = FileHandler(filename)
+ self._handle = HDF5FileHandler(filename)
# look up the dimensionality of the dataset
D = self._handle['Chombo_global/'].attrs['SpaceDim']
diff -r b96e0aadaf8b1c54370ecf6ef7537a36908a3103 -r 893fcf8cc1dce07c0d7847964c01b9ede8205ab9 yt/frontends/flash/data_structures.py
--- a/yt/frontends/flash/data_structures.py
+++ b/yt/frontends/flash/data_structures.py
@@ -31,7 +31,7 @@
from yt.utilities.definitions import \
mpc_conversion, sec_conversion
from yt.utilities.file_handler import \
- FileHandler
+ HDF5FileHandler
from yt.utilities.io_handler import \
io_registry
from yt.utilities.physical_constants import cm_per_mpc
@@ -185,7 +185,7 @@
self.fluid_types += ("flash",)
if self._handle is not None: return
- self._handle = FileHandler(filename)
+ self._handle = HDF5FileHandler(filename)
if conversion_override is None: conversion_override = {}
self._conversion_override = conversion_override
@@ -195,7 +195,7 @@
self._particle_handle = self._handle
else :
try:
- self._particle_handle = FileHandler(self.particle_filename)
+ self._particle_handle = HDF5FileHandler(self.particle_filename)
except :
raise IOError(self.particle_filename)
# These should be explicitly obtained from the file, but for now that
@@ -389,7 +389,7 @@
@classmethod
def _is_valid(self, *args, **kwargs):
try:
- fileh = FileHandler(args[0])
+ fileh = HDF5FileHandler(args[0])
if "bounding box" in fileh["/"].keys():
return True
except:
diff -r b96e0aadaf8b1c54370ecf6ef7537a36908a3103 -r 893fcf8cc1dce07c0d7847964c01b9ede8205ab9 yt/frontends/pluto/data_structures.py
--- a/yt/frontends/pluto/data_structures.py
+++ b/yt/frontends/pluto/data_structures.py
@@ -38,7 +38,7 @@
from yt.utilities.definitions import \
mpc_conversion, sec_conversion
from yt.utilities.file_handler import \
- FileHandler
+ HDF5FileHandler
from yt.utilities.parallel_tools.parallel_analysis_interface import \
parallel_root_only
from yt.utilities.io_handler import \
@@ -168,7 +168,7 @@
def __init__(self, filename, dataset_type='pluto_hdf5',
storage_filename = None, ini_filename = None):
- self._handle = FileHandler(filename)
+ self._handle = HDF5FileHandler(filename)
self.current_time = self._handle.attrs['time']
self.ini_filename = ini_filename
self.fullplotdir = os.path.abspath(filename)
diff -r b96e0aadaf8b1c54370ecf6ef7537a36908a3103 -r 893fcf8cc1dce07c0d7847964c01b9ede8205ab9 yt/utilities/file_handler.py
--- a/yt/utilities/file_handler.py
+++ b/yt/utilities/file_handler.py
@@ -15,7 +15,7 @@
import h5py
-class FileHandler(object):
+class HDF5FileHandler(object):
def __init__(self, filename):
self.handle = h5py.File(filename, 'r')
@@ -40,7 +40,7 @@
def keys(self):
return self.handle.keys
-class FITSFileHandler(FileHandler):
+class FITSFileHandler(HDF5FileHandler):
def __init__(self, filename):
from yt.utilities.on_demand_imports import _astropy
if isinstance(filename, _astropy.pyfits.PrimaryHDU):
https://bitbucket.org/yt_analysis/yt/commits/88bcdc5ae09c/
Changeset: 88bcdc5ae09c
Branch: yt-3.0
User: MatthewTurk
Date: 2014-06-19 02:10:40
Summary: Merged in ngoldbaum/yt/yt-3.0 (pull request #947)
Ensure Dataset and Index instances do not participate in reference cycles
Affected #: 9 files
diff -r 5a31577825d925d3795e8587a3106771d5b61dae -r 88bcdc5ae09cfa02cca341c77bf10e457c3dccb9 yt/data_objects/grid_patch.py
--- a/yt/data_objects/grid_patch.py
+++ b/yt/data_objects/grid_patch.py
@@ -56,7 +56,7 @@
self.id = id
self._child_mask = self._child_indices = self._child_index_mask = None
self.pf = index.parameter_file
- self._index = index
+ self._index = weakref.proxy(index)
self.start_index = None
self.filename = filename
self._last_mask = None
diff -r 5a31577825d925d3795e8587a3106771d5b61dae -r 88bcdc5ae09cfa02cca341c77bf10e457c3dccb9 yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -498,7 +498,7 @@
continue
cname = cls.__name__
if cname.endswith("Base"): cname = cname[:-4]
- self._add_object_class(name, cname, cls, {'pf':self})
+ self._add_object_class(name, cname, cls, {'pf':weakref.proxy(self)})
if self.refine_by != 2 and hasattr(self, 'proj') and \
hasattr(self, 'overlap_proj'):
mylog.warning("Refine by something other than two: reverting to"
diff -r 5a31577825d925d3795e8587a3106771d5b61dae -r 88bcdc5ae09cfa02cca341c77bf10e457c3dccb9 yt/frontends/chombo/data_structures.py
--- a/yt/frontends/chombo/data_structures.py
+++ b/yt/frontends/chombo/data_structures.py
@@ -33,6 +33,8 @@
Dataset
from yt.utilities.definitions import \
mpc_conversion, sec_conversion
+from yt.utilities.file_handler import \
+ HDF5FileHandler
from yt.utilities.parallel_tools.parallel_analysis_interface import \
parallel_root_only
from yt.utilities.lib.misc_utilities import \
@@ -241,7 +243,7 @@
def __init__(self, filename, dataset_type='chombo_hdf5',
storage_filename = None, ini_filename = None):
self.fluid_types += ("chombo",)
- self._handle = h5py.File(filename, 'r')
+ self._handle = HDF5FileHandler(filename)
# look up the dimensionality of the dataset
D = self._handle['Chombo_global/'].attrs['SpaceDim']
@@ -271,9 +273,6 @@
self.parameters["DualEnergyFormalism"] = 0
self.parameters["EOSType"] = -1 # default
- def __del__(self):
- self._handle.close()
-
def _set_code_unit_attributes(self):
self.length_unit = YTQuantity(1.0, "cm")
self.mass_unit = YTQuantity(1.0, "g")
@@ -316,11 +315,10 @@
return LE
def _calc_right_edge(self):
- fileh = h5py.File(self.parameter_filename,'r')
+ fileh = self._handle
dx0 = fileh['/level_0'].attrs['dx']
D = self.dimensionality
RE = dx0*((np.array(list(fileh['/level_0'].attrs['prob_domain'])))[D:] + 1)
- fileh.close()
return RE
def _calc_domain_dimensions(self):
diff -r 5a31577825d925d3795e8587a3106771d5b61dae -r 88bcdc5ae09cfa02cca341c77bf10e457c3dccb9 yt/frontends/fits/data_structures.py
--- a/yt/frontends/fits/data_structures.py
+++ b/yt/frontends/fits/data_structures.py
@@ -29,6 +29,8 @@
YTDataChunk
from yt.data_objects.static_output import \
Dataset
+from yt.utilities.file_handler import \
+ FITSFileHandler
from yt.utilities.io_handler import \
io_registry
from .fields import FITSFieldInfo
@@ -153,7 +155,7 @@
naxis4 = self.parameter_file.primary_header["naxis4"]
else:
naxis4 = 1
- for i, fits_file in enumerate(self.parameter_file._fits_files):
+ for i, fits_file in enumerate(self.parameter_file._handle._fits_files):
for j, hdu in enumerate(fits_file):
if self._ensure_same_dims(hdu):
units = self._determine_image_units(hdu.header, known_units)
@@ -355,15 +357,12 @@
elif isinstance(nan_mask, dict):
self.nan_mask = nan_mask
if isinstance(self.filenames[0], _astropy.pyfits.PrimaryHDU):
- self._handle = _astropy.pyfits.HDUList(self.filenames[0])
+ self._handle = FITSFileHandler(self.filenames[0])
fn = "InMemoryFITSImage_%s" % (uuid.uuid4().hex)
else:
- self._handle = _astropy.pyfits.open(self.filenames[0],
- memmap=True,
- do_not_scale_image_data=True,
- ignore_blank=True)
+ self._handle = FITSFileHandler(self.filenames[0])
fn = self.filenames[0]
- self._fits_files = [self._handle]
+ self._handle._fits_files = [self._handle]
if self.num_files > 1:
for fits_file in auxiliary_files:
if os.path.exists(fits_file):
@@ -373,7 +372,7 @@
f = _astropy.pyfits.open(fn, memmap=True,
do_not_scale_image_data=True,
ignore_blank=True)
- self._fits_files.append(f)
+ self._handle._fits_files.append(f)
if len(self._handle) > 1 and self._handle[1].name == "EVENTS":
self.events_data = True
@@ -648,13 +647,6 @@
return self.arr((pv.v-self._p0)*self._dz+self._z0,
self.spec_unit)
- def __del__(self):
- for f in self._fits_files:
- f.close()
- del file
- self._handle.close()
- del self._handle
-
@classmethod
def _is_valid(cls, *args, **kwargs):
ext = args[0].rsplit(".", 1)[-1]
diff -r 5a31577825d925d3795e8587a3106771d5b61dae -r 88bcdc5ae09cfa02cca341c77bf10e457c3dccb9 yt/frontends/flash/data_structures.py
--- a/yt/frontends/flash/data_structures.py
+++ b/yt/frontends/flash/data_structures.py
@@ -30,6 +30,8 @@
Dataset
from yt.utilities.definitions import \
mpc_conversion, sec_conversion
+from yt.utilities.file_handler import \
+ HDF5FileHandler
from yt.utilities.io_handler import \
io_registry
from yt.utilities.physical_constants import cm_per_mpc
@@ -183,7 +185,7 @@
self.fluid_types += ("flash",)
if self._handle is not None: return
- self._handle = h5py.File(filename, "r")
+ self._handle = HDF5FileHandler(filename)
if conversion_override is None: conversion_override = {}
self._conversion_override = conversion_override
@@ -192,15 +194,15 @@
if self.particle_filename is None :
self._particle_handle = self._handle
else :
- try :
- self._particle_handle = h5py.File(self.particle_filename, "r")
+ try:
+ self._particle_handle = HDF5FileHandler(self.particle_filename)
except :
raise IOError(self.particle_filename)
# These should be explicitly obtained from the file, but for now that
# will wait until a reorganization of the source tree and better
# generalization.
self.refine_by = 2
-
+
Dataset.__init__(self, filename, dataset_type)
self.storage_filename = storage_filename
@@ -384,19 +386,12 @@
self.current_redshift = self.omega_lambda = self.omega_matter = \
self.hubble_constant = self.cosmological_simulation = 0.0
- def __del__(self):
- if self._handle is not self._particle_handle:
- self._particle_handle.close()
- self._handle.close()
-
@classmethod
def _is_valid(self, *args, **kwargs):
try:
- fileh = h5py.File(args[0],'r')
+ fileh = HDF5FileHandler(args[0])
if "bounding box" in fileh["/"].keys():
- fileh.close()
return True
- fileh.close()
except:
pass
return False
diff -r 5a31577825d925d3795e8587a3106771d5b61dae -r 88bcdc5ae09cfa02cca341c77bf10e457c3dccb9 yt/frontends/pluto/data_structures.py
--- a/yt/frontends/pluto/data_structures.py
+++ b/yt/frontends/pluto/data_structures.py
@@ -13,7 +13,6 @@
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
-import h5py
import re
import os
import weakref
@@ -38,6 +37,8 @@
Dataset
from yt.utilities.definitions import \
mpc_conversion, sec_conversion
+from yt.utilities.file_handler import \
+ HDF5FileHandler
from yt.utilities.parallel_tools.parallel_analysis_interface import \
parallel_root_only
from yt.utilities.io_handler import \
@@ -167,7 +168,7 @@
def __init__(self, filename, dataset_type='pluto_hdf5',
storage_filename = None, ini_filename = None):
- self._handle = h5py.File(filename,'r')
+ self._handle = HDF5FileHandler(filename)
self.current_time = self._handle.attrs['time']
self.ini_filename = ini_filename
self.fullplotdir = os.path.abspath(filename)
@@ -180,9 +181,6 @@
self.parameters["DualEnergyFormalism"] = 0
self.parameters["EOSType"] = -1 # default
- def __del__(self):
- self._handle.close()
-
def _set_units(self):
"""
Generates the conversion to various physical _units based on the parameter file
diff -r 5a31577825d925d3795e8587a3106771d5b61dae -r 88bcdc5ae09cfa02cca341c77bf10e457c3dccb9 yt/utilities/file_handler.py
--- /dev/null
+++ b/yt/utilities/file_handler.py
@@ -0,0 +1,57 @@
+"""
+A wrapper class for h5py file objects.
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+
+class HDF5FileHandler(object):
+ def __init__(self, filename):
+ self.handle = h5py.File(filename, 'r')
+
+ def __del__(self):
+ if hasattr(self, 'handle'):
+ self.handle.close()
+
+ def __getitem__(self, key):
+ return self.handle[key]
+
+ def __contains__(self, item):
+ return item in self.handle
+
+ def __len__(self):
+ return len(self.handle)
+
+ @property
+ def attrs(self):
+ return self.handle.attrs
+
+ @property
+ def keys(self):
+ return self.handle.keys
+
+class FITSFileHandler(HDF5FileHandler):
+ def __init__(self, filename):
+ from yt.utilities.on_demand_imports import _astropy
+ if isinstance(filename, _astropy.pyfits.PrimaryHDU):
+ self.handle = _astropy.pyfits.HDUList(filename)
+ else:
+ self.handle = _astropy.pyfits.open(
+ filename, memmap=True, do_not_scale_image_data=True,
+ ignore_blank=True)
+
+ def __del__(self):
+ for f in self._fits_files:
+ f.close()
+ del f
+ super(FITSFileHandler, self).__del__()
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you have the service enabled and are its addressed
recipient.
More information about the yt-svn
mailing list