[yt-svn] commit/yt-3.0: 5 new changesets
commits-noreply at bitbucket.org
commits-noreply at bitbucket.org
Thu May 9 06:33:19 PDT 2013
5 new commits in yt-3.0:
https://bitbucket.org/yt_analysis/yt-3.0/commits/e2c59d797d73/
Changeset: e2c59d797d73
Branch: yt-3.0
User: xarthisius
Date: 2013-04-16 17:57:58
Summary: [gdf] adding basic support for new data representation
Affected #: 2 files
diff -r ae0003cdf0a5c5c11d3722d37796c67b0b84428a -r e2c59d797d731004107cdd985ef75229e41c23bf yt/frontends/gdf/data_structures.py
--- a/yt/frontends/gdf/data_structures.py
+++ b/yt/frontends/gdf/data_structures.py
@@ -39,6 +39,8 @@
StaticOutput
from yt.utilities.lib import \
get_box_grids_level
+from yt.utilities.io_handler import \
+ io_registry
from yt.utilities.definitions import \
mpc_conversion, sec_conversion
@@ -78,6 +80,10 @@
if self.pf.dimensionality < 3: self.dds[2] = 1.0
self.field_data['dx'], self.field_data['dy'], self.field_data['dz'] = self.dds
+ @property
+ def filename(self):
+ return None
+
class GDFHierarchy(GridGeometryHandler):
grid = GDFGrid
@@ -85,19 +91,23 @@
def __init__(self, pf, data_style='grid_data_format'):
self.parameter_file = weakref.proxy(pf)
self.data_style = data_style
+ self.max_level = 10 # FIXME
# for now, the hierarchy file is the parameter file!
self.hierarchy_filename = self.parameter_file.parameter_filename
self.directory = os.path.dirname(self.hierarchy_filename)
- self._fhandle = h5py.File(self.hierarchy_filename,'r')
- GridGeometryHandler.__init__(self,pf,data_style)
+# self._handle = h5py.File(self.hierarchy_filename, 'r')
+ self._handle = pf._handle
+# import pudb; pudb.set_trace()
+ GridGeometryHandler.__init__(self, pf, data_style)
+ print "!!!!"
- self._fhandle.close()
+# self._handle.close()
def _initialize_data_storage(self):
pass
def _detect_fields(self):
- self.field_list = self._fhandle['field_types'].keys()
+ self.field_list = self._handle['field_types'].keys()
def _setup_classes(self):
dd = self._get_data_reader_dict()
@@ -105,10 +115,10 @@
self.object_types.sort()
def _count_grids(self):
- self.num_grids = self._fhandle['/grid_parent_id'].shape[0]
+ self.num_grids = self._handle['/grid_parent_id'].shape[0]
def _parse_hierarchy(self):
- f = self._fhandle
+ f = self._handle
dxs = []
self.grids = np.empty(self.num_grids, dtype='object')
levels = (f['grid_level'][:]).copy()
@@ -139,7 +149,7 @@
for gi, g in enumerate(self.grids):
g._prepare_grid()
g._setup_dx()
-
+ return
for gi, g in enumerate(self.grids):
g.Children = self._get_grid_children(g)
for g1 in g.Children:
@@ -165,16 +175,22 @@
mask[grid_ind] = True
return [g for g in self.grids[mask] if g.Level == grid.Level + 1]
+ def _setup_data_io(self):
+ self.io = io_registry[self.data_style](self.parameter_file)
+
class GDFStaticOutput(StaticOutput):
_hierarchy_class = GDFHierarchy
_fieldinfo_fallback = GDFFieldInfo
_fieldinfo_known = KnownGDFFields
+ _handle = None
def __init__(self, filename, data_style='grid_data_format',
storage_filename = None):
- StaticOutput.__init__(self, filename, data_style)
+ if self._handle is not None: return
+ self._handle = h5py.File(filename, "r")
self.storage_filename = storage_filename
self.filename = filename
+ StaticOutput.__init__(self, filename, data_style)
def _set_units(self):
"""
@@ -208,9 +224,10 @@
self._fieldinfo_known.add_field(field_name, function=NullFunc, take_log=False,
units=current_fields_unit, projected_units="",
convert_function=_get_convert(field_name))
-
- self._handle.close()
- del self._handle
+ for p, v in self.units.items():
+ self.conversion_factors[p] = v
+# self._handle.close()
+# del self._handle
def _parse_parameter_file(self):
self._handle = h5py.File(self.parameter_filename, "r")
@@ -241,8 +258,8 @@
self.hubble_constant = self.cosmological_simulation = 0.0
self.parameters['Time'] = 1.0 # Hardcode time conversion for now.
self.parameters["HydroMethod"] = 0 # Hardcode for now until field staggering is supported.
- self._handle.close()
- del self._handle
+# self._handle.close()
+# del self._handle
@classmethod
def _is_valid(self, *args, **kwargs):
@@ -259,3 +276,5 @@
def __repr__(self):
return self.basename.rsplit(".", 1)[0]
+ def __del__(self):
+ self._handle.close()
diff -r ae0003cdf0a5c5c11d3722d37796c67b0b84428a -r e2c59d797d731004107cdd985ef75229e41c23bf yt/frontends/gdf/io.py
--- a/yt/frontends/gdf/io.py
+++ b/yt/frontends/gdf/io.py
@@ -25,31 +25,67 @@
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
+import numpy as np
+from yt.funcs import \
+ mylog
from yt.utilities.io_handler import \
- BaseIOHandler
-import h5py
+ BaseIOHandler
+
+def field_dname(grid_id, field_name):
+ return "/data/grid_%010i/%s" % (grid_id, field_name)
+
+
+# TODO all particle bits were removed
class IOHandlerGDFHDF5(BaseIOHandler):
_data_style = "grid_data_format"
_offset_string = 'data:offsets=0'
_data_string = 'data:datatype=0'
- def _field_dict(self,fhandle):
- keys = fhandle['field_types'].keys()
- val = fhandle['field_types'].keys()
- return dict(zip(keys,val))
+ def __init__(self, pf, *args, **kwargs):
+ # TODO check if _num_per_stride is needed
+ self._num_per_stride = kwargs.pop("num_per_stride", 1000000)
+ BaseIOHandler.__init__(self, *args, **kwargs)
+ self.pf = pf
+ self._handle = pf._handle
- def _read_field_names(self,grid):
- fhandle = h5py.File(grid.filename,'r')
- names = fhandle['field_types'].keys()
- fhandle.close()
- return names
- def _read_data(self,grid,field):
- fhandle = h5py.File(grid.hierarchy.hierarchy_filename,'r')
- data = (fhandle['/data/grid_%010i/'%grid.id+field][:]).copy()
- fhandle.close()
- if grid.pf.field_ordering == 1:
- return data.T
- else:
- return data
+ def _read_data_set(self, grid, field):
+ data = self._handle[field_dname(grid.id, field)][:, :, :]
+ # TODO transpose data if needed (grid.pf.field_ordering)
+ return data.astype("float64")
+
+ def _read_data_slice(self, grid, field, axis, coord):
+ slc = [slice(None), slice(None), slice(None)]
+ slc[axis] = slice(coord, coord + 1)
+ # TODO transpose data if needed
+ data = self._handle[field_dname(grid.id, field)][slc]
+ return data.astype("float64")
+
+ def _read_fluid_selection(self, chunks, selector, fields, size):
+ chunks = list(chunks)
+ # TODO ????
+ #if any((ftype != "gas" for ftype, fname in fields)):
+ # raise NotImplementedError
+ fhandle = self._handle
+ rv = {}
+ for field in fields:
+ ftype, fname = field
+ rv[field] = np.empty(
+ size, dtype=fhandle[field_dname(0, fname)].dtype)
+ ngrids = sum(len(chunk.objs) for chunk in chunks)
+ mylog.debug("Reading %s cells of %s fields in %s blocks",
+ size, [fname for ftype, fname in fields], ngrids)
+ for field in fields:
+ ftype, fname = field
+ ind = 0
+ for chunk in chunks:
+ for grid in chunk.objs:
+ mask = grid.select(selector) # caches
+ if mask is None:
+ continue
+ # TODO transpose if needed
+ data = fhandle[field_dname(grid.id, fname)][mask]
+ rv[field][ind:ind + data.size] = data
+ ind += data.size
+ return rv
https://bitbucket.org/yt_analysis/yt-3.0/commits/9ff17559b7f6/
Changeset: 9ff17559b7f6
Branch: yt-3.0
User: xarthisius
Date: 2013-01-16 20:39:20
Summary: Remove debug statements
Affected #: 1 file
diff -r e2c59d797d731004107cdd985ef75229e41c23bf -r 9ff17559b7f662be5cfc0ce48436e854de59adc0 yt/frontends/gdf/data_structures.py
--- a/yt/frontends/gdf/data_structures.py
+++ b/yt/frontends/gdf/data_structures.py
@@ -97,9 +97,7 @@
self.directory = os.path.dirname(self.hierarchy_filename)
# self._handle = h5py.File(self.hierarchy_filename, 'r')
self._handle = pf._handle
-# import pudb; pudb.set_trace()
GridGeometryHandler.__init__(self, pf, data_style)
- print "!!!!"
# self._handle.close()
https://bitbucket.org/yt_analysis/yt-3.0/commits/96a9135733c7/
Changeset: 96a9135733c7
Branch: yt-3.0
User: xarthisius
Date: 2013-04-16 15:08:08
Summary: [gdf] update I/O, SlicePlot and ProjectionPlot are working for enzo dataset written with write_to_gdf
Affected #: 2 files
diff -r 9ff17559b7f662be5cfc0ce48436e854de59adc0 -r 96a9135733c790708dd46705625c527839c69eca yt/frontends/gdf/data_structures.py
--- a/yt/frontends/gdf/data_structures.py
+++ b/yt/frontends/gdf/data_structures.py
@@ -95,11 +95,9 @@
# for now, the hierarchy file is the parameter file!
self.hierarchy_filename = self.parameter_file.parameter_filename
self.directory = os.path.dirname(self.hierarchy_filename)
-# self._handle = h5py.File(self.hierarchy_filename, 'r')
self._handle = pf._handle
GridGeometryHandler.__init__(self, pf, data_style)
-# self._handle.close()
def _initialize_data_storage(self):
pass
@@ -147,7 +145,6 @@
for gi, g in enumerate(self.grids):
g._prepare_grid()
g._setup_dx()
- return
for gi, g in enumerate(self.grids):
g.Children = self._get_grid_children(g)
for g1 in g.Children:
@@ -167,9 +164,20 @@
def _setup_derived_fields(self):
self.derived_field_list = []
+ def _get_box_grids(self, left_edge, right_edge):
+ """
+ Gets back all the grids between a left edge and right edge
+ """
+ eps = np.finfo(np.float64).eps
+ grid_i = np.where((np.all((self.grid_right_edge - left_edge) > eps, axis=1) \
+ & np.all((right_edge - self.grid_left_edge) > eps, axis=1)) == True)
+
+ return self.grids[grid_i], grid_i
+
+
def _get_grid_children(self, grid):
mask = np.zeros(self.num_grids, dtype='bool')
- grids, grid_ind = self.get_box_grids(grid.LeftEdge, grid.RightEdge)
+ grids, grid_ind = self._get_box_grids(grid.LeftEdge, grid.RightEdge)
mask[grid_ind] = True
return [g for g in self.grids[mask] if g.Level == grid.Level + 1]
@@ -216,7 +224,7 @@
except:
self.units[field_name] = 1.0
try:
- current_fields_unit = current_field.attrs['field_units'][0]
+ current_fields_unit = current_field.attrs['field_units']
except:
current_fields_unit = ""
self._fieldinfo_known.add_field(field_name, function=NullFunc, take_log=False,
@@ -224,8 +232,6 @@
convert_function=_get_convert(field_name))
for p, v in self.units.items():
self.conversion_factors[p] = v
-# self._handle.close()
-# del self._handle
def _parse_parameter_file(self):
self._handle = h5py.File(self.parameter_filename, "r")
@@ -256,8 +262,6 @@
self.hubble_constant = self.cosmological_simulation = 0.0
self.parameters['Time'] = 1.0 # Hardcode time conversion for now.
self.parameters["HydroMethod"] = 0 # Hardcode for now until field staggering is supported.
-# self._handle.close()
-# del self._handle
@classmethod
def _is_valid(self, *args, **kwargs):
diff -r 9ff17559b7f662be5cfc0ce48436e854de59adc0 -r 96a9135733c790708dd46705625c527839c69eca yt/frontends/gdf/io.py
--- a/yt/frontends/gdf/io.py
+++ b/yt/frontends/gdf/io.py
@@ -64,9 +64,8 @@
def _read_fluid_selection(self, chunks, selector, fields, size):
chunks = list(chunks)
- # TODO ????
- #if any((ftype != "gas" for ftype, fname in fields)):
- # raise NotImplementedError
+ if any((ftype != "gas" for ftype, fname in fields)):
+ raise NotImplementedError
fhandle = self._handle
rv = {}
for field in fields:
https://bitbucket.org/yt_analysis/yt-3.0/commits/235a3da4c101/
Changeset: 235a3da4c101
Branch: yt-3.0
User: xarthisius
Date: 2013-04-16 21:22:35
Summary: [gdf] respect field_ordering variable, SlicePlot and ProjectionPlot now work for Fortran data
Affected #: 1 file
diff -r 96a9135733c790708dd46705625c527839c69eca -r 235a3da4c101e7946102b9bdb5ab961d5fcb0125 yt/frontends/gdf/io.py
--- a/yt/frontends/gdf/io.py
+++ b/yt/frontends/gdf/io.py
@@ -51,15 +51,19 @@
def _read_data_set(self, grid, field):
- data = self._handle[field_dname(grid.id, field)][:, :, :]
- # TODO transpose data if needed (grid.pf.field_ordering)
+ if self.pf.field_ordering == 1:
+ data = self._handle[field_dname(grid.id, field)][:].swapaxes(0, 2)
+ else:
+ data = self._handle[field_dname(grid.id, field)][:, :, :]
return data.astype("float64")
def _read_data_slice(self, grid, field, axis, coord):
slc = [slice(None), slice(None), slice(None)]
slc[axis] = slice(coord, coord + 1)
- # TODO transpose data if needed
- data = self._handle[field_dname(grid.id, field)][slc]
+ if self.pf.field_ordering == 1:
+ data = self._handle[field_dname(grid.id, field)][:].swapaxes(0, 2)[slc]
+ else:
+ data = self._handle[field_dname(grid.id, field)][slc]
return data.astype("float64")
def _read_fluid_selection(self, chunks, selector, fields, size):
@@ -83,8 +87,10 @@
mask = grid.select(selector) # caches
if mask is None:
continue
- # TODO transpose if needed
- data = fhandle[field_dname(grid.id, fname)][mask]
+ if self.pf.field_ordering == 1:
+ data = fhandle[field_dname(grid.id, fname)][:].swapaxes(0, 2)[mask]
+ else:
+ data = fhandle[field_dname(grid.id, fname)][mask]
rv[field][ind:ind + data.size] = data
ind += data.size
return rv
https://bitbucket.org/yt_analysis/yt-3.0/commits/74c2c00d1078/
Changeset: 74c2c00d1078
Branch: yt-3.0
User: MatthewTurk
Date: 2013-05-09 15:33:10
Summary: Merged in xarthisius/yt-3.0 (pull request #30)
Adding support for GDF I/O
Affected #: 2 files
Diff not available.
Repository URL: https://bitbucket.org/yt_analysis/yt-3.0/
--
This is a commit notification from bitbucket.org. You are receiving
this message because the notification service is enabled for the
recipient address of this email.
More information about the yt-svn
mailing list