[Yt-svn] yt-commit r1689 - in trunk/yt: . extensions/image_panner lagos
mturk at wrangler.dreamhost.com
mturk at wrangler.dreamhost.com
Sat Apr 10 18:06:28 PDT 2010
Author: mturk
Date: Sat Apr 10 18:06:27 2010
New Revision: 1689
URL: http://yt.enzotools.org/changeset/1689
Log:
Backport from hg:
* Adding "projload" function to simply load a pre-generated projection into a
dictionary
* Some fixes for the image panner as well as the addition of a subset zoom
image panner
* John's fix for the problem of empty dictionaries in parallel jobs not being
passed around correctly
* Fixes for particle IO to avoid keeping particles in memory unnecessarily
* Guess the location of HDF5 if not explicitly told it
Modified:
trunk/yt/convenience.py
trunk/yt/extensions/image_panner/pan_and_scan_widget.py
trunk/yt/extensions/image_panner/vm_panner.py
trunk/yt/lagos/BaseDataTypes.py
trunk/yt/lagos/HDF5LightReader.c
trunk/yt/lagos/ParticleIO.py
trunk/yt/lagos/setup.py
trunk/yt/mods.py
Modified: trunk/yt/convenience.py
==============================================================================
--- trunk/yt/convenience.py (original)
+++ trunk/yt/convenience.py Sat Apr 10 18:06:27 2010
@@ -82,3 +82,28 @@
for c in candidates:
mylog.error(" Possible: %s", c)
return None
+
+def projload(pf, axis, weight_field = None):
+ # This is something of a hack, so that we can just get back a projection
+ # and not utilize any of the intermediate hierarchy objects.
+ class ProjMock(dict):
+ pass
+ import h5py
+ f = h5py.File(os.path.join(pf.fullpath, pf.parameter_filename + ".yt"))
+ b = f["/Projections/%s/" % (axis)]
+ wf = "weight_field_%s" % weight_field
+ if wf not in b: raise KeyError(wf)
+ fields = []
+ for k in b:
+ if k.startswith("weight_field"): continue
+ if k.endswith("_%s" % weight_field):
+ fields.append(k)
+ proj = ProjMock()
+ for f in ["px","py","pdx","pdy"]:
+ proj[f] = b[f][:]
+ for f in fields:
+ new_name = f[:-(len(weight_field) + 1)]
+ proj[new_name] = b[f][:]
+ proj.axis = axis
+ proj.pf = pf
+ return proj
Modified: trunk/yt/extensions/image_panner/pan_and_scan_widget.py
==============================================================================
--- trunk/yt/extensions/image_panner/pan_and_scan_widget.py (original)
+++ trunk/yt/extensions/image_panner/pan_and_scan_widget.py Sat Apr 10 18:06:27 2010
@@ -28,13 +28,13 @@
# Enthought library imports
from enthought.enable.api import Component, ComponentEditor, Window
from enthought.traits.api import HasTraits, Instance, Button, Any, Callable, \
- on_trait_change, Bool
+ on_trait_change, Bool, DelegatesTo, List, Enum
from enthought.traits.ui.api import Item, Group, View
# Chaco imports
from enthought.chaco.api import ArrayPlotData, jet, Plot, HPlotContainer, \
ColorBar, DataRange1D, DataRange2D, LinearMapper, ImageData, \
- CMapImagePlot
+ CMapImagePlot, OverlayPlotContainer
from enthought.chaco.tools.api import PanTool, ZoomTool, RangeSelection, \
RangeSelectionOverlay, RangeSelection
from enthought.chaco.tools.image_inspector_tool import ImageInspectorTool, \
@@ -83,11 +83,14 @@
class ImagePixelizerHelper(object):
index = None
- def __init__(self, panner):
+ def __init__(self, panner, run_callbacks = False):
self.panner = panner
+ self.run_callbacks = run_callbacks
def __call__(self, low, high):
b = self.panner.set_low_high(low, high)
+ if self.run_callbacks:
+ self.panner._run_callbacks()
if self.index is not None:
num_x_ticks = b.shape[0] + 1
num_y_ticks = b.shape[1] + 1
@@ -96,16 +99,45 @@
self.index.set_data( xs, ys )
return b
+class ZoomedPlotUpdater(object):
+ fid = None
+ def __init__(self, panner, zoom_factor=4):
+ """
+        Supply this as a viewport_callback argument to a panner if you want to
+ update a second panner in a smaller portion at higher resolution. If
+ you then set the *fid* property, you can also have it update a
+ FunctionImageData datarange. *panner* is the panner to update (not the
+ one this is a callback to) and *zoom_factor* is how much to zoom in by.
+ """
+ self.panner = panner
+ self.zoom_factor = zoom_factor
+
+ def __call__(self, xlim, ylim):
+ self.panner.xlim = xlim
+ self.panner.ylim = ylim
+ self.panner.zoom(self.zoom_factor)
+ nxlim = self.panner.xlim
+ nylim = self.panner.ylim
+ if self.fid is not None:
+ self.fid.data_range.set_bounds(
+ (nxlim[0], nylim[0]), (nxlim[1], nylim[1]))
+
class VMImagePlot(HasTraits):
plot = Instance(Plot)
fid = Instance(FunctionImageData)
img_plot = Instance(CMapImagePlot)
panner = Instance(VariableMeshPanner)
helper = Instance(ImagePixelizerHelper)
+ fields = List
+
+ def __init__(self, *args, **kwargs):
+ super(VMImagePlot, self).__init__(**kwargs)
+ self.add_trait("field", Enum(*self.fields))
+ self.field = self.panner.field
def _plot_default(self):
pd = ArrayPlotData()
- plot = Plot(pd)
+ plot = Plot(pd, padding = 0)
self.fid._data = self.panner.buffer
pd.set_data("imagedata", self.fid)
@@ -119,24 +151,46 @@
self.img_plot = img_plot
return plot
+ def _field_changed(self, old, new):
+ self.panner.field = new
+ self.fid.recalculate()
+
def _fid_default(self):
return FunctionImageData(func = self.helper)
def _helper_default(self):
return ImagePixelizerHelper(self.panner)
+ def _panner_changed(self, old, new):
+ index = self.helper.index
+ self.helper = ImagePixelizerHelper(new)
+ self.helper.index = index
+ self.fid.func = self.helper
+ self.fid.recalculate()
+
+ def _fields_default(self):
+ keys = []
+ for field in self.panner.source.keys():
+ if field not in ['px','py','pdx','pdy',
+ 'pz','pdz','weight_field']:
+ keys.append(field)
+ return keys
+
class VariableMeshPannerView(HasTraits):
plot = Instance(Plot)
spawn_zoom = Button
vm_plot = Instance(VMImagePlot)
use_tools = Bool(True)
+ full_container = Instance(HPlotContainer)
+ container = Instance(OverlayPlotContainer)
traits_view = View(
Group(
- Item('container', editor=ComponentEditor(size=(512,512)),
+ Item('full_container',
+ editor=ComponentEditor(size=(512,512)),
show_label=False),
- Item('spawn_zoom', show_label=False),
+ Item('field', show_label=False),
orientation = "vertical"),
width = 800, height=800,
resizable=True, title="Pan and Scan",
@@ -148,6 +202,7 @@
def __init__(self, **kwargs):
super(VariableMeshPannerView, self).__init__(**kwargs)
# Create the plot
+ self.add_trait("field", DelegatesTo("vm_plot"))
plot = self.vm_plot.plot
img_plot = self.vm_plot.img_plot
@@ -189,11 +244,8 @@
# the selection, so set that up as well
range_selection.listeners.append(img_plot)
- self.container = HPlotContainer(padding=30)
- self.container.add(self.colorbar)
+ self.full_container = HPlotContainer(padding=30)
+ self.container = OverlayPlotContainer(padding=0)
+ self.full_container.add(self.colorbar)
+ self.full_container.add(self.container)
self.container.add(self.vm_plot.plot)
-
- def _spawn_zoom_fired(self):
- np = self.panner.source.pf.h.image_panner(
- self.panner.source, self.panner.size, self.panner.field)
- new_window = VariableMeshPannerView(panner = np)
Modified: trunk/yt/extensions/image_panner/vm_panner.py
==============================================================================
--- trunk/yt/extensions/image_panner/vm_panner.py (original)
+++ trunk/yt/extensions/image_panner/vm_panner.py Sat Apr 10 18:06:27 2010
@@ -29,6 +29,7 @@
class VariableMeshPanner(object):
_buffer = None
+ _hold = False
def __init__(self, source, size, field, callback = None,
viewport_callback = None):
@@ -42,8 +43,8 @@
and *viewport_callback* is called with the new *xlim* and *ylim* values
each time the viewport changes.
"""
- if not isinstance(source, (AMRProjBase, AMRSliceBase)):
- raise RuntimeError
+ #if not isinstance(source, (AMRProjBase, AMRSliceBase)):
+ # raise RuntimeError
if callback is None:
callback = lambda a: None
self.callback = callback
@@ -56,6 +57,7 @@
self.xlim, self.ylim = self.bounds
def _run_callbacks(self):
+ if self._hold: return
self.callback(self.buffer)
self.viewport_callback(self.xlim, self.ylim)
Modified: trunk/yt/lagos/BaseDataTypes.py
==============================================================================
--- trunk/yt/lagos/BaseDataTypes.py (original)
+++ trunk/yt/lagos/BaseDataTypes.py Sat Apr 10 18:06:27 2010
@@ -593,7 +593,8 @@
# Now the next field can use this field
self[field] = temp_data[field]
# We finalize
- temp_data = self._mpi_catdict(temp_data)
+ if temp_data != {}:
+ temp_data = self._mpi_catdict(temp_data)
# And set, for the next group
for field in temp_data.keys():
self[field] = temp_data[field]
Modified: trunk/yt/lagos/HDF5LightReader.c
==============================================================================
--- trunk/yt/lagos/HDF5LightReader.c (original)
+++ trunk/yt/lagos/HDF5LightReader.c Sat Apr 10 18:06:27 2010
@@ -881,10 +881,9 @@
if(pv.file_id >= 0) {H5Fclose(pv.file_id); pv.file_id = -1;}
/* Let's pack up our return values */
- PyObject *my_list = PyList_New(0);
+ PyObject *my_list = PyList_New(pv.nfields);
for (i = 0; i < pv.nfields ; i++){
- PyList_Append(my_list, (PyObject *) pv.return_values[i]);
- Py_DECREF(pv.return_values[i]);
+ PyList_SET_ITEM(my_list, i, (PyObject *) pv.return_values[i]);
}
PyObject *return_value = Py_BuildValue("N", my_list);
@@ -899,6 +898,7 @@
Py_DECREF(conv_factors);
free(pv.validation_reqs);
/* We don't need to free pv */
+ if(!(pv.file_id <= 0)&&(H5Iget_ref(pv.file_id))) H5Fclose(pv.file_id);
return return_value;
@@ -923,6 +923,7 @@
if(pv.particle_position[i] != NULL) free(pv.particle_position[i]);
}
if(pv.validation_reqs != NULL) free(pv.validation_reqs);
+ if(!(pv.file_id <= 0)&&(H5Iget_ref(pv.file_id))) H5Fclose(pv.file_id);
return NULL;
}
Modified: trunk/yt/lagos/ParticleIO.py
==============================================================================
--- trunk/yt/lagos/ParticleIO.py (original)
+++ trunk/yt/lagos/ParticleIO.py Sat Apr 10 18:06:27 2010
@@ -39,28 +39,16 @@
def __init__(self, pf, source):
self.pf = pf
- self.data = {}
self.source = source
def __getitem__(self, key):
- if key not in self.data:
- self.get_data(key)
- return self.data[key]
-
- def __setitem__(self, key, val):
- self.data[key] = val
-
- def __delitem__(self, key):
- del self.data[key]
-
- def iter(self):
- for val in self.data.keys(): yield val
+ return self.get_data(key)
def get_data(self, fields):
fields = ensure_list(fields)
- self.source.get_data(fields, force_particle_read=True)
- for field in fields:
- self[field] = self.source[field]
+ rvs = self.source.get_data(fields, force_particle_read=True)
+ if len(fields) == 1: return rvs[0]
+ return rvs
particle_handler_registry.default_factory = lambda: ParticleIOHandler
@@ -98,10 +86,11 @@
count=len(grid_list), dtype='float64'))
conv_factors = na.array(conv_factors).transpose()
self.conv_factors = conv_factors
- rv = self.pf.h.io._read_particles(
+ rvs = self.pf.h.io._read_particles(
fields_to_read, rtype, args, grid_list, count_list,
conv_factors)
- for field, v in zip(fields, rv): self[field] = v
+ if len(fields) == 1: return rvs[0]
+ return rvs
class ParticleIOHandlerRegion(ParticleIOHandlerImplemented):
periodic = False
Modified: trunk/yt/lagos/setup.py
==============================================================================
--- trunk/yt/lagos/setup.py (original)
+++ trunk/yt/lagos/setup.py Sat Apr 10 18:06:27 2010
@@ -5,10 +5,37 @@
import os.path
def check_for_hdf5():
+ # First up: HDF5_DIR in environment
if "HDF5_DIR" in os.environ:
- return os.environ["HDF5_DIR"]
+ hdf5_dir = os.environ["HDF5_DIR"]
+ hdf5_inc = os.path.join(hdf5_dir, "include")
+ hdf5_lib = os.path.join(hdf5_dir, "lib")
+ print "HDF5_LOCATION: HDF5_DIR: %s, %s" % (hdf5_inc, hdf5_lib)
+ return (hdf5_inc, hdf5_lib)
+ # Next up, we try hdf5.cfg
elif os.path.exists("hdf5.cfg"):
- return open("hdf5.cfg").read().strip().rstrip()
+ hdf5_dir = open("hdf5.cfg").read().strip()
+ hdf5_inc = os.path.join(hdf5_dir, "include")
+ hdf5_lib = os.path.join(hdf5_dir, "lib")
+ print "HDF5_LOCATION: hdf5.cfg: %s, %s" % (hdf5_inc, hdf5_lib)
+ return (hdf5_inc, hdf5_lib)
+ # Now we see if ctypes can help us:
+ try:
+ import ctypes.util
+ hdf5_libfile = ctypes.util.find_library("hdf5")
+ if hdf5_libfile is not None and os.path.isfile(hdf5_libfile):
+ # Now we've gotten a library, but we'll need to figure out the
+ # includes if this is going to work. It feels like there is a
+ # better way to pull off two directory names.
+ hdf5_dir = os.path.dirname(os.path.dirname(hdf5_libfile))
+ if os.path.isdir(os.path.join(hdf5_dir, "include")) and \
+ os.path.isfile(os.path.join(hdf5_dir, "include", "hdf5.h")):
+ hdf5_inc = os.path.join(hdf5_dir, "include")
+ hdf5_lib = os.path.join(hdf5_dir, "lib")
+ print "HDF5_LOCATION: HDF5 found in: %s, %s" % (hdf5_inc, hdf5_lib)
+ return hdf5_inc, hdf5_lib
+ except ImportError:
+ pass
print "Reading HDF5 location from hdf5.cfg failed."
print "Please place the base directory of your HDF5 install in hdf5.cfg and restart."
print "(ex: \"echo '/usr/local/' > hdf5.cfg\" )"
@@ -23,15 +50,11 @@
config.add_subpackage("hop")
config.add_subpackage("fof")
config.add_subpackage("parallelHOP")
- H5dir = check_for_hdf5()
- if H5dir is not None:
- include_dirs=[os.path.join(H5dir,"include")]
- library_dirs=[os.path.join(H5dir,"lib")]
- config.add_extension("HDF5LightReader", "yt/lagos/HDF5LightReader.c",
- define_macros=[("H5_USE_16_API",True)],
- libraries=["m","hdf5"],
- library_dirs=library_dirs, include_dirs=include_dirs)
- # Uncomment the next two lines if you want particle_density support
- #config.add_extension("cic_deposit", ["yt/lagos/enzo_routines/cic_deposit.pyf",
- # "yt/lagos/enzo_routines/cic_deposit.f"])
+ hdf5_inc, hdf5_lib = check_for_hdf5()
+ include_dirs=[hdf5_inc]
+ library_dirs=[hdf5_lib]
+ config.add_extension("HDF5LightReader", "yt/lagos/HDF5LightReader.c",
+ define_macros=[("H5_USE_16_API",True)],
+ libraries=["m","hdf5"],
+ library_dirs=library_dirs, include_dirs=include_dirs)
return config
Modified: trunk/yt/mods.py
==============================================================================
--- trunk/yt/mods.py (original)
+++ trunk/yt/mods.py Sat Apr 10 18:06:27 2010
@@ -72,7 +72,7 @@
import yt.funcs
-from yt.convenience import all_pfs, max_spheres, load
+from yt.convenience import all_pfs, max_spheres, load, projload
# Some convenience functions to ease our time running scripts
# from the command line
More information about the yt-svn
mailing list