[yt-svn] commit/yt: 6 new changesets
Bitbucket
commits-noreply at bitbucket.org
Tue Dec 4 07:20:27 PST 2012
6 new commits in yt:
https://bitbucket.org/yt_analysis/yt/changeset/d43a9e0a0d78/
changeset: d43a9e0a0d78
branch: yt
user: MatthewTurk
date: 2012-11-30 21:47:03
summary: Initial import of the AMRSurface object. docstrings and fluxes to follow.
affected #: 1 file
diff -r c9b4cd40d3d4acd631614130811a79bd44898dfb -r d43a9e0a0d78218f9bd732e69029e6384d816479 yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -35,6 +35,7 @@
import exceptions
import itertools
import shelve
+import cStringIO
from yt.funcs import *
@@ -48,7 +49,7 @@
DataCubeRefine, DataCubeReplace, FillRegion, FillBuffer
from yt.utilities.definitions import axis_names, x_dict, y_dict
from yt.utilities.parallel_tools.parallel_analysis_interface import \
- ParallelAnalysisInterface
+ ParallelAnalysisInterface, parallel_root_only
from yt.utilities.linear_interpolators import \
UnilinearFieldInterpolator, \
BilinearFieldInterpolator, \
@@ -4162,6 +4163,143 @@
self._cut_masks[grid.id] = this_cut_mask
return this_cut_mask
+class AMRSurfaceBase(AMRData, ParallelAnalysisInterface):
+ _type_name = "surface"
+ _con_args = ("data_source", "surface_field", "field_value")
+ vertices = None
+ def __init__(self, data_source, surface_field, field_value):
+ ParallelAnalysisInterface.__init__(self)
+ self.data_source = data_source
+ self.surface_field = surface_field
+ self.field_value = field_value
+ center = data_source.get_field_parameter("center")
+ AMRData.__init__(self, center = center, fields = None, pf =
+ data_source.pf)
+ self._grids = self.data_source._grids.copy()
+
+ def get_data(self, fields = None):
+ if isinstance(fields, list) and len(fields) > 1:
+ for field in fields: self.get_data(field)
+ return
+ elif isinstance(fields, list):
+ fields = fields[0]
+ # Now we have a "fields" value that is either a string or None
+ pb = get_pbar("Extracting (sampling: %s)" % fields,
+ len(list(self._get_grid_objs())))
+ verts = []
+ samples = []
+ for i,g in enumerate(self._get_grid_objs()):
+ pb.update(i)
+ my_verts = self._extract_isocontours_from_grid(
+ g, self.surface_field, self.field_value,
+ fields)
+ if fields is not None:
+ my_verts, svals = my_verts
+ samples.append(svals)
+ verts.append(my_verts)
+ pb.finish()
+ verts = np.concatenate(verts).transpose()
+ verts = self.comm.par_combine_object(verts, op='cat', datatype='array')
+ self.vertices = verts
+ if fields is not None:
+ samples = np.concatenate(samples)
+ samples = self.comm.par_combine_object(samples, op='cat',
+ datatype='array')
+ self[fields] = samples
+
+ @restore_grid_state
+ def _extract_isocontours_from_grid(self, grid, field, value,
+ sample_values = None):
+ mask = self.data_source._get_cut_mask(grid) * grid.child_mask
+ vals = grid.get_vertex_centered_data(field, no_ghost = False)
+ if sample_values is not None:
+ svals = grid.get_vertex_centered_data(sample_values)
+ else:
+ svals = None
+ my_verts = march_cubes_grid(value, vals, mask, grid.LeftEdge,
+ grid.dds, svals)
+ return my_verts
+
+ @restore_grid_state
+ def _calculate_flux_in_grid(self, grid, field, value,
+ field_x, field_y, field_z, fluxing_field = None):
+ mask = self.data_source._get_cut_mask(grid) * grid.child_mask
+ vals = grid.get_vertex_centered_data(field)
+ if fluxing_field is None:
+ ff = np.ones(vals.shape, dtype="float64")
+ else:
+ ff = grid.get_vertex_centered_data(fluxing_field)
+ xv, yv, zv = [grid.get_vertex_centered_data(f) for f in
+ [field_x, field_y, field_z]]
+ return march_cubes_grid_flux(value, vals, xv, yv, zv,
+ ff, mask, grid.LeftEdge, grid.dds)
+
+ def export_ply(self, filename, bounds = None, color_field = None,
+ color_map = "algae", color_log = True):
+ if self.vertices is None:
+ self.get_data(color_field)
+ elif color_field is not None and color_field not in self.field_data:
+ self[color_field]
+ self._export_ply(filename, bounds, color_field, color_map, color_log)
+
+ @parallel_root_only
+ def _export_ply(self, filename, bounds = None, color_field = None,
+ color_map = "algae", color_log = True):
+ if self.comm.rank != 0:
+ return
+ if hasattr(filename, "write"):
+ f = filename
+ else:
+ f = open(filename, "wb")
+ if bounds is None:
+ DLE = self.pf.domain_left_edge
+ DRE = self.pf.domain_right_edge
+ bounds = [(DLE[i], DRE[i]) for i in range(3)]
+ fs = [("ni", "uint8"), ("v1", "<i4"), ("v2", "<i4"), ("v3", "<i4"),
+ ("red", "uint8"), ("green", "uint8"), ("blue", "uint8") ]
+ v = np.empty((self.vertices.shape[1], 3), "<f")
+ for i in range(3):
+ v[:,i] = self.vertices[i,:]
+ np.subtract(v[:,i], bounds[i][0], v[:,i])
+ w = bounds[i][1] - bounds[i][0]
+ np.divide(v[:,i], w, v[:,i])
+ f.write("ply\n")
+ f.write("format binary_little_endian 1.0\n")
+ f.write("element vertex %s\n" % (v.shape[0]))
+ f.write("property float x\n")
+ f.write("property float y\n")
+ f.write("property float z\n")
+ f.write("element face %s\n" % (v.shape[0]/3))
+ f.write("property list uchar int vertex_indices\n")
+ if color_field is not None:
+ f.write("property uchar red\n")
+ f.write("property uchar green\n")
+ f.write("property uchar blue\n")
+ # Now we get our samples
+ cs = self[color_field]
+ if color_log: cs = np.log10(cs)
+ mi, ma = cs.min(), cs.max()
+ cs = (cs - mi) / (ma - mi)
+ from yt.visualization.image_writer import map_to_colors
+ cs = map_to_colors(cs, color_map)
+ arr = np.empty(cs.shape[1], dtype=np.dtype(fs))
+ arr["red"][:] = cs[0,:,0]
+ arr["green"][:] = cs[0,:,1]
+ arr["blue"][:] = cs[0,:,2]
+ import pdb;pdb.set_trace()
+ else:
+ arr = np.empty(v.shape[0]/3, np.dtype(fs[:-3]))
+ f.write("end_header\n")
+ v.tofile(f)
+ arr["ni"][:] = 3
+ vi = np.arange(v.shape[0], dtype="<i")
+ vi.shape = (v.shape[0]/3, 3)
+ arr["v1"][:] = vi[:,0]
+ arr["v2"][:] = vi[:,1]
+ arr["v3"][:] = vi[:,2]
+ arr.tofile(f)
+ f.close()
+
def _reconstruct_object(*args, **kwargs):
pfid = args[0]
dtype = args[1]
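
As a quick sanity check on the binary layout _export_ply writes above (an ASCII
header, then little-endian float32 vertex triples, then uchar/int32 face
records), a minimal reader along the following lines can recover the vertex
array. This is an illustrative sketch only; read_ply_vertices is a hypothetical
helper and is not part of yt.

    import numpy as np

    def read_ply_vertices(filename):
        # Hypothetical helper (not part of yt): parse the ASCII header
        # written by _export_ply to find the vertex count, then read that
        # many little-endian float32 (x, y, z) triples after "end_header".
        with open(filename, "rb") as f:
            n_vertices = 0
            while True:
                line = f.readline().strip()
                if not line:                     # EOF guard
                    break
                if line.startswith(b"element vertex"):
                    n_vertices = int(line.split()[-1])
                if line == b"end_header":
                    break
            verts = np.fromfile(f, dtype="<f4", count=3 * n_vertices)
        return verts.reshape((n_vertices, 3))
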
https://bitbucket.org/yt_analysis/yt/changeset/18c7b0bc0b93/
changeset: 18c7b0bc0b93
branch: yt
user: MatthewTurk
date: 2012-11-30 22:08:30
summary: Adding docstrings, making flux work
affected #: 1 file
diff -r d43a9e0a0d78218f9bd732e69029e6384d816479 -r 18c7b0bc0b93e78f1acfdd27ada48e0c01a82c1f yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -4168,6 +4168,48 @@
_con_args = ("data_source", "surface_field", "field_value")
vertices = None
def __init__(self, data_source, surface_field, field_value):
+ r"""This surface object identifies isocontours on a cell-by-cell basis,
+ with no consideration of global connectedness, and returns the vertices
+ of the triangles in that isocontour.
+
+ This object simply returns the vertices of all the triangles
+ calculated by the marching cubes algorithm; for more complex
+ operations, such as identifying connected sets of cells above a given
+ threshold, see the extract_connected_sets function. This is more
+ useful for calculating, for instance, total isocontour area, or
+ visualizing in an external program (such as `MeshLab
+ <http://meshlab.sf.net>`_.) The object has the property .vertices
+ and will sample values if a field is requested. The values are
+ interpolated to the center of a given face.
+
+ Parameters
+ ----------
+ data_source : AMR3DDataObject
+ This is the object which will be used as a source
+ surface_field : string
+ Any field that can be obtained in a data object. This is the field
+ which will be isocontoured.
+ field_value : float
+ The value at which the isocontour should be calculated.
+
+ References
+ ----------
+
+ .. [1] Marching Cubes: http://en.wikipedia.org/wiki/Marching_cubes
+
+ Examples
+ --------
+ This will create a data object, find a nice value in the center, and
+ output the vertices to "triangles.obj" after rescaling them.
+
+ >>> sp = pf.h.sphere("max", (10, "kpc"))
+ >>> surf = pf.h.surface(sp, "Density", 5e-27)
+ >>> print surf["Temperature"]
+ >>> print surf.vertices
+ >>> bounds = [(sp.center[i] - 5.0/pf['kpc'],
+ ... sp.center[i] + 5.0/pf['kpc']) for i in range(3)]
+ >>> surf.export_ply("my_galaxy.ply", bounds = bounds)
+ """
ParallelAnalysisInterface.__init__(self)
self.data_source = data_source
self.surface_field = surface_field
@@ -4220,22 +4262,117 @@
grid.dds, svals)
return my_verts
+ def calculate_flux(self, field_x, field_y, field_z, fluxing_field = None):
+ r"""This calculates the flux over the surface.
+
+ This function will conduct marching cubes on all the cells in a given
+ data container (grid-by-grid), and then for each identified triangular
+ segment of an isocontour in a given cell, calculate the gradient (i.e.,
+ normal) in the isocontoured field, interpolate the local value of the
+ "fluxing" field, the area of the triangle, and then return:
+
+ area * local_flux_value * (n dot v)
+
+ Where area, local_flux_value, and the vector v are interpolated at the barycenter
+ (weighted by the vertex values) of the triangle. Note that this
+ specifically allows for the field fluxing across the surface to be
+ *different* from the field being contoured. If the fluxing_field is
+ not specified, it is assumed to be 1.0 everywhere, and the raw flux
+ with no local-weighting is returned.
+
+ Additionally, the returned flux is defined as flux *into* the surface,
+ not flux *out of* the surface.
+
+ Parameters
+ ----------
+ field_x : string
+ The x-component field
+ field_y : string
+ The y-component field
+ field_z : string
+ The z-component field
+ fluxing_field : string, optional
+ The field whose passage over the surface is of interest. If not
+ specified, assumed to be 1.0 everywhere.
+
+ Returns
+ -------
+ flux : float
+ The summed flux. Note that it is not currently scaled; this is
+ simply the code-unit area times the fields.
+
+ References
+ ----------
+
+ .. [1] Marching Cubes: http://en.wikipedia.org/wiki/Marching_cubes
+
+ Examples
+ --------
+
+ This will create a data object, find a nice value in the center, and
+ calculate the metal flux over it.
+
+ >>> sp = pf.h.sphere("max", (10, "kpc"))
+ >>> surf = pf.h.surface(sp, "Density", 5e-27)
+ >>> flux = surf.calculate_flux(
+ ... "x-velocity", "y-velocity", "z-velocity", "Metal_Density")
+ """
+ flux = 0.0
+ pb = get_pbar("Fluxing %s" % fluxing_field,
+ len(list(self._get_grid_objs())))
+ for i, g in enumerate(self._get_grid_objs()):
+ pb.update(i)
+ flux += self._calculate_flux_in_grid(g,
+ field_x, field_y, field_z, fluxing_field)
+ pb.finish()
+ flux = self.comm.mpi_allreduce(flux, op="sum")
+ return flux
+
@restore_grid_state
- def _calculate_flux_in_grid(self, grid, field, value,
+ def _calculate_flux_in_grid(self, grid,
field_x, field_y, field_z, fluxing_field = None):
mask = self.data_source._get_cut_mask(grid) * grid.child_mask
- vals = grid.get_vertex_centered_data(field)
+ vals = grid.get_vertex_centered_data(self.surface_field)
if fluxing_field is None:
ff = np.ones(vals.shape, dtype="float64")
else:
ff = grid.get_vertex_centered_data(fluxing_field)
xv, yv, zv = [grid.get_vertex_centered_data(f) for f in
[field_x, field_y, field_z]]
- return march_cubes_grid_flux(value, vals, xv, yv, zv,
+ return march_cubes_grid_flux(self.field_value, vals, xv, yv, zv,
ff, mask, grid.LeftEdge, grid.dds)
def export_ply(self, filename, bounds = None, color_field = None,
color_map = "algae", color_log = True):
+ r"""This exports the surface to the PLY format, suitable for visualization
+ in many different programs (e.g., MeshLab).
+
+ Parameters
+ ----------
+ filename : string
+ The file this will be exported to. This cannot be a file-like object.
+ bounds : list of tuples
+ The bounds the vertices will be normalized to. This is of the format:
+ [(xmin, xmax), (ymin, ymax), (zmin, zmax)]. Defaults to the full
+ domain.
+ color_field : string
+ Should a field be sampled and colormapped?
+ color_map : string
+ Which color map should be applied?
+ color_log : bool
+ Should the color field be logged before being mapped?
+
+ Examples
+ --------
+
+ >>> sp = pf.h.sphere("max", (10, "kpc"))
+ >>> surf = pf.h.surface(sp, "Density", 5e-27)
+ >>> print surf["Temperature"]
+ >>> print surf.vertices
+ >>> bounds = [(sp.center[i] - 5.0/pf['kpc'],
+ ... sp.center[i] + 5.0/pf['kpc']) for i in range(3)]
+ >>> surf.export_ply("my_galaxy.ply", bounds = bounds)
+ """
if self.vertices is None:
self.get_data(color_field)
elif color_field is not None and color_field not in self.field_data:
@@ -4247,10 +4384,7 @@
color_map = "algae", color_log = True):
if self.comm.rank != 0:
return
- if hasattr(filename, "write"):
- f = filename
- else:
- f = open(filename, "wb")
+ f = open(filename, "wb")
if bounds is None:
DLE = self.pf.domain_left_edge
DRE = self.pf.domain_right_edge
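
The per-triangle quantity described in the calculate_flux docstring above,
area * local_flux_value * (n dot v), can be written out directly for a single
triangle. The sketch below is illustrative only (triangle_flux and its
arguments are hypothetical names, not the march_cubes_grid_flux
implementation), and it assumes the velocity vector and fluxing value have
already been sampled at the barycenter.

    import numpy as np

    def triangle_flux(p0, p1, p2, v, local_flux=1.0):
        # Flux through one triangle with vertices p0, p1, p2, given a
        # velocity vector v and a fluxing-field value at the barycenter.
        cross = np.cross(p1 - p0, p2 - p0)   # |cross| = 2 * area; direction = normal
        area = 0.5 * np.linalg.norm(cross)
        n = cross / np.linalg.norm(cross)    # unit normal of the triangle
        return area * local_flux * np.dot(n, v)
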
https://bitbucket.org/yt_analysis/yt/changeset/2a49fad97601/
changeset: 2a49fad97601
branch: yt
user: MatthewTurk
date: 2012-12-01 05:25:20
summary: Center PLY files at origin.
affected #: 1 file
diff -r 18c7b0bc0b93e78f1acfdd27ada48e0c01a82c1f -r 2a49fad97601992672cc652429d8509b6c4f6e55 yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -4382,8 +4382,6 @@
@parallel_root_only
def _export_ply(self, filename, bounds = None, color_field = None,
color_map = "algae", color_log = True):
- if self.comm.rank != 0:
- return
f = open(filename, "wb")
if bounds is None:
DLE = self.pf.domain_left_edge
@@ -4397,6 +4395,7 @@
np.subtract(v[:,i], bounds[i][0], v[:,i])
w = bounds[i][1] - bounds[i][0]
np.divide(v[:,i], w, v[:,i])
+ np.subtract(v[:,i], 0.5, v[:,i]) # Center at origin.
f.write("ply\n")
f.write("format binary_little_endian 1.0\n")
f.write("element vertex %s\n" % (v.shape[0]))
@@ -4420,7 +4419,6 @@
arr["red"][:] = cs[0,:,0]
arr["green"][:] = cs[0,:,1]
arr["blue"][:] = cs[0,:,2]
- import pdb;pdb.set_trace()
else:
arr = np.empty(v.shape[0]/3, np.dtype(fs[:-3]))
f.write("end_header\n")
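
With the centering change above, a vertex coordinate x with bounds
(xmin, xmax) is mapped to (x - xmin) / (xmax - xmin) - 0.5, so the requested
bounding box spans [-0.5, 0.5] along each axis. A quick check of that mapping
(the bounds here are arbitrary illustrative values):

    import numpy as np

    xmin, xmax = 2.0, 6.0                     # arbitrary example bounds
    x = np.array([2.0, 4.0, 6.0])             # lower edge, center, upper edge
    print((x - xmin) / (xmax - xmin) - 0.5)   # -> [-0.5  0.   0.5]
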
https://bitbucket.org/yt_analysis/yt/changeset/e1b25d00dd32/
changeset: e1b25d00dd32
branch: yt
user: MatthewTurk
date: 2012-12-03 18:44:14
summary: Adding a few tests for the AMR surface fluxing.
affected #: 1 file
diff -r 2a49fad97601992672cc652429d8509b6c4f6e55 -r e1b25d00dd3253ae3155984ac72d67e40a5c5d59 yt/data_objects/universal_fields.py
--- a/yt/data_objects/universal_fields.py
+++ b/yt/data_objects/universal_fields.py
@@ -131,6 +131,13 @@
add_field("OnesOverDx", function=_OnesOverDx,
display_field=False)
+def _Zeros(field, data):
+ return np.zeros(data.ActiveDimensions, dtype='float64')
+add_field("Zeros", function=_Zeros,
+ validators=[ValidateSpatial(0)],
+ projection_conversion="unitary",
+ display_field = False)
+
def _Ones(field, data):
return np.ones(data.ActiveDimensions, dtype='float64')
add_field("Ones", function=_Ones,
https://bitbucket.org/yt_analysis/yt/changeset/882e4a75aec5/
changeset: 882e4a75aec5
branch: yt
user: MatthewTurk
date: 2012-12-03 19:26:05
summary: Forgot to add the flux tests!
affected #: 1 file
diff -r e1b25d00dd3253ae3155984ac72d67e40a5c5d59 -r 882e4a75aec5d1eeda5ee108abf7e7d3dbfc6f6a yt/data_objects/tests/test_fluxes.py
--- /dev/null
+++ b/yt/data_objects/tests/test_fluxes.py
@@ -0,0 +1,14 @@
+from yt.testing import *
+
+def setup():
+ from yt.config import ytcfg
+ ytcfg["yt","__withintesting"] = "True"
+
+def test_flux_calculation():
+ pf = fake_random_pf(64, nprocs = 4)
+ dd = pf.h.all_data()
+ surf = pf.h.surface(dd, "x", 0.51)
+ yield assert_equal, surf["x"], 0.51
+ flux = surf.calculate_flux("Ones", "Zeros", "Zeros", "Ones")
+ yield assert_almost_equal, flux, 1.0, 12
+
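
The expected value of 1.0 in the test above follows from the setup, assuming
fake_random_pf spans the unit cube: the isosurface of the coordinate field "x"
at 0.51 is a plane of unit area with its normal along x, the velocity field
(Ones, Zeros, Zeros) is the unit x vector, and the Ones fluxing field is 1
everywhere, so the integrand reduces to the plane's area. A back-of-the-envelope
check:

    area = 1.0           # unit-square plane x = 0.51 inside the unit cube
    n_dot_v = 1.0        # surface normal along x, velocity = (Ones, Zeros, Zeros)
    fluxing_value = 1.0  # the "Ones" fluxing field
    print(area * fluxing_value * n_dot_v)   # -> 1.0, the value asserted above
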
https://bitbucket.org/yt_analysis/yt/changeset/6bc289da0cde/
changeset: 6bc289da0cde
branch: yt
user: brittonsmith
date: 2012-12-04 16:20:25
summary: Merged in MatthewTurk/yt (pull request #360)
affected #: 3 files
diff -r 3ffe2b24d1a76ebd778e7a47b4caaab4c0eda90c -r 6bc289da0cdeef6a676e5d3cf28a4ca7d961fc4d yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -35,6 +35,7 @@
import exceptions
import itertools
import shelve
+import cStringIO
from yt.funcs import *
@@ -48,7 +49,7 @@
DataCubeRefine, DataCubeReplace, FillRegion, FillBuffer
from yt.utilities.definitions import axis_names, x_dict, y_dict
from yt.utilities.parallel_tools.parallel_analysis_interface import \
- ParallelAnalysisInterface
+ ParallelAnalysisInterface, parallel_root_only
from yt.utilities.linear_interpolators import \
UnilinearFieldInterpolator, \
BilinearFieldInterpolator, \
@@ -4162,6 +4163,275 @@
self._cut_masks[grid.id] = this_cut_mask
return this_cut_mask
+class AMRSurfaceBase(AMRData, ParallelAnalysisInterface):
+ _type_name = "surface"
+ _con_args = ("data_source", "surface_field", "field_value")
+ vertices = None
+ def __init__(self, data_source, surface_field, field_value):
+ r"""This surface object identifies isocontours on a cell-by-cell basis,
+ with no consideration of global connectedness, and returns the vertices
+ of the triangles in that isocontour.
+
+ This object simply returns the vertices of all the triangles
+ calculated by the marching cubes algorithm; for more complex
+ operations, such as identifying connected sets of cells above a given
+ threshold, see the extract_connected_sets function. This is more
+ useful for calculating, for instance, total isocontour area, or
+ visualizing in an external program (such as `MeshLab
+ <http://meshlab.sf.net>`_.) The object has the property .vertices
+ and will sample values if a field is requested. The values are
+ interpolated to the center of a given face.
+
+ Parameters
+ ----------
+ data_source : AMR3DDataObject
+ This is the object which will be used as a source
+ surface_field : string
+ Any field that can be obtained in a data object. This is the field
+ which will be isocontoured.
+ field_value : float
+ The value at which the isocontour should be calculated.
+
+ References
+ ----------
+
+ .. [1] Marching Cubes: http://en.wikipedia.org/wiki/Marching_cubes
+
+ Examples
+ --------
+ This will create a data object, find a nice value in the center, and
+ output the vertices to "triangles.obj" after rescaling them.
+
+ >>> sp = pf.h.sphere("max", (10, "kpc"))
+ >>> surf = pf.h.surface(sp, "Density", 5e-27)
+ >>> print surf["Temperature"]
+ >>> print surf.vertices
+ >>> bounds = [(sp.center[i] - 5.0/pf['kpc'],
+ ... sp.center[i] + 5.0/pf['kpc']) for i in range(3)]
+ >>> surf.export_ply("my_galaxy.ply", bounds = bounds)
+ """
+ ParallelAnalysisInterface.__init__(self)
+ self.data_source = data_source
+ self.surface_field = surface_field
+ self.field_value = field_value
+ center = data_source.get_field_parameter("center")
+ AMRData.__init__(self, center = center, fields = None, pf =
+ data_source.pf)
+ self._grids = self.data_source._grids.copy()
+
+ def get_data(self, fields = None):
+ if isinstance(fields, list) and len(fields) > 1:
+ for field in fields: self.get_data(field)
+ return
+ elif isinstance(fields, list):
+ fields = fields[0]
+ # Now we have a "fields" value that is either a string or None
+ pb = get_pbar("Extracting (sampling: %s)" % fields,
+ len(list(self._get_grid_objs())))
+ verts = []
+ samples = []
+ for i,g in enumerate(self._get_grid_objs()):
+ pb.update(i)
+ my_verts = self._extract_isocontours_from_grid(
+ g, self.surface_field, self.field_value,
+ fields)
+ if fields is not None:
+ my_verts, svals = my_verts
+ samples.append(svals)
+ verts.append(my_verts)
+ pb.finish()
+ verts = np.concatenate(verts).transpose()
+ verts = self.comm.par_combine_object(verts, op='cat', datatype='array')
+ self.vertices = verts
+ if fields is not None:
+ samples = np.concatenate(samples)
+ samples = self.comm.par_combine_object(samples, op='cat',
+ datatype='array')
+ self[fields] = samples
+
+ @restore_grid_state
+ def _extract_isocontours_from_grid(self, grid, field, value,
+ sample_values = None):
+ mask = self.data_source._get_cut_mask(grid) * grid.child_mask
+ vals = grid.get_vertex_centered_data(field, no_ghost = False)
+ if sample_values is not None:
+ svals = grid.get_vertex_centered_data(sample_values)
+ else:
+ svals = None
+ my_verts = march_cubes_grid(value, vals, mask, grid.LeftEdge,
+ grid.dds, svals)
+ return my_verts
+
+ def calculate_flux(self, field_x, field_y, field_z, fluxing_field = None):
+ r"""This calculates the flux over the surface.
+
+ This function will conduct marching cubes on all the cells in a given
+ data container (grid-by-grid), and then for each identified triangular
+ segment of an isocontour in a given cell, calculate the gradient (i.e.,
+ normal) in the isocontoured field, interpolate the local value of the
+ "fluxing" field, the area of the triangle, and then return:
+
+ area * local_flux_value * (n dot v)
+
+ Where area, local_flux_value, and the vector v are interpolated at the barycenter
+ (weighted by the vertex values) of the triangle. Note that this
+ specifically allows for the field fluxing across the surface to be
+ *different* from the field being contoured. If the fluxing_field is
+ not specified, it is assumed to be 1.0 everywhere, and the raw flux
+ with no local-weighting is returned.
+
+ Additionally, the returned flux is defined as flux *into* the surface,
+ not flux *out of* the surface.
+
+ Parameters
+ ----------
+ field_x : string
+ The x-component field
+ field_y : string
+ The y-component field
+ field_z : string
+ The z-component field
+ fluxing_field : string, optional
+ The field whose passage over the surface is of interest. If not
+ specified, assumed to be 1.0 everywhere.
+
+ Returns
+ -------
+ flux : float
+ The summed flux. Note that it is not currently scaled; this is
+ simply the code-unit area times the fields.
+
+ References
+ ----------
+
+ .. [1] Marching Cubes: http://en.wikipedia.org/wiki/Marching_cubes
+
+ Examples
+ --------
+
+ This will create a data object, find a nice value in the center, and
+ calculate the metal flux over it.
+
+ >>> sp = pf.h.sphere("max", (10, "kpc"))
+ >>> surf = pf.h.surface(sp, "Density", 5e-27)
+ >>> flux = surf.calculate_flux(
+ ... "x-velocity", "y-velocity", "z-velocity", "Metal_Density")
+ """
+ flux = 0.0
+ pb = get_pbar("Fluxing %s" % fluxing_field,
+ len(list(self._get_grid_objs())))
+ for i, g in enumerate(self._get_grid_objs()):
+ pb.update(i)
+ flux += self._calculate_flux_in_grid(g,
+ field_x, field_y, field_z, fluxing_field)
+ pb.finish()
+ flux = self.comm.mpi_allreduce(flux, op="sum")
+ return flux
+
+ @restore_grid_state
+ def _calculate_flux_in_grid(self, grid,
+ field_x, field_y, field_z, fluxing_field = None):
+ mask = self.data_source._get_cut_mask(grid) * grid.child_mask
+ vals = grid.get_vertex_centered_data(self.surface_field)
+ if fluxing_field is None:
+ ff = np.ones(vals.shape, dtype="float64")
+ else:
+ ff = grid.get_vertex_centered_data(fluxing_field)
+ xv, yv, zv = [grid.get_vertex_centered_data(f) for f in
+ [field_x, field_y, field_z]]
+ return march_cubes_grid_flux(self.field_value, vals, xv, yv, zv,
+ ff, mask, grid.LeftEdge, grid.dds)
+
+ def export_ply(self, filename, bounds = None, color_field = None,
+ color_map = "algae", color_log = True):
+ r"""This exports the surface to the PLY format, suitable for visualization
+ in many different programs (e.g., MeshLab).
+
+ Parameters
+ ----------
+ filename : string
+ The file this will be exported to. This cannot be a file-like object.
+ bounds : list of tuples
+ The bounds the vertices will be normalized to. This is of the format:
+ [(xmin, xmax), (ymin, ymax), (zmin, zmax)]. Defaults to the full
+ domain.
+ color_field : string
+ Should a field be sampled and colormapped?
+ color_map : string
+ Which color map should be applied?
+ color_log : bool
+ Should the color field be logged before being mapped?
+
+ Examples
+ --------
+
+ >>> sp = pf.h.sphere("max", (10, "kpc"))
+ >>> surf = pf.h.surface(sp, "Density", 5e-27)
+ >>> print surf["Temperature"]
+ >>> print surf.vertices
+ >>> bounds = [(sp.center[i] - 5.0/pf['kpc'],
+ ... sp.center[i] + 5.0/pf['kpc']) for i in range(3)]
+ >>> surf.export_ply("my_galaxy.ply", bounds = bounds)
+ """
+ if self.vertices is None:
+ self.get_data(color_field)
+ elif color_field is not None and color_field not in self.field_data:
+ self[color_field]
+ self._export_ply(filename, bounds, color_field, color_map, color_log)
+
+ @parallel_root_only
+ def _export_ply(self, filename, bounds = None, color_field = None,
+ color_map = "algae", color_log = True):
+ f = open(filename, "wb")
+ if bounds is None:
+ DLE = self.pf.domain_left_edge
+ DRE = self.pf.domain_right_edge
+ bounds = [(DLE[i], DRE[i]) for i in range(3)]
+ fs = [("ni", "uint8"), ("v1", "<i4"), ("v2", "<i4"), ("v3", "<i4"),
+ ("red", "uint8"), ("green", "uint8"), ("blue", "uint8") ]
+ v = np.empty((self.vertices.shape[1], 3), "<f")
+ for i in range(3):
+ v[:,i] = self.vertices[i,:]
+ np.subtract(v[:,i], bounds[i][0], v[:,i])
+ w = bounds[i][1] - bounds[i][0]
+ np.divide(v[:,i], w, v[:,i])
+ np.subtract(v[:,i], 0.5, v[:,i]) # Center at origin.
+ f.write("ply\n")
+ f.write("format binary_little_endian 1.0\n")
+ f.write("element vertex %s\n" % (v.shape[0]))
+ f.write("property float x\n")
+ f.write("property float y\n")
+ f.write("property float z\n")
+ f.write("element face %s\n" % (v.shape[0]/3))
+ f.write("property list uchar int vertex_indices\n")
+ if color_field is not None:
+ f.write("property uchar red\n")
+ f.write("property uchar green\n")
+ f.write("property uchar blue\n")
+ # Now we get our samples
+ cs = self[color_field]
+ if color_log: cs = np.log10(cs)
+ mi, ma = cs.min(), cs.max()
+ cs = (cs - mi) / (ma - mi)
+ from yt.visualization.image_writer import map_to_colors
+ cs = map_to_colors(cs, color_map)
+ arr = np.empty(cs.shape[1], dtype=np.dtype(fs))
+ arr["red"][:] = cs[0,:,0]
+ arr["green"][:] = cs[0,:,1]
+ arr["blue"][:] = cs[0,:,2]
+ else:
+ arr = np.empty(v.shape[0]/3, np.dtype(fs[:-3]))
+ f.write("end_header\n")
+ v.tofile(f)
+ arr["ni"][:] = 3
+ vi = np.arange(v.shape[0], dtype="<i")
+ vi.shape = (v.shape[0]/3, 3)
+ arr["v1"][:] = vi[:,0]
+ arr["v2"][:] = vi[:,1]
+ arr["v3"][:] = vi[:,2]
+ arr.tofile(f)
+ f.close()
+
def _reconstruct_object(*args, **kwargs):
pfid = args[0]
dtype = args[1]
diff -r 3ffe2b24d1a76ebd778e7a47b4caaab4c0eda90c -r 6bc289da0cdeef6a676e5d3cf28a4ca7d961fc4d yt/data_objects/tests/test_fluxes.py
--- /dev/null
+++ b/yt/data_objects/tests/test_fluxes.py
@@ -0,0 +1,14 @@
+from yt.testing import *
+
+def setup():
+ from yt.config import ytcfg
+ ytcfg["yt","__withintesting"] = "True"
+
+def test_flux_calculation():
+ pf = fake_random_pf(64, nprocs = 4)
+ dd = pf.h.all_data()
+ surf = pf.h.surface(dd, "x", 0.51)
+ yield assert_equal, surf["x"], 0.51
+ flux = surf.calculate_flux("Ones", "Zeros", "Zeros", "Ones")
+ yield assert_almost_equal, flux, 1.0, 12
+
diff -r 3ffe2b24d1a76ebd778e7a47b4caaab4c0eda90c -r 6bc289da0cdeef6a676e5d3cf28a4ca7d961fc4d yt/data_objects/universal_fields.py
--- a/yt/data_objects/universal_fields.py
+++ b/yt/data_objects/universal_fields.py
@@ -131,6 +131,13 @@
add_field("OnesOverDx", function=_OnesOverDx,
display_field=False)
+def _Zeros(field, data):
+ return np.zeros(data.ActiveDimensions, dtype='float64')
+add_field("Zeros", function=_Zeros,
+ validators=[ValidateSpatial(0)],
+ projection_conversion="unitary",
+ display_field = False)
+
def _Ones(field, data):
return np.ones(data.ActiveDimensions, dtype='float64')
add_field("Ones", function=_Ones,
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.