[yt-svn] commit/yt: 9 new changesets
Bitbucket
commits-noreply at bitbucket.org
Wed Dec 5 08:00:21 PST 2012
9 new commits in yt:
https://bitbucket.org/yt_analysis/yt/changeset/da614f713751/
changeset: da614f713751
branch: yt
user: MatthewTurk
date: 2012-12-04 22:24:31
summary: Removing unused imports in profile_plotter
affected #: 1 file
diff -r 939cff5bd00cba49a175f99547fe27d3e416691a -r da614f7137515b0a2b9400fd545064ad780dd69d yt/visualization/profile_plotter.py
--- a/yt/visualization/profile_plotter.py
+++ b/yt/visualization/profile_plotter.py
@@ -36,7 +36,6 @@
from yt.data_objects.profiles import \
BinnedProfile1D, \
BinnedProfile2D
-from .plot_types import ProfilePlot, PhasePlot
from .tick_locators import LogLocator, LinearLocator
from yt.utilities.logger import ytLogger as mylog
https://bitbucket.org/yt_analysis/yt/changeset/6283ac4e5864/
changeset: 6283ac4e5864
branch: yt
user: MatthewTurk
date: 2012-12-04 23:15:03
summary: Adding ability to sample AMRSurface objects at vertices. This enables .PLY
files to be directly exported to SketchFab.
affected #: 3 files
diff -r da614f7137515b0a2b9400fd545064ad780dd69d -r 6283ac4e586443623ad8ed5e18cdbe1b6967e47b yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -4214,12 +4214,13 @@
self.data_source = data_source
self.surface_field = surface_field
self.field_value = field_value
+ self.vertex_samples = YTFieldData()
center = data_source.get_field_parameter("center")
AMRData.__init__(self, center = center, fields = None, pf =
data_source.pf)
self._grids = self.data_source._grids.copy()
- def get_data(self, fields = None):
+ def get_data(self, fields = None, sample_type = "face"):
if isinstance(fields, list) and len(fields) > 1:
for field in fields: self.get_data(field)
return
@@ -4234,7 +4235,7 @@
pb.update(i)
my_verts = self._extract_isocontours_from_grid(
g, self.surface_field, self.field_value,
- fields)
+ fields, sample_type)
if fields is not None:
my_verts, svals = my_verts
samples.append(svals)
@@ -4247,19 +4248,25 @@
samples = np.concatenate(samples)
samples = self.comm.par_combine_object(samples, op='cat',
datatype='array')
- self[fields] = samples
+ if sample_type == "face":
+ self[fields] = samples
+ elif sample_type == "vertex":
+ self.vertex_samples[fields] = samples
+
@restore_grid_state
def _extract_isocontours_from_grid(self, grid, field, value,
- sample_values = None):
+ sample_values = None,
+ sample_type = "face"):
mask = self.data_source._get_cut_mask(grid) * grid.child_mask
vals = grid.get_vertex_centered_data(field, no_ghost = False)
if sample_values is not None:
svals = grid.get_vertex_centered_data(sample_values)
else:
svals = None
+ sample_type = {"face":1, "vertex":2}[sample_type]
my_verts = march_cubes_grid(value, vals, mask, grid.LeftEdge,
- grid.dds, svals)
+ grid.dds, svals, sample_type)
return my_verts
def calculate_flux(self, field_x, field_y, field_z, fluxing_field = None):
@@ -4343,7 +4350,7 @@
ff, mask, grid.LeftEdge, grid.dds)
def export_ply(self, filename, bounds = None, color_field = None,
- color_map = "algae", color_log = True):
+ color_map = "algae", color_log = True, sample_type = "face"):
r"""This exports the surface to the PLY format, suitable for visualization
in many different programs (e.g., MeshLab).
@@ -4374,58 +4381,78 @@
>>> surf.export_ply("my_galaxy.ply", bounds = bounds)
"""
if self.vertices is None:
- self.get_data(color_field)
- elif color_field is not None and color_field not in self.field_data:
- self[color_field]
- self._export_ply(filename, bounds, color_field, color_map, color_log)
+ self.get_data(color_field, sample_type)
+ elif color_field is not None:
+ if sample_type == "face" and \
+ color_field not in self.field_data:
+ self[color_field]
+ elif sample_type == "vertex" and \
+ color_field not in self.vertex_data:
+ self.get_data(color_field, sample_type)
+ self._export_ply(filename, bounds, color_field, color_map, color_log,
+ sample_type)
+
+ def _color_samples(self, cs, color_log, color_map, arr):
+ if color_log: cs = np.log10(cs)
+ mi, ma = cs.min(), cs.max()
+ cs = (cs - mi) / (ma - mi)
+ from yt.visualization.image_writer import map_to_colors
+ cs = map_to_colors(cs, color_map)
+ arr["red"][:] = cs[0,:,0]
+ arr["green"][:] = cs[0,:,1]
+ arr["blue"][:] = cs[0,:,2]
@parallel_root_only
def _export_ply(self, filename, bounds = None, color_field = None,
- color_map = "algae", color_log = True):
+ color_map = "algae", color_log = True, sample_type = "face"):
f = open(filename, "wb")
if bounds is None:
DLE = self.pf.domain_left_edge
DRE = self.pf.domain_right_edge
bounds = [(DLE[i], DRE[i]) for i in range(3)]
+ nv = self.vertices.shape[1]
+ vs = [("x", "<f"), ("y", "<f"), ("z", "<f"),
+ ("red", "uint8"), ("green", "uint8"), ("blue", "uint8") ]
fs = [("ni", "uint8"), ("v1", "<i4"), ("v2", "<i4"), ("v3", "<i4"),
("red", "uint8"), ("green", "uint8"), ("blue", "uint8") ]
- v = np.empty((self.vertices.shape[1], 3), "<f")
- for i in range(3):
- v[:,i] = self.vertices[i,:]
- np.subtract(v[:,i], bounds[i][0], v[:,i])
- w = bounds[i][1] - bounds[i][0]
- np.divide(v[:,i], w, v[:,i])
- np.subtract(v[:,i], 0.5, v[:,i]) # Center at origin.
f.write("ply\n")
f.write("format binary_little_endian 1.0\n")
- f.write("element vertex %s\n" % (v.shape[0]))
+ f.write("element vertex %s\n" % (nv))
f.write("property float x\n")
f.write("property float y\n")
f.write("property float z\n")
- f.write("element face %s\n" % (v.shape[0]/3))
+ if color_field is not None and sample_type == "vertex":
+ f.write("property uchar red\n")
+ f.write("property uchar green\n")
+ f.write("property uchar blue\n")
+ v = np.empty(self.vertices.shape[1], dtype=vs)
+ cs = self.vertex_samples[color_field]
+ self._color_samples(cs, color_log, color_map, v)
+ else:
+ v = np.empty(self.vertices.shape[1], dtype=vs[:3])
+ f.write("element face %s\n" % (nv/3))
f.write("property list uchar int vertex_indices\n")
- if color_field is not None:
+ if color_field is not None and sample_type == "face":
f.write("property uchar red\n")
f.write("property uchar green\n")
f.write("property uchar blue\n")
# Now we get our samples
cs = self[color_field]
- if color_log: cs = np.log10(cs)
- mi, ma = cs.min(), cs.max()
- cs = (cs - mi) / (ma - mi)
- from yt.visualization.image_writer import map_to_colors
- cs = map_to_colors(cs, color_map)
arr = np.empty(cs.shape[1], dtype=np.dtype(fs))
- arr["red"][:] = cs[0,:,0]
- arr["green"][:] = cs[0,:,1]
- arr["blue"][:] = cs[0,:,2]
+ self._color_samples(cs, color_log, color_map, arr)
else:
- arr = np.empty(v.shape[0]/3, np.dtype(fs[:-3]))
+ arr = np.empty(nv/3, np.dtype(fs[:-3]))
+ for i, ax in enumerate("xyz"):
+ v[ax][:] = self.vertices[i,:]
+ np.subtract(v[ax][:], bounds[i][0], v[ax][:])
+ w = bounds[i][1] - bounds[i][0]
+ np.divide(v[ax][:], w, v[ax][:])
+ np.subtract(v[ax][:], 0.5, v[ax][:]) # Center at origin.
f.write("end_header\n")
v.tofile(f)
arr["ni"][:] = 3
- vi = np.arange(v.shape[0], dtype="<i")
- vi.shape = (v.shape[0]/3, 3)
+ vi = np.arange(nv, dtype="<i")
+ vi.shape = (nv/3, 3)
arr["v1"][:] = vi[:,0]
arr["v2"][:] = vi[:,1]
arr["v3"][:] = vi[:,2]
diff -r da614f7137515b0a2b9400fd545064ad780dd69d -r 6283ac4e586443623ad8ed5e18cdbe1b6967e47b yt/data_objects/tests/test_fluxes.py
--- a/yt/data_objects/tests/test_fluxes.py
+++ b/yt/data_objects/tests/test_fluxes.py
@@ -12,3 +12,10 @@
flux = surf.calculate_flux("Ones", "Zeros", "Zeros", "Ones")
yield assert_almost_equal, flux, 1.0, 12
+def test_sampling():
+ pf = fake_random_pf(64, nprocs = 4)
+ dd = pf.h.all_data()
+ for i, ax in enumerate('xyz'):
+ surf = pf.h.surface(dd, ax, 0.51)
+ surf.get_data(ax, "vertex")
+ yield assert_equal, surf.vertex_samples[ax], surf.vertices[i,:]
diff -r da614f7137515b0a2b9400fd545064ad780dd69d -r 6283ac4e586443623ad8ed5e18cdbe1b6967e47b yt/utilities/lib/marching_cubes.pyx
--- a/yt/utilities/lib/marching_cubes.pyx
+++ b/yt/utilities/lib/marching_cubes.pyx
@@ -33,7 +33,7 @@
cdef struct Triangle:
Triangle *next
np.float64_t p[3][3]
- np.float64_t val
+ np.float64_t val[3] # Usually only use one value
cdef struct TriangleCollection:
int count
@@ -64,12 +64,14 @@
return count
cdef void FillTriangleValues(np.ndarray[np.float64_t, ndim=1] values,
- Triangle *first):
+ Triangle *first, int nskip = 1):
cdef Triangle *this = first
cdef Triangle *last
cdef int i = 0
+ cdef int j
while this != NULL:
- values[i] = this.val
+ for j in range(nskip):
+ values[i*nskip + j] = this.val[j]
i += 1
last = this
this = this.next
@@ -463,7 +465,7 @@
np.ndarray[np.int32_t, ndim=3] mask,
np.ndarray[np.float64_t, ndim=1] left_edge,
np.ndarray[np.float64_t, ndim=1] dxs,
- obj_sample = None):
+ obj_sample = None, int sample_type = 1):
cdef int dims[3]
cdef int i, j, k, n, m, nt
cdef int offset
@@ -478,7 +480,7 @@
if obj_sample is not None:
sample = obj_sample
sdata = <np.float64_t *> sample.data
- do_sample = 1
+ do_sample = sample_type # 1 for face, 2 for vertex
else:
do_sample = 0
for i in range(3):
@@ -502,13 +504,16 @@
offset_fill(dims, intdata, gv)
nt = march_cubes(gv, isovalue, dds, pos[0], pos[1], pos[2],
&triangles)
- if do_sample == 1 and nt > 0:
+ if nt == 0 or do_sample == 0:
+ pos[2] += dds[2]
+ continue
+ if last == NULL and triangles.first != NULL:
+ current = triangles.first
+ last = NULL
+ elif last != NULL:
+ current = last.next
+ if do_sample == 1:
# At each triangle's center, sample our secondary field
- if last == NULL and triangles.first != NULL:
- current = triangles.first
- last = NULL
- elif last != NULL:
- current = last.next
while current != NULL:
for n in range(3):
point[n] = 0.0
@@ -517,24 +522,38 @@
point[m] += (current.p[n][m]-pos[m])*idds[m]
for n in range(3):
point[n] /= 3.0
- current.val = offset_interpolate(dims, point,
+ current.val[0] = offset_interpolate(dims, point,
sdata + offset)
last = current
if current.next == NULL: break
current = current.next
+ elif do_sample == 2:
+ while current != NULL:
+ for n in range(3):
+ for m in range(3):
+ point[m] = (current.p[n][m]-pos[m])*idds[m]
+ current.val[n] = offset_interpolate(dims,
+ point, sdata + offset)
+ last = current
+ if current.next == NULL: break
+ current = current.next
pos[2] += dds[2]
pos[1] += dds[1]
pos[0] += dds[0]
# Hallo, we are all done.
cdef np.ndarray[np.float64_t, ndim=2] vertices
vertices = np.zeros((triangles.count*3,3), dtype='float64')
+ if do_sample == 0:
+ FillAndWipeTriangles(vertices, triangles.first)
+ cdef int nskip
if do_sample == 1:
- sampled = np.zeros(triangles.count, dtype='float64')
- FillTriangleValues(sampled, triangles.first)
- FillAndWipeTriangles(vertices, triangles.first)
- return vertices, sampled
+ nskip = 1
+ elif do_sample == 2:
+ nskip = 3
+ sampled = np.zeros(triangles.count * nskip, dtype='float64')
+ FillTriangleValues(sampled, triangles.first, nskip)
FillAndWipeTriangles(vertices, triangles.first)
- return vertices
+ return vertices, sampled
@cython.boundscheck(False)
@cython.wraparound(False)
https://bitbucket.org/yt_analysis/yt/changeset/e9dbc2f6e977/
changeset: e9dbc2f6e977
branch: yt
user: MatthewTurk
date: 2012-12-04 23:39:53
summary: Add ability to export AMRSurface objects directly to SketchFab
affected #: 3 files
diff -r 6283ac4e586443623ad8ed5e18cdbe1b6967e47b -r e9dbc2f6e9776ffae1d4328167053b643510e961 yt/config.py
--- a/yt/config.py
+++ b/yt/config.py
@@ -62,7 +62,8 @@
answer_testing_tolerance = '3',
answer_testing_bitwise = 'False',
gold_standard_filename = 'gold003',
- local_standard_filename = 'local001'
+ local_standard_filename = 'local001',
+ sketchfab_api_key = 'None'
)
# Here is the upgrade. We're actually going to parse the file in its entirety
# here. Then, if it has any of the Forbidden Sections, it will be rewritten
diff -r 6283ac4e586443623ad8ed5e18cdbe1b6967e47b -r e9dbc2f6e9776ffae1d4328167053b643510e961 yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -4405,7 +4405,10 @@
@parallel_root_only
def _export_ply(self, filename, bounds = None, color_field = None,
color_map = "algae", color_log = True, sample_type = "face"):
- f = open(filename, "wb")
+ if isinstance(filename, file):
+ f = filename
+ else:
+ f = open(filename, "wb")
if bounds is None:
DLE = self.pf.domain_left_edge
DRE = self.pf.domain_right_edge
@@ -4438,7 +4441,7 @@
f.write("property uchar blue\n")
# Now we get our samples
cs = self[color_field]
- arr = np.empty(cs.shape[1], dtype=np.dtype(fs))
+ arr = np.empty(cs.shape[0], dtype=np.dtype(fs))
self._color_samples(cs, color_log, color_map, arr)
else:
arr = np.empty(nv/3, np.dtype(fs[:-3]))
@@ -4457,7 +4460,54 @@
arr["v2"][:] = vi[:,1]
arr["v3"][:] = vi[:,2]
arr.tofile(f)
- f.close()
+ if filename is not f:
+ f.close()
+
+ def export_sketchfab(self, title, description, api_key = None,
+ color_field = None, color_map = "algae",
+ color_log = True, bounds = None):
+ api_key = api_key or ytcfg.get("yt","sketchfab_api_key")
+ if api_key in (None, "None"):
+ raise YTNoAPIKey("SketchFab.com", "sketchfab_api_key")
+ import zipfile, json
+ from tempfile import TemporaryFile
+
+ ply_file = TemporaryFile()
+ self.export_ply(ply_file, bounds, color_field, color_map, color_log,
+ sample_type = "vertex")
+ ply_file.seek(0)
+
+ zfs = TemporaryFile()
+ with zipfile.ZipFile(zfs, "w", zipfile.ZIP_DEFLATED) as zf:
+ zf.writestr("yt_export.ply", ply_file.read())
+ zfs.seek(0)
+
+ zfs.seek(0)
+ data = {
+ 'title': title,
+ 'token': api_key,
+ 'description': description,
+ 'fileModel': zfs,
+ 'filenameModel': "yt_export.zip",
+ }
+ rv = self._upload_to_sketchfab(data)
+ rv = json.loads(rv)
+ upload_id = rv.get("id", None)
+ if id:
+ return "https://sketchfab.com/show/%s" % (id)
+ else:
+ return "Problem uploading."
+
+ @parallel_root_only
+ def _upload_to_sketchfab(self, data):
+ import urllib2
+ from yt.utilities.poster.encode import multipart_encode
+ from yt.utilities.poster.streaminghttp import register_openers
+ register_openers()
+ datamulti, headers = multipart_encode(data)
+ request = urllib2.Request("https://api.sketchfab.com/v1/models",
+ datamulti, headers)
+ return urllib2.urlopen(request).read()
def _reconstruct_object(*args, **kwargs):
pfid = args[0]
diff -r 6283ac4e586443623ad8ed5e18cdbe1b6967e47b -r e9dbc2f6e9776ffae1d4328167053b643510e961 yt/utilities/exceptions.py
--- a/yt/utilities/exceptions.py
+++ b/yt/utilities/exceptions.py
@@ -189,3 +189,12 @@
def __str__(self):
return "Enzo test output file (OutputLog) not generated for: " + \
"'%s'" % (self.testname) + ".\nTest did not complete."
+
+class YTNoAPIKey(YTException):
+ def __init__(self, service, config_name):
+ self.service = service
+ self.config_name
+
+ def __str__(self):
+ return "You need to set an API key for %s in ~/.yt/config as %s" % (
+ self.service, self.config_name)
https://bitbucket.org/yt_analysis/yt/changeset/dd65df0782c0/
changeset: dd65df0782c0
branch: yt
user: MatthewTurk
date: 2012-12-04 23:44:33
summary: Adding docstring for export_sketchfab
affected #: 1 file
diff -r e9dbc2f6e9776ffae1d4328167053b643510e961 -r dd65df0782c05e4b9cefb7253414416b22dc1f92 yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -4466,6 +4466,64 @@
def export_sketchfab(self, title, description, api_key = None,
color_field = None, color_map = "algae",
color_log = True, bounds = None):
+ r"""This exports Surfaces to SketchFab.com, where they can be viewed
+ interactively in a web browser.
+
+ SketchFab.com is a proprietary web service that provides WebGL
+ rendering of models. This routine will use temporary files to
+ construct a compressed binary representation (in .PLY format) of the
+ Surface and any optional fields you specify and upload it to
+ SketchFab.com. It requires an API key, which can be found on your
+ SketchFab.com dashboard. You can either supply the API key to this
+ routine directly or you can place it in the variable
+ "sketchfab_api_key" in your ~/.yt/config file. This function is
+ parallel-safe.
+
+ Parameters
+ ----------
+ title : string
+ The title for the model on the website
+ description : string
+ How you want the model to be described on the website
+ api_key : string
+ Optional; defaults to using the one in the config file
+ color_field : string
+ If specified, the field by which the surface will be colored
+ color_map : string
+ The name of the color map to use to map the color field
+ color_log : bool
+ Should the field be logged before being mapped to RGB?
+ bounds : list of tuples
+ [ (xmin, xmax), (ymin, ymax), (zmin, zmax) ] within which the model
+ will be scaled and centered. Defaults to the full domain.
+
+ Returns
+ -------
+ URL : string
+ The URL at which your model can be viewed.
+
+ Examples
+ --------
+
+ from yt.mods import *
+ pf = load("redshift0058")
+ dd = pf.h.sphere("max", (200, "kpc"))
+ rho = 5e-27
+
+ bounds = [(dd.center[i] - 100.0/pf['kpc'],
+ dd.center[i] + 100.0/pf['kpc']) for i in range(3)]
+
+ surf = pf.h.surface(dd, "Density", rho)
+
+ rv = surf.export_sketchfab(
+ title = "Testing Upload",
+ description = "A simple test of the uploader",
+ color_field = "Temperature",
+ color_map = "hot",
+ color_log = True,
+ bounds = bounds
+ )
+ """
api_key = api_key or ytcfg.get("yt","sketchfab_api_key")
if api_key in (None, "None"):
raise YTNoAPIKey("SketchFab.com", "sketchfab_api_key")
https://bitbucket.org/yt_analysis/yt/changeset/a1e2ca635f2d/
changeset: a1e2ca635f2d
branch: yt
user: MatthewTurk
date: 2012-12-04 23:48:49
summary: Fixing json loader to address running in parallel issues
affected #: 1 file
diff -r dd65df0782c05e4b9cefb7253414416b22dc1f92 -r a1e2ca635f2db9276b73fef645c426a196d22f66 yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -4548,24 +4548,25 @@
'fileModel': zfs,
'filenameModel': "yt_export.zip",
}
- rv = self._upload_to_sketchfab(data)
- rv = json.loads(rv)
- upload_id = rv.get("id", None)
- if id:
- return "https://sketchfab.com/show/%s" % (id)
- else:
- return "Problem uploading."
+ self._upload_to_sketchfab(data)
@parallel_root_only
def _upload_to_sketchfab(self, data):
- import urllib2
+ import urllib2, json
from yt.utilities.poster.encode import multipart_encode
from yt.utilities.poster.streaminghttp import register_openers
register_openers()
datamulti, headers = multipart_encode(data)
request = urllib2.Request("https://api.sketchfab.com/v1/models",
datamulti, headers)
- return urllib2.urlopen(request).read()
+ rv = urllib2.urlopen(request).read()
+ rv = json.loads(rv)
+ upload_id = rv.get("id", None)
+ if upload_id:
+ mylog.info("Model uploaded to: https://sketchfab.com/show/%s", id)
+ else:
+ mylog.error("Problem uploading.")
+
def _reconstruct_object(*args, **kwargs):
pfid = args[0]
https://bitbucket.org/yt_analysis/yt/changeset/1ef5a5e19aa6/
changeset: 1ef5a5e19aa6
branch: yt
user: MatthewTurk
date: 2012-12-04 23:59:29
summary: Allowing return values from @parallel_root_only and using this in the SketchFab
uploader.
affected #: 2 files
diff -r a1e2ca635f2db9276b73fef645c426a196d22f66 -r 1ef5a5e19aa614326c36d0a4f79d9d8a6c08d011 yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -4548,7 +4548,9 @@
'fileModel': zfs,
'filenameModel': "yt_export.zip",
}
- self._upload_to_sketchfab(data)
+ upload_id = self._upload_to_sketchfab(data)
+ upload_id = self.comm.mpi_bcast(upload_id, root = 0)
+ return upload_id
@parallel_root_only
def _upload_to_sketchfab(self, data):
@@ -4561,11 +4563,13 @@
datamulti, headers)
rv = urllib2.urlopen(request).read()
rv = json.loads(rv)
- upload_id = rv.get("id", None)
+ upload_id = rv.get("result", {}).get("id", None)
if upload_id:
- mylog.info("Model uploaded to: https://sketchfab.com/show/%s", id)
+ mylog.info("Model uploaded to: https://sketchfab.com/show/%s",
+ upload_id)
else:
mylog.error("Problem uploading.")
+ return upload_id
def _reconstruct_object(*args, **kwargs):
diff -r a1e2ca635f2db9276b73fef645c426a196d22f66 -r 1ef5a5e19aa614326c36d0a4f79d9d8a6c08d011 yt/utilities/parallel_tools/parallel_analysis_interface.py
--- a/yt/utilities/parallel_tools/parallel_analysis_interface.py
+++ b/yt/utilities/parallel_tools/parallel_analysis_interface.py
@@ -252,9 +252,10 @@
@wraps(func)
def root_only(*args, **kwargs):
comm = _get_comm(args)
+ rv = None
if comm.rank == 0:
try:
- func(*args, **kwargs)
+ rv = func(*args, **kwargs)
all_clear = 1
except:
traceback.print_last()
@@ -263,6 +264,7 @@
all_clear = None
all_clear = comm.mpi_bcast(all_clear)
if not all_clear: raise RuntimeError
+ return rv
if parallel_capable: return root_only
return func
https://bitbucket.org/yt_analysis/yt/changeset/72666f2c08e0/
changeset: 72666f2c08e0
branch: yt
user: MatthewTurk
date: 2012-12-05 15:40:32
summary: Fixing a missing import of ytcfg
affected #: 1 file
diff -r 1ef5a5e19aa614326c36d0a4f79d9d8a6c08d011 -r 72666f2c08e064ae14e67c7d0c84ceb1675dc365 yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -38,6 +38,7 @@
import cStringIO
from yt.funcs import *
+from yt.config import ytcfg
from yt.data_objects.derived_quantities import GridChildMaskWrapper
from yt.data_objects.particle_io import particle_handler_registry
https://bitbucket.org/yt_analysis/yt/changeset/622ba345f041/
changeset: 622ba345f041
branch: yt
user: MatthewTurk
date: 2012-12-05 15:41:29
summary: Argh, fix dropped equal sign.
affected #: 1 file
diff -r 72666f2c08e064ae14e67c7d0c84ceb1675dc365 -r 622ba345f04176803f9ab50858a19aae4b388954 yt/utilities/exceptions.py
--- a/yt/utilities/exceptions.py
+++ b/yt/utilities/exceptions.py
@@ -193,7 +193,7 @@
class YTNoAPIKey(YTException):
def __init__(self, service, config_name):
self.service = service
- self.config_name
+ self.config_name = config_name
def __str__(self):
return "You need to set an API key for %s in ~/.yt/config as %s" % (
https://bitbucket.org/yt_analysis/yt/changeset/f8f64e456c32/
changeset: f8f64e456c32
branch: yt
user: MatthewTurk
date: 2012-12-05 16:59:16
summary: For high-precision calculations, we want to apply bounding box estimates before
casting to float32 in the PLY export in AMRSurface.
affected #: 1 file
diff -r 622ba345f04176803f9ab50858a19aae4b388954 -r f8f64e456c322873b011f1a22b313a066e59cb4a yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -4447,11 +4447,13 @@
else:
arr = np.empty(nv/3, np.dtype(fs[:-3]))
for i, ax in enumerate("xyz"):
- v[ax][:] = self.vertices[i,:]
- np.subtract(v[ax][:], bounds[i][0], v[ax][:])
+ # Do the bounds first since we cast to f32
+ tmp = self.vertices[i,:]
+ np.subtract(tmp, bounds[i][0], tmp)
w = bounds[i][1] - bounds[i][0]
- np.divide(v[ax][:], w, v[ax][:])
- np.subtract(v[ax][:], 0.5, v[ax][:]) # Center at origin.
+ np.divide(tmp, w, tmp)
+ np.subtract(tmp, 0.5, tmp) # Center at origin.
+ v[ax][:] = tmp
f.write("end_header\n")
v.tofile(f)
arr["ni"][:] = 3
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
this message because the commit notification service is enabled for
the recipient of this email.
More information about the yt-svn
mailing list