[Yt-svn] commit/yt: 4 new changesets
Bitbucket
commits-noreply at bitbucket.org
Sat Sep 3 10:48:43 PDT 2011
4 new changesets in yt:
http://bitbucket.org/yt_analysis/yt/changeset/9c1623e4eba5/
changeset: 9c1623e4eba5
branch: yt
user: samskillman
date: 2011-09-02 21:53:12
summary: Adding initial go at command line yt render. Works, but could use some fiddling.
affected #: 1 file (3.9 KB)
--- a/yt/utilities/command_line.py Fri Sep 02 10:20:42 2011 -0400
+++ b/yt/utilities/command_line.py Fri Sep 02 13:53:12 2011 -0600
@@ -161,6 +161,37 @@
action="store_true",
dest="time", default=False,
help="Print time in years on image"),
+ contours = dict(short="", long="--contours",
+ action="store",type="int",
+ dest="contours", default=None,
+ help="Number of Contours for Rendering"),
+ contour_width = dict(short="", long="--contour_width",
+ action="store",type="float",
+ dest="contour_width", default=None,
+ help="Width of gaussians used for rendering."),
+ enhance = dict(short="", long="--enhance",
+ action="store_true",
+ dest="enhance", default=False,
+ help="Enhance!"),
+ range = dict(short="", long="--range",
+ action="store", type="float",
+ dest="range", default=None,
+ nargs=2,
+ help="Range, command separated"),
+ up = dict(short="", long="--up",
+ action="store", type="float",
+ dest="up", default=None,
+ nargs=3,
+ help="Up, command separated"),
+ viewpoint = dict(short="", long="--viewpoint",
+ action="store", type="float",
+ dest="viewpoint", default=[1., 1., 1.],
+ nargs=3,
+ help="Viewpoint, command separated"),
+ pixels = dict(short="", long="--pixels",
+ action="store",type="int",
+ dest="pixels", default=None,
+ help="Number of Pixels for Rendering"),
halos = dict(short="", long="--halos",
action="store", type="string",
dest="halos",default="multiple",
@@ -1448,6 +1479,89 @@
print
pprint.pprint(rv)
+ @add_cmd_options(["width", "unit", "center","enhance",'outputfn',
+ "field", "cmap", "contours", "viewpoint",
+ "pixels","up","range","log","contour_width"])
+ @check_args
+ def do_render(self, subcmd, opts, arg):
+ """
+ Create a simple volume rendering
+
+ ${cmd_usage}
+ ${cmd_option_list}
+ """
+ pf = _fix_pf(arg)
+ center = opts.center
+ if opts.center == (-1,-1,-1):
+ mylog.info("No center fed in; seeking.")
+ v, center = pf.h.find_max("Density")
+ elif opts.center is None:
+ center = 0.5*(pf.domain_left_edge + pf.domain_right_edge)
+ center = na.array(center)
+
+ L = opts.viewpoint
+ if L is None:
+ L = [1.]*3
+ L = na.array(opts.viewpoint)
+
+ unit = opts.unit
+ if unit is None:
+ unit = '1'
+ width = opts.width
+ if width is None:
+ width = 0.5*(pf.domain_right_edge - pf.domain_left_edge)
+ width /= pf[unit]
+
+ N = opts.pixels
+ if N is None:
+ N = 512
+
+ up = opts.up
+ if up is None:
+ up = [0.,0.,1.]
+
+ field = opts.field
+ if field is None:
+ field = 'Density'
+
+ log = opts.takelog
+ if log is None:
+ log = True
+
+ if opts.range is None:
+ mi, ma = pf.h.all_data().quantities['Extrema'](field)[0]
+ if log:
+ mi, ma = na.log10(mi), na.log10(ma)
+ else:
+ mi, ma = range[0], range[1]
+
+ n_contours = opts.contours
+ if n_contours is None:
+ n_contours = 7
+
+ contour_width = opts.contour_width
+
+ cmap = opts.cmap
+ if cmap is None:
+ cmap = 'jet'
+ tf = ColorTransferFunction((mi-2, ma+2))
+ tf.add_layers(n_contours,w=contour_width,col_bounds = (mi,ma), colormap=cmap)
+
+ cam = pf.h.camera(center, L, width, (N,N), transfer_function=tf)
+ image = cam.snapshot()
+
+ if opts.enhance:
+ for i in range(3):
+ image[:,:,i] = image[:,:,i]/(image[:,:,i].mean() + 5.*image[:,:,i].std())
+ image[image>1.0]=1.0
+
+ save_name = opts.output
+ if save_name is None:
+ save_name = "%s"%pf+"_"+field+"_rendering.png"
+ if not '.png' in save_name:
+ save_name += '.png'
+ write_bitmap(image,save_name)
+
def run_main():
for co in ["--parallel", "--paste"]:
if co in sys.argv: del sys.argv[sys.argv.index(co)]
http://bitbucket.org/yt_analysis/yt/changeset/044723760cf6/
changeset: 044723760cf6
branch: yt
user: samskillman
date: 2011-09-03 18:57:22
summary: Choose automatic min/max based on values in the region being rendered using 'yt render'
affected #: 1 file (54 bytes)
--- a/yt/utilities/command_line.py Fri Sep 02 13:53:12 2011 -0600
+++ b/yt/utilities/command_line.py Sat Sep 03 10:57:22 2011 -0600
@@ -1529,7 +1529,8 @@
log = True
if opts.range is None:
- mi, ma = pf.h.all_data().quantities['Extrema'](field)[0]
+ roi = pf.h.region(center, center-width, center+width)
+ mi, ma = roi.quantities['Extrema'](field)[0]
if log:
mi, ma = na.log10(mi), na.log10(ma)
else:
http://bitbucket.org/yt_analysis/yt/changeset/c4ded9f9a9d4/
changeset: c4ded9f9a9d4
branch: yt
user: samskillman
date: 2011-09-03 19:47:12
summary: "command"->"space" separated.
affected #: 1 file (8 bytes)
--- a/yt/utilities/command_line.py Sat Sep 03 10:57:22 2011 -0600
+++ b/yt/utilities/command_line.py Sat Sep 03 11:47:12 2011 -0600
@@ -91,7 +91,7 @@
action="store", type="float",
dest="center", default=None,
nargs=3,
- help="Center, command separated (-1 -1 -1 for max)"),
+ help="Center, space separated (-1 -1 -1 for max)"),
bn = dict(short="-b", long="--basename",
action="store", type="string",
dest="basename", default=None,
@@ -177,17 +177,17 @@
action="store", type="float",
dest="range", default=None,
nargs=2,
- help="Range, command separated"),
+ help="Range, space separated"),
up = dict(short="", long="--up",
action="store", type="float",
dest="up", default=None,
nargs=3,
- help="Up, command separated"),
+ help="Up, space separated"),
viewpoint = dict(short="", long="--viewpoint",
action="store", type="float",
dest="viewpoint", default=[1., 1., 1.],
nargs=3,
- help="Viewpoint, command separated"),
+ help="Viewpoint, space separated"),
pixels = dict(short="", long="--pixels",
action="store",type="int",
dest="pixels", default=None,
http://bitbucket.org/yt_analysis/yt/changeset/6eec28983e4e/
changeset: 6eec28983e4e
branch: yt
user: samskillman
date: 2011-09-03 19:48:19
summary: Merging.
affected #: 4 files (4.5 KB)
--- a/tests/object_field_values.py Sat Sep 03 11:47:12 2011 -0600
+++ b/tests/object_field_values.py Sat Sep 03 11:48:19 2011 -0600
@@ -9,6 +9,33 @@
class FieldHashesDontMatch(RegressionTestException):
pass
+known_objects = {}
+
+def register_object(func):
+ known_objects[func.func_name] = func
+ return func
+
+@register_object
+def centered_sphere(self):
+ center = 0.5*(self.pf.domain_right_edge + self.pf.domain_left_edge)
+ width = (self.pf.domain_right_edge - self.pf.domain_left_edge).max()
+ self.data_object = self.pf.h.sphere(center, width/0.25)
+
+@register_object
+def off_centered_sphere(self):
+ center = 0.5*(self.pf.domain_right_edge + self.pf.domain_left_edge)
+ width = (self.pf.domain_right_edge - self.pf.domain_left_edge).max()
+ self.data_object = self.pf.h.sphere(center - 0.25 * width, width/0.25)
+
+@register_object
+def corner_sphere(self):
+ width = (self.pf.domain_right_edge - self.pf.domain_left_edge).max()
+ self.data_object = self.pf.h.sphere(self.pf.domain_left_edge, width/0.25)
+
+@register_object
+def all_data(self):
+ self.data_object = self.pf.h.all_data()
+
class YTFieldValuesTest(YTStaticOutputTest):
def run(self):
vals = self.data_object[self.field].copy()
@@ -18,42 +45,12 @@
def compare(self, old_result):
if self.result != old_result: raise FieldHashesDontMatch
-class CenteredSphere(YTFieldValuesTest):
+ def setup(self):
+ YTStaticOutputTest.setup(self)
+ known_objects[self.object_name](self)
- def setup(self):
- YTFieldValuesTest.setup(self)
- center = 0.5*(self.pf.domain_right_edge + self.pf.domain_left_edge)
- width = (self.pf.domain_right_edge - self.pf.domain_left_edge).max()
- self.data_object = self.pf.h.sphere(center, width/0.25)
+for object_name in known_objects:
+ for field in field_list + particle_field_list:
+ create_test(YTFieldValuesTest, "%s_%s" % (object_name, field),
+ field = field, object_name = object_name)
-for field in field_list + particle_field_list:
- create_test(CenteredSphere, "centered_sphere_%s" % (field), field = field)
-
-class OffCenteredSphere(YTFieldValuesTest):
-
- def setup(self):
- YTFieldValuesTest.setup(self)
- center = 0.5*(self.pf.domain_right_edge + self.pf.domain_left_edge)
- width = (self.pf.domain_right_edge - self.pf.domain_left_edge).max()
- self.data_object = self.pf.h.sphere(center - 0.25 * width, width/0.25)
-
-for field in field_list + particle_field_list:
- create_test(OffCenteredSphere, "off_centered_sphere_%s" % (field), field = field)
-
-class CornerSphere(YTFieldValuesTest):
-
- def setup(self):
- YTFieldValuesTest.setup(self)
- width = (self.pf.domain_right_edge - self.pf.domain_left_edge).max()
- self.data_object = self.pf.h.sphere(self.pf.domain_left_edge, width/0.25)
-
-for field in field_list + particle_field_list:
- create_test(CornerSphere, "corner_sphere_%s" % (field), field = field)
-
-class AllData(YTFieldValuesTest):
- def setup(self):
- YTFieldValuesTest.setup(self)
- self.data_object = self.pf.h.all_data()
-
-for field in field_list + particle_field_list:
- create_test(AllData, "all_data_%s" % (field), field = field)
--- a/yt/data_objects/profiles.py Sat Sep 03 11:47:12 2011 -0600
+++ b/yt/data_objects/profiles.py Sat Sep 03 11:48:19 2011 -0600
@@ -30,8 +30,8 @@
from yt.funcs import *
-from yt.utilities.data_point_utilities import Bin2DProfile, \
- Bin3DProfile
+from yt.utilities.data_point_utilities import \
+ Bin1DProfile, Bin2DProfile, Bin3DProfile
from yt.utilities.parallel_tools.parallel_analysis_interface import \
ParallelAnalysisInterface
@@ -239,22 +239,19 @@
mi, inv_bin_indices = args # Args has the indices to use as input
# check_cut is set if source != self._data_source
# (i.e., lazy_reader)
- source_data = self._get_field(source, field, check_cut)[mi]
- if weight: weight_data = self._get_field(source, weight, check_cut)[mi]
+ source_data = self._get_field(source, field, check_cut)
+ if weight: weight_data = self._get_field(source, weight, check_cut)
+ else: weight_data = na.ones(source_data.shape, dtype='float64')
+ self.total_stuff = source_data.sum()
binned_field = self._get_empty_field()
weight_field = self._get_empty_field()
used_field = self._get_empty_field()
- # Now we perform the actual binning
- for bin in inv_bin_indices.keys():
- # temp_field is *all* the points from source that go into this bin
- temp_field = source_data[inv_bin_indices[bin]]
- if weight:
- # now w_i * v_i and store sum(w_i)
- weight_field[bin] = weight_data[inv_bin_indices[bin]].sum()
- temp_field *= weight_data[inv_bin_indices[bin]]
- binned_field[bin] = temp_field.sum()
- # inv_bin_indices is a tuple of indices
- if inv_bin_indices[bin][0].size > 0: used_field[bin] = 1
+ mi = args[0]
+ bin_indices_x = args[1].ravel().astype('int64')
+ source_data = source_data[mi]
+ weight_data = weight_data[mi]
+ Bin1DProfile(bin_indices_x, weight_data, source_data,
+ weight_field, binned_field, used_field)
# Fix for laziness, because at the *end* we will be
# summing up all of the histograms and dividing by the
# weights. Accumulation likely doesn't work with weighted
@@ -270,26 +267,21 @@
raise EmptyProfileData()
# Truncate at boundaries.
if self.end_collect:
- mi = na.arange(source_data.size)
+ sd = source_data[:]
else:
- mi = na.where( (source_data > self._bins.min())
- & (source_data < self._bins.max()))
- sd = source_data[mi]
+ mi = ((source_data > self._bins.min())
+ & (source_data < self._bins.max()))
+ sd = source_data[mi]
if sd.size == 0:
raise EmptyProfileData()
# Stick the bins into our fixed bins, set at initialization
bin_indices = na.digitize(sd, self._bins)
if self.end_collect: #limit the range of values to 0 and n_bins-1
- bin_indices = na.minimum(na.maximum(1, bin_indices), self.n_bins) - 1
+ bin_indices = na.clip(bin_indices, 0, self.n_bins - 1)
else: #throw away outside values
bin_indices -= 1
- # Now we set up our inverse bin indices
- inv_bin_indices = {}
- for bin in range(self[self.bin_field].size):
- # Which fall into our bin?
- inv_bin_indices[bin] = na.where(bin_indices == bin)
- return (mi, inv_bin_indices)
+ return (mi, bin_indices)
def choose_bins(self, bin_style):
# Depending on the bin_style, choose from bin edges 0...N either:
--- a/yt/utilities/answer_testing/runner.py Sat Sep 03 11:47:12 2011 -0600
+++ b/yt/utilities/answer_testing/runner.py Sat Sep 03 11:48:19 2011 -0600
@@ -133,7 +133,7 @@
print "NO OLD RESULTS"
else:
if success == True: print "SUCCEEDED"
- else: print "FAILED"
+ else: print "FAILED", msg
self.passed_tests[test.name] = success
if self.watcher is not None:
if success == True:
--- a/yt/utilities/data_point_utilities.c Sat Sep 03 11:47:12 2011 -0600
+++ b/yt/utilities/data_point_utilities.c Sat Sep 03 11:48:19 2011 -0600
@@ -273,6 +273,111 @@
}
+static PyObject *_profile1DError;
+
+static PyObject *Py_Bin1DProfile(PyObject *obj, PyObject *args)
+{
+ int i, j;
+ PyObject *obins_x, *owsource, *obsource, *owresult, *obresult, *oused;
+ PyArrayObject *bins_x, *wsource, *bsource, *wresult, *bresult, *used;
+ bins_x = wsource = bsource = wresult = bresult = used = NULL;
+
+ if (!PyArg_ParseTuple(args, "OOOOOO",
+ &obins_x, &owsource, &obsource,
+ &owresult, &obresult, &oused))
+ return PyErr_Format(_profile1DError,
+ "Bin1DProfile: Invalid parameters.");
+ i = 0;
+
+ bins_x = (PyArrayObject *) PyArray_FromAny(obins_x,
+ PyArray_DescrFromType(NPY_INT64), 1, 1,
+ NPY_IN_ARRAY, NULL);
+ if(bins_x==NULL) {
+ PyErr_Format(_profile1DError,
+ "Bin1DProfile: One dimension required for bins_x.");
+ goto _fail;
+ }
+
+ wsource = (PyArrayObject *) PyArray_FromAny(owsource,
+ PyArray_DescrFromType(NPY_FLOAT64), 1, 1,
+ NPY_IN_ARRAY, NULL);
+ if((wsource==NULL) || (PyArray_SIZE(bins_x) != PyArray_SIZE(wsource))) {
+ PyErr_Format(_profile1DError,
+ "Bin1DProfile: One dimension required for wsource, same size as bins_x.");
+ goto _fail;
+ }
+
+ bsource = (PyArrayObject *) PyArray_FromAny(obsource,
+ PyArray_DescrFromType(NPY_FLOAT64), 1, 1,
+ NPY_IN_ARRAY, NULL);
+ if((bsource==NULL) || (PyArray_SIZE(bins_x) != PyArray_SIZE(bsource))) {
+ PyErr_Format(_profile1DError,
+ "Bin1DProfile: One dimension required for bsource, same size as bins_x.");
+ goto _fail;
+ }
+
+ wresult = (PyArrayObject *) PyArray_FromAny(owresult,
+ PyArray_DescrFromType(NPY_FLOAT64), 1,1,
+ NPY_INOUT_ARRAY | NPY_UPDATEIFCOPY, NULL);
+ if(wresult==NULL){
+ PyErr_Format(_profile1DError,
+ "Bin1DProfile: Two dimensions required for wresult.");
+ goto _fail;
+ }
+
+ bresult = (PyArrayObject *) PyArray_FromAny(obresult,
+ PyArray_DescrFromType(NPY_FLOAT64), 1,1,
+ NPY_INOUT_ARRAY | NPY_UPDATEIFCOPY, NULL);
+ if((bresult==NULL) ||(PyArray_SIZE(wresult) != PyArray_SIZE(bresult))
+ || (PyArray_DIM(bresult,0) != PyArray_DIM(wresult,0))){
+ PyErr_Format(_profile1DError,
+ "Bin1DProfile: Two dimensions required for bresult, same shape as wresult.");
+ goto _fail;
+ }
+
+ used = (PyArrayObject *) PyArray_FromAny(oused,
+ PyArray_DescrFromType(NPY_FLOAT64), 1,1,
+ NPY_INOUT_ARRAY | NPY_UPDATEIFCOPY, NULL);
+ if((used==NULL) ||(PyArray_SIZE(used) != PyArray_SIZE(wresult))
+ || (PyArray_DIM(used,0) != PyArray_DIM(wresult,0))){
+ PyErr_Format(_profile1DError,
+ "Bin1DProfile: Two dimensions required for used, same shape as wresult.");
+ goto _fail;
+ }
+
+ npy_float64 wval, bval;
+ int n;
+
+ for(n=0; n<bins_x->dimensions[0]; n++) {
+ i = *(npy_int64*)PyArray_GETPTR1(bins_x, n);
+ bval = *(npy_float64*)PyArray_GETPTR1(bsource, n);
+ wval = *(npy_float64*)PyArray_GETPTR1(wsource, n);
+ *(npy_float64*)PyArray_GETPTR1(wresult, i) += wval;
+ *(npy_float64*)PyArray_GETPTR1(bresult, i) += wval*bval;
+ *(npy_float64*)PyArray_GETPTR1(used, i) = 1.0;
+ }
+
+ Py_DECREF(bins_x);
+ Py_DECREF(wsource);
+ Py_DECREF(bsource);
+ Py_DECREF(wresult);
+ Py_DECREF(bresult);
+ Py_DECREF(used);
+
+ PyObject *onum_found = PyInt_FromLong((long)1);
+ return onum_found;
+
+ _fail:
+ Py_XDECREF(bins_x);
+ Py_XDECREF(wsource);
+ Py_XDECREF(bsource);
+ Py_XDECREF(wresult);
+ Py_XDECREF(bresult);
+ Py_XDECREF(used);
+ return NULL;
+
+}
+
static PyObject *_profile2DError;
static PyObject *Py_Bin2DProfile(PyObject *obj, PyObject *args)
@@ -1674,6 +1779,7 @@
{"Interpolate", Py_Interpolate, METH_VARARGS},
{"DataCubeRefine", Py_DataCubeRefine, METH_VARARGS},
{"DataCubeReplace", Py_DataCubeReplace, METH_VARARGS},
+ {"Bin1DProfile", Py_Bin1DProfile, METH_VARARGS},
{"Bin2DProfile", Py_Bin2DProfile, METH_VARARGS},
{"Bin3DProfile", Py_Bin3DProfile, METH_VARARGS},
{"FindContours", Py_FindContours, METH_VARARGS},
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
More information about the yt-svn
mailing list