[Yt-svn] commit/yt: 2 new changesets

Bitbucket commits-noreply at bitbucket.org
Tue Jun 21 10:17:46 PDT 2011


2 new changesets in yt:

http://bitbucket.org/yt_analysis/yt/changeset/6b10d3a04475/
changeset:   6b10d3a04475
branch:      yt
user:        brittonsmith
date:        2011-06-21 16:13:14
summary:     Fixed the Baryon_Overdensity field so it no longer simply points to the Density field.
affected #:  1 file (506 bytes)

--- a/yt/data_objects/universal_fields.py	Wed Jun 08 11:17:23 2011 -0700
+++ b/yt/data_objects/universal_fields.py	Tue Jun 21 16:13:14 2011 +0200
@@ -412,18 +412,28 @@
 add_field("DensityPerturbation",function=_DensityPerturbation,units=r"")
 
 # This is rho_b / <rho_b>.
+# def _Baryon_Overdensity(field, data):
+#     return data['Density']
+# def _Convert_Baryon_Overdensity(data):
+#     if data.pf.has_key('omega_baryon_now'):
+#         omega_baryon_now = data.pf['omega_baryon_now']
+#     else:
+#         omega_baryon_now = 0.0441
+#     return 1 / (omega_baryon_now * rho_crit_now * 
+#                 (data.pf['CosmologyHubbleConstantNow']**2) * 
+#                 ((1+data.pf['CosmologyCurrentRedshift'])**3))
+# add_field("Baryon_Overdensity", function=_Baryon_Overdensity, 
+#           convert_function=_Convert_Baryon_Overdensity, units=r"")
 def _Baryon_Overdensity(field, data):
-    return data['Density']
-def _Convert_Baryon_Overdensity(data):
     if data.pf.has_key('omega_baryon_now'):
         omega_baryon_now = data.pf['omega_baryon_now']
     else:
         omega_baryon_now = 0.0441
-    return 1 / (omega_baryon_now * rho_crit_now * 
-                (data.pf['CosmologyHubbleConstantNow']**2) * 
-                ((1+data.pf['CosmologyCurrentRedshift'])**3))
+    return data['Density'] / (omega_baryon_now * rho_crit_now * 
+                              (data.pf['CosmologyHubbleConstantNow']**2) * 
+                              ((1+data.pf['CosmologyCurrentRedshift'])**3))
 add_field("Baryon_Overdensity", function=_Baryon_Overdensity, 
-          convert_function=_Convert_Baryon_Overdensity, units=r"")
+          units=r"")
 
 # Weak lensing convergence.
 # Eqn 4 of Metzler, White, & Loken (2001, ApJ, 547, 560).
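
For reference, here is the corrected field logic as a standalone sketch. The field names and the 0.0441 fallback come straight from the diff above; the numeric value of rho_crit_now is assumed here for illustration (in yt it is defined alongside the other physical constants).

    # rho_b / <rho_b>: mean baryon density is Omega_b * rho_crit * h^2 * (1+z)^3
    rho_crit_now = 1.8788e-29  # g/cm^3 (times h^2); assumed value for illustration

    def baryon_overdensity(density, params):
        # Fall back to a WMAP-era Omega_b when the dataset lacks one,
        # exactly as the committed field does.
        omega_baryon_now = params.get('omega_baryon_now', 0.0441)
        mean_rho_b = (omega_baryon_now * rho_crit_now *
                      params['CosmologyHubbleConstantNow']**2 *
                      (1 + params['CosmologyCurrentRedshift'])**3)
        return density / mean_rho_b

    print baryon_overdensity(1.2e-30,
        {'CosmologyHubbleConstantNow': 0.71, 'CosmologyCurrentRedshift': 0.0})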


http://bitbucket.org/yt_analysis/yt/changeset/ab5c2b49d357/
changeset:   ab5c2b49d357
branch:      yt
user:        brittonsmith
date:        2011-06-21 19:17:34
summary:     Merged.
affected #:  36 files (30.3 KB)

--- a/tests/DD0010/moving7_0010	Tue Jun 21 16:13:14 2011 +0200
+++ b/tests/DD0010/moving7_0010	Tue Jun 21 19:17:34 2011 +0200
@@ -1,6 +1,7 @@
 InitialCycleNumber  = 10
 InitialTime         = 0.81751317119117
 InitialCPUTime      = 2.15207e+09
+CurrentTimeIdentifier = 0
 
 StopTime            = 20.097275649537
 StopCycle           = 10000


--- a/tests/runall.py	Tue Jun 21 16:13:14 2011 +0200
+++ b/tests/runall.py	Tue Jun 21 19:17:34 2011 +0200
@@ -4,7 +4,8 @@
 
 from yt.utilities.answer_testing.api import \
     RegressionTestRunner, clear_registry, create_test, \
-    TestFieldStatistics, TestAllProjections, registry_entries
+    TestFieldStatistics, TestAllProjections, registry_entries, \
+    Xunit
 
 from yt.utilities.command_line import get_yt_version
 
@@ -78,20 +79,25 @@
         sys.exit(1)
     # Now we modify our compare name and self name to include the pf.
     compare_id = opts.compare_name
-    if compare_id is not None: compare_id += "_%s_%s" % (pf, pf._hash())
+    watcher = None
+    if compare_id is not None:
+        compare_id += "_%s_%s" % (pf, pf._hash())
+        watcher = Xunit()
     this_id = opts.this_name + "_%s_%s" % (pf, pf._hash())
     rtr = RegressionTestRunner(this_id, compare_id,
             results_path = opts.storage_dir,
             compare_results_path = opts.storage_dir,
             io_log = [opts.parameter_file])
+    rtr.watcher = watcher
     tests_to_run = []
     for m, vals in mapping.items():
-        print vals, opts.test_pattern
         new_tests = fnmatch.filter(vals, opts.test_pattern)
         if len(new_tests) == 0: continue
         tests_to_run += new_tests
         load_tests(m, cwd)
     for test_name in sorted(tests_to_run):
         rtr.run_test(test_name)
+    if watcher is not None:
+        rtr.watcher.report()
     for test_name, result in sorted(rtr.passed_tests.items()):
         print "TEST %s: %s" % (test_name, result)


--- a/yt/analysis_modules/level_sets/api.py	Tue Jun 21 16:13:14 2011 +0200
+++ b/yt/analysis_modules/level_sets/api.py	Tue Jun 21 19:17:34 2011 +0200
@@ -29,7 +29,6 @@
 """
 
 from .contour_finder import \
-    GridConsiderationQueue, \
     coalesce_join_tree, \
     identify_contours
 


--- a/yt/analysis_modules/level_sets/contour_finder.py	Tue Jun 21 16:13:14 2011 +0200
+++ b/yt/analysis_modules/level_sets/contour_finder.py	Tue Jun 21 19:17:34 2011 +0200
@@ -30,202 +30,6 @@
 import yt.utilities.data_point_utilities as data_point_utilities
 import yt.utilities.amr_utils as amr_utils
 
-class GridConsiderationQueue:
-    def __init__(self, white_list, priority_func=None):
-        """
-        This class exists to serve the contour finder.  It ensures that
-        we can create a cascading set of queue dependencies, and if
-        a grid is touched again ahead of time we can bump it to the top
-        of the queue again.  It like has few uses.
-        """
-        self.to_consider = []
-        self.considered = set()
-        self.n = 0
-        self.white_list = set(white_list)
-        self.priority_func = priority_func
-
-    def add(self, grids, force=False):
-        if not hasattr(grids,'size'):
-            grids = ensure_list(grids)
-        i = self.n
-        to_check = self.white_list.intersection(grids)
-        if not force: to_check.difference_update(self.considered)
-        for g in sorted(to_check, key=self.priority_func):
-            try:
-                # We only delete from subsequent checks
-                del self.to_consider[self.to_consider.index(g, i)]
-                self.to_consider.insert(i,g)
-                i += 1
-            except ValueError:
-                self.to_consider.append(g)
-
-    def __iter__(self):
-        return self
-
-    def next(self):
-        if self.n >= len(self.to_consider):
-            raise StopIteration
-        tr = self.to_consider[self.n]
-        self.considered.add(tr)
-        self.n += 1
-        return tr
-
-    def progress(self):
-        return self.n, len(self.to_consider)
-
-# We want an algorithm that deals with growing a given contour to *all* the
-# cells in a grid.
-
-def old_identify_contours(data_source, field, min_val, max_val, cached_fields=None):
-    """
-    Given a *data_source*, we will search for topologically connected sets
-    in *field* between *min_val* and *max_val*.
-    """
-    if cached_fields is None: cached_fields = defaultdict(lambda: dict())
-    maxn_cells = na.sum([g.ActiveDimensions.prod() for g in data_source._grids])
-    contour_ind = na.where( (data_source[field] > min_val)
-                          & (data_source[field] < max_val))[0]
-    np = contour_ind.size
-    if np == 0:
-        return {}
-    cur_max_id = maxn_cells - np
-    contour_ids = na.arange(maxn_cells, cur_max_id, -1) + 1 # Minimum of 1
-    data_source["tempContours"] = na.ones(data_source[field].shape, dtype='int32')*-1
-    mylog.info("Contouring over %s cells with %s candidates", contour_ids[0],np)
-    data_source["tempContours"][contour_ind] = contour_ids[:]
-    data_source._flush_data_to_grids("tempContours", -1, dtype='int32')
-    my_queue = GridConsiderationQueue(white_list = data_source._grids,
-                    priority_func = lambda g: -1*g["tempContours"].max())
-    my_queue.add(data_source._grids)
-    for i,grid in enumerate(my_queue):
-        mylog.info("Examining %s of %s", *my_queue.progress())
-        max_before = grid["tempContours"].max()
-        to_get = ["tempContours"]
-        if field in cached_fields[grid.id] and \
-            not na.any( (cached_fields[grid.id][field] > min_val)
-                      & (cached_fields[grid.id][field] < max_val)):
-            continue
-        for f in [field, "GridIndices"]:
-            if f not in cached_fields[grid.id]: to_get.append(f)
-        cg = grid.retrieve_ghost_zones(1,to_get)
-        for f in [field, "GridIndices"]:
-            if f in cached_fields[grid.id]:
-                cg.data[f] = cached_fields[grid.id][f]
-            else:
-                cached_fields[grid.id][f] = cg[f] 
-        local_ind = na.where( (cg[field] > min_val)
-                            & (cg[field] < max_val)
-                            & (cg["tempContours"] == -1) )
-        if local_ind[0].size > 0:
-            kk = na.arange(cur_max_id, cur_max_id-local_ind[0].size, -1)
-            cg["tempContours"][local_ind] = kk[:]
-            cur_max_id -= local_ind[0].size
-        fd = cg["tempContours"].astype('int64')
-        fd_original = fd.copy()
-        if na.all(fd > -1):
-            fd[:] = fd.max()
-        else:
-            xi_u,yi_u,zi_u = na.where(fd > -1)
-            cor_order = na.argsort(-1*fd[(xi_u,yi_u,zi_u)])
-            xi = xi_u[cor_order]
-            yi = yi_u[cor_order]
-            zi = zi_u[cor_order]
-            while data_point_utilities.FindContours(fd, xi, yi, zi) < 0: pass
-        cg["tempContours"] = fd.copy().astype('float64')
-        cg.flush_data("tempContours")
-        my_queue.add(cg._grids)
-        force_ind = na.unique(cg["GridIndices"][na.where(
-            (cg["tempContours"] > fd_original)
-          & (cg["GridIndices"] != grid.id-grid._id_offset) )])
-        if len(force_ind) > 0:
-            my_queue.add(data_source.hierarchy.grids[force_ind.astype('int32')], force=True)
-        for ax in 'xyz':
-            if not iterable(grid['d%s'%ax]):
-                grid['d%s'%ax] = grid['d%s'%ax]*na.ones(grid.ActiveDimensions)
-    del data_source.data["tempContours"] # Force a reload from the grids
-    data_source.get_data("tempContours", in_grids=True)
-    i = 0
-    contour_ind = {}
-    for contour_id in na.unique(data_source["tempContours"]):
-        if contour_id == -1: continue
-        contour_ind[i] = na.where(data_source["tempContours"] == contour_id)
-        mylog.debug("Contour id %s has %s cells", i, contour_ind[i][0].size)
-        i += 1
-    mylog.info("Identified %s contours between %0.5e and %0.5e",
-               len(contour_ind.keys()),min_val,max_val)
-    for grid in chain(data_source._grids, cg._grids):
-        grid.data.pop("tempContours", None)
-    del data_source.data["tempContours"]
-    return contour_ind
-
-def check_neighbors(data_object, field="Contours"):
-    """
-    This method is a means of error checking in the contour finder.
-    """
-    n_bad = na.zeros(1, dtype='int32')
-    for cid in na.unique(data_object[field]):
-        if cid == -1: continue
-        ids = na.where(data_object[field] == cid)
-        mx = data_object['x'][ids].copy()
-        my = data_object['y'][ids].copy()
-        mz = data_object['z'][ids].copy()
-        mdx = data_object['dx'][ids].copy()
-        mdy = data_object['dy'][ids].copy()
-        mdz = data_object['dz'][ids].copy()
-        grid_ids_m = data_object['GridIndices'][ids].copy()
-        grid_levels_m = data_object['GridLevel'][ids].copy()
-        mp = mx.size
-        ids = na.where( (data_object[field] != cid)
-                      & (data_object[field] >=  0 ))
-        nx = data_object['x'][ids].copy()
-        ny = data_object['y'][ids].copy()
-        nz = data_object['z'][ids].copy()
-        ndx = data_object['dx'][ids].copy()
-        ndy = data_object['dy'][ids].copy()
-        ndz = data_object['dz'][ids].copy()
-        grid_ids_n = data_object['GridIndices'][ids].copy()
-        grid_levels_n = data_object['GridLevel'][ids].copy()
-        np = nx.size
-        weave.inline(check_cell_distance,
-                   ['mx','my','mz','mdx','mdy','mdz','mp',
-                    'nx','ny','nz','ndx','ndy','ndz','np','n_bad',
-                    'grid_ids_m', 'grid_levels_m', 'grid_ids_n', 'grid_levels_n'],
-                    compiler='gcc', type_converters=converters.blitz,
-                    auto_downcast=0, verbose=2)
-    return n_bad[0]
-
-check_cell_distance = \
-r"""
-using namespace std;
-int i, j, k;
-long double cell_dist, rad_m, rad_n;
-k=0;
-for(i=0;i<mp;i++){
-  for(j=0;j<np;j++){
-    /*
-   cell_dist = sqrtl(pow(mx(i)-nx(j),2) +
-                     pow(my(i)-ny(j),2) +
-                     pow(mz(i)-nz(j),2));
-   rad_m = sqrtl(pow(mdx(i),2) +
-                 pow(mdy(i),2) +
-                 pow(mdz(i),2));
-   rad_n = sqrtl(pow(ndx(j),2) +
-                 pow(ndy(j),2) +
-                 pow(ndz(j),2));
-    */
-   //if(cell_dist > 1.01 * (rad_n/2.0+rad_m/2.0)) continue;
-   if(fabsl(mx(i)-nx(j))>(mdx(i)+ndx(j))/2.0) continue;
-   if(fabsl(my(i)-ny(j))>(mdy(i)+ndy(j))/2.0) continue;
-   if(fabsl(mz(i)-nz(j))>(mdz(i)+ndz(j))/2.0) continue;
-   k++;
-   break;
-   cout << cell_dist << "\t" << 1.01*(rad_n/2.0+rad_m/2.0) << "\t";
-   cout << grid_ids_m(i) << "\t" << grid_ids_n(j) << endl;
-  }
-}
-n_bad(0) += k;
-"""
-
 def coalesce_join_tree(jtree1):
     joins = defaultdict(set)
     nj = jtree1.shape[0]


--- a/yt/analysis_modules/star_analysis/sfr_spectrum.py	Tue Jun 21 16:13:14 2011 +0200
+++ b/yt/analysis_modules/star_analysis/sfr_spectrum.py	Tue Jun 21 19:17:34 2011 +0200
@@ -328,6 +328,8 @@
             if star_metallicity_constant is not None:
                 self.star_metal = na.ones(self.star_mass.size, dtype='float64') * \
                     star_metallicity_constant
+            if star_metallicity_fraction is not None:
+                self.star_metal = star_metallicity_fraction
         else:
             # Get the data we need.
             ct = self._data_source["creation_time"]


--- a/yt/data_objects/data_containers.py	Tue Jun 21 16:13:14 2011 +0200
+++ b/yt/data_objects/data_containers.py	Tue Jun 21 19:17:34 2011 +0200
@@ -1431,14 +1431,14 @@
         This is a data object corresponding to a line integral through the
         simulation domain.
 
-        This object is typically accessed through the `quad_proj` object that
+        This object is typically accessed through the `proj` object that
         hangs off of hierarchy objects.  AMRQuadProj is a projection of a
         `field` along an `axis`.  The field can have an associated
         `weight_field`, in which case the values are multiplied by a weight
         before being summed, and then divided by the sum of that weight; the
         two fundamental modes of operating are direct line integral (no
         weighting) and average along a line of sight (weighting.)  What makes
-        `quad_proj` different from the standard projection mechanism is that it
+        `proj` different from the standard projection mechanism is that it
         utilizes a quadtree data structure, rather than the old mechanism for
         projections.  It will not run in parallel, but serial runs should be
         substantially faster.  Note also that lines of sight are integrated at
@@ -1536,7 +1536,7 @@
     def _get_tree(self, nvals):
         xd = self.pf.domain_dimensions[x_dict[self.axis]]
         yd = self.pf.domain_dimensions[y_dict[self.axis]]
-        return QuadTree(na.array([xd,yd]), nvals)
+        return QuadTree(na.array([xd,yd], dtype='int64'), nvals)
 
     def _get_dls(self, grid, fields):
         # Place holder for a time when maybe we will not be doing just
@@ -2189,6 +2189,7 @@
         for i,grid in enumerate(self._get_grids()):
             mylog.debug("Getting fields from %s", i)
             self._get_data_from_grid(grid, fields_to_get, dls)
+        mylog.info("IO completed; summing")
         for field in fields_to_get:
             self[field] = self._mpi_Allsum_double(self[field])
             conv = self.pf.units[self.pf.field_info[field].projection_conversion]
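
The explicit dtype='int64' in _get_tree guards against platform-dependent integer widths: na.array([xd, yd]) infers the platform's default integer type, which is 32-bit on some builds, presumably while the Cython QuadTree expects 64-bit indices. A quick numpy illustration:

    import numpy as na  # yt's conventional alias at the time

    dims = na.array([512, 512])
    print dims.dtype                                  # platform default, e.g. int32
    print na.array([512, 512], dtype='int64').dtype   # always int64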


--- a/yt/data_objects/derived_quantities.py	Tue Jun 21 16:13:14 2011 +0200
+++ b/yt/data_objects/derived_quantities.py	Tue Jun 21 19:17:34 2011 +0200
@@ -224,6 +224,23 @@
     amz = data["SpecificAngularMomentumZ"]*data["CellMassMsun"]
     j_mag = [amx.sum(), amy.sum(), amz.sum()]
     return [j_mag]
+
+def _StarAngularMomentumVector(data):
+    """
+    This function returns the mass-weighted average angular momentum vector 
+    for stars.
+    """
+    is_star = data["creation_time"] > 0
+    star_mass = data["ParticleMassMsun"][is_star]
+    sLx = data["ParticleSpecificAngularMomentumX"][is_star]
+    sLy = data["ParticleSpecificAngularMomentumY"][is_star]
+    sLz = data["ParticleSpecificAngularMomentumZ"][is_star]
+    amx = sLx * star_mass
+    amy = sLy * star_mass
+    amz = sLz * star_mass
+    j_mag = [amx.sum(), amy.sum(), amz.sum()]
+    return [j_mag]
+
 def _combAngularMomentumVector(data, j_mag):
     if len(j_mag.shape) < 2: j_mag = na.expand_dims(j_mag, 0)
     L_vec = j_mag.sum(axis=0)
@@ -232,6 +249,9 @@
 add_quantity("AngularMomentumVector", function=_AngularMomentumVector,
              combine_function=_combAngularMomentumVector, n_ret=1)
 
+add_quantity("StarAngularMomentumVector", function=_StarAngularMomentumVector,
+             combine_function=_combAngularMomentumVector, n_ret=1)
+
 def _BaryonSpinParameter(data):
     """
     This function returns the spin parameter for the baryons, but it uses
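
Once registered, the new quantity is reachable through the usual derived-quantities interface. A usage sketch (the dataset path is just the test dataset from this email; any data object containing star particles works):

    from yt.mods import *

    pf = load("DD0010/moving7_0010")
    dd = pf.h.all_data()
    # Mass-weighted angular momentum vector of star particles
    # (particles with creation_time > 0):
    L_star = dd.quantities["StarAngularMomentumVector"]()
    print L_star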


--- a/yt/data_objects/static_output.py	Tue Jun 21 16:13:14 2011 +0200
+++ b/yt/data_objects/static_output.py	Tue Jun 21 19:17:34 2011 +0200
@@ -62,12 +62,13 @@
             _cached_pfs[apath] = obj
         return _cached_pfs[apath]
 
-    def __init__(self, filename, data_style=None):
+    def __init__(self, filename, data_style=None, file_style=None):
         """
         Base class for generating new output types.  Principally consists of
         a *filename* and a *data_style* which will be passed on to children.
         """
         self.data_style = data_style
+        self.file_style = file_style
         self.parameter_filename = str(filename)
         self.basename = os.path.basename(filename)
         self.directory = os.path.expanduser(os.path.dirname(filename))


--- a/yt/frontends/castro/data_structures.py	Tue Jun 21 16:13:14 2011 +0200
+++ b/yt/frontends/castro/data_structures.py	Tue Jun 21 19:17:34 2011 +0200
@@ -620,6 +620,7 @@
             line = a_file.readline().strip()
             a_file.close()
             self.parameters["CosmologyCurrentRedshift"] = 1/float(line) - 1
+            self.cosmological_scale_factor = float(line)
             self.current_redshift = self.parameters["CosmologyCurrentRedshift"]
         else:
             self.current_redshift = self.omega_lambda = self.omega_matter = \
@@ -668,7 +669,18 @@
         self.time_units = {}
         if len(self.parameters) == 0:
             self._parse_parameter_file()
-        self._setup_nounits_units()
+        if self.cosmological_simulation:
+            cf = 1e5*(self.cosmological_scale_factor)
+            for ax in 'xyz':
+                self.units['particle_velocity_%s' % ax] = cf
+            self.units['particle_mass'] = 1.989e33
+        mylog.warning("Setting 1.0 in code units to be 1.0 cm")
+        if not self.has_key("TimeUnits"):
+            mylog.warning("No time units.  Setting 1.0 = 1 second.")
+            self.conversion_factors["Time"] = 1.0
+        for unit in mpc_conversion.keys():
+            self.units[unit] = mpc_conversion[unit] / mpc_conversion["cm"]
+        
         self.conversion_factors = defaultdict(lambda: 1.0)
         self.time_units['1'] = 1
         self.units['1'] = 1.0
@@ -683,10 +695,3 @@
 
     def _setup_nounits_units(self):
         z = 0
-        mylog.warning("Setting 1.0 in code units to be 1.0 cm")
-        if not self.has_key("TimeUnits"):
-            mylog.warning("No time units.  Setting 1.0 = 1 second.")
-            self.conversion_factors["Time"] = 1.0
-        for unit in mpc_conversion.keys():
-            self.units[unit] = mpc_conversion[unit] / mpc_conversion["cm"]
-


--- a/yt/frontends/enzo/data_structures.py	Tue Jun 21 16:13:14 2011 +0200
+++ b/yt/frontends/enzo/data_structures.py	Tue Jun 21 19:17:34 2011 +0200
@@ -135,6 +135,10 @@
     def __init__(self, pf, data_style):
         
         self.data_style = data_style
+        if pf.file_style != None:
+            self._bn = pf.file_style
+        else:
+            self._bn = "%s.cpu%%04i"
         self.hierarchy_filename = os.path.abspath(
             "%s.hierarchy" % (pf.parameter_filename))
         harray_fn = self.hierarchy_filename[:-9] + "harrays"
@@ -193,7 +197,11 @@
             self.data_style = 'enzo_hdf4'
             mylog.debug("Detected HDF4")
         except:
-            list_of_sets = hdf5_light_reader.ReadListOfDatasets(test_grid, "/")
+            try:
+                list_of_sets = hdf5_light_reader.ReadListOfDatasets(test_grid, "/")
+            except:
+                print "Could not find dataset.  Defaulting to packed HDF5"
+                list_of_sets = []
             if len(list_of_sets) == 0 and rank == 3:
                 mylog.debug("Detected packed HDF5")
                 self.data_style = 'enzo_packed_3d'
@@ -284,7 +292,6 @@
             second_grid.Level = first_grid.Level
         self.grid_levels[sgi] = second_grid.Level
 
-    _bn = "%s.cpu%%04i"
     def _parse_binary_hierarchy(self):
         mylog.info("Getting the binary hierarchy")
         if not ytcfg.getboolean("yt","serialize"): return False
@@ -630,6 +637,7 @@
     _hierarchy_class = EnzoHierarchy
     _fieldinfo_class = EnzoFieldContainer
     def __init__(self, filename, data_style=None,
+                 file_style = None,
                  parameter_override = None,
                  conversion_override = None,
                  storage_filename = None):
@@ -649,7 +657,7 @@
         self._conversion_override = conversion_override
         self.storage_filename = storage_filename
 
-        StaticOutput.__init__(self, filename, data_style)
+        StaticOutput.__init__(self, filename, data_style, file_style=file_style)
         if "InitialTime" not in self.parameters:
             self.current_time = 0.0
         rp = os.path.join(self.directory, "rates.out")
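
The file_style keyword threads from EnzoStaticOutput down through StaticOutput to the hierarchy, where it replaces the previously hard-coded "%s.cpu%%04i" pattern used to build per-processor data filenames. A sketch, assuming load() forwards keyword arguments to EnzoStaticOutput (the alternate pattern shown is hypothetical):

    from yt.mods import *

    # Default: data files named <basename>.cpu0000, <basename>.cpu0001, ...
    pf = load("DD0010/moving7_0010")

    # Hypothetical layout with files named <basename>.proc00000, ...
    # (the doubled %% survives the first string substitution)
    pf = load("DD0010/moving7_0010", file_style="%s.proc%%05i")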


--- a/yt/frontends/flash/data_structures.py	Tue Jun 21 16:13:14 2011 +0200
+++ b/yt/frontends/flash/data_structures.py	Tue Jun 21 19:17:34 2011 +0200
@@ -84,6 +84,12 @@
     def _detect_fields(self):
         ncomp = self._handle["/unknown names"].shape[0]
         self.field_list = [s for s in self._handle["/unknown names"][:].flat]
+        facevars = [s for s in self._handle
+                    if s.startswith(("fcx","fcy","fcz")) and s[-1].isdigit()]
+        nfacevars = len(facevars)
+        if (nfacevars > 0) :
+            ncomp += nfacevars
+            self.field_list.append(facevars)
         if ("/particle names" in self._handle) :
             self.field_list += ["particle_" + s[0].strip() for s
                                 in self._handle["/particle names"][:]]
@@ -297,9 +303,9 @@
 
         # Determine domain dimensions
         try:
-            nxb = self._find_parameter("integer", "nxb", handle = self._handle)
-            nyb = self._find_parameter("integer", "nyb", handle = self._handle)
-            nzb = self._find_parameter("integer", "nzb", handle = self._handle)
+            nxb = self._find_parameter("integer", "nxb", scalar = True, handle = self._handle)
+            nyb = self._find_parameter("integer", "nyb", scalar = True, handle = self._handle)
+            nzb = self._find_parameter("integer", "nzb", scalar = True, handle = self._handle)
         except KeyError:
             nxb, nyb, nzb = [int(self._handle["/simulation parameters"]['n%sb' % ax])
                               for ax in 'xyz']


--- a/yt/frontends/stream/data_structures.py	Tue Jun 21 16:13:14 2011 +0200
+++ b/yt/frontends/stream/data_structures.py	Tue Jun 21 19:17:34 2011 +0200
@@ -36,6 +36,8 @@
 from yt.data_objects.static_output import \
     StaticOutput
 from yt.utilities.logger import ytLogger as mylog
+from yt.utilities.amr_utils import \
+    get_box_grids_level
 
 from .fields import \
     StreamFieldContainer, \
@@ -94,7 +96,7 @@
 class StreamHandler(object):
     def __init__(self, left_edges, right_edges, dimensions,
                  levels, parent_ids, particle_count, processor_ids,
-                 fields):
+                 fields, io = None):
         self.left_edges = left_edges
         self.right_edges = right_edges
         self.dimensions = dimensions
@@ -104,6 +106,7 @@
         self.processor_ids = processor_ids
         self.num_grids = self.levels.size
         self.fields = fields
+        self.io = io
 
     def get_fields(self):
         return self.fields.all_fields
@@ -154,18 +157,22 @@
         self.grid_procs = self.stream_handler.processor_ids
         self.grid_particle_count[:] = self.stream_handler.particle_count
         mylog.debug("Copying reverse tree")
-        reverse_tree = self.stream_handler.parent_ids.tolist()
-        # Initial setup:
-        mylog.debug("Reconstructing parent-child relationships")
         self.grids = []
         # We enumerate, so it's 0-indexed id and 1-indexed pid
-        self.filenames = ["-1"] * self.num_grids
-        for id,pid in enumerate(reverse_tree):
+        for id in xrange(self.num_grids):
             self.grids.append(self.grid(id, self))
-            self.grids[-1].Level = self.grid_levels[id, 0]
-            if pid >= 0:
-                self.grids[-1]._parent_id = pid
-                self.grids[pid]._children_ids.append(self.grids[-1].id)
+            self.grids[id].Level = self.grid_levels[id, 0]
+        parent_ids = self.stream_handler.parent_ids
+        if parent_ids is not None:
+            reverse_tree = self.stream_handler.parent_ids.tolist()
+            # Initial setup:
+            for id,pid in enumerate(reverse_tree):
+                if pid >= 0:
+                    self.grids[-1]._parent_id = pid
+                    self.grids[pid]._children_ids.append(self.grids[-1].id)
+        else:
+            mylog.debug("Reconstructing parent-child relationships")
+            self._reconstruct_parent_child()
         self.max_level = self.grid_levels.max()
         mylog.debug("Preparing grids")
         for i, grid in enumerate(self.grids):
@@ -176,6 +183,22 @@
         self.grids = na.array(self.grids, dtype='object')
         mylog.debug("Prepared")
 
+    def _reconstruct_parent_child(self):
+        mask = na.empty(len(self.grids), dtype='int32')
+        mylog.debug("First pass; identifying child grids")
+        for i, grid in enumerate(self.grids):
+            get_box_grids_level(self.grid_left_edge[i,:],
+                                self.grid_right_edge[i,:],
+                                self.grid_levels[i] + 1,
+                                self.grid_left_edge, self.grid_right_edge,
+                                self.grid_levels, mask)
+            ids = na.where(mask.astype("bool"))
+            grid._children_ids = ids[0] # where is a tuple
+        mylog.debug("Second pass; identifying parents")
+        for i, grid in enumerate(self.grids): # Second pass
+            for child in grid.Children:
+                child._parent_id = i
+
     def _initialize_grid_arrays(self):
         AMRHierarchy._initialize_grid_arrays(self)
         self.grid_procs = na.zeros((self.num_grids,1),'int32')
@@ -211,7 +234,10 @@
         self.max_level = self.grid_levels.max()
 
     def _setup_data_io(self):
-        self.io = io_registry[self.data_style](self.stream_handler)
+        if self.stream_handler.io is not None:
+            self.io = self.stream_handler.io
+        else:
+            self.io = io_registry[self.data_style](self.stream_handler)
 
 class StreamStaticOutput(StaticOutput):
     _hierarchy_class = StreamHierarchy
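
The new _reconstruct_parent_child pass uses the Cython helper get_box_grids_level to find, for each grid, every grid one level finer whose bounding box overlaps it; a second pass then points each child back at its parent. A brute-force pure-numpy sketch of the first pass:

    import numpy as na

    def find_children(i, left, right, level):
        # Indices of grids one level finer than grid i that overlap it.
        finer = (level == level[i] + 1)
        overlap = na.all((left < right[i]) & (right > left[i]), axis=1)
        return na.where(finer & overlap)[0]

    left = na.array([[0., 0., 0.], [0., 0., 0.], [0.5, 0.5, 0.5]])
    right = na.array([[1., 1., 1.], [0.5, 0.5, 0.5], [1., 1., 1.]])
    level = na.array([0, 1, 1])
    print find_children(0, left, right, level)  # -> [1 2]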


--- a/yt/funcs.py	Tue Jun 21 16:13:14 2011 +0200
+++ b/yt/funcs.py	Tue Jun 21 19:17:34 2011 +0200
@@ -435,6 +435,10 @@
 # If we recognize one of the arguments on the command line as indicating a
 # different mechanism for handling tracebacks, we attach one of those handlers
 # and remove the argument from sys.argv.
+#
+# This fallback is for Paraview:
+if not hasattr(sys, 'argv') or sys.argv is None: sys.argv = []
+# Now, we check.
 if "--paste" in sys.argv:
     sys.excepthook = paste_traceback
     del sys.argv[sys.argv.index("--paste")]


--- a/yt/gui/reason/extdirect_repl.py	Tue Jun 21 16:13:14 2011 +0200
+++ b/yt/gui/reason/extdirect_repl.py	Tue Jun 21 19:17:34 2011 +0200
@@ -139,7 +139,9 @@
         self.repl.payload_handler.add_payload(
             {'type': 'cell_results',
              'output': result,
-             'input': highlighter(code)})
+             'input': highlighter(code),
+             'raw_input': code},
+            )
 
 def deliver_image(im):
     if hasattr(im, 'read'):
@@ -384,7 +386,17 @@
         cs = cStringIO.StringIO()
         cs.write("\n######\n".join(self.executed_cell_texts))
         cs = cs.getvalue()
-        ret = p.pastes.newPaste('pytb', cs, None, '', '', True)
+        ret = p.pastes.newPaste('python', cs, None, '', '', True)
+        site = "http://paste.enzotools.org/show/%s" % ret
+        return {'status': 'SUCCESS', 'site': site}
+
+    @lockit
+    def paste_text(self, to_paste):
+        import xmlrpclib, cStringIO
+        p = xmlrpclib.ServerProxy(
+            "http://paste.enzotools.org/xmlrpc/",
+            allow_none=True)
+        ret = p.pastes.newPaste('python', to_paste, None, '', '', True)
         site = "http://paste.enzotools.org/show/%s" % ret
         return {'status': 'SUCCESS', 'site': site}
 
@@ -546,6 +558,7 @@
         """ % dict(pfname = pfname)
         funccall = "\n".join((line.strip() for line in funccall.splitlines()))
         self.execute(funccall, hide = True)
+        self.execution_thread.queue.join()
         pf = self.locals['_tpf']
         levels = pf.h.grid_levels
         left_edge = pf.h.grid_left_edge
@@ -579,6 +592,7 @@
         """ % dict(pfname = pfname)
         funccall = "\n".join((line.strip() for line in funccall.splitlines()))
         self.execute(funccall, hide = True)
+        self.execution_thread.queue.join()
         pf = self.locals['_tpf']
         corners = pf.h.grid_corners
         levels = pf.h.grid_levels


Binary file yt/gui/reason/html/images/upload.png has changed


--- a/yt/gui/reason/html/index.html	Tue Jun 21 16:13:14 2011 +0200
+++ b/yt/gui/reason/html/index.html	Tue Jun 21 19:17:34 2011 +0200
@@ -53,6 +53,9 @@
     .singledownarrow { 
         background-image:url(images/single_down_sm.png) !important;
     }
+    .upload { 
+        background-image:url(images/upload.png) !important;
+    }
     #input_line {
         font-family: monospace;
     }


--- a/yt/gui/reason/html/js/functions.js	Tue Jun 21 16:13:14 2011 +0200
+++ b/yt/gui/reason/html/js/functions.js	Tue Jun 21 19:17:34 2011 +0200
@@ -54,7 +54,7 @@
         } else if (payload['type'] == 'cell_results') {
             text = "<pre>"+payload['output']+"</pre>";
             formatted_input = payload['input']
-            cell = new_cell(formatted_input, text);
+            cell = new_cell(formatted_input, text, payload['raw_input']);
             OutputContainer.add(cell);
             OutputContainer.doLayout();
             notebook.doLayout();
@@ -154,23 +154,70 @@
     });
 }
 
-function new_cell(input, result) {
+function new_cell(input, result, raw_input) {
     var name = "cell_" + cell_count;
     var CellPanel = new Ext.Panel(
         { 
             id: name, 
             //title: "Cell " + cell_count,
             items: [
-                new Ext.Panel({
-                    id:name+"_input",
-                    html:input,
-                }),
-                new Ext.Panel({
-                    id:name+"_result",
-                    autoScroll:true,
-                    width: "100%",
-                    html:result,
-                })
+                { xtype:'panel',
+                  layout: 'hbox',
+                  id:name+"_input",
+                  items: [
+                    { xtype:'panel',
+                      html:input,
+                      flex:1,
+                      boxMinHeight: 40,
+                    },
+                    { xtype: 'button',
+                      width: 24,
+                      height: 24,
+                      iconCls: 'upload',
+                      tooltip: 'Upload to Pastebin',
+                      listeners: {
+                          click: function(f, e) {
+                            yt_rpc.ExtDirectREPL.paste_text({to_paste:raw_input},
+                              function(f, a) {
+                                if (a.result['status'] == 'SUCCESS') {
+                                    var alert_text = 'Pasted cell to:<br>' + 
+                                    a.result['site']
+                                    var alert_text_rec = 'Pasted cell to: ' + 
+                                    a.result['site']
+                                    Ext.Msg.alert('Pastebin', alert_text);
+                                    var record = new logging_store.recordType(
+                                        {record: alert_text_rec });
+                                    logging_store.add(record, number_log_records++);
+                              }
+                            });
+                          }
+                        }
+                    },
+                    { xtype: 'button',
+                      width: 24,
+                      height: 24,
+                      iconCls: 'doubleuparrow',
+                      tooltip: 'Copy into current cell',
+                      listeners: {
+                          click: function(f, e) {
+                            repl_input.get('input_line').setValue(raw_input);
+                          }
+                      },
+                    },
+                  ],
+                },
+                { xtype:'panel',
+                  layout: 'hbox',
+                  items: [
+                    { xtype:'panel',
+                      id:name+"_result",
+                      autoScroll:true,
+                      flex: 1,
+                      html:result,
+                      boxMinHeight: 40,
+                    },
+                  ],
+                },
             ]
         }
     );


--- a/yt/gui/reason/html/js/reason.js	Tue Jun 21 16:13:14 2011 +0200
+++ b/yt/gui/reason/html/js/reason.js	Tue Jun 21 19:17:34 2011 +0200
@@ -55,11 +55,11 @@
 }
 
 var repl_input = new Ext.FormPanel({
-    title: 'YT Input',
     url: 'push',
-    flex: 0.2,
     layout: 'fit',
     padding: 5,
+    height: '100%',
+    flex: 1.0,
     items: [{
         id: 'input_line',
         xtype: 'textarea',
@@ -103,14 +103,37 @@
 });
 
 
-
+var CellInputContainer = new Ext.Panel({
+    title: 'YT Input',
+    flex: 0.3,
+    layout: {type: 'hbox',
+             pack: 'start',
+             align: 'stretch',
+             },
+    items: [ repl_input,
+            { xtype: 'button',
+              width: 24,
+              height: 24,
+              iconCls: 'doubledownarrow',
+              tooltip: 'Execute Cell',
+              listeners: {
+                  click: function(f, e) {
+                    disable_input();
+                    yt_rpc.ExtDirectREPL.execute({
+                        code:repl_input.get('input_line').getValue()},
+                    handle_result);
+                  }
+              },
+            }
+           ]
+});
 
 
 var OutputContainer = new Ext.Panel({
     title: 'YT Output',
     id: 'output_container',
     autoScroll: true,
-    flex: 0.8,
+    flex: 0.7,
     items: []
 });
 
@@ -141,6 +164,11 @@
                 null, {preventDefault: true});
             }
         },
+        dblclick: {
+            fn: function(node, e) {
+                treePanel.fireEvent("contextmenu", node, e);
+            }
+        },
         contextmenu: {
             fn: function(node, event){
                 var rightclickMenu;
@@ -156,10 +184,10 @@
                 } else if (node.attributes.objdata.type == 'pf') {
                   rightClickMenu = new Ext.menu.Menu({
                       items: [
-                          {
+                          /*{
                               text: 'View Grids',
                               handler: getGridViewerHandler(node),
-                          }, {
+                          },*/ {
                               text: 'View Grid Data',
                               handler: getGridDataViewerHandler(node),
                           }, {
@@ -171,10 +199,10 @@
                           /*}, {
                               text: 'Create Sphere',
                               handler: getSphereCreator(node), */
-                          }, {
+                          }, /*{
                               text: 'View Streamlines',
                               handler: getStreamlineViewerHandler(node),
-                          }
+                          }, */
                       ]
                   });
                 }
@@ -263,7 +291,7 @@
                         closable: false,
                         autoScroll: false,
                         iconCls: 'console',
-                        items: [repl_input, OutputContainer]
+                        items: [CellInputContainer, OutputContainer]
                     }, 
                 ]
             }


--- a/yt/gui/reason/html/map_index.html	Tue Jun 21 16:13:14 2011 +0200
+++ b/yt/gui/reason/html/map_index.html	Tue Jun 21 19:17:34 2011 +0200
@@ -8,6 +8,8 @@
 <script type="text/javascript">
   $(document).ready(function() {
       // initialize the map on the "map" div with a given center and zoom 
+      $("#map").width($(window).width());
+      $("#map").height($(window).height());
       var map = new L.Map('map', {
                     center: new L.LatLng(0.0, 0.0),
                     zoom: 0,


--- a/yt/mods.py	Tue Jun 21 16:13:14 2011 +0200
+++ b/yt/mods.py	Tue Jun 21 19:17:34 2011 +0200
@@ -40,6 +40,7 @@
 from yt.utilities.performance_counters import yt_counters, time_function
 from yt.config import ytcfg
 import yt.utilities.physical_constants as physical_constants
+from yt.utilities.cookbook import Intent
 
 from yt.data_objects.api import \
     BinnedProfile1D, BinnedProfile2D, BinnedProfile3D, \


--- a/yt/utilities/_amr_utils/ContourFinding.pyx	Tue Jun 21 16:13:14 2011 +0200
+++ b/yt/utilities/_amr_utils/ContourFinding.pyx	Tue Jun 21 19:17:34 2011 +0200
@@ -26,6 +26,7 @@
 import numpy as np
 cimport numpy as np
 cimport cython
+from stdlib cimport malloc, free
 
 cdef extern from "math.h":
     double fabs(double x)
@@ -38,6 +39,50 @@
     if i0 < i1: return i0
     return i1
 
+cdef extern from "union_find.h":
+    ctypedef struct forest_node:
+        void *value
+        forest_node *parent
+        int rank
+
+    forest_node* MakeSet(void* value)
+    void Union(forest_node* node1, forest_node* node2)
+    forest_node* Find(forest_node* node)
+
+ctypedef struct CellIdentifier:
+    np.int64_t hindex
+    int level
+
+cdef class GridContourContainer:
+    cdef np.int64_t dims[3]
+    cdef np.int64_t start_indices[3]
+    cdef forest_node **join_tree
+    cdef np.int64_t ncells
+
+    def __init__(self, dimensions, indices):
+        cdef int i
+        self.ncells = 1
+        for i in range(3):
+            self.ncells *= dimensions[i]
+            self.dims[i] = dimensions[i]
+            self.start_indices[i] = indices[i]
+        self.join_tree = <forest_node **> malloc(sizeof(forest_node) 
+                                                 * self.ncells)
+        for i in range(self.ncells): self.join_tree[i] = NULL
+
+    def __dealloc__(self):
+        cdef int i
+        for i in range(self.ncells):
+            if self.join_tree[i] != NULL: free(self.join_tree[i])
+        free(self.join_tree)
+
+    #def construct_join_tree(self,
+    #            np.ndarray[np.float64_t, ndim=3] field,
+    #            np.ndarray[np.bool_t, ndim=3] mask):
+    #    # This only looks at the components of the grid that are actually
+    #    # inside this grid -- boundary conditions are handled later.
+    #    pass
+
 #@cython.boundscheck(False)
 #@cython.wraparound(False)
 def construct_boundary_relationships(


--- a/yt/utilities/_amr_utils/VolumeIntegrator.pyx	Tue Jun 21 16:13:14 2011 +0200
+++ b/yt/utilities/_amr_utils/VolumeIntegrator.pyx	Tue Jun 21 19:17:34 2011 +0200
@@ -58,6 +58,7 @@
 cdef extern from "math.h":
     double exp(double x)
     float expf(float x)
+    long double expl(long double x)
     double floor(double x)
     double ceil(double x)
     double fmod(double x, double y)
@@ -742,7 +743,7 @@
     cdef void add_stars(self, kdtree_utils.kdres *ballq,
             np.float64_t dt, np.float64_t pos[3], np.float64_t *rgba):
         cdef int i, n, ns
-        cdef double px, py, pz
+        cdef np.float64_t px, py, pz
         cdef np.float64_t gexp, gaussian
         cdef np.float64_t* colors = NULL
         ns = kdtree_utils.kd_res_size(ballq)
@@ -754,7 +755,7 @@
             gexp = (px - pos[0])*(px - pos[0]) \
                  + (py - pos[1])*(py - pos[1]) \
                  + (pz - pos[2])*(pz - pos[2])
-            gaussian = self.star_coeff * exp(-gexp/self.star_sigma_num)
+            gaussian = self.star_coeff * expl(-gexp/self.star_sigma_num)
             for i in range(3): rgba[i] += gaussian*dt*colors[i]
         kdtree_utils.kd_res_rewind(ballq)
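
Switching exp to expl evaluates the star Gaussian in long double, whose main practical benefit is extended exponent range: a double exp() underflows to zero near exp(-745), while the 80-bit long double holds on to values beyond exp(-11000). The same effect, seen from numpy (on platforms where longdouble is the x87 80-bit type):

    import numpy as np

    print np.exp(np.float64(-800.0))      # 0.0 -- double underflows
    print np.exp(np.longdouble(-800.0))   # ~3.7e-348, still representable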
         


--- a/yt/utilities/_amr_utils/misc_utilities.pyx	Tue Jun 21 16:13:14 2011 +0200
+++ b/yt/utilities/_amr_utils/misc_utilities.pyx	Tue Jun 21 19:17:34 2011 +0200
@@ -67,8 +67,8 @@
             continue
         inside = 1
         for n in range(3):
-            if left_edge[n] > right_edges[i,n] or \
-               right_edge[n] < left_edges[i,n]:
+            if left_edge[n] >= right_edges[i,n] or \
+               right_edge[n] <= left_edges[i,n]:
                 inside = 0
                 break
         if inside == 1: mask[i] = 1
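
The change from strict to non-strict comparisons tightens the rejection test, so grids that merely touch at a face no longer count as overlapping. The same test in Python terms:

    def overlaps(le1, re1, le2, re2):
        # True only for genuine volume overlap; shared faces don't count.
        return all(l1 < r2 and r1 > l2
                   for l1, r1, l2, r2 in zip(le1, re1, le2, re2))

    a = ([0.0, 0.0, 0.0], [0.5, 1.0, 1.0])
    b = ([0.5, 0.0, 0.0], [1.0, 1.0, 1.0])  # abuts a at x = 0.5
    print overlaps(a[0], a[1], b[0], b[1])  # False: touching is not overlap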


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/utilities/_amr_utils/union_find.c	Tue Jun 21 19:17:34 2011 +0200
@@ -0,0 +1,66 @@
+/* Copyright (c) 2011 the authors listed at the following URL, and/or
+the authors of referenced articles or incorporated external code:
+http://en.literateprograms.org/Disjoint_set_data_structure_(C)?action=history&offset=20080516180553
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+Retrieved from: http://en.literateprograms.org/Disjoint_set_data_structure_(C)?oldid=13366
+*/
+
+#include <stdlib.h>
+
+#include "union_find.h"
+
+forest_node* MakeSet(void* value) {
+    forest_node* node = malloc(sizeof(forest_node));
+    node->value = value;
+    node->parent = NULL;
+    node->rank = 0;
+    return node;
+}
+
+void Union(forest_node* node1, forest_node* node2) {
+    if (node1->rank > node2->rank) {
+        node2->parent = node1;
+    } else if (node2->rank > node1->rank) {
+        node1->parent = node2;
+    } else { /* they are equal */
+        node2->parent = node1;
+        node1->rank++;
+    }
+}
+
+forest_node* Find(forest_node* node) {
+    forest_node* temp;
+    /* Find the root */
+    forest_node* root = node;
+    while (root->parent != NULL) {
+        root = root->parent;
+    }
+    /* Update the parent pointers */
+    while (node->parent != NULL) {
+        temp = node->parent;
+        node->parent = root;
+        node = temp;
+    }
+    return root;
+}
+
+
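
This is the textbook disjoint-set forest: union by rank, with path compression in Find. A direct Python transliteration, for readers who prefer it to the pointer version:

    class ForestNode(object):
        def __init__(self, value):
            self.value = value
            self.parent = None
            self.rank = 0

    def union(root1, root2):
        # Attach the lower-rank root beneath the higher-rank one;
        # like the C Union, this expects two roots as input.
        if root1.rank > root2.rank:
            root2.parent = root1
        elif root2.rank > root1.rank:
            root1.parent = root2
        else:
            root2.parent = root1
            root1.rank += 1

    def find(node):
        root = node
        while root.parent is not None:
            root = root.parent
        # Path compression: point every visited node straight at the root.
        while node.parent is not None:
            temp = node.parent
            node.parent = root
            node = temp
        return root

    a, b = ForestNode("a"), ForestNode("b")
    union(find(a), find(b))
    print find(a) is find(b)  # True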


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/utilities/_amr_utils/union_find.h	Tue Jun 21 19:17:34 2011 +0200
@@ -0,0 +1,41 @@
+/* Copyright (c) 2011 the authors listed at the following URL, and/or
+the authors of referenced articles or incorporated external code:
+http://en.literateprograms.org/Disjoint_set_data_structure_(C)?action=history&offset=20080516180553
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+Retrieved from: http://en.literateprograms.org/Disjoint_set_data_structure_(C)?oldid=13366
+*/
+
+#ifndef _UNION_FIND_H_
+#define _UNION_FIND_H_
+
+typedef struct forest_node_t {
+    void* value;
+    struct forest_node_t* parent;
+    int rank;
+} forest_node;
+
+forest_node* MakeSet(void* value);
+void Union(forest_node* node1, forest_node* node2);
+forest_node* Find(forest_node* node);
+
+#endif /* #ifndef _UNION_FIND_H_ */
+


--- a/yt/utilities/amr_kdtree/amr_kdtree.py	Tue Jun 21 16:13:14 2011 +0200
+++ b/yt/utilities/amr_kdtree/amr_kdtree.py	Tue Jun 21 19:17:34 2011 +0200
@@ -259,6 +259,7 @@
         self.current_split_dim = 0
 
         self.pf = pf
+        self._id_offset = pf.h.grids[0]._id_offset
         if nprocs > len(pf.h.grids):
             print('Parallel rendering requires that the number of \n \
             grids in the dataset is greater or equal to the number of \n \
@@ -568,7 +569,7 @@
         None
         
         """
-        thisnode.grid = self.pf.hierarchy.grids[thisnode.grid - 1]
+        thisnode.grid = self.pf.hierarchy.grids[thisnode.grid - self._id_offset]
         
         dds = thisnode.grid.dds
         gle = thisnode.grid.LeftEdge
@@ -844,7 +845,7 @@
                     # Check if we have children and have not exceeded l_max
                     if len(thisgrid.Children) > 0 and thisgrid.Level < self.l_max:
                         # Get the children that are actually in the current volume
-                        children = [child.id - 1 for child in thisgrid.Children  
+                        children = [child.id - self._id_offset for child in thisgrid.Children  
                                     if na.all(child.LeftEdge < current_node.r_corner) & 
                                     na.all(child.RightEdge > current_node.l_corner)]
 


--- a/yt/utilities/answer_testing/api.py	Tue Jun 21 16:13:14 2011 +0200
+++ b/yt/utilities/answer_testing/api.py	Tue Jun 21 19:17:34 2011 +0200
@@ -42,3 +42,6 @@
 from .default_tests import \
     TestFieldStatistics, \
     TestAllProjections
+
+from .xunit import \
+    Xunit


--- a/yt/utilities/command_line.py	Tue Jun 21 16:13:14 2011 +0200
+++ b/yt/utilities/command_line.py	Tue Jun 21 19:17:34 2011 +0200
@@ -556,7 +556,8 @@
         """
         pf = _fix_pf(arg)
         pf.h.print_stats()
-        v, c = pf.h.find_max("Density")
+        if "Density" in pf.h.field_list:
+            v, c = pf.h.find_max("Density")
         print "Maximum density: %0.5e at %s" % (v, c)
         if opts.output is not None:
             t = pf.current_time * pf['years']
@@ -1246,6 +1247,24 @@
         while 1:
             time.sleep(1)
 
+    def do_intents(self, subcmd, opts, *intents):
+        """
+        ${cmd_name}: What are your ... intentions?
+
+        ${cmd_usage}
+        ${cmd_option_list}
+        """
+        from yt.utilities.cookbook import Intent
+        if len(intents) == 0:
+            Intent.list_intents()
+        else:
+            intent = Intent.select_intent(intents[0])
+            if intent is None:
+                print "Could not find %s" % intents[0]
+                return 1
+            intent_inst = intent(intents[1:])
+            intent_inst.run()
+
 def run_main():
     for co in ["--parallel", "--paste"]:
         if co in sys.argv: del sys.argv[sys.argv.index(co)]
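
Driving the same machinery from a script mirrors what do_intents does: list everything when given no name, otherwise select and run (the intent name below is hypothetical):

    from yt.utilities.cookbook import Intent

    Intent.list_intents()  # prints every discovered intent with its desc
    intent = Intent.select_intent("project_density")
    if intent is None:
        print "Could not find project_density"
    else:
        intent(["DD0010/moving7_0010"]).run()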


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/utilities/cookbook.py	Tue Jun 21 19:17:34 2011 +0200
@@ -0,0 +1,109 @@
+"""
+A way to find and utilize recipes
+
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: KIPAC/SLAC/Stanford
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+# See also:
+#  http://en.wikipedia.org/wiki/Me_(mythology)
+
+import os
+import argparse
+import abc
+import glob
+import imp
+import types
+import sys
+import pprint
+
+def _load_intent(intent_path):
+    mname = os.path.basename(intent_path[:-3])
+    f, filename, desc = imp.find_module(mname,
+            [os.path.dirname(intent_path)])
+    intent = imp.load_module(mname, f, filename, desc)
+    for i in sorted(dir(intent)):
+        obj = getattr(intent, i)
+        if issubclass(obj, Intent) and \
+           isinstance(obj.desc, types.StringTypes):
+             return obj
+    return None
+
+def _find_cookbook_dir():
+    yt_dest = os.environ.get("YT_DEST", None)
+    if yt_dest is None:
+        print "YT_DEST is not set!  Set it and try again."
+        return False
+    cookbook_dir = os.path.join(yt_dest,
+        "src/yt-supplemental/yt-cookbook/intents")
+    if not os.path.isdir(cookbook_dir):
+        print "Cookbook does not contain 'intents' directory."
+        print "Update with 'yt instinfo -u' and try again."
+        print "(%s)" % cookbook_dir
+        return False
+    return cookbook_dir
+
+class Intent(object):
+    __metaclass__ = abc.ABCMeta
+
+    def __init__(self, args):
+        self.args = args
+        if "help" in self.args:
+            print
+            print "The arguments to supply, in order:"
+            print
+            print self.help
+            print
+            sys.exit()
+
+    @abc.abstractmethod
+    def run(self):
+        pass
+
+    @abc.abstractproperty
+    def desc(self): pass
+
+    @abc.abstractproperty
+    def help(self): pass
+
+    @classmethod
+    def list_intents(self):
+        intents = []
+        cookbook_dir = _find_cookbook_dir()
+        if cookbook_dir is False: return 1
+        for fn in sorted(glob.glob(os.path.join(cookbook_dir, "*"))):
+            # We skim them, looking for the 'Intent' subclass
+            if any(("(Intent):" in line for line in open(fn))):
+                intents.append((os.path.basename(fn)[:-3],
+                                _load_intent(fn)))
+        print
+        print "Found these Intents:"
+        print "\n".join(("% 15s: %s" % (a, b.desc) for a, b in intents))
+        print
+
+    @classmethod
+    def select_intent(self, intent_name):
+        cookbook_dir = _find_cookbook_dir()
+        intent = None
+        for fn in glob.glob(os.path.join(cookbook_dir, "*")):
+            if os.path.basename(fn)[:-3] == intent_name:
+                intent = _load_intent(fn)
+        return intent
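
For context, a concrete Intent is one class per file in the cookbook's intents directory, carrying a string desc (which is how _load_intent recognizes it) plus help text and a run() method. A hypothetical example:

    from yt.utilities.cookbook import Intent

    class ProjectDensity(Intent):
        desc = "Project Density through a parameter file"
        help = "1. path to the parameter file\n2. axis (0, 1, or 2)"

        def run(self):
            from yt.mods import load, PlotCollection
            pf = load(self.args[0])
            pc = PlotCollection(pf)
            pc.add_projection("Density", int(self.args[1]))
            pc.save("projection")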


--- a/yt/utilities/data_point_utilities.c	Tue Jun 21 16:13:14 2011 +0200
+++ b/yt/utilities/data_point_utilities.c	Tue Jun 21 19:17:34 2011 +0200
@@ -880,7 +880,7 @@
     npy_int64 gxs, gys, gzs, gxe, gye, gze;
     npy_int64 cxs, cys, czs, cxe, cye, cze;
     npy_int64 ixs, iys, izs, ixe, iye, ize;
-    npy_int64 gxi, gyi, gzi, cxi, cyi, czi;
+    int gxi, gyi, gzi, cxi, cyi, czi;
     npy_int64 cdx, cdy, cdz;
     npy_int64 dw[3];
     int i;
@@ -1014,17 +1014,17 @@
         ci = (cxi % dw[0]);
         ci = (ci < 0) ? ci + dw[0] : ci;
         if ( ci < gxs*refratio || ci >= gxe*refratio) continue;
-        gxi = floor(ci / refratio) - gxs;
+        gxi = ((int) (ci / refratio)) - gxs;
         for(cyi=cys;cyi<=cye;cyi++) {
             cj = cyi % dw[1];
             cj = (cj < 0) ? cj + dw[1] : cj;
             if ( cj < gys*refratio || cj >= gye*refratio) continue;
-            gyi = floor(cj / refratio) - gys;
+            gyi = ((int) (cj / refratio)) - gys;
             for(czi=czs;czi<=cze;czi++) {
                 ck = czi % dw[2];
                 ck = (ck < 0) ? ck + dw[2] : ck;
                 if ( ck < gzs*refratio || ck >= gze*refratio) continue;
-                gzi = floor(ck / refratio) - gzs;
+                gzi = ((int) (ck / refratio)) - gzs;
                     if ((ll) || (*(npy_int32*)PyArray_GETPTR3(mask, gxi,gyi,gzi) > 0)) 
                 {
                 for(n=0;n<n_fields;n++){
@@ -1214,43 +1214,75 @@
     cye = (cys + cdy - 1);
     cze = (czs + cdz - 1);
 
-    /* It turns out that C89 doesn't define a mechanism for choosing the sign
-       of the remainder.
-    */
     int x_loc, y_loc; // For access into the buffer
-    for(cxi=cxs;cxi<=cxe;cxi++) {
+
+    /* We check here if the domain is important or not.
+       If it's not, then, well, we get to use the fast version. */
+    if (dw[0] == dw[1] == dw[2] == 0) {
+      for(gxi=gxs,cxi=gxs*refratio;gxi<gxe;gxi++,cxi+=refratio) {
+        for(gyi=gys,cyi=gys*refratio;gyi<gye;gyi++,cyi+=refratio) {
+          for(gzi=gzs,czi=gzs*refratio;gzi<gze;gzi++,czi+=refratio) {
+            if ((refratio!=1) &&
+                (*(npy_int32*)PyArray_GETPTR3(mask, gxi,gyi,gzi)==0)) continue;
+            switch (axis) {
+              case 0: x_loc = cyi-cys; y_loc = czi-czs; break;
+              case 1: x_loc = cxi-cxs; y_loc = czi-czs; break;
+              case 2: x_loc = cxi-cys; y_loc = cyi-cys; break;
+            }
+            //fprintf(stderr, "%d %d %d %d %d\n", x_loc, y_loc, gxi, gyi, gzi);
+            for(ri=0;ri<refratio;ri++){
+              for(rj=0;rj<refratio;rj++){
+                for(n=0;n<n_fields;n++){
+                  for(n=0;n<n_fields;n++){
+                    *(npy_float64*) PyArray_GETPTR2(c_data[n], x_loc+ri, y_loc+rj)
+                      +=  *(npy_float64*) PyArray_GETPTR3(g_data[n],
+                          gxi-gxs, gyi-gys, gzi-gzs) * dls[n];
+                  }
+                }
+              }
+            }
+            total+=1;
+          }
+        }
+      }
+    } else {
+      /* Gotta go the slow route. */
+      for(cxi=gxs*refratio;cxi<=cxe;cxi++) {
+        /* It turns out that C89 doesn't define a mechanism for choosing the sign
+           of the remainder.
+         */
         ci = (cxi % dw[0]);
         ci = (ci < 0) ? ci + dw[0] : ci;
-        if ( ci < gxs*refratio || ci >= gxe*refratio) continue;
+        if ( ci >= gxe*refratio) break;
         gxi = floor(ci / refratio) - gxs;
-        for(cyi=cys;cyi<=cye;cyi++) {
-            cj = cyi % dw[1];
-            cj = (cj < 0) ? cj + dw[1] : cj;
-            if ( cj < gys*refratio || cj >= gye*refratio) continue;
-            gyi = floor(cj / refratio) - gys;
-            for(czi=czs;czi<=cze;czi++) {
-                ck = czi % dw[2];
-                ck = (ck < 0) ? ck + dw[2] : ck;
-                if ( ck < gzs*refratio || ck >= gze*refratio) continue;
-                gzi = floor(ck / refratio) - gzs;
-                    if (refratio == 1 || *(npy_int32*)PyArray_GETPTR3(mask, gxi,gyi,gzi) > 0)
-                {
-                switch (axis) {
-                  case 0: x_loc = cyi-cys; y_loc = czi-czs; break;
-                  case 1: x_loc = cxi-cxs; y_loc = czi-czs; break;
-                  case 2: x_loc = cxi-cys; y_loc = cyi-cys; break;
-                }
-                for(n=0;n<n_fields;n++){
-                    *(npy_float64*) PyArray_GETPTR2(c_data[n], x_loc, y_loc)
-                    +=  *(npy_float64*) PyArray_GETPTR3(g_data[n], gxi, gyi, gzi) 
-                        * dls[n] / refratio;
-                }
-                total += 1;
-                }
+        for(cyi=gys*refratio;cyi<=cye;cyi++) {
+          cj = cyi % dw[1];
+          cj = (cj < 0) ? cj + dw[1] : cj;
+          if ( cj >= gye*refratio) break;
+          gyi = floor(cj / refratio) - gys;
+          for(czi=gzs*refratio;czi<=cze;czi++) {
+            ck = czi % dw[2];
+            ck = (ck < 0) ? ck + dw[2] : ck;
+            if ( ck >= gze*refratio) break;
+            gzi = floor(ck / refratio) - gzs;
+            if (refratio == 1 || *(npy_int32*)PyArray_GETPTR3(mask, gxi,gyi,gzi) > 0)
+            {
+              switch (axis) {
+                case 0: x_loc = cyi-cys; y_loc = czi-czs; break;
+                case 1: x_loc = cxi-cxs; y_loc = czi-czs; break;
+                case 2: x_loc = cxi-cxs; y_loc = cyi-cys; break;
+              }
+              for(n=0;n<n_fields;n++){
+                *(npy_float64*) PyArray_GETPTR2(c_data[n], x_loc, y_loc)
+                  +=  *(npy_float64*) PyArray_GETPTR3(g_data[n], gxi, gyi, gzi) 
+                  * dls[n] / refratio;
+              }
+              total += 1;
             }
+          }
         }
+      }
     }
-
     Py_DECREF(g_start);
     Py_DECREF(c_start);
     Py_DECREF(g_dims);
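
For context on the slow path above: C89 leaves the sign of % unspecified
when the left operand is negative, which is why each wrapped index
re-adds the domain width whenever the remainder comes back negative.
A minimal Python sketch of that wrap (Python's % is never negative for a
positive modulus, so the fix-up is a no-op here, but it mirrors the C logic):

    def wrap(index, width):
        r = index % width
        return r + width if r < 0 else r  # the C89 sign fix-up
    assert wrap(-1, 8) == 7 and wrap(9, 8) == 1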


--- a/yt/utilities/setup.py	Tue Jun 21 16:13:14 2011 +0200
+++ b/yt/utilities/setup.py	Tue Jun 21 19:17:34 2011 +0200
@@ -161,7 +161,8 @@
     config.add_extension("amr_utils", 
         ["yt/utilities/amr_utils.pyx",
          "yt/utilities/_amr_utils/FixedInterpolator.c",
-         "yt/utilities/_amr_utils/kdtree.c"] +
+         "yt/utilities/_amr_utils/kdtree.c",
+         "yt/utilities/_amr_utils/union_find.c"] +
          glob.glob("yt/utilities/_amr_utils/healpix_*.c"), 
         define_macros=[("PNG_SETJMP_NOT_SUPPORTED", True)],
         include_dirs=["yt/utilities/_amr_utils/", png_inc,
@@ -172,6 +173,10 @@
                 glob.glob("yt/utilities/_amr_utils/*.h") +
                 glob.glob("yt/utilities/_amr_utils/*.c"),
         )
+    #config.add_extension("voropp",
+    #    ["yt/utilities/voropp.pyx"],
+    #    language="c++",
+    #    include_dirs=["yt/utilities/voro++"])
     config.add_extension("libconfig_wrapper", 
         ["yt/utilities/libconfig_wrapper.pyx"] +
          glob.glob("yt/utilities/_libconfig/*.c"), 


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/utilities/voropp.pyx	Tue Jun 21 19:17:34 2011 +0200
@@ -0,0 +1,69 @@
+"""
+Wrapping code for voro++
+
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: Columbia University
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+from cython.operator cimport dereference as deref, preincrement as inc
+from libc.stdlib cimport malloc, free, abs, calloc, labs
+cimport libcpp
+
+import numpy as np
+cimport numpy as np
+cimport cython
+
+cdef extern from "voro++.cc":
+    cdef cppclass container:
+        container(double xmin, double xmax, double ymin, double ymax,
+                  double zmin, double zmax, int nx, int ny, int nz,
+                  libcpp.bool xper, libcpp.bool yper, libcpp.bool zper, int alloc)
+        void put(int n, double x, double y, double z)
+        void store_cell_volumes(double *vols)
+
+cdef class VoronoiVolume:
+    cdef container *my_con
+    cdef int npart
+    def __init__(self, xi, yi, zi):
+        self.my_con = new container(0.0, 1.0, 0.0, 1.0, 0.0, 1.0,
+                                    xi, yi, zi, False, False, False, 8)
+        self.npart = 0
+
+    def __dealloc__(self):
+        del self.my_con
+
+    @cython.boundscheck(False)
+    @cython.wraparound(False)
+    def add_array(self, np.ndarray[np.float64_t, ndim=1] xpos,
+                        np.ndarray[np.float64_t, ndim=1] ypos,
+                        np.ndarray[np.float64_t, ndim=1] zpos):
+        cdef int i
+        for i in range(xpos.shape[0]):
+            self.my_con.put(self.npart, xpos[i], ypos[i], zpos[i])
+            self.npart += 1
+
+    @cython.boundscheck(False)
+    @cython.wraparound(False)
+    def get_volumes(self):
+        cdef np.ndarray vol = np.zeros(self.npart, 'double')
+        cdef double *vdouble = <double *> vol.data
+        self.my_con.store_cell_volumes(vdouble)
+        return vol
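
A hedged usage sketch for the new wrapper (assumes the voropp extension
is built and importable, which setup.py currently leaves commented out;
positions must lie inside the unit cube the container spans):

    import numpy as np
    from yt.utilities.voropp import VoronoiVolume

    np.random.seed(0)
    x = np.random.random(1000)
    y = np.random.random(1000)
    z = np.random.random(1000)
    vv = VoronoiVolume(8, 8, 8)  # 8x8x8 internal grid blocks over the unit cube
    vv.add_array(x, y, z)
    vols = vv.get_volumes()      # one Voronoi cell volume per particle
    print(vols.sum())            # should land near 1.0 for the unit box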


--- a/yt/visualization/eps_writer.py	Tue Jun 21 16:13:14 2011 +0200
+++ b/yt/visualization/eps_writer.py	Tue Jun 21 19:17:34 2011 +0200
@@ -279,7 +279,7 @@
         if isinstance(plot, VMPlot):
             if units == None:
                 # Determine the best units
-                astro_units = ['cm', 'rsun', 'au', 'pc', 'kpc', 'Mpc']
+                astro_units = ['cm', 'rsun', 'au', 'pc', 'kpc', 'mpc']
                 best_fit = 0
                 while plot.width*plot.pf[astro_units[best_fit]] > 1e3 and \
                           best_fit < len(astro_units):


--- a/yt/visualization/image_writer.py	Tue Jun 21 16:13:14 2011 +0200
+++ b/yt/visualization/image_writer.py	Tue Jun 21 19:17:34 2011 +0200
@@ -29,7 +29,7 @@
 import _colormap_data as cmd
 import yt.utilities.amr_utils as au
 
-def scale_image(image):
+def scale_image(image, mi=None, ma=None):
     r"""Scale an image ([NxNxM] where M = 1-4) to be uint8 and values scaled 
     from [0,255].
 
@@ -41,13 +41,17 @@
     --------
 
         >>> image = scale_image(image)
+
+        >>> image = scale_image(image, mi=0, ma=1000)
     """
     if isinstance(image, na.ndarray) and image.dtype == na.uint8:
         return image
     if isinstance(image, (types.TupleType, types.ListType)):
         image, mi, ma = image
-    else:
-        mi, ma = image.min(), image.max()
+    if mi is None:
+        mi = image.min()
+    if ma is None:
+        ma = image.max()
     image = (na.clip((image-mi)/(ma-mi) * 255, 0, 255)).astype('uint8')
     return image
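
A short usage sketch for the new explicit-bounds path (values are
illustrative; plain numpy stands in for yt's na alias):

    import numpy as np
    from yt.visualization.image_writer import scale_image

    img = np.random.random((64, 64, 3)) * 1000.0
    u8 = scale_image(img, mi=0.0, ma=1000.0)  # clip and rescale into uint8 [0, 255]
    assert u8.dtype == np.uint8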
 


--- a/yt/visualization/volume_rendering/camera.py	Tue Jun 21 16:13:14 2011 +0200
+++ b/yt/visualization/volume_rendering/camera.py	Tue Jun 21 19:17:34 2011 +0200
@@ -31,12 +31,14 @@
 from .transfer_functions import ProjectionTransferFunction
 
 from yt.utilities.amr_utils import TransferFunctionProxy, VectorPlane, \
-    arr_vec2pix_nest, arr_pix2vec_nest, AdaptiveRaySource
+    arr_vec2pix_nest, arr_pix2vec_nest, AdaptiveRaySource, \
+    arr_ang2pix_nest
 from yt.visualization.image_writer import write_bitmap
 from yt.data_objects.data_containers import data_object_registry
 from yt.utilities.parallel_tools.parallel_analysis_interface import \
     ParallelAnalysisInterface
 from yt.utilities.amr_kdtree.api import AMRKDTree
+from numpy import pi
 
 class Camera(ParallelAnalysisInterface):
     def __init__(self, center, normal_vector, width,
@@ -318,7 +320,7 @@
                                       self.unit_vectors[1])
         return vector_plane
 
-    def snapshot(self, fn = None):
+    def snapshot(self, fn = None, clip_ratio = None):
         r"""Ray-cast the camera.
 
         This method instructs the camera to take a snapshot -- i.e., call the ray
@@ -329,6 +331,9 @@
         fn : string, optional
             If supplied, the image will be saved out to this before being
             returned.  Scaling will be to the maximum value.
+        clip_ratio : float, optional
+            If supplied, the 'max_val' argument to write_bitmap will be handed
+            clip_ratio * image.std()
 
         Returns
         -------
@@ -352,7 +357,10 @@
         pbar.finish()
 
         if self._mpi_get_rank() is 0 and fn is not None:
-            write_bitmap(image, fn)
+            if clip_ratio is not None:
+                write_bitmap(image, fn, clip_ratio*image.std())
+            else:
+                write_bitmap(image, fn)
 
         return image
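
The effect of the new clip_ratio argument, sketched outside write_bitmap
(whose internal scaling is not reproduced exactly here): the image is
scaled against clip_ratio * image.std() rather than its raw maximum,
which keeps a handful of hot pixels from washing out the frame.

    import numpy as np
    image = np.random.lognormal(size=(16, 16, 3))
    clip_ratio = 4.0
    max_val = clip_ratio * image.std()  # what snapshot() now hands to write_bitmap
    scaled = np.clip(image / max_val * 255.0, 0.0, 255.0).astype("uint8")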
 
@@ -598,6 +606,24 @@
             pbar.update(total_cells)
         pbar.finish()
 
+        if self._mpi_get_rank() == 0 and fn is not None:
+            # This assumes the rendered field is Density, a relatively
+            # safe assumption for an all-sky column density image.
+            import matplotlib.figure
+            import matplotlib.backends.backend_agg
+            phi, theta = na.mgrid[0.0:2*pi:800j, 0:pi:800j]
+            pixi = arr_ang2pix_nest(self.nside, theta.ravel(), phi.ravel())
+            image *= self.radius * self.pf['cm']
+            img = na.log10(image[:,0,0][pixi]).reshape((800,800))
+
+            fig = matplotlib.figure.Figure((10, 5))
+            ax = fig.add_subplot(1,1,1,projection='mollweide')
+            implot = ax.imshow(img, extent=(-pi,pi,-pi/2,pi/2), clip_on=False, aspect=0.5)
+            cb = fig.colorbar(implot, orientation='horizontal')
+            cb.set_label(r"$\mathrm{Column}\/\mathrm{Density}\/[\mathrm{g}/\mathrm{cm}^2]$")
+            ax.xaxis.set_ticks(())
+            ax.yaxis.set_ticks(())
+            canvas = matplotlib.backends.backend_agg.FigureCanvasAgg(fig)
+            canvas.print_figure(fn)
         return image
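
A standalone sketch of the all-sky dump added above, with random data
standing in for the HEALPix-resampled column density (arr_ang2pix_nest
and the camera internals are omitted):

    import numpy as np
    from numpy import pi
    import matplotlib.figure
    import matplotlib.backends.backend_agg

    img = np.log10(np.random.lognormal(size=(800, 800)))  # stand-in image
    fig = matplotlib.figure.Figure((10, 5))
    ax = fig.add_subplot(1, 1, 1, projection='mollweide')
    implot = ax.imshow(img, extent=(-pi, pi, -pi/2, pi/2), clip_on=False, aspect=0.5)
    cb = fig.colorbar(implot, orientation='horizontal')
    cb.set_label(r"$\mathrm{Column}\/\mathrm{Density}\/[\mathrm{g}/\mathrm{cm}^2]$")
    ax.xaxis.set_ticks(())
    ax.yaxis.set_ticks(())
    canvas = matplotlib.backends.backend_agg.FigureCanvasAgg(fig)
    canvas.print_figure("allsky.png")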
 
 


--- a/yt/visualization/volume_rendering/transfer_functions.py	Tue Jun 21 16:13:14 2011 +0200
+++ b/yt/visualization/volume_rendering/transfer_functions.py	Tue Jun 21 19:17:34 2011 +0200
@@ -547,7 +547,7 @@
             if mi is None: mi = col_bounds[0] + dist/(10.0*N)
             if ma is None: ma = col_bounds[1] - dist/(10.0*N)
         if w is None: w = 0.001 * (ma-mi)/N
-        if alpha is None: alpha = na.logspace(-2.0, 0.0, N)
+        if alpha is None: alpha = na.logspace(-3.0, 0.0, N)
         for v, a in zip(na.mgrid[mi:ma:N*1j], alpha):
             self.sample_colormap(v, w, a, colormap=colormap, col_bounds=col_bounds)
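
The default opacity ladder for layered transfer functions now spans
three decades instead of two, so the faintest layers start an order of
magnitude more transparent:

    import numpy as np
    alpha_old = np.logspace(-2.0, 0.0, 10)  # previous default: 0.01 .. 1.0
    alpha_new = np.logspace(-3.0, 0.0, 10)  # new default: 0.001 .. 1.0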

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.


