[Yt-svn] commit/yt: 2 new changesets

Bitbucket commits-noreply at bitbucket.org
Thu Jun 23 14:40:59 PDT 2011


2 new changesets in yt:

http://bitbucket.org/yt_analysis/yt/changeset/997e3cd923ed/
changeset:   997e3cd923ed
branch:      yt
user:        samskillman
date:        2011-06-23 23:39:40
summary:     Fix the reading of the file format version for FLASH.
affected #:  1 file (3 bytes)

--- a/yt/frontends/flash/data_structures.py	Wed Jun 15 10:55:57 2011 -0400
+++ b/yt/frontends/flash/data_structures.py	Thu Jun 23 15:39:40 2011 -0600
@@ -287,7 +287,7 @@
                 self._handle["file format version"][:])
         elif "sim info" in self._handle:
             self._flash_version = int(
-                self._handle["sim info"][:]["file format version"])
+                self._handle["sim info"]["file format version"])
         else:
             raise RuntimeError("Can't figure out FLASH file version.")
         self.domain_left_edge = na.array(


http://bitbucket.org/yt_analysis/yt/changeset/bc367a0f3566/
changeset:   bc367a0f3566
branch:      yt
user:        samskillman
date:        2011-06-23 23:40:50
summary:     merge
affected #:  10 files (1.2 KB)

--- a/tests/DD0010/moving7_0010	Thu Jun 23 15:39:40 2011 -0600
+++ b/tests/DD0010/moving7_0010	Thu Jun 23 15:40:50 2011 -0600
@@ -1,6 +1,7 @@
 InitialCycleNumber  = 10
 InitialTime         = 0.81751317119117
 InitialCPUTime      = 2.15207e+09
+CurrentTimeIdentifier = 0
 
 StopTime            = 20.097275649537
 StopCycle           = 10000


--- a/tests/runall.py	Thu Jun 23 15:39:40 2011 -0600
+++ b/tests/runall.py	Thu Jun 23 15:40:50 2011 -0600
@@ -4,7 +4,8 @@
 
 from yt.utilities.answer_testing.api import \
     RegressionTestRunner, clear_registry, create_test, \
-    TestFieldStatistics, TestAllProjections, registry_entries
+    TestFieldStatistics, TestAllProjections, registry_entries, \
+    Xunit
 
 from yt.utilities.command_line import get_yt_version
 
@@ -78,20 +79,25 @@
         sys.exit(1)
     # Now we modify our compare name and self name to include the pf.
     compare_id = opts.compare_name
-    if compare_id is not None: compare_id += "_%s_%s" % (pf, pf._hash())
+    watcher = None
+    if compare_id is not None:
+        compare_id += "_%s_%s" % (pf, pf._hash())
+        watcher = Xunit()
     this_id = opts.this_name + "_%s_%s" % (pf, pf._hash())
     rtr = RegressionTestRunner(this_id, compare_id,
             results_path = opts.storage_dir,
             compare_results_path = opts.storage_dir,
             io_log = [opts.parameter_file])
+    rtr.watcher = watcher
     tests_to_run = []
     for m, vals in mapping.items():
-        print vals, opts.test_pattern
         new_tests = fnmatch.filter(vals, opts.test_pattern)
         if len(new_tests) == 0: continue
         tests_to_run += new_tests
         load_tests(m, cwd)
     for test_name in sorted(tests_to_run):
         rtr.run_test(test_name)
+    if watcher is not None:
+        rtr.watcher.report()
     for test_name, result in sorted(rtr.passed_tests.items()):
         print "TEST %s: %s" % (test_name, result)


--- a/yt/data_objects/field_info_container.py	Thu Jun 23 15:39:40 2011 -0600
+++ b/yt/data_objects/field_info_container.py	Thu Jun 23 15:40:50 2011 -0600
@@ -156,6 +156,8 @@
             pf = fake_parameter_file(lambda: 1)
             pf.current_redshift = pf.omega_lambda = pf.omega_matter = \
                 pf.hubble_constant = pf.cosmological_simulation = 0.0
+            pf.domain_left_edge = na.zeros(3, 'float64')
+            pf.domain_right_edge = na.ones(3, 'float64')
         self.pf = pf
         class fake_hierarchy(object):
             class fake_io(object):


--- a/yt/data_objects/universal_fields.py	Thu Jun 23 15:39:40 2011 -0600
+++ b/yt/data_objects/universal_fields.py	Thu Jun 23 15:40:50 2011 -0600
@@ -413,17 +413,15 @@
 
 # This is rho_b / <rho_b>.
 def _Baryon_Overdensity(field, data):
-    return data['Density']
-def _Convert_Baryon_Overdensity(data):
     if data.pf.has_key('omega_baryon_now'):
         omega_baryon_now = data.pf['omega_baryon_now']
     else:
         omega_baryon_now = 0.0441
-    return 1 / (omega_baryon_now * rho_crit_now * 
-                (data.pf['CosmologyHubbleConstantNow']**2) * 
-                ((1+data.pf['CosmologyCurrentRedshift'])**3))
+    return data['Density'] / (omega_baryon_now * rho_crit_now * 
+                              (data.pf['CosmologyHubbleConstantNow']**2) * 
+                              ((1+data.pf['CosmologyCurrentRedshift'])**3))
 add_field("Baryon_Overdensity", function=_Baryon_Overdensity, 
-          convert_function=_Convert_Baryon_Overdensity, units=r"")
+          units=r"")
 
 # Weak lensing convergence.
 # Eqn 4 of Metzler, White, & Loken (2001, ApJ, 547, 560).


--- a/yt/frontends/enzo/fields.py	Thu Jun 23 15:39:40 2011 -0600
+++ b/yt/frontends/enzo/fields.py	Thu Jun 23 15:40:50 2011 -0600
@@ -286,8 +286,11 @@
 def _dmpdensity(field, data):
     blank = na.zeros(data.ActiveDimensions, dtype='float32')
     if data.NumberOfParticles == 0: return blank
-    filter = data['creation_time'] <= 0.0
-    if not filter.any(): return blank
+    if 'creation_time' in data.keys():
+        filter = data['creation_time'] <= 0.0
+        if not filter.any(): return blank
+    else:
+        filter = na.ones(data.NumberOfParticles, dtype='bool')
     amr_utils.CICDeposit_3(data["particle_position_x"][filter].astype(na.float64),
                            data["particle_position_y"][filter].astype(na.float64),
                            data["particle_position_z"][filter].astype(na.float64),


--- a/yt/frontends/flash/data_structures.py	Thu Jun 23 15:39:40 2011 -0600
+++ b/yt/frontends/flash/data_structures.py	Thu Jun 23 15:40:50 2011 -0600
@@ -84,6 +84,12 @@
     def _detect_fields(self):
         ncomp = self._handle["/unknown names"].shape[0]
         self.field_list = [s for s in self._handle["/unknown names"][:].flat]
+        facevars = [s for s in self._handle
+                    if s.startswith(("fcx","fcy","fcz")) and s[-1].isdigit()]
+        nfacevars = len(facevars)
+        if (nfacevars > 0) :
+            ncomp += nfacevars
+            self.field_list.append(facevars)
         if ("/particle names" in self._handle) :
             self.field_list += ["particle_" + s[0].strip() for s
                                 in self._handle["/particle names"][:]]


--- a/yt/gui/reason/pannable_map.py	Thu Jun 23 15:39:40 2011 -0600
+++ b/yt/gui/reason/pannable_map.py	Thu Jun 23 15:40:50 2011 -0600
@@ -79,9 +79,12 @@
                                     self.pf.domain_right_edge[1],
                                     dd*DW[0] / (64*256),
                                     dd*DW[0])
-        cmi = na.log10(cmi)
-        cma = na.log10(cma)
-        to_plot = apply_colormap(na.log10(frb[self.field]), color_bounds = (cmi, cma))
+        if self.pf.field_info[self.field].take_log:
+            cmi = na.log10(cmi)
+            cma = na.log10(cma)
+            to_plot = apply_colormap(na.log10(frb[self.field]), color_bounds = (cmi, cma))
+        else:
+            to_plot = apply_colormap(frb[self.field], color_bounds = (cmi, cma))
         rv = write_png_to_string(to_plot)
         return rv
 


--- a/yt/utilities/amr_kdtree/amr_kdtree.py	Thu Jun 23 15:39:40 2011 -0600
+++ b/yt/utilities/amr_kdtree/amr_kdtree.py	Thu Jun 23 15:40:50 2011 -0600
@@ -259,6 +259,7 @@
         self.current_split_dim = 0
 
         self.pf = pf
+        self._id_offset = pf.h.grids[0]._id_offset
         if nprocs > len(pf.h.grids):
             print('Parallel rendering requires that the number of \n \
             grids in the dataset is greater or equal to the number of \n \
@@ -568,7 +569,7 @@
         None
         
         """
-        thisnode.grid = self.pf.hierarchy.grids[thisnode.grid - 1]
+        thisnode.grid = self.pf.hierarchy.grids[thisnode.grid - self._id_offset]
         
         dds = thisnode.grid.dds
         gle = thisnode.grid.LeftEdge
@@ -844,7 +845,7 @@
                     # Check if we have children and have not exceeded l_max
                     if len(thisgrid.Children) > 0 and thisgrid.Level < self.l_max:
                         # Get the children that are actually in the current volume
-                        children = [child.id - 1 for child in thisgrid.Children  
+                        children = [child.id - self._id_offset for child in thisgrid.Children  
                                     if na.all(child.LeftEdge < current_node.r_corner) & 
                                     na.all(child.RightEdge > current_node.l_corner)]
 


--- a/yt/utilities/answer_testing/api.py	Thu Jun 23 15:39:40 2011 -0600
+++ b/yt/utilities/answer_testing/api.py	Thu Jun 23 15:40:50 2011 -0600
@@ -42,3 +42,6 @@
 from .default_tests import \
     TestFieldStatistics, \
     TestAllProjections
+
+from .xunit import \
+    Xunit


--- a/yt/utilities/command_line.py	Thu Jun 23 15:39:40 2011 -0600
+++ b/yt/utilities/command_line.py	Thu Jun 23 15:40:50 2011 -0600
@@ -85,7 +85,7 @@
                    help="Width in specified units"),
     unit    = dict(short="-u", long="--unit",
                    action="store", type="string",
-                   dest="unit", default='1',
+                   dest="unit", default='unitary',
                    help="Desired units"),
     center  = dict(short="-c", long="--center",
                    action="store", type="float",
@@ -529,7 +529,13 @@
         import yt.utilities.bottle as bottle
         bottle.debug(True)
         if opts.host is not None:
-            bottle.run(server='rocket', host=opts.host)
+            colonpl = opts.host.find(":")
+            if colonpl >= 0:
+                port = int(opts.host.split(":")[-1])
+                opts.host = opts.host[:colonpl]
+            else:
+                port = 8080
+            bottle.run(server='rocket', host=opts.host, port=port)
         else:
             bottle.run(server='rocket')

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this message because you have the notification service enabled and are
listed as a recipient of this email.



More information about the yt-svn mailing list