[yt-svn] commit/yt: 3 new changesets

Bitbucket commits-noreply at bitbucket.org
Fri Feb 24 08:02:23 PST 2012


3 new commits in yt:


https://bitbucket.org/yt_analysis/yt/changeset/59e04982b286/
changeset:   59e04982b286
branch:      yt
user:        MatthewTurk
date:        2012-02-24 04:15:38
summary:     Fixing "yt rpdb"
affected #:  1 file

diff -r ed12b1e9edebec49ada8dec518f0038628b8e740 -r 59e04982b28631c06a23b18c2a35f05efcdd3eb5 yt/utilities/command_line.py
--- a/yt/utilities/command_line.py
+++ b/yt/utilities/command_line.py
@@ -1240,10 +1240,15 @@
         uncaught exceptions.
 
         """
+    args = (
+            dict(short="-t", long="--task", action="store",
+                 default = 0, dest='task',
+                 help="Open a web browser."),
+           )
 
     def __call__(self, args):
         import rpdb
-        rpdb.run_rpdb(int(task))
+        rpdb.run_rpdb(int(args.task))
 
 class YTGUICmd(YTCommand):
     name = ["serve", "reason"]



https://bitbucket.org/yt_analysis/yt/changeset/30ad7ba41220/
changeset:   30ad7ba41220
branch:      yt
user:        MatthewTurk
date:        2012-02-24 04:24:00
summary:     [particle IO] Change how particles are generated: read from the
particles() object first, then skip fetching the field from the grids.
Additionally, map converted fields back to the original field name, not the
modified one.
affected #:  2 files

diff -r 59e04982b28631c06a23b18c2a35f05efcdd3eb5 -r 30ad7ba41220e063dd1b4ab46a805528947c255d yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -2413,9 +2413,6 @@
         for field in fields_to_get:
             if self.field_data.has_key(field):
                 continue
-            if field not in self.hierarchy.field_list and not in_grids:
-                if self._generate_field(field):
-                    continue # True means we already assigned it
             # There are a lot of 'ands' here, but I think they are all
             # necessary.
             if force_particle_read == False and \
@@ -2426,6 +2423,10 @@
                 self.particles.get_data(field)
                 if field not in self.field_data:
                     if self._generate_field(field): continue
+                continue
+            if field not in self.hierarchy.field_list and not in_grids:
+                if self._generate_field(field):
+                    continue # True means we already assigned it
             mylog.info("Getting field %s from %s", field, len(self._grids))
             self[field] = na.concatenate(
                 [self._get_data_from_grid(grid, field)
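
The reordering above makes the particle IO branch the first thing tried for a
missing field: if the field can be read through the particles() object it is
read there, the derived-field fallback only runs if that read left the field
unset, and the loop then moves on without ever touching the grids. Only fields
that match neither path fall through to the per-grid concatenation. Below is a
minimal sketch of that lookup order with yt's data-container internals replaced
by injected callables; it is purely illustrative and not the real method.

def fetch_field(field, have_field, particle_readable, read_particles,
                generate_field, read_from_grids):
    """Sketch of the new lookup order for one field; the callables stand in
    for yt's data-container internals."""
    if have_field(field):
        return                      # already cached in field_data
    if particle_readable(field):
        read_particles(field)       # fast path: the particles() object reads it
        if not have_field(field):
            generate_field(field)   # or derive it from what that read produced
        return                      # either way, skip the grid path entirely
    if generate_field(field):
        return                      # derived field; True means it was assigned
    read_from_grids(field)          # fall back to concatenating grid data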


diff -r 59e04982b28631c06a23b18c2a35f05efcdd3eb5 -r 30ad7ba41220e063dd1b4ab46a805528947c255d yt/data_objects/particle_io.py
--- a/yt/data_objects/particle_io.py
+++ b/yt/data_objects/particle_io.py
@@ -29,6 +29,17 @@
 
 particle_handler_registry = defaultdict()
 
+def particle_converter(func):
+    from .data_containers import YTFieldData
+    def save_state(grid):
+        old_params = grid.field_parameters
+        old_keys = grid.field_data.keys()
+        tr = func(grid)
+        grid.field_parameters = old_params
+        grid.field_data = YTFieldData( [(k, grid.field_data[k]) for k in old_keys] )
+        return tr
+    return save_state
+
 class ParticleIOHandler(object):
     class __metaclass__(type):
         def __init__(cls, name, b, d):
@@ -82,6 +93,7 @@
                 func = f._convert_function
             else:
                 func = f.particle_convert
+            func = particle_converter(func)
             conv_factors.append(
               na.fromiter((func(g) for g in grid_list),
                           count=len(grid_list), dtype='float64'))
@@ -90,7 +102,7 @@
         rvs = self.pf.h.io._read_particles(
             fields_to_read, rtype, args, grid_list, count_list,
             conv_factors)
-        for [n, v] in zip(fields_to_read, rvs):
+        for [n, v] in zip(fields, rvs):
             self.source.field_data[n] = v
 
 class ParticleIOHandlerRegion(ParticleIOHandlerImplemented):
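
Two changes here. First, each per-grid conversion function is wrapped in
particle_converter, which records the grid's field_parameters and the keys
already present in field_data before calling it and restores both afterwards,
presumably so that whatever the conversion function reads or caches on the
grid does not linger there. Second, the read results are now zipped against
fields rather than fields_to_read, so the arrays are stored under the names
the caller asked for instead of the converted names. A generic version of the
save-and-restore decorator pattern, with made-up attribute names rather than
yt's:

def preserve_state(func):
    """Call func(obj), restore obj's parameters, and drop any cache entries
    the call added; the same shape as particle_converter above."""
    def wrapper(obj):
        old_params = obj.parameters
        old_keys = list(obj.cache)                        # snapshot existing keys
        result = func(obj)
        obj.parameters = old_params
        obj.cache = dict((k, obj.cache[k]) for k in old_keys)
        return result
    return wrapper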



https://bitbucket.org/yt_analysis/yt/changeset/34781f1f6cc7/
changeset:   34781f1f6cc7
branch:      yt
user:        MatthewTurk
date:        2012-02-24 04:30:22
summary:     Merging
affected #:  2 files

diff -r 30ad7ba41220e063dd1b4ab46a805528947c255d -r 34781f1f6cc77e4f91f6b93fb9c8667672efda8c yt/frontends/orion/data_structures.py
--- a/yt/frontends/orion/data_structures.py
+++ b/yt/frontends/orion/data_structures.py
@@ -131,36 +131,56 @@
 
     def _read_particles(self):
         """
-        reads in particles and assigns them to grids
+        reads in particles and assigns them to grids. Will search for
+        Star particles, then sink particles if no star particle file
+        is found, and finally will simply note that no particles are
+        found if neither works. To add a new Orion particle type,
+        simply add it to the if/elif/else block.
 
         """
         self.grid_particle_count = na.zeros(len(self.grids))
-        fn = self.pf.fullplotdir + "/StarParticles"
-        with open(fn, 'r') as f:
-            lines = f.readlines()
-            self.num_stars = int(lines[0].strip())
-            for line in lines[1:]:
-                particle_position_x = float(line.split(' ')[1])
-                particle_position_y = float(line.split(' ')[2])
-                particle_position_z = float(line.split(' ')[3])
-                coord = [particle_position_x, particle_position_y, particle_position_z]
-                # for each particle, determine which grids contain it
-                # copied from object_finding_mixin.py
-                mask=na.ones(self.num_grids)
-                for i in xrange(len(coord)):
-                    na.choose(na.greater(self.grid_left_edge[:,i],coord[i]), (mask,0), mask)
-                    na.choose(na.greater(self.grid_right_edge[:,i],coord[i]), (0,mask), mask)
-                    ind = na.where(mask == 1)
-                selected_grids = self.grids[ind]
-                # in orion, particles always live on the finest level.
-                # so, we want to assign the particle to the finest of
-                # the grids we just found
-                if len(selected_grids) != 0:
-                    grid = sorted(selected_grids, key=lambda grid: grid.Level)[-1]
-                    ind = na.where(self.grids == grid)[0][0]
-                    self.grid_particle_count[ind] += 1
-                    self.grids[ind].NumberOfParticles += 1
+    
+        if self._readOrionParticleFile(self.pf.fullplotdir + "/StarParticles"):
+            pass
+        elif self._readOrionParticleFile(self.pf.fullplotdir + "/SinkParticles"):
+            pass
+        else:
+            mylog.warning("No particles found.")
 
+    def _readOrionParticleFile(self, fn):
+        """actually reads the orion particle data file itself.
+
+        """
+        try:
+            with open(fn, 'r') as f:
+                lines = f.readlines()
+                self.num_stars = int(lines[0].strip())
+                for line in lines[1:]:
+                    particle_position_x = float(line.split(' ')[1])
+                    particle_position_y = float(line.split(' ')[2])
+                    particle_position_z = float(line.split(' ')[3])
+                    coord = [particle_position_x, particle_position_y, particle_position_z]
+                    # for each particle, determine which grids contain it
+                    # copied from object_finding_mixin.py
+                    mask=na.ones(self.num_grids)
+                    for i in xrange(len(coord)):
+                        na.choose(na.greater(self.grid_left_edge[:,i],coord[i]), (mask,0), mask)
+                        na.choose(na.greater(self.grid_right_edge[:,i],coord[i]), (0,mask), mask)
+                        ind = na.where(mask == 1)
+                        selected_grids = self.grids[ind]
+                        # in orion, particles always live on the finest level.
+                        # so, we want to assign the particle to the finest of
+                        # the grids we just found
+                        if len(selected_grids) != 0:
+                            grid = sorted(selected_grids, key=lambda grid: grid.Level)[-1]
+                            ind = na.where(self.grids == grid)[0][0]
+                            self.grid_particle_count[ind] += 1
+                            self.grids[ind].NumberOfParticles += 1
+        except IOError:
+                return None
+
+        return True
+                
     def readGlobalHeader(self,filename,paranoid_read):
         """
         read the global header file for an Orion plotfile output.
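
As the new docstring says, supporting another Orion particle type only
requires one more branch in the chain above. For example (the TracerParticles
file name is hypothetical, used only to show the shape of the change), the
if/elif/else chain in _read_particles would become:

if self._readOrionParticleFile(self.pf.fullplotdir + "/StarParticles"):
    pass
elif self._readOrionParticleFile(self.pf.fullplotdir + "/SinkParticles"):
    pass
elif self._readOrionParticleFile(self.pf.fullplotdir + "/TracerParticles"):
    pass
else:
    mylog.warning("No particles found.")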


diff -r 30ad7ba41220e063dd1b4ab46a805528947c255d -r 34781f1f6cc77e4f91f6b93fb9c8667672efda8c yt/visualization/plot_collection.py
--- a/yt/visualization/plot_collection.py
+++ b/yt/visualization/plot_collection.py
@@ -940,7 +940,7 @@
                                   x_bins, fields[0], x_min, x_max, x_log,
                                   lazy_reader)
         if len(fields) > 1:
-            profile.add_fields(fields[1], weight=weight, accumulation=accumulation)
+            profile.add_fields(fields[1:], weight=weight, accumulation=accumulation)
         if id is None: id = self._get_new_id()
         p = self._add_plot(Profile1DPlot(profile, fields, id,
                                                    axes=axes, figure=figure))
@@ -1148,13 +1148,15 @@
                                   x_bins, fields[0], x_min, x_max, x_log,
                                   y_bins, fields[1], y_min, y_max, y_log,
                                   lazy_reader)
+        # This will add all the fields to the profile object
+        if len(fields)>2:
+            profile.add_fields(fields[2:], weight=weight,
+                    accumulation=accumulation, fractional=fractional)
+
         if id is None: id = self._get_new_id()
         p = self._add_plot(PhasePlot(profile, fields, 
                                                id, cmap=cmap,
                                                figure=figure, axes=axes))
-        # This will add all the fields to the profile object
-        for i in range(2, len(fields)):
-            p.switch_z(fields[i], weight=weight, accumulation=accumulation, fractional=fractional)
         return p
 
     def add_phase_sphere(self, radius, unit, fields, center = None, cmap=None,
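
Both hunks make the additional fields part of the profile itself: the 1D path
now hands fields[1:] to add_fields instead of just fields[1], and the
phase-plot path adds fields[2:] to the 2D profile before the PhasePlot is
constructed, rather than switching them in through the plot one at a time
afterwards. A hedged usage sketch under the usual yt 2.x conventions; only
add_phase_sphere's signature appears in the hunk above, while PlotCollection,
load, the plotfile name, and the field names are assumptions:

from yt.mods import *                       # yt 2.x import convention (assumed)

pf = load("plt00100")                       # hypothetical plotfile name
pc = PlotCollection(pf, center=[0.5, 0.5, 0.5])

# With this change, every field after the first two is binned into the same
# profile up front instead of being switched in via the plot afterwards.
p = pc.add_phase_sphere(1.0, "unitary",
                        ["Density", "Temperature", "CellMassMsun"])
pc.save("phase")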

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving it
because you have the service enabled and this email is addressed to you.


