[yt-svn] commit/yt: 3 new changesets

Bitbucket commits-noreply at bitbucket.org
Mon Dec 3 10:00:23 PST 2012


3 new commits in yt:


https://bitbucket.org/yt_analysis/yt/changeset/6b5394f0f728/
changeset:   6b5394f0f728
branch:      yt
user:        jzuhone
date:        2012-11-30 22:13:07
summary:     Particle generation classes. Bugfix for update_data in the stream frontend.
affected #:  2 files

diff -r 4f88d432d0dab1c918a641316720bc58462fcc19 -r 6b5394f0f72865143336ed6cf410c07a97bd8003 yt/frontends/stream/data_structures.py
--- a/yt/frontends/stream/data_structures.py
+++ b/yt/frontends/stream/data_structures.py
@@ -274,15 +274,18 @@
             self.stream_handler.particle_types[key] = particle_types[key]
             if key not in self.field_list:
                 self.field_list.append(key)
+                
+        self._setup_unknown_fields()
 
         for i, grid in enumerate(self.grids) :
             if data[i].has_key("number_of_particles") :
                 grid.NumberOfParticles = data[i].pop("number_of_particles")
             for key in data[i].keys() :
+                if key in grid.keys() : del grid[key]
                 self.stream_handler.fields[grid.id][key] = data[i][key]
             
         self._setup_unknown_fields()
-        self._detect_fields()
+        
         
 class StreamStaticOutput(StaticOutput):
     _hierarchy_class = StreamHierarchy
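
For context, update_data is the hook that apply_to_stream (in the new
particle_generator.py below) uses to push new field arrays into an
in-memory stream dataset. A minimal sketch of driving it directly,
assuming a single-grid dataset built with load_uniform_grid (the field
values here are made up for illustration):

    import numpy as np
    from yt.frontends.stream.api import load_uniform_grid

    # Build a one-grid in-memory ("stream") dataset.
    dims = (32, 32, 32)
    pf = load_uniform_grid({"Density": np.ones(dims)}, dims, 1.0)

    # update_data takes one dict per grid; "number_of_particles" is
    # popped and stored on the grid, and every other entry replaces that
    # grid's field data. The new `del grid[key]` above drops any stale
    # cached copy so the next access re-reads the updated array.
    pf.h.update_data([{"number_of_particles": 0,
                       "Density": np.random.random(dims)}])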


diff -r 4f88d432d0dab1c918a641316720bc58462fcc19 -r 6b5394f0f72865143336ed6cf410c07a97bd8003 yt/utilities/particle_generator.py
--- /dev/null
+++ b/yt/utilities/particle_generator.py
@@ -0,0 +1,379 @@
+import numpy as np
+import h5py
+from yt.utilities.lib import CICSample_3
+from yt.funcs import *
+
+class ParticleGenerator(object) :
+
+    default_fields = ["particle_position_x",
+                      "particle_position_y",
+                      "particle_position_z",
+                      "particle_index"]
+
+    def __init__(self, pf, num_particles, field_list) :
+        """
+        Base class for generating particle fields which may be applied to
+        streams. Normally this would not be called directly, since it doesn't
+        really do anything except allocate memory. Takes a *pf* to serve as the
+        basis for determining grids, the number of particles *num_particles*,
+        and a list of fields, *field_list*.
+        """
+        self.pf = pf
+        self.num_particles = num_particles
+        self.field_list = field_list
+            
+        try :
+            self.posx_index = self.field_list.index(self.default_fields[0])
+            self.posy_index = self.field_list.index(self.default_fields[1])
+            self.posz_index = self.field_list.index(self.default_fields[2])
+            self.index_index = self.field_list.index(self.default_fields[3])
+        except ValueError :
+            raise KeyError("Field list must contain the following fields: " +
+                           "\'particle_position_x\', \'particle_position_y\'" +
+                           ", \'particle_position_z\', \'particle_index\' ")
+
+        self.num_grids = self.pf.h.num_grids
+        self.NumberOfParticles = np.zeros((self.num_grids), dtype='int64')
+        self.ParticleIndices = np.zeros(self.num_grids + 1, dtype='int64')
+        
+        self.num_fields = len(self.field_list)
+        
+        self.particles = np.zeros((self.num_particles, self.num_fields),
+                                  dtype='float64')
+
+    def has_key(self, key) :
+        """
+        Check to see if *key* is in the particle field list.
+        """
+        return (key in self.field_list)
+            
+    def keys(self) :
+        """
+        Return the list of particle fields.
+        """
+        return self.field_list
+    
+    def __getitem__(self, key) :
+        """
+        Get the field associated with key.
+        """
+        return self.particles[:,self.field_list.index(key)]
+    
+    def __setitem__(self, key, val) :
+        """
+        Set a field to a new value. Note that the particles are assumed
+        to have already been sorted by grid, so make sure the new values
+        are ordered consistently with that sorting.
+        """
+        self.particles[:,self.field_list.index(key)] = val[:]
+                
+    def __len__(self) :
+        """
+        The number of particles.
+        """
+        return self.num_particles
+
+    def get_from_grid(self, grid) :
+        """
+        Return a dict containing all of the particle fields in the specified grid.
+        """
+        ind = grid.id-grid._id_offset
+        start = self.ParticleIndices[ind]
+        end = self.ParticleIndices[ind+1]
+        return dict([(field, self.particles[start:end,self.field_list.index(field)])
+                     for field in self.field_list])
+    
+    def _setup_particles(self,x,y,z,setup_fields=None) :
+        """
+        Assigns grids to particles and sets up particle positions. *setup_fields* is
+        a dict of fields other than the particle positions to set up. 
+        """
+        particle_grids, particle_grid_inds = self.pf.h.find_points(x,y,z)
+        idxs = np.argsort(particle_grid_inds)
+        self.particles[:,self.posx_index] = x[idxs]
+        self.particles[:,self.posy_index] = y[idxs]
+        self.particles[:,self.posz_index] = z[idxs]
+        self.NumberOfParticles = np.bincount(particle_grid_inds,
+                                             minlength=self.num_grids)
+        if self.num_grids > 1 :
+            np.add.accumulate(self.NumberOfParticles.squeeze(),
+                              out=self.ParticleIndices[1:])
+        else :
+            self.ParticleIndices[1] = self.NumberOfParticles.squeeze()
+        if setup_fields is not None:
+            for key, value in setup_fields.items():
+                if key not in self.default_fields:
+                    self.particles[:,self.field_list.index(key)] = value[idxs]
+    
+    def assign_indices(self, function=None, **kwargs) :
+        """
+        Assign unique indices to the particles. The default is to just use
+        numpy.arange, but any function may be supplied with keyword arguments.
+        """
+        if function is None :
+            self.particles[:,self.index_index] = np.arange((self.num_particles))
+        else :
+            self.particles[:,self.index_index] = function(**kwargs)
+            
+    def map_grid_fields_to_particles(self, mapping_dict) :
+        r"""
+        For each grid field named in *mapping_dict*, sample its values onto
+        the particles using CIC (cloud-in-cell) interpolation.
+
+        Examples
+        --------
+        >>> field_map = {'Density':'particle_density',
+        ...              'Temperature':'particle_temperature'}
+        >>> particles.map_grid_fields_to_particles(field_map)
+        """
+        pbar = get_pbar("Mapping fields to particles", self.num_grids)
+        for i, grid in enumerate(self.pf.h.grids) :
+            pbar.update(i)
+            if self.NumberOfParticles[i] > 0:
+                start = self.ParticleIndices[i]
+                end = self.ParticleIndices[i+1]
+                # Note we add one ghost zone to the grid!
+                cube = grid.retrieve_ghost_zones(1, mapping_dict.keys())
+                le = np.array(grid.LeftEdge).astype(np.float64)
+                dims = np.array(grid.ActiveDimensions).astype(np.int32)
+                for gfield, pfield in mapping_dict.items() :
+                    field_index = self.field_list.index(pfield)
+                    CICSample_3(self.particles[start:end,self.posx_index],
+                                self.particles[start:end,self.posy_index],
+                                self.particles[start:end,self.posz_index],
+                                self.particles[start:end,field_index],
+                                np.int64(self.NumberOfParticles[i]),
+                                cube[gfield], le, dims,
+                                np.float64(grid['dx']))
+        pbar.finish()
+
+    def apply_to_stream(self, clobber=False) :
+        """
+        Apply the particles to a stream parameter file. If particles already
+        exist and clobber=False, add the new ones to them instead of overwriting.
+        """
+        grid_data = []
+        for i,g in enumerate(self.pf.h.grids) :
+            data = {}
+            if clobber :
+                data["number_of_particles"] = self.NumberOfParticles[i]
+            else :
+                data["number_of_particles"] = self.NumberOfParticles[i] + \
+                                              g.NumberOfParticles
+            grid_particles = self.get_from_grid(g)
+            for field in self.field_list :
+                if data["number_of_particles"] > 0 :
+                    # We have particles in this grid
+                    if g.NumberOfParticles > 0 and not clobber:
+                        # Particles already exist
+                        if field in self.pf.h.field_list :
+                            # This field already exists
+                            #prev_particles = self.pf.h.io._read_data_set(g,field)
+                            prev_particles = g[field]
+                        else :
+                            # This one doesn't; set the previous particles' field
+                            # values to zero
+                            prev_particles = np.zeros((g.NumberOfParticles))
+                        data[field] = np.concatenate((prev_particles,
+                                                      grid_particles[field]))
+                    else :
+                        # Particles do not already exist or we're clobbering
+                        data[field] = grid_particles[field]
+                else :
+                    # We don't have particles in this grid
+                    data[field] = np.array([], dtype='float64')
+            grid_data.append(data)
+        self.pf.h.update_data(grid_data)
+
+class FromListParticleGenerator(ParticleGenerator) :
+
+    def __init__(self, pf, num_particles, data) :
+        r"""
+        Generate particle fields from array-like lists contained in a dict.
+
+        Parameters
+        ----------
+        pf : `StaticOutput`
+            The parameter file which will serve as the base for these particles.
+        num_particles : int
+            The number of particles in the dict.
+        data : dict of NumPy arrays
+            The particle fields themselves.
+
+        Examples
+        --------
+        >>> num_p = 100000
+        >>> posx = np.random.random((num_p))
+        >>> posy = np.random.random((num_p))
+        >>> posz = np.random.random((num_p))
+        >>> mass = np.ones((num_p))
+        >>> data = {'particle_position_x': posx, 'particle_position_y': posy,
+        ...         'particle_position_z': posz, 'particle_mass': mass}
+        >>> particles = FromListParticleGenerator(pf, num_p, data)
+        """
+
+        field_list = data.keys()
+        x = data.pop("particle_position_x")
+        y = data.pop("particle_position_y")
+        z = data.pop("particle_position_z")
+
+        xcond = np.logical_or(x < pf.domain_left_edge[0],
+                              x >= pf.domain_right_edge[0])
+        ycond = np.logical_or(y < pf.domain_left_edge[1],
+                              y >= pf.domain_right_edge[1])
+        zcond = np.logical_or(z < pf.domain_left_edge[2],
+                              z >= pf.domain_right_edge[2])
+        cond = np.logical_or(xcond, ycond)
+        cond = np.logical_or(zcond, cond)
+
+        if np.any(cond) :
+            raise ValueError("Some particles are outside of the domain!!!")
+
+        ParticleGenerator.__init__(self, pf, num_particles, field_list)
+        self._setup_particles(x,y,z,setup_fields=data)
+        
+class LatticeParticleGenerator(ParticleGenerator) :
+
+    def __init__(self, pf, particles_dims, particles_left_edge,
+                 particles_right_edge, field_list) :
+        r"""
+        Generate particles in a lattice arrangement. 
+
+        Parameters
+        ----------
+        pf : `StaticOutput`
+            The parameter file which will serve as the base for these particles.
+        particles_dims : array-like of int
+            The number of particles along each dimension.
+        particles_left_edge : array-like of float
+            The 'left-most' starting positions of the lattice.
+        particles_right_edge : array-like of float
+            The 'right-most' ending positions of the lattice.
+        field_list : list of strings
+            A list of particle fields.
+             
+        Examples
+        --------
+        >>> dims = (128,128,128)
+        >>> le = np.array([0.25,0.25,0.25])
+        >>> re = np.array([0.75,0.75,0.75])
+        >>> fields = ["particle_position_x","particle_position_y",
+        >>>           "particle_position_z","particle_index",
+        >>>           "particle_density","particle_temperature"]
+        >>> particles = LatticeParticleGenerator(pf, dims, le, re, fields)
+        """
+
+        num_x = particles_dims[0]
+        num_y = particles_dims[1]
+        num_z = particles_dims[2]
+        xmin = particles_left_edge[0]
+        ymin = particles_left_edge[1]
+        zmin = particles_left_edge[2]
+        xmax = particles_right_edge[0]
+        ymax = particles_right_edge[1]
+        zmax = particles_right_edge[2]
+
+        xcond = (xmin < pf.domain_left_edge[0]) or \
+                (xmax >= pf.domain_right_edge[0])
+        ycond = (ymin < pf.domain_left_edge[1]) or \
+                (ymax >= pf.domain_right_edge[1])
+        zcond = (zmin < pf.domain_left_edge[2]) or \
+                (zmax >= pf.domain_right_edge[2])
+        cond = xcond or ycond or zcond
+
+        if cond :
+            raise ValueError("Proposed bounds for particles are outside domain!!!")
+
+        ParticleGenerator.__init__(self, pf, num_x*num_y*num_z, field_list)
+
+        dx = (xmax-xmin)/(num_x-1)
+        dy = (ymax-ymin)/(num_y-1)
+        dz = (zmax-zmin)/(num_z-1)
+        inds = np.indices((num_x,num_y,num_z))
+        xpos = inds[0]*dx + xmin
+        ypos = inds[1]*dy + ymin
+        zpos = inds[2]*dz + zmin
+        
+        self._setup_particles(xpos.flat[:], ypos.flat[:], zpos.flat[:])
+        
+class WithDensityParticleGenerator(ParticleGenerator) :
+
+    def __init__(self, pf, data_source, num_particles, field_list,
+                 density_field="Density") :
+        r"""
+        Generate particles based on a density field.
+
+        Parameters
+        ----------
+        pf : `StaticOutput`
+            The parameter file which will serve as the base for these particles.
+        data_source : `yt.data_objects.api.AMRData`
+            The data source containing the density field.
+        num_particles : int
+            The number of particles to be generated.
+        field_list : list of strings
+            A list of particle fields.
+        density_field : string, optional
+            A density field which will serve as the distribution function for the
+            particle positions. Theoretically, this could be any 'per-volume' field. 
+            
+        Examples
+        --------
+        >>> sphere = pf.h.sphere(pf.domain_center, 0.5)
+        >>> num_p = 100000
+        >>> fields = ["particle_position_x","particle_position_y",
+        >>>           "particle_position_z","particle_index",
+        >>>           "particle_density","particle_temperature"]
+        >>> particles = WithDensityParticleGenerator(pf, sphere, num_particles,
+        >>>                                          fields, density_field='Dark_Matter_Density')
+        """
+
+        ParticleGenerator.__init__(self, pf, num_particles, field_list)
+
+        num_cells = len(data_source["x"].flat)
+        max_density = data_source[density_field].max()
+        num_particles_left = num_particles
+        all_x = []
+        all_y = []
+        all_z = []
+        
+        pbar = get_pbar("Generating Particles", num_particles)
+        tot_num_accepted = int(0)
+        
+        while num_particles_left > 0:
+
+            rho = np.random.uniform(high=1.01*max_density,
+                                    size=num_particles_left)
+            idxs = np.random.random_integers(low=0, high=num_cells-1,
+                                             size=num_particles_left)
+            rho_true = data_source[density_field].flat[idxs]
+            accept = rho <= rho_true
+            num_accepted = accept.sum()
+            accepted_idxs = idxs[accept]
+            
+            xpos = data_source["x"].flat[accepted_idxs] + \
+                   np.random.uniform(low=-0.5, high=0.5, size=num_accepted) * \
+                   data_source["dx"].flat[accepted_idxs]
+            ypos = data_source["y"].flat[accepted_idxs] + \
+                   np.random.uniform(low=-0.5, high=0.5, size=num_accepted) * \
+                   data_source["dy"].flat[accepted_idxs]
+            zpos = data_source["z"].flat[accepted_idxs] + \
+                   np.random.uniform(low=-0.5, high=0.5, size=num_accepted) * \
+                   data_source["dz"].flat[accepted_idxs]
+
+            all_x.append(xpos)
+            all_y.append(ypos)
+            all_z.append(zpos)
+
+            num_particles_left -= num_accepted
+            tot_num_accepted += num_accepted
+            pbar.update(tot_num_accepted)
+
+        pbar.finish()
+
+        x = np.concatenate(all_x)
+        y = np.concatenate(all_y)
+        z = np.concatenate(all_z)
+
+        self._setup_particles(x,y,z)
+        
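
The core of WithDensityParticleGenerator above is a standard
rejection-sampling loop: draw candidate cells uniformly, draw a uniform
density threshold per candidate, and keep candidates whose threshold
falls at or below the cell's actual density. A self-contained sketch of
the technique on flat arrays (illustrative only; the argument names are
made up, and cubic cells are assumed where the class above uses dx, dy,
and dz separately):

    import numpy as np

    def sample_positions_by_density(x, y, z, dx, cell_density, num_particles):
        """Rejection-sample cell centers weighted by density, jittering
        each accepted particle uniformly within its cell."""
        max_density = cell_density.max()
        all_x, all_y, all_z = [], [], []
        num_left = num_particles
        while num_left > 0:
            # Uniform candidate cells and uniform density thresholds.
            idxs = np.random.randint(0, len(cell_density), size=num_left)
            rho = np.random.uniform(high=1.01 * max_density, size=num_left)
            keep = idxs[rho <= cell_density[idxs]]
            n = keep.size
            # Jitter within the cell so particles don't sit on cell centers.
            all_x.append(x[keep] + np.random.uniform(-0.5, 0.5, n) * dx[keep])
            all_y.append(y[keep] + np.random.uniform(-0.5, 0.5, n) * dx[keep])
            all_z.append(z[keep] + np.random.uniform(-0.5, 0.5, n) * dx[keep])
            num_left -= n
        return np.concatenate(all_x), np.concatenate(all_y), np.concatenate(all_z)

Because the acceptance test compares against the cell's own density,
dense cells spawn proportionally more particles, which is what makes the
particle distribution trace the chosen density_field.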



https://bitbucket.org/yt_analysis/yt/changeset/7eef966e21e6/
changeset:   7eef966e21e6
branch:      yt
user:        jzuhone
date:        2012-11-30 22:15:47
summary:     Another bugfix for update_data. Added particle generation tests.
affected #:  2 files

diff -r 6b5394f0f72865143336ed6cf410c07a97bd8003 -r 7eef966e21e6618b3224325096c1f2cacdafbf83 yt/frontends/stream/data_structures.py
--- a/yt/frontends/stream/data_structures.py
+++ b/yt/frontends/stream/data_structures.py
@@ -284,7 +284,7 @@
                 if key in grid.keys() : del grid[key]
                 self.stream_handler.fields[grid.id][key] = data[i][key]
             
-        self._setup_unknown_fields()
+        self._detect_fields()
         
         
 class StreamStaticOutput(StaticOutput):


diff -r 6b5394f0f72865143336ed6cf410c07a97bd8003 -r 7eef966e21e6618b3224325096c1f2cacdafbf83 yt/utilities/tests/test_particle_generator.py
--- /dev/null
+++ b/yt/utilities/tests/test_particle_generator.py
@@ -0,0 +1,105 @@
+import numpy as np
+from yt.testing import *
+from yt.utilities.particle_generator import *
+from yt.frontends.stream.api import load_uniform_grid, refine_amr
+import yt.utilities.initial_conditions as ic
+import yt.utilities.flagging_methods as fm
+from IPython import embed
+
+def setup() :
+    pass
+
+def test_particle_generator() :
+    
+    # First generate our pf
+    domain_dims = (128, 128, 128)
+    dens = np.zeros(domain_dims) + 0.1
+    temp = 4.*np.ones(domain_dims)
+    fields = {"Density": dens, "Temperature": temp}
+    ug = load_uniform_grid(fields, domain_dims, 1.0)
+    fo = [ic.BetaModelSphere(1.0,0.1,0.5,[0.5,0.5,0.5],{"Density":(10.0)})]
+    rc = [fm.flagging_method_registry["overdensity"](4.0)]
+    pf = refine_amr(ug, rc, fo, 3)
+
+    # Now generate particles from density
+
+    field_list = ["particle_position_x","particle_position_y",
+                  "particle_position_z","particle_index",
+                  "particle_gas_density"]
+    num_particles = 1000000
+    field_dict = {"Density": "particle_gas_density"}
+    sphere = pf.h.sphere(pf.domain_center, 0.45)
+
+    particles1 = WithDensityParticleGenerator(pf, sphere, num_particles, field_list)
+    particles1.assign_indices()
+    particles1.map_grid_fields_to_particles(field_dict)
+    
+    # Test to make sure we ended up with the right number of particles per grid
+    particles1.apply_to_stream()
+    particles_per_grid1 = [grid.NumberOfParticles for grid in pf.h.grids]
+    assert_equal(particles_per_grid1, particles1.NumberOfParticles)
+    particles_per_grid1 = [len(grid["particle_position_x"]) for grid in pf.h.grids]
+    assert_equal(particles_per_grid1, particles1.NumberOfParticles)
+
+    # Set up a lattice of particles
+    pdims = np.array([64,64,64])
+    def new_indices() :
+        # We just add new indices onto the existing ones
+        return np.arange((np.product(pdims)))+num_particles
+    le = np.array([0.25,0.25,0.25])
+    re = np.array([0.75,0.75,0.75])
+    new_field_list = field_list + ["particle_gas_temperature"]
+    new_field_dict = {"Density": "particle_gas_density",
+                      "Temperature": "particle_gas_temperature"}
+
+    particles2 = LatticeParticleGenerator(pf, pdims, le, re, new_field_list)
+    particles2.assign_indices(function=new_indices)
+    particles2.map_grid_fields_to_particles(new_field_dict)
+
+    #Test lattice positions
+    xpos = np.unique(particles2["particle_position_x"])
+    ypos = np.unique(particles2["particle_position_y"])
+    zpos = np.unique(particles2["particle_position_z"])
+
+    xpred = np.linspace(le[0],re[0],num=pdims[0],endpoint=True)
+    ypred = np.linspace(le[1],re[1],num=pdims[1],endpoint=True)
+    zpred = np.linspace(le[2],re[2],num=pdims[2],endpoint=True)
+
+    assert_almost_equal(xpos, xpred)
+    assert_almost_equal(ypos, ypred)
+    assert_almost_equal(zpos, zpred)
+
+    #Test the number of particles again
+    particles2.apply_to_stream()
+    particles_per_grid2 = [grid.NumberOfParticles for grid in pf.h.grids]
+    assert_equal(particles_per_grid2, particles1.NumberOfParticles+particles2.NumberOfParticles)
+    particles_per_grid2 = [len(grid["particle_position_x"]) for grid in pf.h.grids]
+    assert_equal(particles_per_grid2, particles1.NumberOfParticles+particles2.NumberOfParticles)
+
+    #Test the uniqueness of tags
+    tags = np.concatenate([grid["particle_index"] for grid in pf.h.grids])
+    tags.sort()
+    assert_equal(tags, np.arange((np.product(pdims)+num_particles)))
+
+    # Test that the old particles have zero for the new field
+    old_particle_temps = [grid["particle_gas_temperature"][:particles_per_grid1[i]]
+                          for i, grid in enumerate(pf.h.grids)]
+    test_zeros = [np.zeros((particles_per_grid1[i])) 
+                  for i, grid in enumerate(pf.h.grids)]
+    assert_equal(old_particle_temps, test_zeros)
+
+    #Now dump all of these particle fields out into a dict
+    pdata = {}
+    dd = pf.h.all_data()
+    for field in new_field_list :
+        pdata[field] = dd[field]
+
+    #Test the "from-list" generator and particle field clobber
+    particles3 = FromListParticleGenerator(pf, num_particles+np.product(pdims), pdata)
+    particles3.apply_to_stream(clobber=True)
+    
+    #Test the number of particles again
+    particles_per_grid3 = [grid.NumberOfParticles for grid in pf.h.grids]
+    assert_equal(particles_per_grid3, particles1.NumberOfParticles+particles2.NumberOfParticles)
+    particles_per_grid2 = [len(grid["particle_position_z"]) for grid in pf.h.grids]
+    assert_equal(particles_per_grid3, particles1.NumberOfParticles+particles2.NumberOfParticles)
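
A note on the lattice assertions above: LatticeParticleGenerator spaces
particles with dx = (xmax - xmin)/(num - 1), which places particles on
both edges inclusively; that is exactly what np.linspace(...,
endpoint=True) produces, hence the predicted positions. A quick sketch
of the equivalence being tested:

    import numpy as np

    num, lo, hi = 64, 0.25, 0.75
    dx = (hi - lo) / (num - 1)          # the generator's spacing
    lattice = np.arange(num) * dx + lo  # the generator's positions
    assert np.allclose(lattice, np.linspace(lo, hi, num, endpoint=True))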



https://bitbucket.org/yt_analysis/yt/changeset/711e95aff04b/
changeset:   711e95aff04b
branch:      yt
user:        MatthewTurk
date:        2012-12-03 19:00:21
summary:     Merged in jzuhone/yt (pull request #361)
affected #:  3 files

diff -r e9fc5728572fb56adef9efb71dede5483e58566f -r 711e95aff04b54e6ccda7dfefb0b0ae17c3830a7 yt/frontends/stream/data_structures.py
--- a/yt/frontends/stream/data_structures.py
+++ b/yt/frontends/stream/data_structures.py
@@ -274,16 +274,19 @@
             self.stream_handler.particle_types[key] = particle_types[key]
             if key not in self.field_list:
                 self.field_list.append(key)
+                
+        self._setup_unknown_fields()
 
         for i, grid in enumerate(self.grids) :
             if data[i].has_key("number_of_particles") :
                 grid.NumberOfParticles = data[i].pop("number_of_particles")
             for key in data[i].keys() :
+                if key in grid.keys() : del grid[key]
                 self.stream_handler.fields[grid.id][key] = data[i][key]
             
-        self._setup_unknown_fields()
         self._detect_fields()
         
+        
 class StreamStaticOutput(StaticOutput):
     _hierarchy_class = StreamHierarchy
     _fieldinfo_fallback = StreamFieldInfo
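
After the merge, the net effect of the edits to update_data is: promote
any unknown fields before the grid loop, invalidate stale cached field
data on each grid before overwriting it, then re-detect fields at the
end. Reconstructed from the diff above, the relevant portion of the
merged method reads (a sketch of just these lines, not the full method):

    # Inside StreamHierarchy.update_data, after the field_list updates:
    self._setup_unknown_fields()

    for i, grid in enumerate(self.grids):
        if data[i].has_key("number_of_particles"):
            grid.NumberOfParticles = data[i].pop("number_of_particles")
        for key in data[i].keys():
            if key in grid.keys():
                del grid[key]  # drop the stale cached copy first
            self.stream_handler.fields[grid.id][key] = data[i][key]

    self._detect_fields()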


diff -r e9fc5728572fb56adef9efb71dede5483e58566f -r 711e95aff04b54e6ccda7dfefb0b0ae17c3830a7 yt/utilities/particle_generator.py
--- /dev/null
+++ b/yt/utilities/particle_generator.py
@@ -0,0 +1,379 @@
+import numpy as np
+import h5py
+from yt.utilities.lib import CICSample_3
+from yt.funcs import *
+
+class ParticleGenerator(object) :
+
+    default_fields = ["particle_position_x",
+                      "particle_position_y",
+                      "particle_position_z",
+                      "particle_index"]
+
+    def __init__(self, pf, num_particles, field_list) :
+        """
+        Base class for generating particle fields which may be applied to
+        streams. Normally this would not be called directly, since it doesn't
+        really do anything except allocate memory. Takes a *pf* to serve as the
+        basis for determining grids, the number of particles *num_particles*,
+        and a list of fields, *field_list*.
+        """
+        self.pf = pf
+        self.num_particles = num_particles
+        self.field_list = field_list
+            
+        try :
+            self.posx_index = self.field_list.index(self.default_fields[0])
+            self.posy_index = self.field_list.index(self.default_fields[1])
+            self.posz_index = self.field_list.index(self.default_fields[2])
+            self.index_index = self.field_list.index(self.default_fields[3])
+        except ValueError :
+            raise KeyError("Field list must contain the following fields: " +
+                           "\'particle_position_x\', \'particle_position_y\'" +
+                           ", \'particle_position_z\', \'particle_index\' ")
+
+        self.num_grids = self.pf.h.num_grids
+        self.NumberOfParticles = np.zeros((self.num_grids), dtype='int64')
+        self.ParticleIndices = np.zeros(self.num_grids + 1, dtype='int64')
+        
+        self.num_fields = len(self.field_list)
+        
+        self.particles = np.zeros((self.num_particles, self.num_fields),
+                                  dtype='float64')
+
+    def has_key(self, key) :
+        """
+        Check to see if *key* is in the particle field list.
+        """
+        return (key in self.field_list)
+            
+    def keys(self) :
+        """
+        Return the list of particle fields.
+        """
+        return self.field_list
+    
+    def __getitem__(self, key) :
+        """
+        Get the field associated with key.
+        """
+        return self.particles[:,self.field_list.index(key)]
+    
+    def __setitem__(self, key, val) :
+        """
+        Set a field to a new value. Note that the particles are assumed
+        to have already been sorted by grid, so make sure the new values
+        are ordered consistently with that sorting.
+        """
+        self.particles[:,self.field_list.index(key)] = val[:]
+                
+    def __len__(self) :
+        """
+        The number of particles.
+        """
+        return self.num_particles
+
+    def get_from_grid(self, grid) :
+        """
+        Return a dict containing all of the particle fields in the specified grid.
+        """
+        ind = grid.id-grid._id_offset
+        start = self.ParticleIndices[ind]
+        end = self.ParticleIndices[ind+1]
+        return dict([(field, self.particles[start:end,self.field_list.index(field)])
+                     for field in self.field_list])
+    
+    def _setup_particles(self,x,y,z,setup_fields=None) :
+        """
+        Assigns grids to particles and sets up particle positions. *setup_fields* is
+        a dict of fields other than the particle positions to set up. 
+        """
+        particle_grids, particle_grid_inds = self.pf.h.find_points(x,y,z)
+        idxs = np.argsort(particle_grid_inds)
+        self.particles[:,self.posx_index] = x[idxs]
+        self.particles[:,self.posy_index] = y[idxs]
+        self.particles[:,self.posz_index] = z[idxs]
+        self.NumberOfParticles = np.bincount(particle_grid_inds,
+                                             minlength=self.num_grids)
+        if self.num_grids > 1 :
+            np.add.accumulate(self.NumberOfParticles.squeeze(),
+                              out=self.ParticleIndices[1:])
+        else :
+            self.ParticleIndices[1] = self.NumberOfParticles.squeeze()
+        if setup_fields is not None:
+            for key, value in setup_fields.items():
+                if key not in self.default_fields:
+                    self.particles[:,self.field_list.index(key)] = value[idxs]
+    
+    def assign_indices(self, function=None, **kwargs) :
+        """
+        Assign unique indices to the particles. The default is to just use
+        numpy.arange, but any function may be supplied with keyword arguments.
+        """
+        if function is None :
+            self.particles[:,self.index_index] = np.arange((self.num_particles))
+        else :
+            self.particles[:,self.index_index] = function(**kwargs)
+            
+    def map_grid_fields_to_particles(self, mapping_dict) :
+        r"""
+        For each grid field named in *mapping_dict*, sample its values onto
+        the particles using CIC (cloud-in-cell) interpolation.
+
+        Examples
+        --------
+        >>> field_map = {'Density':'particle_density',
+        ...              'Temperature':'particle_temperature'}
+        >>> particles.map_grid_fields_to_particles(field_map)
+        """
+        pbar = get_pbar("Mapping fields to particles", self.num_grids)
+        for i, grid in enumerate(self.pf.h.grids) :
+            pbar.update(i)
+            if self.NumberOfParticles[i] > 0:
+                start = self.ParticleIndices[i]
+                end = self.ParticleIndices[i+1]
+                # Note we add one ghost zone to the grid!
+                cube = grid.retrieve_ghost_zones(1, mapping_dict.keys())
+                le = np.array(grid.LeftEdge).astype(np.float64)
+                dims = np.array(grid.ActiveDimensions).astype(np.int32)
+                for gfield, pfield in mapping_dict.items() :
+                    field_index = self.field_list.index(pfield)
+                    CICSample_3(self.particles[start:end,self.posx_index],
+                                self.particles[start:end,self.posy_index],
+                                self.particles[start:end,self.posz_index],
+                                self.particles[start:end,field_index],
+                                np.int64(self.NumberOfParticles[i]),
+                                cube[gfield], le, dims,
+                                np.float64(grid['dx']))
+        pbar.finish()
+
+    def apply_to_stream(self, clobber=False) :
+        """
+        Apply the particles to a stream parameter file. If particles already
+        exist and clobber=False, add the new ones to them instead of overwriting.
+        """
+        grid_data = []
+        for i,g in enumerate(self.pf.h.grids) :
+            data = {}
+            if clobber :
+                data["number_of_particles"] = self.NumberOfParticles[i]
+            else :
+                data["number_of_particles"] = self.NumberOfParticles[i] + \
+                                              g.NumberOfParticles
+            grid_particles = self.get_from_grid(g)
+            for field in self.field_list :
+                if data["number_of_particles"] > 0 :
+                    # We have particles in this grid
+                    if g.NumberOfParticles > 0 and not clobber:
+                        # Particles already exist
+                        if field in self.pf.h.field_list :
+                            # This field already exists
+                            #prev_particles = self.pf.h.io._read_data_set(g,field)
+                            prev_particles = g[field]
+                        else :
+                            # This one doesn't; set the previous particles' field
+                            # values to zero
+                            prev_particles = np.zeros((g.NumberOfParticles))
+                        data[field] = np.concatenate((prev_particles,
+                                                      grid_particles[field]))
+                    else :
+                        # Particles do not already exist or we're clobbering
+                        data[field] = grid_particles[field]
+                else :
+                    # We don't have particles in this grid
+                    data[field] = np.array([], dtype='float64')
+            grid_data.append(data)
+        self.pf.h.update_data(grid_data)
+
+class FromListParticleGenerator(ParticleGenerator) :
+
+    def __init__(self, pf, num_particles, data) :
+        r"""
+        Generate particle fields from array-like lists contained in a dict.
+
+        Parameters
+        ----------
+        pf : `StaticOutput`
+            The parameter file which will serve as the base for these particles.
+        num_particles : int
+            The number of particles in the dict.
+        data : dict of NumPy arrays
+            The particle fields themselves.
+
+        Examples
+        --------
+        >>> num_p = 100000
+        >>> posx = np.random.random((num_p))
+        >>> posy = np.random.random((num_p))
+        >>> posz = np.random.random((num_p))
+        >>> mass = np.ones((num_p))
+        >>> data = {'particle_position_x': posx, 'particle_position_y': posy,
+        ...         'particle_position_z': posz, 'particle_mass': mass}
+        >>> particles = FromListParticleGenerator(pf, num_p, data)
+        """
+
+        field_list = data.keys()
+        x = data.pop("particle_position_x")
+        y = data.pop("particle_position_y")
+        z = data.pop("particle_position_z")
+
+        xcond = np.logical_or(x < pf.domain_left_edge[0],
+                              x >= pf.domain_right_edge[0])
+        ycond = np.logical_or(y < pf.domain_left_edge[1],
+                              y >= pf.domain_right_edge[1])
+        zcond = np.logical_or(z < pf.domain_left_edge[2],
+                              z >= pf.domain_right_edge[2])
+        cond = np.logical_or(xcond, ycond)
+        cond = np.logical_or(zcond, cond)
+
+        if np.any(cond) :
+            raise ValueError("Some particles are outside of the domain!!!")
+
+        ParticleGenerator.__init__(self, pf, num_particles, field_list)
+        self._setup_particles(x,y,z,setup_fields=data)
+        
+class LatticeParticleGenerator(ParticleGenerator) :
+
+    def __init__(self, pf, particles_dims, particles_left_edge,
+                 particles_right_edge, field_list) :
+        r"""
+        Generate particles in a lattice arrangement. 
+
+        Parameters
+        ----------
+        pf : `StaticOutput`
+            The parameter file which will serve as the base for these particles.
+        particles_dims : array-like of int
+            The number of particles along each dimension.
+        particles_left_edge : array-like of float
+            The 'left-most' starting positions of the lattice.
+        particles_right_edge : array-like of float
+            The 'right-most' ending positions of the lattice.
+        field_list : list of strings
+            A list of particle fields.
+             
+        Examples
+        --------
+        >>> dims = (128,128,128)
+        >>> le = np.array([0.25,0.25,0.25])
+        >>> re = np.array([0.75,0.75,0.75])
+        >>> fields = ["particle_position_x","particle_position_y",
+        >>>           "particle_position_z","particle_index",
+        >>>           "particle_density","particle_temperature"]
+        >>> particles = LatticeParticleGenerator(pf, dims, le, re, fields)
+        """
+
+        num_x = particles_dims[0]
+        num_y = particles_dims[1]
+        num_z = particles_dims[2]
+        xmin = particles_left_edge[0]
+        ymin = particles_left_edge[1]
+        zmin = particles_left_edge[2]
+        xmax = particles_right_edge[0]
+        ymax = particles_right_edge[1]
+        zmax = particles_right_edge[2]
+
+        xcond = (xmin < pf.domain_left_edge[0]) or \
+                (xmax >= pf.domain_right_edge[0])
+        ycond = (ymin < pf.domain_left_edge[1]) or \
+                (ymax >= pf.domain_right_edge[1])
+        zcond = (zmin < pf.domain_left_edge[2]) or \
+                (zmax >= pf.domain_right_edge[2])
+        cond = xcond or ycond or zcond
+
+        if cond :
+            raise ValueError("Proposed bounds for particles are outside domain!!!")
+
+        ParticleGenerator.__init__(self, pf, num_x*num_y*num_z, field_list)
+
+        dx = (xmax-xmin)/(num_x-1)
+        dy = (ymax-ymin)/(num_y-1)
+        dz = (zmax-zmin)/(num_z-1)
+        inds = np.indices((num_x,num_y,num_z))
+        xpos = inds[0]*dx + xmin
+        ypos = inds[1]*dy + ymin
+        zpos = inds[2]*dz + zmin
+        
+        self._setup_particles(xpos.flat[:], ypos.flat[:], zpos.flat[:])
+        
+class WithDensityParticleGenerator(ParticleGenerator) :
+
+    def __init__(self, pf, data_source, num_particles, field_list,
+                 density_field="Density") :
+        r"""
+        Generate particles based on a density field.
+
+        Parameters
+        ----------
+        pf : `StaticOutput`
+            The parameter file which will serve as the base for these particles.
+        data_source : `yt.data_objects.api.AMRData`
+            The data source containing the density field.
+        num_particles : int
+            The number of particles to be generated.
+        field_list : list of strings
+            A list of particle fields.
+        density_field : string, optional
+            A density field which will serve as the distribution function for the
+            particle positions. Theoretically, this could be any 'per-volume' field. 
+            
+        Examples
+        --------
+        >>> sphere = pf.h.sphere(pf.domain_center, 0.5)
+        >>> num_p = 100000
+        >>> fields = ["particle_position_x","particle_position_y",
+        >>>           "particle_position_z","particle_index",
+        >>>           "particle_density","particle_temperature"]
+        >>> particles = WithDensityParticleGenerator(pf, sphere, num_particles,
+        >>>                                          fields, density_field='Dark_Matter_Density')
+        """
+
+        ParticleGenerator.__init__(self, pf, num_particles, field_list)
+
+        num_cells = len(data_source["x"].flat)
+        max_density = data_source[density_field].max()
+        num_particles_left = num_particles
+        all_x = []
+        all_y = []
+        all_z = []
+        
+        pbar = get_pbar("Generating Particles", num_particles)
+        tot_num_accepted = int(0)
+        
+        while num_particles_left > 0:
+
+            rho = np.random.uniform(high=1.01*max_density,
+                                    size=num_particles_left)
+            idxs = np.random.random_integers(low=0, high=num_cells-1,
+                                             size=num_particles_left)
+            rho_true = data_source[density_field].flat[idxs]
+            accept = rho <= rho_true
+            num_accepted = accept.sum()
+            accepted_idxs = idxs[accept]
+            
+            xpos = data_source["x"].flat[accepted_idxs] + \
+                   np.random.uniform(low=-0.5, high=0.5, size=num_accepted) * \
+                   data_source["dx"].flat[accepted_idxs]
+            ypos = data_source["y"].flat[accepted_idxs] + \
+                   np.random.uniform(low=-0.5, high=0.5, size=num_accepted) * \
+                   data_source["dy"].flat[accepted_idxs]
+            zpos = data_source["z"].flat[accepted_idxs] + \
+                   np.random.uniform(low=-0.5, high=0.5, size=num_accepted) * \
+                   data_source["dz"].flat[accepted_idxs]
+
+            all_x.append(xpos)
+            all_y.append(ypos)
+            all_z.append(zpos)
+
+            num_particles_left -= num_accepted
+            tot_num_accepted += num_accepted
+            pbar.update(tot_num_accepted)
+
+        pbar.finish()
+
+        x = np.concatenate(all_x)
+        y = np.concatenate(all_y)
+        z = np.concatenate(all_z)
+
+        self._setup_particles(x,y,z)
+        


diff -r e9fc5728572fb56adef9efb71dede5483e58566f -r 711e95aff04b54e6ccda7dfefb0b0ae17c3830a7 yt/utilities/tests/test_particle_generator.py
--- /dev/null
+++ b/yt/utilities/tests/test_particle_generator.py
@@ -0,0 +1,105 @@
+import numpy as np
+from yt.testing import *
+from yt.utilities.particle_generator import *
+from yt.frontends.stream.api import load_uniform_grid, refine_amr
+import yt.utilities.initial_conditions as ic
+import yt.utilities.flagging_methods as fm
+from IPython import embed
+
+def setup() :
+    pass
+
+def test_particle_generator() :
+    
+    # First generate our pf
+    domain_dims = (128, 128, 128)
+    dens = np.zeros(domain_dims) + 0.1
+    temp = 4.*np.ones(domain_dims)
+    fields = {"Density": dens, "Temperature": temp}
+    ug = load_uniform_grid(fields, domain_dims, 1.0)
+    fo = [ic.BetaModelSphere(1.0,0.1,0.5,[0.5,0.5,0.5],{"Density":(10.0)})]
+    rc = [fm.flagging_method_registry["overdensity"](4.0)]
+    pf = refine_amr(ug, rc, fo, 3)
+
+    # Now generate particles from density
+
+    field_list = ["particle_position_x","particle_position_y",
+                  "particle_position_z","particle_index",
+                  "particle_gas_density"]
+    num_particles = 1000000
+    field_dict = {"Density": "particle_gas_density"}
+    sphere = pf.h.sphere(pf.domain_center, 0.45)
+
+    particles1 = WithDensityParticleGenerator(pf, sphere, num_particles, field_list)
+    particles1.assign_indices()
+    particles1.map_grid_fields_to_particles(field_dict)
+    
+    # Test to make sure we ended up with the right number of particles per grid
+    particles1.apply_to_stream()
+    particles_per_grid1 = [grid.NumberOfParticles for grid in pf.h.grids]
+    assert_equal(particles_per_grid1, particles1.NumberOfParticles)
+    particles_per_grid1 = [len(grid["particle_position_x"]) for grid in pf.h.grids]
+    assert_equal(particles_per_grid1, particles1.NumberOfParticles)
+
+    # Set up a lattice of particles
+    pdims = np.array([64,64,64])
+    def new_indices() :
+        # We just add new indices onto the existing ones
+        return np.arange((np.product(pdims)))+num_particles
+    le = np.array([0.25,0.25,0.25])
+    re = np.array([0.75,0.75,0.75])
+    new_field_list = field_list + ["particle_gas_temperature"]
+    new_field_dict = {"Density": "particle_gas_density",
+                      "Temperature": "particle_gas_temperature"}
+
+    particles2 = LatticeParticleGenerator(pf, pdims, le, re, new_field_list)
+    particles2.assign_indices(function=new_indices)
+    particles2.map_grid_fields_to_particles(new_field_dict)
+
+    #Test lattice positions
+    xpos = np.unique(particles2["particle_position_x"])
+    ypos = np.unique(particles2["particle_position_y"])
+    zpos = np.unique(particles2["particle_position_z"])
+
+    xpred = np.linspace(le[0],re[0],num=pdims[0],endpoint=True)
+    ypred = np.linspace(le[1],re[1],num=pdims[1],endpoint=True)
+    zpred = np.linspace(le[2],re[2],num=pdims[2],endpoint=True)
+
+    assert_almost_equal(xpos, xpred)
+    assert_almost_equal(ypos, ypred)
+    assert_almost_equal(zpos, zpred)
+
+    #Test the number of particles again
+    particles2.apply_to_stream()
+    particles_per_grid2 = [grid.NumberOfParticles for grid in pf.h.grids]
+    assert_equal(particles_per_grid2, particles1.NumberOfParticles+particles2.NumberOfParticles)
+    particles_per_grid2 = [len(grid["particle_position_x"]) for grid in pf.h.grids]
+    assert_equal(particles_per_grid2, particles1.NumberOfParticles+particles2.NumberOfParticles)
+
+    #Test the uniqueness of tags
+    tags = np.concatenate([grid["particle_index"] for grid in pf.h.grids])
+    tags.sort()
+    assert_equal(tags, np.arange((np.product(pdims)+num_particles)))
+
+    # Test that the old particles have zero for the new field
+    old_particle_temps = [grid["particle_gas_temperature"][:particles_per_grid1[i]]
+                          for i, grid in enumerate(pf.h.grids)]
+    test_zeros = [np.zeros((particles_per_grid1[i])) 
+                  for i, grid in enumerate(pf.h.grids)]
+    assert_equal(old_particle_temps, test_zeros)
+
+    #Now dump all of these particle fields out into a dict
+    pdata = {}
+    dd = pf.h.all_data()
+    for field in new_field_list :
+        pdata[field] = dd[field]
+
+    #Test the "from-list" generator and particle field clobber
+    particles3 = FromListParticleGenerator(pf, num_particles+np.product(pdims), pdata)
+    particles3.apply_to_stream(clobber=True)
+    
+    #Test the number of particles again
+    particles_per_grid3 = [grid.NumberOfParticles for grid in pf.h.grids]
+    assert_equal(particles_per_grid3, particles1.NumberOfParticles+particles2.NumberOfParticles)
+    particles_per_grid2 = [len(grid["particle_position_z"]) for grid in pf.h.grids]
+    assert_equal(particles_per_grid3, particles1.NumberOfParticles+particles2.NumberOfParticles)

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the notification service enabled and are a
recipient of this email.


