[yt-svn] commit/yt-3.0: MatthewTurk: Adding an arbitrary particle loader, load_particles.

commits-noreply at bitbucket.org
Thu Jul 4 22:02:32 PDT 2013


1 new commit in yt-3.0:

https://bitbucket.org/yt_analysis/yt-3.0/commits/3deb823bc6b8/
Changeset:   3deb823bc6b8
Branch:      yt-3.0
User:        MatthewTurk
Date:        2013-07-03 20:56:52
Summary:     Adding an arbitrary particle loader, load_particles.

Example: http://paste.yt-project.org/show/3664/
Affected #:  3 files
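
The paste linked above may not stay available, so here is a minimal usage
sketch along the same lines.  The field names and the load_particles
signature come from this changeset; the particle count and box scale are
illustrative:

    import numpy as np
    from yt.frontends.stream.data_structures import load_particles

    npart = 32**3
    data = dict(particle_position_x=np.random.random(npart),
                particle_position_y=np.random.random(npart),
                particle_position_z=np.random.random(npart),
                particle_mass=np.ones(npart))
    bbox = np.array([[0.0, 1.0], [0.0, 1.0], [0.0, 1.0]])
    # 3.08e24 cm is roughly one megaparsec per code unit
    pf = load_particles(data, 3.08e24, bbox=bbox)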

diff -r 2edc94f8cf0819f4eaede36d7dc2dc0556982e04 -r 3deb823bc6b8beb1cfdaec903d7aa93759c2aa7a yt/frontends/stream/data_structures.py
--- a/yt/frontends/stream/data_structures.py
+++ b/yt/frontends/stream/data_structures.py
@@ -34,6 +34,8 @@
     AMRGridPatch
 from yt.geometry.grid_geometry_handler import \
     GridGeometryHandler
+from yt.geometry.particle_geometry_handler import \
+    ParticleGeometryHandler
 from yt.data_objects.static_output import \
     StaticOutput
 from yt.utilities.logger import ytLogger as mylog
@@ -47,6 +49,8 @@
     mpc_conversion, sec_conversion
 from yt.utilities.flagging_methods import \
     FlaggingGrid
+from yt.frontends.sph.data_structures import \
+    ParticleFile
 
 from .fields import \
     StreamFieldInfo, \
@@ -704,3 +708,121 @@
         assign_particle_data(pf, pdata)
     
     return pf
+
+class StreamParticleGeometryHandler(ParticleGeometryHandler):
+
+    def __init__(self, pf, data_style=None):
+        self.stream_handler = pf.stream_handler
+        super(StreamParticleGeometryHandler, self).__init__(pf, data_style)
+
+    def _setup_data_io(self):
+        if self.stream_handler.io is not None:
+            self.io = self.stream_handler.io
+        else:
+            self.io = io_registry[self.data_style](self.stream_handler)
+
+class StreamParticleFile(ParticleFile):
+    pass
+
+class StreamParticlesStaticOutput(StreamStaticOutput):
+    _hierarchy_class = StreamParticleGeometryHandler
+    _file_class = StreamParticleFile
+    _fieldinfo_fallback = StreamFieldInfo
+    _fieldinfo_known = KnownStreamFields
+    _data_style = "stream_particles"
+    file_count = 1
+    filename_template = "stream_file"
+
+def load_particles(data, sim_unit_to_cm, bbox=None,
+                   sim_time=0.0, periodicity=(True, True, True)):
+    r"""Load a set of particles into yt as a
+    :class:`~yt.frontends.stream.data_structures.StreamParticlesStaticOutput`.
+
+    This should allow a collection of particle data to be loaded directly
+    into yt and analyzed as any other dataset would be.  Two caveats apply:
+        * Units will be incorrect unless the data has already been
+          converted to cgs.
+        * Some functions may behave oddly, and parallelism will be
+          disappointing or non-existent in most cases.
+
+    This will initialize an octree over the particle data.  Note that
+    fluid fields will not work yet, or possibly ever.
+    
+    Parameters
+    ----------
+    data : dict
+        This is a dict of numpy arrays, where the keys are the field names.
+        Particle positions must be named "particle_position_x",
+        "particle_position_y", and "particle_position_z".
+    sim_unit_to_cm : float
+        Conversion factor from simulation units to centimeters
+    bbox : array_like (xdim:zdim, LE:RE), optional
+        Size of the computational domain, in units of sim_unit_to_cm
+    sim_time : float, optional
+        The simulation time in seconds
+    periodicity : tuple of booleans
+        Determines whether the data will be treated as periodic along
+        each axis
+
+    Examples
+    --------
+
+    >>> pos = [np.random.random(128*128*128) for i in range(3)]
+    >>> data = dict(particle_position_x = pos[0],
+    ...             particle_position_y = pos[1],
+    ...             particle_position_z = pos[2])
+    >>> bbox = np.array([[0., 1.0], [0.0, 1.0], [0.0, 1.0]])
+    >>> pf = load_particles(data, 3.08e24, bbox=bbox)
+
+    """
+
+    domain_dimensions = np.ones(3, "int32") * 2
+    nprocs = 1
+    if bbox is None:
+        bbox = np.array([[0.0, 1.0], [0.0, 1.0], [0.0, 1.0]], 'float64')
+    domain_left_edge = np.array(bbox[:, 0], 'float64')
+    domain_right_edge = np.array(bbox[:, 1], 'float64')
+    grid_levels = np.zeros(nprocs, dtype='int32').reshape((nprocs,1))
+
+    sfh = StreamDictFieldHandler()
+    
+    particle_types = set_particle_types(data)
+    
+    sfh.update({'stream_file':data})
+    grid_left_edges = domain_left_edge
+    grid_right_edges = domain_right_edge
+    grid_dimensions = domain_dimensions.reshape(nprocs,3).astype("int32")
+
+    # I'm not sure we need any of this.
+    handler = StreamHandler(
+        grid_left_edges,
+        grid_right_edges,
+        grid_dimensions,
+        grid_levels,
+        -np.ones(nprocs, dtype='int64'),
+        np.zeros(nprocs, dtype='int64').reshape(nprocs,1), # Temporary
+        np.zeros(nprocs).reshape((nprocs,1)),
+        sfh,
+        particle_types=particle_types,
+        periodicity=periodicity
+    )
+
+    handler.name = "ParticleData"
+    handler.domain_left_edge = domain_left_edge
+    handler.domain_right_edge = domain_right_edge
+    handler.refine_by = 2
+    handler.dimensionality = 3
+    handler.domain_dimensions = domain_dimensions
+    handler.simulation_time = sim_time
+    handler.cosmology_simulation = 0
+
+    spf = StreamParticlesStaticOutput(handler)
+    spf.units["cm"] = sim_unit_to_cm
+    spf.units['1'] = 1.0
+    spf.units["unitary"] = 1.0
+    box_in_mpc = sim_unit_to_cm / mpc_conversion['cm']
+    for unit in mpc_conversion.keys():
+        spf.units[unit] = mpc_conversion[unit] * box_in_mpc
+
+    return spf
+
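
A note on the unit handling at the end of load_particles: sim_unit_to_cm
anchors the code unit in cgs, and each entry of the mpc_conversion table is
rescaled by the box size expressed in megaparsecs.  A short worked sketch,
using the standard cm-per-Mpc and kpc-per-Mpc factors:

    box_in_mpc = sim_unit_to_cm / mpc_conversion['cm']
    # With sim_unit_to_cm = 3.08e24 and mpc_conversion['cm'] ~ 3.0857e24,
    # box_in_mpc ~ 1.0; then spf.units['mpc'] ~ 1.0 and, since
    # mpc_conversion['kpc'] is 1.0e3, spf.units['kpc'] ~ 1.0e3.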

diff -r 2edc94f8cf0819f4eaede36d7dc2dc0556982e04 -r 3deb823bc6b8beb1cfdaec903d7aa93759c2aa7a yt/frontends/stream/fields.py
--- a/yt/frontends/stream/fields.py
+++ b/yt/frontends/stream/fields.py
@@ -34,6 +34,9 @@
     ValidateSpatial, \
     ValidateGridType
 import yt.data_objects.universal_fields
+from yt.data_objects.particle_fields import \
+    particle_deposition_functions, \
+    particle_vector_functions
 
 KnownStreamFields = FieldInfoContainer()
 add_stream_field = KnownStreamFields.add_field
@@ -69,3 +72,9 @@
 
 add_field(("all", "ParticleMass"), function = TranslationFunc("particle_mass"),
           particle_type=True)
+
+particle_vector_functions("all", ["particle_position_%s" % ax for ax in 'xyz'],
+                          ["particle_velocity_%s" % ax for ax in 'xyz'],
+                          StreamFieldInfo)
+particle_deposition_functions("all", "Coordinates", "ParticleMass",
+                              StreamFieldInfo)
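
For context on the fields.py changes: particle_vector_functions wires the
per-axis position and velocity arrays into vector fields for the "all"
particle type, and particle_deposition_functions registers deposit-style
fields on top of them.  Assuming the ("deposit", "all_*") naming used by
yt-3.0's particle_fields module (an assumption; the names are not spelled
out in this diff), usage would look roughly like:

    dd = pf.h.all_data()
    # Particle counts and mass density deposited onto octree cells; the
    # exact field names follow particle_deposition_functions.
    cnt = dd["deposit", "all_count"]
    rho = dd["deposit", "all_density"]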

diff -r 2edc94f8cf0819f4eaede36d7dc2dc0556982e04 -r 3deb823bc6b8beb1cfdaec903d7aa93759c2aa7a yt/frontends/stream/io.py
--- a/yt/frontends/stream/io.py
+++ b/yt/frontends/stream/io.py
@@ -32,6 +32,9 @@
 from yt.utilities.io_handler import \
     BaseIOHandler, _axis_ids
 from yt.utilities.logger import ytLogger as mylog
+from collections import defaultdict
+from yt.utilities.lib.geometry_utils import compute_morton
+from yt.utilities.exceptions import YTDomainOverflow
 
 class IOHandlerStream(BaseIOHandler):
 
@@ -127,3 +129,83 @@
     def _read_exception(self):
         return KeyError
 
+class StreamParticleIOHandler(BaseIOHandler):
+
+    _data_style = "stream_particles"
+
+    def __init__(self, stream_handler):
+        self.fields = stream_handler.fields
+        BaseIOHandler.__init__(self)
+
+    def _read_particle_selection(self, chunks, selector, fields):
+        rv = {}
+        # We first need a set of masks for each particle type
+        ptf = defaultdict(list)
+        psize = defaultdict(lambda: 0)
+        chunks = list(chunks)
+        for ftype, fname in fields:
+            ptf[ftype].append(fname)
+        # For this type of file, we actually have something slightly different.
+        # We are given a list of ParticleDataChunks, which is composed of
+        # individual ParticleOctreeSubsets.  The data_files attribute on these
+        # may in fact overlap.  So we will iterate over a union of all the
+        # data_files.
+        data_files = set([])
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in data_files:
+            f = self.fields[data_file.filename]
+            # This double-reads: count the selected points here, then
+            # read the positions again below to fill the output arrays.
+            for ptype, field_list in sorted(ptf.items()):
+                assert(ptype == "all")
+                psize[ptype] += selector.count_points(
+                        f["particle_position_x"],
+                        f["particle_position_y"],
+                        f["particle_position_z"])
+        # Now we have all the sizes, and we can allocate
+        ind = {}
+        for field in fields:
+            mylog.debug("Allocating %s values for %s", psize[field[0]], field)
+            rv[field] = np.empty(psize[field[0]], dtype="float64")
+            ind[field] = 0
+        for data_file in data_files:
+            f = self.fields[data_file.filename]
+            for ptype, field_list in sorted(ptf.items()):
+                assert(ptype == "all")
+                mask = selector.select_points(
+                        f["particle_position_x"],
+                        f["particle_position_y"],
+                        f["particle_position_z"])
+                if mask is None: continue
+                for field in field_list:
+                    data = f[field][mask,...]
+                    my_ind = ind[ptype, field]
+                    mylog.debug("Filling from %s to %s with %s",
+                        my_ind, my_ind+data.shape[0], field)
+                    rv[ptype, field][my_ind:my_ind + data.shape[0],...] = data
+                    ind[ptype, field] += data.shape[0]
+        return rv
+
+    def _initialize_index(self, data_file, regions):
+        # self.fields[data_file.filename][fname] is the access pattern here
+        pos = np.column_stack([self.fields[data_file.filename][
+                              "particle_position_%s" % ax] for ax in 'xyz'])
+        if np.any(pos.min(axis=0) <= data_file.pf.domain_left_edge) or \
+           np.any(pos.max(axis=0) >= data_file.pf.domain_right_edge):
+            raise YTDomainOverflow(pos.min(axis=0), pos.max(axis=0),
+                                   data_file.pf.domain_left_edge,
+                                   data_file.pf.domain_right_edge)
+        regions.add_data_file(pos, data_file.file_id)
+        morton = compute_morton(
+                pos[:,0], pos[:,1], pos[:,2],
+                data_file.pf.domain_left_edge,
+                data_file.pf.domain_right_edge)
+        return morton
+
+    def _count_particles(self, data_file):
+        npart = self.fields[data_file.filename]["particle_position_x"].size
+        return {'all': npart}
+
+    def _identify_fields(self, data_file):
+        return [ ("all", k) for k in self.fields[data_file.filename].keys()]

Repository URL: https://bitbucket.org/yt_analysis/yt-3.0/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
