[yt-svn] commit/yt-3.0: MatthewTurk: First draft of Tipsy file format support.

Bitbucket commits-noreply at bitbucket.org
Thu Jan 17 11:54:21 PST 2013


1 new commit in yt-3.0:

https://bitbucket.org/yt_analysis/yt-3.0/commits/1bab987a14ad/
changeset:   1bab987a14ad
branch:      yt-3.0
user:        MatthewTurk
date:        2013-01-17 20:49:31
summary:     First draft of Tipsy file format support.
affected #:  6 files

diff -r df575510e2d5235d77ef08c536e1123bd0e535cd -r 1bab987a14ad7444c5a4baec791c283b7cb4c283 yt/frontends/sph/api.py
--- a/yt/frontends/sph/api.py
+++ b/yt/frontends/sph/api.py
@@ -30,8 +30,17 @@
 
 from .data_structures import \
       OWLSStaticOutput, \
-      GadgetStaticOutput
+      GadgetStaticOutput, \
+      TipsyStaticOutput
 
 from .io import \
       IOHandlerOWLS, \
       IOHandlerGadgetBinary
+
+from .fields import \
+      add_owls_field, \
+      OWLSFieldInfo, \
+      add_gadget_field, \
+      GadgetFieldInfo, \
+      add_tipsy_field, \
+      TipsyFieldInfo

diff -r df575510e2d5235d77ef08c536e1123bd0e535cd -r 1bab987a14ad7444c5a4baec791c283b7cb4c283 yt/frontends/sph/data_structures.py
--- a/yt/frontends/sph/data_structures.py
+++ b/yt/frontends/sph/data_structures.py
@@ -27,6 +27,7 @@
 import numpy as np
 import stat
 import weakref
+import struct
 from itertools import izip
 
 from yt.utilities.fortran_utils import read_record
@@ -45,7 +46,9 @@
     OWLSFieldInfo, \
     KnownOWLSFields, \
     GadgetFieldInfo, \
-    KnownGadgetFields
+    KnownGadgetFields, \
+    TipsyFieldInfo, \
+    KnownTipsyFields
 
 from yt.data_objects.field_info_container import \
     FieldInfoContainer, NullFunc
@@ -125,6 +128,7 @@
             self.parameter_file.domain_dimensions,
             self.parameter_file.domain_left_edge,
             self.parameter_file.domain_right_edge)
+        self.oct_handler.n_ref = 64
         mylog.info("Allocating for %0.3e particles", total_particles)
         for dom in self.domains:
             self.io._initialize_octree(dom, self.oct_handler)
@@ -133,7 +137,6 @@
         tot = self.oct_handler.linearly_count()
         mylog.info("Identified %0.3e octs", tot)
 
-
     def _detect_fields(self):
         # TODO: Add additional fields
         pfl = []
@@ -338,3 +341,90 @@
     def _is_valid(self, *args, **kwargs):
         # We do not allow load() of these files.
         return False
+
+class TipsyDomainFile(ParticleDomainFile):
+
+    def _calculate_offsets(self, field_list):
+        self.field_offsets = self.io._calculate_particle_offsets(self)
+
+    def __init__(self, pf, io, domain_filename, domain_id):
+        # To go above 1 domain, we need to include an indexing step in the
+        # IOHandler, rather than simply reading from a single file.
+        assert domain_id == 0 
+        super(TipsyDomainFile, self).__init__(pf, io,
+                domain_filename, domain_id)
+        io._create_dtypes(self)
+
+
+class TipsyStaticOutput(StaticOutput):
+    _hierarchy_class = ParticleGeometryHandler
+    _domain_class = TipsyDomainFile
+    _fieldinfo_fallback = TipsyFieldInfo
+    _fieldinfo_known = KnownTipsyFields
+    _header_spec = (('time',    'd'),
+                    ('nbodies', 'i'),
+                    ('ndim',    'i'),
+                    ('nsph',    'i'),
+                    ('ndark',   'i'),
+                    ('nstar',   'i'),
+                    ('dummy',   'i'))
+
+    def __init__(self, filename, data_style="tipsy",
+                 root_dimensions = 64):
+        self._root_dimensions = root_dimensions
+        # Set up the template for domain files
+        self.storage_filename = None
+        super(TipsyStaticOutput, self).__init__(filename, data_style)
+
+    def __repr__(self):
+        return os.path.basename(self.parameter_filename).split(".")[0]
+
+    def _set_units(self):
+        self.units = {}
+        self.time_units = {}
+        self.conversion_factors = {}
+        DW = self.domain_right_edge - self.domain_left_edge
+        self.units["unitary"] = 1.0 / DW.max()
+
+    def _parse_parameter_file(self):
+
+        # The entries in this header follow the standard Tipsy header layout:
+        # time, total and per-type particle counts, and dimensionality.
+
+        f = open(self.parameter_filename, "rb")
+        hh = ">" + "".join(["%s" % (b) for a,b in self._header_spec])
+        hvals = dict([(a, c) for (a, b), c in zip(self._header_spec,
+                     struct.unpack(hh, f.read(struct.calcsize(hh))))])
+        self._header_offset = f.tell()
+
+        self.dimensionality = 3
+        self.refine_by = 2
+        self.parameters["HydroMethod"] = "sph"
+        self.unique_identifier = \
+            int(os.stat(self.parameter_filename)[stat.ST_CTIME])
+        # Set standard values
+
+        # This may not be correct.
+        self.current_time = hvals["time"]
+
+        self.domain_left_edge = np.zeros(3, "float64") - 0.5
+        self.domain_right_edge = np.ones(3, "float64") + 0.5
+        self.domain_dimensions = np.ones(3, "int32") * self._root_dimensions
+
+        self.cosmological_simulation = 1
+
+        self.current_redshift = 0.0
+        self.omega_lambda = 0.0
+        self.omega_matter = 0.0
+        self.hubble_constant = 0.0
+        self.parameters.update(hvals)
+
+        self.domain_template = self.parameter_filename
+        self.domain_count = 1
+
+        f.close()
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        # We do not allow load() of these files.
+        return False
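
The header parsing in TipsyStaticOutput._parse_parameter_file boils down to a
single fixed-size, big-endian struct read. A minimal standalone sketch of the
same logic, assuming a big-endian Tipsy file (the filename is hypothetical):

    import struct

    # Mirrors TipsyStaticOutput._header_spec: (name, struct format code) pairs.
    header_spec = (('time',    'd'),
                   ('nbodies', 'i'),
                   ('ndim',    'i'),
                   ('nsph',    'i'),
                   ('ndark',   'i'),
                   ('nstar',   'i'),
                   ('dummy',   'i'))

    def read_tipsy_header(filename):
        # ">" forces big-endian and disables padding, so calcsize gives the
        # exact on-disk header size (32 bytes for the spec above).
        fmt = ">" + "".join(code for name, code in header_spec)
        with open(filename, "rb") as f:
            values = struct.unpack(fmt, f.read(struct.calcsize(fmt)))
            header_offset = f.tell()  # particle records start right here
        names = [name for name, code in header_spec]
        return dict(zip(names, values)), header_offset

    # hvals, offset = read_tipsy_header("galaxy.00300")  # hypothetical file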

diff -r df575510e2d5235d77ef08c536e1123bd0e535cd -r 1bab987a14ad7444c5a4baec791c283b7cb4c283 yt/frontends/sph/fields.py
--- a/yt/frontends/sph/fields.py
+++ b/yt/frontends/sph/fields.py
@@ -40,11 +40,17 @@
 add_owls_field = OWLSFieldInfo.add_field
 
 KnownOWLSFields = FieldInfoContainer()
-add_OWLS_field = KnownOWLSFields.add_field
+add_owls_field = KnownOWLSFields.add_field
 
 GadgetFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
-add_Gadget_field = GadgetFieldInfo.add_field
+add_gadget_field = GadgetFieldInfo.add_field
 
 KnownGadgetFields = FieldInfoContainer()
-add_Gadget_field = KnownGadgetFields.add_field
+add_gadget_field = KnownGadgetFields.add_field
 
+TipsyFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
+add_Tipsy_field = TipsyFieldInfo.add_field
+
+KnownTipsyFields = FieldInfoContainer()
+add_tipsy_field = KnownTipsyFields.add_field
+
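
The two new containers follow the split used for OWLS and Gadget above:
KnownTipsyFields (via add_tipsy_field) holds fields as they appear on disk,
while TipsyFieldInfo holds derived fields and falls back to the universal
FieldInfo. A minimal registration sketch, assuming the FieldInfoContainer
add_field signature of this era; the specific fields and the MetalMass
definition are illustrative, not part of this commit:

    from yt.frontends.sph.api import TipsyFieldInfo, add_tipsy_field
    from yt.data_objects.field_info_container import NullFunc

    # An on-disk ("known") field: NullFunc marks it as read straight from
    # the file rather than computed.
    add_tipsy_field("Metals", function=NullFunc, particle_type=True)

    # A derived field, using the usual (field, data) callback signature.
    def _MetalMass(field, data):
        # purely illustrative combination of two on-disk fields
        return data["Mass"] * data["Metals"]

    TipsyFieldInfo.add_field("MetalMass", function=_MetalMass,
                             particle_type=True)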

diff -r df575510e2d5235d77ef08c536e1123bd0e535cd -r 1bab987a14ad7444c5a4baec791c283b7cb4c283 yt/frontends/sph/io.py
--- a/yt/frontends/sph/io.py
+++ b/yt/frontends/sph/io.py
@@ -235,6 +235,7 @@
         count = sum(domain.total_particles.values())
         dt = [("px", "float32"), ("py", "float32"), ("pz", "float32")]
         with open(domain.domain_filename, "rb") as f:
+            f.seek(self._header_offset)
             # The first total_particles * 3 values are positions
             pp = np.fromfile(f, dtype = dt, count = count)
         pos = np.empty((count, 3), dtype="float64")
@@ -286,3 +287,151 @@
                         continue
                 field_list.append((ptype, field))
         return field_list
+
+class IOHandlerTipsyBinary(BaseIOHandler):
+    _data_style = "tipsy"
+
+    _pdtypes = None # dtypes, to be filled in later
+
+    _ptypes = ( "Gas",
+                "DarkMatter",
+                "Stars" )
+
+    _fields = ( ("Gas", "Mass"),
+                ("Gas", "Coordinates"),
+                ("Gas", "Velocities"),
+                ("Gas", "Density"),
+                ("Gas", "Temperature"),
+                ("Gas", "Epsilon"),
+                ("Gas", "Metals"),
+                ("Gas", "Phi"),
+                ("DarkMatter", "Mass"),
+                ("DarkMatter", "Coordinates"),
+                ("DarkMatter", "Velocities"),
+                ("DarkMatter", "Epsilon"),
+                ("DarkMatter", "Phi"),
+                ("Stars", "Mass"),
+                ("Stars", "Coordinates"),
+                ("Stars", "Velocities"),
+                ("Stars", "Metals"),
+                ("Stars", "FormationTime"),
+                ("Stars", "Epsilon"),
+                ("Stars", "Phi")
+              )
+
+    def _read_fluid_selection(self, chunks, selector, fields, size):
+        raise NotImplementedError
+
+    def _fill_fields(self, fields, vals, mask):
+        if mask is None:
+            size = 0
+        else:
+            size = mask.sum()
+        rv = {}
+        for field in fields:
+            mylog.debug("Allocating %s values for %s", size, field)
+            if field in _vector_fields:
+                rv[field] = np.empty((size, 3), dtype="float64")
+                if size == 0: continue
+                rv[field][:,0] = vals[field]['x'][mask]
+                rv[field][:,1] = vals[field]['y'][mask]
+                rv[field][:,2] = vals[field]['z'][mask]
+            else:
+                rv[field] = np.empty(size, dtype="float64")
+                if size == 0: continue
+                rv[field][:] = vals[field]
+        return rv
+
+    def _read_particle_selection(self, chunks, selector, fields):
+        rv = {}
+        # We first need a set of masks for each particle type
+        ptf = defaultdict(list)
+        ptypes = set()
+        for ftype, fname in fields:
+            ptf[ftype].append(fname)
+            ptypes.add(ftype)
+        ptypes = list(ptypes)
+        ptypes.sort(key = lambda a: self._ptypes.index(a))
+        for chunk in chunks:
+            for subset in chunk.objs:
+                poff = subset.domain.field_offsets
+                tp = subset.domain.total_particles
+                f = open(subset.domain.domain_filename, "rb")
+                for ptype in ptypes:
+                    f.seek(poff[ptype], os.SEEK_SET)
+                    p = np.fromfile(f, self._pdtypes[ptype], count=tp[ptype])
+                    mask = selector.select_points(
+                        p['Coordinates']['x'].astype("float64"),
+                        p['Coordinates']['y'].astype("float64"),
+                        p['Coordinates']['z'].astype("float64"))
+                    tf = self._fill_fields(ptf[ptype], p, mask)
+                    for field in tf:
+                        rv[ptype, field] = tf[field]
+                    del p, tf
+                f.close()
+        return rv
+
+    def _initialize_octree(self, domain, octree):
+        with open(domain.domain_filename, "rb") as f:
+            f.seek(domain.pf._header_offset)
+            for ptype in self._ptypes:
+                # We'll just add the individual types separately
+                count = domain.total_particles[ptype]
+                if count == 0: continue
+                pp = np.fromfile(f, dtype = self._pdtypes[ptype],
+                                 count = count)
+                pos = np.empty((count, 3), dtype="float64")
+                mylog.info("Adding %0.3e %s particles", count, ptype)
+                pos[:,0] = pp['Coordinates']['x']
+                pos[:,1] = pp['Coordinates']['y']
+                pos[:,2] = pp['Coordinates']['z']
+                mylog.debug("Spanning: %0.3e .. %0.3e in x",
+                            pos[:,0].min(), pos[:,0].max())
+                mylog.debug("Spanning: %0.3e .. %0.3e in y",
+                            pos[:,1].min(), pos[:,1].max())
+                mylog.debug("Spanning: %0.3e .. %0.3e in z",
+                            pos[:,2].min(), pos[:,2].max())
+                del pp
+                octree.add(pos, domain.domain_id)
+
+    def _count_particles(self, domain):
+        npart = {
+            "Gas": domain.pf.parameters['nsph'],
+            "Stars": domain.pf.parameters['nstar'],
+            "DarkMatter": domain.pf.parameters['ndark']
+        }
+        return npart
+
+    def _create_dtypes(self, domain):
+        # We can just look at the particle counts.
+        self._header_offset = domain.pf._header_offset
+        self._pdtypes = {}
+        pds = {}
+        field_list = []
+        tp = domain.total_particles
+        for ptype, field in self._fields:
+            pfields = []
+            if tp[ptype] == 0: continue
+            if field in _vector_fields:
+                dt = (field, [('x', '>f'), ('y', '>f'), ('z', '>f')])
+            else:
+                dt = (field, '>f')
+            pds.setdefault(ptype, []).append(dt)
+            field_list.append((ptype, field))
+        for ptype in pds:
+            self._pdtypes[ptype] = np.dtype(pds[ptype])
+        self._field_list = field_list
+        return self._field_list
+
+    def _identify_fields(self, domain):
+        return self._field_list
+
+    def _calculate_particle_offsets(self, domain):
+        field_offsets = {}
+        pos = domain.pf._header_offset
+        for ptype in self._ptypes:
+            field_offsets[ptype] = pos
+            if domain.total_particles[ptype] == 0: continue
+            size = self._pdtypes[ptype].itemsize
+            pos += domain.total_particles[ptype] * size
+        return field_offsets
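
IOHandlerTipsyBinary never builds an explicit index: _create_dtypes assembles
one big-endian NumPy record dtype per particle type from the _fields table,
and _calculate_particle_offsets then places each type's block at the header
offset plus the cumulative itemsize of every block before it (Gas, then
DarkMatter, then Stars). A small sketch of that arithmetic with plain NumPy;
the particle counts are made up:

    import numpy as np

    vec = [('x', '>f'), ('y', '>f'), ('z', '>f')]

    # Per-particle record layouts, mirroring the _fields tuples above.
    dtypes = {
        "Gas": np.dtype([('Mass', '>f'), ('Coordinates', vec),
                         ('Velocities', vec), ('Density', '>f'),
                         ('Temperature', '>f'), ('Epsilon', '>f'),
                         ('Metals', '>f'), ('Phi', '>f')]),
        "DarkMatter": np.dtype([('Mass', '>f'), ('Coordinates', vec),
                                ('Velocities', vec), ('Epsilon', '>f'),
                                ('Phi', '>f')]),
        "Stars": np.dtype([('Mass', '>f'), ('Coordinates', vec),
                           ('Velocities', vec), ('Metals', '>f'),
                           ('FormationTime', '>f'), ('Epsilon', '>f'),
                           ('Phi', '>f')]),
    }

    header_offset = 32  # size of the 7-entry Tipsy header struct
    counts = {"Gas": 1000, "DarkMatter": 2000, "Stars": 500}  # made up

    offsets, pos = {}, header_offset
    for ptype in ("Gas", "DarkMatter", "Stars"):  # on-disk ordering
        offsets[ptype] = pos
        pos += counts[ptype] * dtypes[ptype].itemsize
    # e.g. offsets["DarkMatter"] == 32 + 1000 * 48, since a Gas record is
    # six scalars plus two 3-vectors = 12 four-byte floats (48 bytes).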

diff -r df575510e2d5235d77ef08c536e1123bd0e535cd -r 1bab987a14ad7444c5a4baec791c283b7cb4c283 yt/geometry/oct_container.pyx
--- a/yt/geometry/oct_container.pyx
+++ b/yt/geometry/oct_container.pyx
@@ -567,6 +567,7 @@
     cdef Oct** oct_list
     cdef np.int64_t *dom_offsets
     cdef public int max_level
+    cdef public int n_ref
 
     def allocate_root(self):
         cdef int i, j, k
@@ -749,8 +750,8 @@
         sd.next = NULL
         sd.pos = <np.float64_t **> malloc(sizeof(np.float64_t*) * 3)
         for i in range(3):
-            sd.pos[i] = <np.float64_t *> malloc(sizeof(np.float64_t) * 32)
-        for i in range(32):
+            sd.pos[i] = <np.float64_t *> malloc(sizeof(np.float64_t) * self.n_ref)
+        for i in range(self.n_ref):
             sd.pos[0][i] = sd.pos[1][i] = sd.pos[2][i] = 0.0
         sd.np = 0
         return my_oct
@@ -797,13 +798,15 @@
                 pp[i] = pos[p, i]
                 dds[i] = (self.DRE[i] + self.DLE[i])/self.nn[i]
                 ind[i] = <np.int64_t> ((pp[i] - self.DLE[i])/dds[i])
-                cp[i] = (ind[i] + 0.5) * dds[i]
+                cp[i] = (ind[i] + 0.5) * dds[i] + self.DLE[i]
             cur = self.root_mesh[ind[0]][ind[1]][ind[2]]
             if cur == NULL:
                 raise RuntimeError
             if self._check_refine(cur, cp, domain_id) == 1:
                 self.refine_oct(cur, cp)
             while cur.sd.np < 0:
+                if level > 100:
+                    raise RuntimeError
                 for i in range(3):
                     dds[i] = dds[i] / 2.0
                     if cp[i] > pp[i]:
@@ -817,17 +820,16 @@
                 if self._check_refine(cur, cp, domain_id) == 1:
                     self.refine_oct(cur, cp)
             # Now we copy in our particle 
-            pi = cur.sd.np
             cur.level = level
             for i in range(3):
-                cur.sd.pos[i][pi] = pp[i]
+                cur.sd.pos[i][cur.sd.np] = pp[i]
             cur.domain = domain_id
             cur.sd.np += 1
 
     cdef int _check_refine(self, Oct *cur, np.float64_t cp[3], int domain_id):
         if cur.children[0][0][0] != NULL:
             return 0
-        elif cur.sd.np == 32:
+        elif cur.sd.np >= self.n_ref:
             return 1
         elif cur.domain >= 0 and cur.domain != domain_id:
             return 1
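
Two behavioural changes ride along in the octree here: the hard-coded limit of
32 particles per oct becomes the tunable n_ref (which the particle geometry
handler now sets to 64), and the candidate cell centre cp is offset by the
domain left edge, which matters as soon as the domain does not start at zero.
A small numerical sketch of that second change, using the conventional cell
width (DRE - DLE) / nn and illustrative values:

    DLE, DRE = -0.5, 1.5     # domain left/right edge along one axis
    nn = 64                  # root cells along this axis
    dds = (DRE - DLE) / nn   # root cell width = 0.03125

    pp = -0.2                        # a particle inside the domain
    ind = int((pp - DLE) / dds)      # = 9, the root cell holding pp

    old_cp = (ind + 0.5) * dds        # = 0.296875, measured from 0.0
    new_cp = (ind + 0.5) * dds + DLE  # = -0.203125, the true cell centre

    # The old centre sits nowhere near the particle's cell
    # [-0.21875, -0.1875]; the shifted centre brackets pp correctly, so the
    # refinement walk descends on the right side of each split.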

diff -r df575510e2d5235d77ef08c536e1123bd0e535cd -r 1bab987a14ad7444c5a4baec791c283b7cb4c283 yt/mods.py
--- a/yt/mods.py
+++ b/yt/mods.py
@@ -109,6 +109,11 @@
     StreamStaticOutput, StreamFieldInfo, add_stream_field, \
     StreamHandler, load_uniform_grid, load_amr_grids
 
+from yt.frontends.sph.api import \
+    OWLSStaticOutput, OWLSFieldInfo, add_owls_field, \
+    GadgetStaticOutput, GadgetFieldInfo, add_gadget_field, \
+    TipsyStaticOutput, TipsyFieldInfo, add_tipsy_field
+
 from yt.analysis_modules.list_modules import \
     get_available_modules, amods
 available_analysis_modules = get_available_modules()
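
With the frontend exported through yt.mods, a Tipsy snapshot is opened by
instantiating TipsyStaticOutput directly (load() is deliberately disabled
while _is_valid returns False). A minimal usage sketch for this draft; the
snapshot path is hypothetical:

    from yt.mods import TipsyStaticOutput

    # Any big-endian Tipsy file laid out as header + gas + dark matter +
    # star records should parse; units and domain edges are still hard-coded.
    pf = TipsyStaticOutput("galaxy.00300", root_dimensions = 64)
    print pf.current_time, pf.parameters["nsph"], pf.parameters["ndark"]

    # Instantiating the hierarchy builds the particle octree through
    # IOHandlerTipsyBinary._initialize_octree.
    pf.h
    print pf.h.field_list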

Repository URL: https://bitbucket.org/yt_analysis/yt-3.0/



