[yt-svn] commit/yt: 11 new changesets

commits-noreply at bitbucket.org
Sat Jan 3 21:14:24 PST 2015


11 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/fe77807a13a3/
Changeset:   fe77807a13a3
Branch:      yt
User:        karraki
Date:        2014-08-31 15:53:49+00:00
Summary:     First pass at DM-only ART frontend
Affected #:  2 files

diff -r d85047922d9e1cbfc557566e8c00c8ae54d1779e -r fe77807a13a307c8a4db00429b3717d7e41bdf60 yt/frontends/art/data_structures.py
--- a/yt/frontends/art/data_structures.py
+++ b/yt/frontends/art/data_structures.py
@@ -48,6 +48,7 @@
 from .io import _read_child_level
 from .io import _read_root_level
 from .io import b2t
+from .io import a2b
 
 from yt.utilities.definitions import \
     mpc_conversion, sec_conversion
@@ -376,7 +377,10 @@
         """
         f = ("%s" % args[0])
         prefix, suffix = filename_pattern['amr']
-        if not os.path.isfile(f): return False
+        if not os.path.isfile(f):
+            return False
+        if not f.endswith(suffix):
+            return False
         with open(f, 'rb') as fh:
             try:
                 amr_header_vals = fpu.read_attrs(fh, amr_header_struct, '>')
@@ -385,6 +389,243 @@
                 return False
         return False
 
+class DarkMatterARTDataset(ARTDataset):
+    def __init__(self, filename, dataset_type='art',
+                          fields=None, storage_filename=None,
+                          skip_particles=False, skip_stars=False,
+                 limit_level=None, spread_age=True,
+                 force_max_level=None, file_particle_header=None,
+                 file_particle_stars=None):
+        self.particle_types += ("all",)
+        if fields is None:
+            fields = particle_fields
+            filename = os.path.abspath(filename)
+        self._fields_in_file = fields
+        self._file_particle = filename
+        self._file_particle_header = file_particle_header
+        self._file_particle_stars = file_particle_stars
+        self._find_files(filename)
+        self.parameter_filename = filename
+        self.skip_stars = skip_stars
+        self.spread_age = spread_age
+        self.domain_left_edge = np.zeros(3, dtype='float')
+        self.domain_right_edge = np.zeros(3, dtype='float')+1.0
+        Dataset.__init__(self, filename, dataset_type)
+        self.storage_filename = storage_filename
+
+    def _find_files(self, file_particle):
+        """
+        Given the particle base filename, attempt to find the
+        particle header and star files.
+        """
+        base_prefix, base_suffix = filename_pattern['particle_data']
+        aexpstr = file_particle.rsplit('s0',1)[1].replace(base_suffix,'')
+        possibles = glob.glob(os.path.dirname(file_particle)+"/*")
+        for filetype, (prefix, suffix) in filename_pattern.iteritems():
+            # if this attribute is already set skip it
+            if getattr(self, "_file_"+filetype, None) is not None:
+                continue
+            match = None
+            for possible in possibles:
+                if possible.endswith(aexpstr+suffix):
+                    if os.path.basename(possible).startswith(prefix):
+                        match = possible
+            if match is not None:
+                mylog.info('discovered %s:%s', filetype, match)
+                setattr(self, "_file_"+filetype, match)
+            else:
+                setattr(self, "_file_"+filetype, None)
+
+    def __repr__(self):
+        return self._file_particle.split('/')[-1]
+
+    def _set_code_unit_attributes(self):
+        """
+        Generates the conversion to various physical units based
+                on the parameters from the header
+        """
+
+        # spatial units
+        z = self.current_redshift
+        h = self.hubble_constant
+        boxcm_cal = self.parameters["boxh"]
+        boxcm_uncal = boxcm_cal / h
+        box_proper = boxcm_uncal/(1+z)
+        aexpn = self.parameters["aexpn"]
+
+        # all other units
+        wmu = self.parameters["wmu"]
+        Om0 = self.parameters['Om0']
+        ng = self.parameters['ng']
+        boxh = self.parameters['boxh']
+        aexpn = self.parameters["aexpn"]
+        hubble = self.parameters['hubble']
+
+        r0 = boxh/ng
+        rho0 = 2.776e11 * hubble**2.0 * Om0
+        aM0 = rho0 * (boxh/hubble)**3.0 / ng**3.0
+        velocity = 100.0*r0/aexpn*1.0e5  # proper cm/s
+        mass = aM0 * 1.98892e33
+
+        self.cosmological_simulation = True
+        self.mass_unit = self.quan(mass, "g*%s" % ng**3)
+        self.length_unit = self.quan(box_proper, "Mpc")
+        self.velocity_unit = self.quan(velocity, "cm/s")
+        self.time_unit = self.length_unit / self.velocity_unit
+
+
+    def _parse_parameter_file(self):
+        """
+        Get the various simulation parameters & constants.
+        """
+        self.dimensionality = 3
+        self.refine_by = 2
+        self.periodicity = (True, True, True)
+        self.cosmological_simulation = True
+        self.parameters = {}
+        self.unique_identifier = \
+            int(os.stat(self.parameter_filename)[stat.ST_CTIME])
+        self.parameters.update(constants)
+        self.parameters['Time'] = 1.0
+
+        # read the particle header
+        self.particle_types = []
+        self.particle_types_raw = ()
+        assert self._file_particle_header
+        with open(self._file_particle_header, "rb") as fh:
+            seek = 4
+            fh.seek(seek)
+            headerstr = np.fromfile(fh, count=1, dtype=(str,45))
+            aexpn = np.fromfile(fh, count=1, dtype='>f4')
+            aexp0 = np.fromfile(fh, count=1, dtype='>f4')
+            amplt = np.fromfile(fh, count=1, dtype='>f4')
+            astep = np.fromfile(fh, count=1, dtype='>f4')
+            istep = np.fromfile(fh, count=1, dtype='>i4')
+            partw = np.fromfile(fh, count=1, dtype='>f4')
+            tintg = np.fromfile(fh, count=1, dtype='>f4')
+            ekin = np.fromfile(fh, count=1, dtype='>f4')
+            ekin1 = np.fromfile(fh, count=1, dtype='>f4')
+            ekin2 = np.fromfile(fh, count=1, dtype='>f4')
+            au0 = np.fromfile(fh, count=1, dtype='>f4')
+            aeu0 = np.fromfile(fh, count=1, dtype='>f4')
+            nrowc = np.fromfile(fh, count=1, dtype='>i4')
+            ngridc = np.fromfile(fh, count=1, dtype='>i4')
+            nspecs = np.fromfile(fh, count=1, dtype='>i4')
+            nseed = np.fromfile(fh, count=1, dtype='>i4')
+            Om0 = np.fromfile(fh, count=1, dtype='>f4')
+            Oml0 = np.fromfile(fh, count=1, dtype='>f4')
+            hubble = np.fromfile(fh, count=1, dtype='>f4')
+            Wp5 = np.fromfile(fh, count=1, dtype='>f4')
+            Ocurv = np.fromfile(fh, count=1, dtype='>f4')
+            wspecies = np.fromfile(fh, count=10, dtype='>f4')
+            lspecies = np.fromfile(fh, count=10, dtype='>i4')
+            extras = np.fromfile(fh, count=79, dtype='>f4')
+            boxsize = np.fromfile(fh, count=1, dtype='>f4')
+        n = nspecs
+        particle_header_vals = {}
+        tmp = np.array([headerstr, aexpn, aexp0, amplt, astep, istep, partw, tintg, ekin, ekin1, ekin2, au0, aeu0, nrowc, ngridc, nspecs, nseed, Om0, Oml0, hubble, Wp5, Ocurv, wspecies, lspecies, extras, boxsize])
+        for i in range(len(tmp)):
+            a1 = dmparticle_header_struct[0][i]
+            a2 = dmparticle_header_struct[1][i]
+            if a2 == 1:
+                particle_header_vals[a1] = tmp[i][0]
+            else:
+                particle_header_vals[a1] = tmp[i][:a2]
+        for specie in range(n):
+            self.particle_types.append("specie%i" % specie)
+        self.particle_types_raw = tuple(
+            self.particle_types)
+        ls_nonzero = np.diff(lspecies)[:n-1]
+        ls_nonzero = np.append(lspecies[0], ls_nonzero)
+        self.star_type = len(ls_nonzero)
+        mylog.info("Discovered %i species of particles", len(ls_nonzero))
+        mylog.info("Particle populations: "+'%9i '*len(ls_nonzero),
+                   *ls_nonzero)
+        for k, v in particle_header_vals.items():
+            if k in self.parameters.keys():
+                if not self.parameters[k] == v:
+                    mylog.info(
+                        "Inconsistent parameter %s %1.1e  %1.1e", k, v,
+                        self.parameters[k])
+            else:
+                self.parameters[k] = v
+        self.parameters_particles = particle_header_vals
+        self.parameters.update(particle_header_vals)
+        self.parameters['wspecies'] = wspecies[:n]
+        self.parameters['lspecies'] = lspecies[:n]
+        self.parameters['ng'] = self.parameters['Ngridc']
+        self.parameters['ncell0'] = self.parameters['ng']**3
+        self.parameters['boxh'] = self.parameters['boxsize']
+
+        # setup standard simulation params yt expects to see
+        self.current_redshift = self.parameters["aexpn"]**-1.0 - 1.0
+        self.omega_lambda = particle_header_vals['Oml0']
+        self.omega_matter = particle_header_vals['Om0']
+        self.hubble_constant = particle_header_vals['hubble']
+        self.min_level = 0
+        self.max_level = 0
+#        self.min_level = particle_header_vals['min_level']
+#        self.max_level = particle_header_vals['max_level']
+#        if self.limit_level is not None:
+#            self.max_level = min(
+#                self.limit_level, particle_header_vals['max_level'])
+#        if self.force_max_level is not None:
+#            self.max_level = self.force_max_level
+        self.hubble_time = 1.0/(self.hubble_constant*100/3.08568025e19)
+        self.parameters['t'] = a2b(self.parameters['aexpn'])
+        self.current_time = b2t(self.parameters['t']) * sec_per_Gyr
+        self.gamma = self.parameters["gamma"]
+        mylog.info("Max level is %02i", self.max_level)
+
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        """
+        Defined for the NMSU file naming scheme.
+        This could differ for other formats.
+        """
+        f = ("%s" % args[0])
+        prefix, suffix = filename_pattern['particle_data']
+        if not os.path.isfile(f):
+            return False
+        if not f.endswith(suffix):
+            return False
+        with open(f, 'rb') as fh:
+            try:
+                seek = 4
+                fh.seek(seek)
+                headerstr = np.fromfile(fh, count=1, dtype=(str,45))
+                aexpn = np.fromfile(fh, count=1, dtype='>f4')
+                aexp0 = np.fromfile(fh, count=1, dtype='>f4')
+                amplt = np.fromfile(fh, count=1, dtype='>f4')
+                astep = np.fromfile(fh, count=1, dtype='>f4')
+                istep = np.fromfile(fh, count=1, dtype='>i4')
+                partw = np.fromfile(fh, count=1, dtype='>f4')
+                tintg = np.fromfile(fh, count=1, dtype='>f4')
+                ekin = np.fromfile(fh, count=1, dtype='>f4')
+                ekin1 = np.fromfile(fh, count=1, dtype='>f4')
+                ekin2 = np.fromfile(fh, count=1, dtype='>f4')
+                au0 = np.fromfile(fh, count=1, dtype='>f4')
+                aeu0 = np.fromfile(fh, count=1, dtype='>f4')
+                nrowc = np.fromfile(fh, count=1, dtype='>i4')
+                ngridc = np.fromfile(fh, count=1, dtype='>i4')
+                nspecs = np.fromfile(fh, count=1, dtype='>i4')
+                nseed = np.fromfile(fh, count=1, dtype='>i4')
+                Om0 = np.fromfile(fh, count=1, dtype='>f4')
+                Oml0 = np.fromfile(fh, count=1, dtype='>f4')
+                hubble = np.fromfile(fh, count=1, dtype='>f4')
+                Wp5 = np.fromfile(fh, count=1, dtype='>f4')
+                Ocurv = np.fromfile(fh, count=1, dtype='>f4')
+                wspecies = np.fromfile(fh, count=10, dtype='>f4')
+                lspecies = np.fromfile(fh, count=10, dtype='>i4')
+                extras = np.fromfile(fh, count=79, dtype='>f4')
+                boxsize = np.fromfile(fh, count=1, dtype='>f4')
+                return True
+            except:
+                return False
+        return False
+
+
 class ARTDomainSubset(OctreeSubset):
 
     def fill(self, content, ftfields, selector):

diff -r d85047922d9e1cbfc557566e8c00c8ae54d1779e -r fe77807a13a307c8a4db00429b3717d7e41bdf60 yt/frontends/art/definitions.py
--- a/yt/frontends/art/definitions.py
+++ b/yt/frontends/art/definitions.py
@@ -94,6 +94,21 @@
      '45sffffi'+'fffffff'+'iiii'+'ffffff'+'396s'+'f')
 ]
 
+dmparticle_header_struct = [
+     ('header',
+     'aexpn', 'aexp0', 'amplt', 'astep',
+     'istep',
+     'partw', 'tintg',
+     'Ekin', 'Ekin1', 'Ekin2',
+     'au0', 'aeu0',
+     'Nrow', 'Ngridc', 'Nspecies', 'Nseed',
+     'Om0', 'Oml0', 'hubble', 'Wp5', 'Ocurv',
+     'wspecies','lspecies',
+     'extras', 'boxsize'),
+     (1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+     1,1,1,10,10,79,1)
+]
+
 star_struct = [
     ('>d', ('tdum', 'adum')),
     ('>i', 'nstars'),
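
The header read in _parse_parameter_file above walks the PMcrd record one
field at a time with 25 separate np.fromfile calls. As an alternative sketch,
the same big-endian record can be read in one pass with a structured dtype
(layout taken from dmparticle_header_struct above; the helper name
read_dm_header is ours, not part of the frontend):

    import numpy as np

    # Assumed layout, mirroring dmparticle_header_struct (big-endian NMSU-ART).
    dm_header_dtype = np.dtype([
        ('header', 'S45'),
        ('aexpn', '>f4'), ('aexp0', '>f4'), ('amplt', '>f4'), ('astep', '>f4'),
        ('istep', '>i4'),
        ('partw', '>f4'), ('tintg', '>f4'),
        ('Ekin', '>f4'), ('Ekin1', '>f4'), ('Ekin2', '>f4'),
        ('au0', '>f4'), ('aeu0', '>f4'),
        ('Nrow', '>i4'), ('Ngridc', '>i4'), ('Nspecies', '>i4'), ('Nseed', '>i4'),
        ('Om0', '>f4'), ('Oml0', '>f4'), ('hubble', '>f4'),
        ('Wp5', '>f4'), ('Ocurv', '>f4'),
        ('wspecies', '>f4', (10,)), ('lspecies', '>i4', (10,)),
        ('extras', '>f4', (79,)), ('boxsize', '>f4'),
    ])

    def read_dm_header(filename):
        """One-pass read of the PMcrd header, skipping the 4-byte record marker."""
        with open(filename, 'rb') as fh:
            fh.seek(4)
            return np.fromfile(fh, dtype=dm_header_dtype, count=1)[0]

Indexing the returned record by name, e.g. rec['Nspecies'], then stands in
for the individual per-field reads.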


https://bitbucket.org/yt_analysis/yt/commits/4a98fe25c8ba/
Changeset:   4a98fe25c8ba
Branch:      yt
User:        karraki
Date:        2014-10-16 18:00:10+00:00
Summary:     updated loading ART data docs
Affected #:  1 file

diff -r fe77807a13a307c8a4db00429b3717d7e41bdf60 -r 4a98fe25c8ba873c680d50cea8047df8dafa8b21 doc/source/examining/loading_data.rst
--- a/doc/source/examining/loading_data.rst
+++ b/doc/source/examining/loading_data.rst
@@ -11,32 +11,43 @@
 ART Data
 --------
 
-ART data enjoys preliminary support and has been supported in the past by
-Christopher Moody.  Please contact the ``yt-dev`` mailing list if you are
-interested in using yt for ART data, or if you are interested in assisting with
-development of yt to work with ART data.
+ART data has been supported in the past by Christopher Moody and is currently
+cared for by Kenza Arraki.  Please contact the ``yt-dev`` mailing list if you
+are interested in using yt for ART data, or if you are interested in assisting
+with development of yt to work with ART data.
 
-To load an ART dataset you can use the ``yt.load`` command and provide it
- the gas mesh file. It will search for and attempt 
-to find the complementary dark matter and stellar particle header and data 
-files. However, your simulations may not follow the same naming convention.
+To load an ART dataset you can use the ``yt.load`` command and provide it the
+gas mesh file.
 
-So for example, a single snapshot might have a series of files looking like
-this:
+.. code-block:: python
+    
+   import yt
+
+   ds = yt.load("D9p_500/10MpcBox_HartGal_csf_a0.500.d")
+
+
+It will search for and attempt to find the complementary dark matter and stellar
+particle header and data files. However, your simulations may not follow the
+same naming convention.
+
+For example, the single snapshot given in the sample data has a series of
+files that look like this:
 
 .. code-block:: none
 
-   10MpcBox_csf512_a0.300.d    #Gas mesh
-   PMcrda0.300.DAT             #Particle header
-   PMcrs0a0.300.DAT            #Particle data (positions,velocities)
-   stars_a0.300.dat            #Stellar data (metallicities, ages, etc.)
+   10MpcBox_HartGal_csf_a0.500.d  #Gas mesh
+   PMcrda0.500.DAT                #Particle header
+   PMcrs0a0.500.DAT               #Particle data (positions,velocities)
+   stars_a0.500.dat               #Stellar data (metallicities, ages, etc.)
 
-The ART frontend tries to find the associated files matching the above, but
-if that fails you can specify ``file_particle_data``,``file_particle_data``,
-``file_star_data`` in addition to the specifying the gas mesh. You also have 
-the option of gridding particles, and assigning them onto the meshes.
-This process is in beta, and for the time being it's probably  best to leave
-``do_grid_particles=False`` as the default.
+The ART frontend tries to find the associated files matching the above, but if
+that fails you can specify ``file_particle_header``,``file_particle_data``, and
+``file_particle_stars``, in addition to specifying the gas mesh. Note that the
+``pta0.500.dat`` file containing particle times is not loaded by yt.
+
+You also have the option of gridding particles and assigning them onto the
+meshes.  This process is in beta, and for the time being it's probably best to
+leave ``do_grid_particles=False`` as the default.
 
 To speed up the loading of an ART file, you have a few options. You can turn 
 off the particles entirely by setting ``discover_particles=False``. You can
@@ -46,13 +57,20 @@
 Finally, when stellar ages are computed we 'spread' the ages evenly within a
 smoothing window. By default this is turned on and set to 10Myr. To turn this 
 off you can set ``spread=False``, and you can tweak the age smoothing window
-by specifying the window in seconds, ``spread=1.0e7*265*24*3600``. 
+by specifying the window in seconds, ``spread=1.0e7*365*24*3600``. 
+
+Preliminary support for dark matter ART data. To load a DM-only ART dataset you
+can use the ``yt.load`` command and provide it the particle data file.
 
 .. code-block:: python
     
    import yt
 
-   ds = yt.load("SFG1/10MpcBox_csf512_a0.460.d")
+   ds = yt.load("PMcrs0a0.500.DAT")
+
+This should not be used for loading just the dark matter data for a 'regular'
+hydrodynamical data set as the units and IO are different!
+
 
 .. _loading-artio-data:
 


https://bitbucket.org/yt_analysis/yt/commits/95199a83f260/
Changeset:   95199a83f260
Branch:      yt
User:        karraki
Date:        2014-10-16 21:09:18+00:00
Summary:     DM-only ART frontend, new file class and IO handler
Affected #:  2 files

diff -r 4a98fe25c8ba873c680d50cea8047df8dafa8b21 -r 95199a83f260741d96b43bf2ba288436bb9d01f6 yt/frontends/art/data_structures.py
--- a/yt/frontends/art/data_structures.py
+++ b/yt/frontends/art/data_structures.py
@@ -1,8 +1,5 @@
 """
 ART-specific data structures
-
-
-
 """
 
 #-----------------------------------------------------------------------------
@@ -13,7 +10,7 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 import numpy as np
-import os.path
+import os
 import stat
 import weakref
 import cStringIO
@@ -26,7 +23,7 @@
 from yt.geometry.geometry_handler import \
     Index, YTDataChunk
 from yt.data_objects.static_output import \
-    Dataset
+    Dataset, ParticleFile
 from yt.data_objects.octree_subset import \
     OctreeSubset
 from yt.geometry.oct_container import \
@@ -41,6 +38,8 @@
     get_box_grids_level
 from yt.data_objects.particle_unions import \
     ParticleUnion
+from yt.geometry.particle_geometry_handler import \
+    ParticleIndex
 
 from yt.frontends.art.definitions import *
 import yt.utilities.fortran_utils as fpu
@@ -94,7 +93,9 @@
         # The 1 here refers to domain_id == 1 always for ARTIO.
         self.domains = [ARTDomainFile(self.dataset, nv, 
                                       self.oct_handler, 1)]
-        self.octs_per_domain = [dom.level_count.sum() for dom in self.domains]
+        self.octs_per_domain = [dom.level_count.sum() for dom in
+        self.domains]
+        
         self.total_octs = sum(self.octs_per_domain)
         mylog.debug("Allocating %s octs", self.total_octs)
         self.oct_handler.allocate_domains(self.octs_per_domain)
@@ -389,13 +390,31 @@
                 return False
         return False
 
+class ARTParticleFile(ParticleFile):
+    def __init__(self, ds, io, filename, file_id):
+        super(ARTParticleFile, self).__init__(ds, io, filename, file_id)
+        self.total_particles = {}
+        for ptype, count in zip(ds.particle_types_raw, 
+                                ds.parameters['total_particles']):
+            self.total_particles[ptype] = count
+        with open(filename, "rb") as f:
+            self._position_offset = 0
+            f.seek(0, os.SEEK_END)
+            self._file_size = f.tell()
+
+
 class DarkMatterARTDataset(ARTDataset):
+    _index_class = ParticleIndex
+    _file_class = ARTParticleFile
+
     def __init__(self, filename, dataset_type='art',
                           fields=None, storage_filename=None,
                           skip_particles=False, skip_stars=False,
                  limit_level=None, spread_age=True,
                  force_max_level=None, file_particle_header=None,
                  file_particle_stars=None):
+        self.over_refine_factor = 1
+        self.n_ref = 64
         self.particle_types += ("all",)
         if fields is None:
             fields = particle_fields
@@ -410,6 +429,7 @@
         self.spread_age = spread_age
         self.domain_left_edge = np.zeros(3, dtype='float')
         self.domain_right_edge = np.zeros(3, dtype='float')+1.0
+        self.domain_dimensions = np.ones(3, dtype='int32')
         Dataset.__init__(self, filename, dataset_type)
         self.storage_filename = storage_filename
 
@@ -487,6 +507,8 @@
             int(os.stat(self.parameter_filename)[stat.ST_CTIME])
         self.parameters.update(constants)
         self.parameters['Time'] = 1.0
+        self.file_count = 1
+        self.filename_template = self.parameter_filename
 
         # read the particle header
         self.particle_types = []
@@ -523,7 +545,10 @@
             boxsize = np.fromfile(fh, count=1, dtype='>f4')
         n = nspecs
         particle_header_vals = {}
-        tmp = np.array([headerstr, aexpn, aexp0, amplt, astep, istep, partw, tintg, ekin, ekin1, ekin2, au0, aeu0, nrowc, ngridc, nspecs, nseed, Om0, Oml0, hubble, Wp5, Ocurv, wspecies, lspecies, extras, boxsize])
+        tmp = np.array([headerstr, aexpn, aexp0, amplt, astep, istep,
+            partw, tintg, ekin, ekin1, ekin2, au0, aeu0, nrowc, ngridc,
+            nspecs, nseed, Om0, Oml0, hubble, Wp5, Ocurv, wspecies,
+            lspecies, extras, boxsize])
         for i in range(len(tmp)):
             a1 = dmparticle_header_struct[0][i]
             a2 = dmparticle_header_struct[1][i]
@@ -556,6 +581,7 @@
         self.parameters['ng'] = self.parameters['Ngridc']
         self.parameters['ncell0'] = self.parameters['ng']**3
         self.parameters['boxh'] = self.parameters['boxsize']
+        self.parameters['total_particles'] = ls_nonzero
 
         # setup standard simulation params yt expects to see
         self.current_redshift = self.parameters["aexpn"]**-1.0 - 1.0

diff -r 4a98fe25c8ba873c680d50cea8047df8dafa8b21 -r 95199a83f260741d96b43bf2ba288436bb9d01f6 yt/frontends/art/io.py
--- a/yt/frontends/art/io.py
+++ b/yt/frontends/art/io.py
@@ -26,6 +26,7 @@
 from yt.utilities.logger import ytLogger as mylog
 from yt.frontends.art.definitions import *
 from yt.utilities.physical_constants import sec_per_year
+from yt.geometry.oct_container import _ORDER_MAX
 
 
 class IOHandlerART(BaseIOHandler):
@@ -85,6 +86,7 @@
             return mask
 
     def _read_particle_coords(self, chunks, ptf):
+        chunks = list(chunks)
         for chunk in chunks:
             for ptype, field_list in sorted(ptf.items()):
                 x = self._get_field((ptype, "particle_position_x"))
@@ -93,6 +95,7 @@
                 yield ptype, (x, y, z)
 
     def _read_particle_fields(self, chunks, ptf, selector):
+        chunks = list(chunks)
         for chunk in chunks:
             for ptype, field_list in sorted(ptf.items()):
                 x = self._get_field((ptype, "particle_position_x"))
@@ -177,6 +180,26 @@
         else:
             return tr[field]
 
+class IOHandlerDarkMatterART(IOHandlerART):
+    def _count_particles(self, data_file):
+        return self.ds.parameters['lspecies'][-1]
+
+    def _initialize_index(self, data_file, regions):
+        count = sum(data_file.total_particles.values())
+        DLE = data_file.ds.domain_left_edge
+        DRE = data_file.ds.domain_right_edge
+        dx = (DRE - DLE) / 2**_ORDER_MAX
+        with open(data_file.filename, "rb") as f:
+            # We add on an additionally 4 for the first record.
+            f.seek(data_file._position_offset + 4)
+            # The first total_particles * 3 values are positions
+            pp = np.fromfile(f, dtype = 'float32', count = count*3)
+            pp.shape = (count, 3)
+        regions.add_data_file(pp, data_file.file_id, data_file.ds.filter_bbox)
+        morton = compute_morton(pp[:,0], pp[:,1], pp[:,2], DLE, DRE,
+                                                data_file.ds.filter_bbox)
+        return morton
+
 def _determine_field_size(pf, field, lspecies, ptmax):
     pbool = np.zeros(len(lspecies), dtype="bool")
     idxas = np.concatenate(([0, ], lspecies[:-1]))
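
compute_morton here is yt's Cython helper that interleaves the bits of the
quantized x, y, z coordinates into a single Z-order (Morton) key, which
ParticleIndex then uses to assign particles to coarse index regions. A
pure-NumPy sketch of the underlying idea (the function name and its order
argument are illustrative, not yt's actual signature):

    import numpy as np

    def morton_keys(pos, dle, dre, order=10):
        """Z-order keys for an (N, 3) position array inside [dle, dre)."""
        # Quantize each coordinate onto a 2**order cell grid.
        cells = ((pos - dle) / (dre - dle) * (1 << order)).astype(np.uint64)
        cells = np.clip(cells, 0, (1 << order) - 1)
        keys = np.zeros(len(pos), dtype=np.uint64)
        for bit in range(order):
            for dim in range(3):
                keys |= (((cells[:, dim] >> np.uint64(bit)) & np.uint64(1))
                         << np.uint64(3 * bit + dim))
        return keys

Sorting by these keys clusters spatially adjacent particles together, which
is what makes the resulting index cheap to chunk.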


https://bitbucket.org/yt_analysis/yt/commits/5df9843186ab/
Changeset:   5df9843186ab
Branch:      yt
User:        karraki
Date:        2014-10-16 21:44:11+00:00
Summary:     further cleaned up loading ART data docs
Affected #:  1 file

diff -r 95199a83f260741d96b43bf2ba288436bb9d01f6 -r 5df9843186ab80b133371e2692eabd35532bf39a doc/source/examining/loading_data.rst
--- a/doc/source/examining/loading_data.rst
+++ b/doc/source/examining/loading_data.rst
@@ -16,8 +16,8 @@
 are interested in using yt for ART data, or if you are interested in assisting
 with development of yt to work with ART data.
 
-To load an ART dataset you can use the ``yt.load`` command and provide it the
-gas mesh file.
+To load an ART dataset you can use the ``yt.load`` command and provide
+it with the gas mesh file.
 
 .. code-block:: python
     
@@ -30,8 +30,8 @@
 particle header and data files. However, your simulations may not follow the
 same naming convention.
 
-For example, the single snapshot given in the sample data has a series of
-files that look like this:
+For example, the single snapshot given in the sample data has a series of files
+that look like this:
 
 .. code-block:: none
 
@@ -40,10 +40,11 @@
    PMcrs0a0.500.DAT               #Particle data (positions,velocities)
    stars_a0.500.dat               #Stellar data (metallicities, ages, etc.)
 
-The ART frontend tries to find the associated files matching the above, but if
-that fails you can specify ``file_particle_header``,``file_particle_data``, and
-``file_particle_stars``, in addition to specifying the gas mesh. Note that the
-``pta0.500.dat`` file containing particle times is not loaded by yt.
+The ART frontend tries to find the associated files matching the
+above, but if that fails you can specify ``file_particle_header``,
+``file_particle_data``, and ``file_particle_stars``, in addition to
+specifying the gas mesh. Note that the ``pta0.500.dat`` or ``pt.dat``
+file containing particle time steps is not loaded by yt.
 
 You also have the option of gridding particles and assigning them onto the
 meshes.  This process is in beta, and for the time being it's probably best to
@@ -59,8 +60,9 @@
 off you can set ``spread=False``, and you can tweak the age smoothing window
 by specifying the window in seconds, ``spread=1.0e7*365*24*3600``. 
 
-Preliminary support for dark matter ART data. To load a DM-only ART dataset you
-can use the ``yt.load`` command and provide it the particle data file.
+There is currently preliminary support for dark matter only ART data. To load a
+dataset use the ``yt.load`` command and provide it the particle data file. It
+will search for the complementary particle header file.
 
 .. code-block:: python
     
@@ -68,8 +70,9 @@
 
    ds = yt.load("PMcrs0a0.500.DAT")
 
-This should not be used for loading just the dark matter data for a 'regular'
-hydrodynamical data set as the units and IO are different!
+Important: This should not be used for loading just the dark matter
+data for a 'regular' hydrodynamical data set as the units and IO are
+different!
 
 
 .. _loading-artio-data:


https://bitbucket.org/yt_analysis/yt/commits/0f2b27e5dc11/
Changeset:   0f2b27e5dc11
Branch:      yt
User:        karraki
Date:        2014-10-17 19:43:20+00:00
Summary:     IO for DM-only ART particle file
Affected #:  2 files

diff -r 5df9843186ab80b133371e2692eabd35532bf39a -r 0f2b27e5dc1123967889fd03d120d105706b91de yt/frontends/art/data_structures.py
--- a/yt/frontends/art/data_structures.py
+++ b/yt/frontends/art/data_structures.py
@@ -40,6 +40,7 @@
     ParticleUnion
 from yt.geometry.particle_geometry_handler import \
     ParticleIndex
+from yt.utilities.lib.geometry_utils import compute_morton
 
 from yt.frontends.art.definitions import *
 import yt.utilities.fortran_utils as fpu
@@ -398,7 +399,6 @@
                                 ds.parameters['total_particles']):
             self.total_particles[ptype] = count
         with open(filename, "rb") as f:
-            self._position_offset = 0
             f.seek(0, os.SEEK_END)
             self._file_size = f.tell()
 
@@ -406,6 +406,7 @@
 class DarkMatterARTDataset(ARTDataset):
     _index_class = ParticleIndex
     _file_class = ARTParticleFile
+    filter_bbox = False
 
     def __init__(self, filename, dataset_type='art',
                           fields=None, storage_filename=None,
@@ -422,7 +423,6 @@
         self._fields_in_file = fields
         self._file_particle = filename
         self._file_particle_header = file_particle_header
-        self._file_particle_stars = file_particle_stars
         self._find_files(filename)
         self.parameter_filename = filename
         self.skip_stars = skip_stars

diff -r 5df9843186ab80b133371e2692eabd35532bf39a -r 0f2b27e5dc1123967889fd03d120d105706b91de yt/frontends/art/io.py
--- a/yt/frontends/art/io.py
+++ b/yt/frontends/art/io.py
@@ -26,6 +26,7 @@
 from yt.utilities.logger import ytLogger as mylog
 from yt.frontends.art.definitions import *
 from yt.utilities.physical_constants import sec_per_year
+from yt.utilities.lib.geometry_utils import compute_morton
 from yt.geometry.oct_container import _ORDER_MAX
 
 
@@ -185,19 +186,19 @@
         return self.ds.parameters['lspecies'][-1]
 
     def _initialize_index(self, data_file, regions):
-        count = sum(data_file.total_particles.values())
+        totcount = 4096**2 #file is always this size
+        count = data_file.ds.parameters['lspecies'][-1]
         DLE = data_file.ds.domain_left_edge
         DRE = data_file.ds.domain_right_edge
         dx = (DRE - DLE) / 2**_ORDER_MAX
         with open(data_file.filename, "rb") as f:
-            # We add on an additionally 4 for the first record.
-            f.seek(data_file._position_offset + 4)
             # The first total_particles * 3 values are positions
-            pp = np.fromfile(f, dtype = 'float32', count = count*3)
-            pp.shape = (count, 3)
-        regions.add_data_file(pp, data_file.file_id, data_file.ds.filter_bbox)
-        morton = compute_morton(pp[:,0], pp[:,1], pp[:,2], DLE, DRE,
-                                                data_file.ds.filter_bbox)
+            pp = np.fromfile(f, dtype = '>f4', count = totcount*3)
+            pp.shape = (3, totcount)
+            pp = pp[:,:count] #remove zeros
+            pp = np.transpose(pp)
+        regions.add_data_file(pp, data_file.file_id)
+        morton = compute_morton(pp[:,0], pp[:,1], pp[:,2], DLE, DRE)
         return morton
 
 def _determine_field_size(pf, field, lspecies, ptmax):
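
The rewritten _initialize_index reads the position block directly rather than
relying on a record offset: all 4096**2 big-endian values per coordinate are
pulled in at once, and the zero padding beyond the true particle count is
sliced away. The same read in isolation (the path and count are placeholders):

    import numpy as np

    PAGE = 4096 ** 2          # per-coordinate block size; the commit notes
                              # the file is always this size
    count = 131072            # real particle count, lspecies[-1] (placeholder)

    with open("PMcrs0a0.500.DAT", "rb") as f:      # placeholder path
        raw = np.fromfile(f, dtype='>f4', count=3 * PAGE)
    raw.shape = (3, PAGE)     # contiguous x, y, z blocks
    pos = np.transpose(raw[:, :count])             # trim padding -> (count, 3)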


https://bitbucket.org/yt_analysis/yt/commits/a1daa091ef0a/
Changeset:   a1daa091ef0a
Branch:      yt
User:        karraki
Date:        2014-10-17 21:17:48+00:00
Summary:     hard coded domain right edge, should change
Affected #:  2 files

diff -r 0f2b27e5dc1123967889fd03d120d105706b91de -r a1daa091ef0a382f06b8a8c8f217ab27b4bfe4a7 yt/frontends/art/data_structures.py
--- a/yt/frontends/art/data_structures.py
+++ b/yt/frontends/art/data_structures.py
@@ -427,9 +427,10 @@
         self.parameter_filename = filename
         self.skip_stars = skip_stars
         self.spread_age = spread_age
-        self.domain_left_edge = np.zeros(3, dtype='float')
-        self.domain_right_edge = np.zeros(3, dtype='float')+1.0
-        self.domain_dimensions = np.ones(3, dtype='int32')
+        self.domain_left_edge = np.zeros(3, dtype='float')+1.0
+        self.domain_right_edge = np.zeros(3, dtype='float')+513.0 # CHANGE THIS!!!
+        self.domain_dimensions = np.ones(3, dtype='int64')
+        print self.domain_dimensions
         Dataset.__init__(self, filename, dataset_type)
         self.storage_filename = storage_filename
 

diff -r 0f2b27e5dc1123967889fd03d120d105706b91de -r a1daa091ef0a382f06b8a8c8f217ab27b4bfe4a7 yt/frontends/art/io.py
--- a/yt/frontends/art/io.py
+++ b/yt/frontends/art/io.py
@@ -196,11 +196,22 @@
             pp = np.fromfile(f, dtype = '>f4', count = totcount*3)
             pp.shape = (3, totcount)
             pp = pp[:,:count] #remove zeros
-            pp = np.transpose(pp)
+            pp = np.transpose(pp).astype(np.float32) #cast as float32 for compute_morton
         regions.add_data_file(pp, data_file.file_id)
         morton = compute_morton(pp[:,0], pp[:,1], pp[:,2], DLE, DRE)
         return morton
 
+    def _identify_fields(self, domain):
+        field_list = []
+        tp = domain.total_particles
+        self.particle_field_list = [f for f in particle_fields]
+        for ptype in self.ds.particle_types_raw:
+            for pfield in self.particle_field_list:
+                pfn = (ptype, pfield)
+                field_list.append(pfn)
+        return field_list, {}
+
+
 def _determine_field_size(pf, field, lspecies, ptmax):
     pbool = np.zeros(len(lspecies), dtype="bool")
     idxas = np.concatenate(([0, ], lspecies[:-1]))
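
The new _identify_fields simply forms the cross product of the raw particle
types with the known particle field names. For a hypothetical two-species
snapshot and an abridged field list, the result looks like this:

    particle_types_raw = ('specie0', 'specie1')  # as built from the header
    particle_fields = ['particle_position_x', 'particle_mass']  # abridged

    field_list = [(ptype, pfield)
                  for ptype in particle_types_raw
                  for pfield in particle_fields]
    # [('specie0', 'particle_position_x'), ('specie0', 'particle_mass'),
    #  ('specie1', 'particle_position_x'), ('specie1', 'particle_mass')]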


https://bitbucket.org/yt_analysis/yt/commits/cd2e34a54e10/
Changeset:   cd2e34a54e10
Branch:      yt
User:        karraki
Date:        2014-10-17 22:24:17+00:00
Summary:     change dimensions to fix units
Affected #:  2 files

diff -r a1daa091ef0a382f06b8a8c8f217ab27b4bfe4a7 -r cd2e34a54e1037be0394666e259ec60ee200f8bb yt/frontends/art/data_structures.py
--- a/yt/frontends/art/data_structures.py
+++ b/yt/frontends/art/data_structures.py
@@ -427,10 +427,9 @@
         self.parameter_filename = filename
         self.skip_stars = skip_stars
         self.spread_age = spread_age
-        self.domain_left_edge = np.zeros(3, dtype='float')+1.0
-        self.domain_right_edge = np.zeros(3, dtype='float')+513.0 # CHANGE THIS!!!
-        self.domain_dimensions = np.ones(3, dtype='int64')
-        print self.domain_dimensions
+        self.domain_left_edge = np.zeros(3, dtype='float')
+        self.domain_right_edge = np.zeros(3, dtype='float')+1.0
+#        self.domain_dimensions = np.ones(3, dtype='int64')
         Dataset.__init__(self, filename, dataset_type)
         self.storage_filename = storage_filename
 
@@ -465,7 +464,6 @@
         Generates the conversion to various physical units based
                 on the parameters from the header
         """
-
         # spatial units
         z = self.current_redshift
         h = self.hubble_constant
@@ -583,6 +581,8 @@
         self.parameters['ncell0'] = self.parameters['ng']**3
         self.parameters['boxh'] = self.parameters['boxsize']
         self.parameters['total_particles'] = ls_nonzero
+        self.domain_dimensions = np.ones(3,
+                        dtype='int64')*self.parameters['ng']
 
         # setup standard simulation params yt expects to see
         self.current_redshift = self.parameters["aexpn"]**-1.0 - 1.0

diff -r a1daa091ef0a382f06b8a8c8f217ab27b4bfe4a7 -r cd2e34a54e1037be0394666e259ec60ee200f8bb yt/frontends/art/io.py
--- a/yt/frontends/art/io.py
+++ b/yt/frontends/art/io.py
@@ -197,6 +197,7 @@
             pp.shape = (3, totcount)
             pp = pp[:,:count] #remove zeros
             pp = np.transpose(pp).astype(np.float32) #cast as float32 for compute_morton
+            pp = (pp - 1.)/data_file.ds.parameters['ng'] #correct the dm particle units
         regions.add_data_file(pp, data_file.file_id)
         morton = compute_morton(pp[:,0], pp[:,1], pp[:,2], DLE, DRE)
         return morton
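
The (pp - 1.)/ng correction reflects how ART stores particle positions: in
root-grid units on [1, ng+1), whereas yt's code units span [0, 1). A worked
value (the numbers are illustrative):

    ng = 256                       # root grid size from the header
    x_grid = 129.0                 # ART grid coordinate, in [1, ng + 1)
    x_code = (x_grid - 1.0) / ng   # 0.5: the center of the unit domain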


https://bitbucket.org/yt_analysis/yt/commits/b1e5abdd5d53/
Changeset:   b1e5abdd5d53
Branch:      yt
User:        karraki
Date:        2014-10-17 22:50:50+00:00
Summary:     changed fields to remove all star type particles
Affected #:  2 files

diff -r cd2e34a54e1037be0394666e259ec60ee200f8bb -r b1e5abdd5d53d55423db632e2de853a3ee126fe7 yt/frontends/art/data_structures.py
--- a/yt/frontends/art/data_structures.py
+++ b/yt/frontends/art/data_structures.py
@@ -429,7 +429,6 @@
         self.spread_age = spread_age
         self.domain_left_edge = np.zeros(3, dtype='float')
         self.domain_right_edge = np.zeros(3, dtype='float')+1.0
-#        self.domain_dimensions = np.ones(3, dtype='int64')
         Dataset.__init__(self, filename, dataset_type)
         self.storage_filename = storage_filename
 
@@ -604,6 +603,12 @@
         self.gamma = self.parameters["gamma"]
         mylog.info("Max level is %02i", self.max_level)
 
+    def create_field_info(self):
+        super(ARTDataset, self).create_field_info()
+        ptr = self.particle_types_raw
+        pu = ParticleUnion("darkmatter", list(ptr))
+        self.add_particle_union(pu)
+        pass
 
     @classmethod
     def _is_valid(self, *args, **kwargs):

diff -r cd2e34a54e1037be0394666e259ec60ee200f8bb -r b1e5abdd5d53d55423db632e2de853a3ee126fe7 yt/frontends/art/io.py
--- a/yt/frontends/art/io.py
+++ b/yt/frontends/art/io.py
@@ -212,6 +212,72 @@
                 field_list.append(pfn)
         return field_list, {}
 
+    def _get_field(self,  field):
+        if field in self.cache.keys() and self.caching:
+            mylog.debug("Cached %s", str(field))
+            return self.cache[field]
+        mylog.debug("Reading %s", str(field))
+        tr = {}
+        ftype, fname = field
+        ptmax = self.ws[-1]
+        pbool, idxa, idxb = _determine_field_size(self.ds, ftype, 
+                                                  self.ls, ptmax)
+        npa = idxb - idxa
+        sizes = np.diff(np.concatenate(([0], self.ls)))
+        rp = lambda ax: read_particles(
+            self.file_particle, self.Nrow, idxa=idxa,
+            idxb=idxb, fields=ax)
+        for i, ax in enumerate('xyz'):
+            if fname.startswith("particle_position_%s" % ax):
+                dd = self.ds.domain_dimensions[0]
+                off = 1.0/dd
+                tr[field] = rp([ax])[0]/dd - off
+            if fname.startswith("particle_velocity_%s" % ax):
+                tr[field], = rp(['v'+ax])
+        if fname == "particle_mass":
+            a = 0
+            data = np.zeros(npa, dtype='f8')
+            for ptb, size, m in zip(pbool, sizes, self.ws):
+                if ptb:
+                    data[a:a+size] = m
+                    a += size
+            tr[field] = data
+        elif fname == "particle_index":
+            tr[field] = np.arange(idxa, idxb)
+        elif fname == "particle_type":
+            a = 0
+            data = np.zeros(npa, dtype='int')
+            for i, (ptb, size) in enumerate(zip(pbool, sizes)):
+                if ptb:
+                    data[a: a + size] = i
+                    a += size
+            tr[field] = data
+        if fname == "particle_creation_time":
+            self.tb, self.ages, data = interpolate_ages(
+                tr[field][-nstars:],
+                self.file_stars,
+                self.tb,
+                self.ages,
+                self.ds.current_time)
+            temp = tr.get(field, np.zeros(npa, 'f8'))
+            temp[-nstars:] = data
+            tr[field] = temp
+            del data
+        # We check again, after it's been filled
+        if fname == "particle_mass":
+            # We now divide by NGrid in order to make this match up.  Note that
+            # this means that even when requested in *code units*, we are
+            # giving them as modified by the ng value.  This only works for
+            # dark_matter -- stars are regular matter.
+            tr[field] /= self.ds.domain_dimensions.prod()
+        if tr == {}:
+            tr = dict((f, np.array([])) for f in fields)
+        if self.caching:
+            self.cache[field] = tr[field]
+            return self.cache[field]
+        else:
+            return tr[field]
+
 
 def _determine_field_size(pf, field, lspecies, ptmax):
     pbool = np.zeros(len(lspecies), dtype="bool")
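
Together with the "darkmatter" particle union added in create_field_info, the
new _get_field lets the data be reached through yt's ordinary field interface.
A hedged end-to-end sketch (file name taken from the docs above; which fields
resolve depends on the snapshot):

    import yt

    ds = yt.load("PMcrs0a0.500.DAT")
    ad = ds.all_data()

    # Positions come back in code units on [0, 1); particle_mass carries the
    # per-species weights, rescaled as the in-code comment above describes.
    x = ad["darkmatter", "particle_position_x"]
    m = ad["darkmatter", "particle_mass"]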


https://bitbucket.org/yt_analysis/yt/commits/090d7547a53d/
Changeset:   090d7547a53d
Branch:      yt
User:        karraki
Date:        2014-11-20 22:32:31+00:00
Summary:     Fixing domain dimensions and particles
Affected #:  2 files

diff -r b1e5abdd5d53d55423db632e2de853a3ee126fe7 -r 090d7547a53dd9443611efa6d7c5755824343ffc yt/frontends/art/data_structures.py
--- a/yt/frontends/art/data_structures.py
+++ b/yt/frontends/art/data_structures.py
@@ -581,7 +581,7 @@
         self.parameters['boxh'] = self.parameters['boxsize']
         self.parameters['total_particles'] = ls_nonzero
         self.domain_dimensions = np.ones(3,
-                        dtype='int64')*self.parameters['ng']
+                        dtype='int64')*2 # NOT ng
 
         # setup standard simulation params yt expects to see
         self.current_redshift = self.parameters["aexpn"]**-1.0 - 1.0

diff -r b1e5abdd5d53d55423db632e2de853a3ee126fe7 -r 090d7547a53dd9443611efa6d7c5755824343ffc yt/frontends/art/io.py
--- a/yt/frontends/art/io.py
+++ b/yt/frontends/art/io.py
@@ -229,7 +229,8 @@
             idxb=idxb, fields=ax)
         for i, ax in enumerate('xyz'):
             if fname.startswith("particle_position_%s" % ax):
-                dd = self.ds.domain_dimensions[0]
+                # This is not the same as domain_dimensions
+                dd = self.ds.parameters['ng']
                 off = 1.0/dd
                 tr[field] = rp([ax])[0]/dd - off
             if fname.startswith("particle_velocity_%s" % ax):


https://bitbucket.org/yt_analysis/yt/commits/93ac43b4f5d3/
Changeset:   93ac43b4f5d3
Branch:      yt
User:        karraki
Date:        2014-11-20 23:28:20+00:00
Summary:     Check and fail when DM file from hydro run is passed to load
Affected #:  1 file

diff -r 090d7547a53dd9443611efa6d7c5755824343ffc -r 93ac43b4f5d30688878256f51103456f3a758cd7 yt/frontends/art/data_structures.py
--- a/yt/frontends/art/data_structures.py
+++ b/yt/frontends/art/data_structures.py
@@ -199,7 +199,7 @@
         """
         base_prefix, base_suffix = filename_pattern['amr']
         aexpstr = 'a'+file_amr.rsplit('a',1)[1].replace(base_suffix,'')
-        possibles = glob.glob(os.path.dirname(file_amr)+"/*")
+        possibles = glob.glob(os.path.dirname(os.path.abspath(file_amr))+"/*")
         for filetype, (prefix, suffix) in filename_pattern.iteritems():
             # if this attribute is already set skip it
             if getattr(self, "_file_"+filetype, None) is not None:
@@ -439,7 +439,7 @@
         """
         base_prefix, base_suffix = filename_pattern['particle_data']
         aexpstr = file_particle.rsplit('s0',1)[1].replace(base_suffix,'')
-        possibles = glob.glob(os.path.dirname(file_particle)+"/*")
+        possibles = glob.glob(os.path.dirname(os.path.abspath(file_particle))+"/*")
         for filetype, (prefix, suffix) in filename_pattern.iteritems():
             # if this attribute is already set skip it
             if getattr(self, "_file_"+filetype, None) is not None:
@@ -624,6 +624,15 @@
             return False
         with open(f, 'rb') as fh:
             try:
+                amr_prefix, amr_suffix = filename_pattern['amr']
+                possibles = glob.glob(os.path.dirname(os.path.abspath(f))+"/*")
+                for possible in possibles:
+                    if possible.endswith(amr_suffix):
+                        if os.path.basename(possible).startswith(amr_prefix):
+                            return False
+            except:
+                pass
+            try:
                 seek = 4
                 fh.seek(seek)
                 headerstr = np.fromfile(fh, count=1, dtype=(str,45))
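
The guard added to _is_valid encodes a simple rule: if a gas-mesh (AMR) file
with the expected prefix and suffix sits in the same directory, the particle
file belongs to a hydro run and must be loaded through ARTDataset instead. The
check in isolation (the prefix and suffix values are examples standing in for
filename_pattern['amr']):

    import glob
    import os

    def has_companion_amr_file(particle_file,
                               amr_prefix="10MpcBox", amr_suffix=".d"):
        """True if a gas-mesh file sits next to the given particle file."""
        dirname = os.path.dirname(os.path.abspath(particle_file))
        for possible in glob.glob(dirname + "/*"):
            if (possible.endswith(amr_suffix) and
                    os.path.basename(possible).startswith(amr_prefix)):
                return True
        return False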


https://bitbucket.org/yt_analysis/yt/commits/ea716eba60f6/
Changeset:   ea716eba60f6
Branch:      yt
User:        MatthewTurk
Date:        2015-01-04 05:12:22+00:00
Summary:     Merging in ART DM frontend.
Affected #:  4 files

diff -r e88ef09175f96b2d336ba2597f68df966458fb0f -r ea716eba60f6a36dfee7274b9503822c70af0ffe doc/source/examining/loading_data.rst
--- a/doc/source/examining/loading_data.rst
+++ b/doc/source/examining/loading_data.rst
@@ -11,32 +11,46 @@
 ART Data
 --------
 
-ART data enjoys preliminary support and has been supported in the past by
-Christopher Moody.  Please contact the ``yt-dev`` mailing list if you are
-interested in using yt for ART data, or if you are interested in assisting with
-development of yt to work with ART data.
+ART data has been supported in the past by Christopher Moody and is currently
+cared for by Kenza Arraki.  Please contact the ``yt-dev`` mailing list if you
+are interested in using yt for ART data, or if you are interested in assisting
+with development of yt to work with ART data.
 
 To load an ART dataset you can use the ``yt.load`` command and provide it the
 gas mesh file. It will search for and attempt to find the complementary dark
 matter and stellar particle header and data files. However, your simulations may
 not follow the same naming convention.
 
-So for example, a single snapshot might have a series of files looking like
-this:
+.. code-block:: python
+    
+   import yt
+
+   ds = yt.load("D9p_500/10MpcBox_HartGal_csf_a0.500.d")
+
+
+It will search for and attempt to find the complementary dark matter and stellar
+particle header and data files. However, your simulations may not follow the
+same naming convention.
+
+For example, the single snapshot given in the sample data has a series of files
+that look like this:
 
 .. code-block:: none
 
-   10MpcBox_csf512_a0.300.d    #Gas mesh
-   PMcrda0.300.DAT             #Particle header
-   PMcrs0a0.300.DAT            #Particle data (positions,velocities)
-   stars_a0.300.dat            #Stellar data (metallicities, ages, etc.)
+   10MpcBox_HartGal_csf_a0.500.d  #Gas mesh
+   PMcrda0.500.DAT                #Particle header
+   PMcrs0a0.500.DAT               #Particle data (positions,velocities)
+   stars_a0.500.dat               #Stellar data (metallicities, ages, etc.)
 
-The ART frontend tries to find the associated files matching the above, but
-if that fails you can specify ``file_particle_data``,``file_particle_data``,
-``file_star_data`` in addition to the specifying the gas mesh. You also have 
-the option of gridding particles, and assigning them onto the meshes.
-This process is in beta, and for the time being it's probably  best to leave
-``do_grid_particles=False`` as the default.
+The ART frontend tries to find the associated files matching the
+above, but if that fails you can specify ``file_particle_header``,
+``file_particle_data``, and ``file_particle_stars``, in addition to
+specifying the gas mesh. Note that the ``pta0.500.dat`` or ``pt.dat``
+file containing particle time steps is not loaded by yt.
+
+You also have the option of gridding particles and assigning them onto the
+meshes.  This process is in beta, and for the time being it's probably best to
+leave ``do_grid_particles=False`` as the default.
 
 To speed up the loading of an ART file, you have a few options. You can turn 
 off the particles entirely by setting ``discover_particles=False``. You can
@@ -46,13 +60,22 @@
 Finally, when stellar ages are computed we 'spread' the ages evenly within a
 smoothing window. By default this is turned on and set to 10Myr. To turn this 
 off you can set ``spread=False``, and you can tweak the age smoothing window
-by specifying the window in seconds, ``spread=1.0e7*265*24*3600``. 
+by specifying the window in seconds, ``spread=1.0e7*365*24*3600``. 
+
+There is currently preliminary support for dark matter only ART data. To load a
+dataset use the ``yt.load`` command and provide it the particle data file. It
+will search for the complementary particle header file.
 
 .. code-block:: python
     
    import yt
 
-   ds = yt.load("SFG1/10MpcBox_csf512_a0.460.d")
+   ds = yt.load("PMcrs0a0.500.DAT")
+
+Important: This should not be used for loading just the dark matter
+data for a 'regular' hydrodynamical data set as the units and IO are
+different!
+
 
 .. _loading-artio-data:
 

diff -r e88ef09175f96b2d336ba2597f68df966458fb0f -r ea716eba60f6a36dfee7274b9503822c70af0ffe yt/frontends/art/data_structures.py
--- a/yt/frontends/art/data_structures.py
+++ b/yt/frontends/art/data_structures.py
@@ -1,8 +1,5 @@
 """
 ART-specific data structures
-
-
-
 """
 
 #-----------------------------------------------------------------------------
@@ -13,7 +10,7 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 import numpy as np
-import os.path
+import os
 import stat
 import weakref
 import cStringIO
@@ -26,7 +23,7 @@
 from yt.geometry.geometry_handler import \
     Index, YTDataChunk
 from yt.data_objects.static_output import \
-    Dataset
+    Dataset, ParticleFile
 from yt.data_objects.octree_subset import \
     OctreeSubset
 from yt.geometry.oct_container import \
@@ -41,6 +38,9 @@
     get_box_grids_level
 from yt.data_objects.particle_unions import \
     ParticleUnion
+from yt.geometry.particle_geometry_handler import \
+    ParticleIndex
+from yt.utilities.lib.geometry_utils import compute_morton
 
 from yt.frontends.art.definitions import *
 import yt.utilities.fortran_utils as fpu
@@ -48,6 +48,7 @@
 from .io import _read_child_level
 from .io import _read_root_level
 from .io import b2t
+from .io import a2b
 
 from yt.utilities.definitions import \
     mpc_conversion, sec_conversion
@@ -93,7 +94,9 @@
         # The 1 here refers to domain_id == 1 always for ARTIO.
         self.domains = [ARTDomainFile(self.dataset, nv, 
                                       self.oct_handler, 1)]
-        self.octs_per_domain = [dom.level_count.sum() for dom in self.domains]
+        self.octs_per_domain = [dom.level_count.sum() for dom in
+        self.domains]
+        
         self.total_octs = sum(self.octs_per_domain)
         mylog.debug("Allocating %s octs", self.total_octs)
         self.oct_handler.allocate_domains(self.octs_per_domain)
@@ -198,7 +201,7 @@
         """
         base_prefix, base_suffix = filename_pattern['amr']
         aexpstr = 'a'+file_amr.rsplit('a',1)[1].replace(base_suffix,'')
-        possibles = glob.glob(os.path.dirname(file_amr)+"/*")
+        possibles = glob.glob(os.path.dirname(os.path.abspath(file_amr))+"/*")
         for filetype, (prefix, suffix) in filename_pattern.iteritems():
             # if this attribute is already set skip it
             if getattr(self, "_file_"+filetype, None) is not None:
@@ -378,7 +381,10 @@
         """
         f = ("%s" % args[0])
         prefix, suffix = filename_pattern['amr']
-        if not os.path.isfile(f): return False
+        if not os.path.isfile(f):
+            return False
+        if not f.endswith(suffix):
+            return False
         with open(f, 'rb') as fh:
             try:
                 amr_header_vals = fpu.read_attrs(fh, amr_header_struct, '>')
@@ -387,6 +393,282 @@
                 return False
         return False
 
+class ARTParticleFile(ParticleFile):
+    def __init__(self, ds, io, filename, file_id):
+        super(ARTParticleFile, self).__init__(ds, io, filename, file_id)
+        self.total_particles = {}
+        for ptype, count in zip(ds.particle_types_raw, 
+                                ds.parameters['total_particles']):
+            self.total_particles[ptype] = count
+        with open(filename, "rb") as f:
+            f.seek(0, os.SEEK_END)
+            self._file_size = f.tell()
+
+
+class DarkMatterARTDataset(ARTDataset):
+    _index_class = ParticleIndex
+    _file_class = ARTParticleFile
+    filter_bbox = False
+
+    def __init__(self, filename, dataset_type='art',
+                          fields=None, storage_filename=None,
+                          skip_particles=False, skip_stars=False,
+                 limit_level=None, spread_age=True,
+                 force_max_level=None, file_particle_header=None,
+                 file_particle_stars=None):
+        self.over_refine_factor = 1
+        self.n_ref = 64
+        self.particle_types += ("all",)
+        if fields is None:
+            fields = particle_fields
+            filename = os.path.abspath(filename)
+        self._fields_in_file = fields
+        self._file_particle = filename
+        self._file_particle_header = file_particle_header
+        self._find_files(filename)
+        self.parameter_filename = filename
+        self.skip_stars = skip_stars
+        self.spread_age = spread_age
+        self.domain_left_edge = np.zeros(3, dtype='float')
+        self.domain_right_edge = np.zeros(3, dtype='float')+1.0
+        Dataset.__init__(self, filename, dataset_type)
+        self.storage_filename = storage_filename
+
+    def _find_files(self, file_particle):
+        """
+        Given the particle base filename, attempt to find the
+        particle header and star files.
+        """
+        base_prefix, base_suffix = filename_pattern['particle_data']
+        aexpstr = file_particle.rsplit('s0',1)[1].replace(base_suffix,'')
+        possibles = glob.glob(os.path.dirname(os.path.abspath(file_particle))+"/*")
+        for filetype, (prefix, suffix) in filename_pattern.iteritems():
+            # if this attribute is already set skip it
+            if getattr(self, "_file_"+filetype, None) is not None:
+                continue
+            match = None
+            for possible in possibles:
+                if possible.endswith(aexpstr+suffix):
+                    if os.path.basename(possible).startswith(prefix):
+                        match = possible
+            if match is not None:
+                mylog.info('discovered %s:%s', filetype, match)
+                setattr(self, "_file_"+filetype, match)
+            else:
+                setattr(self, "_file_"+filetype, None)
+
+    def __repr__(self):
+        return self._file_particle.split('/')[-1]
+
+    def _set_code_unit_attributes(self):
+        """
+        Generates the conversion to various physical units based
+                on the parameters from the header
+        """
+        # spatial units
+        z = self.current_redshift
+        h = self.hubble_constant
+        boxcm_cal = self.parameters["boxh"]
+        boxcm_uncal = boxcm_cal / h
+        box_proper = boxcm_uncal/(1+z)
+        aexpn = self.parameters["aexpn"]
+
+        # all other units
+        wmu = self.parameters["wmu"]
+        Om0 = self.parameters['Om0']
+        ng = self.parameters['ng']
+        boxh = self.parameters['boxh']
+        aexpn = self.parameters["aexpn"]
+        hubble = self.parameters['hubble']
+
+        r0 = boxh/ng
+        rho0 = 2.776e11 * hubble**2.0 * Om0
+        aM0 = rho0 * (boxh/hubble)**3.0 / ng**3.0
+        velocity = 100.0*r0/aexpn*1.0e5  # proper cm/s
+        mass = aM0 * 1.98892e33
+
+        self.cosmological_simulation = True
+        self.mass_unit = self.quan(mass, "g*%s" % ng**3)
+        self.length_unit = self.quan(box_proper, "Mpc")
+        self.velocity_unit = self.quan(velocity, "cm/s")
+        self.time_unit = self.length_unit / self.velocity_unit
+
+
+    def _parse_parameter_file(self):
+        """
+        Get the various simulation parameters & constants.
+        """
+        self.dimensionality = 3
+        self.refine_by = 2
+        self.periodicity = (True, True, True)
+        self.cosmological_simulation = True
+        self.parameters = {}
+        self.unique_identifier = \
+            int(os.stat(self.parameter_filename)[stat.ST_CTIME])
+        self.parameters.update(constants)
+        self.parameters['Time'] = 1.0
+        self.file_count = 1
+        self.filename_template = self.parameter_filename
+
+        # read the particle header
+        self.particle_types = []
+        self.particle_types_raw = ()
+        assert self._file_particle_header
+        with open(self._file_particle_header, "rb") as fh:
+            seek = 4
+            fh.seek(seek)
+            headerstr = np.fromfile(fh, count=1, dtype=(str,45))
+            aexpn = np.fromfile(fh, count=1, dtype='>f4')
+            aexp0 = np.fromfile(fh, count=1, dtype='>f4')
+            amplt = np.fromfile(fh, count=1, dtype='>f4')
+            astep = np.fromfile(fh, count=1, dtype='>f4')
+            istep = np.fromfile(fh, count=1, dtype='>i4')
+            partw = np.fromfile(fh, count=1, dtype='>f4')
+            tintg = np.fromfile(fh, count=1, dtype='>f4')
+            ekin = np.fromfile(fh, count=1, dtype='>f4')
+            ekin1 = np.fromfile(fh, count=1, dtype='>f4')
+            ekin2 = np.fromfile(fh, count=1, dtype='>f4')
+            au0 = np.fromfile(fh, count=1, dtype='>f4')
+            aeu0 = np.fromfile(fh, count=1, dtype='>f4')
+            nrowc = np.fromfile(fh, count=1, dtype='>i4')
+            ngridc = np.fromfile(fh, count=1, dtype='>i4')
+            nspecs = np.fromfile(fh, count=1, dtype='>i4')
+            nseed = np.fromfile(fh, count=1, dtype='>i4')
+            Om0 = np.fromfile(fh, count=1, dtype='>f4')
+            Oml0 = np.fromfile(fh, count=1, dtype='>f4')
+            hubble = np.fromfile(fh, count=1, dtype='>f4')
+            Wp5 = np.fromfile(fh, count=1, dtype='>f4')
+            Ocurv = np.fromfile(fh, count=1, dtype='>f4')
+            wspecies = np.fromfile(fh, count=10, dtype='>f4')
+            lspecies = np.fromfile(fh, count=10, dtype='>i4')
+            extras = np.fromfile(fh, count=79, dtype='>f4')
+            boxsize = np.fromfile(fh, count=1, dtype='>f4')
+        n = int(nspecs)  # number of particle species
+        particle_header_vals = {}
+        tmp = np.array([headerstr, aexpn, aexp0, amplt, astep, istep,
+            partw, tintg, ekin, ekin1, ekin2, au0, aeu0, nrowc, ngridc,
+            nspecs, nseed, Om0, Oml0, hubble, Wp5, Ocurv, wspecies,
+            lspecies, extras, boxsize])
+        for i in range(len(tmp)):
+            name = dmparticle_header_struct[0][i]
+            count = dmparticle_header_struct[1][i]
+            if count == 1:
+                particle_header_vals[name] = tmp[i][0]
+            else:
+                particle_header_vals[name] = tmp[i][:count]
+        for specie in range(n):
+            self.particle_types.append("specie%i" % specie)
+        self.particle_types_raw = tuple(self.particle_types)
+        ls_nonzero = np.diff(lspecies)[:n-1]
+        ls_nonzero = np.append(lspecies[0], ls_nonzero)
+        self.star_type = len(ls_nonzero)
+        mylog.info("Discovered %i species of particles", len(ls_nonzero))
+        mylog.info("Particle populations: "+'%9i '*len(ls_nonzero),
+                   *ls_nonzero)
+        for k, v in particle_header_vals.items():
+            if k in self.parameters:
+                if self.parameters[k] != v:
+                    mylog.info(
+                        "Inconsistent parameter %s %1.1e  %1.1e", k, v,
+                        self.parameters[k])
+            else:
+                self.parameters[k] = v
+        self.parameters_particles = particle_header_vals
+        self.parameters.update(particle_header_vals)
+        self.parameters['wspecies'] = wspecies[:n]
+        self.parameters['lspecies'] = lspecies[:n]
+        self.parameters['ng'] = self.parameters['Ngridc']
+        self.parameters['ncell0'] = self.parameters['ng']**3
+        self.parameters['boxh'] = self.parameters['boxsize']
+        self.parameters['total_particles'] = ls_nonzero
+        self.domain_dimensions = np.ones(3, dtype='int64') * 2  # NOT ng
+
+        # setup standard simulation params yt expects to see
+        self.current_redshift = self.parameters["aexpn"]**-1.0 - 1.0
+        self.omega_lambda = particle_header_vals['Oml0']
+        self.omega_matter = particle_header_vals['Om0']
+        self.hubble_constant = particle_header_vals['hubble']
+        self.min_level = 0
+        self.max_level = 0
+#        self.min_level = particle_header_vals['min_level']
+#        self.max_level = particle_header_vals['max_level']
+#        if self.limit_level is not None:
+#            self.max_level = min(
+#                self.limit_level, particle_header_vals['max_level'])
+#        if self.force_max_level is not None:
+#            self.max_level = self.force_max_level
+        # 1/H0 in seconds; 3.08568025e19 km per Mpc
+        self.hubble_time = 1.0/(self.hubble_constant*100/3.08568025e19)
+        self.parameters['t'] = a2b(self.parameters['aexpn'])
+        self.current_time = b2t(self.parameters['t']) * sec_per_Gyr
+        self.gamma = self.parameters["gamma"]
+        mylog.info("Max level is %02i", self.max_level)
+
+    def create_field_info(self):
+        # skips ARTDataset's create_field_info and calls the base class
+        super(ARTDataset, self).create_field_info()
+        ptr = self.particle_types_raw
+        pu = ParticleUnion("darkmatter", list(ptr))
+        self.add_particle_union(pu)
+
+    @classmethod
+    def _is_valid(cls, *args, **kwargs):
+        """
+        Defined for the NMSU file naming scheme.
+        This could differ for other formats.
+        """
+        f = ("%s" % args[0])
+        prefix, suffix = filename_pattern['particle_data']
+        if not os.path.isfile(f):
+            return False
+        if not f.endswith(suffix):
+            return False
+        with open(f, 'rb') as fh:
+            try:
+                # an AMR file alongside the particle file means this is
+                # a full ART run, not a DM-only one
+                amr_prefix, amr_suffix = filename_pattern['amr']
+                possibles = glob.glob(os.path.dirname(os.path.abspath(f))+"/*")
+                for possible in possibles:
+                    if possible.endswith(amr_suffix):
+                        if os.path.basename(possible).startswith(amr_prefix):
+                            return False
+            except Exception:
+                pass
+            try:
+                seek = 4
+                fh.seek(seek)
+                headerstr = np.fromfile(fh, count=1, dtype=(str,45))
+                aexpn = np.fromfile(fh, count=1, dtype='>f4')
+                aexp0 = np.fromfile(fh, count=1, dtype='>f4')
+                amplt = np.fromfile(fh, count=1, dtype='>f4')
+                astep = np.fromfile(fh, count=1, dtype='>f4')
+                istep = np.fromfile(fh, count=1, dtype='>i4')
+                partw = np.fromfile(fh, count=1, dtype='>f4')
+                tintg = np.fromfile(fh, count=1, dtype='>f4')
+                ekin = np.fromfile(fh, count=1, dtype='>f4')
+                ekin1 = np.fromfile(fh, count=1, dtype='>f4')
+                ekin2 = np.fromfile(fh, count=1, dtype='>f4')
+                au0 = np.fromfile(fh, count=1, dtype='>f4')
+                aeu0 = np.fromfile(fh, count=1, dtype='>f4')
+                nrowc = np.fromfile(fh, count=1, dtype='>i4')
+                ngridc = np.fromfile(fh, count=1, dtype='>i4')
+                nspecs = np.fromfile(fh, count=1, dtype='>i4')
+                nseed = np.fromfile(fh, count=1, dtype='>i4')
+                Om0 = np.fromfile(fh, count=1, dtype='>f4')
+                Oml0 = np.fromfile(fh, count=1, dtype='>f4')
+                hubble = np.fromfile(fh, count=1, dtype='>f4')
+                Wp5 = np.fromfile(fh, count=1, dtype='>f4')
+                Ocurv = np.fromfile(fh, count=1, dtype='>f4')
+                wspecies = np.fromfile(fh, count=10, dtype='>f4')
+                lspecies = np.fromfile(fh, count=10, dtype='>i4')
+                extras = np.fromfile(fh, count=79, dtype='>f4')
+                boxsize = np.fromfile(fh, count=1, dtype='>f4')
+                return True
+            except Exception:
+                return False
+        return False
+
+
 class ARTDomainSubset(OctreeSubset):
 
     def fill(self, content, ftfields, selector):

diff -r e88ef09175f96b2d336ba2597f68df966458fb0f -r ea716eba60f6a36dfee7274b9503822c70af0ffe yt/frontends/art/definitions.py
--- a/yt/frontends/art/definitions.py
+++ b/yt/frontends/art/definitions.py
@@ -94,6 +94,21 @@
      '45sffffi'+'fffffff'+'iiii'+'ffffff'+'396s'+'f')
 ]
 
+dmparticle_header_struct = [
+     ('header',
+     'aexpn', 'aexp0', 'amplt', 'astep',
+     'istep',
+     'partw', 'tintg',
+     'Ekin', 'Ekin1', 'Ekin2',
+     'au0', 'aeu0',
+     'Nrow', 'Ngridc', 'Nspecies', 'Nseed',
+     'Om0', 'Oml0', 'hubble', 'Wp5', 'Ocurv',
+     'wspecies','lspecies',
+     'extras', 'boxsize'),
+     (1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+     1,1,1,10,10,79,1)
+]
+
 star_struct = [
     ('>d', ('tdum', 'adum')),
     ('>i', 'nstars'),

diff -r e88ef09175f96b2d336ba2597f68df966458fb0f -r ea716eba60f6a36dfee7274b9503822c70af0ffe yt/frontends/art/io.py
--- a/yt/frontends/art/io.py
+++ b/yt/frontends/art/io.py
@@ -26,6 +26,8 @@
 from yt.utilities.logger import ytLogger as mylog
 from yt.frontends.art.definitions import *
 from yt.utilities.physical_constants import sec_per_year
+from yt.utilities.lib.geometry_utils import compute_morton
+from yt.geometry.oct_container import _ORDER_MAX
 
 
 class IOHandlerART(BaseIOHandler):
@@ -85,6 +87,7 @@
             return mask
 
     def _read_particle_coords(self, chunks, ptf):
+        chunks = list(chunks)
         for chunk in chunks:
             for ptype, field_list in sorted(ptf.items()):
                 x = self._get_field((ptype, "particle_position_x"))
@@ -93,6 +96,7 @@
                 yield ptype, (x, y, z)
 
     def _read_particle_fields(self, chunks, ptf, selector):
+        chunks = list(chunks)
         for chunk in chunks:
             for ptype, field_list in sorted(ptf.items()):
                 x = self._get_field((ptype, "particle_position_x"))
@@ -177,6 +181,105 @@
         else:
             return tr[field]
 
+class IOHandlerDarkMatterART(IOHandlerART):
+    def _count_particles(self, data_file):
+        return self.ds.parameters['lspecies'][-1]
+
+    def _initialize_index(self, data_file, regions):
+        totcount = 4096**2  # the file always holds this many values per axis
+        count = data_file.ds.parameters['lspecies'][-1]
+        DLE = data_file.ds.domain_left_edge
+        DRE = data_file.ds.domain_right_edge
+        dx = (DRE - DLE) / 2**_ORDER_MAX
+        with open(data_file.filename, "rb") as f:
+            # The first total_particles * 3 values are positions
+            pp = np.fromfile(f, dtype='>f4', count=totcount*3)
+            pp.shape = (3, totcount)
+            pp = pp[:, :count]  # drop the zero padding past the real particle count
+            pp = np.transpose(pp).astype(np.float32)  # (N, 3) float32 for compute_morton
+            pp = (pp - 1.) / data_file.ds.parameters['ng']  # grid units -> [0, 1) domain
+        regions.add_data_file(pp, data_file.file_id)
+        morton = compute_morton(pp[:,0], pp[:,1], pp[:,2], DLE, DRE)
+        return morton
+
+    def _identify_fields(self, domain):
+        field_list = []
+        tp = domain.total_particles
+        self.particle_field_list = [f for f in particle_fields]
+        for ptype in self.ds.particle_types_raw:
+            for pfield in self.particle_field_list:
+                pfn = (ptype, pfield)
+                field_list.append(pfn)
+        return field_list, {}
+
+    def _get_field(self, field):
+        if self.caching and field in self.cache:
+            mylog.debug("Cached %s", str(field))
+            return self.cache[field]
+        mylog.debug("Reading %s", str(field))
+        tr = {}
+        ftype, fname = field
+        ptmax = self.ws[-1]
+        pbool, idxa, idxb = _determine_field_size(self.ds, ftype, 
+                                                  self.ls, ptmax)
+        npa = idxb - idxa
+        sizes = np.diff(np.concatenate(([0], self.ls)))
+        rp = lambda ax: read_particles(
+            self.file_particle, self.Nrow, idxa=idxa,
+            idxb=idxb, fields=ax)
+        for i, ax in enumerate('xyz'):
+            if fname.startswith("particle_position_%s" % ax):
+                # This is not the same as domain_dimensions
+                dd = self.ds.parameters['ng']
+                off = 1.0/dd
+                tr[field] = rp([ax])[0]/dd - off
+            if fname.startswith("particle_velocity_%s" % ax):
+                tr[field], = rp(['v'+ax])
+        if fname == "particle_mass":
+            a = 0
+            data = np.zeros(npa, dtype='f8')
+            for ptb, size, m in zip(pbool, sizes, self.ws):
+                if ptb:
+                    data[a:a+size] = m
+                    a += size
+            tr[field] = data
+        elif fname == "particle_index":
+            tr[field] = np.arange(idxa, idxb)
+        elif fname == "particle_type":
+            a = 0
+            data = np.zeros(npa, dtype='int')
+            for i, (ptb, size) in enumerate(zip(pbool, sizes)):
+                if ptb:
+                    data[a: a + size] = i
+                    a += size
+            tr[field] = data
+        if fname == "particle_creation_time":
+            self.tb, self.ages, data = interpolate_ages(
+                tr[field][-nstars:],
+                self.file_stars,
+                self.tb,
+                self.ages,
+                self.ds.current_time)
+            temp = tr.get(field, np.zeros(npa, 'f8'))
+            temp[-nstars:] = data
+            tr[field] = temp
+            del data
+        # We check again, after it's been filled
+        if fname == "particle_mass":
+            # We now divide by NGrid in order to make this match up.  Note that
+            # this means that even when requested in *code units*, we are
+            # giving them as modified by the ng value.  This only works for
+            # dark_matter -- stars are regular matter.
+            tr[field] /= self.ds.domain_dimensions.prod()
+        if not tr:
+            tr = {field: np.array([])}
+        if self.caching:
+            self.cache[field] = tr[field]
+            return self.cache[field]
+        else:
+            return tr[field]
+
+
 def _determine_field_size(pf, field, lspecies, ptmax):
     pbool = np.zeros(len(lspecies), dtype="bool")
     idxas = np.concatenate(([0, ], lspecies[:-1]))

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.


