[yt-svn] commit/yt: 19 new changesets

commits-noreply at bitbucket.org
Mon Aug 7 08:15:21 PDT 2017


19 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/66e085806dc5/
Changeset:   66e085806dc5
User:        qobilidop
Date:        2017-07-03 21:06:39+00:00
Summary:     AHF frontend draft

It's not working yet. I'm committing the draft in order to get some help from others.
Affected #:  6 files

diff -r ac06c5d6adcf34d370202204df9a255427428514 -r 66e085806dc5e9228daf47b217de975186a2c8ff yt/frontends/ahf/__init__.py
--- /dev/null
+++ b/yt/frontends/ahf/__init__.py
@@ -0,0 +1,14 @@
+"""
+API for yt.frontends.ahf
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2017, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------

diff -r ac06c5d6adcf34d370202204df9a255427428514 -r 66e085806dc5e9228daf47b217de975186a2c8ff yt/frontends/ahf/api.py
--- /dev/null
+++ b/yt/frontends/ahf/api.py
@@ -0,0 +1,23 @@
+"""
+API for yt.frontends.ahf
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2017, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from .data_structures import \
+      AHFHalosDataset
+
+from .fields import \
+      AHFHalosFieldInfo
+
+from .io import \
+      IOHandlerAHFHalos

diff -r ac06c5d6adcf34d370202204df9a255427428514 -r 66e085806dc5e9228daf47b217de975186a2c8ff yt/frontends/ahf/data_structures.py
--- /dev/null
+++ b/yt/frontends/ahf/data_structures.py
@@ -0,0 +1,175 @@
+"""
+AHF data structures
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2017, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import glob
+import os
+import stat
+
+import numpy as np
+
+from yt.data_objects.static_output import \
+    Dataset, \
+    ParticleFile
+from yt.geometry.particle_geometry_handler import \
+    ParticleIndex
+from yt.utilities.cosmology import \
+    Cosmology
+
+from .fields import AHFHalosFieldInfo
+
+
+class AHFHalosFile(ParticleFile):
+    def __init__(self, ds, io, filename, file_id):
+        root, _ = os.path.splitext(filename)
+        candidates = glob.glob(root + '*.AHF_halos')
+        if len(candidates) == 1:
+            filename = candidates[0]
+        else:
+            raise ValueError('Too many AHF_halos files.')
+        names = self._read_column_names(filename)
+        self.data = np.genfromtxt(filename, names=names)
+        super(AHFHalosFile, self).__init__(ds, io, filename, file_id)
+
+    def _read_column_names(self, filename):
+        with open(filename) as f:
+            line = f.readline()
+            # Remove leading '#'
+            line = line[1:]
+            names = line.split()
+            # Remove trailing '()'
+            names = [name.split('(')[0] for name in names]
+            return names
+
+
+class AHFHalosDataset(Dataset):
+    _index_class = ParticleIndex
+    _file_class = AHFHalosFile
+    _field_info_class = AHFHalosFieldInfo
+
+    def __init__(self, filename, dataset_type='ahf',
+                 n_ref=16, over_refine_factor=1, units_override=None,
+                 unit_system='cgs'):
+        root, _ = os.path.splitext(filename)
+        self.log_filename = root + '.log'
+
+        self.n_ref = n_ref
+        self.over_refine_factor = over_refine_factor
+        super(AHFHalosDataset, self).__init__(
+            filename, dataset_type=dataset_type,
+            units_override=units_override, unit_system=unit_system
+        )
+
+    def _set_code_unit_attributes(self):
+        self.length_unit = self.quan(1.0, 'kpccm/h')
+        self.mass_unit = self.quan(1.0, 'Msun/h')
+        self.time_unit = self.quan(1.0, '')
+        self.velocity_unit = self.quan(1.0, 'km/s')
+
+    def _parse_parameter_file(self):
+        # This needs to set up the following items.  Note that these are all
+        # assumed to be in code units; domain_left_edge and domain_right_edge
+        # will be converted to YTArray automatically at a later time.
+        # This includes the cosmological parameters.
+        #
+        #   self.unique_identifier      <= unique identifier for the dataset
+        #                                  being read (e.g., UUID or ST_CTIME)
+        #   self.parameters             <= full of code-specific items of use
+        #   self.domain_left_edge       <= array of float64
+        #   self.domain_right_edge      <= array of float64
+        #   self.dimensionality         <= int
+        #   self.domain_dimensions      <= array of int64
+        #   self.periodicity            <= three-element tuple of booleans
+        #   self.current_time           <= simulation time in code units
+        #
+        # We also set up cosmological information.  Set these to zero if
+        # non-cosmological.
+        #
+        #   self.cosmological_simulation    <= int, 0 or 1
+        #   self.current_redshift           <= float
+        #   self.omega_lambda               <= float
+        #   self.omega_matter               <= float
+        #   self.hubble_constant            <= float
+
+        # Read all parameters.
+        simu = self._read_log_simu()
+        param = self._read_parameter()
+
+        # Set up general information.
+        self.filename_template = self.parameter_filename
+        self.file_count = 1
+        self.parameters.update(param)
+        self.particle_types = ('halos')
+        self.particle_types_raw = ('halos')
+        self.unique_identifier = \
+            int(os.stat(self.parameter_filename)[stat.ST_CTIME])
+
+        # Set up geometrical information.
+        self.refine_by = 2
+        self.dimensionality = 3
+        nz = 1 << self.over_refine_factor
+        self.domain_dimensions = np.ones(self.dimensionality, "int32") * nz
+        self.domain_left_edge = np.array([0.0, 0.0, 0.0])
+        self.domain_right_edge = np.array([simu['boxsize']] * 3)
+        self.periodicity = (True, True, True)
+
+        # Set up cosmological information.
+        self.cosmological_simulation = 1
+        self.current_redshift = param['z']
+        self.hubble_constant = param['Hubble(z)']
+        self.omega_lambda = simu['lambda0']
+        self.omega_matter = simu['omega0']
+        cosmo = Cosmology(self.hubble_constant,
+                          self.omega_matter, self.omega_lambda)
+        self.current_time = cosmo.hubble_time(param['z']).in_units('s')
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        filename = args[0]
+        if not filename.endswith('.parameter'):
+            return False
+        with open(filename, 'r') as f:
+            if f.readlines()[11].startswith('AHF'):
+                return True
+        return False
+
+    # Helper methods
+
+    def _read_log_simu(self):
+        simu = {}
+        with open(self.log_filename) as f:
+            for l in f:
+                if l.startswith('simu.'):
+                    name, val = l.split(':')
+                    key = name.strip().split('.')[1]
+                    try:
+                        val = float(val)
+                    except:
+                        val = float.fromhex(val)
+                    simu[key] = val
+        return simu
+
+    def _read_parameter(self):
+        param = {}
+        with open(self.parameter_filename) as f:
+            for l in f:
+                words = l.split()
+                if len(words) == 2:
+                    key, val = words
+                    try:
+                        val = float(val)
+                        param[key] = val
+                    except:
+                        pass
+        return param
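
For context, _read_column_names above turns the first line of the .AHF_halos
file into plain column names. A minimal sketch of that parsing, assuming a
hypothetical header line (the actual columns vary with the AHF version):

    # Hypothetical .AHF_halos header; AHF names each column 'name(index)'.
    header = '#ID(1) hostHalo(2) numSubStruct(3) Mvir(4) npart(5) Xc(6)'
    line = header[1:]  # drop the leading '#'
    names = [name.split('(')[0] for name in line.split()]
    # names == ['ID', 'hostHalo', 'numSubStruct', 'Mvir', 'npart', 'Xc']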

diff -r ac06c5d6adcf34d370202204df9a255427428514 -r 66e085806dc5e9228daf47b217de975186a2c8ff yt/frontends/ahf/fields.py
--- /dev/null
+++ b/yt/frontends/ahf/fields.py
@@ -0,0 +1,38 @@
+'''
+AHF-specific fields
+
+
+
+'''
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2017, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.fields.field_info_container import \
+    FieldInfoContainer
+
+m_units = 'Msun/h'
+p_units = 'kpccm/h'
+r_units = 'kpccm/h'
+v_units = 'km/s'
+
+
+class AHFHalosFieldInfo(FieldInfoContainer):
+    known_other_fields = ()
+
+    known_particle_fields = (
+        ('particle_identifier', ('', ['ID'], None)),
+        ('particle_mass', (m_units, ['Mvir'], 'Virial Mass')),
+        ('particle_position_x', (p_units, ['Xc'], None)),
+        ('particle_position_y', (p_units, ['Yc'], None)),
+        ('particle_position_z', (p_units, ['Zc'], None)),
+        ('particle_velocity_x', (v_units, ['VXc'], None)),
+        ('particle_velocity_y', (v_units, ['VYc'], None)),
+        ('particle_velocity_z', (v_units, ['VZc'], None)),
+        ('virial_radius', (r_units, ['Rvir'], 'Virial Radius')),
+    )

diff -r ac06c5d6adcf34d370202204df9a255427428514 -r 66e085806dc5e9228daf47b217de975186a2c8ff yt/frontends/ahf/io.py
--- /dev/null
+++ b/yt/frontends/ahf/io.py
@@ -0,0 +1,116 @@
+"""
+AHF-specific IO functions
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2017, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from operator import attrgetter
+
+import numpy as np
+
+from yt.funcs import \
+    mylog
+from yt.utilities.io_handler import \
+    BaseIOHandler
+from yt.utilities.lib.geometry_utils import \
+    compute_morton
+
+
+class IOHandlerAHFHalos(BaseIOHandler):
+    _particle_reader = False
+    _dataset_type = 'ahf'
+
+    def _read_fluid_selection(self, chunks, selector, fields, size):
+        raise NotImplementedError
+
+    def _read_particle_coords(self, chunks, ptf):
+        # This needs to *yield* a series of tuples of (ptype, (x, y, z)).
+        # chunks is a list of chunks, and ptf is a dict where the keys are
+        # ptypes and the values are lists of fields.
+        for data_file in self._get_data_files(chunks, ptf):
+            halos = data_file.data
+            x = halos['Xc'].astype('float64')
+            y = halos['Yc'].astype('float64')
+            z = halos['Zc'].astype('float64')
+            yield 'halos', (x, y, z)
+
+    def _read_particle_fields(self, chunks, ptf, selector):
+        # This gets called after the arrays have been allocated.  It needs to
+        # yield ((ptype, field), data) where data is the masked results of
+        # reading ptype, field and applying the selector to the data read in.
+        # Selector objects have a .select_points(x,y,z) that returns a mask, so
+        # you need to do your masking here.
+        for data_file in self._get_data_files(chunks, ptf):
+            halos = data_file.data
+            x = halos['Xc'].astype('float64')
+            y = halos['Yc'].astype('float64')
+            z = halos['Zc'].astype('float64')
+            mask = selector.select_points(x, y, z, 0.0)
+            del x, y, z
+            if mask is None: continue
+            for ptype, field_list in sorted(ptf.items()):
+                for field in field_list:
+                    data = halos[field][mask].astype('float64')
+                    yield (ptype, field), data
+
+    def _initialize_index(self, data_file, regions):
+        halos = data_file.data
+        pcount = len(data_file.data['ID'])
+        morton = np.empty(pcount, dtype='uint64')
+        mylog.debug('Initializing index % 5i (% 7i particles)',
+                    data_file.file_id, pcount)
+        if pcount == 0:
+            return morton
+        ind = 0
+        pos = np.empty((pcount, 3), dtype='float64')
+        pos = data_file.ds.arr(pos, 'code_length')
+        dx = np.finfo(halos['Xc'].dtype).eps
+        dx = 2.0 * self.ds.quan(dx, 'code_length')
+        pos[:, 0] = halos['Xc']
+        pos[:, 1] = halos['Yc']
+        pos[:, 2] = halos['Zc']
+        dle = self.ds.domain_left_edge
+        dre = self.ds.domain_right_edge
+        # These are 32 bit numbers, so we give a little lee-way.
+        # Otherwise, for big sets of particles, we often will bump into the
+        # domain edges.  This helps alleviate that.
+        np.clip(pos, dle + dx, dre - dx, pos)
+        if np.any(pos.min(axis=0) < dle) or np.any(pos.max(axis=0) > dre):
+            raise YTDomainOverflow(pos.min(axis=0),
+                                   pos.max(axis=0),
+                                   dle, dre)
+        regions.add_data_file(pos, data_file.file_id)
+        morton[ind:ind+pos.shape[0]] = compute_morton(
+            pos[:, 0], pos[:, 1], pos[:, 2], dle, dre)
+        return morton
+
+    def _count_particles(self, data_file):
+        return {'halos': len(data_file.data['ID'])}
+
+    def _identify_fields(self, data_file):
+        fields = [('halos', f) for f in data_file.data.dtype.names]
+        return fields, {}
+
+    # Helper methods
+
+    def _get_data_files(self, chunks, ptf):
+        # Only support halo reading for now.
+        assert len(ptf) == 1
+        assert list(ptf.keys())[0] == 'halos'
+        # Get data_files
+        chunks = list(chunks)
+        data_files = set([])
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        data_files = sorted(data_files, key=attrgetter('filename'))
+        return data_files

diff -r ac06c5d6adcf34d370202204df9a255427428514 -r 66e085806dc5e9228daf47b217de975186a2c8ff yt/frontends/api.py
--- a/yt/frontends/api.py
+++ b/yt/frontends/api.py
@@ -17,6 +17,7 @@
 from yt.extern.six.moves import cPickle as pickle
 
 _frontends = [
+    'ahf',
     'art',
     'artio',
     'athena',


https://bitbucket.org/yt_analysis/yt/commits/0e301cb6c3ef/
Changeset:   0e301cb6c3ef
User:        qobilidop
Date:        2017-07-03 22:22:13+00:00
Summary:     Fix the issue of empty fields
Affected #:  2 files
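
The key fix is a unit mismatch: the AHF log reports boxsize in Mpc while halo
positions are in kpc, so the domain edge has to be scaled by 1000. A minimal
sketch of the arithmetic, with a hypothetical boxsize:

    import numpy as np
    boxsize = 16.0  # hypothetical simu['boxsize'], in Mpc
    domain_right_edge = np.array([boxsize] * 3) * 1000  # now in kpc
    # domain_right_edge == array([16000., 16000., 16000.])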

diff -r 66e085806dc5e9228daf47b217de975186a2c8ff -r 0e301cb6c3ef038c696905170d9f54c1a3764f0a yt/frontends/ahf/data_structures.py
--- a/yt/frontends/ahf/data_structures.py
+++ b/yt/frontends/ahf/data_structures.py
@@ -121,7 +121,8 @@
         nz = 1 << self.over_refine_factor
         self.domain_dimensions = np.ones(self.dimensionality, "int32") * nz
         self.domain_left_edge = np.array([0.0, 0.0, 0.0])
-        self.domain_right_edge = np.array([simu['boxsize']] * 3)
+        # Note that boxsize is in Mpc but particle positions are in kpc.
+        self.domain_right_edge = np.array([simu['boxsize']] * 3) * 1000
         self.periodicity = (True, True, True)
 
         # Set up cosmological information.

diff -r 66e085806dc5e9228daf47b217de975186a2c8ff -r 0e301cb6c3ef038c696905170d9f54c1a3764f0a yt/frontends/ahf/io.py
--- a/yt/frontends/ahf/io.py
+++ b/yt/frontends/ahf/io.py
@@ -19,6 +19,8 @@
 
 from yt.funcs import \
     mylog
+from yt.utilities.exceptions import \
+    YTDomainOverflow
 from yt.utilities.io_handler import \
     BaseIOHandler
 from yt.utilities.lib.geometry_utils import \
@@ -113,4 +115,5 @@
             for obj in chunk.objs:
                 data_files.update(obj.data_files)
         data_files = sorted(data_files, key=attrgetter('filename'))
-        return data_files
+        for data_file in data_files:
+            yield data_file


https://bitbucket.org/yt_analysis/yt/commits/9c030b209de2/
Changeset:   9c030b209de2
User:        qobilidop
Date:        2017-07-03 22:41:50+00:00
Summary:     Define fields the right way

The first entry should be the on-disk field name; the latter is the alias. Thanks @ngoldbaum for pointing this out.
Affected #:  1 file
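
For reference, each entry in known_particle_fields maps the on-disk column
name to a (units, aliases, display name) tuple, so the corrected convention
looks like:

    # (on-disk name, (units, [yt-level aliases], display name))
    known_particle_fields = (
        ('Mvir', ('Msun/h', ['particle_mass'], 'Virial Mass')),
    )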

diff -r 0e301cb6c3ef038c696905170d9f54c1a3764f0a -r 9c030b209de253d3f5170d52e86c75281e30b8e5 yt/frontends/ahf/fields.py
--- a/yt/frontends/ahf/fields.py
+++ b/yt/frontends/ahf/fields.py
@@ -26,13 +26,13 @@
     known_other_fields = ()
 
     known_particle_fields = (
-        ('particle_identifier', ('', ['ID'], None)),
-        ('particle_mass', (m_units, ['Mvir'], 'Virial Mass')),
-        ('particle_position_x', (p_units, ['Xc'], None)),
-        ('particle_position_y', (p_units, ['Yc'], None)),
-        ('particle_position_z', (p_units, ['Zc'], None)),
-        ('particle_velocity_x', (v_units, ['VXc'], None)),
-        ('particle_velocity_y', (v_units, ['VYc'], None)),
-        ('particle_velocity_z', (v_units, ['VZc'], None)),
-        ('virial_radius', (r_units, ['Rvir'], 'Virial Radius')),
+        ('ID', ('', ['particle_identifier'], None)),
+        ('Mvir', (m_units, ['particle_mass'], 'Virial Mass')),
+        ('Xc', (p_units, ['particle_position_x'], None)),
+        ('Yc', (p_units, ['particle_position_y'], None)),
+        ('Zc', (p_units, ['particle_position_z'], None)),
+        ('VXc', (v_units, ['particle_velocity_x'], None)),
+        ('VYc', (v_units, ['particle_velocity_y'], None)),
+        ('VZc', (v_units, ['particle_velocity_z'], None)),
+        ('Rvir', (r_units, ['virial_radius'], 'Virial Radius')),
     )


https://bitbucket.org/yt_analysis/yt/commits/d76a46f5f476/
Changeset:   d76a46f5f476
User:        qobilidop
Date:        2017-07-03 22:55:18+00:00
Summary:     Complete field definitions
Affected #:  1 file

diff -r 9c030b209de253d3f5170d52e86c75281e30b8e5 -r d76a46f5f476c8442f3563998eb7908fe492aef0 yt/frontends/ahf/fields.py
--- a/yt/frontends/ahf/fields.py
+++ b/yt/frontends/ahf/fields.py
@@ -25,9 +25,14 @@
 class AHFHalosFieldInfo(FieldInfoContainer):
     known_other_fields = ()
 
+    # See http://popia.ft.uam.es/AHF/files/AHF.pdf
+    # and search for '*.AHF_halos'.
     known_particle_fields = (
         ('ID', ('', ['particle_identifier'], None)),
+        ('hostHalo', ('', [], None)),
+        ('numSubStruct', ('', [], None)),
         ('Mvir', (m_units, ['particle_mass'], 'Virial Mass')),
+        ('npart', ('', [], None)),
         ('Xc', (p_units, ['particle_position_x'], None)),
         ('Yc', (p_units, ['particle_position_y'], None)),
         ('Zc', (p_units, ['particle_position_z'], None)),
@@ -35,4 +40,35 @@
         ('VYc', (v_units, ['particle_velocity_y'], None)),
         ('VZc', (v_units, ['particle_velocity_z'], None)),
         ('Rvir', (r_units, ['virial_radius'], 'Virial Radius')),
+        ('Rmax', (r_units, [], None)),
+        ('r2', (r_units, [], None)),
+        ('mbp_offset', (r_units, [], None)),
+        ('com_offset', (r_units, [], None)),
+        ('Vmax', (v_units, [], None)),
+        ('v_sec', (v_units, [], None)),
+        ('sigV', (v_units, [], None)),
+        ('lambda', ('', [], None)),
+        ('lambdaE', ('', [], None)),
+        ('Lx', ('', [], None)),
+        ('Ly', ('', [], None)),
+        ('Lz', ('', [], None)),
+        ('b', ('', [], None)),
+        ('c', ('', [], None)),
+        ('Eax', ('', [], None)),
+        ('Eay', ('', [], None)),
+        ('Eaz', ('', [], None)),
+        ('Ebx', ('', [], None)),
+        ('Eby', ('', [], None)),
+        ('Ebz', ('', [], None)),
+        ('Ecx', ('', [], None)),
+        ('Ecy', ('', [], None)),
+        ('Ecz', ('', [], None)),
+        ('ovdens', ('', [], None)),
+        ('nbins', ('', [], None)),
+        ('fMhires', ('', [], None)),
+        ('Ekin', ('Msun/h*(km/s)**2', [], None)),
+        ('Epot', ('Msun/h*(km/s)**2', [], None)),
+        ('SurfP', ('Msun/h*(km/s)**2', [], None)),
+        ('Phi0', ('(km/s)**2', [], None)),
+        ('cNFW', ('', [], None))
     )


https://bitbucket.org/yt_analysis/yt/commits/8b07ad514e65/
Changeset:   8b07ad514e65
User:        qobilidop
Date:        2017-07-04 02:33:09+00:00
Summary:     Add tests for AHF frontend
Affected #:  2 files

diff -r d76a46f5f476c8442f3563998eb7908fe492aef0 -r 8b07ad514e651d432a8288632790fad35fcbbb60 yt/frontends/ahf/tests/test_outputs.py
--- /dev/null
+++ b/yt/frontends/ahf/tests/test_outputs.py
@@ -0,0 +1,41 @@
+"""
+AHF frontend tests using ahf_halos dataset
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2017, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import os.path
+from yt.testing import \
+    assert_equal, \
+    requires_file
+from yt.utilities.answer_testing.framework import \
+    FieldValuesTest, \
+    requires_ds, \
+    data_dir_load
+from yt.frontends.ahf.api import AHFHalosDataset
+
+_fields = ('particle_position_x', 'particle_position_y',
+           'particle_position_z', 'particle_mass')
+
+ahf_halos = 'ahf_halos/snap_N64L16_135.parameter'
+
+
+@requires_ds(ahf_halos)
+def test_fields_sample():
+    ds = data_dir_load(ahf_halos)
+    assert_equal(str(ahf_halos), os.path.basename(ahf_halos))
+    for field in _fields:
+        yield FieldValuesTest(ahf_halos, field, particle_type=True)
+
+@requires_file(ahf_halos)
+def test_AHFHalosDataset():
+    assert isinstance(data_dir_load(ahf_halos), AHFHalosDataset)


https://bitbucket.org/yt_analysis/yt/commits/89731da7b08a/
Changeset:   89731da7b08a
User:        qobilidop
Date:        2017-07-04 02:43:11+00:00
Summary:     Register answer tests
Affected #:  1 file

diff -r 8b07ad514e651d432a8288632790fad35fcbbb60 -r 89731da7b08a60a980b262fc7f8d5d86386ad765 tests/tests.yaml
--- a/tests/tests.yaml
+++ b/tests/tests.yaml
@@ -38,6 +38,7 @@
   local_halos_001:
     - yt/analysis_modules/halo_analysis/tests/test_halo_finders.py  # [py2]
     - yt/analysis_modules/halo_finding/tests/test_rockstar.py  # [py2]
+    - yt/frontends/ahf/tests/test_outputs.py
     - yt/frontends/owls_subfind/tests/test_outputs.py
     - yt/frontends/gadget_fof/tests/test_outputs.py:test_fields_g5
     - yt/frontends/gadget_fof/tests/test_outputs.py:test_fields_g42


https://bitbucket.org/yt_analysis/yt/commits/c11016acd511/
Changeset:   c11016acd511
User:        qobilidop
Date:        2017-07-04 03:55:22+00:00
Summary:     Fix typo
Affected #:  1 file

diff -r 89731da7b08a60a980b262fc7f8d5d86386ad765 -r c11016acd51121ce4c09711d81b83dc1b7711507 yt/frontends/ahf/tests/test_outputs.py
--- a/yt/frontends/ahf/tests/test_outputs.py
+++ b/yt/frontends/ahf/tests/test_outputs.py
@@ -30,9 +30,9 @@
 
 
 @requires_ds(ahf_halos)
-def test_fields_sample():
+def test_fields_ahf_halos():
     ds = data_dir_load(ahf_halos)
-    assert_equal(str(ahf_halos), os.path.basename(ahf_halos))
+    assert_equal(str(ds), os.path.basename(ahf_halos))
     for field in _fields:
         yield FieldValuesTest(ahf_halos, field, particle_type=True)
 


https://bitbucket.org/yt_analysis/yt/commits/2a80f5e681a4/
Changeset:   2a80f5e681a4
User:        qobilidop
Date:        2017-07-04 17:50:04+00:00
Summary:     Add docs to Loading Data
Affected #:  1 file

diff -r c11016acd51121ce4c09711d81b83dc1b7711507 -r 2a80f5e681a4161f998b503a5b858500853633e8 doc/source/examining/loading_data.rst
--- a/doc/source/examining/loading_data.rst
+++ b/doc/source/examining/loading_data.rst
@@ -1605,7 +1605,7 @@
 Halo Catalog Data
 -----------------
 
-yt has support for reading halo catalogs produced by Rockstar and the inline
+yt has support for reading halo catalogs produced by AHF, Rockstar and the inline
 FOF/SUBFIND halo finders of Gadget and OWLS.  The halo catalogs are treated as
 particle datasets where each particle represents a single halo.  For example,
 this means that the `particle_mass` field refers to the mass of the halos.  For
@@ -1618,6 +1618,44 @@
 :ref:`halo_catalog`.  The resulting product can be reloaded in a similar manner
 to the other halo catalogs shown here.
 
+.. _ahf:
+
+AHF
+^^^
+
+AHF halo catalogs are loaded by providing the path to the .parameter files.
+The corresponding .log and .AHF_halos files must exist for data loading to
+succeed. The field type for all fields is "halos". Some fields of note available
+from AHF are:
+
++----------------+---------------------------+
+| AHF field      | yt field name             |
++================+===========================+
+| ID             | particle_identifier       |
++----------------+---------------------------+
+| Mvir           | particle_mass             |
++----------------+---------------------------+
+| Rvir           | virial_radius             |
++----------------+---------------------------+
+| (X,Y,Z)c       | particle_position_(x,y,z) |
++----------------+---------------------------+
+| V(X,Y,Z)c      | particle_velocity_(x,y,z) |
++----------------+---------------------------+
+
+Numerous other AHF fields exist.  To see them, check the field list by typing
+`ds.field_list` for a dataset loaded as `ds`.  Like all other datasets, fields
+must be accessed through :ref:`Data-objects`.
+
+.. code-block:: python
+
+   import yt
+   ds = yt.load("ahf_halos/snap_N64L16_135.parameter")
+   ad = ds.all_data()
+   # halo masses
+   print(ad["halos", "particle_mass"])
+   # halo radii
+   print(ad["halos", "virial_radius"])
+
 .. _rockstar:
 
 Rockstar


https://bitbucket.org/yt_analysis/yt/commits/3c8075c32514/
Changeset:   3c8075c32514
User:        qobilidop
Date:        2017-07-14 14:10:57+00:00
Summary:     Increment the answer number
Affected #:  1 file

diff -r 2a80f5e681a4161f998b503a5b858500853633e8 -r 3c8075c325149631b20742bd30c2ba65670244a2 tests/tests.yaml
--- a/tests/tests.yaml
+++ b/tests/tests.yaml
@@ -35,7 +35,7 @@
   local_gizmo_002:
     - yt/frontends/gizmo/tests/test_outputs.py
 
-  local_halos_001:
+  local_halos_002:
     - yt/analysis_modules/halo_analysis/tests/test_halo_finders.py  # [py2]
     - yt/analysis_modules/halo_finding/tests/test_rockstar.py  # [py2]
     - yt/frontends/ahf/tests/test_outputs.py


https://bitbucket.org/yt_analysis/yt/commits/df791092f3a4/
Changeset:   df791092f3a4
User:        qobilidop
Date:        2017-07-18 05:04:55+00:00
Summary:     Delete comments from the skeleton template
Affected #:  1 file

diff -r 3c8075c325149631b20742bd30c2ba65670244a2 -r df791092f3a43d4a01480f9236114c8c6a7408d7 yt/frontends/ahf/data_structures.py
--- a/yt/frontends/ahf/data_structures.py
+++ b/yt/frontends/ahf/data_structures.py
@@ -78,30 +78,6 @@
         self.velocity_unit = self.quan(1.0, 'km/s')
 
     def _parse_parameter_file(self):
-        # This needs to set up the following items.  Note that these are all
-        # assumed to be in code units; domain_left_edge and domain_right_edge
-        # will be converted to YTArray automatically at a later time.
-        # This includes the cosmological parameters.
-        #
-        #   self.unique_identifier      <= unique identifier for the dataset
-        #                                  being read (e.g., UUID or ST_CTIME)
-        #   self.parameters             <= full of code-specific items of use
-        #   self.domain_left_edge       <= array of float64
-        #   self.domain_right_edge      <= array of float64
-        #   self.dimensionality         <= int
-        #   self.domain_dimensions      <= array of int64
-        #   self.periodicity            <= three-element tuple of booleans
-        #   self.current_time           <= simulation time in code units
-        #
-        # We also set up cosmological information.  Set these to zero if
-        # non-cosmological.
-        #
-        #   self.cosmological_simulation    <= int, 0 or 1
-        #   self.current_redshift           <= float
-        #   self.omega_lambda               <= float
-        #   self.omega_matter               <= float
-        #   self.hubble_constant            <= float
-
         # Read all parameters.
         simu = self._read_log_simu()
         param = self._read_parameter()


https://bitbucket.org/yt_analysis/yt/commits/bf6140a61198/
Changeset:   bf6140a61198
User:        qobilidop
Date:        2017-07-18 09:12:43+00:00
Summary:     Refactor code unit definition

Mimic the Rockstar frontend.
Affected #:  1 file
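
setdefaultattr sets an attribute only if it is not already set, so unit
attributes assigned elsewhere are preserved. A minimal sketch of that
behavior:

    def setdefaultattr(obj, name, value):
        # Set obj.<name> only when the attribute does not exist yet,
        # analogous to dict.setdefault.
        if not hasattr(obj, name):
            setattr(obj, name, value)
        return getattr(obj, name)

The same diff also corrects time_unit from a dimensionless '' to 's'.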

diff -r df791092f3a43d4a01480f9236114c8c6a7408d7 -r bf6140a61198bd9ba9582167670584e3740d4437 yt/frontends/ahf/data_structures.py
--- a/yt/frontends/ahf/data_structures.py
+++ b/yt/frontends/ahf/data_structures.py
@@ -22,6 +22,8 @@
 from yt.data_objects.static_output import \
     Dataset, \
     ParticleFile
+from yt.funcs import \
+    setdefaultattr
 from yt.geometry.particle_geometry_handler import \
     ParticleIndex
 from yt.utilities.cosmology import \
@@ -72,10 +74,10 @@
         )
 
     def _set_code_unit_attributes(self):
-        self.length_unit = self.quan(1.0, 'kpccm/h')
-        self.mass_unit = self.quan(1.0, 'Msun/h')
-        self.time_unit = self.quan(1.0, '')
-        self.velocity_unit = self.quan(1.0, 'km/s')
+        setdefaultattr(self, 'length_unit', self.quan(1.0, 'kpccm/h'))
+        setdefaultattr(self, 'mass_unit', self.quan(1.0, 'Msun/h'))
+        setdefaultattr(self, 'time_unit', self.quan(1.0, 's'))
+        setdefaultattr(self, 'velocity_unit', self.quan(1.0, 'km/s'))
 
     def _parse_parameter_file(self):
         # Read all parameters.


https://bitbucket.org/yt_analysis/yt/commits/569b9aa296f6/
Changeset:   569b9aa296f6
User:        qobilidop
Date:        2017-07-18 10:04:02+00:00
Summary:     Read data in IOHandler instead

We don't want to have the data in memory when creating the File object. It's the job of the IOHandler to read data.
Affected #:  2 files
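
After this change the File object keeps only the column names; read_data
re-parses the catalog with np.genfromtxt whenever values are needed, and
returns a NumPy structured array indexed by column name. A standalone sketch
with hypothetical file contents:

    import io
    import numpy as np
    text = io.StringIO(u'1 1e12 8000.0\n2 5e11 1200.0')
    halos = np.genfromtxt(text, names=['ID', 'Mvir', 'Xc'])
    x = halos['Xc']  # array([8000., 1200.])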

diff -r bf6140a61198bd9ba9582167670584e3740d4437 -r 569b9aa296f6e5ed598a29c2a9dcbaee38b875b7 yt/frontends/ahf/data_structures.py
--- a/yt/frontends/ahf/data_structures.py
+++ b/yt/frontends/ahf/data_structures.py
@@ -40,10 +40,12 @@
             filename = candidates[0]
         else:
             raise ValueError('Too many AHF_halos files.')
-        names = self._read_column_names(filename)
-        self.data = np.genfromtxt(filename, names=names)
+        self.col_names = self._read_column_names(filename)
         super(AHFHalosFile, self).__init__(ds, io, filename, file_id)
 
+    def read_data(self):
+        return np.genfromtxt(self.filename, names=self.col_names)
+
     def _read_column_names(self, filename):
         with open(filename) as f:
             line = f.readline()

diff -r bf6140a61198bd9ba9582167670584e3740d4437 -r 569b9aa296f6e5ed598a29c2a9dcbaee38b875b7 yt/frontends/ahf/io.py
--- a/yt/frontends/ahf/io.py
+++ b/yt/frontends/ahf/io.py
@@ -39,7 +39,7 @@
         # chunks is a list of chunks, and ptf is a dict where the keys are
         # ptypes and the values are lists of fields.
         for data_file in self._get_data_files(chunks, ptf):
-            halos = data_file.data
+            halos = data_file.read_data()
             x = halos['Xc'].astype('float64')
             y = halos['Yc'].astype('float64')
             z = halos['Zc'].astype('float64')
@@ -52,7 +52,7 @@
         # Selector objects have a .select_points(x,y,z) that returns a mask, so
         # you need to do your masking here.
         for data_file in self._get_data_files(chunks, ptf):
-            halos = data_file.data
+            halos = data_file.read_data()
             x = halos['Xc'].astype('float64')
             y = halos['Yc'].astype('float64')
             z = halos['Zc'].astype('float64')
@@ -65,8 +65,8 @@
                     yield (ptype, field), data
 
     def _initialize_index(self, data_file, regions):
-        halos = data_file.data
-        pcount = len(data_file.data['ID'])
+        halos = data_file.read_data()
+        pcount = len(halos['ID'])
         morton = np.empty(pcount, dtype='uint64')
         mylog.debug('Initializing index % 5i (% 7i particles)',
                     data_file.file_id, pcount)
@@ -96,10 +96,11 @@
         return morton
 
     def _count_particles(self, data_file):
-        return {'halos': len(data_file.data['ID'])}
+        halos = data_file.read_data()
+        return {'halos': len(halos['ID'])}
 
     def _identify_fields(self, data_file):
-        fields = [('halos', f) for f in data_file.data.dtype.names]
+        fields = [('halos', f) for f in data_file.col_names]
         return fields, {}
 
     # Helper methods


https://bitbucket.org/yt_analysis/yt/commits/898c82903613/
Changeset:   898c82903613
User:        qobilidop
Date:        2017-07-18 15:16:06+00:00
Summary:     Read selected columns only
Affected #:  2 files
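
np.genfromtxt also accepts column names in usecols once names are given, so
only the requested columns are parsed. A sketch with hypothetical data:

    import io
    import numpy as np
    names = ['ID', 'Mvir', 'Xc', 'Yc', 'Zc']
    text = io.StringIO(u'1 1e12 8000.0 40.0 2.0\n2 5e11 1200.0 30.0 9.0')
    halos = np.genfromtxt(text, names=names, usecols=['Xc', 'Yc', 'Zc'])
    # halos.dtype.names == ('Xc', 'Yc', 'Zc')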

diff -r 569b9aa296f6e5ed598a29c2a9dcbaee38b875b7 -r 898c82903613fa70a9455b0ba4169cac48f5a25d yt/frontends/ahf/data_structures.py
--- a/yt/frontends/ahf/data_structures.py
+++ b/yt/frontends/ahf/data_structures.py
@@ -43,8 +43,9 @@
         self.col_names = self._read_column_names(filename)
         super(AHFHalosFile, self).__init__(ds, io, filename, file_id)
 
-    def read_data(self):
-        return np.genfromtxt(self.filename, names=self.col_names)
+    def read_data(self, usecols=None):
+        return np.genfromtxt(self.filename, names=self.col_names,
+                             usecols=usecols)
 
     def _read_column_names(self, filename):
         with open(filename) as f:

diff -r 569b9aa296f6e5ed598a29c2a9dcbaee38b875b7 -r 898c82903613fa70a9455b0ba4169cac48f5a25d yt/frontends/ahf/io.py
--- a/yt/frontends/ahf/io.py
+++ b/yt/frontends/ahf/io.py
@@ -39,7 +39,7 @@
         # chunks is a list of chunks, and ptf is a dict where the keys are
         # ptypes and the values are lists of fields.
         for data_file in self._get_data_files(chunks, ptf):
-            halos = data_file.read_data()
+            halos = data_file.read_data(usecols=['Xc', 'Yc', 'Zc'])
             x = halos['Xc'].astype('float64')
             y = halos['Yc'].astype('float64')
             z = halos['Zc'].astype('float64')
@@ -65,7 +65,7 @@
                     yield (ptype, field), data
 
     def _initialize_index(self, data_file, regions):
-        halos = data_file.read_data()
+        halos = data_file.read_data(usecols=['Xc', 'Yc', 'Zc'])
         pcount = len(halos['ID'])
         morton = np.empty(pcount, dtype='uint64')
         mylog.debug('Initializing index % 5i (% 7i particles)',
@@ -96,7 +96,7 @@
         return morton
 
     def _count_particles(self, data_file):
-        halos = data_file.read_data()
+        halos = data_file.read_data(usecols=['ID'])
         return {'halos': len(halos['ID'])}
 
     def _identify_fields(self, data_file):


https://bitbucket.org/yt_analysis/yt/commits/a466d4a88ffc/
Changeset:   a466d4a88ffc
User:        qobilidop
Date:        2017-07-18 15:37:37+00:00
Summary:     Let the user specify the Hubble constant

It is a current flaw of AHF that the Hubble constant is not shown in the outputs.
Affected #:  1 file
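
With this change, a load call can pass the Hubble constant explicitly (0.7
here is a hypothetical value, matching the one later used in the docs and
tests):

    import yt
    ds = yt.load('ahf_halos/snap_N64L16_135.parameter', hubble_constant=0.7)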

diff -r 898c82903613fa70a9455b0ba4169cac48f5a25d -r a466d4a88ffc585b3671ed3b009de051a2bb86e9 yt/frontends/ahf/data_structures.py
--- a/yt/frontends/ahf/data_structures.py
+++ b/yt/frontends/ahf/data_structures.py
@@ -64,10 +64,12 @@
     _field_info_class = AHFHalosFieldInfo
 
     def __init__(self, filename, dataset_type='ahf',
-                 n_ref=16, over_refine_factor=1, units_override=None,
-                 unit_system='cgs'):
+                 n_ref=16, over_refine_factor=1,
+                 units_override=None, unit_system='cgs',
+                 hubble_constant=1.0):
         root, _ = os.path.splitext(filename)
         self.log_filename = root + '.log'
+        self.hubble_constant = hubble_constant
 
         self.n_ref = n_ref
         self.over_refine_factor = over_refine_factor
@@ -109,7 +111,6 @@
         # Set up cosmological information.
         self.cosmological_simulation = 1
         self.current_redshift = param['z']
-        self.hubble_constant = param['Hubble(z)']
         self.omega_lambda = simu['lambda0']
         self.omega_matter = simu['omega0']
         cosmo = Cosmology(self.hubble_constant,


https://bitbucket.org/yt_analysis/yt/commits/41481eeed432/
Changeset:   41481eeed432
User:        qobilidop
Date:        2017-07-18 15:59:42+00:00
Summary:     Add note about Hubble parameter in AHF
Affected #:  1 file

diff -r a466d4a88ffc585b3671ed3b009de051a2bb86e9 -r 41481eeed43299cdcf69e90445f40ae031f6934c doc/source/examining/loading_data.rst
--- a/doc/source/examining/loading_data.rst
+++ b/doc/source/examining/loading_data.rst
@@ -1649,13 +1649,18 @@
 .. code-block:: python
 
    import yt
-   ds = yt.load("ahf_halos/snap_N64L16_135.parameter")
+   ds = yt.load("ahf_halos/snap_N64L16_135.parameter", hubble_constant=0.7)
    ad = ds.all_data()
    # halo masses
    print(ad["halos", "particle_mass"])
    # halo radii
    print(ad["halos", "virial_radius"])
 
+.. note::
+
+  Currently the dimensionless Hubble parameter that yt needs is not provided in
+  AHF outputs. So users need to provide the `hubble_constant` (defaults to 1.0) when loading datasets, as shown above.
+
 .. _rockstar:
 
 Rockstar


https://bitbucket.org/yt_analysis/yt/commits/b4985add6a6f/
Changeset:   b4985add6a6f
User:        qobilidop
Date:        2017-07-19 00:31:15+00:00
Summary:     Fix column selection
Affected #:  1 file
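
ptf maps each particle type to the list of on-disk fields requested; the fix
always includes the position columns and deduplicates before reading. A
sketch with a hypothetical request:

    ptf = {'halos': ['Mvir', 'Xc']}
    cols = ['Xc', 'Yc', 'Zc']
    for field_list in ptf.values():
        cols.extend(field_list)
    cols = list(set(cols))  # e.g. ['Mvir', 'Xc', 'Yc', 'Zc'], in some order
    # _initialize_index also gains 'ID' in its usecols, since the halo count
    # is taken from len(halos['ID']).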

diff -r 41481eeed43299cdcf69e90445f40ae031f6934c -r b4985add6a6fcea948fda676e9b4cfb8bda13b45 yt/frontends/ahf/io.py
--- a/yt/frontends/ahf/io.py
+++ b/yt/frontends/ahf/io.py
@@ -52,7 +52,11 @@
         # Selector objects have a .select_points(x,y,z) that returns a mask, so
         # you need to do your masking here.
         for data_file in self._get_data_files(chunks, ptf):
-            halos = data_file.read_data()
+            cols = ['Xc', 'Yc', 'Zc']
+            for field_list in ptf.values():
+                cols.extend(field_list)
+            cols = list(set(cols))
+            halos = data_file.read_data(usecols=cols)
             x = halos['Xc'].astype('float64')
             y = halos['Yc'].astype('float64')
             z = halos['Zc'].astype('float64')
@@ -65,7 +69,7 @@
                     yield (ptype, field), data
 
     def _initialize_index(self, data_file, regions):
-        halos = data_file.read_data(usecols=['Xc', 'Yc', 'Zc'])
+        halos = data_file.read_data(usecols=['ID', 'Xc', 'Yc', 'Zc'])
         pcount = len(halos['ID'])
         morton = np.empty(pcount, dtype='uint64')
         mylog.debug('Initializing index % 5i (% 7i particles)',


https://bitbucket.org/yt_analysis/yt/commits/4a7f5591f263/
Changeset:   4a7f5591f263
User:        qobilidop
Date:        2017-07-19 01:55:52+00:00
Summary:     Set proper Hubble constant in test
Affected #:  1 file

diff -r b4985add6a6fcea948fda676e9b4cfb8bda13b45 -r 4a7f5591f263535bd3dbf108ed455a3ad688831a yt/frontends/ahf/tests/test_outputs.py
--- a/yt/frontends/ahf/tests/test_outputs.py
+++ b/yt/frontends/ahf/tests/test_outputs.py
@@ -29,13 +29,18 @@
 ahf_halos = 'ahf_halos/snap_N64L16_135.parameter'
 
 
+def load(filename):
+    return data_dir_load(filename, kwargs={'hubble_constant': 0.7})
+
+
 @requires_ds(ahf_halos)
 def test_fields_ahf_halos():
-    ds = data_dir_load(ahf_halos)
+    ds = load(ahf_halos)
     assert_equal(str(ds), os.path.basename(ahf_halos))
     for field in _fields:
         yield FieldValuesTest(ahf_halos, field, particle_type=True)
 
+
 @requires_file(ahf_halos)
 def test_AHFHalosDataset():
-    assert isinstance(data_dir_load(ahf_halos), AHFHalosDataset)
+    assert isinstance(load(ahf_halos), AHFHalosDataset)


https://bitbucket.org/yt_analysis/yt/commits/7b3957e778c7/
Changeset:   7b3957e778c7
User:        qobilidop
Date:        2017-07-19 01:58:06+00:00
Summary:     Bump answer number for AHF data

The change in the Hubble constant changes the test answers.
Affected #:  1 file

diff -r 4a7f5591f263535bd3dbf108ed455a3ad688831a -r 7b3957e778c726493802263bf29ee29ed9d6812f tests/tests.yaml
--- a/tests/tests.yaml
+++ b/tests/tests.yaml
@@ -35,7 +35,7 @@
   local_gizmo_002:
     - yt/frontends/gizmo/tests/test_outputs.py
 
-  local_halos_002:
+  local_halos_003:
     - yt/analysis_modules/halo_analysis/tests/test_halo_finders.py  # [py2]
     - yt/analysis_modules/halo_finding/tests/test_rockstar.py  # [py2]
     - yt/frontends/ahf/tests/test_outputs.py


https://bitbucket.org/yt_analysis/yt/commits/9a899ebd4ed0/
Changeset:   9a899ebd4ed0
User:        MatthewTurk
Date:        2017-08-07 15:14:57+00:00
Summary:     Merge pull request #1477 from qobilidop/ahf-frontend

Add AHF frontend
Affected #:  10 files

diff -r 399cd9128a7011779f161a11b03529289c9a2f51 -r 9a899ebd4ed06ce34bdf9caa98ae7e61e45b0259 doc/source/examining/loading_data.rst
--- a/doc/source/examining/loading_data.rst
+++ b/doc/source/examining/loading_data.rst
@@ -1627,7 +1627,7 @@
 Halo Catalog Data
 -----------------
 
-yt has support for reading halo catalogs produced by Rockstar and the inline
+yt has support for reading halo catalogs produced by AHF, Rockstar and the inline
 FOF/SUBFIND halo finders of Gadget and OWLS.  The halo catalogs are treated as
 particle datasets where each particle represents a single halo.  For example,
 this means that the `particle_mass` field refers to the mass of the halos.  For
@@ -1640,6 +1640,49 @@
 :ref:`halo_catalog`.  The resulting product can be reloaded in a similar manner
 to the other halo catalogs shown here.
 
+.. _ahf:
+
+AHF
+^^^
+
+AHF halo catalogs are loaded by providing the path to the .parameter files.
+The corresponding .log and .AHF_halos files must exist for data loading to
+succeed. The field type for all fields is "halos". Some fields of note available
+from AHF are:
+
++----------------+---------------------------+
+| AHF field      | yt field name             |
++================+===========================+
+| ID             | particle_identifier       |
++----------------+---------------------------+
+| Mvir           | particle_mass             |
++----------------+---------------------------+
+| Rvir           | virial_radius             |
++----------------+---------------------------+
+| (X,Y,Z)c       | particle_position_(x,y,z) |
++----------------+---------------------------+
+| V(X,Y,Z)c      | particle_velocity_(x,y,z) |
++----------------+---------------------------+
+
+Numerous other AHF fields exist.  To see them, check the field list by typing
+`ds.field_list` for a dataset loaded as `ds`.  Like all other datasets, fields
+must be accessed through :ref:`Data-objects`.
+
+.. code-block:: python
+
+   import yt
+   ds = yt.load("ahf_halos/snap_N64L16_135.parameter", hubble_constant=0.7)
+   ad = ds.all_data()
+   # halo masses
+   print(ad["halos", "particle_mass"])
+   # halo radii
+   print(ad["halos", "virial_radius"])
+
+.. note::
+
+  Currently the dimensionless Hubble parameter that yt needs is not provided in
+  AHF outputs. So users need to provide the `hubble_constant` (defaults to 1.0) when loading datasets, as shown above.
+
 .. _rockstar:
 
 Rockstar

diff -r 399cd9128a7011779f161a11b03529289c9a2f51 -r 9a899ebd4ed06ce34bdf9caa98ae7e61e45b0259 tests/tests.yaml
--- a/tests/tests.yaml
+++ b/tests/tests.yaml
@@ -35,9 +35,10 @@
   local_gizmo_002:
     - yt/frontends/gizmo/tests/test_outputs.py
 
-  local_halos_001:
+  local_halos_003:
     - yt/analysis_modules/halo_analysis/tests/test_halo_finders.py  # [py2]
     - yt/analysis_modules/halo_finding/tests/test_rockstar.py  # [py2]
+    - yt/frontends/ahf/tests/test_outputs.py
     - yt/frontends/owls_subfind/tests/test_outputs.py
     - yt/frontends/gadget_fof/tests/test_outputs.py:test_fields_g5
     - yt/frontends/gadget_fof/tests/test_outputs.py:test_fields_g42

diff -r 399cd9128a7011779f161a11b03529289c9a2f51 -r 9a899ebd4ed06ce34bdf9caa98ae7e61e45b0259 yt/frontends/ahf/__init__.py
--- /dev/null
+++ b/yt/frontends/ahf/__init__.py
@@ -0,0 +1,14 @@
+"""
+API for yt.frontends.ahf
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2017, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------

diff -r 399cd9128a7011779f161a11b03529289c9a2f51 -r 9a899ebd4ed06ce34bdf9caa98ae7e61e45b0259 yt/frontends/ahf/api.py
--- /dev/null
+++ b/yt/frontends/ahf/api.py
@@ -0,0 +1,23 @@
+"""
+API for yt.frontends.ahf
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2017, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from .data_structures import \
+      AHFHalosDataset
+
+from .fields import \
+      AHFHalosFieldInfo
+
+from .io import \
+      IOHandlerAHFHalos

diff -r 399cd9128a7011779f161a11b03529289c9a2f51 -r 9a899ebd4ed06ce34bdf9caa98ae7e61e45b0259 yt/frontends/ahf/data_structures.py
--- /dev/null
+++ b/yt/frontends/ahf/data_structures.py
@@ -0,0 +1,158 @@
+"""
+AHF data structures
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2017, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import glob
+import os
+import stat
+
+import numpy as np
+
+from yt.data_objects.static_output import \
+    Dataset, \
+    ParticleFile
+from yt.funcs import \
+    setdefaultattr
+from yt.geometry.particle_geometry_handler import \
+    ParticleIndex
+from yt.utilities.cosmology import \
+    Cosmology
+
+from .fields import AHFHalosFieldInfo
+
+
+class AHFHalosFile(ParticleFile):
+    def __init__(self, ds, io, filename, file_id):
+        root, _ = os.path.splitext(filename)
+        candidates = glob.glob(root + '*.AHF_halos')
+        if len(candidates) == 1:
+            filename = candidates[0]
+        else:
+            raise ValueError('Too many AHF_halos files.')
+        self.col_names = self._read_column_names(filename)
+        super(AHFHalosFile, self).__init__(ds, io, filename, file_id)
+
+    def read_data(self, usecols=None):
+        return np.genfromtxt(self.filename, names=self.col_names,
+                             usecols=usecols)
+
+    def _read_column_names(self, filename):
+        with open(filename) as f:
+            line = f.readline()
+            # Remove leading '#'
+            line = line[1:]
+            names = line.split()
+            # Remove trailing '()'
+            names = [name.split('(')[0] for name in names]
+            return names
+
+
+class AHFHalosDataset(Dataset):
+    _index_class = ParticleIndex
+    _file_class = AHFHalosFile
+    _field_info_class = AHFHalosFieldInfo
+
+    def __init__(self, filename, dataset_type='ahf',
+                 n_ref=16, over_refine_factor=1,
+                 units_override=None, unit_system='cgs',
+                 hubble_constant=1.0):
+        root, _ = os.path.splitext(filename)
+        self.log_filename = root + '.log'
+        self.hubble_constant = hubble_constant
+
+        self.n_ref = n_ref
+        self.over_refine_factor = over_refine_factor
+        super(AHFHalosDataset, self).__init__(
+            filename, dataset_type=dataset_type,
+            units_override=units_override, unit_system=unit_system
+        )
+
+    def _set_code_unit_attributes(self):
+        setdefaultattr(self, 'length_unit', self.quan(1.0, 'kpccm/h'))
+        setdefaultattr(self, 'mass_unit', self.quan(1.0, 'Msun/h'))
+        setdefaultattr(self, 'time_unit', self.quan(1.0, 's'))
+        setdefaultattr(self, 'velocity_unit', self.quan(1.0, 'km/s'))
+
+    def _parse_parameter_file(self):
+        # Read all parameters.
+        simu = self._read_log_simu()
+        param = self._read_parameter()
+
+        # Set up general information.
+        self.filename_template = self.parameter_filename
+        self.file_count = 1
+        self.parameters.update(param)
+        self.particle_types = ('halos')
+        self.particle_types_raw = ('halos')
+        self.unique_identifier = \
+            int(os.stat(self.parameter_filename)[stat.ST_CTIME])
+
+        # Set up geometrical information.
+        self.refine_by = 2
+        self.dimensionality = 3
+        nz = 1 << self.over_refine_factor
+        self.domain_dimensions = np.ones(self.dimensionality, "int32") * nz
+        self.domain_left_edge = np.array([0.0, 0.0, 0.0])
+        # Note that boxsize is in Mpc but particle positions are in kpc.
+        self.domain_right_edge = np.array([simu['boxsize']] * 3) * 1000
+        self.periodicity = (True, True, True)
+
+        # Set up cosmological information.
+        self.cosmological_simulation = 1
+        self.current_redshift = param['z']
+        self.omega_lambda = simu['lambda0']
+        self.omega_matter = simu['omega0']
+        cosmo = Cosmology(self.hubble_constant,
+                          self.omega_matter, self.omega_lambda)
+        self.current_time = cosmo.hubble_time(param['z']).in_units('s')
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        filename = args[0]
+        if not filename.endswith('.parameter'):
+            return False
+        with open(filename, 'r') as f:
+            if f.readlines()[11].startswith('AHF'):
+                return True
+        return False
+
+    # Helper methods
+
+    def _read_log_simu(self):
+        simu = {}
+        with open(self.log_filename) as f:
+            for l in f:
+                if l.startswith('simu.'):
+                    name, val = l.split(':')
+                    key = name.strip().split('.')[1]
+                    try:
+                        val = float(val)
+                    except:
+                        val = float.fromhex(val)
+                    simu[key] = val
+        return simu
+
+    def _read_parameter(self):
+        param = {}
+        with open(self.parameter_filename) as f:
+            for l in f:
+                words = l.split()
+                if len(words) == 2:
+                    key, val = words
+                    try:
+                        val = float(val)
+                        param[key] = val
+                    except:
+                        pass
+        return param

diff -r 399cd9128a7011779f161a11b03529289c9a2f51 -r 9a899ebd4ed06ce34bdf9caa98ae7e61e45b0259 yt/frontends/ahf/fields.py
--- /dev/null
+++ b/yt/frontends/ahf/fields.py
@@ -0,0 +1,74 @@
+'''
+AHF-specific fields
+
+
+
+'''
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2017, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.fields.field_info_container import \
+    FieldInfoContainer
+
+m_units = 'Msun/h'
+p_units = 'kpccm/h'
+r_units = 'kpccm/h'
+v_units = 'km/s'
+
+
+class AHFHalosFieldInfo(FieldInfoContainer):
+    known_other_fields = ()
+
+    # See http://popia.ft.uam.es/AHF/files/AHF.pdf
+    # and search for '*.AHF_halos'.
+    known_particle_fields = (
+        ('ID', ('', ['particle_identifier'], None)),
+        ('hostHalo', ('', [], None)),
+        ('numSubStruct', ('', [], None)),
+        ('Mvir', (m_units, ['particle_mass'], 'Virial Mass')),
+        ('npart', ('', [], None)),
+        ('Xc', (p_units, ['particle_position_x'], None)),
+        ('Yc', (p_units, ['particle_position_y'], None)),
+        ('Zc', (p_units, ['particle_position_z'], None)),
+        ('VXc', (v_units, ['particle_velocity_x'], None)),
+        ('VYc', (v_units, ['particle_velocity_y'], None)),
+        ('VZc', (v_units, ['particle_velocity_z'], None)),
+        ('Rvir', (r_units, ['virial_radius'], 'Virial Radius')),
+        ('Rmax', (r_units, [], None)),
+        ('r2', (r_units, [], None)),
+        ('mbp_offset', (r_units, [], None)),
+        ('com_offset', (r_units, [], None)),
+        ('Vmax', (v_units, [], None)),
+        ('v_sec', (v_units, [], None)),
+        ('sigV', (v_units, [], None)),
+        ('lambda', ('', [], None)),
+        ('lambdaE', ('', [], None)),
+        ('Lx', ('', [], None)),
+        ('Ly', ('', [], None)),
+        ('Lz', ('', [], None)),
+        ('b', ('', [], None)),
+        ('c', ('', [], None)),
+        ('Eax', ('', [], None)),
+        ('Eay', ('', [], None)),
+        ('Eaz', ('', [], None)),
+        ('Ebx', ('', [], None)),
+        ('Eby', ('', [], None)),
+        ('Ebz', ('', [], None)),
+        ('Ecx', ('', [], None)),
+        ('Ecy', ('', [], None)),
+        ('Ecz', ('', [], None)),
+        ('ovdens', ('', [], None)),
+        ('nbins', ('', [], None)),
+        ('fMhires', ('', [], None)),
+        ('Ekin', ('Msun/h*(km/s)**2', [], None)),
+        ('Epot', ('Msun/h*(km/s)**2', [], None)),
+        ('SurfP', ('Msun/h*(km/s)**2', [], None)),
+        ('Phi0', ('(km/s)**2', [], None)),
+        ('cNFW', ('', [], None))
+    )

diff -r 399cd9128a7011779f161a11b03529289c9a2f51 -r 9a899ebd4ed06ce34bdf9caa98ae7e61e45b0259 yt/frontends/ahf/io.py
--- /dev/null
+++ b/yt/frontends/ahf/io.py
@@ -0,0 +1,124 @@
+"""
+AHF-specific IO functions
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2017, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from operator import attrgetter
+
+import numpy as np
+
+from yt.funcs import \
+    mylog
+from yt.utilities.exceptions import \
+    YTDomainOverflow
+from yt.utilities.io_handler import \
+    BaseIOHandler
+from yt.utilities.lib.geometry_utils import \
+    compute_morton
+
+
+class IOHandlerAHFHalos(BaseIOHandler):
+    _particle_reader = False
+    _dataset_type = 'ahf'
+
+    def _read_fluid_selection(self, chunks, selector, fields, size):
+        raise NotImplementedError
+
+    def _read_particle_coords(self, chunks, ptf):
+        # This needs to *yield* a series of tuples of (ptype, (x, y, z)).
+        # chunks is a list of chunks, and ptf is a dict where the keys are
+        # ptypes and the values are lists of fields.
+        for data_file in self._get_data_files(chunks, ptf):
+            halos = data_file.read_data(usecols=['Xc', 'Yc', 'Zc'])
+            x = halos['Xc'].astype('float64')
+            y = halos['Yc'].astype('float64')
+            z = halos['Zc'].astype('float64')
+            yield 'halos', (x, y, z)
+
+    def _read_particle_fields(self, chunks, ptf, selector):
+        # This gets called after the arrays have been allocated.  It needs to
+        # yield ((ptype, field), data) where data is the masked results of
+        # reading ptype, field and applying the selector to the data read in.
+        # Selector objects have a .select_points(x,y,z) that returns a mask, so
+        # you need to do your masking here.
+        for data_file in self._get_data_files(chunks, ptf):
+            cols = ['Xc', 'Yc', 'Zc']
+            for field_list in ptf.values():
+                cols.extend(field_list)
+            cols = list(set(cols))
+            halos = data_file.read_data(usecols=cols)
+            x = halos['Xc'].astype('float64')
+            y = halos['Yc'].astype('float64')
+            z = halos['Zc'].astype('float64')
+            mask = selector.select_points(x, y, z, 0.0)
+            del x, y, z
+            if mask is None: continue
+            for ptype, field_list in sorted(ptf.items()):
+                for field in field_list:
+                    data = halos[field][mask].astype('float64')
+                    yield (ptype, field), data
+
+    def _initialize_index(self, data_file, regions):
+        halos = data_file.read_data(usecols=['ID', 'Xc', 'Yc', 'Zc'])
+        pcount = len(halos['ID'])
+        morton = np.empty(pcount, dtype='uint64')
+        mylog.debug('Initializing index % 5i (% 7i particles)',
+                    data_file.file_id, pcount)
+        if pcount == 0:
+            return morton
+        ind = 0
+        pos = np.empty((pcount, 3), dtype='float64')
+        pos = data_file.ds.arr(pos, 'code_length')
+        dx = np.finfo(halos['Xc'].dtype).eps
+        dx = 2.0 * self.ds.quan(dx, 'code_length')
+        pos[:, 0] = halos['Xc']
+        pos[:, 1] = halos['Yc']
+        pos[:, 2] = halos['Zc']
+        dle = self.ds.domain_left_edge
+        dre = self.ds.domain_right_edge
+        # These are 32 bit numbers, so we give a little lee-way.
+        # Otherwise, for big sets of particles, we often will bump into the
+        # domain edges.  This helps alleviate that.
+        np.clip(pos, dle + dx, dre - dx, pos)
+        if np.any(pos.min(axis=0) < dle) or np.any(pos.max(axis=0) > dre):
+            raise YTDomainOverflow(pos.min(axis=0),
+                                   pos.max(axis=0),
+                                   dle, dre)
+        regions.add_data_file(pos, data_file.file_id)
+        morton[ind:ind+pos.shape[0]] = compute_morton(
+            pos[:, 0], pos[:, 1], pos[:, 2], dle, dre)
+        return morton
+
+    def _count_particles(self, data_file):
+        halos = data_file.read_data(usecols=['ID'])
+        return {'halos': len(halos['ID'])}
+
+    def _identify_fields(self, data_file):
+        fields = [('halos', f) for f in data_file.col_names]
+        return fields, {}
+
+    # Helper methods
+
+    def _get_data_files(self, chunks, ptf):
+        # Only support halo reading for now.
+        assert len(ptf) == 1
+        assert list(ptf.keys())[0] == 'halos'
+        # Get data_files
+        chunks = list(chunks)
+        data_files = set([])
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        data_files = sorted(data_files, key=attrgetter('filename'))
+        for data_file in data_files:
+            yield data_file

diff -r 399cd9128a7011779f161a11b03529289c9a2f51 -r 9a899ebd4ed06ce34bdf9caa98ae7e61e45b0259 yt/frontends/ahf/tests/test_outputs.py
--- /dev/null
+++ b/yt/frontends/ahf/tests/test_outputs.py
@@ -0,0 +1,46 @@
+"""
+AHF frontend tests using ahf_halos dataset
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2017, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import os.path
+from yt.testing import \
+    assert_equal, \
+    requires_file
+from yt.utilities.answer_testing.framework import \
+    FieldValuesTest, \
+    requires_ds, \
+    data_dir_load
+from yt.frontends.ahf.api import AHFHalosDataset
+
+_fields = ('particle_position_x', 'particle_position_y',
+           'particle_position_z', 'particle_mass')
+
+ahf_halos = 'ahf_halos/snap_N64L16_135.parameter'
+
+
+def load(filename):
+    return data_dir_load(filename, kwargs={'hubble_constant': 0.7})
+
+
+@requires_ds(ahf_halos)
+def test_fields_ahf_halos():
+    ds = load(ahf_halos)
+    assert_equal(str(ds), os.path.basename(ahf_halos))
+    for field in _fields:
+        yield FieldValuesTest(ahf_halos, field, particle_type=True)
+
+
+@requires_file(ahf_halos)
+def test_AHFHalosDataset():
+    assert isinstance(load(ahf_halos), AHFHalosDataset)

diff -r 399cd9128a7011779f161a11b03529289c9a2f51 -r 9a899ebd4ed06ce34bdf9caa98ae7e61e45b0259 yt/frontends/api.py
--- a/yt/frontends/api.py
+++ b/yt/frontends/api.py
@@ -17,6 +17,7 @@
 from yt.extern.six.moves import cPickle as pickle
 
 _frontends = [
+    'ahf',
     'art',
     'artio',
     'athena',

Repository URL: https://bitbucket.org/yt_analysis/yt/
