[yt-svn] commit/yt: 11 new changesets
commits-noreply at bitbucket.org
Thu Jul 16 10:14:17 PDT 2015
11 new commits in yt:
https://bitbucket.org/yt_analysis/yt/commits/947a95d96df4/
Changeset: 947a95d96df4
Branch: yt
User: brittonsmith
Date: 2015-04-30 15:35:45+00:00
Summary: Adding most of gadget_fof frontend.
Affected #: 12 files
diff -r 868a95fb29713767c66044b9eccbb2e87e706b91 -r 947a95d96df4cf8711c1ecbda301e8bce8b3c796 yt/frontends/api.py
--- a/yt/frontends/api.py
+++ b/yt/frontends/api.py
@@ -27,6 +27,7 @@
'fits',
'flash',
'gadget',
+ 'gadget_fof',
'gdf',
'halo_catalog',
'http_stream',
diff -r 868a95fb29713767c66044b9eccbb2e87e706b91 -r 947a95d96df4cf8711c1ecbda301e8bce8b3c796 yt/frontends/gadget/data_structures.py
--- a/yt/frontends/gadget/data_structures.py
+++ b/yt/frontends/gadget/data_structures.py
@@ -337,7 +337,7 @@
@classmethod
def _is_valid(self, *args, **kwargs):
need_groups = ['Header']
- veto_groups = ['FOF']
+ veto_groups = ['FOF', 'Group', 'Subhalo']
valid = True
try:
fh = h5py.File(args[0], mode='r')
diff -r 868a95fb29713767c66044b9eccbb2e87e706b91 -r 947a95d96df4cf8711c1ecbda301e8bce8b3c796 yt/frontends/gadget_fof/__init__.py
--- /dev/null
+++ b/yt/frontends/gadget_fof/__init__.py
@@ -0,0 +1,15 @@
+"""
+API for GadgetFOF frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
diff -r 868a95fb29713767c66044b9eccbb2e87e706b91 -r 947a95d96df4cf8711c1ecbda301e8bce8b3c796 yt/frontends/gadget_fof/api.py
--- /dev/null
+++ b/yt/frontends/gadget_fof/api.py
@@ -0,0 +1,26 @@
+"""
+API for GadgetFOF frontend
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2015, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from .data_structures import \
+ GadgetFOFDataset
+
+from .io import \
+ IOHandlerGadgetFOFHDF5
+
+from .fields import \
+ GadgetFOFFieldInfo
+
+from . import tests
diff -r 868a95fb29713767c66044b9eccbb2e87e706b91 -r 947a95d96df4cf8711c1ecbda301e8bce8b3c796 yt/frontends/gadget_fof/data_structures.py
--- /dev/null
+++ b/yt/frontends/gadget_fof/data_structures.py
@@ -0,0 +1,243 @@
+"""
+Data structures for GadgetFOF frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from collections import defaultdict
+import h5py
+import numpy as np
+import stat
+import weakref
+import struct
+import glob
+import time
+import os
+
+from .fields import \
+ GadgetFOFFieldInfo
+
+from yt.utilities.cosmology import \
+ Cosmology
+from yt.utilities.definitions import \
+ mpc_conversion, sec_conversion
+from yt.utilities.exceptions import \
+ YTException
+from yt.utilities.logger import ytLogger as \
+ mylog
+from yt.geometry.particle_geometry_handler import \
+ ParticleIndex
+from yt.data_objects.static_output import \
+ Dataset, \
+ ParticleFile
+from yt.frontends.gadget.data_structures import \
+ _fix_unit_ordering
+import yt.utilities.fortran_utils as fpu
+from yt.units.yt_array import \
+ YTArray, \
+ YTQuantity
+
+class GadgetFOFParticleIndex(ParticleIndex):
+ def __init__(self, ds, dataset_type):
+ super(GadgetFOFParticleIndex, self).__init__(ds, dataset_type)
+
+ def _calculate_particle_index_starts(self):
+ # Halo indices are not saved in the file, so we must count by hand.
+ # File 0 has halos 0 to N_0 - 1, file 1 has halos N_0 to N_0 + N_1 - 1, etc.
+ particle_count = defaultdict(int)
+ offset_count = 0
+ for data_file in self.data_files:
+ data_file.index_start = dict([(ptype, particle_count[ptype]) for
+ ptype in data_file.total_particles])
+ data_file.offset_start = offset_count
+ for ptype in data_file.total_particles:
+ particle_count[ptype] += data_file.total_particles[ptype]
+ offset_count += data_file.total_offset
+
+ def _calculate_file_offset_map(self):
+ # After the FOF is performed, a load-balancing step redistributes halos
+ # and then writes more fields. Here, for each file, we create a list of
+ # files which contain the rest of the redistributed particles.
+ ifof = np.array([data_file.total_particles["Group"]
+ for data_file in self.data_files])
+ isub = np.array([data_file.total_offset
+ for data_file in self.data_files])
+ subend = isub.cumsum()
+ fofend = ifof.cumsum()
+ istart = np.digitize(fofend - ifof, subend - isub) - 1
+ iend = np.clip(np.digitize(fofend, subend), 0, ifof.size - 2)
+ for i, data_file in enumerate(self.data_files):
+ data_file.offset_files = self.data_files[istart[i]: iend[i] + 1]
+
+ def _detect_output_fields(self):
+ # TODO: Add additional fields
+ dsl = []
+ units = {}
+ for dom in self.data_files:
+ fl, _units = self.io._identify_fields(dom)
+ units.update(_units)
+ dom._calculate_offsets(fl)
+ for f in fl:
+ if f not in dsl: dsl.append(f)
+ self.field_list = dsl
+ ds = self.dataset
+ ds.particle_types = tuple(set(pt for pt, ds in dsl))
+ # This is an attribute that means these particle types *actually*
+ # exist. As in, they are real, in the dataset.
+ ds.field_units.update(units)
+ ds.particle_types_raw = ds.particle_types
+
+ def _setup_geometry(self):
+ super(GadgetFOFParticleIndex, self)._setup_geometry()
+ self._calculate_particle_index_starts()
+ self._calculate_file_offset_map()
+
+class GadgetFOFHDF5File(ParticleFile):
+ def __init__(self, ds, io, filename, file_id):
+ super(GadgetFOFHDF5File, self).__init__(ds, io, filename, file_id)
+ with h5py.File(filename, "r") as f:
+ self.header = dict((field, f.attrs[field]) \
+ for field in f.attrs.keys())
+
+class GadgetFOFDataset(Dataset):
+ _index_class = GadgetFOFParticleIndex
+ _file_class = GadgetFOFHDF5File
+ _field_info_class = GadgetFOFFieldInfo
+ _suffix = ".hdf5"
+
+ def __init__(self, filename, dataset_type="gadget_fof_hdf5",
+ n_ref=16, over_refine_factor=1,
+ unit_base=None, units_override=None):
+ self.n_ref = n_ref
+ self.over_refine_factor = over_refine_factor
+ if unit_base is not None and "UnitLength_in_cm" in unit_base:
+ # We assume this is comoving, because in the absence of comoving
+ # integration the redshift will be zero.
+ unit_base['cmcm'] = 1.0 / unit_base["UnitLength_in_cm"]
+ self._unit_base = unit_base
+ if units_override is not None:
+ raise RuntimeError("units_override is not supported for GadgetFOFDataset. "+
+ "Use unit_base instead.")
+ super(GadgetFOFDataset, self).__init__(filename, dataset_type,
+ units_override=units_override)
+
+ def _parse_parameter_file(self):
+ handle = h5py.File(self.parameter_filename, mode="r")
+ hvals = {}
+ hvals.update((str(k), v) for k, v in handle["/Header"].attrs.items())
+ hvals["NumFiles"] = hvals["NumFiles"]
+
+ self.dimensionality = 3
+ self.refine_by = 2
+ self.unique_identifier = \
+ int(os.stat(self.parameter_filename)[stat.ST_CTIME])
+
+ # Set standard values
+ self.domain_left_edge = np.zeros(3, "float64")
+ self.domain_right_edge = np.ones(3, "float64") * hvals["BoxSize"]
+ nz = 1 << self.over_refine_factor
+ self.domain_dimensions = np.ones(3, "int32") * nz
+ self.cosmological_simulation = 1
+ self.periodicity = (True, True, True)
+ self.current_redshift = hvals["Redshift"]
+ self.omega_lambda = hvals["OmegaLambda"]
+ self.omega_matter = hvals["Omega0"]
+ self.hubble_constant = hvals["HubbleParam"]
+
+ cosmology = Cosmology(hubble_constant=self.hubble_constant,
+ omega_matter=self.omega_matter,
+ omega_lambda=self.omega_lambda)
+ self.current_time = cosmology.t_from_z(self.current_redshift)
+
+ self.parameters = hvals
+ prefix = os.path.abspath(
+ os.path.join(os.path.dirname(self.parameter_filename),
+ os.path.basename(self.parameter_filename).split(".", 1)[0]))
+
+ suffix = self.parameter_filename.rsplit(".", 1)[-1]
+ self.filename_template = "%s.%%(num)i.%s" % (prefix, suffix)
+ self.file_count = len(glob.glob(prefix + "*" + self._suffix))
+ if self.file_count == 0:
+ raise YTException(message="No data files found.", ds=self)
+ self.particle_types = ("Group", "Subhalo")
+ self.particle_types_raw = ("Group", "Subhalo")
+
+ handle.close()
+
+ def _set_code_unit_attributes(self):
+ # Set a sane default for cosmological simulations.
+ if self._unit_base is None and self.cosmological_simulation == 1:
+ mylog.info("Assuming length units are in Mpc/h (comoving)")
+ self._unit_base = dict(length = (1.0, "Mpccm/h"))
+ # For the other units, fall back to the standard Gadget defaults.
+ unit_base = self._unit_base or {}
+
+ if "length" in unit_base:
+ length_unit = unit_base["length"]
+ elif "UnitLength_in_cm" in unit_base:
+ if self.cosmological_simulation == 0:
+ length_unit = (unit_base["UnitLength_in_cm"], "cm")
+ else:
+ length_unit = (unit_base["UnitLength_in_cm"], "cmcm/h")
+ else:
+ raise RuntimeError
+ length_unit = _fix_unit_ordering(length_unit)
+ self.length_unit = self.quan(length_unit[0], length_unit[1])
+
+ if "velocity" in unit_base:
+ velocity_unit = unit_base["velocity"]
+ elif "UnitVelocity_in_cm_per_s" in unit_base:
+ velocity_unit = (unit_base["UnitVelocity_in_cm_per_s"], "cm/s")
+ else:
+ velocity_unit = (1e5, "cm/s")
+ velocity_unit = _fix_unit_ordering(velocity_unit)
+ self.velocity_unit = self.quan(velocity_unit[0], velocity_unit[1])
+
+ # We set hubble_constant = 1.0 for non-cosmology, so this is safe.
+ # Default to 1e10 Msun/h if mass is not specified.
+ if "mass" in unit_base:
+ mass_unit = unit_base["mass"]
+ elif "UnitMass_in_g" in unit_base:
+ if self.cosmological_simulation == 0:
+ mass_unit = (unit_base["UnitMass_in_g"], "g")
+ else:
+ mass_unit = (unit_base["UnitMass_in_g"], "g/h")
+ else:
+ # Sane default
+ mass_unit = (1.0, "1e10*Msun/h")
+ mass_unit = _fix_unit_ordering(mass_unit)
+ self.mass_unit = self.quan(mass_unit[0], mass_unit[1])
+
+ if "time" in unit_base:
+ time_unit = unit_base["time"]
+ elif "UnitTime_in_s" in unit_base:
+ time_unit = (unit_base["UnitTime_in_s"], "s")
+ else:
+ time_unit = (1., "s")
+ self.time_unit = self.quan(time_unit[0], time_unit[1])
+
+ @classmethod
+ def _is_valid(self, *args, **kwargs):
+ need_groups = ['Group', 'Header', 'Subhalo']
+ veto_groups = ['FOF']
+ valid = True
+ try:
+ fh = h5py.File(args[0], mode='r')
+ valid = all(ng in fh["/"] for ng in need_groups) and \
+ not any(vg in fh["/"] for vg in veto_groups)
+ fh.close()
+ except:
+ valid = False
+ pass
+ return valid
diff -r 868a95fb29713767c66044b9eccbb2e87e706b91 -r 947a95d96df4cf8711c1ecbda301e8bce8b3c796 yt/frontends/gadget_fof/fields.py
--- /dev/null
+++ b/yt/frontends/gadget_fof/fields.py
@@ -0,0 +1,40 @@
+"""
+GadgetFOF-specific fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2015, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.funcs import mylog
+from yt.fields.field_info_container import \
+ FieldInfoContainer
+from yt.units.yt_array import \
+ YTArray
+
+m_units = "code_mass"
+mdot_units = "code_mass / code_time"
+p_units = "Mpccm/h"
+v_units = "1e5 * cmcm / s"
+
+class GadgetFOFFieldInfo(FieldInfoContainer):
+ known_other_fields = (
+ )
+
+ known_particle_fields = (
+ ("Pos_0", (p_units, ["particle_position_x"], None)),
+ ("Pos_1", (p_units, ["particle_position_y"], None)),
+ ("Pos_2", (p_units, ["particle_position_z"], None)),
+ ("Vel_0", (v_units, ["particle_velocity_x"], None)),
+ ("Vel_1", (v_units, ["particle_velocity_y"], None)),
+ ("Vel_2", (v_units, ["particle_velocity_z"], None)),
+ ("Mass", (m_units, ["particle_mass"], None)),
+)
diff -r 868a95fb29713767c66044b9eccbb2e87e706b91 -r 947a95d96df4cf8711c1ecbda301e8bce8b3c796 yt/frontends/gadget_fof/io.py
--- /dev/null
+++ b/yt/frontends/gadget_fof/io.py
@@ -0,0 +1,210 @@
+"""
+GadgetFOF data-file handling function
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+import numpy as np
+
+from yt.utilities.exceptions import *
+from yt.funcs import mylog
+
+from yt.utilities.io_handler import \
+ BaseIOHandler
+
+from yt.utilities.lib.geometry_utils import compute_morton
+
+class IOHandlerGadgetFOFHDF5(BaseIOHandler):
+ _dataset_type = "gadget_fof_hdf5"
+
+ def __init__(self, ds):
+ super(IOHandlerGadgetFOFHDF5, self).__init__(ds)
+ self.offset_fields = set([])
+
+ def _read_fluid_selection(self, chunks, selector, fields, size):
+ raise NotImplementedError
+
+ def _read_particle_coords(self, chunks, ptf):
+ # This will read chunks and yield the results.
+ chunks = list(chunks)
+ data_files = set([])
+ for chunk in chunks:
+ for obj in chunk.objs:
+ data_files.update(obj.data_files)
+ for data_file in sorted(data_files):
+ with h5py.File(data_file.filename, "r") as f:
+ for ptype, field_list in sorted(ptf.items()):
+ pcount = data_file.total_particles[ptype]
+ coords = f[ptype]["%sPos" % ptype].value.astype("float64")
+ coords = np.resize(coords, (pcount, 3))
+ x = coords[:, 0]
+ y = coords[:, 1]
+ z = coords[:, 2]
+ yield ptype, (x, y, z)
+
+ def _read_offset_particle_field(self, field, data_file, fh):
+ field_data = np.empty(data_file.total_particles["Group"], dtype="float64")
+ fofindex = np.arange(data_file.total_particles["Group"]) + data_file.index_start["FOF"]
+ for offset_file in data_file.offset_files:
+ if fh.filename == offset_file.filename:
+ ofh = fh
+ else:
+ ofh = h5py.File(offset_file.filename, "r")
+ subindex = np.arange(offset_file.total_offset) + offset_file.offset_start
+ substart = max(fofindex[0] - subindex[0], 0)
+ subend = min(fofindex[-1] - subindex[0], subindex.size - 1)
+ fofstart = substart + subindex[0] - fofindex[0]
+ fofend = subend + subindex[0] - fofindex[0]
+ field_data[fofstart:fofend + 1] = ofh["Subhalo"][field][substart:subend + 1]
+ return field_data
+
+ def _read_particle_fields(self, chunks, ptf, selector):
+ # Now we have all the sizes, and we can allocate
+ chunks = list(chunks)
+ data_files = set([])
+ for chunk in chunks:
+ for obj in chunk.objs:
+ data_files.update(obj.data_files)
+ for data_file in sorted(data_files):
+ with h5py.File(data_file.filename, "r") as f:
+ for ptype, field_list in sorted(ptf.items()):
+ pcount = data_file.total_particles[ptype]
+ if pcount == 0: continue
+ coords = f[ptype]["%sPos" % ptype].value.astype("float64")
+ coords = np.resize(coords, (pcount, 3))
+ x = coords[:, 0]
+ y = coords[:, 1]
+ z = coords[:, 2]
+ mask = selector.select_points(x, y, z, 0.0)
+ del x, y, z
+ if mask is None: continue
+ for field in field_list:
+ if field in self.offset_fields:
+ field_data = \
+ self._read_offset_particle_field(field, data_file, f)
+ else:
+ if field == "particle_identifier":
+ field_data = \
+ np.arange(data_file.total_particles[ptype]) + \
+ data_file.index_start[ptype]
+ elif field in f[ptype]:
+ field_data = f[ptype][field].value.astype("float64")
+ else:
+ fname = field[:field.rfind("_")]
+ field_data = f[ptype][fname].value.astype("float64")
+ my_div = field_data.size / pcount
+ if my_div > 1:
+ field_data = np.resize(field_data, (pcount, my_div))
+ findex = int(field[field.rfind("_") + 1:])
+ field_data = field_data[:, findex]
+ data = field_data[mask]
+ yield (ptype, field), data
+
+ def _initialize_index(self, data_file, regions):
+ pcount = sum(data_file.total_particles.values())
+ morton = np.empty(pcount, dtype='uint64')
+ if pcount == 0: return morton
+ mylog.debug("Initializing index % 5i (% 7i particles)",
+ data_file.file_id, pcount)
+ ind = 0
+ with h5py.File(data_file.filename, "r") as f:
+ if not f.keys(): return None
+ dx = np.finfo(f["Group"]["GroupPos"].dtype).eps
+ dx = 2.0*self.ds.quan(dx, "code_length")
+
+ for ptype in data_file.ds.particle_types_raw:
+ if data_file.total_particles[ptype] == 0: continue
+ pos = f[ptype]["%sPos" % ptype].value.astype("float64")
+ pos = np.resize(pos, (data_file.total_particles[ptype], 3))
+ pos = data_file.ds.arr(pos, "code_length")
+
+ # These are 32 bit numbers, so we give a little leeway.
+ # Otherwise, for big sets of particles, we often will bump into the
+ # domain edges. This helps alleviate that.
+ np.clip(pos, self.ds.domain_left_edge + dx,
+ self.ds.domain_right_edge - dx, pos)
+ if np.any(pos.min(axis=0) < self.ds.domain_left_edge) or \
+ np.any(pos.max(axis=0) > self.ds.domain_right_edge):
+ raise YTDomainOverflow(pos.min(axis=0),
+ pos.max(axis=0),
+ self.ds.domain_left_edge,
+ self.ds.domain_right_edge)
+ regions.add_data_file(pos, data_file.file_id)
+ morton[ind:ind+pos.shape[0]] = compute_morton(
+ pos[:,0], pos[:,1], pos[:,2],
+ data_file.ds.domain_left_edge,
+ data_file.ds.domain_right_edge)
+ ind += pos.shape[0]
+ return morton
+
+ def _count_particles(self, data_file):
+ with h5py.File(data_file.filename, "r") as f:
+ pcount = {"Group": f["Header"].attrs["Ngroups_ThisFile"],
+ "Subhalo": f["Header"].attrs["Nsubgroups_ThisFile"]}
+ data_file.total_offset = 0 # need to figure out how subfind works here
+ return pcount
+
+ def _identify_fields(self, data_file):
+ fields = []
+ pcount = data_file.total_particles
+ if sum(pcount.values()) == 0: return fields, {}
+ with h5py.File(data_file.filename, "r") as f:
+ for ptype in self.ds.particle_types_raw:
+ if data_file.total_particles[ptype] == 0: continue
+ fields.append((ptype, "particle_identifier"))
+ # my_fields, my_offset_fields = \
+ # subfind_field_list(f[ptype], ptype, data_file.total_particles)
+ # fields.extend(my_fields)
+ # self.offset_fields = self.offset_fields.union(set(my_offset_fields))
+ return fields, {}
+
+def subfind_field_list(fh, ptype, pcount):
+ fields = []
+ offset_fields = []
+ for field in fh.keys():
+ if "PartType" in field:
+ # These are halo member particles
+ continue
+ elif isinstance(fh[field], h5py.Group):
+ my_fields, my_offset_fields = \
+ subfind_field_list(fh[field], ptype, pcount)
+ fields.extend(my_fields)
+ offset_fields.extend(my_offset_fields)
+ else:
+ if not fh[field].size % pcount[ptype]:
+ my_div = fh[field].size / pcount[ptype]
+ fname = fh[field].name[fh[field].name.find(ptype) + len(ptype) + 1:]
+ if my_div > 1:
+ for i in range(my_div):
+ fields.append((ptype, "%s_%d" % (fname, i)))
+ else:
+ fields.append((ptype, fname))
+ elif ptype == "SUBFIND" and \
+ not fh[field].size % fh["/SUBFIND"].attrs["Number_of_groups"]:
+ # These are actually FOF fields, but they were written after
+ # a load balancing step moved halos around and thus they do not
+ # correspond to the halos stored in the FOF group.
+ my_div = fh[field].size / fh["/SUBFIND"].attrs["Number_of_groups"]
+ fname = fh[field].name[fh[field].name.find(ptype) + len(ptype) + 1:]
+ if my_div > 1:
+ for i in range(my_div):
+ fields.append(("FOF", "%s_%d" % (fname, i)))
+ else:
+ fields.append(("FOF", fname))
+ offset_fields.append(fname)
+ else:
+ mylog.warn("Cannot add field (%s, %s) with size %d." % \
+ (ptype, fh[field].name, fh[field].size))
+ continue
+ return fields, offset_fields
diff -r 868a95fb29713767c66044b9eccbb2e87e706b91 -r 947a95d96df4cf8711c1ecbda301e8bce8b3c796 yt/frontends/gadget_fof/setup.py
--- /dev/null
+++ b/yt/frontends/gadget_fof/setup.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+import setuptools
+import os
+import sys
+import os.path
+
+
+def configuration(parent_package='', top_path=None):
+ from numpy.distutils.misc_util import Configuration
+ config = Configuration('gadget_fof', parent_package, top_path)
+ config.make_config_py() # installs __config__.py
+ #config.make_svn_version_py()
+ return config
diff -r 868a95fb29713767c66044b9eccbb2e87e706b91 -r 947a95d96df4cf8711c1ecbda301e8bce8b3c796 yt/frontends/gadget_fof/tests/test_outputs.py
--- /dev/null
+++ b/yt/frontends/gadget_fof/tests/test_outputs.py
@@ -0,0 +1,50 @@
+"""
+GadgetFOF frontend tests using owls_fof_halos datasets
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import os.path
+from yt.testing import \
+ assert_equal, \
+ requires_file
+from yt.utilities.answer_testing.framework import \
+ FieldValuesTest, \
+ requires_ds, \
+ data_dir_load
+from yt.frontends.gadget_fof.api import GadgetFOFDataset
+
+_fields = ("particle_position_x", "particle_position_y",
+ "particle_position_z", "particle_mass")
+
+# a dataset with empty files
+g1 = "" # TBD
+g8 = "" # TBD
+
+
+@requires_ds(g8)
+def test_fields_g8():
+ ds = data_dir_load(g8)
+ yield assert_equal, str(ds), os.path.basename(g8)
+ for field in _fields:
+ yield FieldValuesTest(g8, field, particle_type=True)
+
+
+@requires_ds(g1)
+def test_fields_g1():
+ ds = data_dir_load(g1)
+ yield assert_equal, str(ds), os.path.basename(g1)
+ for field in _fields:
+ yield FieldValuesTest(g1, field, particle_type=True)
+
+@requires_file(g1)
+def test_GadgetFOFDataset():
+ assert isinstance(data_dir_load(g1), GadgetFOFDataset)
diff -r 868a95fb29713767c66044b9eccbb2e87e706b91 -r 947a95d96df4cf8711c1ecbda301e8bce8b3c796 yt/frontends/owls_subfind/data_structures.py
--- a/yt/frontends/owls_subfind/data_structures.py
+++ b/yt/frontends/owls_subfind/data_structures.py
@@ -27,11 +27,14 @@
from .fields import \
OWLSSubfindFieldInfo
-from yt.utilities.cosmology import Cosmology
+from yt.utilities.cosmology import \
+ Cosmology
from yt.utilities.definitions import \
mpc_conversion, sec_conversion
from yt.utilities.exceptions import \
- YTException
+ YTException
+from yt.utilities.logger import ytLogger as \
+ mylog
from yt.geometry.particle_geometry_handler import \
ParticleIndex
from yt.data_objects.static_output import \
@@ -170,6 +173,7 @@
# The other same defaults we will use from the standard Gadget
# defaults.
unit_base = self._unit_base or {}
+
if "length" in unit_base:
length_unit = unit_base["length"]
elif "UnitLength_in_cm" in unit_base:
@@ -182,7 +186,6 @@
length_unit = _fix_unit_ordering(length_unit)
self.length_unit = self.quan(length_unit[0], length_unit[1])
- unit_base = self._unit_base or {}
if "velocity" in unit_base:
velocity_unit = unit_base["velocity"]
elif "UnitVelocity_in_cm_per_s" in unit_base:
@@ -191,6 +194,7 @@
velocity_unit = (1e5, "cm/s")
velocity_unit = _fix_unit_ordering(velocity_unit)
self.velocity_unit = self.quan(velocity_unit[0], velocity_unit[1])
+
# We set hubble_constant = 1.0 for non-cosmology, so this is safe.
# Default to 1e10 Msun/h if mass is not specified.
if "mass" in unit_base:
@@ -205,7 +209,14 @@
mass_unit = (1.0, "1e10*Msun/h")
mass_unit = _fix_unit_ordering(mass_unit)
self.mass_unit = self.quan(mass_unit[0], mass_unit[1])
- self.time_unit = self.quan(unit_base["UnitTime_in_s"], "s")
+
+ if "time" in unit_base:
+ time_unit = unit_base["time"]
+ elif "UnitTime_in_s" in unit_base:
+ time_unit = (unit_base["UnitTime_in_s"], "s")
+ else:
+ time_unit = (1., "s")
+ self.time_unit = self.quan(time_unit[0], time_unit[1])
@classmethod
def _is_valid(self, *args, **kwargs):
diff -r 868a95fb29713767c66044b9eccbb2e87e706b91 -r 947a95d96df4cf8711c1ecbda301e8bce8b3c796 yt/frontends/setup.py
--- a/yt/frontends/setup.py
+++ b/yt/frontends/setup.py
@@ -17,6 +17,7 @@
config.add_subpackage("fits")
config.add_subpackage("flash")
config.add_subpackage("gadget")
+ config.add_subpackage("gadget_fof")
config.add_subpackage("gdf")
config.add_subpackage("halo_catalog")
config.add_subpackage("http_stream")
@@ -34,13 +35,17 @@
config.add_subpackage("athena/tests")
config.add_subpackage("boxlib/tests")
config.add_subpackage("chombo/tests")
+ config.add_subpackage("eagle/tests")
config.add_subpackage("enzo/tests")
config.add_subpackage("fits/tests")
config.add_subpackage("flash/tests")
+ config.add_subpackage("gadget/tests")
+ config.add_subpackage("gadget_fof/tests")
config.add_subpackage("moab/tests")
config.add_subpackage("owls/tests")
config.add_subpackage("owls_subfind/tests")
config.add_subpackage("ramses/tests")
+ config.add_subpackage("rockstar/tests")
config.add_subpackage("stream/tests")
config.add_subpackage("tipsy/tests")
return config
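
For context, the frontend added in this changeset is reached through the
standard yt entry point once the subpackage is registered above. A minimal
usage sketch, assuming a hypothetical catalog path (the real test dataset
names in test_outputs.py are still TBD):

import yt

# Hypothetical FOF/SUBFIND catalog path; any HDF5 halo catalog carrying
# "Group", "Header", and "Subhalo" groups should match _is_valid above.
ds = yt.load("groups_042/fof_subhalo_tab_042.0.hdf5")

ad = ds.all_data()
# "Group" and "Subhalo" are the particle types registered in
# _parse_parameter_file; fields.py aliases Pos_0/1/2 and Mass onto the
# usual particle_* names.
print(ad["Group", "particle_mass"])
print(ad["Group", "particle_position_x"])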
https://bitbucket.org/yt_analysis/yt/commits/8e164fa170d2/
Changeset: 8e164fa170d2
Branch: yt
User: brittonsmith
Date: 2015-04-30 15:51:24+00:00
Summary: Fixing some units and enabling field detection.
Affected #: 3 files
diff -r 947a95d96df4cf8711c1ecbda301e8bce8b3c796 -r 8e164fa170d237d9d80ad7aa36dffbe27954eab9 yt/frontends/gadget_fof/data_structures.py
--- a/yt/frontends/gadget_fof/data_structures.py
+++ b/yt/frontends/gadget_fof/data_structures.py
@@ -200,7 +200,10 @@
elif "UnitVelocity_in_cm_per_s" in unit_base:
velocity_unit = (unit_base["UnitVelocity_in_cm_per_s"], "cm/s")
else:
- velocity_unit = (1e5, "cm/s")
+ if self.cosmological_simulation == 0:
+ velocity_unit = (1e5, "cm/s")
+ else:
+ velocity_unit = (1e5, "cmcm/s")
velocity_unit = _fix_unit_ordering(velocity_unit)
self.velocity_unit = self.quan(velocity_unit[0], velocity_unit[1])
diff -r 947a95d96df4cf8711c1ecbda301e8bce8b3c796 -r 8e164fa170d237d9d80ad7aa36dffbe27954eab9 yt/frontends/gadget_fof/fields.py
--- a/yt/frontends/gadget_fof/fields.py
+++ b/yt/frontends/gadget_fof/fields.py
@@ -21,20 +21,20 @@
YTArray
m_units = "code_mass"
-mdot_units = "code_mass / code_time"
-p_units = "Mpccm/h"
-v_units = "1e5 * cmcm / s"
+p_units = "code_length"
+v_units = "code_velocity"
class GadgetFOFFieldInfo(FieldInfoContainer):
known_other_fields = (
)
known_particle_fields = (
- ("Pos_0", (p_units, ["particle_position_x"], None)),
- ("Pos_1", (p_units, ["particle_position_y"], None)),
- ("Pos_2", (p_units, ["particle_position_z"], None)),
- ("Vel_0", (v_units, ["particle_velocity_x"], None)),
- ("Vel_1", (v_units, ["particle_velocity_y"], None)),
- ("Vel_2", (v_units, ["particle_velocity_z"], None)),
- ("Mass", (m_units, ["particle_mass"], None)),
+ ("GroupPos_0", (p_units, ["particle_position_x"], None)),
+ ("GroupPos_1", (p_units, ["particle_position_y"], None)),
+ ("GroupPos_2", (p_units, ["particle_position_z"], None)),
+ ("GroupVel_0", (v_units, ["particle_velocity_x"], None)),
+ ("GroupVel_1", (v_units, ["particle_velocity_y"], None)),
+ ("GroupVel_2", (v_units, ["particle_velocity_z"], None)),
+ ("GroupMass", (m_units, ["particle_mass"], None)),
+ ("GroupLen", ("", ["particle_number"], None)),
)
diff -r 947a95d96df4cf8711c1ecbda301e8bce8b3c796 -r 8e164fa170d237d9d80ad7aa36dffbe27954eab9 yt/frontends/gadget_fof/io.py
--- a/yt/frontends/gadget_fof/io.py
+++ b/yt/frontends/gadget_fof/io.py
@@ -163,9 +163,9 @@
for ptype in self.ds.particle_types_raw:
if data_file.total_particles[ptype] == 0: continue
fields.append((ptype, "particle_identifier"))
- # my_fields, my_offset_fields = \
- # subfind_field_list(f[ptype], ptype, data_file.total_particles)
- # fields.extend(my_fields)
+ my_fields, my_offset_fields = \
+ subfind_field_list(f[ptype], ptype, data_file.total_particles)
+ fields.extend(my_fields)
# self.offset_fields = self.offset_fields.union(set(my_offset_fields))
return fields, {}
@@ -173,10 +173,7 @@
fields = []
offset_fields = []
for field in fh.keys():
- if "PartType" in field:
- # These are halo member particles
- continue
- elif isinstance(fh[field], h5py.Group):
+ if isinstance(fh[field], h5py.Group):
my_fields, my_offset_fields = \
subfind_field_list(fh[field], ptype, pcount)
fields.extend(my_fields)
@@ -190,19 +187,19 @@
fields.append((ptype, "%s_%d" % (fname, i)))
else:
fields.append((ptype, fname))
- elif ptype == "SUBFIND" and \
- not fh[field].size % fh["/SUBFIND"].attrs["Number_of_groups"]:
- # These are actually FOF fields, but they were written after
- # a load balancing step moved halos around and thus they do not
- # correspond to the halos stored in the FOF group.
- my_div = fh[field].size / fh["/SUBFIND"].attrs["Number_of_groups"]
- fname = fh[field].name[fh[field].name.find(ptype) + len(ptype) + 1:]
- if my_div > 1:
- for i in range(my_div):
- fields.append(("FOF", "%s_%d" % (fname, i)))
- else:
- fields.append(("FOF", fname))
- offset_fields.append(fname)
+ # elif ptype == "SUBFIND" and \
+ # not fh[field].size % fh["/SUBFIND"].attrs["Number_of_groups"]:
+ # # These are actually FOF fields, but they were written after
+ # # a load balancing step moved halos around and thus they do not
+ # # correspond to the halos stored in the FOF group.
+ # my_div = fh[field].size / fh["/SUBFIND"].attrs["Number_of_groups"]
+ # fname = fh[field].name[fh[field].name.find(ptype) + len(ptype) + 1:]
+ # if my_div > 1:
+ # for i in range(my_div):
+ # fields.append(("FOF", "%s_%d" % (fname, i)))
+ # else:
+ # fields.append(("FOF", fname))
+ # offset_fields.append(fname)
else:
mylog.warn("Cannot add field (%s, %s) with size %d." % \
(ptype, fh[field].name, fh[field].size))
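
The field detection enabled here leans on subfind_field_list splitting any
HDF5 dataset whose size is a multiple of the halo count into per-component
fields. A rough sketch of that convention, using a hypothetical file name
and with the (Python 2) integer division made explicit:

import h5py

# Hypothetical catalog file; "Ngroups_ThisFile" and "GroupPos" follow the
# header attribute and dataset names read elsewhere in this frontend.
with h5py.File("fof_subhalo_tab_042.0.hdf5", "r") as f:
    pcount = f["Header"].attrs["Ngroups_ThisFile"]
    dset = f["Group"]["GroupPos"]
    my_div = dset.size // pcount  # 3 for an (N, 3) position dataset
    if my_div > 1:
        # An (N, 3) dataset yields GroupPos_0/1/2, which fields.py then
        # aliases to particle_position_x/y/z.
        names = ["GroupPos_%d" % i for i in range(my_div)]
    else:
        names = ["GroupPos"]
    print(names)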
https://bitbucket.org/yt_analysis/yt/commits/4c7da6c8012d/
Changeset: 4c7da6c8012d
Branch: yt
User: brittonsmith
Date: 2015-04-30 15:55:00+00:00
Summary: Leaving note in case we need to implement offset fields. That was hard enough the first time.
Affected #: 1 file
diff -r 8e164fa170d237d9d80ad7aa36dffbe27954eab9 -r 4c7da6c8012d67371386a01991a549ad5ad4f072 yt/frontends/gadget_fof/io.py
--- a/yt/frontends/gadget_fof/io.py
+++ b/yt/frontends/gadget_fof/io.py
@@ -166,7 +166,7 @@
my_fields, my_offset_fields = \
subfind_field_list(f[ptype], ptype, data_file.total_particles)
fields.extend(my_fields)
- # self.offset_fields = self.offset_fields.union(set(my_offset_fields))
+ self.offset_fields = self.offset_fields.union(set(my_offset_fields))
return fields, {}
def subfind_field_list(fh, ptype, pcount):
@@ -187,6 +187,8 @@
fields.append((ptype, "%s_%d" % (fname, i)))
else:
fields.append((ptype, fname))
+ ### Leave this block of code in case we need to do this.
+ ### This will have to wait until I get a dataset with subhalos.
# elif ptype == "SUBFIND" and \
# not fh[field].size % fh["/SUBFIND"].attrs["Number_of_groups"]:
# # These are actually FOF fields, but they were written after
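
Since the offset-field bookkeeping is the subtle part being preserved here,
a toy illustration of the index arithmetic in _calculate_file_offset_map,
with made-up per-file counts:

import numpy as np

# Hypothetical counts: FOF groups written per file, and load-balanced
# (offset) subhalo entries per file.
ifof = np.array([4, 3, 2])
isub = np.array([2, 5, 2])

subend = isub.cumsum()
fofend = ifof.cumsum()
# Same arithmetic as _calculate_file_offset_map: for each file's range of
# global group indices, locate the files holding its offset fields.
istart = np.digitize(fofend - ifof, subend - isub) - 1
iend = np.clip(np.digitize(fofend, subend), 0, ifof.size - 2)
for i in range(ifof.size):
    print("file %d reads offset files %d..%d" % (i, istart[i], iend[i]))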
https://bitbucket.org/yt_analysis/yt/commits/2d7db3e1f6c4/
Changeset: 2d7db3e1f6c4
Branch: yt
User: brittonsmith
Date: 2015-05-04 18:34:16+00:00
Summary: Merging.
Affected #: 12 files
diff -r 61c3f8b403719087ab0a31f77b2117ead47bbfa0 -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 yt/frontends/api.py
--- a/yt/frontends/api.py
+++ b/yt/frontends/api.py
@@ -27,6 +27,7 @@
'fits',
'flash',
'gadget',
+ 'gadget_fof',
'gdf',
'halo_catalog',
'http_stream',
diff -r 61c3f8b403719087ab0a31f77b2117ead47bbfa0 -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 yt/frontends/gadget/data_structures.py
--- a/yt/frontends/gadget/data_structures.py
+++ b/yt/frontends/gadget/data_structures.py
@@ -337,7 +337,7 @@
@classmethod
def _is_valid(self, *args, **kwargs):
need_groups = ['Header']
- veto_groups = ['FOF']
+ veto_groups = ['FOF', 'Group', 'Subhalo']
valid = True
try:
fh = h5py.File(args[0], mode='r')
diff -r 61c3f8b403719087ab0a31f77b2117ead47bbfa0 -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 yt/frontends/gadget_fof/__init__.py
--- /dev/null
+++ b/yt/frontends/gadget_fof/__init__.py
@@ -0,0 +1,15 @@
+"""
+API for GadgetFOF frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
diff -r 61c3f8b403719087ab0a31f77b2117ead47bbfa0 -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 yt/frontends/gadget_fof/api.py
--- /dev/null
+++ b/yt/frontends/gadget_fof/api.py
@@ -0,0 +1,26 @@
+"""
+API for GadgetFOF frontend
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2015, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from .data_structures import \
+ GadgetFOFDataset
+
+from .io import \
+ IOHandlerGadgetFOFHDF5
+
+from .fields import \
+ GadgetFOFFieldInfo
+
+from . import tests
diff -r 61c3f8b403719087ab0a31f77b2117ead47bbfa0 -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 yt/frontends/gadget_fof/data_structures.py
--- /dev/null
+++ b/yt/frontends/gadget_fof/data_structures.py
@@ -0,0 +1,246 @@
+"""
+Data structures for GadgetFOF frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from collections import defaultdict
+import h5py
+import numpy as np
+import stat
+import weakref
+import struct
+import glob
+import time
+import os
+
+from .fields import \
+ GadgetFOFFieldInfo
+
+from yt.utilities.cosmology import \
+ Cosmology
+from yt.utilities.definitions import \
+ mpc_conversion, sec_conversion
+from yt.utilities.exceptions import \
+ YTException
+from yt.utilities.logger import ytLogger as \
+ mylog
+from yt.geometry.particle_geometry_handler import \
+ ParticleIndex
+from yt.data_objects.static_output import \
+ Dataset, \
+ ParticleFile
+from yt.frontends.gadget.data_structures import \
+ _fix_unit_ordering
+import yt.utilities.fortran_utils as fpu
+from yt.units.yt_array import \
+ YTArray, \
+ YTQuantity
+
+class GadgetFOFParticleIndex(ParticleIndex):
+ def __init__(self, ds, dataset_type):
+ super(GadgetFOFParticleIndex, self).__init__(ds, dataset_type)
+
+ def _calculate_particle_index_starts(self):
+ # Halo indices are not saved in the file, so we must count by hand.
+ # File 0 has halos 0 to N_0 - 1, file 1 has halos N_0 to N_0 + N_1 - 1, etc.
+ particle_count = defaultdict(int)
+ offset_count = 0
+ for data_file in self.data_files:
+ data_file.index_start = dict([(ptype, particle_count[ptype]) for
+ ptype in data_file.total_particles])
+ data_file.offset_start = offset_count
+ for ptype in data_file.total_particles:
+ particle_count[ptype] += data_file.total_particles[ptype]
+ offset_count += data_file.total_offset
+
+ def _calculate_file_offset_map(self):
+ # After the FOF is performed, a load-balancing step redistributes halos
+ # and then writes more fields. Here, for each file, we create a list of
+ # files which contain the rest of the redistributed particles.
+ ifof = np.array([data_file.total_particles["Group"]
+ for data_file in self.data_files])
+ isub = np.array([data_file.total_offset
+ for data_file in self.data_files])
+ subend = isub.cumsum()
+ fofend = ifof.cumsum()
+ istart = np.digitize(fofend - ifof, subend - isub) - 1
+ iend = np.clip(np.digitize(fofend, subend), 0, ifof.size - 2)
+ for i, data_file in enumerate(self.data_files):
+ data_file.offset_files = self.data_files[istart[i]: iend[i] + 1]
+
+ def _detect_output_fields(self):
+ # TODO: Add additional fields
+ dsl = []
+ units = {}
+ for dom in self.data_files:
+ fl, _units = self.io._identify_fields(dom)
+ units.update(_units)
+ dom._calculate_offsets(fl)
+ for f in fl:
+ if f not in dsl: dsl.append(f)
+ self.field_list = dsl
+ ds = self.dataset
+ ds.particle_types = tuple(set(pt for pt, ds in dsl))
+ # This is an attribute that means these particle types *actually*
+ # exist. As in, they are real, in the dataset.
+ ds.field_units.update(units)
+ ds.particle_types_raw = ds.particle_types
+
+ def _setup_geometry(self):
+ super(GadgetFOFParticleIndex, self)._setup_geometry()
+ self._calculate_particle_index_starts()
+ self._calculate_file_offset_map()
+
+class GadgetFOFHDF5File(ParticleFile):
+ def __init__(self, ds, io, filename, file_id):
+ super(GadgetFOFHDF5File, self).__init__(ds, io, filename, file_id)
+ with h5py.File(filename, "r") as f:
+ self.header = dict((field, f.attrs[field]) \
+ for field in f.attrs.keys())
+
+class GadgetFOFDataset(Dataset):
+ _index_class = GadgetFOFParticleIndex
+ _file_class = GadgetFOFHDF5File
+ _field_info_class = GadgetFOFFieldInfo
+ _suffix = ".hdf5"
+
+ def __init__(self, filename, dataset_type="gadget_fof_hdf5",
+ n_ref=16, over_refine_factor=1,
+ unit_base=None, units_override=None):
+ self.n_ref = n_ref
+ self.over_refine_factor = over_refine_factor
+ if unit_base is not None and "UnitLength_in_cm" in unit_base:
+ # We assume this is comoving, because in the absence of comoving
+ # integration the redshift will be zero.
+ unit_base['cmcm'] = 1.0 / unit_base["UnitLength_in_cm"]
+ self._unit_base = unit_base
+ if units_override is not None:
+ raise RuntimeError("units_override is not supported for GadgetFOFDataset. "+
+ "Use unit_base instead.")
+ super(GadgetFOFDataset, self).__init__(filename, dataset_type,
+ units_override=units_override)
+
+ def _parse_parameter_file(self):
+ handle = h5py.File(self.parameter_filename, mode="r")
+ hvals = {}
+ hvals.update((str(k), v) for k, v in handle["/Header"].attrs.items())
+ hvals["NumFiles"] = hvals["NumFiles"]
+
+ self.dimensionality = 3
+ self.refine_by = 2
+ self.unique_identifier = \
+ int(os.stat(self.parameter_filename)[stat.ST_CTIME])
+
+ # Set standard values
+ self.domain_left_edge = np.zeros(3, "float64")
+ self.domain_right_edge = np.ones(3, "float64") * hvals["BoxSize"]
+ nz = 1 << self.over_refine_factor
+ self.domain_dimensions = np.ones(3, "int32") * nz
+ self.cosmological_simulation = 1
+ self.periodicity = (True, True, True)
+ self.current_redshift = hvals["Redshift"]
+ self.omega_lambda = hvals["OmegaLambda"]
+ self.omega_matter = hvals["Omega0"]
+ self.hubble_constant = hvals["HubbleParam"]
+
+ cosmology = Cosmology(hubble_constant=self.hubble_constant,
+ omega_matter=self.omega_matter,
+ omega_lambda=self.omega_lambda)
+ self.current_time = cosmology.t_from_z(self.current_redshift)
+
+ self.parameters = hvals
+ prefix = os.path.abspath(
+ os.path.join(os.path.dirname(self.parameter_filename),
+ os.path.basename(self.parameter_filename).split(".", 1)[0]))
+
+ suffix = self.parameter_filename.rsplit(".", 1)[-1]
+ self.filename_template = "%s.%%(num)i.%s" % (prefix, suffix)
+ self.file_count = len(glob.glob(prefix + "*" + self._suffix))
+ if self.file_count == 0:
+ raise YTException(message="No data files found.", ds=self)
+ self.particle_types = ("Group", "Subhalo")
+ self.particle_types_raw = ("Group", "Subhalo")
+
+ handle.close()
+
+ def _set_code_unit_attributes(self):
+ # Set a sane default for cosmological simulations.
+ if self._unit_base is None and self.cosmological_simulation == 1:
+ mylog.info("Assuming length units are in Mpc/h (comoving)")
+ self._unit_base = dict(length = (1.0, "Mpccm/h"))
+ # For the other units, fall back to the standard Gadget defaults.
+ unit_base = self._unit_base or {}
+
+ if "length" in unit_base:
+ length_unit = unit_base["length"]
+ elif "UnitLength_in_cm" in unit_base:
+ if self.cosmological_simulation == 0:
+ length_unit = (unit_base["UnitLength_in_cm"], "cm")
+ else:
+ length_unit = (unit_base["UnitLength_in_cm"], "cmcm/h")
+ else:
+ raise RuntimeError
+ length_unit = _fix_unit_ordering(length_unit)
+ self.length_unit = self.quan(length_unit[0], length_unit[1])
+
+ if "velocity" in unit_base:
+ velocity_unit = unit_base["velocity"]
+ elif "UnitVelocity_in_cm_per_s" in unit_base:
+ velocity_unit = (unit_base["UnitVelocity_in_cm_per_s"], "cm/s")
+ else:
+ if self.cosmological_simulation == 0:
+ velocity_unit = (1e5, "cm/s")
+ else:
+ velocity_unit = (1e5, "cmcm/s")
+ velocity_unit = _fix_unit_ordering(velocity_unit)
+ self.velocity_unit = self.quan(velocity_unit[0], velocity_unit[1])
+
+ # We set hubble_constant = 1.0 for non-cosmology, so this is safe.
+ # Default to 1e10 Msun/h if mass is not specified.
+ if "mass" in unit_base:
+ mass_unit = unit_base["mass"]
+ elif "UnitMass_in_g" in unit_base:
+ if self.cosmological_simulation == 0:
+ mass_unit = (unit_base["UnitMass_in_g"], "g")
+ else:
+ mass_unit = (unit_base["UnitMass_in_g"], "g/h")
+ else:
+ # Sane default
+ mass_unit = (1.0, "1e10*Msun/h")
+ mass_unit = _fix_unit_ordering(mass_unit)
+ self.mass_unit = self.quan(mass_unit[0], mass_unit[1])
+
+ if "time" in unit_base:
+ time_unit = unit_base["time"]
+ elif "UnitTime_in_s" in unit_base:
+ time_unit = (unit_base["UnitTime_in_s"], "s")
+ else:
+ time_unit = (1., "s")
+ self.time_unit = self.quan(time_unit[0], time_unit[1])
+
+ @classmethod
+ def _is_valid(self, *args, **kwargs):
+ need_groups = ['Group', 'Header', 'Subhalo']
+ veto_groups = ['FOF']
+ valid = True
+ try:
+ fh = h5py.File(args[0], mode='r')
+ valid = all(ng in fh["/"] for ng in need_groups) and \
+ not any(vg in fh["/"] for vg in veto_groups)
+ fh.close()
+ except:
+ valid = False
+ pass
+ return valid
diff -r 61c3f8b403719087ab0a31f77b2117ead47bbfa0 -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 yt/frontends/gadget_fof/fields.py
--- /dev/null
+++ b/yt/frontends/gadget_fof/fields.py
@@ -0,0 +1,40 @@
+"""
+GadgetFOF-specific fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2015, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.funcs import mylog
+from yt.fields.field_info_container import \
+ FieldInfoContainer
+from yt.units.yt_array import \
+ YTArray
+
+m_units = "code_mass"
+p_units = "code_length"
+v_units = "code_velocity"
+
+class GadgetFOFFieldInfo(FieldInfoContainer):
+ known_other_fields = (
+ )
+
+ known_particle_fields = (
+ ("GroupPos_0", (p_units, ["particle_position_x"], None)),
+ ("GroupPos_1", (p_units, ["particle_position_y"], None)),
+ ("GroupPos_2", (p_units, ["particle_position_z"], None)),
+ ("GroupVel_0", (v_units, ["particle_velocity_x"], None)),
+ ("GroupVel_1", (v_units, ["particle_velocity_y"], None)),
+ ("GroupVel_2", (v_units, ["particle_velocity_z"], None)),
+ ("GroupMass", (m_units, ["particle_mass"], None)),
+ ("GroupLen", ("", ["particle_number"], None)),
+)
diff -r 61c3f8b403719087ab0a31f77b2117ead47bbfa0 -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 yt/frontends/gadget_fof/io.py
--- /dev/null
+++ b/yt/frontends/gadget_fof/io.py
@@ -0,0 +1,209 @@
+"""
+GadgetFOF data-file handling function
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+import numpy as np
+
+from yt.utilities.exceptions import *
+from yt.funcs import mylog
+
+from yt.utilities.io_handler import \
+ BaseIOHandler
+
+from yt.utilities.lib.geometry_utils import compute_morton
+
+class IOHandlerGadgetFOFHDF5(BaseIOHandler):
+ _dataset_type = "gadget_fof_hdf5"
+
+ def __init__(self, ds):
+ super(IOHandlerGadgetFOFHDF5, self).__init__(ds)
+ self.offset_fields = set([])
+
+ def _read_fluid_selection(self, chunks, selector, fields, size):
+ raise NotImplementedError
+
+ def _read_particle_coords(self, chunks, ptf):
+ # This will read chunks and yield the results.
+ chunks = list(chunks)
+ data_files = set([])
+ for chunk in chunks:
+ for obj in chunk.objs:
+ data_files.update(obj.data_files)
+ for data_file in sorted(data_files):
+ with h5py.File(data_file.filename, "r") as f:
+ for ptype, field_list in sorted(ptf.items()):
+ pcount = data_file.total_particles[ptype]
+ coords = f[ptype]["%sPos" % ptype].value.astype("float64")
+ coords = np.resize(coords, (pcount, 3))
+ x = coords[:, 0]
+ y = coords[:, 1]
+ z = coords[:, 2]
+ yield ptype, (x, y, z)
+
+ def _read_offset_particle_field(self, field, data_file, fh):
+ field_data = np.empty(data_file.total_particles["Group"], dtype="float64")
+ fofindex = np.arange(data_file.total_particles["Group"]) + data_file.index_start["FOF"]
+ for offset_file in data_file.offset_files:
+ if fh.filename == offset_file.filename:
+ ofh = fh
+ else:
+ ofh = h5py.File(offset_file.filename, "r")
+ subindex = np.arange(offset_file.total_offset) + offset_file.offset_start
+ substart = max(fofindex[0] - subindex[0], 0)
+ subend = min(fofindex[-1] - subindex[0], subindex.size - 1)
+ fofstart = substart + subindex[0] - fofindex[0]
+ fofend = subend + subindex[0] - fofindex[0]
+ field_data[fofstart:fofend + 1] = ofh["Subhalo"][field][substart:subend + 1]
+ return field_data
+
+ def _read_particle_fields(self, chunks, ptf, selector):
+ # Now we have all the sizes, and we can allocate
+ chunks = list(chunks)
+ data_files = set([])
+ for chunk in chunks:
+ for obj in chunk.objs:
+ data_files.update(obj.data_files)
+ for data_file in sorted(data_files):
+ with h5py.File(data_file.filename, "r") as f:
+ for ptype, field_list in sorted(ptf.items()):
+ pcount = data_file.total_particles[ptype]
+ if pcount == 0: continue
+ coords = f[ptype]["%sPos" % ptype].value.astype("float64")
+ coords = np.resize(coords, (pcount, 3))
+ x = coords[:, 0]
+ y = coords[:, 1]
+ z = coords[:, 2]
+ mask = selector.select_points(x, y, z, 0.0)
+ del x, y, z
+ if mask is None: continue
+ for field in field_list:
+ if field in self.offset_fields:
+ field_data = \
+ self._read_offset_particle_field(field, data_file, f)
+ else:
+ if field == "particle_identifier":
+ field_data = \
+ np.arange(data_file.total_particles[ptype]) + \
+ data_file.index_start[ptype]
+ elif field in f[ptype]:
+ field_data = f[ptype][field].value.astype("float64")
+ else:
+ fname = field[:field.rfind("_")]
+ field_data = f[ptype][fname].value.astype("float64")
+ my_div = field_data.size / pcount
+ if my_div > 1:
+ field_data = np.resize(field_data, (pcount, my_div))
+ findex = int(field[field.rfind("_") + 1:])
+ field_data = field_data[:, findex]
+ data = field_data[mask]
+ yield (ptype, field), data
+
+ def _initialize_index(self, data_file, regions):
+ pcount = sum(data_file.total_particles.values())
+ morton = np.empty(pcount, dtype='uint64')
+ if pcount == 0: return morton
+ mylog.debug("Initializing index % 5i (% 7i particles)",
+ data_file.file_id, pcount)
+ ind = 0
+ with h5py.File(data_file.filename, "r") as f:
+ if not f.keys(): return None
+ dx = np.finfo(f["Group"]["GroupPos"].dtype).eps
+ dx = 2.0*self.ds.quan(dx, "code_length")
+
+ for ptype in data_file.ds.particle_types_raw:
+ if data_file.total_particles[ptype] == 0: continue
+ pos = f[ptype]["%sPos" % ptype].value.astype("float64")
+ pos = np.resize(pos, (data_file.total_particles[ptype], 3))
+ pos = data_file.ds.arr(pos, "code_length")
+
+ # These are 32 bit numbers, so we give a little leeway.
+ # Otherwise, for big sets of particles, we often will bump into the
+ # domain edges. This helps alleviate that.
+ np.clip(pos, self.ds.domain_left_edge + dx,
+ self.ds.domain_right_edge - dx, pos)
+ if np.any(pos.min(axis=0) < self.ds.domain_left_edge) or \
+ np.any(pos.max(axis=0) > self.ds.domain_right_edge):
+ raise YTDomainOverflow(pos.min(axis=0),
+ pos.max(axis=0),
+ self.ds.domain_left_edge,
+ self.ds.domain_right_edge)
+ regions.add_data_file(pos, data_file.file_id)
+ morton[ind:ind+pos.shape[0]] = compute_morton(
+ pos[:,0], pos[:,1], pos[:,2],
+ data_file.ds.domain_left_edge,
+ data_file.ds.domain_right_edge)
+ ind += pos.shape[0]
+ return morton
+
+ def _count_particles(self, data_file):
+ with h5py.File(data_file.filename, "r") as f:
+ pcount = {"Group": f["Header"].attrs["Ngroups_ThisFile"],
+ "Subhalo": f["Header"].attrs["Nsubgroups_ThisFile"]}
+ data_file.total_offset = 0 # need to figure out how subfind works here
+ return pcount
+
+ def _identify_fields(self, data_file):
+ fields = []
+ pcount = data_file.total_particles
+ if sum(pcount.values()) == 0: return fields, {}
+ with h5py.File(data_file.filename, "r") as f:
+ for ptype in self.ds.particle_types_raw:
+ if data_file.total_particles[ptype] == 0: continue
+ fields.append((ptype, "particle_identifier"))
+ my_fields, my_offset_fields = \
+ subfind_field_list(f[ptype], ptype, data_file.total_particles)
+ fields.extend(my_fields)
+ self.offset_fields = self.offset_fields.union(set(my_offset_fields))
+ return fields, {}
+
+def subfind_field_list(fh, ptype, pcount):
+ fields = []
+ offset_fields = []
+ for field in fh.keys():
+ if isinstance(fh[field], h5py.Group):
+ my_fields, my_offset_fields = \
+ subfind_field_list(fh[field], ptype, pcount)
+ fields.extend(my_fields)
+ offset_fields.extend(my_offset_fields)
+ else:
+ if not fh[field].size % pcount[ptype]:
+ my_div = fh[field].size / pcount[ptype]
+ fname = fh[field].name[fh[field].name.find(ptype) + len(ptype) + 1:]
+ if my_div > 1:
+ for i in range(my_div):
+ fields.append((ptype, "%s_%d" % (fname, i)))
+ else:
+ fields.append((ptype, fname))
+ ### Leave this block of code in case we need to do this.
+ ### This will have to wait until I get a dataset with subhalos.
+ # elif ptype == "SUBFIND" and \
+ # not fh[field].size % fh["/SUBFIND"].attrs["Number_of_groups"]:
+ # # These are actually FOF fields, but they were written after
+ # # a load balancing step moved halos around and thus they do not
+ # # correspond to the halos stored in the FOF group.
+ # my_div = fh[field].size / fh["/SUBFIND"].attrs["Number_of_groups"]
+ # fname = fh[field].name[fh[field].name.find(ptype) + len(ptype) + 1:]
+ # if my_div > 1:
+ # for i in range(my_div):
+ # fields.append(("FOF", "%s_%d" % (fname, i)))
+ # else:
+ # fields.append(("FOF", fname))
+ # offset_fields.append(fname)
+ else:
+ mylog.warn("Cannot add field (%s, %s) with size %d." % \
+ (ptype, fh[field].name, fh[field].size))
+ continue
+ return fields, offset_fields
diff -r 61c3f8b403719087ab0a31f77b2117ead47bbfa0 -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 yt/frontends/gadget_fof/setup.py
--- /dev/null
+++ b/yt/frontends/gadget_fof/setup.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+import setuptools
+import os
+import sys
+import os.path
+
+
+def configuration(parent_package='', top_path=None):
+ from numpy.distutils.misc_util import Configuration
+ config = Configuration('gadget_fof', parent_package, top_path)
+ config.make_config_py() # installs __config__.py
+ #config.make_svn_version_py()
+ return config
diff -r 61c3f8b403719087ab0a31f77b2117ead47bbfa0 -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 yt/frontends/gadget_fof/tests/test_outputs.py
--- /dev/null
+++ b/yt/frontends/gadget_fof/tests/test_outputs.py
@@ -0,0 +1,50 @@
+"""
+GadgetFOF frontend tests using owls_fof_halos datasets
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import os.path
+from yt.testing import \
+ assert_equal, \
+ requires_file
+from yt.utilities.answer_testing.framework import \
+ FieldValuesTest, \
+ requires_ds, \
+ data_dir_load
+from yt.frontends.gadget_fof.api import GadgetFOFDataset
+
+_fields = ("particle_position_x", "particle_position_y",
+ "particle_position_z", "particle_mass")
+
+# a dataset with empty files
+g1 = "" # TBD
+g8 = "" # TBD
+
+
+@requires_ds(g8)
+def test_fields_g8():
+ ds = data_dir_load(g8)
+ yield assert_equal, str(ds), os.path.basename(g8)
+ for field in _fields:
+ yield FieldValuesTest(g8, field, particle_type=True)
+
+
+@requires_ds(g1)
+def test_fields_g1():
+ ds = data_dir_load(g1)
+ yield assert_equal, str(ds), os.path.basename(g1)
+ for field in _fields:
+ yield FieldValuesTest(g1, field, particle_type=True)
+
+@requires_file(g1)
+def test_GadgetFOFDataset():
+ assert isinstance(data_dir_load(g1), GadgetFOFDataset)
diff -r 61c3f8b403719087ab0a31f77b2117ead47bbfa0 -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 yt/frontends/owls_subfind/data_structures.py
--- a/yt/frontends/owls_subfind/data_structures.py
+++ b/yt/frontends/owls_subfind/data_structures.py
@@ -27,11 +27,14 @@
from .fields import \
OWLSSubfindFieldInfo
-from yt.utilities.cosmology import Cosmology
+from yt.utilities.cosmology import \
+ Cosmology
from yt.utilities.definitions import \
mpc_conversion, sec_conversion
from yt.utilities.exceptions import \
- YTException
+ YTException
+from yt.utilities.logger import ytLogger as \
+ mylog
from yt.geometry.particle_geometry_handler import \
ParticleIndex
from yt.data_objects.static_output import \
@@ -170,6 +173,7 @@
# The other same defaults we will use from the standard Gadget
# defaults.
unit_base = self._unit_base or {}
+
if "length" in unit_base:
length_unit = unit_base["length"]
elif "UnitLength_in_cm" in unit_base:
@@ -182,7 +186,6 @@
length_unit = _fix_unit_ordering(length_unit)
self.length_unit = self.quan(length_unit[0], length_unit[1])
- unit_base = self._unit_base or {}
if "velocity" in unit_base:
velocity_unit = unit_base["velocity"]
elif "UnitVelocity_in_cm_per_s" in unit_base:
@@ -191,6 +194,7 @@
velocity_unit = (1e5, "cm/s")
velocity_unit = _fix_unit_ordering(velocity_unit)
self.velocity_unit = self.quan(velocity_unit[0], velocity_unit[1])
+
# We set hubble_constant = 1.0 for non-cosmology, so this is safe.
# Default to 1e10 Msun/h if mass is not specified.
if "mass" in unit_base:
@@ -205,7 +209,14 @@
mass_unit = (1.0, "1e10*Msun/h")
mass_unit = _fix_unit_ordering(mass_unit)
self.mass_unit = self.quan(mass_unit[0], mass_unit[1])
- self.time_unit = self.quan(unit_base["UnitTime_in_s"], "s")
+
+ if "time" in unit_base:
+ time_unit = unit_base["time"]
+ elif "UnitTime_in_s" in unit_base:
+ time_unit = (unit_base["UnitTime_in_s"], "s")
+ else:
+ time_unit = (1., "s")
+ self.time_unit = self.quan(time_unit[0], time_unit[1])
@classmethod
def _is_valid(self, *args, **kwargs):
diff -r 61c3f8b403719087ab0a31f77b2117ead47bbfa0 -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 yt/frontends/setup.py
--- a/yt/frontends/setup.py
+++ b/yt/frontends/setup.py
@@ -17,6 +17,7 @@
config.add_subpackage("fits")
config.add_subpackage("flash")
config.add_subpackage("gadget")
+ config.add_subpackage("gadget_fof")
config.add_subpackage("gdf")
config.add_subpackage("halo_catalog")
config.add_subpackage("http_stream")
@@ -34,13 +35,17 @@
config.add_subpackage("athena/tests")
config.add_subpackage("boxlib/tests")
config.add_subpackage("chombo/tests")
+ config.add_subpackage("eagle/tests")
config.add_subpackage("enzo/tests")
config.add_subpackage("fits/tests")
config.add_subpackage("flash/tests")
+ config.add_subpackage("gadget/tests")
+ config.add_subpackage("gadget_fof/tests")
config.add_subpackage("moab/tests")
config.add_subpackage("owls/tests")
config.add_subpackage("owls_subfind/tests")
config.add_subpackage("ramses/tests")
+ config.add_subpackage("rockstar/tests")
config.add_subpackage("stream/tests")
config.add_subpackage("tipsy/tests")
return config
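
With both _is_valid changes merged, Gadget snapshots and GadgetFOF catalogs
are disambiguated purely by HDF5 group names. A quick sanity sketch against
a hypothetical scratch file:

import h5py
from yt.frontends.gadget_fof.api import GadgetFOFDataset

# Minimal scratch file carrying the three groups GadgetFOFDataset needs.
with h5py.File("fake_fof.0.hdf5", "w") as f:
    f.create_group("Header")
    f.create_group("Group")
    f.create_group("Subhalo")

# True here; the plain Gadget frontend now vetoes "Group" and "Subhalo",
# so the two frontends cannot both claim the same file.
print(GadgetFOFDataset._is_valid("fake_fof.0.hdf5"))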
https://bitbucket.org/yt_analysis/yt/commits/3a35c7aaf6aa/
Changeset: 3a35c7aaf6aa
Branch: yt
User: brittonsmith
Date: 2015-07-09 12:55:02+00:00
Summary: Merging.
Affected #: 106 files
diff -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 -r 3a35c7aaf6aad4d1d1f623a110296a69f4f6d735 .hgignore
--- a/.hgignore
+++ b/.hgignore
@@ -13,6 +13,7 @@
yt/frontends/ramses/_ramses_reader.cpp
yt/geometry/fake_octree.c
yt/geometry/grid_container.c
+yt/geometry/grid_visitors.c
yt/geometry/oct_container.c
yt/geometry/oct_visitors.c
yt/geometry/particle_deposit.c
@@ -25,6 +26,7 @@
yt/utilities/spatial/ckdtree.c
yt/utilities/lib/alt_ray_tracers.c
yt/utilities/lib/amr_kdtools.c
+yt/utilities/lib/bitarray.c
yt/utilities/lib/CICDeposit.c
yt/utilities/lib/ContourFinding.c
yt/utilities/lib/DepthFirstOctree.c
@@ -39,6 +41,7 @@
yt/utilities/lib/misc_utilities.c
yt/utilities/lib/Octree.c
yt/utilities/lib/origami.c
+yt/utilities/lib/pixelization_routines.c
yt/utilities/lib/png_writer.c
yt/utilities/lib/PointsInVolume.c
yt/utilities/lib/QuadTree.c
@@ -59,3 +62,4 @@
doc/source/reference/api/generated/*
doc/_temp/*
doc/source/bootcamp/.ipynb_checkpoints/
+dist
diff -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 -r 3a35c7aaf6aad4d1d1f623a110296a69f4f6d735 .python-version
--- /dev/null
+++ b/.python-version
@@ -0,0 +1,1 @@
+2.7.9
diff -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 -r 3a35c7aaf6aad4d1d1f623a110296a69f4f6d735 README
--- a/README
+++ b/README
@@ -20,4 +20,4 @@
For more information on installation, what to do if you run into problems, or
ways to help development, please visit our website.
-Enjoy!
+Enjoy!
\ No newline at end of file
diff -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 -r 3a35c7aaf6aad4d1d1f623a110296a69f4f6d735 distribute_setup.py
--- a/distribute_setup.py
+++ /dev/null
@@ -1,541 +0,0 @@
-#!python
-"""Bootstrap distribute installation
-
-If you want to use setuptools in your package's setup.py, just include this
-file in the same directory with it, and add this to the top of your setup.py::
-
- from distribute_setup import use_setuptools
- use_setuptools()
-
-If you want to require a specific version of setuptools, set a download
-mirror, or use an alternate download directory, you can do so by supplying
-the appropriate options to ``use_setuptools()``.
-
-This file can also be run as a script to install or upgrade setuptools.
-"""
-import os
-import shutil
-import sys
-import time
-import fnmatch
-import tempfile
-import tarfile
-import optparse
-
-from distutils import log
-
-try:
- from site import USER_SITE
-except ImportError:
- USER_SITE = None
-
-try:
- import subprocess
-
- def _python_cmd(*args):
- args = (sys.executable,) + args
- return subprocess.call(args) == 0
-
-except ImportError:
- # will be used for python 2.3
- def _python_cmd(*args):
- args = (sys.executable,) + args
- # quoting arguments if windows
- if sys.platform == 'win32':
- def quote(arg):
- if ' ' in arg:
- return '"%s"' % arg
- return arg
- args = [quote(arg) for arg in args]
- return os.spawnl(os.P_WAIT, sys.executable, *args) == 0
-
-DEFAULT_VERSION = "0.6.32"
-DEFAULT_URL = "http://pypi.python.org/packages/source/d/distribute/"
-SETUPTOOLS_FAKED_VERSION = "0.6c11"
-
-SETUPTOOLS_PKG_INFO = """\
-Metadata-Version: 1.0
-Name: setuptools
-Version: %s
-Summary: xxxx
-Home-page: xxx
-Author: xxx
-Author-email: xxx
-License: xxx
-Description: xxx
-""" % SETUPTOOLS_FAKED_VERSION
-
-
-def _install(tarball, install_args=()):
- # extracting the tarball
- tmpdir = tempfile.mkdtemp()
- log.warn('Extracting in %s', tmpdir)
- old_wd = os.getcwd()
- try:
- os.chdir(tmpdir)
- tar = tarfile.open(tarball)
- _extractall(tar)
- tar.close()
-
- # going in the directory
- subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
- os.chdir(subdir)
- log.warn('Now working in %s', subdir)
-
- # installing
- log.warn('Installing Distribute')
- if not _python_cmd('setup.py', 'install', *install_args):
- log.warn('Something went wrong during the installation.')
- log.warn('See the error message above.')
- # exitcode will be 2
- return 2
- finally:
- os.chdir(old_wd)
- shutil.rmtree(tmpdir)
-
-
-def _build_egg(egg, tarball, to_dir):
- # extracting the tarball
- tmpdir = tempfile.mkdtemp()
- log.warn('Extracting in %s', tmpdir)
- old_wd = os.getcwd()
- try:
- os.chdir(tmpdir)
- tar = tarfile.open(tarball)
- _extractall(tar)
- tar.close()
-
- # going in the directory
- subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
- os.chdir(subdir)
- log.warn('Now working in %s', subdir)
-
- # building an egg
- log.warn('Building a Distribute egg in %s', to_dir)
- _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
-
- finally:
- os.chdir(old_wd)
- shutil.rmtree(tmpdir)
- # returning the result
- log.warn(egg)
- if not os.path.exists(egg):
- raise IOError('Could not build the egg.')
-
-
-def _do_download(version, download_base, to_dir, download_delay):
- egg = os.path.join(to_dir, 'distribute-%s-py%d.%d.egg'
- % (version, sys.version_info[0], sys.version_info[1]))
- if not os.path.exists(egg):
- tarball = download_setuptools(version, download_base,
- to_dir, download_delay)
- _build_egg(egg, tarball, to_dir)
- sys.path.insert(0, egg)
- import setuptools
- setuptools.bootstrap_install_from = egg
-
-
-def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
- to_dir=os.curdir, download_delay=15, no_fake=True):
- # making sure we use the absolute path
- to_dir = os.path.abspath(to_dir)
- was_imported = 'pkg_resources' in sys.modules or \
- 'setuptools' in sys.modules
- try:
- try:
- import pkg_resources
- if not hasattr(pkg_resources, '_distribute'):
- if not no_fake:
- _fake_setuptools()
- raise ImportError
- except ImportError:
- return _do_download(version, download_base, to_dir, download_delay)
- try:
- pkg_resources.require("distribute>=" + version)
- return
- except pkg_resources.VersionConflict:
- e = sys.exc_info()[1]
- if was_imported:
- sys.stderr.write(
- "The required version of distribute (>=%s) is not available,\n"
- "and can't be installed while this script is running. Please\n"
- "install a more recent version first, using\n"
- "'easy_install -U distribute'."
- "\n\n(Currently using %r)\n" % (version, e.args[0]))
- sys.exit(2)
- else:
- del pkg_resources, sys.modules['pkg_resources'] # reload ok
- return _do_download(version, download_base, to_dir,
- download_delay)
- except pkg_resources.DistributionNotFound:
- return _do_download(version, download_base, to_dir,
- download_delay)
- finally:
- if not no_fake:
- _create_fake_setuptools_pkg_info(to_dir)
-
-
-def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
- to_dir=os.curdir, delay=15):
- """Download distribute from a specified location and return its filename
-
- `version` should be a valid distribute version number that is available
- as an egg for download under the `download_base` URL (which should end
- with a '/'). `to_dir` is the directory where the egg will be downloaded.
- `delay` is the number of seconds to pause before an actual download
- attempt.
- """
- # making sure we use the absolute path
- to_dir = os.path.abspath(to_dir)
- try:
- from urllib.request import urlopen
- except ImportError:
- from urllib2 import urlopen
- tgz_name = "distribute-%s.tar.gz" % version
- url = download_base + tgz_name
- saveto = os.path.join(to_dir, tgz_name)
- src = dst = None
- if not os.path.exists(saveto): # Avoid repeated downloads
- try:
- log.warn("Downloading %s", url)
- src = urlopen(url)
- # Read/write all in one block, so we don't create a corrupt file
- # if the download is interrupted.
- data = src.read()
- dst = open(saveto, "wb")
- dst.write(data)
- finally:
- if src:
- src.close()
- if dst:
- dst.close()
- return os.path.realpath(saveto)
-
-
-def _no_sandbox(function):
- def __no_sandbox(*args, **kw):
- try:
- from setuptools.sandbox import DirectorySandbox
- if not hasattr(DirectorySandbox, '_old'):
- def violation(*args):
- pass
- DirectorySandbox._old = DirectorySandbox._violation
- DirectorySandbox._violation = violation
- patched = True
- else:
- patched = False
- except ImportError:
- patched = False
-
- try:
- return function(*args, **kw)
- finally:
- if patched:
- DirectorySandbox._violation = DirectorySandbox._old
- del DirectorySandbox._old
-
- return __no_sandbox
-
-
-def _patch_file(path, content):
- """Will backup the file then patch it"""
- existing_content = open(path).read()
- if existing_content == content:
- # already patched
- log.warn('Already patched.')
- return False
- log.warn('Patching...')
- _rename_path(path)
- f = open(path, 'w')
- try:
- f.write(content)
- finally:
- f.close()
- return True
-
-_patch_file = _no_sandbox(_patch_file)
-
-
-def _same_content(path, content):
- return open(path).read() == content
-
-
-def _rename_path(path):
- new_name = path + '.OLD.%s' % time.time()
- log.warn('Renaming %s to %s', path, new_name)
- os.rename(path, new_name)
- return new_name
-
-
-def _remove_flat_installation(placeholder):
- if not os.path.isdir(placeholder):
- log.warn('Unknown installation at %s', placeholder)
- return False
- found = False
- for file in os.listdir(placeholder):
- if fnmatch.fnmatch(file, 'setuptools*.egg-info'):
- found = True
- break
- if not found:
- log.warn('Could not locate setuptools*.egg-info')
- return
-
- log.warn('Moving elements out of the way...')
- pkg_info = os.path.join(placeholder, file)
- if os.path.isdir(pkg_info):
- patched = _patch_egg_dir(pkg_info)
- else:
- patched = _patch_file(pkg_info, SETUPTOOLS_PKG_INFO)
-
- if not patched:
- log.warn('%s already patched.', pkg_info)
- return False
- # now let's move the files out of the way
- for element in ('setuptools', 'pkg_resources.py', 'site.py'):
- element = os.path.join(placeholder, element)
- if os.path.exists(element):
- _rename_path(element)
- else:
- log.warn('Could not find the %s element of the '
- 'Setuptools distribution', element)
- return True
-
-_remove_flat_installation = _no_sandbox(_remove_flat_installation)
-
-
-def _after_install(dist):
- log.warn('After install bootstrap.')
- placeholder = dist.get_command_obj('install').install_purelib
- _create_fake_setuptools_pkg_info(placeholder)
-
-
-def _create_fake_setuptools_pkg_info(placeholder):
- if not placeholder or not os.path.exists(placeholder):
- log.warn('Could not find the install location')
- return
- pyver = '%s.%s' % (sys.version_info[0], sys.version_info[1])
- setuptools_file = 'setuptools-%s-py%s.egg-info' % \
- (SETUPTOOLS_FAKED_VERSION, pyver)
- pkg_info = os.path.join(placeholder, setuptools_file)
- if os.path.exists(pkg_info):
- log.warn('%s already exists', pkg_info)
- return
-
- log.warn('Creating %s', pkg_info)
- try:
- f = open(pkg_info, 'w')
- except EnvironmentError:
- log.warn("Don't have permissions to write %s, skipping", pkg_info)
- return
- try:
- f.write(SETUPTOOLS_PKG_INFO)
- finally:
- f.close()
-
- pth_file = os.path.join(placeholder, 'setuptools.pth')
- log.warn('Creating %s', pth_file)
- f = open(pth_file, 'w')
- try:
- f.write(os.path.join(os.curdir, setuptools_file))
- finally:
- f.close()
-
-_create_fake_setuptools_pkg_info = _no_sandbox(
- _create_fake_setuptools_pkg_info
-)
-
-
-def _patch_egg_dir(path):
- # let's check if it's already patched
- pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
- if os.path.exists(pkg_info):
- if _same_content(pkg_info, SETUPTOOLS_PKG_INFO):
- log.warn('%s already patched.', pkg_info)
- return False
- _rename_path(path)
- os.mkdir(path)
- os.mkdir(os.path.join(path, 'EGG-INFO'))
- pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
- f = open(pkg_info, 'w')
- try:
- f.write(SETUPTOOLS_PKG_INFO)
- finally:
- f.close()
- return True
-
-_patch_egg_dir = _no_sandbox(_patch_egg_dir)
-
-
-def _before_install():
- log.warn('Before install bootstrap.')
- _fake_setuptools()
-
-
-def _under_prefix(location):
- if 'install' not in sys.argv:
- return True
- args = sys.argv[sys.argv.index('install') + 1:]
- for index, arg in enumerate(args):
- for option in ('--root', '--prefix'):
- if arg.startswith('%s=' % option):
- top_dir = arg.split('root=')[-1]
- return location.startswith(top_dir)
- elif arg == option:
- if len(args) > index:
- top_dir = args[index + 1]
- return location.startswith(top_dir)
- if arg == '--user' and USER_SITE is not None:
- return location.startswith(USER_SITE)
- return True
-
-
-def _fake_setuptools():
- log.warn('Scanning installed packages')
- try:
- import pkg_resources
- except ImportError:
- # we're cool
- log.warn('Setuptools or Distribute does not seem to be installed.')
- return
- ws = pkg_resources.working_set
- try:
- setuptools_dist = ws.find(
- pkg_resources.Requirement.parse('setuptools', replacement=False)
- )
- except TypeError:
- # old distribute API
- setuptools_dist = ws.find(
- pkg_resources.Requirement.parse('setuptools')
- )
-
- if setuptools_dist is None:
- log.warn('No setuptools distribution found')
- return
- # detecting if it was already faked
- setuptools_location = setuptools_dist.location
- log.warn('Setuptools installation detected at %s', setuptools_location)
-
- # if --root or --preix was provided, and if
- # setuptools is not located in them, we don't patch it
- if not _under_prefix(setuptools_location):
- log.warn('Not patching, --root or --prefix is installing Distribute'
- ' in another location')
- return
-
- # let's see if its an egg
- if not setuptools_location.endswith('.egg'):
- log.warn('Non-egg installation')
- res = _remove_flat_installation(setuptools_location)
- if not res:
- return
- else:
- log.warn('Egg installation')
- pkg_info = os.path.join(setuptools_location, 'EGG-INFO', 'PKG-INFO')
- if (os.path.exists(pkg_info) and
- _same_content(pkg_info, SETUPTOOLS_PKG_INFO)):
- log.warn('Already patched.')
- return
- log.warn('Patching...')
- # let's create a fake egg replacing setuptools one
- res = _patch_egg_dir(setuptools_location)
- if not res:
- return
- log.warn('Patching complete.')
- _relaunch()
-
-
-def _relaunch():
- log.warn('Relaunching...')
- # we have to relaunch the process
- # pip marker to avoid a relaunch bug
- _cmd1 = ['-c', 'install', '--single-version-externally-managed']
- _cmd2 = ['-c', 'install', '--record']
- if sys.argv[:3] == _cmd1 or sys.argv[:3] == _cmd2:
- sys.argv[0] = 'setup.py'
- args = [sys.executable] + sys.argv
- sys.exit(subprocess.call(args))
-
-
-def _extractall(self, path=".", members=None):
- """Extract all members from the archive to the current working
- directory and set owner, modification time and permissions on
- directories afterwards. `path' specifies a different directory
- to extract to. `members' is optional and must be a subset of the
- list returned by getmembers().
- """
- import copy
- import operator
- from tarfile import ExtractError
- directories = []
-
- if members is None:
- members = self
-
- for tarinfo in members:
- if tarinfo.isdir():
- # Extract directories with a safe mode.
- directories.append(tarinfo)
- tarinfo = copy.copy(tarinfo)
- tarinfo.mode = 448 # decimal for oct 0700
- self.extract(tarinfo, path)
-
- # Reverse sort directories.
- if sys.version_info < (2, 4):
- def sorter(dir1, dir2):
- return cmp(dir1.name, dir2.name)
- directories.sort(sorter)
- directories.reverse()
- else:
- directories.sort(key=operator.attrgetter('name'), reverse=True)
-
- # Set correct owner, mtime and filemode on directories.
- for tarinfo in directories:
- dirpath = os.path.join(path, tarinfo.name)
- try:
- self.chown(tarinfo, dirpath)
- self.utime(tarinfo, dirpath)
- self.chmod(tarinfo, dirpath)
- except ExtractError:
- e = sys.exc_info()[1]
- if self.errorlevel > 1:
- raise
- else:
- self._dbg(1, "tarfile: %s" % e)
-
-
-def _build_install_args(options):
- """
- Build the arguments to 'python setup.py install' on the distribute package
- """
- install_args = []
- if options.user_install:
- if sys.version_info < (2, 6):
- log.warn("--user requires Python 2.6 or later")
- raise SystemExit(1)
- install_args.append('--user')
- return install_args
-
-def _parse_args():
- """
- Parse the command line for options
- """
- parser = optparse.OptionParser()
- parser.add_option(
- '--user', dest='user_install', action='store_true', default=False,
- help='install in user site package (requires Python 2.6 or later)')
- parser.add_option(
- '--download-base', dest='download_base', metavar="URL",
- default=DEFAULT_URL,
- help='alternative URL from where to download the distribute package')
- options, args = parser.parse_args()
- # positional arguments are ignored
- return options
-
-def main(version=DEFAULT_VERSION):
- """Install or upgrade setuptools and EasyInstall"""
- options = _parse_args()
- tarball = download_setuptools(download_base=options.download_base)
- return _install(tarball, _build_install_args(options))
-
-if __name__ == '__main__':
- sys.exit(main())
diff -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 -r 3a35c7aaf6aad4d1d1f623a110296a69f4f6d735 doc/helper_scripts/run_recipes.py
--- a/doc/helper_scripts/run_recipes.py
+++ b/doc/helper_scripts/run_recipes.py
@@ -13,7 +13,7 @@
from yt.config import ytcfg
FPATTERNS = ['*.png', '*.txt', '*.h5', '*.dat']
-DPATTERNS = ['LC*', 'LR', 'DD0046', 'halo_analysis']
+DPATTERNS = ['LC*', 'LR', 'DD0046']
BADF = ['cloudy_emissivity.h5', 'apec_emissivity.h5',
'xray_emissivity.h5', 'AMRGridData_Slice_x_density.png']
CWD = os.getcwd()
diff -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 -r 3a35c7aaf6aad4d1d1f623a110296a69f4f6d735 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -1,18 +1,14 @@
#
# Hi there! Welcome to the yt installation script.
#
+# First things first, if you experience problems, please visit the Help
+# section at http://yt-project.org.
+#
# This script is designed to create a fully isolated Python installation
# with the dependencies you need to run yt.
#
-# There are a few options, but you only need to set *one* of them. And
-# that's the next one, DEST_DIR. But, if you want to use an existing HDF5
-# installation you can set HDF5_DIR, or if you want to use some other
-# subversion checkout of yt, you can set YT_DIR, too. (It'll already
-# check the current directory and one up.
-#
-# If you experience problems, please visit the Help section at
-# http://yt-project.org.
-#
+# There are a few options, but you only need to set *one* of them, which is
+# the next one, DEST_DIR:
DEST_SUFFIX="yt-`uname -m`"
DEST_DIR="`pwd`/${DEST_SUFFIX/ /}" # Installation location
@@ -23,16 +19,25 @@
DEST_DIR=${YT_DEST}
fi
+# What follows are some other options that you may or may not need to change.
+
# Here's where you put the HDF5 path if you like; otherwise it'll download it
# and install it on its own
#HDF5_DIR=
+# If you've got yt some other place, set this to point to it. The script will
+# already check the current directory and the one above it in the tree.
+YT_DIR=""
+
# If you need to supply arguments to the NumPy or SciPy build, supply them here
# This one turns on gfortran manually:
#NUMPY_ARGS="--fcompiler=gnu95"
# If you absolutely can't get the fortran to work, try this:
#NUMPY_ARGS="--fcompiler=fake"
+INST_PY3=0 # Install Python 3 along with Python 2. If this is turned
+ # on, all Python packages (including yt) will be installed
+ # in Python 3 (except Mercurial, which requires Python 2).
INST_HG=1 # Install Mercurial or not? If hg is not already
# installed, yt cannot be installed.
INST_ZLIB=1 # On some systems (Kraken) matplotlib has issues with
@@ -50,9 +55,6 @@
INST_ROCKSTAR=0 # Install the Rockstar halo finder?
INST_SCIPY=0 # Install scipy?
-# If you've got yt some other place, set this to point to it.
-YT_DIR=""
-
# If you need to pass anything to matplotlib, do so here.
MPL_SUPP_LDFLAGS=""
MPL_SUPP_CFLAGS=""
@@ -111,6 +113,7 @@
echo INST_SQLITE3=${INST_SQLITE3} >> ${CONFIG_FILE}
echo INST_PYX=${INST_PYX} >> ${CONFIG_FILE}
echo INST_0MQ=${INST_0MQ} >> ${CONFIG_FILE}
+ echo INST_PY3=${INST_PY3} >> ${CONFIG_FILE}
echo INST_ROCKSTAR=${INST_ROCKSTAR} >> ${CONFIG_FILE}
echo INST_SCIPY=${INST_SCIPY} >> ${CONFIG_FILE}
echo YT_DIR=${YT_DIR} >> ${CONFIG_FILE}
@@ -415,6 +418,10 @@
get_willwont ${INST_SQLITE3}
echo "be installing SQLite3"
+printf "%-15s = %s so I " "INST_PY3" "${INST_PY3}"
+get_willwont ${INST_PY3}
+echo "be installing Python 3"
+
printf "%-15s = %s so I " "INST_HG" "${INST_HG}"
get_willwont ${INST_HG}
echo "be installing Mercurial"
@@ -487,6 +494,13 @@
exit 1
}
+if [ $INST_PY3 -eq 1 ]
+then
+ PYTHON_EXEC='python3.4'
+else
+ PYTHON_EXEC='python2.7'
+fi
+
function do_setup_py
{
[ -e $1/done ] && return
@@ -501,21 +515,27 @@
[ ! -e $LIB/extracted ] && tar xfz $LIB.tar.gz
touch $LIB/extracted
BUILD_ARGS=""
+ if [[ $LIB =~ .*mercurial.* ]]
+ then
+ PYEXE="python2.7"
+ else
+ PYEXE=${PYTHON_EXEC}
+ fi
case $LIB in
*h5py*)
pushd $LIB &> /dev/null
- ( ${DEST_DIR}/bin/python2.7 setup.py configure --hdf5=${HDF5_DIR} 2>&1 ) 1>> ${LOG_FILE} || do_exit
+ ( ${DEST_DIR}/bin/${PYTHON_EXEC} setup.py configure --hdf5=${HDF5_DIR} 2>&1 ) 1>> ${LOG_FILE} || do_exit
popd &> /dev/null
;;
*numpy*)
- if [ -e ${DEST_DIR}/lib/python2.7/site-packages/numpy/__init__.py ]
+ if [ -e ${DEST_DIR}/lib/${PYTHON_EXEC}/site-packages/numpy/__init__.py ]
then
- VER=$(${DEST_DIR}/bin/python -c 'from distutils.version import StrictVersion as SV; \
+ VER=$(${DEST_DIR}/bin/${PYTHON_EXEC} -c 'from distutils.version import StrictVersion as SV; \
import numpy; print SV(numpy.__version__) < SV("1.8.0")')
if [ $VER == "True" ]
then
echo "Removing previous NumPy instance (see issue #889)"
- rm -rf ${DEST_DIR}/lib/python2.7/site-packages/{numpy*,*.pth}
+ rm -rf ${DEST_DIR}/lib/${PYTHON_EXEC}/site-packages/{numpy*,*.pth}
fi
fi
;;
@@ -523,8 +543,8 @@
;;
esac
cd $LIB
- ( ${DEST_DIR}/bin/python2.7 setup.py build ${BUILD_ARGS} $* 2>&1 ) 1>> ${LOG_FILE} || do_exit
- ( ${DEST_DIR}/bin/python2.7 setup.py install 2>&1 ) 1>> ${LOG_FILE} || do_exit
+ ( ${DEST_DIR}/bin/${PYEXE} setup.py build ${BUILD_ARGS} $* 2>&1 ) 1>> ${LOG_FILE} || do_exit
+ ( ${DEST_DIR}/bin/${PYEXE} setup.py install 2>&1 ) 1>> ${LOG_FILE} || do_exit
touch done
cd ..
}
@@ -592,14 +612,15 @@
# Set paths to what they should be when yt is activated.
export PATH=${DEST_DIR}/bin:$PATH
export LD_LIBRARY_PATH=${DEST_DIR}/lib:$LD_LIBRARY_PATH
-export PYTHONPATH=${DEST_DIR}/lib/python2.7/site-packages
+export PYTHONPATH=${DEST_DIR}/lib/${PYTHON_EXEC}/site-packages
mkdir -p ${DEST_DIR}/src
cd ${DEST_DIR}/src
+PYTHON2='Python-2.7.9'
+PYTHON3='Python-3.4.3'
CYTHON='Cython-0.22'
PYX='PyX-0.12.1'
-PYTHON='Python-2.7.9'
BZLIB='bzip2-1.0.6'
FREETYPE_VER='freetype-2.4.12'
H5PY='h5py-2.5.0'
@@ -620,11 +641,13 @@
TORNADO='tornado-4.0.2'
ZEROMQ='zeromq-4.0.5'
ZLIB='zlib-1.2.8'
+SETUPTOOLS='setuptools-16.0'
# Now we dump all our SHA512 files out.
echo '856220fa579e272ac38dcef091760f527431ff3b98df9af6e68416fcf77d9659ac5abe5c7dee41331f359614637a4ff452033085335ee499830ed126ab584267 Cython-0.22.tar.gz' > Cython-0.22.tar.gz.sha512
echo '4941f5aa21aff3743546495fb073c10d2657ff42b2aff401903498638093d0e31e344cce778980f28a7170c6d29eab72ac074277b9d4088376e8692dc71e55c1 PyX-0.12.1.tar.gz' > PyX-0.12.1.tar.gz.sha512
echo 'a42f28ed8e49f04cf89e2ea7434c5ecbc264e7188dcb79ab97f745adf664dd9ab57f9a913543731635f90859536244ac37dca9adf0fc2aa1b215ba884839d160 Python-2.7.9.tgz' > Python-2.7.9.tgz.sha512
+echo '609cc82586fabecb25f25ecb410f2938e01d21cde85dd3f8824fe55c6edde9ecf3b7609195473d3fa05a16b9b121464f5414db1a0187103b78ea6edfa71684a7 Python-3.4.3.tgz' > Python-3.4.3.tgz.sha512
echo '276bd9c061ec9a27d478b33078a86f93164ee2da72210e12e2c9da71dcffeb64767e4460b93f257302b09328eda8655e93c4b9ae85e74472869afbeae35ca71e blas.tar.gz' > blas.tar.gz.sha512
echo '00ace5438cfa0c577e5f578d8a808613187eff5217c35164ffe044fbafdfec9e98f4192c02a7d67e01e5a5ccced630583ad1003c37697219b0f147343a3fdd12 bzip2-1.0.6.tar.gz' > bzip2-1.0.6.tar.gz.sha512
echo 'a296dfcaef7e853e58eed4e24b37c4fa29cfc6ac688def048480f4bb384b9e37ca447faf96eec7b378fd764ba291713f03ac464581d62275e28eb2ec99110ab6 reason-js-20120623.zip' > reason-js-20120623.zip.sha512
@@ -646,6 +669,7 @@
echo '93591068dc63af8d50a7925d528bc0cccdd705232c529b6162619fe28dddaf115e8a460b1842877d35160bd7ed480c1bd0bdbec57d1f359085bd1814e0c1c242 tornado-4.0.2.tar.gz' > tornado-4.0.2.tar.gz.sha512
echo '0d928ed688ed940d460fa8f8d574a9819dccc4e030d735a8c7db71b59287ee50fa741a08249e356c78356b03c2174f2f2699f05aa7dc3d380ed47d8d7bab5408 zeromq-4.0.5.tar.gz' > zeromq-4.0.5.tar.gz.sha512
echo 'ece209d4c7ec0cb58ede791444dc754e0d10811cbbdebe3df61c0fd9f9f9867c1c3ccd5f1827f847c005e24eef34fb5bf87b5d3f894d75da04f1797538290e4a zlib-1.2.8.tar.gz' > zlib-1.2.8.tar.gz.sha512
+echo '38a89aad89dc9aa682dbfbca623e2f69511f5e20d4a3526c01aabbc7e93ae78f20aac566676b431e111540b41540a1c4f644ce4174e7ecf052318612075e02dc setuptools-16.0.tar.gz' > setuptools-16.0.tar.gz.sha512
# Individual processes
[ -z "$HDF5_DIR" ] && get_ytproject $HDF5.tar.gz
[ $INST_ZLIB -eq 1 ] && get_ytproject $ZLIB.tar.gz
@@ -660,10 +684,11 @@
[ $INST_SCIPY -eq 1 ] && get_ytproject $SCIPY.tar.gz
[ $INST_SCIPY -eq 1 ] && get_ytproject blas.tar.gz
[ $INST_SCIPY -eq 1 ] && get_ytproject $LAPACK.tar.gz
-get_ytproject $PYTHON.tgz
+[ $INST_HG -eq 1 ] && get_ytproject $MERCURIAL.tar.gz
+[ $INST_PY3 -eq 1 ] && get_ytproject $PYTHON3.tgz
+get_ytproject $PYTHON2.tgz
get_ytproject $NUMPY.tar.gz
get_ytproject $MATPLOTLIB.tar.gz
-get_ytproject $MERCURIAL.tar.gz
get_ytproject $IPYTHON.tar.gz
get_ytproject $H5PY.tar.gz
get_ytproject $CYTHON.tar.gz
@@ -671,6 +696,7 @@
get_ytproject $NOSE.tar.gz
get_ytproject $PYTHON_HGLIB.tar.gz
get_ytproject $SYMPY.tar.gz
+get_ytproject $SETUPTOOLS.tar.gz
if [ $INST_BZLIB -eq 1 ]
then
if [ ! -e $BZLIB/done ]
@@ -787,11 +813,11 @@
fi
fi
-if [ ! -e $PYTHON/done ]
+if [ ! -e $PYTHON2/done ]
then
- echo "Installing Python. This may take a while, but don't worry. yt loves you."
- [ ! -e $PYTHON ] && tar xfz $PYTHON.tgz
- cd $PYTHON
+ echo "Installing Python 2. This may take a while, but don't worry. yt loves you."
+ [ ! -e $PYTHON2 ] && tar xfz $PYTHON2.tgz
+ cd $PYTHON2
( ./configure --prefix=${DEST_DIR}/ ${PYCONF_ARGS} 2>&1 ) 1>> ${LOG_FILE} || do_exit
( make ${MAKE_PROCS} 2>&1 ) 1>> ${LOG_FILE} || do_exit
@@ -802,7 +828,30 @@
cd ..
fi
-export PYTHONPATH=${DEST_DIR}/lib/python2.7/site-packages/
+if [ $INST_PY3 -eq 1 ]
+then
+ if [ ! -e $PYTHON3/done ]
+ then
+ echo "Installing Python 3. Because two Pythons are better than one."
+ [ ! -e $PYTHON3 ] && tar xfz $PYTHON3.tgz
+ cd $PYTHON3
+ ( ./configure --prefix=${DEST_DIR}/ ${PYCONF_ARGS} 2>&1 ) 1>> ${LOG_FILE} || do_exit
+
+ ( make ${MAKE_PROCS} 2>&1 ) 1>> ${LOG_FILE} || do_exit
+ ( make install 2>&1 ) 1>> ${LOG_FILE} || do_exit
+ ( ln -sf ${DEST_DIR}/bin/python3.4 ${DEST_DIR}/bin/pyyt 2>&1 ) 1>> ${LOG_FILE}
+ ( ln -sf ${DEST_DIR}/bin/python3.4 ${DEST_DIR}/bin/python 2>&1 ) 1>> ${LOG_FILE}
+ ( ln -sf ${DEST_DIR}/bin/python3-config ${DEST_DIR}/bin/python-config 2>&1 ) 1>> ${LOG_FILE}
+ ( make clean 2>&1) 1>> ${LOG_FILE} || do_exit
+ touch done
+ cd ..
+ fi
+fi
+
+export PYTHONPATH=${DEST_DIR}/lib/${PYTHON_EXEC}/site-packages/
+
+# Install setuptools
+do_setup_py $SETUPTOOLS
if [ $INST_HG -eq 1 ]
then
@@ -847,12 +896,10 @@
# This fixes problems with gfortran linking.
unset LDFLAGS
-
-echo "Installing distribute"
-( ${DEST_DIR}/bin/python2.7 ${YT_DIR}/distribute_setup.py 2>&1 ) 1>> ${LOG_FILE} || do_exit
-
+
echo "Installing pip"
-( ${DEST_DIR}/bin/easy_install-2.7 pip 2>&1 ) 1>> ${LOG_FILE} || do_exit
+( ${GETFILE} https://bootstrap.pypa.io/get-pip.py 2>&1 ) 1>> ${LOG_FILE} || do_exit
+( ${DEST_DIR}/bin/${PYTHON_EXEC} get-pip.py 2>&1 ) 1>> ${LOG_FILE} || do_exit
if [ $INST_SCIPY -eq 0 ]
then
@@ -986,13 +1033,14 @@
echo "Installing yt"
[ $INST_PNG -eq 1 ] && echo $PNG_DIR > png.cfg
-( export PATH=$DEST_DIR/bin:$PATH ; ${DEST_DIR}/bin/python2.7 setup.py develop 2>&1 ) 1>> ${LOG_FILE} || do_exit
+( export PATH=$DEST_DIR/bin:$PATH ; ${DEST_DIR}/bin/${PYTHON_EXEC} setup.py develop 2>&1 ) 1>> ${LOG_FILE} || do_exit
touch done
cd $MY_PWD
-if !( ( ${DEST_DIR}/bin/python2.7 -c "import readline" 2>&1 )>> ${LOG_FILE})
+if !( ( ${DEST_DIR}/bin/${PYTHON_EXEC} -c "import readline" 2>&1 )>> ${LOG_FILE}) || \
+ [[ "${MYOS##Darwin}" != "${MYOS}" && $INST_PY3 -eq 1 ]]
then
- if !( ( ${DEST_DIR}/bin/python2.7 -c "import gnureadline" 2>&1 )>> ${LOG_FILE})
+ if !( ( ${DEST_DIR}/bin/${PYTHON_EXEC} -c "import gnureadline" 2>&1 )>> ${LOG_FILE})
then
echo "Installing pure-python readline"
( ${DEST_DIR}/bin/pip install gnureadline 2>&1 ) 1>> ${LOG_FILE}
diff -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 -r 3a35c7aaf6aad4d1d1f623a110296a69f4f6d735 doc/source/analyzing/analysis_modules/halo_finders.rst
--- a/doc/source/analyzing/analysis_modules/halo_finders.rst
+++ b/doc/source/analyzing/analysis_modules/halo_finders.rst
@@ -116,7 +116,7 @@
the width of the smallest grid element in the simulation from the
last data snapshot (i.e. the one where time has evolved the
longest) in the time series:
- ``ds_last.index.get_smallest_dx() * ds_last['mpch']``.
+ ``ds_last.index.get_smallest_dx() * ds_last['Mpch']``.
* ``total_particles``, if supplied, this is a pre-calculated
total number of dark matter
particles present in the simulation. For example, this is useful
diff -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 -r 3a35c7aaf6aad4d1d1f623a110296a69f4f6d735 doc/source/analyzing/fields.rst
--- a/doc/source/analyzing/fields.rst
+++ b/doc/source/analyzing/fields.rst
@@ -271,6 +271,29 @@
For a practical application of this, see :ref:`cookbook-radial-velocity`.
+Gradient Fields
+---------------
+
+yt provides a way to compute gradients of spatial fields using the
+:meth:`~yt.frontends.flash.data_structures.FLASHDataset.add_gradient_fields`
+method. If you have a spatially-based field such as density or temperature,
+and want to calculate the gradient of that field, you can do it like so:
+
+.. code-block:: python
+
+ ds = yt.load("GasSloshing/sloshing_nomag2_hdf5_plt_cnt_0150")
+ grad_fields = ds.add_gradient_fields(("gas","temperature"))
+
+where ``grad_fields`` is a list of the new field names, representing the three
+components of the gradient and its magnitude, e.g., ``"temperature_gradient_x"``,
+``"temperature_gradient_y"``, ``"temperature_gradient_z"``, and
+``"temperature_gradient_magnitude"``. These can be used in calculations like any
+other field. For an example of creating and using these fields, see
+:ref:`cookbook-complicated-derived-fields`.
+
+.. note::
+
+ ``add_gradient_fields`` currently only supports Cartesian geometries!
+
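+A short follow-on sketch of how one of the returned gradient fields might be
+used (the plotting call is the standard yt API; the dataset is the one loaded
+above):
+
+.. code-block:: python
+
+    import yt
+
+    ds = yt.load("GasSloshing/sloshing_nomag2_hdf5_plt_cnt_0150")
+    grad_fields = ds.add_gradient_fields(("gas", "temperature"))
+    # "temperature_gradient_magnitude" is one of the names in grad_fields.
+    slc = yt.SlicePlot(ds, "z", ("gas", "temperature_gradient_magnitude"))
+    slc.save()
+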
General Particle Fields
-----------------------
diff -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 -r 3a35c7aaf6aad4d1d1f623a110296a69f4f6d735 doc/source/analyzing/generating_processed_data.rst
--- a/doc/source/analyzing/generating_processed_data.rst
+++ b/doc/source/analyzing/generating_processed_data.rst
@@ -47,10 +47,30 @@
frb = FixedResolutionBuffer(sl, (0.3, 0.5, 0.6, 0.8), (512, 512))
my_image = frb["density"]
-This resultant array can be saved out to disk or visualized using a
-hand-constructed Matplotlib image, for instance using
+This image may then be used in a hand-constructed Matplotlib figure, for instance with
:func:`~matplotlib.pyplot.imshow`.
+The buffer arrays can be saved out to disk in either HDF5 or FITS format:
+
+.. code-block:: python
+
+ frb.export_hdf5("my_images.h5", fields=["density","temperature"])
+ frb.export_fits("my_images.fits", fields=["density","temperature"],
+ clobber=True, units="kpc")
+
+In the FITS case, there is an option for setting the ``units`` of the coordinate system in
+the file. If you want to overwrite a file with the same name, set ``clobber=True``.
+
+The :class:`~yt.visualization.fixed_resolution.FixedResolutionBuffer` can even be exported
+as a 2D dataset itself, which may be operated on in the same way as any other dataset in yt:
+
+.. code-block:: python
+
+ ds_frb = frb.export_dataset(fields=["density","temperature"], nprocs=8)
+ sp = ds_frb.sphere("c", (100.,"kpc"))
+
+where the ``nprocs`` parameter can be used to decompose the image into ``nprocs`` grids.
+
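+Since the export produces a true yt dataset, the usual analysis machinery
+works on it as well; a small sketch continuing from ``ds_frb`` and ``sp``
+above:
+
+.. code-block:: python
+
+    # sp is the sphere created from ds_frb above; derived quantities
+    # (here, field extrema) behave as they would for any other dataset.
+    print(sp.quantities.extrema("density"))
+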
.. _generating-profiles-and-histograms:
Profiles and Histograms
diff -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 -r 3a35c7aaf6aad4d1d1f623a110296a69f4f6d735 doc/source/analyzing/time_series_analysis.rst
--- a/doc/source/analyzing/time_series_analysis.rst
+++ b/doc/source/analyzing/time_series_analysis.rst
@@ -79,9 +79,7 @@
Analyzing an Entire Simulation
------------------------------
-.. note:: Currently only implemented for Enzo. Other simulation types coming
- soon. Until then, rely on the above prescription for creating
- ``DatasetSeries`` objects.
+.. note:: Implemented for: Enzo, Gadget, OWLS.
The parameter file used to run a simulation contains all the information
necessary to know what datasets should be available. The ``simulation``
@@ -93,8 +91,7 @@
.. code-block:: python
import yt
- my_sim = yt.simulation('enzo_tiny_cosmology/32Mpc_32.enzo', 'Enzo',
- find_outputs=False)
+ my_sim = yt.simulation('enzo_tiny_cosmology/32Mpc_32.enzo', 'Enzo')
Then, create a ``DatasetSeries`` object with the
:meth:`frontends.enzo.simulation_handling.EnzoSimulation.get_time_series`
@@ -123,10 +120,10 @@
to select a subset of the total data:
* ``time_data`` (*bool*): Whether or not to include time outputs when
- gathering datasets for time series. Default: True.
+ gathering datasets for time series. Default: True. (Enzo only)
* ``redshift_data`` (*bool*): Whether or not to include redshift outputs
- when gathering datasets for time series. Default: True.
+ when gathering datasets for time series. Default: True. (Enzo only)
* ``initial_time`` (*float*): The earliest time for outputs to be included.
If None, the initial time of the simulation is used. This can be used in
@@ -139,15 +136,12 @@
* ``times`` (*list*): A list of times for which outputs will be found.
Default: None.
-* ``time_units`` (*str*): The time units used for requesting outputs by time.
- Default: '1' (code units).
-
* ``initial_redshift`` (*float*): The earliest redshift for outputs to be
included. If None, the initial redshift of the simulation is used. This
can be used in combination with either ``final_time`` or ``final_redshift``.
Default: None.
-* ``final_time`` (*float*): The latest redshift for outputs to be included.
+* ``final_redshift`` (*float*): The latest redshift for outputs to be included.
If None, the final redshift of the simulation is used. This can be used
in combination with either ``initial_time`` or ``initial_redshift``.
Default: None.
@@ -157,11 +151,11 @@
* ``initial_cycle`` (*float*): The earliest cycle for outputs to be
included. If None, the initial cycle of the simulation is used. This can
- only be used with final_cycle. Default: None.
+ only be used with final_cycle. Default: None. (Enzo only)
* ``final_cycle`` (*float*): The latest cycle for outputs to be included.
If None, the final cycle of the simulation is used. This can only be used
- in combination with initial_cycle. Default: None.
+ in combination with initial_cycle. Default: None. (Enzo only)
* ``tolerance`` (*float*): Used in combination with ``times`` or ``redshifts``
keywords, this is the tolerance within which outputs are accepted given
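A brief sketch pulling several of these keywords together (parameter file as in
the example above):

.. code-block:: python

    import yt

    my_sim = yt.simulation('enzo_tiny_cosmology/32Mpc_32.enzo', 'Enzo')
    # Keep only outputs between z = 5 and z = 0, using the keywords
    # described in the list above.
    my_sim.get_time_series(initial_redshift=5.0, final_redshift=0.0)
    for ds in my_sim:
        print(ds.current_time)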
diff -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 -r 3a35c7aaf6aad4d1d1f623a110296a69f4f6d735 doc/source/cookbook/calculating_information.rst
--- a/doc/source/cookbook/calculating_information.rst
+++ b/doc/source/cookbook/calculating_information.rst
@@ -82,6 +82,17 @@
.. yt_cookbook:: derived_field.py
+.. _cookbook-complicated-derived-fields:
+
+Complicated Derived Fields
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This recipe demonstrates how to use the
+:meth:`~yt.frontends.flash.data_structures.FLASHDataset.add_gradient_fields` method
+to generate gradient fields and use them in a more complex derived field.
+
+.. yt_cookbook:: hse_field.py
+
Using Particle Filters to Calculate Star Formation Rates
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
diff -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 -r 3a35c7aaf6aad4d1d1f623a110296a69f4f6d735 doc/source/cookbook/hse_field.py
--- a/doc/source/cookbook/hse_field.py
+++ b/doc/source/cookbook/hse_field.py
@@ -1,44 +1,32 @@
import numpy as np
import yt
-from yt.fields.field_plugin_registry import \
- register_field_plugin
-from yt.fields.fluid_fields import \
- setup_gradient_fields
-
-
-# Define the components of the gravitational acceleration vector field by
-# taking the gradient of the gravitational potential
-@register_field_plugin
-def setup_my_fields(registry, ftype="gas", slice_info=None):
- setup_gradient_fields(registry, (ftype, "gravitational_potential"),
- "cm ** 2 / s ** 2", slice_info)
-
-# Define the "degree of hydrostatic equilibrium" field
-
-
-@yt.derived_field(name='HSE', units=None, take_log=False,
- display_name='Hydrostatic Equilibrium')
-def HSE(field, data):
-
- gx = data["density"] * data["gravitational_potential_gradient_x"]
- gy = data["density"] * data["gravitational_potential_gradient_y"]
- gz = data["density"] * data["gravitational_potential_gradient_z"]
-
- hx = data["pressure_gradient_x"] - gx
- hy = data["pressure_gradient_y"] - gy
- hz = data["pressure_gradient_z"] - gz
-
- h = np.sqrt((hx * hx + hy * hy + hz * hz) / (gx * gx + gy * gy + gz * gz))
-
- return h
-
-
# Open a dataset from when there's a lot of sloshing going on.
ds = yt.load("GasSloshingLowRes/sloshing_low_res_hdf5_plt_cnt_0350")
-# gradient operator requires periodic boundaries. This dataset has
+# Define the components of the gravitational acceleration vector field by
+# taking the gradient of the gravitational potential
+grad_fields = ds.add_gradient_fields(("gas","gravitational_potential"))
+
+# We don't need to do the same for the pressure field because yt already
+# has pressure gradient fields. Now, define the "degree of hydrostatic
+# equilibrium" field.
+
+def _hse(field, data):
+ # Remember that g is the negative of the potential gradient
+ gx = -data["density"] * data["gravitational_potential_gradient_x"]
+ gy = -data["density"] * data["gravitational_potential_gradient_y"]
+ gz = -data["density"] * data["gravitational_potential_gradient_z"]
+ hx = data["pressure_gradient_x"] - gx
+ hy = data["pressure_gradient_y"] - gy
+ hz = data["pressure_gradient_z"] - gz
+ h = np.sqrt((hx * hx + hy * hy + hz * hz) / (gx * gx + gy * gy + gz * gz))
+ return h
+ds.add_field(('gas','HSE'), function=_hse, units="", take_log=False,
+ display_name='Hydrostatic Equilibrium')
+
+# The gradient operator requires periodic boundaries. This dataset has
# open boundary conditions. We need to hack it for now (this will be fixed
# in future version of yt)
ds.periodicity = (True, True, True)
diff -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 -r 3a35c7aaf6aad4d1d1f623a110296a69f4f6d735 doc/source/examining/loading_data.rst
--- a/doc/source/examining/loading_data.rst
+++ b/doc/source/examining/loading_data.rst
@@ -469,6 +469,8 @@
first image in the primary file. If this is not the case,
yt will raise a warning and will not load this field.
+.. _additional_fits_options:
+
Additional Options
^^^^^^^^^^^^^^^^^^
@@ -570,6 +572,35 @@
``WCSAxes`` is still in an experimental state, but as its functionality improves it will be
utilized more here.
+``create_spectral_slabs``
+"""""""""""""""""""""""""
+
+.. note::
+
+ The following functionality requires the `spectral-cube <http://spectral-cube.readthedocs.org>`_
+ library to be installed.
+
+If you have a spectral intensity dataset and would like to extract emission in
+particular slabs of a certain width along the spectral axis,
+``create_spectral_slabs`` can be used to generate a dataset with these slabs as
+separate fields. In this example, we use it to extract individual lines from an
+intensity cube:
+
+.. code-block:: python
+
+ slab_centers = {'13CN': (218.03117, 'GHz'),
+ 'CH3CH2CHO': (218.284256, 'GHz'),
+ 'CH3NH2': (218.40956, 'GHz')}
+ slab_width = (0.05, "GHz")
+ ds = create_spectral_slabs("intensity_cube.fits",
+ slab_centers, slab_width,
+ nan_mask=0.0)
+
+All keyword arguments to ``create_spectral_slabs`` are passed on to ``load`` when creating the dataset
+(see :ref:`additional_fits_options` above). In the returned dataset, the different slabs will be
+different fields, with the field names taken from the keys in ``slab_centers``. The WCS coordinates
+on the spectral axis are reset so that the center of the domain along this axis is zero, and the
+left and right edges of the domain along this axis are :math:`\pm` ``0.5*slab_width``.
+
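+Once loaded, each slab is an ordinary field in the returned dataset, named
+after the corresponding key in ``slab_centers``; a brief illustrative sketch
+(the axis and plotting choices here are assumptions):
+
+.. code-block:: python
+
+    import yt
+
+    # ds is the dataset returned by create_spectral_slabs above.
+    slc = yt.SlicePlot(ds, "z", "13CN")
+    slc.save()
+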
Examples of Using FITS Data
^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -635,13 +666,14 @@
import yt
ds = yt.load("snapshot_061.hdf5")
-However, yt cannot detect raw-binary Gadget data, and so you must specify the
-format as being Gadget:
+Gadget data in raw binary format can also be loaded with the ``load`` command.
+This is only supported for snapshots created with the ``SnapFormat`` parameter
+set to 1 (the standard for Gadget-2).
.. code-block:: python
import yt
- ds = yt.GadgetDataset("snapshot_061")
+ ds = yt.load("snapshot_061")
.. _particle-bbox:
diff -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 -r 3a35c7aaf6aad4d1d1f623a110296a69f4f6d735 doc/source/installing.rst
--- a/doc/source/installing.rst
+++ b/doc/source/installing.rst
@@ -213,10 +213,31 @@
++++++++++++++++++++++++++++++++++++++
To install yt from source, you must make sure you have yt's dependencies
-installed on your system. These include: a C compiler, ``HDF5``, ``python``,
-``Cython``, ``NumPy``, ``matplotlib``, ``sympy``, and ``h5py``. From here, you
-can use ``pip`` (which comes with ``Python``) to install the latest stable
-version of yt:
+installed on your system.
+
+If you use a Linux OS, use your distro's package manager to install these yt
+dependencies on your system:
+
+- ``HDF5``
+- ``zeromq``
+- ``sqlite``
+- ``mercurial``
+
+Then install the required Python packages with ``pip``:
+
+.. code-block:: bash
+
+ $ pip install -r requirements.txt
+
+If you're using IPython notebooks, you can install its dependencies
+with ``pip`` as well:
+
+.. code-block:: bash
+
+ $ pip install -r optional-requirements.txt
+
+From here, you can use ``pip`` (which comes with ``Python``) to install the latest
+stable version of yt:
.. code-block:: bash
diff -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 -r 3a35c7aaf6aad4d1d1f623a110296a69f4f6d735 doc/source/visualizing/FITSImageBuffer.ipynb
--- a/doc/source/visualizing/FITSImageBuffer.ipynb
+++ /dev/null
@@ -1,205 +0,0 @@
-{
- "metadata": {
- "name": "",
- "signature": "sha256:872f7525edd3c1ee09c67f6ecdd8552218df05ebe5ab73bcab55654edf0ac2bb"
- },
- "nbformat": 3,
- "nbformat_minor": 0,
- "worksheets": [
- {
- "cells": [
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "yt has capabilities for writing 2D and 3D uniformly gridded data generated from datasets to FITS files. This is via the `FITSImageBuffer` class, which has subclasses `FITSSlice` and `FITSProjection` to write slices and projections directly to FITS. We'll test this out on an Athena dataset."
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "%matplotlib inline\n",
- "import yt\n",
- "from yt.utilities.fits_image import FITSImageBuffer, FITSSlice, FITSProjection"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "ds = yt.load(\"MHDSloshing/virgo_low_res.0054.vtk\", parameters={\"length_unit\":(1.0,\"Mpc\"),\n",
- " \"mass_unit\":(1.0e14,\"Msun\"),\n",
- " \"time_unit\":(1.0,\"Myr\")})"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "To demonstrate a useful example of creating a FITS file, let's first make a `ProjectionPlot`:"
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "prj = yt.ProjectionPlot(ds, \"z\", [\"temperature\"], weight_field=\"density\", width=(500.,\"kpc\"))\n",
- "prj.show()"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Suppose that we wanted to write this projection to a FITS file for analysis and visualization in other programs, such as ds9. We can do that using `FITSProjection`:"
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "prj_fits = FITSProjection(ds, \"z\", [\"temperature\"], weight_field=\"density\")"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "which took the same parameters as `ProjectionPlot` except the width, because `FITSProjection` and `FITSSlice` always make slices and projections of the width of the domain size, at the finest resolution available in the simulation, in a unit determined to be appropriate for the physical size of the dataset. `prj_fits` is a full-fledged FITS file in memory, specifically an [AstroPy `HDUList`](http://astropy.readthedocs.org/en/latest/io/fits/api/hdulists.html) object. This means that we can use all of the methods inherited from `HDUList`:"
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "prj_fits.info()"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "`info` shows us the contents of the virtual FITS file. We can also look at the header for the `\"temperature\"` image, like so:"
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "prj_fits[\"temperature\"].header"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "where we can see that the temperature units are in Kelvin and the cell widths are in kiloparsecs. The projection can be written to disk using the `writeto` method:"
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "prj_fits.writeto(\"sloshing.fits\", clobber=True)"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Since yt can read FITS image files, it can be loaded up just like any other dataset:"
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "ds2 = yt.load(\"sloshing.fits\")"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "and we can make a `SlicePlot` of the 2D image, which shows the same data as the previous image:"
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "slc2 = yt.SlicePlot(ds2, \"z\", [\"temperature\"], width=(500.,\"kpc\"))\n",
- "slc2.set_log(\"temperature\", True)\n",
- "slc2.show()"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "If you want more fine-grained control over what goes into the FITS file, you can call `FITSImageBuffer` directly, with various kinds of inputs. For example, you could use a `FixedResolutionBuffer`, and specify you want the units in parsecs instead:"
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "slc3 = ds.slice(0, 0.0)\n",
- "frb = slc3.to_frb((500.,\"kpc\"), 800)\n",
- "fib = FITSImageBuffer(frb, fields=[\"density\",\"temperature\"], units=\"pc\")"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Finally, a 3D FITS cube can be created from a covering grid:"
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "cvg = ds.covering_grid(ds.index.max_level, [-0.5,-0.5,-0.5], [64, 64, 64], fields=[\"density\",\"temperature\"])\n",
- "fib = FITSImageBuffer(cvg, fields=[\"density\",\"temperature\"], units=\"Mpc\")"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- }
- ],
- "metadata": {}
- }
- ]
-}
\ No newline at end of file
diff -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 -r 3a35c7aaf6aad4d1d1f623a110296a69f4f6d735 doc/source/visualizing/FITSImageData.ipynb
--- /dev/null
+++ b/doc/source/visualizing/FITSImageData.ipynb
@@ -0,0 +1,409 @@
+{
+ "metadata": {
+ "name": "",
+ "signature": "sha256:c7de5ef190feaa2289595aec7eaa05db02fd535e408e0d04aa54088b0bd3ebae"
+ },
+ "nbformat": 3,
+ "nbformat_minor": 0,
+ "worksheets": [
+ {
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "yt has capabilities for writing 2D and 3D uniformly gridded data generated from datasets to FITS files. This is via the `FITSImageData` class. We'll test these capabilities out on an Athena dataset."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "collapsed": false,
+ "input": [
+ "import yt\n",
+ "from yt.utilities.fits_image import FITSImageData, FITSProjection"
+ ],
+ "language": "python",
+ "metadata": {},
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "collapsed": false,
+ "input": [
+ "ds = yt.load(\"MHDSloshing/virgo_low_res.0054.vtk\", parameters={\"length_unit\":(1.0,\"Mpc\"),\n",
+ " \"mass_unit\":(1.0e14,\"Msun\"),\n",
+ " \"time_unit\":(1.0,\"Myr\")})"
+ ],
+ "language": "python",
+ "metadata": {},
+ "outputs": []
+ },
+ {
+ "cell_type": "heading",
+ "level": 2,
+ "metadata": {},
+ "source": [
+ "Creating FITS images from Slices and Projections"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "There are several ways to make a `FITSImageData` instance. The most intuitive ways are to use the `FITSSlice`, `FITSProjection`, `FITSOffAxisSlice`, and `FITSOffAxisProjection` classes to write slices and projections directly to FITS. To demonstrate a useful example of creating a FITS file, let's first make a `ProjectionPlot`:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "collapsed": false,
+ "input": [
+ "prj = yt.ProjectionPlot(ds, \"z\", [\"temperature\"], weight_field=\"density\", width=(500.,\"kpc\"))\n",
+ "prj.show()"
+ ],
+ "language": "python",
+ "metadata": {},
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Suppose that we wanted to write this projection to a FITS file for analysis and visualization in other programs, such as ds9. We can do that using `FITSProjection`:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "collapsed": false,
+ "input": [
+ "prj_fits = FITSProjection(ds, \"z\", [\"temperature\"], weight_field=\"density\")"
+ ],
+ "language": "python",
+ "metadata": {},
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "which took the same parameters as `ProjectionPlot` except the width, because `FITSProjection` and `FITSSlice` always make slices and projections of the width of the domain size, at the finest resolution available in the simulation, in a unit determined to be appropriate for the physical size of the dataset."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Because `FITSImageData` inherits from the [AstroPy `HDUList`](http://astropy.readthedocs.org/en/latest/io/fits/api/hdulists.html) class, we can call its methods. For example, `info` shows us the contents of the virtual FITS file:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "collapsed": false,
+ "input": [
+ "prj_fits.info()"
+ ],
+ "language": "python",
+ "metadata": {},
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "We can also look at the header for a particular field:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "collapsed": false,
+ "input": [
+ "prj_fits[\"temperature\"].header"
+ ],
+ "language": "python",
+ "metadata": {},
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "where we can see that the temperature units are in Kelvin and the cell widths are in kiloparsecs. If we want the raw image data with units, we can call `get_data`:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "collapsed": false,
+ "input": [
+ "prj_fits.get_data(\"temperature\")"
+ ],
+ "language": "python",
+ "metadata": {},
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "We can use the `set_unit` method to change the units of a particular field:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "collapsed": false,
+ "input": [
+ "prj_fits.set_unit(\"temperature\",\"R\")\n",
+ "prj_fits.get_data(\"temperature\")"
+ ],
+ "language": "python",
+ "metadata": {},
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "The image can be written to disk using the `writeto` method:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "collapsed": false,
+ "input": [
+ "prj_fits.writeto(\"sloshing.fits\", clobber=True)"
+ ],
+ "language": "python",
+ "metadata": {},
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Since yt can read FITS image files, it can be loaded up just like any other dataset:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "collapsed": false,
+ "input": [
+ "ds2 = yt.load(\"sloshing.fits\")"
+ ],
+ "language": "python",
+ "metadata": {},
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "and we can make a `SlicePlot` of the 2D image, which shows the same data as the previous image:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "collapsed": false,
+ "input": [
+ "slc2 = yt.SlicePlot(ds2, \"z\", [\"temperature\"], width=(500.,\"kpc\"))\n",
+ "slc2.set_log(\"temperature\", True)\n",
+ "slc2.show()"
+ ],
+ "language": "python",
+ "metadata": {},
+ "outputs": []
+ },
+ {
+ "cell_type": "heading",
+ "level": 2,
+ "metadata": {},
+ "source": [
+ "Using `FITSImageData` directly"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "If you want more fine-grained control over what goes into the FITS file, you can call `FITSImageData` directly, with various kinds of inputs. For example, you could use a `FixedResolutionBuffer`, and specify you want the units in parsecs instead:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "collapsed": false,
+ "input": [
+ "slc3 = ds.slice(0, 0.0)\n",
+ "frb = slc3.to_frb((500.,\"kpc\"), 800)\n",
+ "fid_frb = FITSImageData(frb, fields=[\"density\",\"temperature\"], units=\"pc\")"
+ ],
+ "language": "python",
+ "metadata": {},
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "A 3D FITS cube can also be created from a covering grid:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "collapsed": false,
+ "input": [
+ "cvg = ds.covering_grid(ds.index.max_level, [-0.5,-0.5,-0.5], [64, 64, 64], fields=[\"density\",\"temperature\"])\n",
+ "fid_cvg = FITSImageData(cvg, fields=[\"density\",\"temperature\"], units=\"Mpc\")"
+ ],
+ "language": "python",
+ "metadata": {},
+ "outputs": []
+ },
+ {
+ "cell_type": "heading",
+ "level": 2,
+ "metadata": {},
+ "source": [
+ "Other `FITSImageData` Methods"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "A `FITSImageData` instance can be generated from one previously written to disk using the `from_file` classmethod:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "collapsed": false,
+ "input": [
+ "fid = FITSImageData.from_file(\"sloshing.fits\")\n",
+ "fid.info()"
+ ],
+ "language": "python",
+ "metadata": {},
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Multiple `FITSImageData` can be combined to create a new one, provided that the coordinate information is the same:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "collapsed": false,
+ "input": [
+ "prj_fits2 = FITSProjection(ds, \"z\", [\"density\"])\n",
+ "prj_fits3 = FITSImageData.from_images([prj_fits, prj_fits2])\n",
+ "prj_fits3.info()"
+ ],
+ "language": "python",
+ "metadata": {},
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Alternatively, individual fields can be popped as well:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "collapsed": false,
+ "input": [
+ "dens_fits = prj_fits3.pop(\"density\")"
+ ],
+ "language": "python",
+ "metadata": {},
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "collapsed": false,
+ "input": [
+ "dens_fits.info()"
+ ],
+ "language": "python",
+ "metadata": {},
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "collapsed": false,
+ "input": [
+ "prj_fits3.info()"
+ ],
+ "language": "python",
+ "metadata": {},
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "So far, the FITS images we have shown have linear spatial coordinates. One may want to take a projection of an object and make a crude mock observation out of it, with celestial coordinates. For this, we can use the `create_sky_wcs` method. Specify a center (RA, Dec) coordinate in degrees, as well as a linear scale in terms of angle per distance:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "collapsed": false,
+ "input": [
+ "sky_center = [30.,45.] # in degrees\n",
+ "sky_scale = (2.5, \"arcsec/kpc\") # could also use a YTQuantity\n",
+ "prj_fits.create_sky_wcs(sky_center, sky_scale, ctype=[\"RA---TAN\",\"DEC--TAN\"])"
+ ],
+ "language": "python",
+ "metadata": {},
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "By the default, a tangent RA/Dec projection is used, but one could also use another projection using the `ctype` keyword. We can now look at the header and see it has the appropriate WCS:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "collapsed": false,
+ "input": [
+ "prj_fits[\"temperature\"].header"
+ ],
+ "language": "python",
+ "metadata": {},
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Finally, we can add header keywords to a single field or for all fields in the FITS image using `update_header`:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "collapsed": false,
+ "input": [
+ "fid_frb.update_header(\"all\", \"time\", 0.1) # Update all the fields\n",
+ "fid_frb.update_header(\"temperature\", \"scale\", \"Rankine\") # Update just one field"
+ ],
+ "language": "python",
+ "metadata": {},
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "collapsed": false,
+ "input": [
+ "print fid_frb[\"density\"].header[\"time\"]\n",
+ "print fid_frb[\"temperature\"].header[\"scale\"]"
+ ],
+ "language": "python",
+ "metadata": {},
+ "outputs": []
+ }
+ ],
+ "metadata": {}
+ }
+ ]
+}
\ No newline at end of file
diff -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 -r 3a35c7aaf6aad4d1d1f623a110296a69f4f6d735 doc/source/visualizing/manual_plotting.rst
--- a/doc/source/visualizing/manual_plotting.rst
+++ b/doc/source/visualizing/manual_plotting.rst
@@ -66,6 +66,57 @@
setting up multiple axes with colorbars easier than it would be using only
matplotlib can be found in the :ref:`advanced-multi-panel` cookbook recipe.
+.. _frb-filters:
+
+Fixed Resolution Buffer Filters
+-------------------------------
+
+The FRB can be modified with a set of predefined filters in order to, e.g.,
+create realistic-looking mock observation images out of simulation data.
+Applying a filter is an irreversible operation, hence the order in which
+filters are applied matters.
+
+.. python-script::
+
+ import matplotlib
+ matplotlib.use('Agg')
+ from matplotlib import pyplot as plt
+
+ import yt
+
+ ds = yt.load('IsolatedGalaxy/galaxy0030/galaxy0030')
+ slc = ds.slice('z', 0.5)
+ frb = slc.to_frb((20, 'kpc'), 512)
+ frb.apply_gauss_beam(nbeam=30, sigma=2.0)
+ frb.apply_white_noise(5e-23)
+ plt.imshow(frb['density'].d)
+ plt.savefig('frb_filters.png')
+
+Currently available filters:
+
+Gaussian Smoothing
+~~~~~~~~~~~~~~~~~~
+
+.. function:: apply_gauss_beam(self, nbeam=30, sigma=2.0)
+
+ (This is a proxy for
+ :class:`~yt.visualization.fixed_resolution_filters.FixedResolutionBufferGaussBeamFilter`.)
+
+ This filter convolves the FRB with a 2D Gaussian that is "nbeam" pixels
+ wide and has standard deviation "sigma".
+
+White Noise
+~~~~~~~~~~~
+
+.. function:: apply_white_noise(self, bg_lvl=None)
+
+ (This is a proxy for
+ :class:`~yt.visualization.fixed_resolution_filters.FixedResolutionBufferWhiteNoiseFilter`.)
+
+ This filter adds white noise with amplitude "bg_lvl" to the FRB.
+ If "bg_lvl" is not given, the 10th percentile of the FRB's values is
+ used instead.
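+
+As a short, illustrative sketch (the amplitude 5e-23 below is an arbitrary
+example value, not a recommendation), the two filters can be combined with
+the noise level given explicitly:
+
+.. code-block:: python
+
+    import yt
+
+    ds = yt.load('IsolatedGalaxy/galaxy0030/galaxy0030')
+    frb = ds.slice('z', 0.5).to_frb((20, 'kpc'), 512)
+    # Smooth first, then add noise; reversing the calls would smooth
+    # the noise as well, since each filter is applied in place.
+    frb.apply_gauss_beam(nbeam=30, sigma=2.0)
+    frb.apply_white_noise(bg_lvl=5e-23)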
+
.. _manual-line-plots:
Line Plots
diff -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 -r 3a35c7aaf6aad4d1d1f623a110296a69f4f6d735 doc/source/visualizing/sketchfab.rst
--- a/doc/source/visualizing/sketchfab.rst
+++ b/doc/source/visualizing/sketchfab.rst
@@ -56,7 +56,7 @@
import yt
ds = yt.load("/data/workshop2012/IsolatedGalaxy/galaxy0030/galaxy0030")
- sphere = ds.sphere("max", (1.0, "mpc"))
+ sphere = ds.sphere("max", (1.0, "Mpc"))
surface = ds.surface(sphere, "density", 1e-27)
This object, ``surface``, can be queried for values on the surface. For
@@ -172,7 +172,7 @@
trans = [1.0, 0.5]
filename = './surfaces'
- sphere = ds.sphere("max", (1.0, "mpc"))
+ sphere = ds.sphere("max", (1.0, "Mpc"))
for i,r in enumerate(rho):
surf = ds.surface(sphere, 'density', r)
surf.export_obj(filename, transparency = trans[i], color_field='temperature', plot_index = i)
@@ -248,7 +248,7 @@
return (data['density']*data['density']*np.sqrt(data['temperature']))
add_field("emissivity", function=_Emissivity, units=r"g*K/cm**6")
- sphere = ds.sphere("max", (1.0, "mpc"))
+ sphere = ds.sphere("max", (1.0, "Mpc"))
for i,r in enumerate(rho):
surf = ds.surface(sphere, 'density', r)
surf.export_obj(filename, transparency = trans[i],
diff -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 -r 3a35c7aaf6aad4d1d1f623a110296a69f4f6d735 doc/source/visualizing/writing_fits_images.rst
--- a/doc/source/visualizing/writing_fits_images.rst
+++ b/doc/source/visualizing/writing_fits_images.rst
@@ -3,4 +3,4 @@
Writing FITS Images
==========================
-.. notebook:: FITSImageBuffer.ipynb
\ No newline at end of file
+.. notebook:: FITSImageData.ipynb
\ No newline at end of file
diff -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 -r 3a35c7aaf6aad4d1d1f623a110296a69f4f6d735 optional-requirements.txt
--- /dev/null
+++ b/optional-requirements.txt
@@ -0,0 +1,1 @@
+ipython[notebook]
\ No newline at end of file
diff -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 -r 3a35c7aaf6aad4d1d1f623a110296a69f4f6d735 requirements.txt
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,6 @@
+numpy==1.9.2
+matplotlib==1.4.3
+Cython==0.22
+h5py==2.5.0
+nose==1.3.6
+sympy==0.7.6
diff -r 2d7db3e1f6c4946da38ce036ba97ace04b80dbd0 -r 3a35c7aaf6aad4d1d1f623a110296a69f4f6d735 setup.py
--- a/setup.py
+++ b/setup.py
@@ -13,11 +13,6 @@
sys.exit(1)
import setuptools
-from distutils.version import StrictVersion
-if StrictVersion(setuptools.__version__) < StrictVersion('0.7.0'):
- import distribute_setup
- distribute_setup.use_setuptools()
-
from distutils.command.build_py import build_py
from numpy.distutils.misc_util import appendpath
from numpy.distutils.command import install_data as np_install_data
This diff is so big that we needed to truncate the remainder.
https://bitbucket.org/yt_analysis/yt/commits/6eed5a4fec99/
Changeset: 6eed5a4fec99
Branch: yt
User: brittonsmith
Date: 2015-07-10 12:30:35+00:00
Summary: Enabling subhalo fields.
Affected #: 1 file
diff -r 3a35c7aaf6aad4d1d1f623a110296a69f4f6d735 -r 6eed5a4fec99cc06d355692b5a611b5abb299653 yt/frontends/gadget_fof/io.py
--- a/yt/frontends/gadget_fof/io.py
+++ b/yt/frontends/gadget_fof/io.py
@@ -55,7 +55,7 @@
def _read_offset_particle_field(self, field, data_file, fh):
field_data = np.empty(data_file.total_particles["Group"], dtype="float64")
- fofindex = np.arange(data_file.total_particles["Group"]) + data_file.index_start["FOF"]
+ fofindex = np.arange(data_file.total_particles["Group"]) + data_file.index_start["Group"]
for offset_file in data_file.offset_files:
if fh.filename == offset_file.filename:
ofh = fh
@@ -187,21 +187,19 @@
fields.append((ptype, "%s_%d" % (fname, i)))
else:
fields.append((ptype, fname))
- ### Leave this block of code in case we need to do this.
- ### This will have to wait until I get a dataset with subhalos.
- # elif ptype == "SUBFIND" and \
- # not fh[field].size % fh["/SUBFIND"].attrs["Number_of_groups"]:
- # # These are actually FOF fields, but they were written after
- # # a load balancing step moved halos around and thus they do not
- # # correspond to the halos stored in the FOF group.
- # my_div = fh[field].size / fh["/SUBFIND"].attrs["Number_of_groups"]
- # fname = fh[field].name[fh[field].name.find(ptype) + len(ptype) + 1:]
- # if my_div > 1:
- # for i in range(my_div):
- # fields.append(("FOF", "%s_%d" % (fname, i)))
- # else:
- # fields.append(("FOF", fname))
- # offset_fields.append(fname)
+ elif ptype == "Subfind" and \
+ not fh[field].size % fh["/Subfind"].attrs["Number_of_groups"]:
+ # These are actually Group fields, but they were written after
+ # a load balancing step moved halos around and thus they do not
+ # correspond to the halos stored in the Group group.
+ my_div = fh[field].size / fh["/Subfind"].attrs["Number_of_groups"]
+ fname = fh[field].name[fh[field].name.find(ptype) + len(ptype) + 1:]
+ if my_div > 1:
+ for i in range(my_div):
+ fields.append(("Group", "%s_%d" % (fname, i)))
+ else:
+ fields.append(("Group", fname))
+ offset_fields.append(fname)
else:
mylog.warn("Cannot add field (%s, %s) with size %d." % \
(ptype, fh[field].name, fh[field].size))
https://bitbucket.org/yt_analysis/yt/commits/9e8ee1a7aec8/
Changeset: 9e8ee1a7aec8
Branch: yt
User: brittonsmith
Date: 2015-07-10 12:36:34+00:00
Summary: Adding subhalo fields.
Affected #: 1 file
diff -r 6eed5a4fec99cc06d355692b5a611b5abb299653 -r 9e8ee1a7aec878b08b204c894f7d3d928807ea58 yt/frontends/gadget_fof/fields.py
--- a/yt/frontends/gadget_fof/fields.py
+++ b/yt/frontends/gadget_fof/fields.py
@@ -29,12 +29,20 @@
)
known_particle_fields = (
- ("GroupPos_0", (p_units, ["particle_position_x"], None)),
- ("GroupPos_1", (p_units, ["particle_position_y"], None)),
- ("GroupPos_2", (p_units, ["particle_position_z"], None)),
- ("GroupVel_0", (v_units, ["particle_velocity_x"], None)),
- ("GroupVel_1", (v_units, ["particle_velocity_y"], None)),
- ("GroupVel_2", (v_units, ["particle_velocity_z"], None)),
- ("GroupMass", (m_units, ["particle_mass"], None)),
- ("GroupLen", ("", ["particle_number"], None)),
+ ("GroupPos_0", (p_units, ["Group", "particle_position_x"], None)),
+ ("GroupPos_1", (p_units, ["Group", "particle_position_y"], None)),
+ ("GroupPos_2", (p_units, ["Group", "particle_position_z"], None)),
+ ("GroupVel_0", (v_units, ["Group", "particle_velocity_x"], None)),
+ ("GroupVel_1", (v_units, ["Group", "particle_velocity_y"], None)),
+ ("GroupVel_2", (v_units, ["Group", "particle_velocity_z"], None)),
+ ("GroupMass", (m_units, ["Group", "particle_mass"], None)),
+ ("GroupLen", ("", ["Group", "particle_number"], None)),
+ ("SubhaloPos_0", (p_units, ["Subhalo", "particle_position_x"], None)),
+ ("SubhaloPos_1", (p_units, ["Subhalo", "particle_position_y"], None)),
+ ("SubhaloPos_2", (p_units, ["Subhalo", "particle_position_z"], None)),
+ ("SubhaloVel_0", (v_units, ["Subhalo", "particle_velocity_x"], None)),
+ ("SubhaloVel_1", (v_units, ["Subhalo", "particle_velocity_y"], None)),
+ ("SubhaloVel_2", (v_units, ["Subhalo", "particle_velocity_z"], None)),
+ ("SubhaloMass", (m_units, ["Subhalo", "particle_mass"], None)),
+ ("SubhaloLen", ("", ["Subhalo", "particle_number"], None)),
)
https://bitbucket.org/yt_analysis/yt/commits/263fc5c5b3e6/
Changeset: 263fc5c5b3e6
Branch: yt
User: brittonsmith
Date: 2015-07-10 14:20:47+00:00
Summary: Updating tests with real data.
Affected #: 1 file
diff -r 9e8ee1a7aec878b08b204c894f7d3d928807ea58 -r 263fc5c5b3e61e42c2b0b58e450ee3fc3be6e0a2 yt/frontends/gadget_fof/tests/test_outputs.py
--- a/yt/frontends/gadget_fof/tests/test_outputs.py
+++ b/yt/frontends/gadget_fof/tests/test_outputs.py
@@ -1,5 +1,5 @@
"""
-GadgetFOF frontend tests using owls_fof_halos datasets
+GadgetFOF frontend tests using gadget_fof datasets
@@ -19,32 +19,38 @@
from yt.utilities.answer_testing.framework import \
FieldValuesTest, \
requires_ds, \
+ requires_file, \
data_dir_load
from yt.frontends.gadget_fof.api import GadgetFOFDataset
-_fields = ("particle_position_x", "particle_position_y",
- "particle_position_z", "particle_mass")
+p_types = ("Group", "Subhalo")
+p_fields = ("particle_position_x", "particle_position_y",
+ "particle_position_z", "particle_velocity_x",
+ "particle_velocity_y", "particle_velocity_z",
+ "particle_mass", "particle_identifier")
+_fields = tuple([(p_type, p_field) for p_type in p_types
+ for p_field in p_fields])
# a dataset with empty files
-g1 = "" # TBD
-g8 = "" # TBD
+g5 = "gadget_fof/groups_005/fof_subhalo_tab_005.0.hdf5"
+g42 = "gadget_fof/groups_042/fof_subhalo_tab_042.0.hdf5"
-@requires_ds(g8)
-def test_fields_g8():
- ds = data_dir_load(g8)
- yield assert_equal, str(ds), os.path.basename(g8)
+@requires_ds(g5)
+def test_fields_g5():
+ ds = data_dir_load(g5)
+ yield assert_equal, str(ds), os.path.basename(g5)
for field in _fields:
- yield FieldValuesTest(g8, field, particle_type=True)
+ yield FieldValuesTest(g5, field, particle_type=True)
-@requires_ds(g1)
-def test_fields_g1():
- ds = data_dir_load(g1)
- yield assert_equal, str(ds), os.path.basename(g1)
+@requires_ds(g42)
+def test_fields_g42():
+ ds = data_dir_load(g42)
+ yield assert_equal, str(ds), os.path.basename(g42)
for field in _fields:
- yield FieldValuesTest(g1, field, particle_type=True)
+ yield FieldValuesTest(g42, field, particle_type=True)
-@requires_file(g1)
+@requires_file(g42)
def test_GadgetFOFDataset():
- assert isinstance(data_dir_load(g1), GadgetFOFDataset)
+ assert isinstance(data_dir_load(g42), GadgetFOFDataset)
https://bitbucket.org/yt_analysis/yt/commits/13d4d581a9c0/
Changeset: 13d4d581a9c0
Branch: yt
User: brittonsmith
Date: 2015-07-10 14:29:43+00:00
Summary: Updating test data name.
Affected #: 1 file
diff -r 263fc5c5b3e61e42c2b0b58e450ee3fc3be6e0a2 -r 13d4d581a9c01024f0c2b8c55333c745db48e2f4 yt/frontends/gadget_fof/tests/test_outputs.py
--- a/yt/frontends/gadget_fof/tests/test_outputs.py
+++ b/yt/frontends/gadget_fof/tests/test_outputs.py
@@ -32,8 +32,8 @@
for p_field in p_fields])
# a dataset with empty files
-g5 = "gadget_fof/groups_005/fof_subhalo_tab_005.0.hdf5"
-g42 = "gadget_fof/groups_042/fof_subhalo_tab_042.0.hdf5"
+g5 = "gadget_fof_halos/groups_005/fof_subhalo_tab_005.0.hdf5"
+g42 = "gadget_fof_halos/groups_042/fof_subhalo_tab_042.0.hdf5"
@requires_ds(g5)
https://bitbucket.org/yt_analysis/yt/commits/98c4d7e6684b/
Changeset: 98c4d7e6684b
Branch: yt
User: brittonsmith
Date: 2015-07-10 15:13:12+00:00
Summary: Adding docs on loading halo catalogs.
Affected #: 1 file
diff -r 13d4d581a9c01024f0c2b8c55333c745db48e2f4 -r 98c4d7e6684bd086ccd0116f6cd10110fa42cfb9 doc/source/examining/loading_data.rst
--- a/doc/source/examining/loading_data.rst
+++ b/doc/source/examining/loading_data.rst
@@ -1075,6 +1075,76 @@
.. _loading-pyne-data:
+Halo Catalog Data
+-----------------
+
+yt has support for reading halo catalogs produced by Rockstar and the inline
+FOF/SUBFIND halo finders of Gadget and OWLS. The halo catalogs are treated as
+particle datasets where each particle represents a single halo. At this time,
+yt does not have the ability to load the member particles for a given halo.
+However, once loaded, further halo analysis can be performed using
+:ref:`halo_catalog`.
+
+When halo catalogs are written to multiple files, you need only give the
+path to one of them; yt will find the remaining files.
+
+Gadget FOF/SUBFIND
+^^^^^^^^^^^^^^^^^^
+
+The two field types for GadgetFOF data are "Group" (FOF) and "Subhalo" (SUBFIND).
+
+.. code-block:: python
+
+ import yt
+ ds = yt.load("gadget_fof_halos/groups_042/fof_subhalo_tab_042.0.hdf5")
+ ad = ds.all_data()
+ # The halo mass
+ print ad["Group", "particle_mass"]
+ print ad["Subhalo", "particle_mass"]
+ # Halo ID
+ print ad["Group", "particle_identifier"]
+ print ad["Subhalo", "particle_identifier"]
+ # positions
+ print ad["Group", "particle_position_x"]
+ # velocities
+ print ad["Group", "particle_velocity_x"]
+
+Multidimensional fields can be accessed through the field name followed by an
+underscore and the index.
+
+.. code-block:: python
+
+ # x component of the spin
+ print ad["Subhalo", "SubhaloSpin_0"]
+
+OWLS FOF/SUBFIND
+^^^^^^^^^^^^^^^^
+
+OWLS halo catalogs have a very similar structure to regular Gadget halo catalogs.
+The two field types are "FOF" and "SUBFIND".
+
+.. code-block:: python
+
+ import yt
+ ds = yt.load("owls_fof_halos/groups_008/group_008.0.hdf5")
+ ad = ds.all_data()
+ # The halo mass
+ print ad["FOF", "particle_mass"]
+
+Rockstar
+^^^^^^^^
+
+Rockstar halo catalogs are loaded by providing the path to one of the .bin files.
+The single field type available is "halos".
+
+.. code-block:: python
+
+ import yt
+ ds = yt.load("rockstar_halos/halos_0.0.bin")
+ ad = ds.all_data()
+ # The halo mass
+ print ad["halos", "particle_mass"]
+
PyNE Data
---------
https://bitbucket.org/yt_analysis/yt/commits/d48960b8f6a7/
Changeset: d48960b8f6a7
Branch: yt
User: chummels
Date: 2015-07-16 17:14:06+00:00
Summary: Merged in brittonsmith/yt (pull request #1570)
Adding frontend for Gadget_FOF
Affected #: 13 files
diff -r 989be30f7b70fc8e144581d1882c1dac913b25f9 -r d48960b8f6a7791a7610555cb447402900e981bd doc/source/examining/loading_data.rst
--- a/doc/source/examining/loading_data.rst
+++ b/doc/source/examining/loading_data.rst
@@ -1079,6 +1079,76 @@
.. _loading-pyne-data:
+Halo Catalog Data
+-----------------
+
+yt has support for reading halo catalogs produced by Rockstar and the inline
+FOF/SUBFIND halo finders of Gadget and OWLS. The halo catalogs are treated as
+particle datasets where each particle represents a single halo. At this time,
+yt does not have the ability to load the member particles for a given halo.
+However, once loaded, further halo analysis can be performed using
+:ref:`halo_catalog`.
+
+When halo catalogs are written to multiple files, you need only give the
+path to one of them; yt will find the remaining files.
+
+Gadget FOF/SUBFIND
+^^^^^^^^^^^^^^^^^^
+
+The two field types for GadgetFOF data are "Group" (FOF) and "Subhalo" (SUBFIND).
+
+.. code-block:: python
+
+ import yt
+ ds = yt.load("gadget_fof_halos/groups_042/fof_subhalo_tab_042.0.hdf5")
+ ad = ds.all_data()
+ # The halo mass
+ print ad["Group", "particle_mass"]
+ print ad["Subhalo", "particle_mass"]
+ # Halo ID
+ print ad["Group", "particle_identifier"]
+ print ad["Subhalo", "particle_identifier"]
+ # positions
+ print ad["Group", "particle_position_x"]
+ # velocities
+ print ad["Group", "particle_velocity_x"]
+
+Multidimensional fields can be accessed through the field name followed by an
+underscore and the index.
+
+.. code-block:: python
+
+ # x component of the spin
+ print ad["Subhalo", "SubhaloSpin_0"]
+
+OWLS FOF/SUBFIND
+^^^^^^^^^^^^^^^^
+
+OWLS halo catalogs have a very similar structure to regular Gadget halo catalogs.
+The two field types are "FOF" and "SUBFIND".
+
+.. code-block:: python
+
+ import yt
+ ds = yt.load("owls_fof_halos/groups_008/group_008.0.hdf5")
+ ad = ds.all_data()
+ # The halo mass
+ print ad["FOF", "particle_mass"]
+
+Rockstar
+^^^^^^^^
+
+Rockstar halo catalogs are loaded by providing the path to one of the .bin files.
+The single field type available is "halos".
+
+.. code-block:: python
+
+ import yt
+ ds = yt.load("rockstar_halos/halos_0.0.bin")
+ ad = ds.all_data()
+ # The halo mass
+ print ad["halos", "particle_mass"]
+
PyNE Data
---------
diff -r 989be30f7b70fc8e144581d1882c1dac913b25f9 -r d48960b8f6a7791a7610555cb447402900e981bd yt/frontends/api.py
--- a/yt/frontends/api.py
+++ b/yt/frontends/api.py
@@ -27,6 +27,7 @@
'fits',
'flash',
'gadget',
+ 'gadget_fof',
'gdf',
'halo_catalog',
'http_stream',
diff -r 989be30f7b70fc8e144581d1882c1dac913b25f9 -r d48960b8f6a7791a7610555cb447402900e981bd yt/frontends/gadget/data_structures.py
--- a/yt/frontends/gadget/data_structures.py
+++ b/yt/frontends/gadget/data_structures.py
@@ -387,7 +387,7 @@
@classmethod
def _is_valid(self, *args, **kwargs):
need_groups = ['Header']
- veto_groups = ['FOF']
+ veto_groups = ['FOF', 'Group', 'Subhalo']
valid = True
try:
fh = h5py.File(args[0], mode='r')
diff -r 989be30f7b70fc8e144581d1882c1dac913b25f9 -r d48960b8f6a7791a7610555cb447402900e981bd yt/frontends/gadget_fof/__init__.py
--- /dev/null
+++ b/yt/frontends/gadget_fof/__init__.py
@@ -0,0 +1,15 @@
+"""
+API for HaloCatalog frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
diff -r 989be30f7b70fc8e144581d1882c1dac913b25f9 -r d48960b8f6a7791a7610555cb447402900e981bd yt/frontends/gadget_fof/api.py
--- /dev/null
+++ b/yt/frontends/gadget_fof/api.py
@@ -0,0 +1,26 @@
+"""
+API for GadgetFOF frontend
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2015, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from .data_structures import \
+ GadgetFOFDataset
+
+from .io import \
+ IOHandlerGadgetFOFHDF5
+
+from .fields import \
+ GadgetFOFFieldInfo
+
+from . import tests
diff -r 989be30f7b70fc8e144581d1882c1dac913b25f9 -r d48960b8f6a7791a7610555cb447402900e981bd yt/frontends/gadget_fof/data_structures.py
--- /dev/null
+++ b/yt/frontends/gadget_fof/data_structures.py
@@ -0,0 +1,246 @@
+"""
+Data structures for GadgetFOF frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from collections import defaultdict
+import h5py
+import numpy as np
+import stat
+import weakref
+import struct
+import glob
+import time
+import os
+
+from .fields import \
+ GadgetFOFFieldInfo
+
+from yt.utilities.cosmology import \
+ Cosmology
+from yt.utilities.definitions import \
+ mpc_conversion, sec_conversion
+from yt.utilities.exceptions import \
+ YTException
+from yt.utilities.logger import ytLogger as \
+ mylog
+from yt.geometry.particle_geometry_handler import \
+ ParticleIndex
+from yt.data_objects.static_output import \
+ Dataset, \
+ ParticleFile
+from yt.frontends.gadget.data_structures import \
+ _fix_unit_ordering
+import yt.utilities.fortran_utils as fpu
+from yt.units.yt_array import \
+ YTArray, \
+ YTQuantity
+
+class GadgetFOFParticleIndex(ParticleIndex):
+ def __init__(self, ds, dataset_type):
+ super(GadgetFOFParticleIndex, self).__init__(ds, dataset_type)
+
+ def _calculate_particle_index_starts(self):
+ # Halo indices are not saved in the file, so we must count by hand.
+ # File 0 has halos 0 to N_0 - 1, file 1 has halos N_0 to N_0 + N_1 - 1, etc.
+ particle_count = defaultdict(int)
+ offset_count = 0
+ for data_file in self.data_files:
+ data_file.index_start = dict([(ptype, particle_count[ptype]) for
+ ptype in data_file.total_particles])
+ data_file.offset_start = offset_count
+ for ptype in data_file.total_particles:
+ particle_count[ptype] += data_file.total_particles[ptype]
+ offset_count += data_file.total_offset
+
+ def _calculate_file_offset_map(self):
+ # After the FOF is performed, a load-balancing step redistributes halos
+ # and then writes more fields. Here, for each file, we create a list of
+ # files which contain the rest of the redistributed particles.
+ ifof = np.array([data_file.total_particles["Group"]
+ for data_file in self.data_files])
+ isub = np.array([data_file.total_offset
+ for data_file in self.data_files])
+ subend = isub.cumsum()
+ fofend = ifof.cumsum()
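+ # For each file, locate the span of files whose offset ("Subhalo")
+ # blocks overlap that file's range of global group indices.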
+ istart = np.digitize(fofend - ifof, subend - isub) - 1
+ iend = np.clip(np.digitize(fofend, subend), 0, ifof.size - 2)
+ for i, data_file in enumerate(self.data_files):
+ data_file.offset_files = self.data_files[istart[i]: iend[i] + 1]
+
+ def _detect_output_fields(self):
+ # TODO: Add additional fields
+ dsl = []
+ units = {}
+ for dom in self.data_files:
+ fl, _units = self.io._identify_fields(dom)
+ units.update(_units)
+ dom._calculate_offsets(fl)
+ for f in fl:
+ if f not in dsl: dsl.append(f)
+ self.field_list = dsl
+ ds = self.dataset
+ ds.particle_types = tuple(set(pt for pt, ds in dsl))
+ # This is an attribute that means these particle types *actually*
+ # exist. As in, they are real, in the dataset.
+ ds.field_units.update(units)
+ ds.particle_types_raw = ds.particle_types
+
+ def _setup_geometry(self):
+ super(GadgetFOFParticleIndex, self)._setup_geometry()
+ self._calculate_particle_index_starts()
+ self._calculate_file_offset_map()
+
+class GadgetFOFHDF5File(ParticleFile):
+ def __init__(self, ds, io, filename, file_id):
+ super(GadgetFOFHDF5File, self).__init__(ds, io, filename, file_id)
+ with h5py.File(filename, "r") as f:
+ self.header = dict((field, f.attrs[field]) \
+ for field in f.attrs.keys())
+
+class GadgetFOFDataset(Dataset):
+ _index_class = GadgetFOFParticleIndex
+ _file_class = GadgetFOFHDF5File
+ _field_info_class = GadgetFOFFieldInfo
+ _suffix = ".hdf5"
+
+ def __init__(self, filename, dataset_type="gadget_fof_hdf5",
+ n_ref=16, over_refine_factor=1,
+ unit_base=None, units_override=None):
+ self.n_ref = n_ref
+ self.over_refine_factor = over_refine_factor
+ if unit_base is not None and "UnitLength_in_cm" in unit_base:
+ # We assume this is comoving, because in the absence of comoving
+ # integration the redshift will be zero.
+ unit_base['cmcm'] = 1.0 / unit_base["UnitLength_in_cm"]
+ self._unit_base = unit_base
+ if units_override is not None:
+ raise RuntimeError("units_override is not supported for GadgetFOFDataset. "+
+ "Use unit_base instead.")
+ super(GadgetFOFDataset, self).__init__(filename, dataset_type,
+ units_override=units_override)
+
+ def _parse_parameter_file(self):
+ handle = h5py.File(self.parameter_filename, mode="r")
+ hvals = {}
+ hvals.update((str(k), v) for k, v in handle["/Header"].attrs.items())
+ hvals["NumFiles"] = hvals["NumFiles"]
+
+ self.dimensionality = 3
+ self.refine_by = 2
+ self.unique_identifier = \
+ int(os.stat(self.parameter_filename)[stat.ST_CTIME])
+
+ # Set standard values
+ self.domain_left_edge = np.zeros(3, "float64")
+ self.domain_right_edge = np.ones(3, "float64") * hvals["BoxSize"]
+ nz = 1 << self.over_refine_factor
+ self.domain_dimensions = np.ones(3, "int32") * nz
+ self.cosmological_simulation = 1
+ self.periodicity = (True, True, True)
+ self.current_redshift = hvals["Redshift"]
+ self.omega_lambda = hvals["OmegaLambda"]
+ self.omega_matter = hvals["Omega0"]
+ self.hubble_constant = hvals["HubbleParam"]
+
+ cosmology = Cosmology(hubble_constant=self.hubble_constant,
+ omega_matter=self.omega_matter,
+ omega_lambda=self.omega_lambda)
+ self.current_time = cosmology.t_from_z(self.current_redshift)
+
+ self.parameters = hvals
+ prefix = os.path.abspath(
+ os.path.join(os.path.dirname(self.parameter_filename),
+ os.path.basename(self.parameter_filename).split(".", 1)[0]))
+
+ suffix = self.parameter_filename.rsplit(".", 1)[-1]
+ self.filename_template = "%s.%%(num)i.%s" % (prefix, suffix)
+ self.file_count = len(glob.glob(prefix + "*" + self._suffix))
+ if self.file_count == 0:
+ raise YTException(message="No data files found.", ds=self)
+ self.particle_types = ("Group", "Subhalo")
+ self.particle_types_raw = ("Group", "Subhalo")
+
+ handle.close()
+
+ def _set_code_unit_attributes(self):
+ # Set a sane default for cosmological simulations.
+ if self._unit_base is None and self.cosmological_simulation == 1:
+ mylog.info("Assuming length units are in Mpc/h (comoving)")
+ self._unit_base = dict(length = (1.0, "Mpccm/h"))
+ # For the other units, we will fall back on the standard Gadget
+ # defaults.
+ unit_base = self._unit_base or {}
+
+ if "length" in unit_base:
+ length_unit = unit_base["length"]
+ elif "UnitLength_in_cm" in unit_base:
+ if self.cosmological_simulation == 0:
+ length_unit = (unit_base["UnitLength_in_cm"], "cm")
+ else:
+ length_unit = (unit_base["UnitLength_in_cm"], "cmcm/h")
+ else:
+ raise RuntimeError
+ length_unit = _fix_unit_ordering(length_unit)
+ self.length_unit = self.quan(length_unit[0], length_unit[1])
+
+ if "velocity" in unit_base:
+ velocity_unit = unit_base["velocity"]
+ elif "UnitVelocity_in_cm_per_s" in unit_base:
+ velocity_unit = (unit_base["UnitVelocity_in_cm_per_s"], "cm/s")
+ else:
+ if self.cosmological_simulation == 0:
+ velocity_unit = (1e5, "cm/s")
+ else:
+ velocity_unit = (1e5, "cmcm/s")
+ velocity_unit = _fix_unit_ordering(velocity_unit)
+ self.velocity_unit = self.quan(velocity_unit[0], velocity_unit[1])
+
+ # We set hubble_constant = 1.0 for non-cosmology, so this is safe.
+ # Default to 1e10 Msun/h if mass is not specified.
+ if "mass" in unit_base:
+ mass_unit = unit_base["mass"]
+ elif "UnitMass_in_g" in unit_base:
+ if self.cosmological_simulation == 0:
+ mass_unit = (unit_base["UnitMass_in_g"], "g")
+ else:
+ mass_unit = (unit_base["UnitMass_in_g"], "g/h")
+ else:
+ # Sane default
+ mass_unit = (1.0, "1e10*Msun/h")
+ mass_unit = _fix_unit_ordering(mass_unit)
+ self.mass_unit = self.quan(mass_unit[0], mass_unit[1])
+
+ if "time" in unit_base:
+ time_unit = unit_base["time"]
+ elif "UnitTime_in_s" in unit_base:
+ time_unit = (unit_base["UnitTime_in_s"], "s")
+ else:
+ time_unit = (1., "s")
+ self.time_unit = self.quan(time_unit[0], time_unit[1])
+
+ @classmethod
+ def _is_valid(self, *args, **kwargs):
+ need_groups = ['Group', 'Header', 'Subhalo']
+ veto_groups = ['FOF']
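+ # A top-level "FOF" group is vetoed because it indicates an
+ # OWLS-style catalog (field types "FOF"/"SUBFIND"), which the
+ # owls_subfind frontend handles instead.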
+ valid = True
+ try:
+ fh = h5py.File(args[0], mode='r')
+ valid = all(ng in fh["/"] for ng in need_groups) and \
+ not any(vg in fh["/"] for vg in veto_groups)
+ fh.close()
+ except:
+ valid = False
+ pass
+ return valid
diff -r 989be30f7b70fc8e144581d1882c1dac913b25f9 -r d48960b8f6a7791a7610555cb447402900e981bd yt/frontends/gadget_fof/fields.py
--- /dev/null
+++ b/yt/frontends/gadget_fof/fields.py
@@ -0,0 +1,48 @@
+"""
+GadgetFOF-specific fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2015, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.funcs import mylog
+from yt.fields.field_info_container import \
+ FieldInfoContainer
+from yt.units.yt_array import \
+ YTArray
+
+m_units = "code_mass"
+p_units = "code_length"
+v_units = "code_velocity"
+
+class GadgetFOFFieldInfo(FieldInfoContainer):
+ known_other_fields = (
+ )
+
+ known_particle_fields = (
+ ("GroupPos_0", (p_units, ["Group", "particle_position_x"], None)),
+ ("GroupPos_1", (p_units, ["Group", "particle_position_y"], None)),
+ ("GroupPos_2", (p_units, ["Group", "particle_position_z"], None)),
+ ("GroupVel_0", (v_units, ["Group", "particle_velocity_x"], None)),
+ ("GroupVel_1", (v_units, ["Group", "particle_velocity_y"], None)),
+ ("GroupVel_2", (v_units, ["Group", "particle_velocity_z"], None)),
+ ("GroupMass", (m_units, ["Group", "particle_mass"], None)),
+ ("GroupLen", ("", ["Group", "particle_number"], None)),
+ ("SubhaloPos_0", (p_units, ["Subhalo", "particle_position_x"], None)),
+ ("SubhaloPos_1", (p_units, ["Subhalo", "particle_position_y"], None)),
+ ("SubhaloPos_2", (p_units, ["Subhalo", "particle_position_z"], None)),
+ ("SubhaloVel_0", (v_units, ["Subhalo", "particle_velocity_x"], None)),
+ ("SubhaloVel_1", (v_units, ["Subhalo", "particle_velocity_y"], None)),
+ ("SubhaloVel_2", (v_units, ["Subhalo", "particle_velocity_z"], None)),
+ ("SubhaloMass", (m_units, ["Subhalo", "particle_mass"], None)),
+ ("SubhaloLen", ("", ["Subhalo", "particle_number"], None)),
+)
diff -r 989be30f7b70fc8e144581d1882c1dac913b25f9 -r d48960b8f6a7791a7610555cb447402900e981bd yt/frontends/gadget_fof/io.py
--- /dev/null
+++ b/yt/frontends/gadget_fof/io.py
@@ -0,0 +1,207 @@
+"""
+GadgetFOF data-file handling function
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+import numpy as np
+
+from yt.utilities.exceptions import *
+from yt.funcs import mylog
+
+from yt.utilities.io_handler import \
+ BaseIOHandler
+
+from yt.utilities.lib.geometry_utils import compute_morton
+
+class IOHandlerGadgetFOFHDF5(BaseIOHandler):
+ _dataset_type = "gadget_fof_hdf5"
+
+ def __init__(self, ds):
+ super(IOHandlerGadgetFOFHDF5, self).__init__(ds)
+ self.offset_fields = set([])
+
+ def _read_fluid_selection(self, chunks, selector, fields, size):
+ raise NotImplementedError
+
+ def _read_particle_coords(self, chunks, ptf):
+ # This will read chunks and yield the results.
+ chunks = list(chunks)
+ data_files = set([])
+ for chunk in chunks:
+ for obj in chunk.objs:
+ data_files.update(obj.data_files)
+ for data_file in sorted(data_files):
+ with h5py.File(data_file.filename, "r") as f:
+ for ptype, field_list in sorted(ptf.items()):
+ pcount = data_file.total_particles[ptype]
+ coords = f[ptype]["%sPos" % ptype].value.astype("float64")
+ coords = np.resize(coords, (pcount, 3))
+ x = coords[:, 0]
+ y = coords[:, 1]
+ z = coords[:, 2]
+ yield ptype, (x, y, z)
+
+ def _read_offset_particle_field(self, field, data_file, fh):
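+ # Offset fields are Group-indexed data written into the "Subhalo"
+ # blocks after load balancing; map this file's global group indices
+ # onto the offset files that hold the corresponding rows.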
+ field_data = np.empty(data_file.total_particles["Group"], dtype="float64")
+ fofindex = np.arange(data_file.total_particles["Group"]) + data_file.index_start["Group"]
+ for offset_file in data_file.offset_files:
+ if fh.filename == offset_file.filename:
+ ofh = fh
+ else:
+ ofh = h5py.File(offset_file.filename, "r")
+ subindex = np.arange(offset_file.total_offset) + offset_file.offset_start
+ substart = max(fofindex[0] - subindex[0], 0)
+ subend = min(fofindex[-1] - subindex[0], subindex.size - 1)
+ fofstart = substart + subindex[0] - fofindex[0]
+ fofend = subend + subindex[0] - fofindex[0]
+ field_data[fofstart:fofend + 1] = ofh["Subhalo"][field][substart:subend + 1]
+ return field_data
+
+ def _read_particle_fields(self, chunks, ptf, selector):
+ # Now we have all the sizes, and we can allocate
+ chunks = list(chunks)
+ data_files = set([])
+ for chunk in chunks:
+ for obj in chunk.objs:
+ data_files.update(obj.data_files)
+ for data_file in sorted(data_files):
+ with h5py.File(data_file.filename, "r") as f:
+ for ptype, field_list in sorted(ptf.items()):
+ pcount = data_file.total_particles[ptype]
+ if pcount == 0: continue
+ coords = f[ptype]["%sPos" % ptype].value.astype("float64")
+ coords = np.resize(coords, (pcount, 3))
+ x = coords[:, 0]
+ y = coords[:, 1]
+ z = coords[:, 2]
+ mask = selector.select_points(x, y, z, 0.0)
+ del x, y, z
+ if mask is None: continue
+ for field in field_list:
+ if field in self.offset_fields:
+ field_data = \
+ self._read_offset_particle_field(field, data_file, f)
+ else:
+ if field == "particle_identifier":
+ field_data = \
+ np.arange(data_file.total_particles[ptype]) + \
+ data_file.index_start[ptype]
+ elif field in f[ptype]:
+ field_data = f[ptype][field].value.astype("float64")
+ else:
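+ # Component fields such as "GroupPos_0" are read from the
+ # underlying (N, d) dataset and sliced to the requested index.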
+ fname = field[:field.rfind("_")]
+ field_data = f[ptype][fname].value.astype("float64")
+ my_div = field_data.size / pcount
+ if my_div > 1:
+ field_data = np.resize(field_data, (pcount, my_div))
+ findex = int(field[field.rfind("_") + 1:])
+ field_data = field_data[:, findex]
+ data = field_data[mask]
+ yield (ptype, field), data
+
+ def _initialize_index(self, data_file, regions):
+ pcount = sum(data_file.total_particles.values())
+ morton = np.empty(pcount, dtype='uint64')
+ if pcount == 0: return morton
+ mylog.debug("Initializing index % 5i (% 7i particles)",
+ data_file.file_id, pcount)
+ ind = 0
+ with h5py.File(data_file.filename, "r") as f:
+ if not f.keys(): return None
+ dx = np.finfo(f["Group"]["GroupPos"].dtype).eps
+ dx = 2.0*self.ds.quan(dx, "code_length")
+
+ for ptype in data_file.ds.particle_types_raw:
+ if data_file.total_particles[ptype] == 0: continue
+ pos = f[ptype]["%sPos" % ptype].value.astype("float64")
+ pos = np.resize(pos, (data_file.total_particles[ptype], 3))
+ pos = data_file.ds.arr(pos, "code_length")
+
+ # These are 32 bit numbers, so we give a little lee-way.
+ # Otherwise, for big sets of particles, we often will bump into the
+ # domain edges. This helps alleviate that.
+ np.clip(pos, self.ds.domain_left_edge + dx,
+ self.ds.domain_right_edge - dx, pos)
+ if np.any(pos.min(axis=0) < self.ds.domain_left_edge) or \
+ np.any(pos.max(axis=0) > self.ds.domain_right_edge):
+ raise YTDomainOverflow(pos.min(axis=0),
+ pos.max(axis=0),
+ self.ds.domain_left_edge,
+ self.ds.domain_right_edge)
+ regions.add_data_file(pos, data_file.file_id)
+ morton[ind:ind+pos.shape[0]] = compute_morton(
+ pos[:,0], pos[:,1], pos[:,2],
+ data_file.ds.domain_left_edge,
+ data_file.ds.domain_right_edge)
+ ind += pos.shape[0]
+ return morton
+
+ def _count_particles(self, data_file):
+ with h5py.File(data_file.filename, "r") as f:
+ pcount = {"Group": f["Header"].attrs["Ngroups_ThisFile"],
+ "Subhalo": f["Header"].attrs["Nsubgroups_ThisFile"]}
+ data_file.total_offset = 0 # need to figure out how subfind works here
+ return pcount
+
+ def _identify_fields(self, data_file):
+ fields = []
+ pcount = data_file.total_particles
+ if sum(pcount.values()) == 0: return fields, {}
+ with h5py.File(data_file.filename, "r") as f:
+ for ptype in self.ds.particle_types_raw:
+ if data_file.total_particles[ptype] == 0: continue
+ fields.append((ptype, "particle_identifier"))
+ my_fields, my_offset_fields = \
+ subfind_field_list(f[ptype], ptype, data_file.total_particles)
+ fields.extend(my_fields)
+ self.offset_fields = self.offset_fields.union(set(my_offset_fields))
+ return fields, {}
+
+def subfind_field_list(fh, ptype, pcount):
+ fields = []
+ offset_fields = []
+ for field in fh.keys():
+ if isinstance(fh[field], h5py.Group):
+ my_fields, my_offset_fields = \
+ subfind_field_list(fh[field], ptype, pcount)
+ fields.extend(my_fields)
+ offset_fields.extend(my_offset_fields)
+ else:
+ if not fh[field].size % pcount[ptype]:
+ my_div = fh[field].size / pcount[ptype]
+ fname = fh[field].name[fh[field].name.find(ptype) + len(ptype) + 1:]
+ if my_div > 1:
+ for i in range(my_div):
+ fields.append((ptype, "%s_%d" % (fname, i)))
+ else:
+ fields.append((ptype, fname))
+ elif ptype == "Subfind" and \
+ not fh[field].size % fh["/Subfind"].attrs["Number_of_groups"]:
+ # These are actually Group fields, but they were written after
+ # a load balancing step moved halos around and thus they do not
+ # correspond to the halos stored in the Group group.
+ my_div = fh[field].size / fh["/Subfind"].attrs["Number_of_groups"]
+ fname = fh[field].name[fh[field].name.find(ptype) + len(ptype) + 1:]
+ if my_div > 1:
+ for i in range(my_div):
+ fields.append(("Group", "%s_%d" % (fname, i)))
+ else:
+ fields.append(("Group", fname))
+ offset_fields.append(fname)
+ else:
+ mylog.warn("Cannot add field (%s, %s) with size %d." % \
+ (ptype, fh[field].name, fh[field].size))
+ continue
+ return fields, offset_fields
diff -r 989be30f7b70fc8e144581d1882c1dac913b25f9 -r d48960b8f6a7791a7610555cb447402900e981bd yt/frontends/gadget_fof/setup.py
--- /dev/null
+++ b/yt/frontends/gadget_fof/setup.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+import setuptools
+import os
+import sys
+import os.path
+
+
+def configuration(parent_package='', top_path=None):
+ from numpy.distutils.misc_util import Configuration
+ config = Configuration('gadget_fof', parent_package, top_path)
+ config.make_config_py() # installs __config__.py
+ #config.make_svn_version_py()
+ return config
diff -r 989be30f7b70fc8e144581d1882c1dac913b25f9 -r d48960b8f6a7791a7610555cb447402900e981bd yt/frontends/gadget_fof/tests/test_outputs.py
--- /dev/null
+++ b/yt/frontends/gadget_fof/tests/test_outputs.py
@@ -0,0 +1,56 @@
+"""
+GadgetFOF frontend tests using gadget_fof datasets
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import os.path
+from yt.testing import \
+ assert_equal
+from yt.utilities.answer_testing.framework import \
+ FieldValuesTest, \
+ requires_ds, \
+ requires_file, \
+ data_dir_load
+from yt.frontends.gadget_fof.api import GadgetFOFDataset
+
+p_types = ("Group", "Subhalo")
+p_fields = ("particle_position_x", "particle_position_y",
+ "particle_position_z", "particle_velocity_x",
+ "particle_velocity_y", "particle_velocity_z",
+ "particle_mass", "particle_identifier")
+_fields = tuple([(p_type, p_field) for p_type in p_types
+ for p_field in p_fields])
+
+# a dataset with empty files
+g5 = "gadget_fof_halos/groups_005/fof_subhalo_tab_005.0.hdf5"
+g42 = "gadget_fof_halos/groups_042/fof_subhalo_tab_042.0.hdf5"
+
+
+@requires_ds(g5)
+def test_fields_g5():
+ ds = data_dir_load(g5)
+ yield assert_equal, str(ds), os.path.basename(g5)
+ for field in _fields:
+ yield FieldValuesTest(g5, field, particle_type=True)
+
+
+@requires_ds(g42)
+def test_fields_g42():
+ ds = data_dir_load(g42)
+ yield assert_equal, str(ds), os.path.basename(g42)
+ for field in _fields:
+ yield FieldValuesTest(g42, field, particle_type=True)
+
+@requires_file(g42)
+def test_GadgetFOFDataset():
+ assert isinstance(data_dir_load(g42), GadgetFOFDataset)
diff -r 989be30f7b70fc8e144581d1882c1dac913b25f9 -r d48960b8f6a7791a7610555cb447402900e981bd yt/frontends/owls_subfind/data_structures.py
--- a/yt/frontends/owls_subfind/data_structures.py
+++ b/yt/frontends/owls_subfind/data_structures.py
@@ -27,11 +27,14 @@
from .fields import \
OWLSSubfindFieldInfo
-from yt.utilities.cosmology import Cosmology
+from yt.utilities.cosmology import \
+ Cosmology
from yt.utilities.definitions import \
mpc_conversion, sec_conversion
from yt.utilities.exceptions import \
- YTException
+ YTException
+from yt.utilities.logger import ytLogger as \
+ mylog
from yt.geometry.particle_geometry_handler import \
ParticleIndex
from yt.data_objects.static_output import \
@@ -170,6 +173,7 @@
# For the other units, we will fall back on the standard Gadget
# defaults.
unit_base = self._unit_base or {}
+
if "length" in unit_base:
length_unit = unit_base["length"]
elif "UnitLength_in_cm" in unit_base:
@@ -182,7 +186,6 @@
length_unit = _fix_unit_ordering(length_unit)
self.length_unit = self.quan(length_unit[0], length_unit[1])
- unit_base = self._unit_base or {}
if "velocity" in unit_base:
velocity_unit = unit_base["velocity"]
elif "UnitVelocity_in_cm_per_s" in unit_base:
@@ -191,6 +194,7 @@
velocity_unit = (1e5, "cm/s")
velocity_unit = _fix_unit_ordering(velocity_unit)
self.velocity_unit = self.quan(velocity_unit[0], velocity_unit[1])
+
# We set hubble_constant = 1.0 for non-cosmology, so this is safe.
# Default to 1e10 Msun/h if mass is not specified.
if "mass" in unit_base:
@@ -205,7 +209,14 @@
mass_unit = (1.0, "1e10*Msun/h")
mass_unit = _fix_unit_ordering(mass_unit)
self.mass_unit = self.quan(mass_unit[0], mass_unit[1])
- self.time_unit = self.quan(unit_base["UnitTime_in_s"], "s")
+
+ if "time" in unit_base:
+ time_unit = unit_base["time"]
+ elif "UnitTime_in_s" in unit_base:
+ time_unit = (unit_base["UnitTime_in_s"], "s")
+ else:
+ time_unit = (1., "s")
+ self.time_unit = self.quan(time_unit[0], time_unit[1])
@classmethod
def _is_valid(self, *args, **kwargs):
diff -r 989be30f7b70fc8e144581d1882c1dac913b25f9 -r d48960b8f6a7791a7610555cb447402900e981bd yt/frontends/setup.py
--- a/yt/frontends/setup.py
+++ b/yt/frontends/setup.py
@@ -17,6 +17,7 @@
config.add_subpackage("fits")
config.add_subpackage("flash")
config.add_subpackage("gadget")
+ config.add_subpackage("gadget_fof")
config.add_subpackage("gdf")
config.add_subpackage("halo_catalog")
config.add_subpackage("http_stream")
@@ -34,11 +35,13 @@
config.add_subpackage("athena/tests")
config.add_subpackage("boxlib/tests")
config.add_subpackage("chombo/tests")
+ config.add_subpackage("eagle/tests")
config.add_subpackage("enzo/tests")
config.add_subpackage("eagle/tests")
config.add_subpackage("fits/tests")
config.add_subpackage("flash/tests")
config.add_subpackage("gadget/tests")
+ config.add_subpackage("gadget_fof/tests")
config.add_subpackage("moab/tests")
config.add_subpackage("owls/tests")
config.add_subpackage("owls_subfind/tests")
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
More information about the yt-svn mailing list