[yt-svn] commit/yt: MatthewTurk: Merged in brittonsmith/yt/yt-3.0 (pull request #867)
commits-noreply at bitbucket.org
Mon May 5 08:48:48 PDT 2014
1 new commit in yt:
https://bitbucket.org/yt_analysis/yt/commits/2baafb904700/
Changeset: 2baafb904700
Branch: yt-3.0
User: MatthewTurk
Date: 2014-05-05 17:48:37
Summary: Merged in brittonsmith/yt/yt-3.0 (pull request #867)
OWLSSubfind frontend.
Affected #: 8 files
diff -r 85cf74b2256cfe93057f6858619f5f2fdbf4e55f -r 2baafb904700f2eaccb6af2f6d545db6c2a286ca yt/frontends/halo_catalogs/api.py
--- a/yt/frontends/halo_catalogs/api.py
+++ b/yt/frontends/halo_catalogs/api.py
@@ -23,3 +23,8 @@
     RockstarDataset, \
     IOHandlerRockstarBinary, \
     RockstarFieldInfo
+
+from .owls_subfind.api import \
+    OWLSSubfindDataset, \
+    IOHandlerOWLSSubfindHDF5, \
+    OWLSSubfindFieldInfo
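
For orientation, a minimal usage sketch of what these exports enable (the catalog path is hypothetical, and this assumes yt-3.0's all_data() convenience method; yt.load() would normally dispatch to OWLSSubfindDataset automatically via its _is_valid check):

    from yt.frontends.halo_catalogs.api import OWLSSubfindDataset

    # Hypothetical path; any OWLS SUBFIND HDF5 catalog would do.
    pf = OWLSSubfindDataset("groups_042/group_042.0.hdf5")
    ad = pf.all_data()
    # "Mass" is aliased to "particle_mass" in owls_subfind/fields.py below.
    print(ad["FOF", "particle_mass"])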
diff -r 85cf74b2256cfe93057f6858619f5f2fdbf4e55f -r 2baafb904700f2eaccb6af2f6d545db6c2a286ca yt/frontends/halo_catalogs/owls_subfind/__init__.py
--- /dev/null
+++ b/yt/frontends/halo_catalogs/owls_subfind/__init__.py
@@ -0,0 +1,15 @@
+"""
+API for OWLSSubfind frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
diff -r 85cf74b2256cfe93057f6858619f5f2fdbf4e55f -r 2baafb904700f2eaccb6af2f6d545db6c2a286ca yt/frontends/halo_catalogs/owls_subfind/api.py
--- /dev/null
+++ b/yt/frontends/halo_catalogs/owls_subfind/api.py
@@ -0,0 +1,24 @@
+"""
+API for OWLSSubfind frontend
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from .data_structures import \
+    OWLSSubfindDataset
+
+from .io import \
+    IOHandlerOWLSSubfindHDF5
+
+from .fields import \
+    OWLSSubfindFieldInfo
diff -r 85cf74b2256cfe93057f6858619f5f2fdbf4e55f -r 2baafb904700f2eaccb6af2f6d545db6c2a286ca yt/frontends/halo_catalogs/owls_subfind/data_structures.py
--- /dev/null
+++ b/yt/frontends/halo_catalogs/owls_subfind/data_structures.py
@@ -0,0 +1,222 @@
+"""
+Data structures for OWLSSubfind frontend.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from collections import defaultdict
+import h5py
+import numpy as np
+import stat
+import weakref
+import struct
+import glob
+import time
+import os
+
+from .fields import \
+    OWLSSubfindFieldInfo
+
+from yt.funcs import mylog
+from yt.utilities.cosmology import Cosmology
+from yt.utilities.definitions import \
+    mpc_conversion, sec_conversion
+from yt.utilities.exceptions import \
+    YTException
+from yt.geometry.particle_geometry_handler import \
+    ParticleIndex
+from yt.data_objects.static_output import \
+    Dataset, \
+    ParticleFile
+from yt.frontends.sph.data_structures import \
+    _fix_unit_ordering
+import yt.utilities.fortran_utils as fpu
+from yt.units.yt_array import \
+    YTArray, \
+    YTQuantity
+
+class OWLSSubfindParticleIndex(ParticleIndex):
+    def __init__(self, pf, dataset_type):
+        super(OWLSSubfindParticleIndex, self).__init__(pf, dataset_type)
+
+    def _calculate_particle_index_starts(self):
+        # Halo indices are not saved in the file, so we must count by hand.
+        # File 0 has halos 0 to N_0 - 1, file 1 has halos N_0 to N_0 + N_1 - 1, etc.
+        particle_count = defaultdict(int)
+        offset_count = 0
+        for data_file in self.data_files:
+            data_file.index_start = dict([(ptype, particle_count[ptype]) for
+                                          ptype in data_file.total_particles])
+            data_file.offset_start = offset_count
+            for ptype in data_file.total_particles:
+                particle_count[ptype] += data_file.total_particles[ptype]
+            offset_count += data_file.total_offset
+
+    def _calculate_file_offset_map(self):
+        # After the FOF is performed, a load-balancing step redistributes halos
+        # and then writes more fields.  Here, for each file, we create a list of
+        # files which contain the rest of the redistributed particles.
+        ifof = np.array([data_file.total_particles["FOF"]
+                         for data_file in self.data_files])
+        isub = np.array([data_file.total_offset
+                         for data_file in self.data_files])
+        subend = isub.cumsum()
+        fofend = ifof.cumsum()
+        istart = np.digitize(fofend - ifof, subend - isub) - 1
+        iend = np.clip(np.digitize(fofend, subend), 0, ifof.size - 2)
+        for i, data_file in enumerate(self.data_files):
+            data_file.offset_files = self.data_files[istart[i]: iend[i] + 1]
+
+    def _detect_output_fields(self):
+        # TODO: Add additional fields
+        pfl = []
+        units = {}
+        for dom in self.data_files[:1]:
+            fl, _units = self.io._identify_fields(dom)
+            units.update(_units)
+            dom._calculate_offsets(fl)
+            for f in fl:
+                if f not in pfl: pfl.append(f)
+        self.field_list = pfl
+        pf = self.parameter_file
+        pf.particle_types = tuple(set(pt for pt, field in pfl))
+        pf.field_units.update(units)
+        # This is an attribute that means these particle types *actually*
+        # exist.  As in, they are real, in the dataset.
+        pf.particle_types_raw = pf.particle_types
+
+    def _setup_geometry(self):
+        super(OWLSSubfindParticleIndex, self)._setup_geometry()
+        self._calculate_particle_index_starts()
+        self._calculate_file_offset_map()
+
+class OWLSSubfindHDF5File(ParticleFile):
+    def __init__(self, pf, io, filename, file_id):
+        super(OWLSSubfindHDF5File, self).__init__(pf, io, filename, file_id)
+        with h5py.File(filename, "r") as f:
+            self.header = dict((field, f.attrs[field]) \
+                               for field in f.attrs.keys())
+
+class OWLSSubfindDataset(Dataset):
+    _index_class = OWLSSubfindParticleIndex
+    _file_class = OWLSSubfindHDF5File
+    _field_info_class = OWLSSubfindFieldInfo
+    _suffix = ".hdf5"
+
+    def __init__(self, filename, dataset_type="subfind_hdf5",
+                 n_ref = 16, over_refine_factor = 1):
+        self.n_ref = n_ref
+        self.over_refine_factor = over_refine_factor
+        super(OWLSSubfindDataset, self).__init__(filename, dataset_type)
+
+    def _parse_parameter_file(self):
+        handle = h5py.File(self.parameter_filename, mode="r")
+        hvals = {}
+        hvals.update((str(k), v) for k, v in handle["/Header"].attrs.items())
+        hvals["NumFiles"] = hvals["NumFilesPerSnapshot"]
+        hvals["Massarr"] = hvals["MassTable"]
+
+        self.dimensionality = 3
+        self.refine_by = 2
+        self.unique_identifier = \
+            int(os.stat(self.parameter_filename)[stat.ST_CTIME])
+
+        # Set standard values
+        self.current_time = self.quan(hvals["Time_GYR"] * sec_conversion["Gyr"], "s")
+        self.domain_left_edge = np.zeros(3, "float64")
+        self.domain_right_edge = np.ones(3, "float64") * hvals["BoxSize"]
+        nz = 1 << self.over_refine_factor
+        self.domain_dimensions = np.ones(3, "int32") * nz
+        self.cosmological_simulation = 1
+        self.periodicity = (True, True, True)
+        self.current_redshift = hvals["Redshift"]
+        self.omega_lambda = hvals["OmegaLambda"]
+        self.omega_matter = hvals["Omega0"]
+        self.hubble_constant = hvals["HubbleParam"]
+        self.parameters = hvals
+        prefix = os.path.abspath(
+            os.path.join(os.path.dirname(self.parameter_filename),
+                         os.path.basename(self.parameter_filename).split(".", 1)[0]))
+
+        suffix = self.parameter_filename.rsplit(".", 1)[-1]
+        self.filename_template = "%s.%%(num)i.%s" % (prefix, suffix)
+        self.file_count = len(glob.glob(prefix + "*" + self._suffix))
+        if self.file_count == 0:
+            raise YTException(message="No data files found.", pf=self)
+        self.particle_types = ("FOF", "SUBFIND")
+        self.particle_types_raw = ("FOF", "SUBFIND")
+
+        # To avoid having to open files twice
+        self._unit_base = {}
+        self._unit_base.update(
+            (str(k), v) for k, v in handle["/Units"].attrs.items())
+        handle.close()
+
+    def _set_code_unit_attributes(self):
+        # Set a sane default for cosmological simulations.
+        if self._unit_base is None and self.cosmological_simulation == 1:
+            mylog.info("Assuming length units are in Mpc/h (comoving)")
+            self._unit_base = dict(length = (1.0, "Mpccm/h"))
+        # Otherwise, we will use the remaining defaults from the standard
+        # Gadget frontend.
+        unit_base = self._unit_base or {}
+        if "length" in unit_base:
+            length_unit = unit_base["length"]
+        elif "UnitLength_in_cm" in unit_base:
+            if self.cosmological_simulation == 0:
+                length_unit = (unit_base["UnitLength_in_cm"], "cm")
+            else:
+                length_unit = (unit_base["UnitLength_in_cm"], "cmcm/h")
+        else:
+            raise RuntimeError
+        length_unit = _fix_unit_ordering(length_unit)
+        self.length_unit = self.quan(length_unit[0], length_unit[1])
+
+        unit_base = self._unit_base or {}
+        if "velocity" in unit_base:
+            velocity_unit = unit_base["velocity"]
+        elif "UnitVelocity_in_cm_per_s" in unit_base:
+            velocity_unit = (unit_base["UnitVelocity_in_cm_per_s"], "cm/s")
+        else:
+            velocity_unit = (1e5, "cm/s")
+        velocity_unit = _fix_unit_ordering(velocity_unit)
+        self.velocity_unit = self.quan(velocity_unit[0], velocity_unit[1])
+        # We set hubble_constant = 1.0 for non-cosmology, so this is safe.
+        # Default to 1e10 Msun/h if mass is not specified.
+        if "mass" in unit_base:
+            mass_unit = unit_base["mass"]
+        elif "UnitMass_in_g" in unit_base:
+            if self.cosmological_simulation == 0:
+                mass_unit = (unit_base["UnitMass_in_g"], "g")
+            else:
+                mass_unit = (unit_base["UnitMass_in_g"], "g/h")
+        else:
+            # Sane default
+            mass_unit = (1.0, "1e10*Msun/h")
+        mass_unit = _fix_unit_ordering(mass_unit)
+        self.mass_unit = self.quan(mass_unit[0], mass_unit[1])
+        self.time_unit = self.quan(unit_base["UnitTime_in_s"], "s")
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        try:
+            fileh = h5py.File(args[0], mode='r')
+            if "Constants" in fileh["/"].keys() and \
+               "Header" in fileh["/"].keys() and \
+               "SUBFIND" in fileh["/"].keys():
+                fileh.close()
+                return True
+            fileh.close()
+        except:
+            pass
+        return False
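
A note on _calculate_file_offset_map above: np.digitize locates, for each file's contiguous range of FOF halo indices, the first and last files whose post-load-balancing SUBFIND blocks overlap that range. A toy run of the same arithmetic with made-up per-file counts:

    import numpy as np

    ifof = np.array([100, 100, 100])  # FOF halos written to each file
    isub = np.array([120, 180, 30])   # offset (SUBFIND-block) entries per file
    fofend = ifof.cumsum()            # [100, 200, 300]
    subend = isub.cumsum()            # [120, 300, 330]

    istart = np.digitize(fofend - ifof, subend - isub) - 1         # [0, 0, 1]
    iend = np.clip(np.digitize(fofend, subend), 0, ifof.size - 2)  # [0, 1, 1]
    # File 1 holds FOF halos 100-199, so its offset fields span files 0 and 1,
    # whose blocks cover global entries 0-119 and 120-299 respectively.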
diff -r 85cf74b2256cfe93057f6858619f5f2fdbf4e55f -r 2baafb904700f2eaccb6af2f6d545db6c2a286ca yt/frontends/halo_catalogs/owls_subfind/fields.py
--- /dev/null
+++ b/yt/frontends/halo_catalogs/owls_subfind/fields.py
@@ -0,0 +1,58 @@
+"""
+OWLSSubfind-specific fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.funcs import mylog
+from yt.fields.field_info_container import \
+    FieldInfoContainer
+from yt.units.yt_array import \
+    YTArray
+
+m_units = "code_mass"
+mdot_units = "code_mass / code_time"
+p_units = "Mpccm/h"
+v_units = "1e5 * cmcm / s"
+
+class OWLSSubfindFieldInfo(FieldInfoContainer):
+    known_other_fields = (
+    )
+
+    known_particle_fields = (
+        ("CenterOfMass_0", (p_units, ["particle_position_x"], None)),
+        ("CenterOfMass_1", (p_units, ["particle_position_y"], None)),
+        ("CenterOfMass_2", (p_units, ["particle_position_z"], None)),
+        ("CenterOfMassVelocity_0", (v_units, ["particle_velocity_x"], None)),
+        ("CenterOfMassVelocity_1", (v_units, ["particle_velocity_y"], None)),
+        ("CenterOfMassVelocity_2", (v_units, ["particle_velocity_z"], None)),
+        ("Mass", (m_units, ["particle_mass"], None)),
+        ("Halo_M_Crit200", (m_units, ["Virial Mass"], None)),
+        ("Halo_M_Crit2500", (m_units, [], None)),
+        ("Halo_M_Crit500", (m_units, [], None)),
+        ("Halo_M_Mean200", (m_units, [], None)),
+        ("Halo_M_Mean2500", (m_units, [], None)),
+        ("Halo_M_Mean500", (m_units, [], None)),
+        ("Halo_M_TopHat200", (m_units, [], None)),
+        ("Halo_R_Crit200", (p_units, ["Virial Radius"], None)),
+        ("Halo_R_Crit2500", (p_units, [], None)),
+        ("Halo_R_Crit500", (p_units, [], None)),
+        ("Halo_R_Mean200", (p_units, [], None)),
+        ("Halo_R_Mean2500", (p_units, [], None)),
+        ("Halo_R_Mean500", (p_units, [], None)),
+        ("Halo_R_TopHat200", (p_units, [], None)),
+        ("BH_Mass", (m_units, [], None)),
+        ("Stars/Mass", (m_units, [], None)),
+        ("BH_Mdot", (mdot_units, [], None)),
+        ("StarFormationRate", (mdot_units, [], None)),
+    )
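
Each known_particle_fields entry follows the FieldInfoContainer convention of (on-disk name, (units, aliases, display name)); the _0/_1/_2 suffixes do not exist on disk but are synthesized per component by subfind_field_list in io.py below. An illustrative decomposition of one entry:

    # (on-disk name, (units string, list of yt aliases, display name))
    name, (units, aliases, display_name) = \
        ("CenterOfMass_0", ("Mpccm/h", ["particle_position_x"], None))
    # "Mpccm/h" is a comoving length unit; the alias exposes this
    # column under yt's standard particle_position_x field name.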
diff -r 85cf74b2256cfe93057f6858619f5f2fdbf4e55f -r 2baafb904700f2eaccb6af2f6d545db6c2a286ca yt/frontends/halo_catalogs/owls_subfind/io.py
--- /dev/null
+++ b/yt/frontends/halo_catalogs/owls_subfind/io.py
@@ -0,0 +1,209 @@
+"""
+OWLSSubfind data-file handling function
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import h5py
+import numpy as np
+
+from yt.utilities.exceptions import *
+from yt.funcs import mylog
+
+from yt.utilities.io_handler import \
+    BaseIOHandler
+
+from yt.utilities.lib.geometry_utils import compute_morton
+
+from yt.geometry.oct_container import _ORDER_MAX
+
+class IOHandlerOWLSSubfindHDF5(BaseIOHandler):
+    _dataset_type = "subfind_hdf5"
+
+    def __init__(self, pf):
+        super(IOHandlerOWLSSubfindHDF5, self).__init__(pf)
+        self.offset_fields = set([])
+
+    def _read_fluid_selection(self, chunks, selector, fields, size):
+        raise NotImplementedError
+
+    def _read_particle_coords(self, chunks, ptf):
+        # This will read chunks and yield the results.
+        chunks = list(chunks)
+        data_files = set([])
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in data_files:
+            with h5py.File(data_file.filename, "r") as f:
+                for ptype, field_list in sorted(ptf.items()):
+                    pcount = data_file.total_particles[ptype]
+                    coords = f[ptype]["CenterOfMass"].value.astype("float64")
+                    coords = np.resize(coords, (pcount, 3))
+                    x = coords[:, 0]
+                    y = coords[:, 1]
+                    z = coords[:, 2]
+                    yield ptype, (x, y, z)
+
+    def _read_offset_particle_field(self, field, data_file, fh):
+        field_data = np.empty(data_file.total_particles["FOF"], dtype="float64")
+        fofindex = np.arange(data_file.total_particles["FOF"]) + data_file.index_start["FOF"]
+        for offset_file in data_file.offset_files:
+            if fh.filename == offset_file.filename:
+                ofh = fh
+            else:
+                ofh = h5py.File(offset_file.filename, "r")
+            subindex = np.arange(offset_file.total_offset) + offset_file.offset_start
+            substart = max(fofindex[0] - subindex[0], 0)
+            subend = min(fofindex[-1] - subindex[0], subindex.size - 1)
+            fofstart = substart + subindex[0] - fofindex[0]
+            fofend = subend + subindex[0] - fofindex[0]
+            field_data[fofstart:fofend + 1] = ofh["SUBFIND"][field][substart:subend + 1]
+        return field_data
+
+    def _read_particle_fields(self, chunks, ptf, selector):
+        # Now we have all the sizes, and we can allocate
+        chunks = list(chunks)
+        data_files = set([])
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        for data_file in data_files:
+            with h5py.File(data_file.filename, "r") as f:
+                for ptype, field_list in sorted(ptf.items()):
+                    pcount = data_file.total_particles[ptype]
+                    coords = f[ptype]["CenterOfMass"].value.astype("float64")
+                    coords = np.resize(coords, (pcount, 3))
+                    x = coords[:, 0]
+                    y = coords[:, 1]
+                    z = coords[:, 2]
+                    mask = selector.select_points(x, y, z)
+                    del x, y, z
+                    if mask is None: continue
+                    for field in field_list:
+                        if field in self.offset_fields:
+                            field_data = \
+                              self._read_offset_particle_field(field, data_file, f)
+                        else:
+                            if field == "particle_identifier":
+                                field_data = \
+                                  np.arange(data_file.total_particles[ptype]) + \
+                                  data_file.index_start[ptype]
+                            elif field in f[ptype]:
+                                field_data = f[ptype][field].value.astype("float64")
+                            else:
+                                fname = field[:field.rfind("_")]
+                                field_data = f[ptype][fname].value.astype("float64")
+                                my_div = field_data.size / pcount
+                                if my_div > 1:
+                                    field_data = np.resize(field_data, (pcount, my_div))
+                                    findex = int(field[field.rfind("_") + 1:])
+                                    field_data = field_data[:, findex]
+                        data = field_data[mask]
+                        yield (ptype, field), data
+
+    def _initialize_index(self, data_file, regions):
+        pcount = sum(self._count_particles(data_file).values())
+        morton = np.empty(pcount, dtype='uint64')
+        mylog.debug("Initializing index % 5i (% 7i particles)",
+                    data_file.file_id, pcount)
+        ind = 0
+        with h5py.File(data_file.filename, "r") as f:
+            if not f.keys(): return None
+            dx = np.finfo(f["FOF"]['CenterOfMass'].dtype).eps
+            dx = 2.0*self.pf.quan(dx, "code_length")
+
+            for ptype, pattr in zip(["FOF", "SUBFIND"],
+                                    ["Number_of_groups", "Number_of_subgroups"]):
+                my_pcount = f[ptype].attrs[pattr]
+                pos = f[ptype]["CenterOfMass"].value.astype("float64")
+                pos = np.resize(pos, (my_pcount, 3))
+                pos = data_file.pf.arr(pos, "code_length")
+
+                # These are 32 bit numbers, so we give a little leeway.
+                # Otherwise, for big sets of particles, we often will bump into the
+                # domain edges.  This helps alleviate that.
+                np.clip(pos, self.pf.domain_left_edge + dx,
+                        self.pf.domain_right_edge - dx, pos)
+                if np.any(pos.min(axis=0) < self.pf.domain_left_edge) or \
+                   np.any(pos.max(axis=0) > self.pf.domain_right_edge):
+                    raise YTDomainOverflow(pos.min(axis=0),
+                                           pos.max(axis=0),
+                                           self.pf.domain_left_edge,
+                                           self.pf.domain_right_edge)
+                regions.add_data_file(pos, data_file.file_id)
+                morton[ind:ind+pos.shape[0]] = compute_morton(
+                    pos[:,0], pos[:,1], pos[:,2],
+                    data_file.pf.domain_left_edge,
+                    data_file.pf.domain_right_edge)
+                ind += pos.shape[0]
+        return morton
+
+    def _count_particles(self, data_file):
+        with h5py.File(data_file.filename, "r") as f:
+            # We need this to figure out where the offset fields are stored.
+            data_file.total_offset = f["SUBFIND"].attrs["Number_of_groups"]
+            return {"FOF": f["FOF"].attrs["Number_of_groups"],
+                    "SUBFIND": f["FOF"].attrs["Number_of_subgroups"]}
+
+    def _identify_fields(self, data_file):
+        fields = [(ptype, "particle_identifier")
+                  for ptype in self.pf.particle_types_raw]
+        pcount = data_file.total_particles
+        with h5py.File(data_file.filename, "r") as f:
+            for ptype in self.pf.particle_types_raw:
+                my_fields, my_offset_fields = \
+                  subfind_field_list(f[ptype], ptype, data_file.total_particles)
+                fields.extend(my_fields)
+                self.offset_fields = self.offset_fields.union(set(my_offset_fields))
+        return fields, {}
+
+def subfind_field_list(fh, ptype, pcount):
+    fields = []
+    offset_fields = []
+    for field in fh.keys():
+        if "PartType" in field:
+            # These are halo member particles
+            continue
+        elif isinstance(fh[field], h5py.Group):
+            my_fields, my_offset_fields = \
+              subfind_field_list(fh[field], ptype, pcount)
+            fields.extend(my_fields)
+            offset_fields.extend(my_offset_fields)
+        else:
+            if not fh[field].size % pcount[ptype]:
+                my_div = fh[field].size / pcount[ptype]
+                fname = fh[field].name[fh[field].name.find(ptype) + len(ptype) + 1:]
+                if my_div > 1:
+                    for i in range(my_div):
+                        fields.append((ptype, "%s_%d" % (fname, i)))
+                else:
+                    fields.append((ptype, fname))
+            elif ptype == "SUBFIND" and \
+              not fh[field].size % fh["/SUBFIND"].attrs["Number_of_groups"]:
+                # These are actually FOF fields, but they were written after
+                # a load-balancing step moved halos around, and thus they do not
+                # correspond to the halos stored in the FOF group.
+                my_div = fh[field].size / fh["/SUBFIND"].attrs["Number_of_groups"]
+                fname = fh[field].name[fh[field].name.find(ptype) + len(ptype) + 1:]
+                if my_div > 1:
+                    for i in range(my_div):
+                        fields.append(("FOF", "%s_%d" % (fname, i)))
+                else:
+                    fields.append(("FOF", fname))
+                offset_fields.append(fname)
+            else:
+                mylog.warn("Cannot add field (%s, %s) with size %d." % \
+                           (ptype, fh[field].name, fh[field].size))
+                continue
+    return fields, offset_fields
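
The slice arithmetic in _read_offset_particle_field above can be sanity-checked with toy index ranges (all values hypothetical):

    import numpy as np

    # This file holds FOF halos 100..199; one offset file holds entries 150..249.
    fofindex = np.arange(100) + 100
    subindex = np.arange(100) + 150

    substart = max(fofindex[0] - subindex[0], 0)                 # 0
    subend = min(fofindex[-1] - subindex[0], subindex.size - 1)  # 49
    fofstart = substart + subindex[0] - fofindex[0]              # 50
    fofend = subend + subindex[0] - fofindex[0]                  # 99
    # field_data[50:100] is filled from the offset file's entries [0:50],
    # i.e. global indices 150..199 -- exactly the overlap of the two ranges.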
diff -r 85cf74b2256cfe93057f6858619f5f2fdbf4e55f -r 2baafb904700f2eaccb6af2f6d545db6c2a286ca yt/frontends/sph/data_structures.py
--- a/yt/frontends/sph/data_structures.py
+++ b/yt/frontends/sph/data_structures.py
@@ -343,7 +343,8 @@
         try:
             fileh = h5py.File(args[0], mode='r')
             if "Constants" in fileh["/"].keys() and \
-               "Header" in fileh["/"].keys():
+               "Header" in fileh["/"].keys() and \
+               "SUBFIND" not in fileh["/"].keys():
                 fileh.close()
                 return True
             fileh.close()
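
This sph change is the flip side of the new frontend's _is_valid: yt identifies a dataset by polling each registered Dataset subclass, so the OWLS gas frontend must now decline files that contain a SUBFIND group. A hedged sketch of the resulting discrimination (the helper name is hypothetical, not part of the patch):

    import h5py

    def classify_owls_file(path):
        # Mirrors the two _is_valid checks after this patch.
        with h5py.File(path, "r") as f:
            if "Constants" not in f or "Header" not in f:
                return None
            return "halo catalog" if "SUBFIND" in f else "gas snapshot"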
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.