[yt-svn] commit/yt: 8 new changesets
commits-noreply at bitbucket.org
commits-noreply at bitbucket.org
Wed Sep 28 15:54:12 PDT 2016
8 new commits in yt:
https://bitbucket.org/yt_analysis/yt/commits/63651c854f88/
Changeset: 63651c854f88
Branch: yt
User: atmyers
Date: 2016-09-23 19:26:43+00:00
Summary: Making the streaming unstructured mesh handler work with 2D datasets.
Affected #: 1 file
diff -r 71960f84ae9d3e12d70759e342b2bc581581674c -r 63651c854f88afbb80f241b2d9ecaa5f6dc438c6 yt/frontends/stream/data_structures.py
--- a/yt/frontends/stream/data_structures.py
+++ b/yt/frontends/stream/data_structures.py
@@ -1659,9 +1659,11 @@
connectivity length and should be of shape (N,M) where N is the number
of elements and M is the number of vertices per element.
coordinates : array_like
- The 3D coordinates of mesh vertices. This should be of size (L,3) where
- L is the number of vertices. When loading more than one mesh, the data
- for each mesh should be concatenated into a single coordinates array.
+ The 3D coordinates of mesh vertices. This should be of size (L, D) where
+ L is the number of vertices and D is the number of coordinates per vertex
+ (the spatial dimensions of the dataset). When loading more than one mesh,
+ the data for each mesh should be concatenated into a single coordinates
+ array.
node_data : dict or list of dicts
For a single mesh, a dict mapping field names to 2D numpy arrays,
representing data defined at element vertices. For multiple meshes,
@@ -1720,6 +1722,7 @@
"""
+ dimensionality = coordinates.shape[1]
domain_dimensions = np.ones(3, "int32") * 2
nprocs = 1
@@ -1750,9 +1753,14 @@
data = ensure_list(data)
if bbox is None:
- bbox = np.array([[coordinates[:,i].min() - 0.1 * abs(coordinates[:,i].min()),
- coordinates[:,i].max() + 0.1 * abs(coordinates[:,i].max())]
- for i in range(3)], "float64")
+ bbox = [[coordinates[:,i].min() - 0.1 * abs(coordinates[:,i].min()),
+ coordinates[:,i].max() + 0.1 * abs(coordinates[:,i].max())]
+ for i in range(dimensionality)]
+
+ if dimensionality == 2:
+ bbox.append([0.0, 1.0])
+
+ bbox = np.array(bbox, dtype=np.float64)
domain_left_edge = np.array(bbox[:, 0], 'float64')
domain_right_edge = np.array(bbox[:, 1], 'float64')
grid_levels = np.zeros(nprocs, dtype='int32').reshape((nprocs,1))
@@ -1778,7 +1786,7 @@
raise RuntimeError
grid_left_edges = domain_left_edge
grid_right_edges = domain_right_edge
- grid_dimensions = domain_dimensions.reshape(nprocs,3).astype("int32")
+ grid_dimensions = domain_dimensions.reshape(nprocs, 3).astype("int32")
if length_unit is None:
length_unit = 'code_length'
@@ -1811,7 +1819,7 @@
handler.domain_left_edge = domain_left_edge
handler.domain_right_edge = domain_right_edge
handler.refine_by = 2
- handler.dimensionality = 3
+ handler.dimensionality = dimensionality
handler.domain_dimensions = domain_dimensions
handler.simulation_time = sim_time
handler.cosmology_simulation = 0
https://bitbucket.org/yt_analysis/yt/commits/b57fddb1f2c8/
Changeset: b57fddb1f2c8
Branch: yt
User: atmyers
Date: 2016-09-23 19:46:02+00:00
Summary: Add an error message stating that 1D unstructured mesh data isn't supported right now.
Affected #: 3 files
diff -r 63651c854f88afbb80f241b2d9ecaa5f6dc438c6 -r b57fddb1f2c8b68d96b32d18b430e8a7081ee105 yt/frontends/exodus_ii/data_structures.py
--- a/yt/frontends/exodus_ii/data_structures.py
+++ b/yt/frontends/exodus_ii/data_structures.py
@@ -28,7 +28,9 @@
from .fields import \
ExodusIIFieldInfo
from .util import \
- load_info_records, sanitize_string
+ load_info_records, \
+ sanitize_string, \
+ get_num_pseudo_dims
class ExodusIIUnstructuredMesh(UnstructuredMesh):
@@ -159,6 +161,12 @@
self.default_field = [f for f in self.field_list
if f[0] == 'connect1'][-1]
+ for mesh in self.index.meshes:
+ num_pseudo = get_num_pseudo_dims(mesh.connectivity_coords)
+ if (num_pseudo > 1 or self.dimensionality < 2):
+ raise RuntimeError("1D unstructured mesh data "
+ "is currently not supported.")
+
def _set_code_unit_attributes(self):
# This is where quantities are created that represent the various
# on-disk units. These are the currently available quantities which
diff -r 63651c854f88afbb80f241b2d9ecaa5f6dc438c6 -r b57fddb1f2c8b68d96b32d18b430e8a7081ee105 yt/frontends/exodus_ii/util.py
--- a/yt/frontends/exodus_ii/util.py
+++ b/yt/frontends/exodus_ii/util.py
@@ -1,3 +1,4 @@
+import numpy as np
import sys
import string
from itertools import takewhile
@@ -6,6 +7,10 @@
_printable = set([ord(_) for _ in string.printable])
+def get_num_pseudo_dims(coords):
+ D = coords.shape[1]
+ return sum([np.all(coords[:, dim] == 0.0) for dim in range(D)])
+
def sanitize_string(s):
if sys.version_info > (3, ):
return "".join([chr(_) for _ in takewhile(lambda a: a in _printable, s)])
diff -r 63651c854f88afbb80f241b2d9ecaa5f6dc438c6 -r b57fddb1f2c8b68d96b32d18b430e8a7081ee105 yt/frontends/stream/data_structures.py
--- a/yt/frontends/stream/data_structures.py
+++ b/yt/frontends/stream/data_structures.py
@@ -69,6 +69,8 @@
from yt.extern.six import string_types
from .fields import \
StreamFieldInfo
+from yt.frontends.exodus_ii.util import \
+ get_num_pseudo_dims
class StreamGrid(AMRGridPatch):
"""
@@ -1661,9 +1663,9 @@
coordinates : array_like
The 3D coordinates of mesh vertices. This should be of size (L, D) where
L is the number of vertices and D is the number of coordinates per vertex
- (the spatial dimensions of the dataset). When loading more than one mesh,
- the data for each mesh should be concatenated into a single coordinates
- array.
+ (the spatial dimensions of the dataset). Currently this must be either 2 or 3.
+ When loading more than one mesh, the data for each mesh should be concatenated
+ into a single coordinates array.
node_data : dict or list of dicts
For a single mesh, a dict mapping field names to 2D numpy arrays,
representing data defined at element vertices. For multiple meshes,
@@ -1723,6 +1725,11 @@
"""
dimensionality = coordinates.shape[1]
+ num_pseudo = get_num_pseudo_dims(coordinates)
+ if (num_pseudo > 1 or dimensionality < 2):
+ raise RuntimeError("1D unstructured mesh data "
+ "is currently not supported.")
+
domain_dimensions = np.ones(3, "int32") * 2
nprocs = 1
https://bitbucket.org/yt_analysis/yt/commits/01ccdacf266e/
Changeset: 01ccdacf266e
Branch: yt
User: atmyers
Date: 2016-09-23 19:47:08+00:00
Summary: Remove the now superseded exodusII_reader
Affected #: 1 file
diff -r b57fddb1f2c8b68d96b32d18b430e8a7081ee105 -r 01ccdacf266e9dae414217e95dad1681b4d2c4ed yt/utilities/exodusII_reader.py
--- a/yt/utilities/exodusII_reader.py
+++ /dev/null
@@ -1,46 +0,0 @@
-import string
-from itertools import takewhile
-from netCDF4 import Dataset
-import numpy as np
-from yt.config import ytcfg
-import os
-
-
-def sanitize_string(s):
- s = "".join(_ for _ in takewhile(lambda a: a in string.printable, s))
- return s
-
-
-def get_data(fn):
- try:
- f = Dataset(fn)
- except RuntimeError:
- f = Dataset(os.path.join(ytcfg.get("yt", "test_data_dir"), fn))
- fvars = f.variables
- # Is this correct?
- etypes = fvars["eb_status"][:]
- nelem = etypes.shape[0]
- varnames = [sanitize_string(v.tostring()) for v in
- fvars["name_elem_var"][:]]
- nodnames = [sanitize_string(v.tostring()) for v in
- fvars["name_nod_var"][:]]
- coord = np.array([fvars["coord%s" % ax][:]
- for ax in 'xyz']).transpose().copy()
- coords = []
- connects = []
- data = []
- for i in range(nelem):
- connects.append(fvars["connect%s" % (i+1)][:].astype("i8"))
- ci = connects[-1]
- coords.append(coord) # Same for all
- vals = {}
- for j, v in enumerate(varnames):
- values = fvars["vals_elem_var%seb%s" % (j+1, i+1)][:]
- vals['gas', v] = values.astype("f8")[-1, :]
- for j, v in enumerate(nodnames):
- # We want just for this set of nodes all the node variables
- # Use (ci - 1) to get these values
- values = fvars["vals_nod_var%s" % (j+1)][:]
- vals['gas', v] = values.astype("f8")[-1, ci - 1, ...]
- data.append(vals)
- return coords, connects, data
https://bitbucket.org/yt_analysis/yt/commits/323420bbfbe1/
Changeset: 323420bbfbe1
Branch: yt
User: atmyers
Date: 2016-09-23 19:52:00+00:00
Summary: update the doc example that used exodusII_reader
Affected #: 1 file
diff -r 01ccdacf266e9dae414217e95dad1681b4d2c4ed -r 323420bbfbe104d6bf16fb99826a31f6c000e851 doc/source/examining/loading_data.rst
--- a/doc/source/examining/loading_data.rst
+++ b/doc/source/examining/loading_data.rst
@@ -1264,23 +1264,37 @@
.. code-block:: python
- import yt
- import numpy
- from yt.utilities.exodusII_reader import get_data
+ import yt
+ import numpy as np
- coords, connectivity, data = get_data("MOOSE_sample_data/out.e-s010")
+ coords = np.array([[0.0, 0.0],
+ [1.0, 0.0],
+ [1.0, 1.0],
+ [0.0, 1.0]], dtype=np.float64)
-This uses a publically available `MOOSE <http://mooseframework.org/>`
-dataset along with the get_data function to parse the coords, connectivity,
-and data. Then, these can be loaded as an in-memory dataset as follows:
+ connect = np.array([[0, 1, 3],
+ [1, 2, 3]], dtype=np.int64)
+
+ data = {}
+ data['connect1', 'test'] = np.array([[0.0, 1.0, 3.0],
+ [1.0, 2.0, 3.0]], dtype=np.float64)
+
+Here, we have made up a simple, 2D unstructured mesh dataset consisting of two
+triangles and one node-centered data field. This data can be loaded as an in-memory
+ dataset as follows:
.. code-block:: python
- mesh_id = 0
- ds = yt.load_unstructured_mesh(data[mesh_id], connectivity[mesh_id], coords[mesh_id])
+ ds = yt.load_unstructured_mesh(connect, coords, data)
-Note that load_unstructured_mesh can take either a single or a list of meshes.
-Here, we have selected only the first mesh to load.
+Note that load_unstructured_mesh can take either a single mesh or a list of meshes.
+Here, we only have one mesh. The in-memory dataset can then be visualized as usual,
+e.g.:
+
+.. code-block:: python
+
+ sl = yt.SlicePlot(ds, 'z', 'test')
+ sl.annotate_mesh_lines()
.. rubric:: Caveats
https://bitbucket.org/yt_analysis/yt/commits/f87a45da4a6a/
Changeset: f87a45da4a6a
Branch: yt
User: atmyers
Date: 2016-09-23 20:41:07+00:00
Summary: restore the exodusII_reader module, but add a note that it is deprecated.
Affected #: 1 file
diff -r 323420bbfbe104d6bf16fb99826a31f6c000e851 -r f87a45da4a6a1c13f797973c6650aa9075399e01 yt/utilities/exodusII_reader.py
--- /dev/null
+++ b/yt/utilities/exodusII_reader.py
@@ -0,0 +1,51 @@
+import string
+from itertools import takewhile
+from netCDF4 import Dataset
+import numpy as np
+from yt.config import ytcfg
+import os
+import warnings
+
+
+def sanitize_string(s):
+ s = "".join(_ for _ in takewhile(lambda a: a in string.printable, s))
+ return s
+
+
+def get_data(fn):
+ warnings.warn("The yt.utilities.exodusII_reader module is deprecated "
+ "and will be removed in a future release. "
+ "Please use the normal yt.load() command to access "
+ "your data instead.")
+ try:
+ f = Dataset(fn)
+ except RuntimeError:
+ f = Dataset(os.path.join(ytcfg.get("yt", "test_data_dir"), fn))
+ fvars = f.variables
+ # Is this correct?
+ etypes = fvars["eb_status"][:]
+ nelem = etypes.shape[0]
+ varnames = [sanitize_string(v.tostring()) for v in
+ fvars["name_elem_var"][:]]
+ nodnames = [sanitize_string(v.tostring()) for v in
+ fvars["name_nod_var"][:]]
+ coord = np.array([fvars["coord%s" % ax][:]
+ for ax in 'xyz']).transpose().copy()
+ coords = []
+ connects = []
+ data = []
+ for i in range(nelem):
+ connects.append(fvars["connect%s" % (i+1)][:].astype("i8"))
+ ci = connects[-1]
+ coords.append(coord) # Same for all
+ vals = {}
+ for j, v in enumerate(varnames):
+ values = fvars["vals_elem_var%seb%s" % (j+1, i+1)][:]
+ vals['gas', v] = values.astype("f8")[-1, :]
+ for j, v in enumerate(nodnames):
+ # We want just for this set of nodes all the node variables
+ # Use (ci - 1) to get these values
+ values = fvars["vals_nod_var%s" % (j+1)][:]
+ vals['gas', v] = values.astype("f8")[-1, ci - 1, ...]
+ data.append(vals)
+ return coords, connects, data
https://bitbucket.org/yt_analysis/yt/commits/a164c0126ac4/
Changeset: a164c0126ac4
Branch: yt
User: atmyers
Date: 2016-09-23 20:47:18+00:00
Summary: data is plural
Affected #: 2 files
diff -r f87a45da4a6a1c13f797973c6650aa9075399e01 -r a164c0126ac4a939e5ee96403415b0d9a0f38379 yt/frontends/exodus_ii/data_structures.py
--- a/yt/frontends/exodus_ii/data_structures.py
+++ b/yt/frontends/exodus_ii/data_structures.py
@@ -165,7 +165,7 @@
num_pseudo = get_num_pseudo_dims(mesh.connectivity_coords)
if (num_pseudo > 1 or self.dimensionality < 2):
raise RuntimeError("1D unstructured mesh data "
- "is currently not supported.")
+ "are currently not supported.")
def _set_code_unit_attributes(self):
# This is where quantities are created that represent the various
diff -r f87a45da4a6a1c13f797973c6650aa9075399e01 -r a164c0126ac4a939e5ee96403415b0d9a0f38379 yt/frontends/stream/data_structures.py
--- a/yt/frontends/stream/data_structures.py
+++ b/yt/frontends/stream/data_structures.py
@@ -1728,7 +1728,7 @@
num_pseudo = get_num_pseudo_dims(coordinates)
if (num_pseudo > 1 or dimensionality < 2):
raise RuntimeError("1D unstructured mesh data "
- "is currently not supported.")
+ "are currently not supported.")
domain_dimensions = np.ones(3, "int32") * 2
nprocs = 1
https://bitbucket.org/yt_analysis/yt/commits/44b450c81f71/
Changeset: 44b450c81f71
Branch: yt
User: atmyers
Date: 2016-09-23 22:44:29+00:00
Summary: fixing a bit of rst formatting weirdness
Affected #: 1 file
diff -r a164c0126ac4a939e5ee96403415b0d9a0f38379 -r 44b450c81f71d8035048684584c42d332e8d318d doc/source/examining/loading_data.rst
--- a/doc/source/examining/loading_data.rst
+++ b/doc/source/examining/loading_data.rst
@@ -1281,7 +1281,7 @@
Here, we have made up a simple, 2D unstructured mesh dataset consisting of two
triangles and one node-centered data field. This data can be loaded as an in-memory
- dataset as follows:
+dataset as follows:
.. code-block:: python
https://bitbucket.org/yt_analysis/yt/commits/098f53627175/
Changeset: 098f53627175
Branch: yt
User: ngoldbaum
Date: 2016-09-28 22:53:44+00:00
Summary: Merged in atmyers/yt (pull request #2390)
Removing the now-superseded exodusII_reader function. Fixes Issue #1282
Affected #: 5 files
diff -r 9b5937af58578d073179329863c7e220f8ec7df6 -r 098f53627175f611a2b5c45e69efbd5048bb70fb doc/source/examining/loading_data.rst
--- a/doc/source/examining/loading_data.rst
+++ b/doc/source/examining/loading_data.rst
@@ -1264,23 +1264,37 @@
.. code-block:: python
- import yt
- import numpy
- from yt.utilities.exodusII_reader import get_data
+ import yt
+ import numpy as np
- coords, connectivity, data = get_data("MOOSE_sample_data/out.e-s010")
+ coords = np.array([[0.0, 0.0],
+ [1.0, 0.0],
+ [1.0, 1.0],
+ [0.0, 1.0]], dtype=np.float64)
-This uses a publically available `MOOSE <http://mooseframework.org/>`
-dataset along with the get_data function to parse the coords, connectivity,
-and data. Then, these can be loaded as an in-memory dataset as follows:
+ connect = np.array([[0, 1, 3],
+ [1, 2, 3]], dtype=np.int64)
+
+ data = {}
+ data['connect1', 'test'] = np.array([[0.0, 1.0, 3.0],
+ [1.0, 2.0, 3.0]], dtype=np.float64)
+
+Here, we have made up a simple, 2D unstructured mesh dataset consisting of two
+triangles and one node-centered data field. This data can be loaded as an in-memory
+dataset as follows:
.. code-block:: python
- mesh_id = 0
- ds = yt.load_unstructured_mesh(data[mesh_id], connectivity[mesh_id], coords[mesh_id])
+ ds = yt.load_unstructured_mesh(connect, coords, data)
-Note that load_unstructured_mesh can take either a single or a list of meshes.
-Here, we have selected only the first mesh to load.
+Note that load_unstructured_mesh can take either a single mesh or a list of meshes.
+Here, we only have one mesh. The in-memory dataset can then be visualized as usual,
+e.g.:
+
+.. code-block:: python
+
+ sl = yt.SlicePlot(ds, 'z', 'test')
+ sl.annotate_mesh_lines()
.. rubric:: Caveats
diff -r 9b5937af58578d073179329863c7e220f8ec7df6 -r 098f53627175f611a2b5c45e69efbd5048bb70fb yt/frontends/exodus_ii/data_structures.py
--- a/yt/frontends/exodus_ii/data_structures.py
+++ b/yt/frontends/exodus_ii/data_structures.py
@@ -28,7 +28,9 @@
from .fields import \
ExodusIIFieldInfo
from .util import \
- load_info_records, sanitize_string
+ load_info_records, \
+ sanitize_string, \
+ get_num_pseudo_dims
class ExodusIIUnstructuredMesh(UnstructuredMesh):
@@ -159,6 +161,12 @@
self.default_field = [f for f in self.field_list
if f[0] == 'connect1'][-1]
+ for mesh in self.index.meshes:
+ num_pseudo = get_num_pseudo_dims(mesh.connectivity_coords)
+ if (num_pseudo > 1 or self.dimensionality < 2):
+ raise RuntimeError("1D unstructured mesh data "
+ "are currently not supported.")
+
def _set_code_unit_attributes(self):
# This is where quantities are created that represent the various
# on-disk units. These are the currently available quantities which
diff -r 9b5937af58578d073179329863c7e220f8ec7df6 -r 098f53627175f611a2b5c45e69efbd5048bb70fb yt/frontends/exodus_ii/util.py
--- a/yt/frontends/exodus_ii/util.py
+++ b/yt/frontends/exodus_ii/util.py
@@ -1,3 +1,4 @@
+import numpy as np
import sys
import string
from itertools import takewhile
@@ -6,6 +7,10 @@
_printable = set([ord(_) for _ in string.printable])
+def get_num_pseudo_dims(coords):
+ D = coords.shape[1]
+ return sum([np.all(coords[:, dim] == 0.0) for dim in range(D)])
+
def sanitize_string(s):
if sys.version_info > (3, ):
return "".join([chr(_) for _ in takewhile(lambda a: a in _printable, s)])
diff -r 9b5937af58578d073179329863c7e220f8ec7df6 -r 098f53627175f611a2b5c45e69efbd5048bb70fb yt/frontends/stream/data_structures.py
--- a/yt/frontends/stream/data_structures.py
+++ b/yt/frontends/stream/data_structures.py
@@ -69,6 +69,8 @@
from yt.extern.six import string_types
from .fields import \
StreamFieldInfo
+from yt.frontends.exodus_ii.util import \
+ get_num_pseudo_dims
class StreamGrid(AMRGridPatch):
"""
@@ -1683,9 +1685,11 @@
connectivity length and should be of shape (N,M) where N is the number
of elements and M is the number of vertices per element.
coordinates : array_like
- The 3D coordinates of mesh vertices. This should be of size (L,3) where
- L is the number of vertices. When loading more than one mesh, the data
- for each mesh should be concatenated into a single coordinates array.
+ The 3D coordinates of mesh vertices. This should be of size (L, D) where
+ L is the number of vertices and D is the number of coordinates per vertex
+ (the spatial dimensions of the dataset). Currently this must be either 2 or 3.
+ When loading more than one mesh, the data for each mesh should be concatenated
+ into a single coordinates array.
node_data : dict or list of dicts
For a single mesh, a dict mapping field names to 2D numpy arrays,
representing data defined at element vertices. For multiple meshes,
@@ -1744,6 +1748,12 @@
"""
+ dimensionality = coordinates.shape[1]
+ num_pseudo = get_num_pseudo_dims(coordinates)
+ if (num_pseudo > 1 or dimensionality < 2):
+ raise RuntimeError("1D unstructured mesh data "
+ "are currently not supported.")
+
domain_dimensions = np.ones(3, "int32") * 2
nprocs = 1
@@ -1774,9 +1784,14 @@
data = ensure_list(data)
if bbox is None:
- bbox = np.array([[coordinates[:,i].min() - 0.1 * abs(coordinates[:,i].min()),
- coordinates[:,i].max() + 0.1 * abs(coordinates[:,i].max())]
- for i in range(3)], "float64")
+ bbox = [[coordinates[:,i].min() - 0.1 * abs(coordinates[:,i].min()),
+ coordinates[:,i].max() + 0.1 * abs(coordinates[:,i].max())]
+ for i in range(dimensionality)]
+
+ if dimensionality == 2:
+ bbox.append([0.0, 1.0])
+
+ bbox = np.array(bbox, dtype=np.float64)
domain_left_edge = np.array(bbox[:, 0], 'float64')
domain_right_edge = np.array(bbox[:, 1], 'float64')
grid_levels = np.zeros(nprocs, dtype='int32').reshape((nprocs,1))
@@ -1802,7 +1817,7 @@
raise RuntimeError
grid_left_edges = domain_left_edge
grid_right_edges = domain_right_edge
- grid_dimensions = domain_dimensions.reshape(nprocs,3).astype("int32")
+ grid_dimensions = domain_dimensions.reshape(nprocs, 3).astype("int32")
if length_unit is None:
length_unit = 'code_length'
@@ -1835,7 +1850,7 @@
handler.domain_left_edge = domain_left_edge
handler.domain_right_edge = domain_right_edge
handler.refine_by = 2
- handler.dimensionality = 3
+ handler.dimensionality = dimensionality
handler.domain_dimensions = domain_dimensions
handler.simulation_time = sim_time
handler.cosmology_simulation = 0
diff -r 9b5937af58578d073179329863c7e220f8ec7df6 -r 098f53627175f611a2b5c45e69efbd5048bb70fb yt/utilities/exodusII_reader.py
--- a/yt/utilities/exodusII_reader.py
+++ b/yt/utilities/exodusII_reader.py
@@ -4,6 +4,7 @@
import numpy as np
from yt.config import ytcfg
import os
+import warnings
def sanitize_string(s):
@@ -12,6 +13,10 @@
def get_data(fn):
+ warnings.warn("The yt.utilities.exodusII_reader module is deprecated "
+ "and will be removed in a future release. "
+ "Please use the normal yt.load() command to access "
+ "your data instead.")
try:
f = Dataset(fn)
except RuntimeError:
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
More information about the yt-svn
mailing list