[yt-svn] commit/yt: 7 new changesets
commits-noreply at bitbucket.org
commits-noreply at bitbucket.org
Thu Mar 16 13:55:13 PDT 2017
7 new commits in yt:
https://bitbucket.org/yt_analysis/yt/commits/4974a44db551/
Changeset: 4974a44db551
Branch: yt
User: atmyers
Date: 2017-03-01 23:36:53+00:00
Summary: [BUGFIX] Handle a couple of places where uncaught exceptions would be raised in yt.load() if h5py was not installed.
Affected #: 2 files
diff -r 3eca2ae80ab14a48b643d3055d7d3c0933fa77ae -r 4974a44db5512f5231ed153a9f648d48cce3383f yt/frontends/flash/data_structures.py
--- a/yt/frontends/flash/data_structures.py
+++ b/yt/frontends/flash/data_structures.py
@@ -439,7 +439,7 @@
fileh = HDF5FileHandler(args[0])
if "bounding box" in fileh["/"].keys():
return True
- except:
+ except (IOError, OSError, ImportError):
pass
return False
@@ -494,7 +494,7 @@
if "bounding box" not in fileh["/"].keys() \
and "localnp" in fileh["/"].keys():
return True
- except IOError:
+ except (IOError, OSError, ImportError):
pass
return False
diff -r 3eca2ae80ab14a48b643d3055d7d3c0933fa77ae -r 4974a44db5512f5231ed153a9f648d48cce3383f yt/frontends/open_pmd/data_structures.py
--- a/yt/frontends/open_pmd/data_structures.py
+++ b/yt/frontends/open_pmd/data_structures.py
@@ -497,7 +497,7 @@
"""
try:
f = h5.File(args[0], "r")
- except (IOError, OSError):
+ except (IOError, OSError, ImportError):
return False
requirements = ["openPMD", "basePath", "meshesPath", "particlesPath"]
https://bitbucket.org/yt_analysis/yt/commits/dd4d33cc7001/
Changeset: dd4d33cc7001
Branch: yt
User: atmyers
Date: 2017-03-02 01:26:14+00:00
Summary: Warn if the user passes what appear to be hdf5 or netcdf classic files, but the corresponding modules are not importable.
Affected #: 5 files
diff -r 4974a44db5512f5231ed153a9f648d48cce3383f -r dd4d33cc70012c584c754629b4e6818df69e91e1 yt/frontends/chombo/data_structures.py
--- a/yt/frontends/chombo/data_structures.py
+++ b/yt/frontends/chombo/data_structures.py
@@ -13,7 +13,7 @@
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
-from yt.utilities.on_demand_imports import _h5py as h5py
+from yt.utilities.on_demand_imports import _h5py as h5py, NotAModule
import re
import os
import weakref
@@ -34,7 +34,8 @@
from yt.data_objects.static_output import \
Dataset
from yt.utilities.file_handler import \
- HDF5FileHandler
+ HDF5FileHandler, \
+ warn_h5py
from yt.utilities.parallel_tools.parallel_analysis_interface import \
parallel_root_only
from yt.utilities.lib.misc_utilities import \
@@ -730,6 +731,8 @@
@classmethod
def _is_valid(self, *args, **kwargs):
+ warn_h5py(args[0])
+
if not is_chombo_hdf5(args[0]):
return False
diff -r 4974a44db5512f5231ed153a9f648d48cce3383f -r dd4d33cc70012c584c754629b4e6818df69e91e1 yt/frontends/exodus_ii/data_structures.py
--- a/yt/frontends/exodus_ii/data_structures.py
+++ b/yt/frontends/exodus_ii/data_structures.py
@@ -23,8 +23,9 @@
from yt.data_objects.static_output import \
Dataset
from yt.data_objects.unions import MeshUnion
-from .io import \
- NetCDF4FileHandler
+from yt.utilities.file_handler import \
+ NetCDF4FileHandler, \
+ warn_netcdf
from yt.utilities.logger import ytLogger as mylog
from .fields import \
ExodusIIFieldInfo
@@ -378,6 +379,7 @@
@classmethod
def _is_valid(self, *args, **kwargs):
+ warn_netcdf(args[0])
try:
from netCDF4 import Dataset
filename = args[0]
diff -r 4974a44db5512f5231ed153a9f648d48cce3383f -r dd4d33cc70012c584c754629b4e6818df69e91e1 yt/frontends/flash/data_structures.py
--- a/yt/frontends/flash/data_structures.py
+++ b/yt/frontends/flash/data_structures.py
@@ -30,7 +30,8 @@
from yt.geometry.particle_geometry_handler import \
ParticleIndex
from yt.utilities.file_handler import \
- HDF5FileHandler
+ HDF5FileHandler, \
+ warn_h5py
from yt.utilities.physical_ratios import cm_per_mpc
from .fields import FLASHFieldInfo
@@ -489,6 +490,7 @@
@classmethod
def _is_valid(self, *args, **kwargs):
+ warn_h5py(args[0])
try:
fileh = HDF5FileHandler(args[0])
if "bounding box" not in fileh["/"].keys() \
diff -r 4974a44db5512f5231ed153a9f648d48cce3383f -r dd4d33cc70012c584c754629b4e6818df69e91e1 yt/frontends/open_pmd/data_structures.py
--- a/yt/frontends/open_pmd/data_structures.py
+++ b/yt/frontends/open_pmd/data_structures.py
@@ -31,7 +31,8 @@
get_component
from yt.funcs import setdefaultattr
from yt.geometry.grid_geometry_handler import GridIndex
-from yt.utilities.file_handler import HDF5FileHandler
+from yt.utilities.file_handler import HDF5FileHandler, \
+ warn_h5py
from yt.utilities.logger import ytLogger as mylog
from yt.utilities.on_demand_imports import _h5py as h5
@@ -495,6 +496,7 @@
def _is_valid(self, *args, **kwargs):
"""Checks whether the supplied file can be read by this frontend.
"""
+ warn_h5py(args[0])
try:
f = h5.File(args[0], "r")
except (IOError, OSError, ImportError):
diff -r 4974a44db5512f5231ed153a9f648d48cce3383f -r dd4d33cc70012c584c754629b4e6818df69e91e1 yt/utilities/file_handler.py
--- a/yt/utilities/file_handler.py
+++ b/yt/utilities/file_handler.py
@@ -14,6 +14,24 @@
#-----------------------------------------------------------------------------
from yt.utilities.on_demand_imports import _h5py as h5py
+from yt.utilities.on_demand_imports import NotAModule
+
+def valid_hdf5_signature(fn):
+ signature = b'\x89HDF\r\n\x1a\n'
+ try:
+ with open(fn, 'rb') as f:
+ header = f.read(8)
+ return header == signature
+ except:
+ return False
+
+
+def warn_h5py(fn):
+ needs_h5py = valid_hdf5_signature(fn)
+ if needs_h5py and isinstance(h5py.File, NotAModule):
+ raise RuntimeError("This appears to be an HDF5 file, "
+ "but h5py is not installed.")
+
class HDF5FileHandler(object):
handle = None
@@ -67,6 +85,29 @@
def close(self):
self.handle.close()
+
+def valid_netcdf_classic_signature(filename):
+ signature_v1 = b'CDF\x01'
+ signature_v2 = b'CDF\x02'
+ try:
+ with open(filename, 'rb') as f:
+ header = f.read(4)
+ return (header == signature_v1 or header == signature_v2)
+ except:
+ return False
+
+
+def warn_netcdf(fn):
+ needs_netcdf = valid_netcdf_classic_signature(fn)
+ if needs_netcdf:
+ try:
+ from netCDF4 import Dataset
+ except ImportError:
+ raise RuntimeError("This appears to be a netCDF file, "
+ "but the python bindings for netCDF4 "
+ "are not installed.")
+
+
class NetCDF4FileHandler(object):
def __init__(self, filename):
from netCDF4 import Dataset
https://bitbucket.org/yt_analysis/yt/commits/918f8e728cdf/
Changeset: 918f8e728cdf
Branch: yt
User: atmyers
Date: 2017-03-02 04:22:13+00:00
Summary: turn off rules E305 and E306
Affected #: 1 file
diff -r dd4d33cc70012c584c754629b4e6818df69e91e1 -r 918f8e728cdf09dc98c15c9d15d4c4ac88fd6a4b setup.cfg
--- a/setup.cfg
+++ b/setup.cfg
@@ -15,4 +15,4 @@
# vendored libraries
exclude = doc,benchmarks,*/api.py,*/__init__.py,*/__config__.py,yt/visualization/_mpl_imports.py,yt/utilities/lodgeit.py,yt/utilities/lru_cache.py,yt/utilities/poster/*,yt/extern/*,yt/mods.py,yt/utilities/fits_image.py
max-line-length=999
-ignore = E111,E121,E122,E123,E124,E125,E126,E127,E128,E129,E131,E201,E202,E211,E221,E222,E227,E228,E241,E301,E203,E225,E226,E231,E251,E261,E262,E265,E266,E302,E303,E402,E502,E701,E703,E731,W291,W292,W293,W391,W503
\ No newline at end of file
+ignore = E111,E121,E122,E123,E124,E125,E126,E127,E128,E129,E131,E201,E202,E211,E221,E222,E227,E228,E241,E301,E203,E225,E226,E231,E251,E261,E262,E265,E266,E302,E303,E305,E306,E402,E502,E701,E703,E731,W291,W292,W293,W391,W503
\ No newline at end of file
https://bitbucket.org/yt_analysis/yt/commits/2cb2972fefd1/
Changeset: 2cb2972fefd1
Branch: yt
User: atmyers
Date: 2017-03-02 04:22:32+00:00
Summary: add on demand import for netCDF4
Affected #: 3 files
diff -r 918f8e728cdf09dc98c15c9d15d4c4ac88fd6a4b -r 2cb2972fefd17a35c79b2a8cfce793e7d41fef22 yt/frontends/chombo/data_structures.py
--- a/yt/frontends/chombo/data_structures.py
+++ b/yt/frontends/chombo/data_structures.py
@@ -13,7 +13,7 @@
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
-from yt.utilities.on_demand_imports import _h5py as h5py, NotAModule
+from yt.utilities.on_demand_imports import _h5py as h5py
import re
import os
import weakref
diff -r 918f8e728cdf09dc98c15c9d15d4c4ac88fd6a4b -r 2cb2972fefd17a35c79b2a8cfce793e7d41fef22 yt/utilities/file_handler.py
--- a/yt/utilities/file_handler.py
+++ b/yt/utilities/file_handler.py
@@ -14,6 +14,7 @@
#-----------------------------------------------------------------------------
from yt.utilities.on_demand_imports import _h5py as h5py
+from yt.utilities.on_demand_imports import _netCDF4 as netCDF4
from yt.utilities.on_demand_imports import NotAModule
def valid_hdf5_signature(fn):
@@ -99,13 +100,9 @@
def warn_netcdf(fn):
needs_netcdf = valid_netcdf_classic_signature(fn)
- if needs_netcdf:
- try:
- from netCDF4 import Dataset
- except ImportError:
- raise RuntimeError("This appears to be a netCDF file, "
- "but the python bindings for netCDF4 "
- "are not installed.")
+ if needs_netcdf and isinstance(netCDF4.Dataset, NotAModule):
+ raise RuntimeError("This appears to be a netCDF file, but the "
+ "python bindings for netCDF4 are not installed.")
class NetCDF4FileHandler(object):
diff -r 918f8e728cdf09dc98c15c9d15d4c4ac88fd6a4b -r 2cb2972fefd17a35c79b2a8cfce793e7d41fef22 yt/utilities/on_demand_imports.py
--- a/yt/utilities/on_demand_imports.py
+++ b/yt/utilities/on_demand_imports.py
@@ -30,6 +30,22 @@
def __call__(self, *args, **kwargs):
raise self.error
+class netCDF4_imports(object):
+ _name = "netCDF4"
+ _Dataset = None
+ @property
+ def Dataset(self):
+ if self._Dataset is None:
+ try:
+ from netCDF4.Dataset import Dataset
+ self.log
+ except ImportError:
+ Dataset = NotAModule(self._name)
+ self._Dataset = Dataset
+ return self._Dataset
+
+_netCDF4 = netCDF4_imports()
+
class astropy_imports(object):
_name = "astropy"
_pyfits = None
https://bitbucket.org/yt_analysis/yt/commits/d8f50f5632df/
Changeset: d8f50f5632df
Branch: yt
User: atmyers
Date: 2017-03-02 04:26:20+00:00
Summary: Fix the netCDF4 on demand import.
Affected #: 1 file
diff -r 2cb2972fefd17a35c79b2a8cfce793e7d41fef22 -r d8f50f5632df1ed6174edd92b7d3a6aa3c0c1ce6 yt/utilities/on_demand_imports.py
--- a/yt/utilities/on_demand_imports.py
+++ b/yt/utilities/on_demand_imports.py
@@ -37,15 +37,16 @@
def Dataset(self):
if self._Dataset is None:
try:
- from netCDF4.Dataset import Dataset
- self.log
+ from netCDF4 import Dataset
except ImportError:
Dataset = NotAModule(self._name)
self._Dataset = Dataset
return self._Dataset
+
_netCDF4 = netCDF4_imports()
+
class astropy_imports(object):
_name = "astropy"
_pyfits = None
https://bitbucket.org/yt_analysis/yt/commits/c0c28ef312b4/
Changeset: c0c28ef312b4
Branch: yt
User: atmyers
Date: 2017-03-02 04:51:40+00:00
Summary: Don't make netCDF4 a top-level import
Affected #: 1 file
diff -r d8f50f5632df1ed6174edd92b7d3a6aa3c0c1ce6 -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 yt/utilities/file_handler.py
--- a/yt/utilities/file_handler.py
+++ b/yt/utilities/file_handler.py
@@ -14,7 +14,6 @@
#-----------------------------------------------------------------------------
from yt.utilities.on_demand_imports import _h5py as h5py
-from yt.utilities.on_demand_imports import _netCDF4 as netCDF4
from yt.utilities.on_demand_imports import NotAModule
def valid_hdf5_signature(fn):
@@ -100,6 +99,7 @@
def warn_netcdf(fn):
needs_netcdf = valid_netcdf_classic_signature(fn)
+ from yt.utilities.on_demand_imports import _netCDF4 as netCDF4
if needs_netcdf and isinstance(netCDF4.Dataset, NotAModule):
raise RuntimeError("This appears to be a netCDF file, but the "
"python bindings for netCDF4 are not installed.")
@@ -107,6 +107,6 @@
class NetCDF4FileHandler(object):
def __init__(self, filename):
- from netCDF4 import Dataset
- ds = Dataset(filename)
+ from yt.utilities.on_demand_imports import _netCDF4 as netCDF4
+ ds = netCDF4.Dataset(filename)
self.dataset = ds
https://bitbucket.org/yt_analysis/yt/commits/43cf42503977/
Changeset: 43cf42503977
Branch: yt
User: atmyers
Date: 2017-03-16 19:10:50+00:00
Summary: merging with tip.
Affected #: 46 files
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d doc/Makefile
--- a/doc/Makefile
+++ b/doc/Makefile
@@ -33,20 +33,24 @@
@echo " linkcheck to check all external links for integrity"
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
@echo " clean to remove the build directory"
- @echo " fullclean to remove the build directory and autogenerated api docs"
@echo " recipeclean to remove files produced by running the cookbook scripts"
clean:
-rm -rf $(BUILDDIR)/*
+ -rm -rf source/reference/api/yt.*
+ -rm -rf source/reference/api/modules.rst
-fullclean:
- -rm -rf $(BUILDDIR)/*
- -rm -rf source/reference/api/generated
+fullclean: clean
recipeclean:
-rm -rf _temp/*.done source/cookbook/_static/*
html:
+ifneq ($(READTHEDOCS),True)
+ SPHINX_APIDOC_OPTIONS=members,undoc-members,inherited-members,show-inheritance sphinx-apidoc \
+ -o source/reference/api/ \
+ -e ../yt ../yt/extern/* $(shell find ../yt -name "*tests*" -type d) ../yt/utilities/voropp*
+endif
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d doc/extensions/pythonscript_sphinxext.py
--- a/doc/extensions/pythonscript_sphinxext.py
+++ b/doc/extensions/pythonscript_sphinxext.py
@@ -1,4 +1,5 @@
import tempfile
+import time
import os
import glob
import shutil
@@ -37,12 +38,16 @@
f.write(content)
# Use sphinx logger?
+ uid = uuid.uuid4().hex[:8]
print("")
+ print(">> Contents of the script: %s" % uid)
print(content)
print("")
+ start = time.time()
subprocess.call(['python', 'temp.py'])
-
+ print(">> The execution of the script %s took %f s" %
+ (uid, time.time() - start))
text = ''
for im in sorted(glob.glob("*.png")):
text += get_image_tag(im, image_dir, image_rel_dir)
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d doc/source/analyzing/Particle_Trajectories.ipynb
--- a/doc/source/analyzing/Particle_Trajectories.ipynb
+++ b/doc/source/analyzing/Particle_Trajectories.ipynb
@@ -279,9 +279,9 @@
"source": [
"fig = plt.figure(figsize=(8.0, 8.0))\n",
"ax = fig.add_subplot(111, projection='3d')\n",
- "ax.plot(trajs[\"particle_position_x\"][100], trajs[\"particle_position_z\"][100], trajs[\"particle_position_z\"][100])\n",
- "ax.plot(trajs[\"particle_position_x\"][8], trajs[\"particle_position_z\"][8], trajs[\"particle_position_z\"][8])\n",
- "ax.plot(trajs[\"particle_position_x\"][25], trajs[\"particle_position_z\"][25], trajs[\"particle_position_z\"][25])"
+ "ax.plot(trajs[\"particle_position_x\"][100], trajs[\"particle_position_y\"][100], trajs[\"particle_position_z\"][100])\n",
+ "ax.plot(trajs[\"particle_position_x\"][8], trajs[\"particle_position_y\"][8], trajs[\"particle_position_z\"][8])\n",
+ "ax.plot(trajs[\"particle_position_x\"][25], trajs[\"particle_position_y\"][25], trajs[\"particle_position_z\"][25])"
]
},
{
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d doc/source/analyzing/analysis_modules/halo_catalogs.rst
--- a/doc/source/analyzing/analysis_modules/halo_catalogs.rst
+++ b/doc/source/analyzing/analysis_modules/halo_catalogs.rst
@@ -481,7 +481,9 @@
A :class:`~yt.analysis_modules.halo_analysis.halo_catalog.HaloCatalog`
saved to disk can be reloaded as a yt dataset with the
-standard call to ``yt.load``. Any side data, such as profiles, can be reloaded
+standard call to ``yt.load``. See :ref:`halocatalog` for a demonstration
+of loading and working only with the catalog.
+Any side data, such as profiles, can be reloaded
with a ``load_profiles`` callback and a call to
:func:`~yt.analysis_modules.halo_analysis.halo_catalog.HaloCatalog.load`.
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d doc/source/analyzing/analysis_modules/star_analysis.rst
--- a/doc/source/analyzing/analysis_modules/star_analysis.rst
+++ b/doc/source/analyzing/analysis_modules/star_analysis.rst
@@ -209,8 +209,8 @@
There are two ways to write out the data once the spectrum has been calculated.
The command ``write_out`` outputs two columns of data:
- 1. Wavelength :math:`(\text{\AA})`
- 2. Flux (Luminosity per unit wavelength :math:`(\mathrm{\rm{L}_\odot} / \text{\AA})` , where
+ 1. Wavelength (:math:`\text{Angstroms}`)
+ 2. Flux (Luminosity per unit wavelength :math:`(\mathrm{\rm{L}_\odot} / \text{Angstrom})` , where
:math:`\mathrm{\rm{L}_\odot} = 3.826 \cdot 10^{33}\, \mathrm{ergs / s}` ).
and can be called simply, specifying the output file:
@@ -225,7 +225,7 @@
distribution to. The default is 5200 Angstroms. This command outputs the data
in two columns:
- 1. Wavelength :math:`(\text{\AA})`
+ 1. Wavelength :math:`(\text{Angstroms})`
2. Relative flux normalized to the flux at *flux_norm*.
.. code-block:: python
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d doc/source/analyzing/analysis_modules/xray_emission_fields.rst
--- a/doc/source/analyzing/analysis_modules/xray_emission_fields.rst
+++ b/doc/source/analyzing/analysis_modules/xray_emission_fields.rst
@@ -1,3 +1,6 @@
.. _xray_emission_fields:
+X-ray Emission Fields
+=====================
+
.. notebook:: XrayEmissionFields.ipynb
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d doc/source/conf.py
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -11,9 +11,9 @@
# All configuration values have a default; values that are commented out
# serve to show the default.
-import sys, os, glob, re
-from sphinx.search import WordCollector
-from docutils.nodes import comment, title, Text, SkipNode
+import sys
+import os
+import glob
on_rtd = os.environ.get("READTHEDOCS", None) == "True"
@@ -30,7 +30,7 @@
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx',
- 'sphinx.ext.pngmath', 'sphinx.ext.viewcode',
+ 'sphinx.ext.mathjax', 'sphinx.ext.viewcode',
'sphinx.ext.napoleon', 'yt_cookbook', 'yt_colormaps',
'config_help']
@@ -228,13 +228,6 @@
# If true, show URL addresses after external links.
#latex_show_urls = False
-# Additional stuff for the LaTeX preamble.
-latex_preamble = r"""
-\renewcommand{\AA}{\text{\r{A}}} % Allow \AA in math mode
-\usepackage[utf8]{inputenc} % Allow unicode symbols in text
-\DeclareUnicodeCharacter {212B} {\AA} % Angstrom
-"""
-
# Documents to append as an appendix to all manuals.
#latex_appendices = []
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d doc/source/examining/loading_data.rst
--- a/doc/source/examining/loading_data.rst
+++ b/doc/source/examining/loading_data.rst
@@ -1458,7 +1458,8 @@
If you have access to both the halo catalog and the simulation snapshot from
the same redshift, additional analysis can be performed for each halo using
-:ref:`halo_catalog`.
+:ref:`halo_catalog`. The resulting product can be reloaded in a similar manner
+to the other halo catalogs shown here.
.. _rockstar:
@@ -1600,6 +1601,39 @@
# The halo mass
print(ad["FOF", "particle_mass"])
+.. _halocatalog:
+
+HaloCatalog
+^^^^^^^^^^^
+
+These are catalogs produced by the analysis discussed in :ref:`halo_catalog`.
+In the case where multiple files were produced, one need only provide the path
+to a single one of them. The field type for all fields is "halos". The fields
+available here are similar to other catalogs. Any additional
+:ref:`halo_catalog_quantities` will also be accessible as fields.
+
++-------------------+---------------------------+
+| HaloCatalog field | yt field name |
++===================+===========================+
+| halo id | particle_identifier |
++-------------------+---------------------------+
+| virial mass | particle_mass |
++-------------------+---------------------------+
+| virial radius | virial_radius |
++-------------------+---------------------------+
+| halo position | particle_position_(x,y,z) |
++-------------------+---------------------------+
+| halo velocity | particle_velocity_(x,y,z) |
++-------------------+---------------------------+
+
+.. code-block:: python
+
+ import yt
+ ds = yt.load("catalogs/catalog.0.h5")
+ ad = ds.all_data()
+ # The halo mass
+ print(ad["halos", "particle_mass"])
+
.. _loading-openpmd-data:
openPMD Data
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d doc/source/reference/api/api.rst
--- a/doc/source/reference/api/api.rst
+++ b/doc/source/reference/api/api.rst
@@ -10,7 +10,6 @@
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. autosummary::
- :toctree: generated/
~yt.visualization.plot_window.SlicePlot
~yt.visualization.plot_window.AxisAlignedSlicePlot
@@ -24,7 +23,6 @@
^^^^^^^^^^^^^^^^^^^^^^^^^
.. autosummary::
- :toctree: generated/
~yt.visualization.profile_plotter.ProfilePlot
~yt.visualization.profile_plotter.PhasePlot
@@ -34,7 +32,6 @@
^^^^^^^^^^^^^^
.. autosummary::
- :toctree: generated/
~yt.visualization.particle_plots.ParticleProjectionPlot
~yt.visualization.particle_plots.ParticlePhasePlot
@@ -44,7 +41,6 @@
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. autosummary::
- :toctree: generated/
~yt.visualization.fixed_resolution.FixedResolutionBuffer
~yt.visualization.fixed_resolution.ParticleImageBuffer
@@ -69,7 +65,6 @@
These will almost never need to be instantiated on their own.
.. autosummary::
- :toctree: generated/
~yt.data_objects.data_containers.YTDataContainer
~yt.data_objects.data_containers.YTSelectionContainer
@@ -85,7 +80,6 @@
geometric.
.. autosummary::
- :toctree: generated/
~yt.data_objects.selection_data_containers.YTPoint
~yt.data_objects.selection_data_containers.YTOrthoRay
@@ -108,7 +102,6 @@
expensive set of intermediate data.
.. autosummary::
- :toctree: generated/
~yt.data_objects.construction_data_containers.YTStreamline
~yt.data_objects.construction_data_containers.YTQuadTreeProj
@@ -124,7 +117,6 @@
datasets.
.. autosummary::
- :toctree: generated/
~yt.data_objects.time_series.DatasetSeries
~yt.data_objects.time_series.DatasetSeriesObject
@@ -138,7 +130,6 @@
These objects generate an "index" into multiresolution data.
.. autosummary::
- :toctree: generated/
~yt.geometry.geometry_handler.Index
~yt.geometry.grid_geometry_handler.GridIndex
@@ -152,7 +143,6 @@
These classes and functions enable yt's symbolic unit handling system.
.. autosummary::
- :toctree: generated/
yt.data_objects.static_output.Dataset.arr
yt.data_objects.static_output.Dataset.quan
@@ -173,13 +163,11 @@
---------
.. autosummary::
- :toctree: generated/
ARTIO
^^^^^
.. autosummary::
- :toctree: generated/
~yt.frontends.artio.data_structures.ARTIOIndex
~yt.frontends.artio.data_structures.ARTIOOctreeSubset
@@ -194,7 +182,6 @@
^^^^^^
.. autosummary::
- :toctree: generated/
~yt.frontends.athena.data_structures.AthenaGrid
~yt.frontends.athena.data_structures.AthenaHierarchy
@@ -206,7 +193,6 @@
^^^^^^
.. autosummary::
- :toctree: generated/
~yt.frontends.boxlib.data_structures.BoxlibGrid
~yt.frontends.boxlib.data_structures.BoxlibHierarchy
@@ -225,7 +211,6 @@
^^^^^^
.. autosummary::
- :toctree: generated/
~yt.frontends.chombo.data_structures.ChomboGrid
~yt.frontends.chombo.data_structures.ChomboHierarchy
@@ -239,7 +224,6 @@
^^^^
.. autosummary::
- :toctree: generated/
~yt.frontends.enzo.answer_testing_support.ShockTubeTest
~yt.frontends.enzo.data_structures.EnzoGrid
@@ -264,7 +248,6 @@
^^^^
.. autosummary::
- :toctree: generated/
~yt.frontends.fits.data_structures.FITSGrid
~yt.frontends.fits.data_structures.FITSHierarchy
@@ -276,7 +259,6 @@
^^^^^
.. autosummary::
- :toctree: generated/
~yt.frontends.flash.data_structures.FLASHGrid
~yt.frontends.flash.data_structures.FLASHHierarchy
@@ -288,7 +270,6 @@
^^^
.. autosummary::
- :toctree: generated/
~yt.frontends.gdf.data_structures.GDFGrid
~yt.frontends.gdf.data_structures.GDFHierarchy
@@ -299,7 +280,6 @@
^^^^^^^^^^^^^
.. autosummary::
- :toctree: generated/
~yt.frontends.halo_catalog.data_structures.HaloCatalogHDF5File
~yt.frontends.halo_catalog.data_structures.HaloCatalogDataset
@@ -319,7 +299,6 @@
^^^^
.. autosummary::
- :toctree: generated/
~yt.frontends.moab.data_structures.MoabHex8Hierarchy
~yt.frontends.moab.data_structures.MoabHex8Mesh
@@ -334,7 +313,6 @@
^^^^^^^
.. autosummary::
- :toctree: generated/
~yt.frontends.open_pmd.data_structures.OpenPMDGrid
~yt.frontends.open_pmd.data_structures.OpenPMDHierarchy
@@ -349,7 +327,6 @@
^^^^^^
.. autosummary::
- :toctree: generated/
~yt.frontends.ramses.data_structures.RAMSESDomainFile
~yt.frontends.ramses.data_structures.RAMSESDomainSubset
@@ -362,7 +339,6 @@
^^^^^^^^^^^^^^^^^^^^^^
.. autosummary::
- :toctree: generated/
~yt.frontends.gadget.data_structures.GadgetBinaryFile
~yt.frontends.gadget.data_structures.GadgetHDF5Dataset
@@ -384,7 +360,6 @@
^^^^^^
.. autosummary::
- :toctree: generated/
~yt.frontends.stream.data_structures.StreamDictFieldHandler
~yt.frontends.stream.data_structures.StreamGrid
@@ -410,7 +385,6 @@
^^^^^^
.. autosummary::
- :toctree: generated/
~yt.frontends.ytdata.data_structures.YTDataContainerDataset
~yt.frontends.ytdata.data_structures.YTSpatialPlotDataset
@@ -434,7 +408,6 @@
------------
.. autosummary::
- :toctree: generated/
~yt.convenience.load
~yt.convenience.simulation
@@ -457,7 +430,6 @@
.. autosummary::
- :toctree: generated/
~yt.data_objects.profiles.ProfileND
~yt.data_objects.profiles.Profile1D
@@ -475,7 +447,6 @@
of topologically disconnected structures, i.e., clump finding.
.. autosummary::
- :toctree: generated/
~yt.analysis_modules.level_sets.clump_handling.Clump
~yt.analysis_modules.level_sets.clump_handling.Clump.add_info_item
@@ -495,7 +466,6 @@
on cosmological halos. It is also the primary interface for halo finding.
.. autosummary::
- :toctree: generated/
~yt.analysis_modules.halo_analysis.halo_catalog.HaloCatalog
~yt.analysis_modules.halo_analysis.halo_finding_methods.HaloFindingMethod
@@ -526,7 +496,6 @@
to use the ``HaloCatalog``.
.. autosummary::
- :toctree: generated/
~yt.analysis_modules.halo_finding.halo_objects.FOFHaloFinder
~yt.analysis_modules.halo_finding.halo_objects.HOPHaloFinder
@@ -541,7 +510,6 @@
.. autosummary::
- :toctree: generated/
~yt.analysis_modules.two_point_functions.two_point_functions.TwoPointFunctions
~yt.analysis_modules.two_point_functions.two_point_functions.FcnSet
@@ -550,7 +518,6 @@
-----------
.. autosummary::
- :toctree: generated/
~yt.fields.field_info_container.FieldInfoContainer
~yt.fields.derived_field.DerivedField
@@ -564,7 +531,6 @@
---------------
.. autosummary::
- :toctree: generated/
~yt.fields.field_info_container.FieldInfoContainer.add_field
~yt.data_objects.static_output.Dataset.add_field
@@ -574,7 +540,6 @@
----------------
.. autosummary::
- :toctree: generated/
~yt.data_objects.particle_filters.add_particle_filter
~yt.data_objects.particle_filters.particle_filter
@@ -587,7 +552,6 @@
writing to bitmaps.
.. autosummary::
- :toctree: generated/
~yt.data_objects.image_array.ImageArray
@@ -601,7 +565,6 @@
.. autosummary::
- :toctree: generated/
~yt.analysis_modules.star_analysis.sfr_spectrum.StarFormationRate
~yt.analysis_modules.star_analysis.sfr_spectrum.SpectrumBuilder
@@ -611,7 +574,6 @@
.. autosummary::
- :toctree: generated/
~yt.analysis_modules.cosmological_observation.light_cone.light_cone.LightCone
~yt.analysis_modules.cosmological_observation.light_ray.light_ray.LightRay
@@ -619,7 +581,6 @@
Absorption and X-ray spectra and spectral lines:
.. autosummary::
- :toctree: generated/
~yt.analysis_modules.absorption_spectrum.absorption_spectrum.AbsorptionSpectrum
~yt.fields.xray_emission_fields.XrayEmissivityIntegrator
@@ -628,14 +589,12 @@
Absorption spectra fitting:
.. autosummary::
- :toctree: generated/
~yt.analysis_modules.absorption_spectrum.absorption_spectrum_fit.generate_total_fit
Sunrise exporting:
.. autosummary::
- :toctree: generated/
~yt.analysis_modules.sunrise_export.sunrise_exporter.export_to_sunrise
~yt.analysis_modules.sunrise_export.sunrise_exporter.export_to_sunrise_from_halolist
@@ -643,7 +602,6 @@
RADMC-3D exporting:
.. autosummary::
- :toctree: generated/
~yt.analysis_modules.radmc3d_export.RadMC3DInterface.RadMC3DLayer
~yt.analysis_modules.radmc3d_export.RadMC3DInterface.RadMC3DWriter
@@ -657,7 +615,6 @@
Scene infrastructure:
.. autosummary::
- :toctree: generated/
~yt.visualization.volume_rendering.volume_rendering.volume_render
~yt.visualization.volume_rendering.volume_rendering.create_scene
@@ -669,7 +626,6 @@
The different kinds of sources:
.. autosummary::
- :toctree: generated/
~yt.visualization.volume_rendering.render_source.RenderSource
~yt.visualization.volume_rendering.render_source.VolumeSource
@@ -683,7 +639,6 @@
The different kinds of transfer functions:
.. autosummary::
- :toctree: generated/
~yt.visualization.volume_rendering.transfer_functions.TransferFunction
~yt.visualization.volume_rendering.transfer_functions.ColorTransferFunction
@@ -695,7 +650,6 @@
The different kinds of lenses:
.. autosummary::
- :toctree: generated/
~yt.visualization.volume_rendering.lens.Lens
~yt.visualization.volume_rendering.lens.PlaneParallelLens
@@ -712,7 +666,6 @@
.. autosummary::
- :toctree: generated/
~yt.visualization.streamlines.Streamlines
@@ -725,7 +678,6 @@
.. autosummary::
- :toctree: generated/
~yt.visualization.image_writer.multi_image_composite
~yt.visualization.image_writer.write_bitmap
@@ -740,7 +692,6 @@
particularly with complicated layouts.
.. autosummary::
- :toctree: generated/
~yt.visualization.eps_writer.DualEPS
~yt.visualization.eps_writer.single_plot
@@ -757,7 +708,6 @@
.. autosummary::
- :toctree: generated/
~yt.data_objects.derived_quantities.DerivedQuantity
~yt.data_objects.derived_quantities.DerivedQuantityCollection
@@ -783,7 +733,6 @@
See also :ref:`callbacks`.
.. autosummary::
- :toctree: generated/
~yt.visualization.plot_window.PWViewerMPL.annotate_clear
~yt.visualization.plot_modifications.ArrowCallback
@@ -817,7 +766,6 @@
See also :ref:`colormaps`.
.. autosummary::
- :toctree: generated/
~yt.visualization.color_maps.add_cmap
~yt.visualization.color_maps.make_colormap
@@ -828,7 +776,6 @@
.. autosummary::
- :toctree: generated/
~yt.convenience.load
~yt.frontends.ytdata.utilities.save_as_dataset
@@ -864,7 +811,6 @@
.. autosummary::
- :toctree: generated/
~yt.utilities.math_utils.periodic_position
~yt.utilities.math_utils.periodic_dist
@@ -899,7 +845,6 @@
.. autosummary::
- :toctree: generated/
~yt.config.YTConfigParser
~yt.utilities.parameter_file_storage.ParameterFileStore
@@ -913,7 +858,6 @@
--------------------
.. autosummary::
- :toctree: generated/
~yt.utilities.cosmology.Cosmology
~yt.utilities.cosmology.Cosmology.hubble_distance
@@ -937,7 +881,6 @@
The first set of functions are all provided by NumPy.
.. autosummary::
- :toctree: generated/
~yt.testing.assert_array_equal
~yt.testing.assert_almost_equal
@@ -953,7 +896,6 @@
These are yt-provided functions:
.. autosummary::
- :toctree: generated/
~yt.testing.assert_rel_equal
~yt.testing.amrspace
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d yt/analysis_modules/halo_analysis/halo_catalog.py
--- a/yt/analysis_modules/halo_analysis/halo_catalog.py
+++ b/yt/analysis_modules/halo_analysis/halo_catalog.py
@@ -72,11 +72,11 @@
--------
>>> # create profiles or overdensity vs. radius for each halo and save to disk
- >>> from yt.mods import *
+ >>> import yt
>>> from yt.analysis_modules.halo_analysis.api import *
- >>> data_ds = load("DD0064/DD0064")
- >>> halos_ds = load("rockstar_halos/halos_64.0.bin",
- ... output_dir="halo_catalogs/catalog_0064")
+ >>> data_ds = yt.load("DD0064/DD0064")
+ >>> halos_ds = yt.load("rockstar_halos/halos_64.0.bin",
+ ... output_dir="halo_catalogs/catalog_0064")
>>> hc = HaloCatalog(data_ds=data_ds, halos_ds=halos_ds)
>>> # filter out halos with mass < 1e13 Msun
>>> hc.add_filter("quantity_value", "particle_mass", ">", 1e13, "Msun")
@@ -91,7 +91,7 @@
>>> hc.create()
>>> # load in the saved halo catalog and all the profile data
- >>> halos_ds = load("halo_catalogs/catalog_0064/catalog_0064.0.h5")
+ >>> halos_ds = yt.load("halo_catalogs/catalog_0064/catalog_0064.0.h5")
>>> hc = HaloCatalog(halos_ds=halos_ds,
output_dir="halo_catalogs/catalog_0064")
>>> hc.add_callback("load_profiles", output_dir="profiles")
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d yt/analysis_modules/halo_finding/halo_objects.py
--- a/yt/analysis_modules/halo_finding/halo_objects.py
+++ b/yt/analysis_modules/halo_finding/halo_objects.py
@@ -708,12 +708,12 @@
-------
tuple : (cm, mag_A, mag_B, mag_C, e0_vector, tilt)
The 6-tuple has in order:
- #. The center of mass as an array.
- #. mag_A as a float.
- #. mag_B as a float.
- #. mag_C as a float.
- #. e0_vector as an array.
- #. tilt as a float.
+ #. The center of mass as an array.
+ #. mag_A as a float.
+ #. mag_B as a float.
+ #. mag_C as a float.
+ #. e0_vector as an array.
+ #. tilt as a float.
Examples
--------
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d yt/analysis_modules/halo_mass_function/halo_mass_function.py
--- a/yt/analysis_modules/halo_mass_function/halo_mass_function.py
+++ b/yt/analysis_modules/halo_mass_function/halo_mass_function.py
@@ -327,22 +327,22 @@
def sigmaM(self):
"""
- Written by BWO, 2006 (updated 25 January 2007).
- Converted to Python by Stephen Skory December 2009.
+ Written by BWO, 2006 (updated 25 January 2007).
+ Converted to Python by Stephen Skory December 2009.
- This routine takes in cosmological parameters and creates a file (array) with
- sigma(M) in it, which is necessary for various press-schechter type
- stuff. In principle one can calculate it ahead of time, but it's far,
- far faster in the long run to calculate your sigma(M) ahead of time.
+ This routine takes in cosmological parameters and creates a file (array) with
+ sigma(M) in it, which is necessary for various press-schechter type
+ stuff. In principle one can calculate it ahead of time, but it's far,
+ far faster in the long run to calculate your sigma(M) ahead of time.
- Inputs: cosmology, user must set parameters
+ Inputs: cosmology, user must set parameters
- Outputs: four columns of data containing the following information:
+ Outputs: four columns of data containing the following information:
- 1) mass (Msolar/h)
- 2) sigma (normalized) using Msun/h as the input
-
- The arrays output are used later.
+ 1) mass (Msolar/h)
+ 2) sigma (normalized) using Msun/h as the input
+
+ The arrays output are used later.
"""
# Set up the transfer function object.
@@ -446,13 +446,12 @@
def sigma_squared_of_R(self, R):
"""
- /* calculates sigma^2(R). This is the routine where the magic happens (or
- whatever it is that we do here). Integrates the sigma_squared_integrand
- parameter from R to infinity. Calls GSL (gnu scientific library) to do
- the actual integration.
+ calculates sigma^2(R). This is the routine where the magic happens (or
+ whatever it is that we do here). Integrates the sigma_squared_integrand
+ parameter from R to infinity. Calls GSL (gnu scientific library) to do
+ the actual integration.
- Note that R is in h^-1 Mpc (comoving)
- */
+ Note that R is in h^-1 Mpc (comoving)
"""
self.R = R
result = integrate_inf(self.sigma_squared_integrand)
@@ -463,7 +462,7 @@
def sigma_squared_integrand(self, k):
"""
- /* integrand for integral to get sigma^2(R). */
+ integrand for integral to get sigma^2(R).
"""
Rcom = self.R; # this is R in comoving Mpc/h
@@ -474,7 +473,7 @@
def PofK(self, k):
"""
- /* returns power spectrum as a function of wavenumber k */
+ returns power spectrum as a function of wavenumber k
"""
thisPofK = np.power(k, self.primordial_index) * np.power( self.TofK(k), 2.0);
@@ -483,7 +482,7 @@
def TofK(self, k):
"""
- /* returns transfer function as a function of wavenumber k. */
+ returns transfer function as a function of wavenumber k.
"""
thisTofK = self.TF.TFmdm_onek_hmpc(k);
@@ -503,9 +502,9 @@
def multiplicityfunction(self, sigma):
"""
- /* Multiplicity function - this is where the various fitting functions/analytic
+ Multiplicity function - this is where the various fitting functions/analytic
theories are different. The various places where I found these fitting functions
- are listed below. */
+ are listed below.
"""
nu = self.delta_c0 / sigma;
@@ -552,7 +551,7 @@
def sigmaof_M_z(self, sigmabin, redshift):
"""
- /* sigma(M, z) */
+ sigma(M, z)
"""
thissigma = self.Dofz(redshift) * self.sigmaarray[sigmabin];
@@ -561,7 +560,7 @@
def Dofz(self, redshift):
"""
- /* Growth function */
+ Growth function
"""
thisDofz = self.gofz(redshift) / self.gofz(0.0) / (1.0+redshift);
@@ -571,7 +570,7 @@
def gofz(self, redshift):
"""
- /* g(z) - I don't think this has any other name*/
+ g(z) - I don't think this has any other name
"""
thisgofz = 2.5 * self.omega_matter_of_z(redshift) / \
@@ -585,7 +584,7 @@
def omega_matter_of_z(self,redshift):
"""
- /* Omega matter as a function of redshift */
+ Omega matter as a function of redshift
"""
thisomofz = self.omega_matter0 * math.pow( 1.0+redshift, 3.0) / \
@@ -595,7 +594,7 @@
def omega_lambda_of_z(self,redshift):
"""
- /* Omega lambda as a function of redshift */
+ Omega lambda as a function of redshift
"""
thisolofz = self.omega_lambda0 / math.pow( self.Eofz(redshift), 2.0 )
@@ -604,7 +603,7 @@
def Eofz(self, redshift):
"""
- /* E(z) - I don't think this has any other name */
+ E(z) - I don't think this has any other name
"""
thiseofz = math.sqrt( self.omega_lambda0 \
+ (1.0 - self.omega_lambda0 - self.omega_matter0)*math.pow( 1.0+redshift, 2.0) \
@@ -614,15 +613,15 @@
"""
-/* Fitting Formulae for CDM + Baryon + Massive Neutrino (MDM) cosmologies. */
-/* Daniel J. Eisenstein & Wayne Hu, Institute for Advanced Study */
+Fitting Formulae for CDM + Baryon + Massive Neutrino (MDM) cosmologies.
+Daniel J. Eisenstein & Wayne Hu, Institute for Advanced Study
-/* There are two primary routines here, one to set the cosmology, the
+There are two primary routines here, one to set the cosmology, the
other to construct the transfer function for a single wavenumber k.
You should call the former once (per cosmology) and the latter as
-many times as you want. */
+many times as you want.
-/* TFmdm_set_cosm() -- User passes all the cosmological parameters as
+ TFmdm_set_cosm() -- User passes all the cosmological parameters as
arguments; the routine sets up all of the scalar quantites needed
computation of the fitting formula. The input parameters are:
1) omega_matter -- Density of CDM, baryons, and massive neutrinos,
@@ -634,7 +633,7 @@
6) hubble -- Hubble constant, in units of 100 km/s/Mpc
7) redshift -- The redshift at which to evaluate */
-/* TFmdm_onek_mpc() -- User passes a single wavenumber, in units of Mpc^-1.
+ TFmdm_onek_mpc() -- User passes a single wavenumber, in units of Mpc^-1.
Routine returns the transfer function from the Eisenstein & Hu
fitting formula, based on the cosmology currently held in the
internal variables. The routine returns T_cb (the CDM+Baryon
@@ -642,29 +641,40 @@
Baryon+Neutrino density-weighted transfer function) is stored
in the global variable tf_cbnu. */
-/* We also supply TFmdm_onek_hmpc(), which is identical to the previous
- routine, but takes the wavenumber in units of h Mpc^-1. */
+We also supply TFmdm_onek_hmpc(), which is identical to the previous
+routine, but takes the wavenumber in units of h Mpc^-1.
-/* We hold the internal scalar quantities in global variables, so that
-the user may access them in an external program, via "extern" declarations. */
+We hold the internal scalar quantities in global variables, so that
+the user may access them in an external program, via "extern" declarations.
-/* Please note that all internal length scales are in Mpc, not h^-1 Mpc! */
+Please note that all internal length scales are in Mpc, not h^-1 Mpc!
"""
class TransferFunction(object):
"""
- /* This routine takes cosmological parameters and a redshift and sets up
- all the internal scalar quantities needed to compute the transfer function. */
- /* INPUT: omega_matter -- Density of CDM, baryons, and massive neutrinos,
- in units of the critical density. */
- /* omega_baryon -- Density of baryons, in units of critical. */
- /* omega_hdm -- Density of massive neutrinos, in units of critical */
- /* degen_hdm -- (Int) Number of degenerate massive neutrino species */
- /* omega_lambda -- Cosmological constant */
- /* hubble -- Hubble constant, in units of 100 km/s/Mpc */
- /* redshift -- The redshift at which to evaluate */
- /* OUTPUT: Returns 0 if all is well, 1 if a warning was issued. Otherwise,
- sets many global variables for use in TFmdm_onek_mpc() */
+ This routine takes cosmological parameters and a redshift and sets up
+ all the internal scalar quantities needed to compute the transfer function.
+
+ Parameters
+ ----------
+ omega_matter : float
+ Density of CDM, baryons, and massive neutrinos, in units
+ of the critical density.
+ omega_baryon : float
+ Density of baryons, in units of critical.
+ omega_hdm : float
+ Density of massive neutrinos, in units of critical
+ degen_hdm : integer
+ Number of degenerate massive neutrino species
+ omega_lambda : float
+ Cosmological constant
+ hubble : float
+ Hubble constant, in units of 100 km/s/Mpc
+ redshift : float
+ The redshift at which to evaluate
+
+ Returns 0 if all is well, 1 if a warning was issued. Otherwise,
+ sets many global variables for use in TFmdm_onek_mpc()
"""
def __init__(self, omega_matter, omega_baryon, omega_hdm,
degen_hdm, omega_lambda, hubble, redshift):
@@ -751,15 +761,23 @@
def TFmdm_onek_mpc(self, kk):
"""
- /* Given a wavenumber in Mpc^-1, return the transfer function for the
- cosmology held in the global variables. */
- /* Input: kk -- Wavenumber in Mpc^-1 */
- /* Output: The following are set as global variables:
- growth_cb -- the transfer function for density-weighted
- CDM + Baryon perturbations.
- growth_cbnu -- the transfer function for density-weighted
- CDM + Baryon + Massive Neutrino perturbations. */
- /* The function returns growth_cb */
+ Given a wavenumber in Mpc^-1, return the transfer function for the
+ cosmology held in the global variables.
+
+ Parameters
+ ----------
+ kk : float
+ Wavenumber in Mpc^-1
+
+ Returns
+ -------
+ growth_cb : float
+ the transfer function for density-weighted
+ CDM + Baryon perturbations. (returned and set as a global var)
+ growth_cbnu : float
+ the transfer function for density-weighted
+ CDM + Baryon + Massive Neutrino perturbations.
+ (set as a global var)
"""
self.qq = kk/self.omhh*SQR(self.theta_cmb);
@@ -794,15 +812,22 @@
def TFmdm_onek_hmpc(self, kk):
"""
- /* Given a wavenumber in h Mpc^-1, return the transfer function for the
- cosmology held in the global variables. */
- /* Input: kk -- Wavenumber in h Mpc^-1 */
- /* Output: The following are set as global variables:
- growth_cb -- the transfer function for density-weighted
- CDM + Baryon perturbations.
- growth_cbnu -- the transfer function for density-weighted
- CDM + Baryon + Massive Neutrino perturbations. */
- /* The function returns growth_cb */
+ Given a wavenumber in h Mpc^-1, return the transfer function for the
+ cosmology held in the global variables.
+
+ Parameters
+ ----------
+ kk : float
+ Wavenumber in h Mpc^-1
+
+ Returns
+ -------
+ growth_cb : float
+ the transfer function for density-weighted
+ CDM + Baryon perturbations. (return and set as a global var)
+ growth_cbnu : float
+ the transfer function for density-weighted
+ CDM + Baryon + Massive Neutrino perturbations.
"""
return self.TFmdm_onek_mpc(kk*self.hhubble);
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d yt/analysis_modules/sunyaev_zeldovich/projection.py
--- a/yt/analysis_modules/sunyaev_zeldovich/projection.py
+++ b/yt/analysis_modules/sunyaev_zeldovich/projection.py
@@ -78,7 +78,7 @@
Parameters
----------
- ds : Dataset
+ ds : ~yt.data_objects.static_output.Dataset
The dataset
freqs : array_like
The frequencies (in GHz) at which to compute the SZ spectral distortion.
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -1957,7 +1957,7 @@
multiple data objects.
This object is not designed to be created directly; it is designed to be
- created implicitly by using one of the bitwise operations (&, |, ^, ~) on
+ created implicitly by using one of the bitwise operations (&, \|, ^, \~) on
one or two other data objects. These correspond to the appropriate boolean
operations, and the resultant object can be nested.
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -194,6 +194,8 @@
self.data = weakref.WeakKeyDictionary()
def __get__(self, instance, owner):
+ if not instance:
+ return None
ret = self.data.get(instance, None)
try:
ret = ret.copy()
@@ -1056,7 +1058,7 @@
Parameters
----------
- input_array : iterable
+ input_array : Iterable
A tuple, list, or array to attach units to
input_units : String unit specification, unit symbol or astropy object
The units of the array. Powers must be specified using python syntax
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d yt/fields/species_fields.py
--- a/yt/fields/species_fields.py
+++ b/yt/fields/species_fields.py
@@ -126,8 +126,8 @@
"""
This takes a field registry, a fluid type, and two species names.
The first species name is one you wish to alias to an existing species
- name. For instance you might alias all "H_p0" fields to "H_" fields
- to indicate that "H_" fields are really just neutral hydrogen fields.
+ name. For instance you might alias all "H_p0" fields to "H\_" fields
+ to indicate that "H\_" fields are really just neutral hydrogen fields.
This function registers field aliases for the density, number_density,
mass, and fraction fields between the two species given in the arguments.
"""
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d yt/fields/xray_emission_fields.py
--- a/yt/fields/xray_emission_fields.py
+++ b/yt/fields/xray_emission_fields.py
@@ -72,7 +72,7 @@
Parameters
----------
- table_type: string
+ table_type : string
The type of data to use when computing the emissivity values. If "cloudy",
a file called "cloudy_emissivity.h5" is used, for photoionized
plasmas. If, "apec", a file called "apec_emissivity.h5" is used for
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d yt/frontends/chombo/io.py
--- a/yt/frontends/chombo/io.py
+++ b/yt/frontends/chombo/io.py
@@ -199,11 +199,9 @@
def parse_orion_sinks(fn):
'''
-
Orion sink particles are stored in text files. This function
is for figuring what particle fields are present based on the
- number of entries per line in the *.sink file.
-
+ number of entries per line in the \*.sink file.
'''
# Figure out the format of the particle file
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d yt/frontends/exodus_ii/data_structures.py
--- a/yt/frontends/exodus_ii/data_structures.py
+++ b/yt/frontends/exodus_ii/data_structures.py
@@ -181,18 +181,18 @@
def _parse_parameter_file(self):
self._handle = NetCDF4FileHandler(self.parameter_filename)
- self._vars = self._handle.dataset.variables
- self._read_glo_var()
- self.dimensionality = self._vars['coor_names'].shape[0]
- self.parameters['info_records'] = self._load_info_records()
- self.unique_identifier = self._get_unique_identifier()
- self.num_steps = len(self._vars['time_whole'])
- self.current_time = self._get_current_time()
- self.parameters['num_meshes'] = self._vars['eb_status'].shape[0]
- self.parameters['elem_names'] = self._get_elem_names()
- self.parameters['nod_names'] = self._get_nod_names()
- self.domain_left_edge, self.domain_right_edge = self._load_domain_edge()
- self.periodicity = (False, False, False)
+ with self._handle.open_ds() as ds:
+ self._read_glo_var()
+ self.dimensionality = ds.variables['coor_names'].shape[0]
+ self.parameters['info_records'] = self._load_info_records()
+ self.unique_identifier = self._get_unique_identifier()
+ self.num_steps = len(ds.variables['time_whole'])
+ self.current_time = self._get_current_time()
+ self.parameters['num_meshes'] = ds.variables['eb_status'].shape[0]
+ self.parameters['elem_names'] = self._get_elem_names()
+ self.parameters['nod_names'] = self._get_nod_names()
+ self.domain_left_edge, self.domain_right_edge = self._load_domain_edge()
+ self.periodicity = (False, False, False)
# These attributes don't really make sense for unstructured
# mesh data, but yt warns if they are not present, so we set
@@ -206,18 +206,18 @@
self.refine_by = 0
def _get_fluid_types(self):
- handle = NetCDF4FileHandler(self.parameter_filename).dataset
- fluid_types = ()
- i = 1
- while True:
- ftype = 'connect%d' % i
- if ftype in handle.variables:
- fluid_types += (ftype,)
- i += 1
- else:
- break
- fluid_types += ('all',)
- return fluid_types
+ with NetCDF4FileHandler(self.parameter_filename).open_ds() as ds:
+ fluid_types = ()
+ i = 1
+ while True:
+ ftype = 'connect%d' % i
+ if ftype in ds.variables:
+ fluid_types += (ftype,)
+ i += 1
+ else:
+ break
+ fluid_types += ('all',)
+ return fluid_types
def _read_glo_var(self):
"""
@@ -227,31 +227,34 @@
names = self._get_glo_names()
if not names:
return
- values = self._vars['vals_glo_var'][:].transpose()
- for name, value in zip(names, values):
- self.parameters[name] = value
+ with self._handle.open_ds() as ds:
+ values = ds.variables['vals_glo_var'][:].transpose()
+ for name, value in zip(names, values):
+ self.parameters[name] = value
def _load_info_records(self):
"""
Returns parsed version of the info_records.
"""
- try:
- return load_info_records(self._vars['info_records'])
- except (KeyError, TypeError):
- mylog.warning("No info_records found")
- return []
+ with self._handle.open_ds() as ds:
+ try:
+ return load_info_records(ds.variables['info_records'])
+ except (KeyError, TypeError):
+ mylog.warning("No info_records found")
+ return []
def _get_unique_identifier(self):
return self.parameter_filename
def _get_current_time(self):
- try:
- return self._vars['time_whole'][self.step]
- except IndexError:
- raise RuntimeError("Invalid step number, max is %d" \
- % (self.num_steps - 1))
- except (KeyError, TypeError):
- return 0.0
+ with self._handle.open_ds() as ds:
+ try:
+ return ds.variables['time_whole'][self.step]
+ except IndexError:
+ raise RuntimeError("Invalid step number, max is %d" \
+ % (self.num_steps - 1))
+ except (KeyError, TypeError):
+ return 0.0
def _get_glo_names(self):
"""
@@ -260,12 +263,13 @@
"""
- if "name_glo_var" not in self._vars:
- mylog.warning("name_glo_var not found")
- return []
- else:
- return [sanitize_string(v.tostring()) for v in
- self._vars["name_glo_var"]]
+ with self._handle.open_ds() as ds:
+ if "name_glo_var" not in ds.variables:
+ mylog.warning("name_glo_var not found")
+ return []
+ else:
+ return [sanitize_string(v.tostring()) for v in
+ ds.variables["name_glo_var"]]
def _get_elem_names(self):
"""
@@ -274,12 +278,13 @@
"""
- if "name_elem_var" not in self._vars:
- mylog.warning("name_elem_var not found")
- return []
- else:
- return [sanitize_string(v.tostring()) for v in
- self._vars["name_elem_var"]]
+ with self._handle.open_ds() as ds:
+ if "name_elem_var" not in ds.variables:
+ mylog.warning("name_elem_var not found")
+ return []
+ else:
+ return [sanitize_string(v.tostring()) for v in
+ ds.variables["name_elem_var"]]
def _get_nod_names(self):
"""
@@ -288,12 +293,13 @@
"""
- if "name_nod_var" not in self._vars:
- mylog.warning("name_nod_var not found")
- return []
- else:
- return [sanitize_string(v.tostring()) for v in
- self._vars["name_nod_var"]]
+ with self._handle.open_ds() as ds:
+ if "name_nod_var" not in ds.variables:
+ mylog.warning("name_nod_var not found")
+ return []
+ else:
+ return [sanitize_string(v.tostring()) for v in
+ ds.variables["name_nod_var"]]
def _read_coordinates(self):
"""
@@ -305,13 +311,14 @@
coord_axes = 'xyz'[:self.dimensionality]
mylog.info("Loading coordinates")
- if "coord" not in self._vars:
- coords = np.array([self._vars["coord%s" % ax][:]
- for ax in coord_axes]).transpose().copy()
- else:
- coords = np.array([coord for coord in
- self._vars["coord"][:]]).transpose().copy()
- return coords
+ with self._handle.open_ds() as ds:
+ if "coord" not in ds.variables:
+ coords = np.array([ds.variables["coord%s" % ax][:]
+ for ax in coord_axes]).transpose().copy()
+ else:
+ coords = np.array([coord for coord in
+ ds.variables["coord"][:]]).transpose().copy()
+ return coords
def _apply_displacement(self, coords, mesh_id):
@@ -325,13 +332,14 @@
offset = self.displacements[mesh_name][1]
coord_axes = 'xyz'[:self.dimensionality]
- for i, ax in enumerate(coord_axes):
- if "disp_%s" % ax in self.parameters['nod_names']:
- ind = self.parameters['nod_names'].index("disp_%s" % ax)
- disp = self._vars['vals_nod_var%d' % (ind + 1)][self.step]
- new_coords[:, i] = coords[:, i] + fac*disp + offset[i]
+ with self._handle.open_ds() as ds:
+ for i, ax in enumerate(coord_axes):
+ if "disp_%s" % ax in self.parameters['nod_names']:
+ ind = self.parameters['nod_names'].index("disp_%s" % ax)
+ disp = ds.variables['vals_nod_var%d' % (ind + 1)][self.step]
+ new_coords[:, i] = coords[:, i] + fac*disp + offset[i]
- return new_coords
+ return new_coords
def _read_connectivity(self):
"""
@@ -339,9 +347,10 @@
"""
mylog.info("Loading connectivity")
connectivity = []
- for i in range(self.parameters['num_meshes']):
- connectivity.append(self._vars["connect%d" % (i+1)][:].astype("i8"))
- return connectivity
+ with self._handle.open_ds() as ds:
+ for i in range(self.parameters['num_meshes']):
+ connectivity.append(ds.variables["connect%d" % (i+1)][:].astype("i8"))
+ return connectivity
def _load_domain_edge(self):
"""
@@ -374,7 +383,7 @@
for i in range(self.dimensionality, 3):
mi[i] = 0.0
ma[i] = 1.0
-
+
return mi, ma
@classmethod
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d yt/frontends/exodus_ii/io.py
--- a/yt/frontends/exodus_ii/io.py
+++ b/yt/frontends/exodus_ii/io.py
@@ -28,7 +28,7 @@
def __init__(self, ds):
self.filename = ds.index_filename
exodus_ii_handler = NetCDF4FileHandler(self.filename)
- self.handler = exodus_ii_handler.dataset
+ self.handler = exodus_ii_handler
super(IOHandlerExodusII, self).__init__(ds)
self.node_fields = ds._get_nod_names()
self.elem_fields = ds._get_elem_names()
@@ -46,46 +46,47 @@
# dict gets returned at the end and it should be flat, with selected
# data. Note that if you're reading grid data, you might need to
# special-case a grid selector object.
- chunks = list(chunks)
- rv = {}
- for field in fields:
- ftype, fname = field
- if ftype == "all":
- ci = np.concatenate([mesh.connectivity_indices - self._INDEX_OFFSET \
- for mesh in self.ds.index.mesh_union])
- else:
- ci = self.handler.variables[ftype][:] - self._INDEX_OFFSET
- num_elem = ci.shape[0]
- if fname in self.node_fields:
- nodes_per_element = ci.shape[1]
- rv[field] = np.zeros((num_elem, nodes_per_element), dtype="float64")
- elif fname in self.elem_fields:
- rv[field] = np.zeros(num_elem, dtype="float64")
- for field in fields:
- ind = 0
- ftype, fname = field
- if ftype == "all":
- mesh_ids = [mesh.mesh_id + 1 for mesh in self.ds.index.mesh_union]
- objs = [mesh for mesh in self.ds.index.mesh_union]
- else:
- mesh_ids = [int(ftype[-1])]
- chunk = chunks[mesh_ids[0] - 1]
- objs = chunk.objs
- if fname in self.node_fields:
- field_ind = self.node_fields.index(fname)
- fdata = self.handler.variables['vals_nod_var%d' % (field_ind + 1)]
- for g in objs:
- ci = g.connectivity_indices - self._INDEX_OFFSET
- data = fdata[self.ds.step][ci]
- ind += g.select(selector, data, rv[field], ind) # caches
- if fname in self.elem_fields:
- field_ind = self.elem_fields.index(fname)
- for g, mesh_id in zip(objs, mesh_ids):
- fdata = self.handler.variables['vals_elem_var%deb%s' %
- (field_ind + 1, mesh_id)][:]
- data = fdata[self.ds.step, :]
- ind += g.select(selector, data, rv[field], ind) # caches
- return rv
+ with self.handler.open_ds() as ds:
+ chunks = list(chunks)
+ rv = {}
+ for field in fields:
+ ftype, fname = field
+ if ftype == "all":
+ ci = np.concatenate([mesh.connectivity_indices - self._INDEX_OFFSET \
+ for mesh in self.ds.index.mesh_union])
+ else:
+ ci = ds.variables[ftype][:] - self._INDEX_OFFSET
+ num_elem = ci.shape[0]
+ if fname in self.node_fields:
+ nodes_per_element = ci.shape[1]
+ rv[field] = np.zeros((num_elem, nodes_per_element), dtype="float64")
+ elif fname in self.elem_fields:
+ rv[field] = np.zeros(num_elem, dtype="float64")
+ for field in fields:
+ ind = 0
+ ftype, fname = field
+ if ftype == "all":
+ mesh_ids = [mesh.mesh_id + 1 for mesh in self.ds.index.mesh_union]
+ objs = [mesh for mesh in self.ds.index.mesh_union]
+ else:
+ mesh_ids = [int(ftype[-1])]
+ chunk = chunks[mesh_ids[0] - 1]
+ objs = chunk.objs
+ if fname in self.node_fields:
+ field_ind = self.node_fields.index(fname)
+ fdata = ds.variables['vals_nod_var%d' % (field_ind + 1)]
+ for g in objs:
+ ci = g.connectivity_indices - self._INDEX_OFFSET
+ data = fdata[self.ds.step][ci]
+ ind += g.select(selector, data, rv[field], ind) # caches
+ if fname in self.elem_fields:
+ field_ind = self.elem_fields.index(fname)
+ for g, mesh_id in zip(objs, mesh_ids):
+ fdata = ds.variables['vals_elem_var%deb%s' %
+ (field_ind + 1, mesh_id)][:]
+ data = fdata[self.ds.step, :]
+ ind += g.select(selector, data, rv[field], ind) # caches
+ return rv
def _read_chunk_data(self, chunk, fields):
# This reads the data from a single chunk, and is only used for
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d yt/frontends/exodus_ii/simulation_handling.py
--- a/yt/frontends/exodus_ii/simulation_handling.py
+++ b/yt/frontends/exodus_ii/simulation_handling.py
@@ -14,7 +14,6 @@
from yt.data_objects.time_series import \
DatasetSeries, \
RegisteredSimulationTimeSeries
-from yt.frontends.exodus_ii.api import ExodusIIDataset
@add_metaclass(RegisteredSimulationTimeSeries)
@@ -45,6 +44,25 @@
self.all_outputs = self._check_for_outputs(potential_outputs)
self.all_outputs.sort(key=lambda obj: obj["filename"])
+ def __iter__(self):
+ for o in self._pre_outputs:
+ fn, step = o
+ ds = load(fn, step=step)
+ self._setup_function(ds)
+ yield ds
+
+ def __getitem__(self, key):
+ if isinstance(key, slice):
+ if isinstance(key.start, float):
+ return self.get_range(key.start, key.stop)
+ # This will return a sliced up object!
+ return DatasetSeries(self._pre_outputs[key], self.parallel)
+ o = self._pre_outputs[key]
+ fn, step = o
+ o = load(fn, step=step)
+ self._setup_function(o)
+ return o
+
def get_time_series(self, parallel=False, setup_function=None):
r"""
Instantiate a DatasetSeries object for a set of outputs.
@@ -55,15 +73,14 @@
Fine-level filtering is currently not implemented.
"""
-
+
all_outputs = self.all_outputs
ds_list = []
for output in all_outputs:
num_steps = output['num_steps']
fn = output['filename']
for step in range(num_steps):
- ds = ExodusIIDataset(fn, step=step)
- ds_list.append(ds)
+ ds_list.append((fn, step))
super(ExodusIISimulation, self).__init__(ds_list,
parallel=parallel,
setup_function=setup_function)
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d yt/frontends/fits/misc.py
--- a/yt/frontends/fits/misc.py
+++ b/yt/frontends/fits/misc.py
@@ -49,7 +49,7 @@
Parameters
----------
- ds : Dataset
+ ds : `~yt.data_objects.static_output.Dataset`
The FITS events file dataset to add the counts fields to.
ebounds : list of tuples
A list of tuples, one for each field, with (emin, emax) as the
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d yt/frontends/stream/data_structures.py
--- a/yt/frontends/stream/data_structures.py
+++ b/yt/frontends/stream/data_structures.py
@@ -903,7 +903,7 @@
Parameters
----------
- base_ds : Dataset
+ base_ds : `~yt.data_objects.static_output.Dataset`
This is any static output. It can also be a stream static output, for
instance as returned by load_uniform_data.
refinement_critera : list of :class:`~yt.utilities.flagging_methods.FlaggingMethod`
@@ -1167,7 +1167,7 @@
r"""Define the cell coordinates and cell neighbors of a hexahedral mesh
for a semistructured grid. Used to specify the connectivity and
coordinates parameters used in
- :function:`~yt.frontends.stream.data_structures.load_hexahedral_mesh`.
+ :func:`~yt.frontends.stream.data_structures.load_hexahedral_mesh`.
Parameters
----------
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d yt/frontends/ytdata/data_structures.py
--- a/yt/frontends/ytdata/data_structures.py
+++ b/yt/frontends/ytdata/data_structures.py
@@ -275,7 +275,7 @@
# since this is now particle-like data.
data_type = self.parameters.get("data_type")
container_type = self.parameters.get("container_type")
- ex_container_type = ["cutting", "proj", "ray", "slice"]
+ ex_container_type = ["cutting", "proj", "ray", "slice", "cut_region"]
if data_type == "yt_light_ray" or container_type in ex_container_type:
mylog.info("Returning an all_data data container.")
return self.all_data()
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d yt/frontends/ytdata/tests/test_outputs.py
--- a/yt/frontends/ytdata/tests/test_outputs.py
+++ b/yt/frontends/ytdata/tests/test_outputs.py
@@ -100,6 +100,12 @@
assert isinstance(sphere_ds, YTDataContainerDataset)
yield YTDataFieldTest(full_fn, ("grid", "density"))
yield YTDataFieldTest(full_fn, ("all", "particle_mass"))
+ cr = ds.cut_region(sphere, ['obj["temperature"] > 1e4'])
+ fn = cr.save_as_dataset(fields=["temperature"])
+ full_fn = os.path.join(tmpdir, fn)
+ cr_ds = load(full_fn)
+ assert isinstance(cr_ds, YTDataContainerDataset)
+ assert (cr["temperature"] == cr_ds.data["temperature"]).all()
os.chdir(curdir)
shutil.rmtree(tmpdir)
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d yt/frontends/ytdata/utilities.py
--- a/yt/frontends/ytdata/utilities.py
+++ b/yt/frontends/ytdata/utilities.py
@@ -239,4 +239,9 @@
val = np.array(val)
if val.dtype.kind == 'U':
val = val.astype('|S')
- fh.attrs[str(attr)] = val
+ try:
+ fh.attrs[str(attr)] = val
+ # This is raised if no HDF5 equivalent exists.
+ # In that case, save its string representation.
+ except TypeError:
+ fh.attrs[str(attr)] = str(val)
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d yt/funcs.py
--- a/yt/funcs.py
+++ b/yt/funcs.py
@@ -747,6 +747,7 @@
With a name provided by the user, this will decide how to
appropriately name the output file by the following rules:
+
1. if name is None, the filename will be the keyword plus
the suffix.
2. if name ends with "/", assume name is a directory and
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d yt/pmods.py
--- a/yt/pmods.py
+++ b/yt/pmods.py
@@ -79,13 +79,13 @@
# LLC, and shall not be used for advertising or product endorsement
# purposes.
-"""MPI_Import defines an mpi-aware import hook. The standard use of
-this module is as follows:
+"""``MPI_Import`` defines an mpi-aware import hook. The standard use of
+this module is as follows::
- from MPI_Import import mpi_import
- with mpi_import():
- import foo
- import bar
+ from MPI_Import import mpi_import
+ with mpi_import():
+ import foo
+ import bar
Within the with block, the standard import statement is replaced by an
MPI-aware import statement. The rank 0 process finds the location of
@@ -94,35 +94,35 @@
One CRITICAL detail: any code inside the mpi_import block must be
executed exactly the same on all of the MPI ranks. For example,
-consider this:
+consider this::
-def foo():
- import mpi
- if mpi.rank == 0:
- bar = someFunction()
- bar = mpi.bcast(bar,root=0)
+ def foo():
+ import mpi
+ if mpi.rank == 0:
+ bar = someFunction()
+ bar = mpi.bcast(bar,root=0)
+
+ def someFunction():
+ import os
+ return os.name
-def someFunction():
- import os
- return os.name
-
-If foo() is called during the import process, then things may go very
+If ``foo()`` is called during the import process, then things may go very
wrong. If the os module hasn't been loaded, then the rank 0 process
will find os and broadcast its location. Since there's no
-corresponding bcast for rank > 0, the other processes will receive
+corresponding bcast for ``rank > 0``, the other processes will receive
that broadcast instead of the broadcast for bar, resulting in
-undefined behavior. Similarly, if rank >0 process encounters an import
-that rank 0 does not encounter, that process will either hang waiting
+undefined behavior. Similarly, if ``rank > 0`` process encounters an import
+that ``rank`` 0 does not encounter, that process will either hang waiting
for the bcast, or it will receive an out-of-order bcast.
The import hook provides a way to test whether we're using this
importer, which can be used to disable rank-asymmetric behavior in a
-module import:
+module import::
-from yt.extern.six.moves import builtins
-hasattr(builtins.__import__,"mpi_import")
+ from yt.extern.six.moves import builtins
+ hasattr(builtins.__import__,"mpi_import")
-This evaluates to True only when we're in an mpi_import() context
+This evaluates to True only when we're in an ``mpi_import()`` context
manager.
There are some situations where rank-dependent code may be necessary.
@@ -130,66 +130,66 @@
tends to cause deadlocks when it is executed inside an mpi_imported
module. In that case, we provide a hook to execute a function after
the mpi_import hook has been replaced by the standard import hook.
-Here is an example showing the use of this feature:
-
-# encapsulate the rank-asymmetric code in a function
-def f():
- if mpi.rank == 0:
- doOneThing()
- else:
- doSomethingElse()
+Here is an example showing the use of this feature::
-# Either importer is None (standard import) or it's a reference to
-# the mpi_import object that owns the current importer.
-from yt.extern.six.moves import builtins
-importer = getattr(builtins.__import__,"mpi_import",None)
-if importer:
- importer.callAfterImport(f)
-else:
- # If we're using the standard import, then we'll execute the
- # code in f immediately
- f()
+ # encapsulate the rank-asymmetric code in a function
+ def f():
+ if mpi.rank == 0:
+ doOneThing()
+ else:
+ doSomethingElse()
-WARNING: the callAfterImport feature is not intended for casual use.
+ # Either importer is None (standard import) or it's a reference to
+ # the mpi_import object that owns the current importer.
+ from yt.extern.six.moves import builtins
+ importer = getattr(builtins.__import__,"mpi_import",None)
+ if importer:
+ importer.callAfterImport(f)
+ else:
+ # If we're using the standard import, then we'll execute the
+ # code in f immediately
+ f()
+
+WARNING: the ``callAfterImport`` feature is not intended for casual use.
Usually it will be sufficient (and preferable) to either remove the
rank-asymmetric code or explicitly move it outside of the 'with
mpi_import' block. callAfterImport is provided for the (hopefully
rare!) cases where this does not suffice.
-
-Some implementation details:
-
--This code is based on knee.py, which is an example of a pure Python
- hierarchical import that was included with Python 2.6 distributions.
-
--Python PEP 302 defines another way to override import by using finder
- and loader objects, which behave similarly to the imp.find_module and
- imp.load_module functions in __import_module__ below. Unfortunately,
- the implementation of PEP 302 is such that the path for the module
- has already been found by the time that the "finder" object is
- constructed, so it's not suitable for our purposes.
+Some implementation details
+---------------------------
--This module uses pyMPI. It was originally designed with mpi4py, and
- switching back to mpi4py requires only minor modifications. To
- quickly substitute mpi4py for pyMPI, the 'import mpi' line below can
- be replaced with the following wrapper:
+* This code is based on knee.py, which is an example of a pure Python
+ hierarchical import that was included with Python 2.6 distributions.
-from mpi4py import MPI
-class mpi(object):
- rank = MPI.COMM_WORLD.Get_rank()
- @staticmethod
- def bcast(obj=None,root=0):
- return MPI.COMM_WORLD.bcast(obj,root)
+* Python PEP 302 defines another way to override import by using finder
+ and loader objects, which behave similarly to the imp.find_module and
+ imp.load_module functions in __import_module__ below. Unfortunately,
+ the implementation of PEP 302 is such that the path for the module
+ has already been found by the time that the "finder" object is
+ constructed, so it's not suitable for our purposes.
--An alternate version of this module had rank 0 perform all of the
- lookups, and then broadcast the locations all-at-once when that
- process reached the end of the context manager. This was somewhat
- faster than the current implementation, but was prone to deadlock
- when loading modules containing MPI synchronization points.
+* This module uses pyMPI. It was originally designed with mpi4py, and
+ switching back to mpi4py requires only minor modifications. To
+ quickly substitute mpi4py for pyMPI, the 'import mpi' line below can
+ be replaced with the following wrapper::
--The 'level' parameter to the import hook is not handled correctly; we
- treat it as if it were -1 (try relative and absolute imports). For
- more information about the level parameter, run 'help(__import__)'.
+ from mpi4py import MPI
+ class mpi(object):
+ rank = MPI.COMM_WORLD.Get_rank()
+ @staticmethod
+ def bcast(obj=None,root=0):
+ return MPI.COMM_WORLD.bcast(obj,root)
+
+* An alternate version of this module had rank 0 perform all of the
+ lookups, and then broadcast the locations all-at-once when that
+ process reached the end of the context manager. This was somewhat
+ faster than the current implementation, but was prone to deadlock
+ when loading modules containing MPI synchronization points.
+
+* The ``level`` parameter to the import hook is not handled correctly; we
+ treat it as if it were -1 (try relative and absolute imports). For
+ more information about the level parameter, run ``help(__import__)``.
"""
from __future__ import print_function
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d yt/testing.py
--- a/yt/testing.py
+++ b/yt/testing.py
@@ -353,13 +353,13 @@
This dataset allows you to easily explore orientations and
handiness in VR and other renderings
- Parameters:
- -----------
+ Parameters
+ ----------
- N: integer
+ N : integer
The number of cells along each direction
- scale: float
+ scale : float
A spatial scale, the domain boundaries will be multiplied by scale to
test datasets that have spatially different scales (e.g. data in CGS units)
@@ -811,17 +811,17 @@
Examples
--------
- @check_results
- def my_func(ds):
- return ds.domain_width
+ >>> @check_results
+ ... def my_func(ds):
+ ... return ds.domain_width
- my_func(ds)
+ >>> my_func(ds)
- @check_results
- def field_checker(dd, field_name):
- return dd[field_name]
+ >>> @check_results
+ ... def field_checker(dd, field_name):
+ ... return dd[field_name]
- field_cheker(ds.all_data(), 'density', result_basename='density')
+ >>> field_checker(ds.all_data(), 'density', result_basename='density')
"""
def compute_results(func):
@@ -952,9 +952,12 @@
with the units of ``actual`` and ``desired``. If no units are attached,
assumes the same units as ``desired``. Defaults to zero.
+ Notes
+ -----
Also accepts additional keyword arguments accepted by
:func:`numpy.testing.assert_allclose`, see the documentation of that
function for details.
+
"""
# Create a copy to ensure this function does not alter input arrays
act = YTArray(actual)
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d yt/units/yt_array.py
--- a/yt/units/yt_array.py
+++ b/yt/units/yt_array.py
@@ -228,7 +228,7 @@
Parameters
----------
- input_array : iterable
+ input_array : Iterable
A tuple, list, or array to attach units to
input_units : String unit specification, unit symbol object, or astropy units
The units of the array. Powers must be specified using python
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d yt/utilities/exodusII_reader.py
--- a/yt/utilities/exodusII_reader.py
+++ /dev/null
@@ -1,51 +0,0 @@
-import string
-from itertools import takewhile
-from netCDF4 import Dataset
-import numpy as np
-from yt.config import ytcfg
-import os
-import warnings
-
-
-def sanitize_string(s):
- s = "".join(_ for _ in takewhile(lambda a: a in string.printable, s))
- return s
-
-
-def get_data(fn):
- warnings.warn("The yt.utilities.exodusII_reader module is deprecated "
- "and will be removed in a future release. "
- "Please use the normal yt.load() command to access "
- "your data instead.")
- try:
- f = Dataset(fn)
- except RuntimeError:
- f = Dataset(os.path.join(ytcfg.get("yt", "test_data_dir"), fn))
- fvars = f.variables
- # Is this correct?
- etypes = fvars["eb_status"][:]
- nelem = etypes.shape[0]
- varnames = [sanitize_string(v.tostring()) for v in
- fvars["name_elem_var"][:]]
- nodnames = [sanitize_string(v.tostring()) for v in
- fvars["name_nod_var"][:]]
- coord = np.array([fvars["coord%s" % ax][:]
- for ax in 'xyz']).transpose().copy()
- coords = []
- connects = []
- data = []
- for i in range(nelem):
- connects.append(fvars["connect%s" % (i+1)][:].astype("i8"))
- ci = connects[-1]
- coords.append(coord) # Same for all
- vals = {}
- for j, v in enumerate(varnames):
- values = fvars["vals_elem_var%seb%s" % (j+1, i+1)][:]
- vals['gas', v] = values.astype("f8")[-1, :]
- for j, v in enumerate(nodnames):
- # We want just for this set of nodes all the node variables
- # Use (ci - 1) to get these values
- values = fvars["vals_nod_var%s" % (j+1)][:]
- vals['gas', v] = values.astype("f8")[-1, ci - 1, ...]
- data.append(vals)
- return coords, connects, data
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d yt/utilities/file_handler.py
--- a/yt/utilities/file_handler.py
+++ b/yt/utilities/file_handler.py
@@ -15,6 +15,7 @@
from yt.utilities.on_demand_imports import _h5py as h5py
from yt.utilities.on_demand_imports import NotAModule
+from contextlib import contextmanager
def valid_hdf5_signature(fn):
signature = b'\x89HDF\r\n\x1a\n'
@@ -107,6 +108,11 @@
class NetCDF4FileHandler(object):
def __init__(self, filename):
+ self.filename = filename
+
+ @contextmanager
+ def open_ds(self):
from yt.utilities.on_demand_imports import _netCDF4 as netCDF4
- ds = netCDF4.Dataset(filename)
- self.dataset = ds
+ ds = netCDF4.Dataset(self.filename)
+ yield ds
+ ds.close()
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d yt/utilities/grid_data_format/writer.py
--- a/yt/utilities/grid_data_format/writer.py
+++ b/yt/utilities/grid_data_format/writer.py
@@ -50,11 +50,13 @@
dataset_units : dictionary, optional
A dictionary of (value, unit) tuples to set the default units
of the dataset. Keys can be:
- "length_unit"
- "time_unit"
- "mass_unit"
- "velocity_unit"
- "magnetic_unit"
+
+ * "length_unit"
+ * "time_unit"
+ * "mass_unit"
+ * "velocity_unit"
+ * "magnetic_unit"
+
If not specified, these will carry over from the parent
dataset.
particle_type_name : string, optional
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d yt/utilities/hierarchy_inspection.py
--- a/yt/utilities/hierarchy_inspection.py
+++ b/yt/utilities/hierarchy_inspection.py
@@ -12,7 +12,7 @@
Parameters
----------
- candidates : iterable
+ candidates : Iterable
An iterable object that is a collection of classes to find the lowest
subclass of.
diff -r c0c28ef312b43db21ba36b0d2aa5e0023dbeaef6 -r 43cf425039774e6d6997022a8eaeef86358df77d yt/utilities/math_utils.py
--- a/yt/utilities/math_utils.py
+++ b/yt/utilities/math_utils.py
@@ -61,7 +61,7 @@
pos : array
An array of floats.
- ds : Dataset
+ ds : ~yt.data_objects.static_output.Dataset
A simulation static output.
Examples
@@ -852,7 +852,7 @@
Parameters
----------
maxr : scalar
- should be max(|x|, |y|)
+ should be ``max(|x|, |y|)``
aspect : scalar
The aspect ratio of width / height for the projection.
This diff is so big that we needed to truncate the remainder.
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
More information about the yt-svn
mailing list