[yt-svn] commit/yt: MatthewTurk: Merged in ngoldbaum/yt/yt-3.0 (pull request #972)

commits-noreply at bitbucket.org
Tue Jun 24 12:29:33 PDT 2014


1 new commit in yt:

https://bitbucket.org/yt_analysis/yt/commits/0e28105c1c3b/
Changeset:   0e28105c1c3b
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-06-24 21:29:25
Summary:     Merged in ngoldbaum/yt/yt-3.0 (pull request #972)

Updates for the API docs
Affected #:  14 files

diff -r 51e5fd53e4dcc5927fee55cb2ec0e1517bb50f63 -r 0e28105c1c3bbaeb901e4a1ad14b72577f6ba8c7 doc/source/reference/api/api.rst
--- a/doc/source/reference/api/api.rst
+++ b/doc/source/reference/api/api.rst
@@ -11,10 +11,31 @@
    :toctree: generated/
 
    ~yt.visualization.plot_window.SlicePlot
+   ~yt.visualization.plot_window.AxisAlignedSlicePlot
    ~yt.visualization.plot_window.OffAxisSlicePlot
    ~yt.visualization.plot_window.ProjectionPlot
    ~yt.visualization.plot_window.OffAxisProjectionPlot
 
+ProfilePlot and PhasePlot
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. autosummary::
+   :toctree: generated/
+
+   ~yt.visualization.profile_plotter.ProfilePlot
+   ~yt.visualization.profile_plotter.PhasePlot
+
+Fixed Resolution Pixelization
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. autosummary::
+   :toctree: generated/
+
+   ~yt.visualization.fixed_resolution.FixedResolutionBuffer
+   ~yt.visualization.fixed_resolution.CylindricalFixedResolutionBuffer
+   ~yt.visualization.fixed_resolution.ObliqueFixedResolutionBuffer
+   ~yt.visualization.fixed_resolution.OffAxisProjectionFixedResolutionBuffer
+
 Data Sources
 ------------
 
@@ -91,6 +112,33 @@
    ~yt.data_objects.time_series.TimeSeriesQuantitiesContainer
    ~yt.data_objects.time_series.AnalysisTaskProxy
 
+Geometry Handlers
+-----------------
+
+These objects generate an "index" into multiresolution data.
+
+.. autosummary::
+   :toctree: generated/
+
+   ~yt.geometry.geometry_handler.Index
+   ~yt.geometry.grid_geometry_handler.GridIndex
+   ~yt.geometry.oct_geometry_handler.OctreeIndex
+   ~yt.geometry.particle_geometry_handler.ParticleIndex
+   ~yt.geometry.unstructured_mesh_handler.UnstructuredIndex
+
+Units
+-----
+
+These classes enable yt's symbolic unit handling system.
+
+.. autosummary::
+   :toctree: generated/
+
+   ~yt.units.unit_object.Unit
+   ~yt.units.unit_registry.UnitRegistry
+   ~yt.units.yt_array.YTArray
+   ~yt.units.yt_array.YTQuantity
+
 Frontends
 ---------
 
@@ -145,6 +193,22 @@
    ~yt.frontends.boxlib.io.IOHandlerNyx
    ~yt.frontends.boxlib.io.IOHandlerOrion
 
+Chombo
+^^^^^^
+
+.. autosummary::
+   :toctree: generated/
+
+   ~yt.frontends.chombo.data_structures.ChomboGrid
+   ~yt.frontends.chombo.data_structures.ChomboHierarchy
+   ~yt.frontends.chombo.data_structures.ChomboDataset
+   ~yt.frontends.chombo.data_structures.Orion2Hierarchy
+   ~yt.frontends.chombo.data_structures.Orion2Dataset
+   ~yt.frontends.chombo.io.IOHandlerChomboHDF5
+   ~yt.frontends.chombo.io.IOHandlerChombo2DHDF5
+   ~yt.frontends.chombo.io.IOHandlerChombo1DHDF5
+   ~yt.frontends.chombo.io.IOHandlerOrion2HDF5
+
 Enzo
 ^^^^
 
@@ -194,6 +258,17 @@
    ~yt.frontends.flash.fields.FLASHFieldInfo
    ~yt.frontends.flash.io.IOHandlerFLASH
 
+GDF
+^^^
+
+.. autosummary::
+   :toctree: generated/
+
+   ~yt.frontends.gdf.data_structures.GDFGrid
+   ~yt.frontends.gdf.data_structures.GDFHierarchy
+   ~yt.frontends.gdf.data_structures.GDFDataset
+   ~yt.frontends.gdf.io.IOHandlerGDFHDF5
+
 Halo Catalogs
 ^^^^^^^^^^^^^
 
@@ -281,6 +356,19 @@
    ~yt.frontends.stream.io.IOHandlerStreamOctree
    ~yt.frontends.stream.io.StreamParticleIOHandler
 
+Loading Data
+------------
+
+.. autosummary::
+   :toctree: generated/
+
+   yt.convenience.load
+   yt.convenience.simulation
+   yt.frontends.stream.data_structures.load_uniform_grid
+   yt.frontends.stream.data_structures.load_amr_grids
+   yt.frontends.stream.data_structures.load_particles
+   yt.frontends.stream.data_structures.load_hexahedral_mesh
+
 Derived Datatypes
 -----------------
 
@@ -288,16 +376,19 @@
 ^^^^^^^^^^^^^^^^^^^^^^^
 
 These types are used to sum data up and either return that sum or return an
-average.  Typically they are more easily used through the
-`yt.visualization.plot_collection` interface.
+average.  Typically they are more easily used through the ``ProfilePlot`` and
+``PhasePlot`` interfaces. We also provide the ``create_profile`` function
+to create these objects in a uniform manner.
 
 
 .. autosummary::
    :toctree: generated/
 
-   ~yt.data_objects.profiles.BinnedProfile1D
-   ~yt.data_objects.profiles.BinnedProfile2D
-   ~yt.data_objects.profiles.BinnedProfile3D
+   ~yt.data_objects.profiles.ProfileND
+   ~yt.data_objects.profiles.Profile1D
+   ~yt.data_objects.profiles.Profile2D
+   ~yt.data_objects.profiles.Profile3D
+   ~yt.data_objects.profiles.create_profile
 
 Halo Finding and Particle Functions
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -415,8 +506,6 @@
    :toctree: generated/
 
    ~yt.data_objects.image_array.ImageArray
-   ~yt.data_objects.image_array.ImageArray.write_png
-   ~yt.data_objects.image_array.ImageArray.write_hdf5
 
 Extension Types
 ---------------

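The ``create_profile`` and ``ProfilePlot`` entries added to the listing above work
together. A minimal sketch of that workflow, using the import paths from the listing
and the same sample dataset that appears in the YTArray examples later in this
changeset (the fields and weight field are illustrative):

    import yt
    from yt.data_objects.profiles import create_profile
    from yt.visualization.profile_plotter import ProfilePlot

    # Bin gas temperature against density over the whole domain,
    # weighted by cell mass.
    ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
    ad = ds.all_data()
    profile = create_profile(ad, ["density"], ["temperature"],
                             weight_field="cell_mass")

    # ProfilePlot can be built directly from an existing profile object.
    plot = ProfilePlot.from_profiles(profile)
    plot.save()
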
diff -r 51e5fd53e4dcc5927fee55cb2ec0e1517bb50f63 -r 0e28105c1c3bbaeb901e4a1ad14b72577f6ba8c7 yt/analysis_modules/halo_finding/rockstar/rockstar.py
--- a/yt/analysis_modules/halo_finding/rockstar/rockstar.py
+++ b/yt/analysis_modules/halo_finding/rockstar/rockstar.py
@@ -170,29 +170,26 @@
     To use the script below you must run it using MPI:
     mpirun -np 4 python run_rockstar.py --parallel
 
-    run_rockstar.py:
+    >>> from yt.mods import *
+    >>> from yt.analysis_modules.halo_finding.rockstar.api import \
+    ... RockstarHaloFinder
+    >>> from yt.data_objects.particle_filters import \
+    ... particle_filter
 
-    from yt.mods import *
+    >>> # create a particle filter to remove star particles
+    >>> @particle_filter("dark_matter", requires=["creation_time"])
+    ... def _dm_filter(pfilter, data):
+    ...     return data["creation_time"] <= 0.0
 
-    from yt.analysis_modules.halo_finding.rockstar.api import \
-        RockstarHaloFinder
-    from yt.data_objects.particle_filters import \
-        particle_filter
+    >>> def setup_pf(pf):
+    ...     pf.add_particle_filter("dark_matter")
 
-    # create a particle filter to remove star particles
-    @particle_filter("dark_matter", requires=["creation_time"])
-    def _dm_filter(pfilter, data):
-        return data["creation_time"] <= 0.0
+    >>> es = simulation("enzo_tiny_cosmology/32Mpc_32.enzo", "Enzo")
+    >>> es.get_time_series(setup_function=setup_pf, redshift_data=False)
 
-    def setup_pf(pf):
-        pf.add_particle_filter("dark_matter")
-
-    es = simulation("enzo_tiny_cosmology/32Mpc_32.enzo", "Enzo")
-    es.get_time_series(setup_function=setup_pf, redshift_data=False)
-
-    rh = RockstarHaloFinder(es, num_readers=1, num_writers=2,
-                            particle_type="dark_matter")
-    rh.run()
+    >>> rh = RockstarHaloFinder(es, num_readers=1, num_writers=2,
+    ...                         particle_type="dark_matter")
+    >>> rh.run()
 
     """
     def __init__(self, ts, num_readers = 1, num_writers = None,

diff -r 51e5fd53e4dcc5927fee55cb2ec0e1517bb50f63 -r 0e28105c1c3bbaeb901e4a1ad14b72577f6ba8c7 yt/data_objects/derived_quantities.py
--- a/yt/data_objects/derived_quantities.py
+++ b/yt/data_objects/derived_quantities.py
@@ -50,6 +50,7 @@
         return
 
     def __call__(self, *args, **kwargs):
+        """Calculate results for the derived quantity"""
         self.count_values(*args, **kwargs)
         chunks = self.data_source.chunks([], chunking_style="io")
         storage = {}

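The new docstring on ``__call__`` covers how a derived quantity is evaluated: the
data source is split into IO chunks and the per-chunk results are combined. A minimal
sketch of invoking one, assuming the yt-3.0 convention of reaching derived quantities
through a data object's ``quantities`` attribute:

    import yt

    ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
    ad = ds.all_data()

    # Each call below runs the __call__ documented above, chunking over ``ad``.
    print(ad.quantities.total_quantity("cell_mass"))
    print(ad.quantities.extrema("density"))
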
diff -r 51e5fd53e4dcc5927fee55cb2ec0e1517bb50f63 -r 0e28105c1c3bbaeb901e4a1ad14b72577f6ba8c7 yt/data_objects/profiles.py
--- a/yt/data_objects/profiles.py
+++ b/yt/data_objects/profiles.py
@@ -754,6 +754,7 @@
         self.weight_values = np.zeros(size, dtype="float64")
 
 class ProfileND(ParallelAnalysisInterface):
+    """The base class for N-dimensional profile objects"""
     def __init__(self, data_source, weight_field = None):
         self.data_source = data_source
         self.pf = data_source.pf
@@ -763,6 +764,14 @@
         ParallelAnalysisInterface.__init__(self, comm=data_source.comm)
 
     def add_fields(self, fields):
+        """Add fields to profile
+
+        Parameters
+        ----------
+        fields : list of field names
+            A list of fields to create profile histograms for
+        
+        """
         fields = ensure_list(fields)
         temp_storage = ProfileFieldAccumulator(len(fields), self.size)
         cfields = fields + list(self.bin_fields)
@@ -774,7 +783,7 @@
     def set_field_unit(self, field, new_unit):
         """Sets a new unit for the requested field
 
-        parameters
+        Parameters
         ----------
         field : string or field tuple
            The name of the field that is to be changed.
@@ -871,6 +880,28 @@
             return np.linspace(mi, ma, n+1)
 
 class Profile1D(ProfileND):
+    """An object that represents a 1D profile.
+
+    Parameters
+    ----------
+
+    data_source : AMR3DData object
+        The data object to be profiled
+    x_field : string field name
+        The field to profile as a function of
+    x_n : integer
+        The number of bins along the x direction.
+    x_min : float
+        The minimum value of the x profile field.
+    x_max : float
+        The maximum value of the x profile field.
+    x_log : boolean
+        Controls whether or not the bins for the x field are evenly
+        spaced in linear (False) or log (True) space.
+    weight_field : string field name
+        The field to weight the profiled fields by.
+
+    """
     def __init__(self, data_source, x_field, x_n, x_min, x_max, x_log,
                  weight_field = None):
         super(Profile1D, self).__init__(data_source, weight_field)
@@ -911,6 +942,39 @@
         return ((self.x_bins[0], self.x_bins[-1]),)
 
 class Profile2D(ProfileND):
+    """An object that represents a 2D profile.
+
+    Parameters
+    ----------
+
+    data_source : AMR3DData object
+        The data object to be profiled
+    x_field : string field name
+        The field to profile as a function of along the x axis.
+    x_n : integer
+        The number of bins along the x direction.
+    x_min : float
+        The minimum value of the x profile field.
+    x_max : float
+        The maximum value of the x profile field.
+    x_log : boolean
+        Controls whether or not the bins for the x field are evenly
+        spaced in linear (False) or log (True) space.
+    y_field : string field name
+        The field to profile as a function of along the y axis
+    y_n : integer
+        The number of bins along the y direction.
+    y_min : float
+        The minimum value of the y profile field.
+    y_max : float
+        The maximum value of the y profile field.
+    y_log : boolean
+        Controls whether or not the bins for the y field are evenly
+        spaced in linear (False) or log (True) space.
+    weight_field : string field name
+        The field to weight the profiled fields by.
+
+    """
     def __init__(self, data_source,
                  x_field, x_n, x_min, x_max, x_log,
                  y_field, y_n, y_min, y_max, y_log,
@@ -975,6 +1039,50 @@
                 (self.y_bins[0], self.y_bins[-1]))
 
 class Profile3D(ProfileND):
+    """An object that represents a 3D profile.
+
+    Parameters
+    ----------
+
+    data_source : AMR3DData object
+        The data object to be profiled
+    x_field : string field name
+        The field to profile as a function of along the x axis.
+    x_n : integer
+        The number of bins along the x direction.
+    x_min : float
+        The minimum value of the x profile field.
+    x_max : float
+        The maximum value of the x profile field.
+    x_log : boolean
+        Controls whether or not the bins for the x field are evenly
+        spaced in linear (False) or log (True) space.
+    y_field : string field name
+        The field to profile as a function of along the y axis
+    y_n : integer
+        The number of bins along the y direction.
+    y_min : float
+        The minimum value of the y profile field.
+    y_max : float
+        The maximum value of the y profile field.
+    y_log : boolean
+        Controls whether or not the bins for the y field are evenly
+        spaced in linear (False) or log (True) space.
+    z_field : string field name
+        The field to profile as a function of along the z axis
+    z_n : integer
+        The number of bins along the z direction.
+    z_min : float
+        The minimum value of the z profile field.
+    z_max : float
+        The maximum value of the z profile field.
+    z_log : boolean
+        Controls whether or not the bins for the z field are evenly
+        spaced in linear (False) or log (True) space.
+    weight_field : string field name
+        The field to weight the profiled fields by.
+
+    """
     def __init__(self, data_source,
                  x_field, x_n, x_min, x_max, x_log,
                  y_field, y_n, y_min, y_max, y_log,

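The new ``Profile1D`` docstring spells out the constructor arguments. A minimal
sketch of using them directly (the bin bounds are illustrative values in g/cm**3,
and the ``prof[...]`` access assumes the usual ``ProfileND`` indexing;
``create_profile`` remains the more convenient entry point):

    import yt
    from yt.data_objects.profiles import Profile1D

    ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
    ad = ds.all_data()

    # (data_source, x_field, x_n, x_min, x_max, x_log, weight_field)
    prof = Profile1D(ad, "density", 64, 1e-28, 1e-24, True,
                     weight_field="cell_mass")

    # Histograms are accumulated on demand for the requested fields.
    prof.add_fields(["temperature"])
    print(prof["temperature"])
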
diff -r 51e5fd53e4dcc5927fee55cb2ec0e1517bb50f63 -r 0e28105c1c3bbaeb901e4a1ad14b72577f6ba8c7 yt/frontends/stream/data_structures.py
--- a/yt/frontends/stream/data_structures.py
+++ b/yt/frontends/stream/data_structures.py
@@ -519,16 +519,17 @@
 
     This should allow a uniform grid of data to be loaded directly into yt and
     analyzed as would any others.  This comes with several caveats:
-        * Units will be incorrect unless the unit system is explicitly
-          specified.
-        * Some functions may behave oddly, and parallelism will be
-          disappointing or non-existent in most cases.
-        * Particles may be difficult to integrate.
+
+    * Units will be incorrect unless the unit system is explicitly
+      specified.
+    * Some functions may behave oddly, and parallelism will be
+      disappointing or non-existent in most cases.
+    * Particles may be difficult to integrate.
 
     Particle fields are detected as one-dimensional fields. The number of
     particles is set by the "number_of_particles" key in data.
     
-Parameters
+    Parameters
     ----------
     data : dict
         This is a dict of numpy arrays or (numpy array, unit spec) tuples.
@@ -690,13 +691,16 @@
     This should allow a sequence of grids of varying resolution of data to be
     loaded directly into yt and analyzed as would any others.  This comes with
     several caveats:
-        * Units will be incorrect unless the unit system is explicitly specified.
-        * Some functions may behave oddly, and parallelism will be
-          disappointing or non-existent in most cases.
-        * Particles may be difficult to integrate.
-        * No consistency checks are performed on the index
-Parameters
+
+    * Units will be incorrect unless the unit system is explicitly specified.
+    * Some functions may behave oddly, and parallelism will be
+      disappointing or non-existent in most cases.
+    * Particles may be difficult to integrate.
+    * No consistency checks are performed on the index.
+
+    Parameters
     ----------
+
     grid_data : list of dicts
         This is a list of dicts. Each dict must have entries "left_edge",
         "right_edge", "dimensions", "level", and then any remaining entries are
@@ -751,6 +755,7 @@
     ...
     >>> units = dict(Density='g/cm**3')
     >>> pf = load_amr_grids(grid_data, [32, 32, 32], 1.0)
+
     """
 
     domain_dimensions = np.array(domain_dimensions)
@@ -971,10 +976,11 @@
 
     This should allow a collection of particle data to be loaded directly into
     yt and analyzed as would any others.  This comes with several caveats:
-        * Units will be incorrect unless the data has already been converted to
-          cgs.
-        * Some functions may behave oddly, and parallelism will be
-          disappointing or non-existent in most cases.
+
+    * Units will be incorrect unless the data has already been converted to
+      cgs.
+    * Some functions may behave oddly, and parallelism will be
+      disappointing or non-existent in most cases.
 
     This will initialize an Octree of data.  Note that fluid fields will not
     work yet, or possibly ever.
@@ -1142,11 +1148,12 @@
 
     This should allow a semistructured grid of data to be loaded directly into
     yt and analyzed as would any others.  This comes with several caveats:
-        * Units will be incorrect unless the data has already been converted to
-          cgs.
-        * Some functions may behave oddly, and parallelism will be
-          disappointing or non-existent in most cases.
-        * Particles may be difficult to integrate.
+
+    * Units will be incorrect unless the data has already been converted to
+      cgs.
+    * Some functions may behave oddly, and parallelism will be
+      disappointing or non-existent in most cases.
+    * Particles may be difficult to integrate.
 
     Particle fields are detected as one-dimensional fields. The number of particles
     is set by the "number_of_particles" key in data.

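The reflowed caveat lists above belong to the stream ``load_*`` functions that were
also added to the API listing. A minimal sketch of ``load_uniform_grid``, using the
(array, unit spec) tuple convention described in the docstring; the ``length_unit``,
``bbox``, and ``nprocs`` keywords are assumed from the yt-3.0 signature at the time
of this commit:

    import numpy as np
    from yt.frontends.stream.data_structures import load_uniform_grid

    # A 32^3 box of random densities on the unit cube.
    data = dict(density=(np.random.random((32, 32, 32)), "g/cm**3"))
    bbox = np.array([[0.0, 1.0], [0.0, 1.0], [0.0, 1.0]])

    ds = load_uniform_grid(data, [32, 32, 32], length_unit="Mpc",
                           bbox=bbox, nprocs=4)
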
diff -r 51e5fd53e4dcc5927fee55cb2ec0e1517bb50f63 -r 0e28105c1c3bbaeb901e4a1ad14b72577f6ba8c7 yt/geometry/geometry_handler.py
--- a/yt/geometry/geometry_handler.py
+++ b/yt/geometry/geometry_handler.py
@@ -38,6 +38,7 @@
 from yt.utilities.exceptions import YTFieldNotFound
 
 class Index(ParallelAnalysisInterface):
+    """The base index class"""
     _global_mesh = True
     _unsupported_objects = ()
     _index_properties = ()

diff -r 51e5fd53e4dcc5927fee55cb2ec0e1517bb50f63 -r 0e28105c1c3bbaeb901e4a1ad14b72577f6ba8c7 yt/geometry/grid_geometry_handler.py
--- a/yt/geometry/grid_geometry_handler.py
+++ b/yt/geometry/grid_geometry_handler.py
@@ -37,6 +37,7 @@
 from yt.data_objects.data_containers import data_object_registry
 
 class GridIndex(Index):
+    """The index class for patch and block AMR datasets."""
     float_type = 'float64'
     _preload_implemented = False
     _index_properties = ("grid_left_edge", "grid_right_edge",

diff -r 51e5fd53e4dcc5927fee55cb2ec0e1517bb50f63 -r 0e28105c1c3bbaeb901e4a1ad14b72577f6ba8c7 yt/geometry/oct_geometry_handler.py
--- a/yt/geometry/oct_geometry_handler.py
+++ b/yt/geometry/oct_geometry_handler.py
@@ -35,7 +35,7 @@
 from yt.data_objects.data_containers import data_object_registry
 
 class OctreeIndex(Index):
-
+    """The Index subclass for oct AMR datasets"""
     def _setup_geometry(self):
         mylog.debug("Initializing Octree Geometry Handler.")
         self._initialize_oct_handler()

diff -r 51e5fd53e4dcc5927fee55cb2ec0e1517bb50f63 -r 0e28105c1c3bbaeb901e4a1ad14b72577f6ba8c7 yt/geometry/particle_geometry_handler.py
--- a/yt/geometry/particle_geometry_handler.py
+++ b/yt/geometry/particle_geometry_handler.py
@@ -38,6 +38,7 @@
 from yt.data_objects.octree_subset import ParticleOctreeSubset
 
 class ParticleIndex(Index):
+    """The Index subclass for particle datasets"""
     _global_mesh = False
 
     def __init__(self, pf, dataset_type):

diff -r 51e5fd53e4dcc5927fee55cb2ec0e1517bb50f63 -r 0e28105c1c3bbaeb901e4a1ad14b72577f6ba8c7 yt/geometry/unstructured_mesh_handler.py
--- a/yt/geometry/unstructured_mesh_handler.py
+++ b/yt/geometry/unstructured_mesh_handler.py
@@ -22,6 +22,7 @@
 from yt.utilities.lib.mesh_utilities import smallest_fwidth
 
 class UnstructuredIndex(Index):
+    """The Index subclass for unstructured and hexahedral mesh datasets."""
     _global_mesh = False
     _unsupported_objects = ('proj', 'covering_grid', 'smoothed_covering_grid')
 

diff -r 51e5fd53e4dcc5927fee55cb2ec0e1517bb50f63 -r 0e28105c1c3bbaeb901e4a1ad14b72577f6ba8c7 yt/units/unit_registry.py
--- a/yt/units/unit_registry.py
+++ b/yt/units/unit_registry.py
@@ -22,6 +22,7 @@
     pass
 
 class UnitRegistry:
+    """A registry for unit symbols"""
     def __init__(self, add_default_symbols=True, lut=None):
         if lut:
             self.lut = lut

diff -r 51e5fd53e4dcc5927fee55cb2ec0e1517bb50f63 -r 0e28105c1c3bbaeb901e4a1ad14b72577f6ba8c7 yt/units/yt_array.py
--- a/yt/units/yt_array.py
+++ b/yt/units/yt_array.py
@@ -178,6 +178,61 @@
 
 class YTArray(np.ndarray):
     """
+    An ndarray subclass that attaches a symbolic unit object to the array data.
+
+    Parameters
+    ----------
+
+    input_array : ndarray or ndarray subclass
+        An array to attach units to
+    input_units : String unit specification, unit symbol object, or astropy units
+        The units of the array. Powers must be specified using Python
+        syntax (cm**3, not cm^3).
+    registry : A UnitRegistry object
+        The registry to create units from. If input_units is already associated
+        with a unit registry and this is specified, this will be used instead of
+        the registry associated with the unit object.
+    dtype : string or NumPy dtype object
+        The dtype of the array data.
+
+    Examples
+    --------
+
+    >>> from yt import YTArray
+    >>> a = YTArray([1,2,3], 'cm')
+    >>> b = YTArray([4,5,6], 'm')
+    >>> a + b
+    YTArray([ 401.,  502.,  603.]) cm
+    >>> b + a
+    YTArray([ 4.01,  5.02,  6.03]) m
+
+    NumPy ufuncs will pass through units where appropriate.
+
+    >>> import numpy as np
+    >>> a = YTArray(np.arange(8), 'g/cm**3')
+    >>> np.ones_like(a)
+    YTArray([1, 1, 1, 1, 1, 1, 1, 1]) g/cm**3
+
+    and strip them when it would be annoying to deal with them.
+
+    >>> np.log10(a)
+    array([       -inf,  0.        ,  0.30103   ,  0.47712125,  0.60205999,
+            0.69897   ,  0.77815125,  0.84509804])
+
+    YTArray is tightly integrated with yt datasets:
+
+    >>> import yt
+    >>> ds = yt.load('IsolatedGalaxy/galaxy0030/galaxy0030')
+    >>> a = ds.arr(np.ones(5), 'code_length')
+    >>> a.in_cgs()
+    YTArray([  3.08600000e+24,   3.08600000e+24,   3.08600000e+24,
+             3.08600000e+24,   3.08600000e+24]) cm
+
+    This is equivalent to:
+
+    >>> b = YTArray(np.ones(5), 'code_length', registry=ds.unit_registry)
+    >>> np.all(a == b)
+    True
 
     """
     _ufunc_registry = {
@@ -1006,10 +1061,68 @@
         return type(self)(ret, copy.deepcopy(self.units))
 
 class YTQuantity(YTArray):
-    def __new__(cls, input, input_units=None, registry=None, dtype=np.float64):
-        if not isinstance(input, (numeric_type, np.number, np.ndarray)):
+    """
+    A scalar associated with a unit.
+
+    Parameters
+    ----------
+
+    input_scalar : numeric scalar or size-1 array
+        The scalar to attach units to
+    input_units : String unit specification, unit symbol object, or astropy units
+        The units of the array. Powers must be specified using Python
+        syntax (cm**3, not cm^3).
+    registry : A UnitRegistry object
+        The registry to create units from. If input_units is already associated
+        with a unit registry and this is specified, this will be used instead of
+        the registry associated with the unit object.
+    dtype : string or NumPy dtype object
+        The dtype of the array data.
+
+    Examples
+    --------
+
+    >>> from yt import YTQuantity
+    >>> a = YTQuantity(1, 'cm')
+    >>> b = YTQuantity(2, 'm')
+    >>> a + b
+    201.0 cm
+    >>> b + a
+    2.01 m
+
+    NumPy ufuncs will pass through units where appropriate.
+
+    >>> import numpy as np
+    >>> a = YTQuantity(12, 'g/cm**3')
+    >>> np.ones_like(a)
+    1 g/cm**3
+
+    and strip them when it would be annoying to deal with them.
+
+    >>> print np.log10(a)
+    1.07918124605
+
+    YTQuantity is tightly integrated with yt datasets:
+
+    >>> import yt
+    >>> ds = yt.load('IsolatedGalaxy/galaxy0030/galaxy0030')
+    >>> a = ds.quan(5, 'code_length')
+    >>> a.in_cgs()
+    1.543e+25 cm
+
+    This is equivalent to:
+
+    >>> b = YTQuantity(5, 'code_length', registry=ds.unit_registry)
+    >>> np.all(a == b)
+    True
+
+    """
+    def __new__(cls, input_scalar, input_units=None, registry=None,
+                dtype=np.float64):
+        if not isinstance(input_scalar, (numeric_type, np.number, np.ndarray)):
             raise RuntimeError("YTQuantity values must be numeric")
-        ret = YTArray.__new__(cls, input, input_units, registry, dtype=dtype)
+        ret = YTArray.__new__(cls, input_scalar, input_units, registry,
+                              dtype=dtype)
         if ret.size > 1:
             raise RuntimeError("YTQuantity instances must be scalars")
         return ret

diff -r 51e5fd53e4dcc5927fee55cb2ec0e1517bb50f63 -r 0e28105c1c3bbaeb901e4a1ad14b72577f6ba8c7 yt/visualization/fixed_resolution.py
--- a/yt/visualization/fixed_resolution.py
+++ b/yt/visualization/fixed_resolution.py
@@ -340,7 +340,11 @@
         return rv
 
 class CylindricalFixedResolutionBuffer(FixedResolutionBuffer):
-
+    """
+    This object is a subclass of
+    :class:`yt.visualization.fixed_resolution.FixedResolutionBuffer`
+    that supports cylindrical data sources, using an annular pixel buffer.
+    """
     def __init__(self, data_source, radius, buff_size, antialias = True) :
 
         self.data_source = data_source
@@ -365,7 +369,8 @@
         
 class ObliqueFixedResolutionBuffer(FixedResolutionBuffer):
     """
-    This object is a subclass of :class:`yt.visualization.fixed_resolution.FixedResolutionBuffer`
+    This object is a subclass of
+    :class:`yt.visualization.fixed_resolution.FixedResolutionBuffer`
     that supports non-aligned input data objects, primarily cutting planes.
     """
     def __getitem__(self, item):
@@ -390,7 +395,12 @@
 
 
 class OffAxisProjectionFixedResolutionBuffer(FixedResolutionBuffer):
-    def __init__(self, data_source, bounds, buff_size, antialias = True,                                                         
+    """
+    This object is a subclass of
+    :class:`yt.visualization.fixed_resolution.FixedResolutionBuffer`
+    that supports off-axis projections.  This calls the volume renderer.
+    """
+    def __init__(self, data_source, bounds, buff_size, antialias = True,
                  periodic = False):
         self.data = {}
         FixedResolutionBuffer.__init__(self, data_source, bounds, buff_size, antialias, periodic)

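The docstrings added in this file all point back to ``FixedResolutionBuffer``. A
minimal sketch of building one from an axis-aligned slice (the dataset, bounds, and
buffer size are illustrative, and ``ds.slice`` is assumed from the yt-3.0 data-object
interface):

    import yt
    from yt.visualization.fixed_resolution import FixedResolutionBuffer

    ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")

    # Slice through the domain midplane along the z axis.
    slc = ds.slice(2, 0.5)

    # Pixelize the central half of the domain onto an 800x800 buffer;
    # bounds are (xmin, xmax, ymin, ymax) in code units.
    frb = FixedResolutionBuffer(slc, (0.25, 0.75, 0.25, 0.75), (800, 800))
    image = frb["density"]
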
diff -r 51e5fd53e4dcc5927fee55cb2ec0e1517bb50f63 -r 0e28105c1c3bbaeb901e4a1ad14b72577f6ba8c7 yt/visualization/plot_window.py
--- a/yt/visualization/plot_window.py
+++ b/yt/visualization/plot_window.py
@@ -388,6 +388,7 @@
         ----------
         deltas : Two-element sequence of floats, quantities, or (float, unit)
                  tuples.
+
             (delta_x, delta_y).  If a unit is not supplied the unit is assumed
             to be code_length.
 
@@ -524,6 +525,7 @@
         ----------
         width : float, array of floats, (float, unit) tuple, or tuple of
                 (float, unit) tuples.
+
              Width can have four different formats to support windows with
              variable x and y widths.  They are:
 
@@ -1714,19 +1716,19 @@
 
 
 def SlicePlot(pf, normal=None, fields=None, axis=None, *args, **kwargs):
-    r"""
-    A factory function for
+    r"""A factory function for
     :class:`yt.visualization.plot_window.AxisAlignedSlicePlot`
     and :class:`yt.visualization.plot_window.OffAxisSlicePlot` objects.  This
     essentially allows for a single entry point to both types of slice plots,
     the distinction being determined by the specified normal vector to the
     slice.
 
-        The returned plot object can be updated using one of the many helper
+    The returned plot object can be updated using one of the many helper
     functions defined in PlotWindow.
 
     Parameters
     ----------
+
     pf : :class:`yt.data_objects.api.Dataset`
         This is the parameter file object corresponding to the
         simulation output to be plotted.
@@ -1820,6 +1822,7 @@
 
     Raises
     ------
+
     AssertionError
         If a proper normal axis is not specified via the normal or axis
         keywords, and/or if a field to plot is not specified.

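The ``SlicePlot`` factory documented above dispatches on the normal argument. A
minimal sketch of both paths (the dataset path and width are illustrative):

    import yt

    ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")

    # A coordinate axis as the normal produces an AxisAlignedSlicePlot ...
    slc = yt.SlicePlot(ds, "z", "density", width=(20, "kpc"))
    slc.save()

    # ... while an arbitrary normal vector produces an OffAxisSlicePlot.
    oas = yt.SlicePlot(ds, [1, 1, 0], "density", width=(20, "kpc"))
    oas.save()
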
Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.


