[yt-svn] commit/yt: MatthewTurk: Merged in qobilidop/yt (pull request #2474)

commits-noreply at bitbucket.org commits-noreply at bitbucket.org
Thu Dec 15 13:56:12 PST 2016


1 new commit in yt:

https://bitbucket.org/yt_analysis/yt/commits/dcfa4d3f885f/
Changeset:   dcfa4d3f885f
Branch:      yt
User:        MatthewTurk
Date:        2016-12-15 21:55:42+00:00
Summary:     Merged in qobilidop/yt (pull request #2474)

Refactor particle dataset
Affected #:  6 files

diff -r ad6d4f04b990edfeae34479a220c6629df074070 -r dcfa4d3f885f3a2c907acb32431c47cff2c6b561 yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -51,8 +51,6 @@
     ValidateSpatial
 from yt.fields.fluid_fields import \
     setup_gradient_fields
-from yt.fields.particle_fields import \
-    add_volume_weighted_smoothed_field
 from yt.data_objects.particle_filters import \
     filter_registry
 from yt.data_objects.particle_unions import \
@@ -1223,56 +1221,6 @@
             validators=[ValidateSpatial()])
         return ("deposit", field_name)
 
-    def add_smoothed_particle_field(self, smooth_field, method="volume_weighted",
-                                    nneighbors=64, kernel_name="cubic"):
-        """Add a new smoothed particle field
-
-        Creates a new smoothed field based on the particle *smooth_field*.
-
-        Parameters
-        ----------
-
-        smooth_field : tuple
-           The field name tuple of the particle field the smoothed field will
-           be created from.  This must be a field name tuple so yt can
-           appropriately infer the correct particle type.
-        method : string, default 'volume_weighted'
-           The particle smoothing method to use. Can only be 'volume_weighted'
-           for now.
-        nneighbors : int, default 64
-            The number of neighbors to examine during the process.
-        kernel_name : string, default 'cubic'
-            This is the name of the smoothing kernel to use. Current supported
-            kernel names include `cubic`, `quartic`, `quintic`, `wendland2`,
-            `wendland4`, and `wendland6`.
-
-        Returns
-        -------
-
-        The field name tuple for the newly created field.
-        """
-        self.index
-        if isinstance(smooth_field, tuple):
-            ptype, smooth_field = smooth_field[0], smooth_field[1]
-        else:
-            raise RuntimeError("smooth_field must be a tuple, received %s" %
-                               smooth_field)
-        if method != "volume_weighted":
-            raise NotImplementedError("method must be 'volume_weighted'")
-
-        coord_name = "particle_position"
-        mass_name = "particle_mass"
-        smoothing_length_name = "smoothing_length"
-        if (ptype, smoothing_length_name) not in self.derived_field_list:
-            raise ValueError("%s not in derived_field_list" %
-                             ((ptype, smoothing_length_name),))
-        density_name = "density"
-        registry = self.field_info
-
-        return add_volume_weighted_smoothed_field(ptype, coord_name, mass_name,
-                   smoothing_length_name, density_name, smooth_field, registry,
-                   nneighbors=nneighbors, kernel_name=kernel_name)[0]
-
     def add_gradient_fields(self, input_field):
         """Add gradient fields.
 
@@ -1353,3 +1301,17 @@
 
     def __lt__(self, other):
         return self.filename < other.filename
+
+
+class ParticleDataset(Dataset):
+    _unit_base = None
+    filter_bbox = False
+
+    def __init__(self, filename, dataset_type=None, file_style=None,
+                 units_override=None, unit_system="cgs",
+                 n_ref=64, over_refine_factor=1):
+        self.n_ref = n_ref
+        self.over_refine_factor = over_refine_factor
+        super(ParticleDataset, self).__init__(
+            filename, dataset_type=dataset_type, file_style=file_style,
+            units_override=units_override, unit_system=unit_system)

diff -r ad6d4f04b990edfeae34479a220c6629df074070 -r dcfa4d3f885f3a2c907acb32431c47cff2c6b561 yt/frontends/gadget/data_structures.py
--- a/yt/frontends/gadget/data_structures.py
+++ b/yt/frontends/gadget/data_structures.py
@@ -26,7 +26,7 @@
 from yt.data_objects.static_output import \
     ParticleFile
 from yt.frontends.sph.data_structures import \
-    ParticleDataset
+    SPHDataset
 from yt.geometry.particle_geometry_handler import \
     ParticleIndex
 from yt.utilities.cosmology import \
@@ -76,7 +76,7 @@
             field_list, self.total_particles,
             self._position_offset, self._file_size)
 
-class GadgetDataset(ParticleDataset):
+class GadgetDataset(SPHDataset):
     _index_class = ParticleIndex
     _file_class = GadgetBinaryFile
     _field_info_class = GadgetFieldInfo
@@ -89,6 +89,7 @@
                  additional_fields=(),
                  unit_base=None, n_ref=64,
                  over_refine_factor=1,
+                 kernel_name=None,
                  index_ptype="all",
                  bounding_box = None,
                  header_spec = "default",
@@ -103,8 +104,6 @@
             field_spec, gadget_field_specs)
         self._ptype_spec = self._setup_binary_spec(
             ptype_spec, gadget_ptype_specs)
-        self.n_ref = n_ref
-        self.over_refine_factor = over_refine_factor
         self.index_ptype = index_ptype
         self.storage_filename = None
         if unit_base is not None and "UnitLength_in_cm" in unit_base:
@@ -123,7 +122,10 @@
         if units_override is not None:
             raise RuntimeError("units_override is not supported for GadgetDataset. "+
                                "Use unit_base instead.")
-        super(GadgetDataset, self).__init__(filename, dataset_type, unit_system=unit_system)
+        super(GadgetDataset, self).__init__(
+            filename, dataset_type=dataset_type, unit_system=unit_system,
+            n_ref=n_ref, over_refine_factor=over_refine_factor,
+            kernel_name=kernel_name)
         if self.cosmological_simulation:
             self.time_unit.convert_to_units('s/h')
             self.length_unit.convert_to_units('kpccm/h')
@@ -362,6 +364,7 @@
     def __init__(self, filename, dataset_type="gadget_hdf5",
                  unit_base = None, n_ref=64,
                  over_refine_factor=1,
+                 kernel_name=None,
                  index_ptype="all",
                  bounding_box = None,
                  units_override=None,
@@ -374,7 +377,8 @@
         super(GadgetHDF5Dataset, self).__init__(
             filename, dataset_type, unit_base=unit_base, n_ref=n_ref,
             over_refine_factor=over_refine_factor, index_ptype=index_ptype,
-            bounding_box = bounding_box, unit_system=unit_system)
+            kernel_name=kernel_name, bounding_box=bounding_box,
+            unit_system=unit_system)
 
     def _get_hvals(self):
         handle = h5py.File(self.parameter_filename, mode="r")

diff -r ad6d4f04b990edfeae34479a220c6629df074070 -r dcfa4d3f885f3a2c907acb32431c47cff2c6b561 yt/frontends/http_stream/data_structures.py
--- a/yt/frontends/http_stream/data_structures.py
+++ b/yt/frontends/http_stream/data_structures.py
@@ -20,9 +20,8 @@
 import time
 
 from yt.data_objects.static_output import \
+    ParticleDataset, \
     ParticleFile
-from yt.frontends.sph.data_structures import \
-    ParticleDataset
 from yt.frontends.sph.fields import \
     SPHFieldInfo
 from yt.funcs import \
@@ -43,17 +42,15 @@
     filename_template = ""
     
     def __init__(self, base_url,
-                 dataset_type = "http_particle_stream",
-                 n_ref = 64, over_refine_factor=1, 
-                 unit_system="cgs"):
+                 dataset_type="http_particle_stream", unit_system="cgs",
+                 n_ref=64, over_refine_factor=1):
         if get_requests() is None:
             raise ImportError(
                 "This functionality depends on the requests package")
         self.base_url = base_url
-        self.n_ref = n_ref
-        self.over_refine_factor = over_refine_factor
-        super(HTTPStreamDataset, self).__init__("", dataset_type, 
-                                                unit_system=unit_system)
+        super(HTTPStreamDataset, self).__init__(
+            "", dataset_type=dataset_type, unit_system=unit_system,
+            n_ref=n_ref, over_refine_factor=over_refine_factor)
 
     def __repr__(self):
         return self.base_url

diff -r ad6d4f04b990edfeae34479a220c6629df074070 -r dcfa4d3f885f3a2c907acb32431c47cff2c6b561 yt/frontends/sdf/data_structures.py
--- a/yt/frontends/sdf/data_structures.py
+++ b/yt/frontends/sdf/data_structures.py
@@ -26,7 +26,8 @@
 from yt.geometry.particle_geometry_handler import \
     ParticleIndex
 from yt.data_objects.static_output import \
-    Dataset, ParticleFile
+    ParticleDataset, \
+    ParticleFile
 from yt.funcs import \
     get_requests, \
     setdefaultattr
@@ -53,7 +54,7 @@
 class SDFFile(ParticleFile):
     pass
 
-class SDFDataset(Dataset):
+class SDFDataset(ParticleDataset):
     _index_class = ParticleIndex
     _file_class = SDFFile
     _field_info_class = SDFFieldInfo
@@ -65,18 +66,16 @@
     _subspace = False
 
 
-    def __init__(self, filename, dataset_type = "sdf_particles",
-                 n_ref = 64, over_refine_factor = 1,
-                 bounding_box = None,
-                 sdf_header = None,
-                 midx_filename = None,
-                 midx_header = None,
-                 midx_level = None,
-                 field_map = None,
+    def __init__(self, filename, dataset_type="sdf_particles",
+                 n_ref=64, over_refine_factor=1,
+                 bounding_box=None,
+                 sdf_header=None,
+                 midx_filename=None,
+                 midx_header=None,
+                 midx_level=None,
+                 field_map=None,
                  units_override=None,
                  unit_system="cgs"):
-        self.n_ref = n_ref
-        self.over_refine_factor = over_refine_factor
         if bounding_box is not None:
             self._subspace = True
             bbox = np.array(bounding_box, dtype="float32")
@@ -99,9 +98,10 @@
         if filename.startswith("http"):
             prefix += 'http_'
         dataset_type = prefix + 'sdf_particles'
-        super(SDFDataset, self).__init__(filename, dataset_type,
-                                         units_override=units_override,
-                                         unit_system=unit_system)
+        super(SDFDataset, self).__init__(
+            filename, dataset_type=dataset_type,
+            units_override=units_override, unit_system=unit_system,
+            n_ref=n_ref, over_refine_factor=over_refine_factor)
 
     def _parse_parameter_file(self):
         if self.parameter_filename.startswith("http"):

diff -r ad6d4f04b990edfeae34479a220c6629df074070 -r dcfa4d3f885f3a2c907acb32431c47cff2c6b561 yt/frontends/sph/data_structures.py
--- a/yt/frontends/sph/data_structures.py
+++ b/yt/frontends/sph/data_structures.py
@@ -16,9 +16,83 @@
 #-----------------------------------------------------------------------------
 
 from yt.data_objects.static_output import \
-    Dataset
+    ParticleDataset
+from yt.fields.particle_fields import \
+    add_volume_weighted_smoothed_field
 
-class ParticleDataset(Dataset):
-    _unit_base = None
-    over_refine_factor = 1
-    filter_bbox = False
+
+class SPHDataset(ParticleDataset):
+    default_kernel_name = "cubic"
+
+    def __init__(self, filename, dataset_type=None, file_style=None,
+                 units_override=None, unit_system="cgs",
+                 n_ref=64, over_refine_factor=1,
+                 kernel_name=None):
+        if kernel_name is None:
+            self.kernel_name = self.default_kernel_name
+        else:
+            self.kernel_name = kernel_name
+        super(SPHDataset, self).__init__(
+            filename, dataset_type=dataset_type, file_style=file_style,
+            units_override=units_override, unit_system=unit_system,
+            n_ref=n_ref, over_refine_factor=over_refine_factor)
+
+    def add_smoothed_particle_field(self, smooth_field,
+                                    method="volume_weighted", nneighbors=64,
+                                    kernel_name=None):
+        """Add a new smoothed particle field
+
+        Creates a new smoothed field based on the particle *smooth_field*.
+
+        Parameters
+        ----------
+
+        smooth_field : tuple
+           The field name tuple of the particle field the smoothed field will
+           be created from.  This must be a field name tuple so yt can
+           appropriately infer the correct particle type.
+        method : string, default 'volume_weighted'
+           The particle smoothing method to use. Can only be 'volume_weighted'
+           for now.
+        nneighbors : int, default 64
+            The number of neighbors to examine during the process.
+        kernel_name : string or None, default None
+            This is the name of the smoothing kernel to use. Current supported
+            kernel names include `cubic`, `quartic`, `quintic`, `wendland2`,
+            `wendland4`, and `wendland6`. If left as None,
+            :attr:`~yt.frontends.sph.data_structures.SPHDataset.kernel_name`
+            will be used.
+
+        Returns
+        -------
+
+        The field name tuple for the newly created field.
+        """
+        # The magical step
+        self.index
+
+        # Parse arguments
+        if isinstance(smooth_field, tuple):
+            ptype, smooth_field = smooth_field[0], smooth_field[1]
+        else:
+            raise RuntimeError("smooth_field must be a tuple, received %s" %
+                               smooth_field)
+        if method != "volume_weighted":
+            raise NotImplementedError("method must be 'volume_weighted'")
+        if kernel_name is None:
+            kernel_name = self.kernel_name
+
+        # Prepare field names and registry to be used later
+        coord_name = "particle_position"
+        mass_name = "particle_mass"
+        smoothing_length_name = "smoothing_length"
+        if (ptype, smoothing_length_name) not in self.derived_field_list:
+            raise ValueError("%s not in derived_field_list" %
+                             ((ptype, smoothing_length_name),))
+        density_name = "density"
+        registry = self.field_info
+
+        # Do the actual work
+        return add_volume_weighted_smoothed_field(ptype, coord_name, mass_name,
+                   smoothing_length_name, density_name, smooth_field, registry,
+                   nneighbors=nneighbors, kernel_name=kernel_name)[0]

diff -r ad6d4f04b990edfeae34479a220c6629df074070 -r dcfa4d3f885f3a2c907acb32431c47cff2c6b561 yt/frontends/tipsy/data_structures.py
--- a/yt/frontends/tipsy/data_structures.py
+++ b/yt/frontends/tipsy/data_structures.py
@@ -22,7 +22,7 @@
 import os
 
 from yt.frontends.sph.data_structures import \
-    ParticleDataset
+    SPHDataset
 from yt.funcs import deprecate
 from yt.geometry.particle_geometry_handler import \
     ParticleIndex
@@ -54,7 +54,7 @@
     def _calculate_offsets(self, field_list):
         self.field_offsets = self.io._calculate_particle_offsets(self)
 
-class TipsyDataset(ParticleDataset):
+class TipsyDataset(SPHDataset):
     _index_class = ParticleIndex
     _file_class = TipsyFile
     _field_info_class = TipsyFieldInfo
@@ -74,6 +74,7 @@
                  parameter_file=None,
                  cosmology_parameters=None,
                  n_ref=64, over_refine_factor=1,
+                 kernel_name=None,
                  bounding_box=None,
                  units_override=None,
                  unit_system="cgs"):
@@ -82,8 +83,6 @@
         # and domain_right_edge
         self.bounding_box = bounding_box
         self.filter_bbox = (bounding_box is not None)
-        self.n_ref = n_ref
-        self.over_refine_factor = over_refine_factor
         if field_dtypes is None:
             field_dtypes = {}
         success, self.endian = self._validate_header(filename)
@@ -113,8 +112,10 @@
         if units_override is not None:
             raise RuntimeError("units_override is not supported for TipsyDataset. "+
                                "Use unit_base instead.")
-        super(TipsyDataset, self).__init__(filename, dataset_type,
-                                           unit_system=unit_system)
+        super(TipsyDataset, self).__init__(
+            filename, dataset_type=dataset_type, unit_system=unit_system,
+            n_ref=n_ref, over_refine_factor=over_refine_factor,
+            kernel_name=kernel_name)
 
     def __repr__(self):
         return os.path.basename(self.parameter_filename)

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this email because you have the commit notification service enabled
for this repository.



More information about the yt-svn mailing list