[yt-svn] commit/yt: 16 new changesets

commits-noreply at bitbucket.org
Thu Dec 15 13:56:09 PST 2016


16 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/d105a0583f1b/
Changeset:   d105a0583f1b
Branch:      yt
User:        qobilidop
Date:        2016-12-14 18:18:16+00:00
Summary:     Move particle dataset specific operations from `Dataset` to `ParticleDataset`

The reason for this is that I would like those operations to be able to use particle-dataset-specific information. For example, it makes sense for a particle dataset to have a property `kernel_type`, and `add_smoothed_particle_field` would then use that kernel by default. The `kernel_type` property is not there yet, but I'm going to add it.
Affected #:  2 files
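
A hedged illustration of the behaviour this commit works toward (the class and attribute names below are simplified placeholders, not the actual yt implementation; `kernel_type` is only proposed in the commit message and is added later in this series):

# Minimal sketch of the intended pattern: a dataset-level default kernel that
# add_smoothed_particle_field falls back to when no kernel is given.
class ParticleDatasetSketch(object):
    kernel_type = "cubic"  # hypothetical dataset-level default

    def add_smoothed_particle_field(self, smooth_field, kernel_name=None):
        if kernel_name is None:
            # Use the dataset-specific default instead of a hard-coded value.
            kernel_name = self.kernel_type
        ptype, field = smooth_field
        return ("deposit", "%s_smoothed_%s" % (ptype, field)), kernel_name

ds = ParticleDatasetSketch()
print(ds.add_smoothed_particle_field(("PartType0", "Temperature")))
# (('deposit', 'PartType0_smoothed_Temperature'), 'cubic')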

diff -r 011cd19563215cefb3facfd5d6f575b6b38de0db -r d105a0583f1b05f4db5ddc382e2a3a723b5ce6c3 yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -1145,134 +1145,6 @@
         deps, _ = self.field_info.check_derived_fields([name])
         self.field_dependencies.update(deps)
 
-    def add_deposited_particle_field(self, deposit_field, method, kernel_name='cubic',
-                                     weight_field='particle_mass'):
-        """Add a new deposited particle field
-
-        Creates a new deposited field based on the particle *deposit_field*.
-
-        Parameters
-        ----------
-
-        deposit_field : tuple
-           The field name tuple of the particle field the deposited field will
-           be created from.  This must be a field name tuple so yt can
-           appropriately infer the correct particle type.
-        method : string
-           This is the "method name" which will be looked up in the
-           `particle_deposit` namespace as `methodname_deposit`.  Current
-           methods include `simple_smooth`, `sum`, `std`, `cic`, `weighted_mean`,
-           `mesh_id`, and `nearest`.
-        kernel_name : string, default 'cubic'
-           This is the name of the smoothing kernel to use. It is only used for
-           the `simple_smooth` method and is otherwise ignored. Current
-           supported kernel names include `cubic`, `quartic`, `quintic`,
-           `wendland2`, `wendland4`, and `wendland6`.
-        weight_field : string, default 'particle_mass'
-           Weighting field name for deposition method `weighted_mean`.
-
-        Returns
-        -------
-
-        The field name tuple for the newly created field.
-        """
-        self.index
-        if isinstance(deposit_field, tuple):
-            ptype, deposit_field = deposit_field[0], deposit_field[1]
-        else:
-            raise RuntimeError
-
-        units = self.field_info[ptype, deposit_field].units
-        take_log = self.field_info[ptype, deposit_field].take_log
-        name_map = {"sum": "sum", "std":"std", "cic": "cic", "weighted_mean": "avg",
-                    "nearest": "nn", "simple_smooth": "ss", "count": "count"}
-        field_name = "%s_" + name_map[method] + "_%s"
-        field_name = field_name % (ptype, deposit_field.replace('particle_', ''))
-
-        if method == "count":
-            field_name = "%s_count" % ptype
-            if ("deposit", field_name) in self.field_info:
-                mylog.warning("The deposited field %s already exists" % field_name)
-                return ("deposit", field_name)
-            else:
-                units = "dimensionless"
-                take_log = False
-
-        def _deposit_field(field, data):
-            """
-            Create a grid field for particle quantities using given method.
-            """
-            pos = data[ptype, "particle_position"]
-            if method == 'weighted_mean':
-                d = data.ds.arr(data.deposit(pos, [data[ptype, deposit_field],
-                                                   data[ptype, weight_field]],
-                                             method=method, kernel_name=kernel_name),
-                                             input_units=units)
-                d[np.isnan(d)] = 0.0
-            else:
-                d = data.ds.arr(data.deposit(pos, [data[ptype, deposit_field]],
-                                             method=method, kernel_name=kernel_name),
-                                             input_units=units)
-            return d
-
-        self.add_field(
-            ("deposit", field_name),
-            function=_deposit_field,
-            units=units,
-            take_log=take_log,
-            validators=[ValidateSpatial()])
-        return ("deposit", field_name)
-
-    def add_smoothed_particle_field(self, smooth_field, method="volume_weighted",
-                                    nneighbors=64, kernel_name="cubic"):
-        """Add a new smoothed particle field
-
-        Creates a new smoothed field based on the particle *smooth_field*.
-
-        Parameters
-        ----------
-
-        smooth_field : tuple
-           The field name tuple of the particle field the smoothed field will
-           be created from.  This must be a field name tuple so yt can
-           appropriately infer the correct particle type.
-        method : string, default 'volume_weighted'
-           The particle smoothing method to use. Can only be 'volume_weighted'
-           for now.
-        nneighbors : int, default 64
-            The number of neighbors to examine during the process.
-        kernel_name : string, default 'cubic'
-            This is the name of the smoothing kernel to use. Current supported
-            kernel names include `cubic`, `quartic`, `quintic`, `wendland2`,
-            `wendland4`, and `wendland6`.
-
-        Returns
-        -------
-
-        The field name tuple for the newly created field.
-        """
-        self.index
-        if isinstance(smooth_field, tuple):
-            ptype, smooth_field = smooth_field[0], smooth_field[1]
-        else:
-            raise RuntimeError("smooth_field must be a tuple, received %s" %
-                               smooth_field)
-        if method != "volume_weighted":
-            raise NotImplementedError("method must be 'volume_weighted'")
-
-        coord_name = "particle_position"
-        mass_name = "particle_mass"
-        smoothing_length_name = "smoothing_length"
-        if (ptype, smoothing_length_name) not in self.derived_field_list:
-            raise ValueError("%s not in derived_field_list" %
-                             ((ptype, smoothing_length_name),))
-        density_name = "density"
-        registry = self.field_info
-
-        return add_volume_weighted_smoothed_field(ptype, coord_name, mass_name,
-                   smoothing_length_name, density_name, smooth_field, registry,
-                   nneighbors=nneighbors, kernel_name=kernel_name)[0]
-
     def add_gradient_fields(self, input_field):
         """Add gradient fields.
 

diff -r 011cd19563215cefb3facfd5d6f575b6b38de0db -r d105a0583f1b05f4db5ddc382e2a3a723b5ce6c3 yt/frontends/sph/data_structures.py
--- a/yt/frontends/sph/data_structures.py
+++ b/yt/frontends/sph/data_structures.py
@@ -22,3 +22,131 @@
     _unit_base = None
     over_refine_factor = 1
     filter_bbox = False
+
+    def add_deposited_particle_field(self, deposit_field, method, kernel_name='cubic',
+                                     weight_field='particle_mass'):
+        """Add a new deposited particle field
+
+        Creates a new deposited field based on the particle *deposit_field*.
+
+        Parameters
+        ----------
+
+        deposit_field : tuple
+           The field name tuple of the particle field the deposited field will
+           be created from.  This must be a field name tuple so yt can
+           appropriately infer the correct particle type.
+        method : string
+           This is the "method name" which will be looked up in the
+           `particle_deposit` namespace as `methodname_deposit`.  Current
+           methods include `simple_smooth`, `sum`, `std`, `cic`, `weighted_mean`,
+           `mesh_id`, and `nearest`.
+        kernel_name : string, default 'cubic'
+           This is the name of the smoothing kernel to use. It is only used for
+           the `simple_smooth` method and is otherwise ignored. Current
+           supported kernel names include `cubic`, `quartic`, `quintic`,
+           `wendland2`, `wendland4`, and `wendland6`.
+        weight_field : string, default 'particle_mass'
+           Weighting field name for deposition method `weighted_mean`.
+
+        Returns
+        -------
+
+        The field name tuple for the newly created field.
+        """
+        self.index
+        if isinstance(deposit_field, tuple):
+            ptype, deposit_field = deposit_field[0], deposit_field[1]
+        else:
+            raise RuntimeError
+
+        units = self.field_info[ptype, deposit_field].units
+        take_log = self.field_info[ptype, deposit_field].take_log
+        name_map = {"sum": "sum", "std":"std", "cic": "cic", "weighted_mean": "avg",
+                    "nearest": "nn", "simple_smooth": "ss", "count": "count"}
+        field_name = "%s_" + name_map[method] + "_%s"
+        field_name = field_name % (ptype, deposit_field.replace('particle_', ''))
+
+        if method == "count":
+            field_name = "%s_count" % ptype
+            if ("deposit", field_name) in self.field_info:
+                mylog.warning("The deposited field %s already exists" % field_name)
+                return ("deposit", field_name)
+            else:
+                units = "dimensionless"
+                take_log = False
+
+        def _deposit_field(field, data):
+            """
+            Create a grid field for particle quantities using given method.
+            """
+            pos = data[ptype, "particle_position"]
+            if method == 'weighted_mean':
+                d = data.ds.arr(data.deposit(pos, [data[ptype, deposit_field],
+                                                   data[ptype, weight_field]],
+                                             method=method, kernel_name=kernel_name),
+                                             input_units=units)
+                d[np.isnan(d)] = 0.0
+            else:
+                d = data.ds.arr(data.deposit(pos, [data[ptype, deposit_field]],
+                                             method=method, kernel_name=kernel_name),
+                                             input_units=units)
+            return d
+
+        self.add_field(
+            ("deposit", field_name),
+            function=_deposit_field,
+            units=units,
+            take_log=take_log,
+            validators=[ValidateSpatial()])
+        return ("deposit", field_name)
+
+    def add_smoothed_particle_field(self, smooth_field, method="volume_weighted",
+                                    nneighbors=64, kernel_name="cubic"):
+        """Add a new smoothed particle field
+
+        Creates a new smoothed field based on the particle *smooth_field*.
+
+        Parameters
+        ----------
+
+        smooth_field : tuple
+           The field name tuple of the particle field the smoothed field will
+           be created from.  This must be a field name tuple so yt can
+           appropriately infer the correct particle type.
+        method : string, default 'volume_weighted'
+           The particle smoothing method to use. Can only be 'volume_weighted'
+           for now.
+        nneighbors : int, default 64
+            The number of neighbors to examine during the process.
+        kernel_name : string, default 'cubic'
+            This is the name of the smoothing kernel to use. Current supported
+            kernel names include `cubic`, `quartic`, `quintic`, `wendland2`,
+            `wendland4`, and `wendland6`.
+
+        Returns
+        -------
+
+        The field name tuple for the newly created field.
+        """
+        self.index
+        if isinstance(smooth_field, tuple):
+            ptype, smooth_field = smooth_field[0], smooth_field[1]
+        else:
+            raise RuntimeError("smooth_field must be a tuple, received %s" %
+                               smooth_field)
+        if method != "volume_weighted":
+            raise NotImplementedError("method must be 'volume_weighted'")
+
+        coord_name = "particle_position"
+        mass_name = "particle_mass"
+        smoothing_length_name = "smoothing_length"
+        if (ptype, smoothing_length_name) not in self.derived_field_list:
+            raise ValueError("%s not in derived_field_list" %
+                             ((ptype, smoothing_length_name),))
+        density_name = "density"
+        registry = self.field_info
+
+        return add_volume_weighted_smoothed_field(ptype, coord_name, mass_name,
+                   smoothing_length_name, density_name, smooth_field, registry,
+                   nneighbors=nneighbors, kernel_name=kernel_name)[0]


https://bitbucket.org/yt_analysis/yt/commits/b442391a3731/
Changeset:   b442391a3731
Branch:      yt
User:        qobilidop
Date:        2016-12-14 18:46:45+00:00
Summary:     Move a method back

This method is useful for grid and oct codes as well.
Affected #:  2 files
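
A hedged usage sketch of why keeping this method on the base `Dataset` matters: a grid-code dataset can deposit particle quantities onto its mesh with the same call. The dataset path below is just a placeholder sample.

import yt

# Placeholder sample dataset; any grid or oct frontend with particles works.
ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
# Deposit particle mass onto the mesh with cloud-in-cell interpolation.
fname = ds.add_deposited_particle_field(("all", "particle_mass"), method="cic")
ad = ds.all_data()
print(ad[fname])  # values of the new ("deposit", ...) field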

diff -r d105a0583f1b05f4db5ddc382e2a3a723b5ce6c3 -r b442391a373199d22b346ebb15a97408b1fa5c67 yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -1145,6 +1145,84 @@
         deps, _ = self.field_info.check_derived_fields([name])
         self.field_dependencies.update(deps)
 
+    def add_deposited_particle_field(self, deposit_field, method, kernel_name='cubic',
+                                     weight_field='particle_mass'):
+        """Add a new deposited particle field
+
+        Creates a new deposited field based on the particle *deposit_field*.
+
+        Parameters
+        ----------
+
+        deposit_field : tuple
+           The field name tuple of the particle field the deposited field will
+           be created from.  This must be a field name tuple so yt can
+           appropriately infer the correct particle type.
+        method : string
+           This is the "method name" which will be looked up in the
+           `particle_deposit` namespace as `methodname_deposit`.  Current
+           methods include `simple_smooth`, `sum`, `std`, `cic`, `weighted_mean`,
+           `mesh_id`, and `nearest`.
+        kernel_name : string, default 'cubic'
+           This is the name of the smoothing kernel to use. It is only used for
+           the `simple_smooth` method and is otherwise ignored. Current
+           supported kernel names include `cubic`, `quartic`, `quintic`,
+           `wendland2`, `wendland4`, and `wendland6`.
+        weight_field : string, default 'particle_mass'
+           Weighting field name for deposition method `weighted_mean`.
+
+        Returns
+        -------
+
+        The field name tuple for the newly created field.
+        """
+        self.index
+        if isinstance(deposit_field, tuple):
+            ptype, deposit_field = deposit_field[0], deposit_field[1]
+        else:
+            raise RuntimeError
+
+        units = self.field_info[ptype, deposit_field].units
+        take_log = self.field_info[ptype, deposit_field].take_log
+        name_map = {"sum": "sum", "std":"std", "cic": "cic", "weighted_mean": "avg",
+                    "nearest": "nn", "simple_smooth": "ss", "count": "count"}
+        field_name = "%s_" + name_map[method] + "_%s"
+        field_name = field_name % (ptype, deposit_field.replace('particle_', ''))
+
+        if method == "count":
+            field_name = "%s_count" % ptype
+            if ("deposit", field_name) in self.field_info:
+                mylog.warning("The deposited field %s already exists" % field_name)
+                return ("deposit", field_name)
+            else:
+                units = "dimensionless"
+                take_log = False
+
+        def _deposit_field(field, data):
+            """
+            Create a grid field for particle quantities using given method.
+            """
+            pos = data[ptype, "particle_position"]
+            if method == 'weighted_mean':
+                d = data.ds.arr(data.deposit(pos, [data[ptype, deposit_field],
+                                                   data[ptype, weight_field]],
+                                             method=method, kernel_name=kernel_name),
+                                             input_units=units)
+                d[np.isnan(d)] = 0.0
+            else:
+                d = data.ds.arr(data.deposit(pos, [data[ptype, deposit_field]],
+                                             method=method, kernel_name=kernel_name),
+                                             input_units=units)
+            return d
+
+        self.add_field(
+            ("deposit", field_name),
+            function=_deposit_field,
+            units=units,
+            take_log=take_log,
+            validators=[ValidateSpatial()])
+        return ("deposit", field_name)
+
     def add_gradient_fields(self, input_field):
         """Add gradient fields.
 

diff -r d105a0583f1b05f4db5ddc382e2a3a723b5ce6c3 -r b442391a373199d22b346ebb15a97408b1fa5c67 yt/frontends/sph/data_structures.py
--- a/yt/frontends/sph/data_structures.py
+++ b/yt/frontends/sph/data_structures.py
@@ -23,84 +23,6 @@
     over_refine_factor = 1
     filter_bbox = False
 
-    def add_deposited_particle_field(self, deposit_field, method, kernel_name='cubic',
-                                     weight_field='particle_mass'):
-        """Add a new deposited particle field
-
-        Creates a new deposited field based on the particle *deposit_field*.
-
-        Parameters
-        ----------
-
-        deposit_field : tuple
-           The field name tuple of the particle field the deposited field will
-           be created from.  This must be a field name tuple so yt can
-           appropriately infer the correct particle type.
-        method : string
-           This is the "method name" which will be looked up in the
-           `particle_deposit` namespace as `methodname_deposit`.  Current
-           methods include `simple_smooth`, `sum`, `std`, `cic`, `weighted_mean`,
-           `mesh_id`, and `nearest`.
-        kernel_name : string, default 'cubic'
-           This is the name of the smoothing kernel to use. It is only used for
-           the `simple_smooth` method and is otherwise ignored. Current
-           supported kernel names include `cubic`, `quartic`, `quintic`,
-           `wendland2`, `wendland4`, and `wendland6`.
-        weight_field : string, default 'particle_mass'
-           Weighting field name for deposition method `weighted_mean`.
-
-        Returns
-        -------
-
-        The field name tuple for the newly created field.
-        """
-        self.index
-        if isinstance(deposit_field, tuple):
-            ptype, deposit_field = deposit_field[0], deposit_field[1]
-        else:
-            raise RuntimeError
-
-        units = self.field_info[ptype, deposit_field].units
-        take_log = self.field_info[ptype, deposit_field].take_log
-        name_map = {"sum": "sum", "std":"std", "cic": "cic", "weighted_mean": "avg",
-                    "nearest": "nn", "simple_smooth": "ss", "count": "count"}
-        field_name = "%s_" + name_map[method] + "_%s"
-        field_name = field_name % (ptype, deposit_field.replace('particle_', ''))
-
-        if method == "count":
-            field_name = "%s_count" % ptype
-            if ("deposit", field_name) in self.field_info:
-                mylog.warning("The deposited field %s already exists" % field_name)
-                return ("deposit", field_name)
-            else:
-                units = "dimensionless"
-                take_log = False
-
-        def _deposit_field(field, data):
-            """
-            Create a grid field for particle quantities using given method.
-            """
-            pos = data[ptype, "particle_position"]
-            if method == 'weighted_mean':
-                d = data.ds.arr(data.deposit(pos, [data[ptype, deposit_field],
-                                                   data[ptype, weight_field]],
-                                             method=method, kernel_name=kernel_name),
-                                             input_units=units)
-                d[np.isnan(d)] = 0.0
-            else:
-                d = data.ds.arr(data.deposit(pos, [data[ptype, deposit_field]],
-                                             method=method, kernel_name=kernel_name),
-                                             input_units=units)
-            return d
-
-        self.add_field(
-            ("deposit", field_name),
-            function=_deposit_field,
-            units=units,
-            take_log=take_log,
-            validators=[ValidateSpatial()])
-        return ("deposit", field_name)
-
     def add_smoothed_particle_field(self, smooth_field, method="volume_weighted",
                                     nneighbors=64, kernel_name="cubic"):
         """Add a new smoothed particle field


https://bitbucket.org/yt_analysis/yt/commits/f0df52b8c450/
Changeset:   f0df52b8c450
Branch:      yt
User:        qobilidop
Date:        2016-12-14 18:49:28+00:00
Summary:     Move the accompanying import
Affected #:  2 files

diff -r b442391a373199d22b346ebb15a97408b1fa5c67 -r f0df52b8c4506be98babc625a145e2fc4b2a3724 yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -51,8 +51,6 @@
     ValidateSpatial
 from yt.fields.fluid_fields import \
     setup_gradient_fields
-from yt.fields.particle_fields import \
-    add_volume_weighted_smoothed_field
 from yt.data_objects.particle_filters import \
     filter_registry
 from yt.data_objects.particle_unions import \

diff -r b442391a373199d22b346ebb15a97408b1fa5c67 -r f0df52b8c4506be98babc625a145e2fc4b2a3724 yt/frontends/sph/data_structures.py
--- a/yt/frontends/sph/data_structures.py
+++ b/yt/frontends/sph/data_structures.py
@@ -17,6 +17,9 @@
 
 from yt.data_objects.static_output import \
     Dataset
+from yt.fields.particle_fields import \
+    add_volume_weighted_smoothed_field
+
 
 class ParticleDataset(Dataset):
     _unit_base = None


https://bitbucket.org/yt_analysis/yt/commits/0837fdbe1733/
Changeset:   0837fdbe1733
Branch:      yt
User:        qobilidop
Date:        2016-12-14 18:52:30+00:00
Summary:     Styling

Line widths should not be more than 80 characters.
Affected #:  1 file

diff -r f0df52b8c4506be98babc625a145e2fc4b2a3724 -r 0837fdbe1733ecfc7de58f1c8909f76ade8a7e58 yt/frontends/sph/data_structures.py
--- a/yt/frontends/sph/data_structures.py
+++ b/yt/frontends/sph/data_structures.py
@@ -26,8 +26,9 @@
     over_refine_factor = 1
     filter_bbox = False
 
-    def add_smoothed_particle_field(self, smooth_field, method="volume_weighted",
-                                    nneighbors=64, kernel_name="cubic"):
+    def add_smoothed_particle_field(self, smooth_field,
+                                    method="volume_weighted", nneighbors=64,
+                                    kernel_name="cubic"):
         """Add a new smoothed particle field
 
         Creates a new smoothed field based on the particle *smooth_field*.


https://bitbucket.org/yt_analysis/yt/commits/f2982890c401/
Changeset:   f2982890c401
Branch:      yt
User:        qobilidop
Date:        2016-12-14 19:42:12+00:00
Summary:     Add the `default_kernel` attribute
Affected #:  1 file

diff -r 0837fdbe1733ecfc7de58f1c8909f76ade8a7e58 -r f2982890c40113b8ca1a24dfc72a34af503dd076 yt/frontends/sph/data_structures.py
--- a/yt/frontends/sph/data_structures.py
+++ b/yt/frontends/sph/data_structures.py
@@ -26,9 +26,11 @@
     over_refine_factor = 1
     filter_bbox = False
 
+    default_kernel = "cubic"
+
     def add_smoothed_particle_field(self, smooth_field,
                                     method="volume_weighted", nneighbors=64,
-                                    kernel_name="cubic"):
+                                    kernel_name=None):
         """Add a new smoothed particle field
 
         Creates a new smoothed field based on the particle *smooth_field*.
@@ -45,10 +47,11 @@
            for now.
         nneighbors : int, default 64
             The number of neighbors to examine during the process.
-        kernel_name : string, default 'cubic'
+        kernel_name : string or None, default None
             This is the name of the smoothing kernel to use. Current supported
             kernel names include `cubic`, `quartic`, `quintic`, `wendland2`,
-            `wendland4`, and `wendland6`.
+            `wendland4`, and `wendland6`. If left as None,
+            :attr:`ParticleDataset.default_kernel` will be used.
 
         Returns
         -------
@@ -63,6 +66,8 @@
                                smooth_field)
         if method != "volume_weighted":
             raise NotImplementedError("method must be 'volume_weighted'")
+        if kernel_name is None:
+            kernel_name = self.default_kernel
 
         coord_name = "particle_position"
         mass_name = "particle_mass"


https://bitbucket.org/yt_analysis/yt/commits/429b1ae171d8/
Changeset:   429b1ae171d8
Branch:      yt
User:        qobilidop
Date:        2016-12-14 19:52:12+00:00
Summary:     Add comments
Affected #:  1 file

diff -r f2982890c40113b8ca1a24dfc72a34af503dd076 -r 429b1ae171d81ec2dd265cd27ac7f9fd6432de21 yt/frontends/sph/data_structures.py
--- a/yt/frontends/sph/data_structures.py
+++ b/yt/frontends/sph/data_structures.py
@@ -58,7 +58,10 @@
 
         The field name tuple for the newly created field.
         """
+        # The magical step
         self.index
+
+        # Parse arguments
         if isinstance(smooth_field, tuple):
             ptype, smooth_field = smooth_field[0], smooth_field[1]
         else:
@@ -69,6 +72,7 @@
         if kernel_name is None:
             kernel_name = self.default_kernel
 
+        # Prepare field names and registry to be used later
         coord_name = "particle_position"
         mass_name = "particle_mass"
         smoothing_length_name = "smoothing_length"
@@ -78,6 +82,7 @@
         density_name = "density"
         registry = self.field_info
 
+        # Do the actual work
         return add_volume_weighted_smoothed_field(ptype, coord_name, mass_name,
                    smoothing_length_name, density_name, smooth_field, registry,
                    nneighbors=nneighbors, kernel_name=kernel_name)[0]


https://bitbucket.org/yt_analysis/yt/commits/0c4bb07de4f2/
Changeset:   0c4bb07de4f2
Branch:      yt
User:        qobilidop
Date:        2016-12-14 19:55:12+00:00
Summary:     Correct the use of `:attr:`

Oh my poor Sphinx knowledge.
Affected #:  1 file

diff -r 429b1ae171d81ec2dd265cd27ac7f9fd6432de21 -r 0c4bb07de4f202935d431062336efc5900f5acf9 yt/frontends/sph/data_structures.py
--- a/yt/frontends/sph/data_structures.py
+++ b/yt/frontends/sph/data_structures.py
@@ -51,7 +51,8 @@
             This is the name of the smoothing kernel to use. Current supported
             kernel names include `cubic`, `quartic`, `quintic`, `wendland2`,
             `wendland4`, and `wendland6`. If left as None,
-            :attr:`ParticleDataset.default_kernel` will be used.
+            :attr:`~yt.frontends.sph.data_structures.ParticleDataset.default_kernel`
+            will be used.
 
         Returns
         -------


https://bitbucket.org/yt_analysis/yt/commits/0417a18c3cdf/
Changeset:   0417a18c3cdf
Branch:      yt
User:        qobilidop
Date:        2016-12-14 20:53:56+00:00
Summary:     Rearrange class attributes in alphabetic order
Affected #:  1 file

diff -r 0c4bb07de4f202935d431062336efc5900f5acf9 -r 0417a18c3cdfcb6dbc024d40a671f6164784272f yt/frontends/sph/data_structures.py
--- a/yt/frontends/sph/data_structures.py
+++ b/yt/frontends/sph/data_structures.py
@@ -23,10 +23,9 @@
 
 class ParticleDataset(Dataset):
     _unit_base = None
-    over_refine_factor = 1
+    default_kernel = "cubic"
     filter_bbox = False
-
-    default_kernel = "cubic"
+    over_refine_factor = 1
 
     def add_smoothed_particle_field(self, smooth_field,
                                     method="volume_weighted", nneighbors=64,


https://bitbucket.org/yt_analysis/yt/commits/edcbca20b45f/
Changeset:   edcbca20b45f
Branch:      yt
User:        qobilidop
Date:        2016-12-14 21:08:16+00:00
Summary:     Separate `SPHDataset` for SPH codes from `ParticleDataset` for pure N-body codes

The idea is from @ngoldbaum.
Affected #:  3 files
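
A rough sketch of the class split introduced here (simplified stand-ins for the real classes in the diff below): SPH frontends such as Gadget and Tipsy inherit the smoothing machinery from `SPHDataset`, while pure N-body frontends only need `ParticleDataset`.

class ParticleDataset(object):
    # Base for all particle frontends; no smoothing-specific state.
    _unit_base = None
    filter_bbox = False
    over_refine_factor = 1

class SPHDataset(ParticleDataset):
    # SPH codes additionally carry a default kernel and the smoothed-field API.
    default_kernel = "cubic"

    def add_smoothed_particle_field(self, smooth_field, kernel_name=None):
        if kernel_name is None:
            kernel_name = self.default_kernel
        return smooth_field, kernel_name

class GadgetDataset(SPHDataset):   # SPH frontend
    pass

class TipsyDataset(SPHDataset):    # SPH frontend
    pass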

diff -r 0417a18c3cdfcb6dbc024d40a671f6164784272f -r edcbca20b45fb4b53575aac05a4e80dce191bea1 yt/frontends/gadget/data_structures.py
--- a/yt/frontends/gadget/data_structures.py
+++ b/yt/frontends/gadget/data_structures.py
@@ -26,7 +26,7 @@
 from yt.data_objects.static_output import \
     ParticleFile
 from yt.frontends.sph.data_structures import \
-    ParticleDataset
+    SPHDataset
 from yt.geometry.particle_geometry_handler import \
     ParticleIndex
 from yt.utilities.cosmology import \
@@ -76,7 +76,7 @@
             field_list, self.total_particles,
             self._position_offset, self._file_size)
 
-class GadgetDataset(ParticleDataset):
+class GadgetDataset(SPHDataset):
     _index_class = ParticleIndex
     _file_class = GadgetBinaryFile
     _field_info_class = GadgetFieldInfo

diff -r 0417a18c3cdfcb6dbc024d40a671f6164784272f -r edcbca20b45fb4b53575aac05a4e80dce191bea1 yt/frontends/sph/data_structures.py
--- a/yt/frontends/sph/data_structures.py
+++ b/yt/frontends/sph/data_structures.py
@@ -23,10 +23,13 @@
 
 class ParticleDataset(Dataset):
     _unit_base = None
-    default_kernel = "cubic"
     filter_bbox = False
     over_refine_factor = 1
 
+
+class SPHDataset(ParticleDataset):
+    default_kernel = "cubic"
+
     def add_smoothed_particle_field(self, smooth_field,
                                     method="volume_weighted", nneighbors=64,
                                     kernel_name=None):

diff -r 0417a18c3cdfcb6dbc024d40a671f6164784272f -r edcbca20b45fb4b53575aac05a4e80dce191bea1 yt/frontends/tipsy/data_structures.py
--- a/yt/frontends/tipsy/data_structures.py
+++ b/yt/frontends/tipsy/data_structures.py
@@ -22,7 +22,7 @@
 import os
 
 from yt.frontends.sph.data_structures import \
-    ParticleDataset
+    SPHDataset
 from yt.funcs import deprecate
 from yt.geometry.particle_geometry_handler import \
     ParticleIndex
@@ -54,7 +54,7 @@
     def _calculate_offsets(self, field_list):
         self.field_offsets = self.io._calculate_particle_offsets(self)
 
-class TipsyDataset(ParticleDataset):
+class TipsyDataset(SPHDataset):
     _index_class = ParticleIndex
     _file_class = TipsyFile
     _field_info_class = TipsyFieldInfo


https://bitbucket.org/yt_analysis/yt/commits/c6f23e16471a/
Changeset:   c6f23e16471a
Branch:      yt
User:        qobilidop
Date:        2016-12-14 21:40:47+00:00
Summary:     Refactor constructors

The common parts of the related constructors are gathered and moved into the proper class. For `TipsyDataset`, `GadgetDataset`, and `HTTPStreamDataset` the API remains the same. For `ParticleDataset` and `SPHDataset` the API is expanded slightly, but it remains compatible with the original. The reason for the expansion is to keep the API consistent with `Dataset`.
Affected #:  4 files
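
A hedged sketch of the constructor pattern described above (simplified classes, not the real yt code): the shared `n_ref`/`over_refine_factor` handling moves into `ParticleDataset.__init__`, and the frontend constructors keep their public signatures but forward those keywords through `super()`.

class DatasetSketch(object):
    def __init__(self, filename, dataset_type=None, unit_system="cgs"):
        self.filename = filename
        self.dataset_type = dataset_type
        self.unit_system = unit_system

class ParticleDatasetSketch(DatasetSketch):
    def __init__(self, filename, dataset_type=None, unit_system="cgs",
                 n_ref=64, over_refine_factor=1):
        # Common particle-frontend state lives here now.
        self.n_ref = n_ref
        self.over_refine_factor = over_refine_factor
        super(ParticleDatasetSketch, self).__init__(
            filename, dataset_type=dataset_type, unit_system=unit_system)

class TipsyDatasetSketch(ParticleDatasetSketch):
    # Same public API as before: n_ref and over_refine_factor are still
    # accepted here, but are now stored by the base class.
    def __init__(self, filename, n_ref=64, over_refine_factor=1):
        super(TipsyDatasetSketch, self).__init__(
            filename, dataset_type="tipsy",
            n_ref=n_ref, over_refine_factor=over_refine_factor)

ds = TipsyDatasetSketch("galaxy.00300", n_ref=32)
print(ds.n_ref, ds.over_refine_factor)  # 32 1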

diff -r edcbca20b45fb4b53575aac05a4e80dce191bea1 -r c6f23e16471a3a133824c0bcd8f927c46487afeb yt/frontends/gadget/data_structures.py
--- a/yt/frontends/gadget/data_structures.py
+++ b/yt/frontends/gadget/data_structures.py
@@ -103,8 +103,6 @@
             field_spec, gadget_field_specs)
         self._ptype_spec = self._setup_binary_spec(
             ptype_spec, gadget_ptype_specs)
-        self.n_ref = n_ref
-        self.over_refine_factor = over_refine_factor
         self.index_ptype = index_ptype
         self.storage_filename = None
         if unit_base is not None and "UnitLength_in_cm" in unit_base:
@@ -123,7 +121,9 @@
         if units_override is not None:
             raise RuntimeError("units_override is not supported for GadgetDataset. "+
                                "Use unit_base instead.")
-        super(GadgetDataset, self).__init__(filename, dataset_type, unit_system=unit_system)
+        super(GadgetDataset, self).__init__(
+            filename, dataset_type=dataset_type, unit_system=unit_system,
+            n_ref=n_ref, over_refine_factor=over_refine_factor)
         if self.cosmological_simulation:
             self.time_unit.convert_to_units('s/h')
             self.length_unit.convert_to_units('kpccm/h')

diff -r edcbca20b45fb4b53575aac05a4e80dce191bea1 -r c6f23e16471a3a133824c0bcd8f927c46487afeb yt/frontends/http_stream/data_structures.py
--- a/yt/frontends/http_stream/data_structures.py
+++ b/yt/frontends/http_stream/data_structures.py
@@ -43,17 +43,15 @@
     filename_template = ""
     
     def __init__(self, base_url,
-                 dataset_type = "http_particle_stream",
-                 n_ref = 64, over_refine_factor=1, 
-                 unit_system="cgs"):
+                 dataset_type = "http_particle_stream", unit_system="cgs",
+                 n_ref=64, over_refine_factor=1):
         if get_requests() is None:
             raise ImportError(
                 "This functionality depends on the requests package")
         self.base_url = base_url
-        self.n_ref = n_ref
-        self.over_refine_factor = over_refine_factor
-        super(HTTPStreamDataset, self).__init__("", dataset_type, 
-                                                unit_system=unit_system)
+        super(HTTPStreamDataset, self).__init__(
+            "", dataset_type=dataset_type, unit_system=unit_system,
+            n_ref=n_ref, over_refine_factor=over_refine_factor)
 
     def __repr__(self):
         return self.base_url

diff -r edcbca20b45fb4b53575aac05a4e80dce191bea1 -r c6f23e16471a3a133824c0bcd8f927c46487afeb yt/frontends/sph/data_structures.py
--- a/yt/frontends/sph/data_structures.py
+++ b/yt/frontends/sph/data_structures.py
@@ -24,11 +24,27 @@
 class ParticleDataset(Dataset):
     _unit_base = None
     filter_bbox = False
-    over_refine_factor = 1
+
+    def __init__(self, filename, dataset_type=None, file_style=None,
+                 units_override=None, unit_system="cgs",
+                 n_ref=64, over_refine_factor=1):
+        self.n_ref = n_ref
+        self.over_refine_factor = over_refine_factor
+        super(ParticleDataset, self).__init__(
+            filename, dataset_type=dataset_type, file_style=file_style,
+            units_override=units_override, unit_system=unit_system)
 
 
 class SPHDataset(ParticleDataset):
-    default_kernel = "cubic"
+    def __init__(self, filename, dataset_type=None, file_style=None,
+                 units_override=None, unit_system="cgs",
+                 n_ref=64, over_refine_factor=1,
+                 default_kernel="cubic"):
+        self.default_kernel = default_kernel
+        super(SPHDataset, self).__init__(
+            filename, dataset_type=dataset_type, file_style=file_style,
+            units_override=units_override, unit_system=unit_system,
+            n_ref=n_ref, over_refine_factor=over_refine_factor)
 
     def add_smoothed_particle_field(self, smooth_field,
                                     method="volume_weighted", nneighbors=64,

diff -r edcbca20b45fb4b53575aac05a4e80dce191bea1 -r c6f23e16471a3a133824c0bcd8f927c46487afeb yt/frontends/tipsy/data_structures.py
--- a/yt/frontends/tipsy/data_structures.py
+++ b/yt/frontends/tipsy/data_structures.py
@@ -82,8 +82,6 @@
         # and domain_right_edge
         self.bounding_box = bounding_box
         self.filter_bbox = (bounding_box is not None)
-        self.n_ref = n_ref
-        self.over_refine_factor = over_refine_factor
         if field_dtypes is None:
             field_dtypes = {}
         success, self.endian = self._validate_header(filename)
@@ -113,8 +111,9 @@
         if units_override is not None:
             raise RuntimeError("units_override is not supported for TipsyDataset. "+
                                "Use unit_base instead.")
-        super(TipsyDataset, self).__init__(filename, dataset_type,
-                                           unit_system=unit_system)
+        super(TipsyDataset, self).__init__(
+            filename, dataset_type=dataset_type, unit_system=unit_system,
+            n_ref=n_ref, over_refine_factor=over_refine_factor)
 
     def __repr__(self):
         return os.path.basename(self.parameter_filename)


https://bitbucket.org/yt_analysis/yt/commits/817a23e546b1/
Changeset:   817a23e546b1
Branch:      yt
User:        qobilidop
Date:        2016-12-15 00:42:14+00:00
Summary:     Make `SDFDataset` a subclass of `ParticleDataset` and some styling tweaks
Affected #:  2 files

diff -r c6f23e16471a3a133824c0bcd8f927c46487afeb -r 817a23e546b1deb4ae2d41cebc09a9ec00a7b477 yt/frontends/http_stream/data_structures.py
--- a/yt/frontends/http_stream/data_structures.py
+++ b/yt/frontends/http_stream/data_structures.py
@@ -43,7 +43,7 @@
     filename_template = ""
     
     def __init__(self, base_url,
-                 dataset_type = "http_particle_stream", unit_system="cgs",
+                 dataset_type="http_particle_stream", unit_system="cgs",
                  n_ref=64, over_refine_factor=1):
         if get_requests() is None:
             raise ImportError(

diff -r c6f23e16471a3a133824c0bcd8f927c46487afeb -r 817a23e546b1deb4ae2d41cebc09a9ec00a7b477 yt/frontends/sdf/data_structures.py
--- a/yt/frontends/sdf/data_structures.py
+++ b/yt/frontends/sdf/data_structures.py
@@ -26,7 +26,9 @@
 from yt.geometry.particle_geometry_handler import \
     ParticleIndex
 from yt.data_objects.static_output import \
-    Dataset, ParticleFile
+    ParticleFile
+from yt.frontends.sph.data_structures import \
+    ParticleDataset
 from yt.funcs import \
     get_requests, \
     setdefaultattr
@@ -53,7 +55,7 @@
 class SDFFile(ParticleFile):
     pass
 
-class SDFDataset(Dataset):
+class SDFDataset(ParticleDataset):
     _index_class = ParticleIndex
     _file_class = SDFFile
     _field_info_class = SDFFieldInfo
@@ -65,18 +67,16 @@
     _subspace = False
 
 
-    def __init__(self, filename, dataset_type = "sdf_particles",
-                 n_ref = 64, over_refine_factor = 1,
-                 bounding_box = None,
-                 sdf_header = None,
-                 midx_filename = None,
-                 midx_header = None,
-                 midx_level = None,
-                 field_map = None,
+    def __init__(self, filename, dataset_type="sdf_particles",
+                 n_ref=64, over_refine_factor=1,
+                 bounding_box=None,
+                 sdf_header=None,
+                 midx_filename=None,
+                 midx_header=None,
+                 midx_level=None,
+                 field_map=None,
                  units_override=None,
                  unit_system="cgs"):
-        self.n_ref = n_ref
-        self.over_refine_factor = over_refine_factor
         if bounding_box is not None:
             self._subspace = True
             bbox = np.array(bounding_box, dtype="float32")
@@ -99,9 +99,10 @@
         if filename.startswith("http"):
             prefix += 'http_'
         dataset_type = prefix + 'sdf_particles'
-        super(SDFDataset, self).__init__(filename, dataset_type,
-                                         units_override=units_override,
-                                         unit_system=unit_system)
+        super(SDFDataset, self).__init__(
+            filename, dataset_type=dataset_type,
+            units_override=units_override, unit_system=unit_system,
+            n_ref=n_ref, over_refine_factor=over_refine_factor)
 
     def _parse_parameter_file(self):
         if self.parameter_filename.startswith("http"):


https://bitbucket.org/yt_analysis/yt/commits/4b74b1808d4e/
Changeset:   4b74b1808d4e
Branch:      yt
User:        qobilidop
Date:        2016-12-15 01:12:20+00:00
Summary:     Move `ParticleDataset` into `yt.data_objects.static_output`
Affected #:  4 files
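
After this move the import paths split as shown below; these two imports match what the diffs in this changeset use, so this is just a compact summary of the new module layout at this point in the series.

from yt.data_objects.static_output import ParticleDataset
from yt.frontends.sph.data_structures import SPHDataset

# SPHDataset still derives from ParticleDataset; only the modules changed.
assert issubclass(SPHDataset, ParticleDataset)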

diff -r 817a23e546b1deb4ae2d41cebc09a9ec00a7b477 -r 4b74b1808d4e5ad6edb7a8da22106c8fb9313c9c yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -1301,3 +1301,17 @@
 
     def __lt__(self, other):
         return self.filename < other.filename
+
+
+class ParticleDataset(Dataset):
+    _unit_base = None
+    filter_bbox = False
+
+    def __init__(self, filename, dataset_type=None, file_style=None,
+                 units_override=None, unit_system="cgs",
+                 n_ref=64, over_refine_factor=1):
+        self.n_ref = n_ref
+        self.over_refine_factor = over_refine_factor
+        super(ParticleDataset, self).__init__(
+            filename, dataset_type=dataset_type, file_style=file_style,
+            units_override=units_override, unit_system=unit_system)

diff -r 817a23e546b1deb4ae2d41cebc09a9ec00a7b477 -r 4b74b1808d4e5ad6edb7a8da22106c8fb9313c9c yt/frontends/http_stream/data_structures.py
--- a/yt/frontends/http_stream/data_structures.py
+++ b/yt/frontends/http_stream/data_structures.py
@@ -20,9 +20,8 @@
 import time
 
 from yt.data_objects.static_output import \
+    ParticleDataset, \
     ParticleFile
-from yt.frontends.sph.data_structures import \
-    ParticleDataset
 from yt.frontends.sph.fields import \
     SPHFieldInfo
 from yt.funcs import \

diff -r 817a23e546b1deb4ae2d41cebc09a9ec00a7b477 -r 4b74b1808d4e5ad6edb7a8da22106c8fb9313c9c yt/frontends/sdf/data_structures.py
--- a/yt/frontends/sdf/data_structures.py
+++ b/yt/frontends/sdf/data_structures.py
@@ -26,9 +26,8 @@
 from yt.geometry.particle_geometry_handler import \
     ParticleIndex
 from yt.data_objects.static_output import \
+    ParticleDataset, \
     ParticleFile
-from yt.frontends.sph.data_structures import \
-    ParticleDataset
 from yt.funcs import \
     get_requests, \
     setdefaultattr

diff -r 817a23e546b1deb4ae2d41cebc09a9ec00a7b477 -r 4b74b1808d4e5ad6edb7a8da22106c8fb9313c9c yt/frontends/sph/data_structures.py
--- a/yt/frontends/sph/data_structures.py
+++ b/yt/frontends/sph/data_structures.py
@@ -7,34 +7,12 @@
 """
 from __future__ import print_function
 
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
 from yt.data_objects.static_output import \
-    Dataset
+    ParticleDataset
 from yt.fields.particle_fields import \
     add_volume_weighted_smoothed_field
 
 
-class ParticleDataset(Dataset):
-    _unit_base = None
-    filter_bbox = False
-
-    def __init__(self, filename, dataset_type=None, file_style=None,
-                 units_override=None, unit_system="cgs",
-                 n_ref=64, over_refine_factor=1):
-        self.n_ref = n_ref
-        self.over_refine_factor = over_refine_factor
-        super(ParticleDataset, self).__init__(
-            filename, dataset_type=dataset_type, file_style=file_style,
-            units_override=units_override, unit_system=unit_system)
-
-
 class SPHDataset(ParticleDataset):
     def __init__(self, filename, dataset_type=None, file_style=None,
                  units_override=None, unit_system="cgs",


https://bitbucket.org/yt_analysis/yt/commits/d2f82aca53f0/
Changeset:   d2f82aca53f0
Branch:      yt
User:        qobilidop
Date:        2016-12-15 01:20:01+00:00
Summary:     Recover the copyright info

It was deleted accidentally.
Affected #:  1 file

diff -r 4b74b1808d4e5ad6edb7a8da22106c8fb9313c9c -r d2f82aca53f013702b6224a7441ef3228a48d1d3 yt/frontends/sph/data_structures.py
--- a/yt/frontends/sph/data_structures.py
+++ b/yt/frontends/sph/data_structures.py
@@ -7,6 +7,14 @@
 """
 from __future__ import print_function
 
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
 from yt.data_objects.static_output import \
     ParticleDataset
 from yt.fields.particle_fields import \


https://bitbucket.org/yt_analysis/yt/commits/4e7367415e60/
Changeset:   4e7367415e60
Branch:      yt
User:        qobilidop
Date:        2016-12-15 01:51:46+00:00
Summary:     Refactor implementation of the default kernel name functionality

First, there is a class attribute `default_kernel_name`. During instantiation, if the `kernel_name` argument is not set, `default_kernel_name` is used for the instance's `kernel_name` attribute. Then, in the instance's `add_smoothed_particle_field` method, if the `kernel_name` argument is not set, the instance's `kernel_name` attribute is used.
Affected #:  1 file
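
In other words (a minimal sketch with simplified class names, not the real yt classes), the fallback chain is: per-call `kernel_name`, then the instance's `kernel_name`, then the class-level `default_kernel_name`.

class SPHDatasetSketch(object):
    default_kernel_name = "cubic"

    def __init__(self, kernel_name=None):
        # Instantiation: fall back to the class attribute.
        if kernel_name is None:
            self.kernel_name = self.default_kernel_name
        else:
            self.kernel_name = kernel_name

    def add_smoothed_particle_field(self, smooth_field, kernel_name=None):
        # Method call: fall back to the instance attribute.
        if kernel_name is None:
            kernel_name = self.kernel_name
        return kernel_name

ds = SPHDatasetSketch()
print(ds.add_smoothed_particle_field(("gas", "temperature")))   # cubic
ds2 = SPHDatasetSketch(kernel_name="wendland4")
print(ds2.add_smoothed_particle_field(("gas", "temperature")))  # wendland4
print(ds2.add_smoothed_particle_field(("gas", "temperature"),
                                      kernel_name="quintic"))   # quintic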

diff -r d2f82aca53f013702b6224a7441ef3228a48d1d3 -r 4e7367415e60a894f59bcef5725509b7ab38f9fe yt/frontends/sph/data_structures.py
--- a/yt/frontends/sph/data_structures.py
+++ b/yt/frontends/sph/data_structures.py
@@ -22,11 +22,16 @@
 
 
 class SPHDataset(ParticleDataset):
+    default_kernel_name = "cubic"
+
     def __init__(self, filename, dataset_type=None, file_style=None,
                  units_override=None, unit_system="cgs",
                  n_ref=64, over_refine_factor=1,
-                 default_kernel="cubic"):
-        self.default_kernel = default_kernel
+                 kernel_name=None):
+        if kernel_name is None:
+            self.kernel_name = self.default_kernel_name
+        else:
+            self.kernel_name = kernel_name
         super(SPHDataset, self).__init__(
             filename, dataset_type=dataset_type, file_style=file_style,
             units_override=units_override, unit_system=unit_system,
@@ -55,7 +60,7 @@
             This is the name of the smoothing kernel to use. Current supported
             kernel names include `cubic`, `quartic`, `quintic`, `wendland2`,
             `wendland4`, and `wendland6`. If left as None,
-            :attr:`~yt.frontends.sph.data_structures.ParticleDataset.default_kernel`
+            :attr:`~yt.frontends.sph.data_structures.SPHDataset.kernel_name`
             will be used.
 
         Returns
@@ -75,7 +80,7 @@
         if method != "volume_weighted":
             raise NotImplementedError("method must be 'volume_weighted'")
         if kernel_name is None:
-            kernel_name = self.default_kernel
+            kernel_name = self.kernel_name
 
         # Prepare field names and registry to be used later
         coord_name = "particle_position"


https://bitbucket.org/yt_analysis/yt/commits/eadae145e294/
Changeset:   eadae145e294
Branch:      yt
User:        qobilidop
Date:        2016-12-15 02:00:02+00:00
Summary:     Make `kernel_name` settable during loading for all the subclasses of `SPHDataset`
Affected #:  2 files
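
A hedged usage sketch (the snapshot path is a placeholder): after this change the kernel can be chosen at load time and is then picked up by `add_smoothed_particle_field` without passing it again.

import yt

# Placeholder Gadget snapshot; any SPHDataset subclass now accepts kernel_name.
ds = yt.load("snapshot_033/snap_033.0.hdf5", kernel_name="wendland4")
fname = ds.add_smoothed_particle_field(("PartType0", "Temperature"))
print(fname)  # field name tuple under the "deposit" field type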

diff -r 4e7367415e60a894f59bcef5725509b7ab38f9fe -r eadae145e294d248def9e8e971fcefac996e50e4 yt/frontends/gadget/data_structures.py
--- a/yt/frontends/gadget/data_structures.py
+++ b/yt/frontends/gadget/data_structures.py
@@ -89,6 +89,7 @@
                  additional_fields=(),
                  unit_base=None, n_ref=64,
                  over_refine_factor=1,
+                 kernel_name=None,
                  index_ptype="all",
                  bounding_box = None,
                  header_spec = "default",
@@ -123,7 +124,8 @@
                                "Use unit_base instead.")
         super(GadgetDataset, self).__init__(
             filename, dataset_type=dataset_type, unit_system=unit_system,
-            n_ref=n_ref, over_refine_factor=over_refine_factor)
+            n_ref=n_ref, over_refine_factor=over_refine_factor,
+            kernel_name=kernel_name)
         if self.cosmological_simulation:
             self.time_unit.convert_to_units('s/h')
             self.length_unit.convert_to_units('kpccm/h')
@@ -362,6 +364,7 @@
     def __init__(self, filename, dataset_type="gadget_hdf5",
                  unit_base = None, n_ref=64,
                  over_refine_factor=1,
+                 kernel_name=None,
                  index_ptype="all",
                  bounding_box = None,
                  units_override=None,
@@ -374,7 +377,8 @@
         super(GadgetHDF5Dataset, self).__init__(
             filename, dataset_type, unit_base=unit_base, n_ref=n_ref,
             over_refine_factor=over_refine_factor, index_ptype=index_ptype,
-            bounding_box = bounding_box, unit_system=unit_system)
+            kernel_name=kernel_name, bounding_box=bounding_box,
+            unit_system=unit_system)
 
     def _get_hvals(self):
         handle = h5py.File(self.parameter_filename, mode="r")

diff -r 4e7367415e60a894f59bcef5725509b7ab38f9fe -r eadae145e294d248def9e8e971fcefac996e50e4 yt/frontends/tipsy/data_structures.py
--- a/yt/frontends/tipsy/data_structures.py
+++ b/yt/frontends/tipsy/data_structures.py
@@ -74,6 +74,7 @@
                  parameter_file=None,
                  cosmology_parameters=None,
                  n_ref=64, over_refine_factor=1,
+                 kernel_name=None,
                  bounding_box=None,
                  units_override=None,
                  unit_system="cgs"):
@@ -113,7 +114,8 @@
                                "Use unit_base instead.")
         super(TipsyDataset, self).__init__(
             filename, dataset_type=dataset_type, unit_system=unit_system,
-            n_ref=n_ref, over_refine_factor=over_refine_factor)
+            n_ref=n_ref, over_refine_factor=over_refine_factor,
+            kernel_name=kernel_name)
 
     def __repr__(self):
         return os.path.basename(self.parameter_filename)


https://bitbucket.org/yt_analysis/yt/commits/dcfa4d3f885f/
Changeset:   dcfa4d3f885f
Branch:      yt
User:        MatthewTurk
Date:        2016-12-15 21:55:42+00:00
Summary:     Merged in qobilidop/yt (pull request #2474)

Refactor particle dataset
Affected #:  6 files

diff -r ad6d4f04b990edfeae34479a220c6629df074070 -r dcfa4d3f885f3a2c907acb32431c47cff2c6b561 yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -51,8 +51,6 @@
     ValidateSpatial
 from yt.fields.fluid_fields import \
     setup_gradient_fields
-from yt.fields.particle_fields import \
-    add_volume_weighted_smoothed_field
 from yt.data_objects.particle_filters import \
     filter_registry
 from yt.data_objects.particle_unions import \
@@ -1223,56 +1221,6 @@
             validators=[ValidateSpatial()])
         return ("deposit", field_name)
 
-    def add_smoothed_particle_field(self, smooth_field, method="volume_weighted",
-                                    nneighbors=64, kernel_name="cubic"):
-        """Add a new smoothed particle field
-
-        Creates a new smoothed field based on the particle *smooth_field*.
-
-        Parameters
-        ----------
-
-        smooth_field : tuple
-           The field name tuple of the particle field the smoothed field will
-           be created from.  This must be a field name tuple so yt can
-           appropriately infer the correct particle type.
-        method : string, default 'volume_weighted'
-           The particle smoothing method to use. Can only be 'volume_weighted'
-           for now.
-        nneighbors : int, default 64
-            The number of neighbors to examine during the process.
-        kernel_name : string, default 'cubic'
-            This is the name of the smoothing kernel to use. Current supported
-            kernel names include `cubic`, `quartic`, `quintic`, `wendland2`,
-            `wendland4`, and `wendland6`.
-
-        Returns
-        -------
-
-        The field name tuple for the newly created field.
-        """
-        self.index
-        if isinstance(smooth_field, tuple):
-            ptype, smooth_field = smooth_field[0], smooth_field[1]
-        else:
-            raise RuntimeError("smooth_field must be a tuple, received %s" %
-                               smooth_field)
-        if method != "volume_weighted":
-            raise NotImplementedError("method must be 'volume_weighted'")
-
-        coord_name = "particle_position"
-        mass_name = "particle_mass"
-        smoothing_length_name = "smoothing_length"
-        if (ptype, smoothing_length_name) not in self.derived_field_list:
-            raise ValueError("%s not in derived_field_list" %
-                             ((ptype, smoothing_length_name),))
-        density_name = "density"
-        registry = self.field_info
-
-        return add_volume_weighted_smoothed_field(ptype, coord_name, mass_name,
-                   smoothing_length_name, density_name, smooth_field, registry,
-                   nneighbors=nneighbors, kernel_name=kernel_name)[0]
-
     def add_gradient_fields(self, input_field):
         """Add gradient fields.
 
@@ -1353,3 +1301,17 @@
 
     def __lt__(self, other):
         return self.filename < other.filename
+
+
+class ParticleDataset(Dataset):
+    _unit_base = None
+    filter_bbox = False
+
+    def __init__(self, filename, dataset_type=None, file_style=None,
+                 units_override=None, unit_system="cgs",
+                 n_ref=64, over_refine_factor=1):
+        self.n_ref = n_ref
+        self.over_refine_factor = over_refine_factor
+        super(ParticleDataset, self).__init__(
+            filename, dataset_type=dataset_type, file_style=file_style,
+            units_override=units_override, unit_system=unit_system)

diff -r ad6d4f04b990edfeae34479a220c6629df074070 -r dcfa4d3f885f3a2c907acb32431c47cff2c6b561 yt/frontends/gadget/data_structures.py
--- a/yt/frontends/gadget/data_structures.py
+++ b/yt/frontends/gadget/data_structures.py
@@ -26,7 +26,7 @@
 from yt.data_objects.static_output import \
     ParticleFile
 from yt.frontends.sph.data_structures import \
-    ParticleDataset
+    SPHDataset
 from yt.geometry.particle_geometry_handler import \
     ParticleIndex
 from yt.utilities.cosmology import \
@@ -76,7 +76,7 @@
             field_list, self.total_particles,
             self._position_offset, self._file_size)
 
-class GadgetDataset(ParticleDataset):
+class GadgetDataset(SPHDataset):
     _index_class = ParticleIndex
     _file_class = GadgetBinaryFile
     _field_info_class = GadgetFieldInfo
@@ -89,6 +89,7 @@
                  additional_fields=(),
                  unit_base=None, n_ref=64,
                  over_refine_factor=1,
+                 kernel_name=None,
                  index_ptype="all",
                  bounding_box = None,
                  header_spec = "default",
@@ -103,8 +104,6 @@
             field_spec, gadget_field_specs)
         self._ptype_spec = self._setup_binary_spec(
             ptype_spec, gadget_ptype_specs)
-        self.n_ref = n_ref
-        self.over_refine_factor = over_refine_factor
         self.index_ptype = index_ptype
         self.storage_filename = None
         if unit_base is not None and "UnitLength_in_cm" in unit_base:
@@ -123,7 +122,10 @@
         if units_override is not None:
             raise RuntimeError("units_override is not supported for GadgetDataset. "+
                                "Use unit_base instead.")
-        super(GadgetDataset, self).__init__(filename, dataset_type, unit_system=unit_system)
+        super(GadgetDataset, self).__init__(
+            filename, dataset_type=dataset_type, unit_system=unit_system,
+            n_ref=n_ref, over_refine_factor=over_refine_factor,
+            kernel_name=kernel_name)
         if self.cosmological_simulation:
             self.time_unit.convert_to_units('s/h')
             self.length_unit.convert_to_units('kpccm/h')
@@ -362,6 +364,7 @@
     def __init__(self, filename, dataset_type="gadget_hdf5",
                  unit_base = None, n_ref=64,
                  over_refine_factor=1,
+                 kernel_name=None,
                  index_ptype="all",
                  bounding_box = None,
                  units_override=None,
@@ -374,7 +377,8 @@
         super(GadgetHDF5Dataset, self).__init__(
             filename, dataset_type, unit_base=unit_base, n_ref=n_ref,
             over_refine_factor=over_refine_factor, index_ptype=index_ptype,
-            bounding_box = bounding_box, unit_system=unit_system)
+            kernel_name=kernel_name, bounding_box=bounding_box,
+            unit_system=unit_system)
 
     def _get_hvals(self):
         handle = h5py.File(self.parameter_filename, mode="r")

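With kernel_name now accepted by GadgetDataset and GadgetHDF5Dataset and forwarded to SPHDataset, an alternative smoothing kernel can be selected at load time. A rough usage sketch; the snapshot path is a placeholder:

    import yt

    # kernel_name is passed through to SPHDataset.__init__; when it is
    # omitted, SPHDataset falls back to default_kernel_name ("cubic").
    ds = yt.load("snapshot_033", kernel_name="wendland4")
    print(ds.kernel_name)  # "wendland4"
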
diff -r ad6d4f04b990edfeae34479a220c6629df074070 -r dcfa4d3f885f3a2c907acb32431c47cff2c6b561 yt/frontends/http_stream/data_structures.py
--- a/yt/frontends/http_stream/data_structures.py
+++ b/yt/frontends/http_stream/data_structures.py
@@ -20,9 +20,8 @@
 import time
 
 from yt.data_objects.static_output import \
+    ParticleDataset, \
     ParticleFile
-from yt.frontends.sph.data_structures import \
-    ParticleDataset
 from yt.frontends.sph.fields import \
     SPHFieldInfo
 from yt.funcs import \
@@ -43,17 +42,15 @@
     filename_template = ""
     
     def __init__(self, base_url,
-                 dataset_type = "http_particle_stream",
-                 n_ref = 64, over_refine_factor=1, 
-                 unit_system="cgs"):
+                 dataset_type="http_particle_stream", unit_system="cgs",
+                 n_ref=64, over_refine_factor=1):
         if get_requests() is None:
             raise ImportError(
                 "This functionality depends on the requests package")
         self.base_url = base_url
-        self.n_ref = n_ref
-        self.over_refine_factor = over_refine_factor
-        super(HTTPStreamDataset, self).__init__("", dataset_type, 
-                                                unit_system=unit_system)
+        super(HTTPStreamDataset, self).__init__(
+            "", dataset_type=dataset_type, unit_system=unit_system,
+            n_ref=n_ref, over_refine_factor=over_refine_factor)
 
     def __repr__(self):
         return self.base_url

diff -r ad6d4f04b990edfeae34479a220c6629df074070 -r dcfa4d3f885f3a2c907acb32431c47cff2c6b561 yt/frontends/sdf/data_structures.py
--- a/yt/frontends/sdf/data_structures.py
+++ b/yt/frontends/sdf/data_structures.py
@@ -26,7 +26,8 @@
 from yt.geometry.particle_geometry_handler import \
     ParticleIndex
 from yt.data_objects.static_output import \
-    Dataset, ParticleFile
+    ParticleDataset, \
+    ParticleFile
 from yt.funcs import \
     get_requests, \
     setdefaultattr
@@ -53,7 +54,7 @@
 class SDFFile(ParticleFile):
     pass
 
-class SDFDataset(Dataset):
+class SDFDataset(ParticleDataset):
     _index_class = ParticleIndex
     _file_class = SDFFile
     _field_info_class = SDFFieldInfo
@@ -65,18 +66,16 @@
     _subspace = False
 
 
-    def __init__(self, filename, dataset_type = "sdf_particles",
-                 n_ref = 64, over_refine_factor = 1,
-                 bounding_box = None,
-                 sdf_header = None,
-                 midx_filename = None,
-                 midx_header = None,
-                 midx_level = None,
-                 field_map = None,
+    def __init__(self, filename, dataset_type="sdf_particles",
+                 n_ref=64, over_refine_factor=1,
+                 bounding_box=None,
+                 sdf_header=None,
+                 midx_filename=None,
+                 midx_header=None,
+                 midx_level=None,
+                 field_map=None,
                  units_override=None,
                  unit_system="cgs"):
-        self.n_ref = n_ref
-        self.over_refine_factor = over_refine_factor
         if bounding_box is not None:
             self._subspace = True
             bbox = np.array(bounding_box, dtype="float32")
@@ -99,9 +98,10 @@
         if filename.startswith("http"):
             prefix += 'http_'
         dataset_type = prefix + 'sdf_particles'
-        super(SDFDataset, self).__init__(filename, dataset_type,
-                                         units_override=units_override,
-                                         unit_system=unit_system)
+        super(SDFDataset, self).__init__(
+            filename, dataset_type=dataset_type,
+            units_override=units_override, unit_system=unit_system,
+            n_ref=n_ref, over_refine_factor=over_refine_factor)
 
     def _parse_parameter_file(self):
         if self.parameter_filename.startswith("http"):

diff -r ad6d4f04b990edfeae34479a220c6629df074070 -r dcfa4d3f885f3a2c907acb32431c47cff2c6b561 yt/frontends/sph/data_structures.py
--- a/yt/frontends/sph/data_structures.py
+++ b/yt/frontends/sph/data_structures.py
@@ -16,9 +16,83 @@
 #-----------------------------------------------------------------------------
 
 from yt.data_objects.static_output import \
-    Dataset
+    ParticleDataset
+from yt.fields.particle_fields import \
+    add_volume_weighted_smoothed_field
 
-class ParticleDataset(Dataset):
-    _unit_base = None
-    over_refine_factor = 1
-    filter_bbox = False
+
+class SPHDataset(ParticleDataset):
+    default_kernel_name = "cubic"
+
+    def __init__(self, filename, dataset_type=None, file_style=None,
+                 units_override=None, unit_system="cgs",
+                 n_ref=64, over_refine_factor=1,
+                 kernel_name=None):
+        if kernel_name is None:
+            self.kernel_name = self.default_kernel_name
+        else:
+            self.kernel_name = kernel_name
+        super(SPHDataset, self).__init__(
+            filename, dataset_type=dataset_type, file_style=file_style,
+            units_override=units_override, unit_system=unit_system,
+            n_ref=n_ref, over_refine_factor=over_refine_factor)
+
+    def add_smoothed_particle_field(self, smooth_field,
+                                    method="volume_weighted", nneighbors=64,
+                                    kernel_name=None):
+        """Add a new smoothed particle field
+
+        Creates a new smoothed field based on the particle *smooth_field*.
+
+        Parameters
+        ----------
+
+        smooth_field : tuple
+           The field name tuple of the particle field the smoothed field will
+           be created from.  This must be a field name tuple so yt can
+           appropriately infer the correct particle type.
+        method : string, default 'volume_weighted'
+           The particle smoothing method to use. Can only be 'volume_weighted'
+           for now.
+        nneighbors : int, default 64
+            The number of neighbors to examine during the process.
+        kernel_name : string or None, default None
+            This is the name of the smoothing kernel to use. Current supported
+            kernel names include `cubic`, `quartic`, `quintic`, `wendland2`,
+            `wendland4`, and `wendland6`. If left as None,
+            :attr:`~yt.frontends.sph.data_structures.SPHDataset.kernel_name`
+            will be used.
+
+        Returns
+        -------
+
+        The field name tuple for the newly created field.
+        """
+        # The magical step
+        self.index
+
+        # Parse arguments
+        if isinstance(smooth_field, tuple):
+            ptype, smooth_field = smooth_field[0], smooth_field[1]
+        else:
+            raise RuntimeError("smooth_field must be a tuple, received %s" %
+                               smooth_field)
+        if method != "volume_weighted":
+            raise NotImplementedError("method must be 'volume_weighted'")
+        if kernel_name is None:
+            kernel_name = self.kernel_name
+
+        # Prepare field names and registry to be used later
+        coord_name = "particle_position"
+        mass_name = "particle_mass"
+        smoothing_length_name = "smoothing_length"
+        if (ptype, smoothing_length_name) not in self.derived_field_list:
+            raise ValueError("%s not in derived_field_list" %
+                             ((ptype, smoothing_length_name),))
+        density_name = "density"
+        registry = self.field_info
+
+        # Do the actual work
+        return add_volume_weighted_smoothed_field(ptype, coord_name, mass_name,
+                   smoothing_length_name, density_name, smooth_field, registry,
+                   nneighbors=nneighbors, kernel_name=kernel_name)[0]

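Putting the pieces together, calling the new SPHDataset.add_smoothed_particle_field would look roughly like the following. The snapshot path and the ("PartType0", "Metallicity") field are placeholders, and the corresponding (ptype, "smoothing_length") field must already be in derived_field_list:

    import yt

    ds = yt.load("snapshot_033.0.hdf5")  # any SPHDataset subclass (Gadget, Tipsy, ...)
    # Falls back to ds.kernel_name when kernel_name is not given explicitly.
    fname = ds.add_smoothed_particle_field(
        ("PartType0", "Metallicity"),
        method="volume_weighted", nneighbors=64, kernel_name="quintic")
    ad = ds.all_data()
    print(ad[fname])

The return value is the ("deposit", ...) field name tuple, so it can be used directly for plotting or in further derived fields.
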
diff -r ad6d4f04b990edfeae34479a220c6629df074070 -r dcfa4d3f885f3a2c907acb32431c47cff2c6b561 yt/frontends/tipsy/data_structures.py
--- a/yt/frontends/tipsy/data_structures.py
+++ b/yt/frontends/tipsy/data_structures.py
@@ -22,7 +22,7 @@
 import os
 
 from yt.frontends.sph.data_structures import \
-    ParticleDataset
+    SPHDataset
 from yt.funcs import deprecate
 from yt.geometry.particle_geometry_handler import \
     ParticleIndex
@@ -54,7 +54,7 @@
     def _calculate_offsets(self, field_list):
         self.field_offsets = self.io._calculate_particle_offsets(self)
 
-class TipsyDataset(ParticleDataset):
+class TipsyDataset(SPHDataset):
     _index_class = ParticleIndex
     _file_class = TipsyFile
     _field_info_class = TipsyFieldInfo
@@ -74,6 +74,7 @@
                  parameter_file=None,
                  cosmology_parameters=None,
                  n_ref=64, over_refine_factor=1,
+                 kernel_name=None,
                  bounding_box=None,
                  units_override=None,
                  unit_system="cgs"):
@@ -82,8 +83,6 @@
         # and domain_right_edge
         self.bounding_box = bounding_box
         self.filter_bbox = (bounding_box is not None)
-        self.n_ref = n_ref
-        self.over_refine_factor = over_refine_factor
         if field_dtypes is None:
             field_dtypes = {}
         success, self.endian = self._validate_header(filename)
@@ -113,8 +112,10 @@
         if units_override is not None:
             raise RuntimeError("units_override is not supported for TipsyDataset. "+
                                "Use unit_base instead.")
-        super(TipsyDataset, self).__init__(filename, dataset_type,
-                                           unit_system=unit_system)
+        super(TipsyDataset, self).__init__(
+            filename, dataset_type=dataset_type, unit_system=unit_system,
+            n_ref=n_ref, over_refine_factor=over_refine_factor,
+            kernel_name=kernel_name)
 
     def __repr__(self):
         return os.path.basename(self.parameter_filename)

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.


