[yt-svn] commit/yt: 4 new changesets

commits-noreply at bitbucket.org
Mon Apr 21 06:47:20 PDT 2014


4 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/9764e62b8135/
Changeset:   9764e62b8135
Branch:      yt-3.0
User:        brittonsmith
Date:        2014-04-17 18:36:52
Summary:     Adding blank line to make docs render.
Affected #:  1 file

diff -r 1552a88fee6e41b4a8cd7ce0b6fe857428e201e9 -r 9764e62b81357ceec9d2816539e80e31e850bf6e doc/source/analyzing/analysis_modules/halo_analysis.rst
--- a/doc/source/analyzing/analysis_modules/halo_analysis.rst
+++ b/doc/source/analyzing/analysis_modules/halo_analysis.rst
@@ -6,6 +6,7 @@
 
 .. toctree::
    :maxdepth: 1
+
    halo_catalogs
    halo_finding
    halo_mass_function
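
Note: Sphinx requires a blank line between a directive's options (here :maxdepth:) and its content entries; without one, the entries are read as part of the option block and the toctree fails to render, which is what this fix addresses.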


https://bitbucket.org/yt_analysis/yt/commits/57804d7b8b3a/
Changeset:   57804d7b8b3a
Branch:      yt-3.0
User:        brittonsmith
Date:        2014-04-19 14:41:22
Summary:     Merging.
Affected #:  7 files

diff -r f1032b474045bcc6f5cbe14350d306d4cc09e1cd -r 57804d7b8b3a3f9b24b25201c61e6cc0da39f896 doc/source/analyzing/analysis_modules/halo_analysis.rst
--- a/doc/source/analyzing/analysis_modules/halo_analysis.rst
+++ b/doc/source/analyzing/analysis_modules/halo_analysis.rst
@@ -6,6 +6,7 @@
 
 .. toctree::
    :maxdepth: 1
+
    halo_catalogs
    halo_finding
    halo_mass_function

diff -r f1032b474045bcc6f5cbe14350d306d4cc09e1cd -r 57804d7b8b3a3f9b24b25201c61e6cc0da39f896 yt/analysis_modules/halo_analysis/api.py
--- a/yt/analysis_modules/halo_analysis/api.py
+++ b/yt/analysis_modules/halo_analysis/api.py
@@ -20,6 +20,9 @@
 from .halo_callbacks import \
      add_callback
 
+from .halo_finding_methods import \
+     add_finding_method
+
 from .halo_filters import \
      add_filter
      

diff -r f1032b474045bcc6f5cbe14350d306d4cc09e1cd -r 57804d7b8b3a3f9b24b25201c61e6cc0da39f896 yt/analysis_modules/halo_analysis/finding_methods.py
--- a/yt/analysis_modules/halo_analysis/finding_methods.py
+++ /dev/null
@@ -1,20 +0,0 @@
-"""
-Halo Finding methods
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from .operator_registry import \
-    hf_registry
-
-class HaloFindingMethod(object):
-    pass

diff -r f1032b474045bcc6f5cbe14350d306d4cc09e1cd -r 57804d7b8b3a3f9b24b25201c61e6cc0da39f896 yt/analysis_modules/halo_analysis/halo_catalog.py
--- a/yt/analysis_modules/halo_analysis/halo_catalog.py
+++ b/yt/analysis_modules/halo_analysis/halo_catalog.py
@@ -30,20 +30,9 @@
 from .operator_registry import \
      callback_registry, \
      filter_registry, \
-     hf_registry, \
+     finding_method_registry, \
      quantity_registry
 
-from yt.analysis_modules.halo_finding.halo_objects import \
-    FOFHaloFinder, HOPHaloFinder
-from yt.frontends.halo_catalogs.halo_catalog.data_structures import \
-    HaloCatalogDataset
-from yt.frontends.stream.data_structures import \
-    load_particles
-from yt.frontends.halo_catalogs.rockstar.data_structures import \
-    RockstarDataset
-from yt.analysis_modules.halo_finding.rockstar.api import \
-    RockstarHaloFinder
-
 class HaloCatalog(ParallelAnalysisInterface):
     r"""Create a HaloCatalog: an object that allows for the creation and association
     of data with a set of halo objects.
@@ -103,7 +92,7 @@
 
     See Also
     --------
-    add_callback, add_filter, add_quantity
+    add_callback, add_filter, add_finding_method, add_quantity
     
     """
     
@@ -113,7 +102,6 @@
         ParallelAnalysisInterface.__init__(self)
         self.halos_pf = halos_pf
         self.data_pf = data_pf
-        self.finder_method = finder_method
         self.output_dir = ensure_dir(output_dir)
         if os.path.basename(self.output_dir) != ".":
             self.output_prefix = os.path.basename(self.output_dir)
@@ -133,6 +121,10 @@
                 data_source = data_pf.h.all_data()
         self.data_source = data_source
 
+        if finder_method is not None:
+            finder_method = finding_method_registry.find(finder_method)
+        self.finder_method = finder_method            
+        
         # all of the analysis actions to be performed: callbacks, filters, and quantities
         self.actions = []
         # fields to be written to the halo catalog
@@ -358,16 +350,14 @@
 
         if self.halos_pf is None:
             # Find the halos and make a dataset of them
-            particles_pf = self.find_halos()
+            self.halos_pf = self.finder_method(self.data_pf)
 
             # Assign pf and data sources appropriately
-            self.halos_pf = particles_pf
-            self.data_source = particles_pf.all_data()
+            self.data_source = self.halos_pf.all_data()
 
             # Add all of the default quantities that all halos must have
             self.add_default_quantities('all')
 
-
         my_index = np.argsort(self.data_source["particle_identifier"])
         for i in parallel_objects(my_index, njobs=njobs, dynamic=dynamic):
             new_halo = Halo(self)
@@ -400,80 +390,6 @@
         if save_catalog:
             self.save_catalog()
 
-    def find_halos(self):
-
-        finder_method = (self.finder_method).lower()
-
-        if finder_method == "hop":
-            halo_list = HOPHaloFinder(self.data_pf)
-            halos_pf = self._parse_old_halo_list(halo_list)
-
-        elif finder_method == "fof":
-            halo_list = FOFHaloFinder(self.data_pf)
-            halos_pf = self._parse_old_halo_list(halo_list)
-            
-        elif finder_method == 'rockstar':
-            rh = RockstarHaloFinder(self.data_pf, 
-                outbase='{0}/rockstar_halos'.format(self.output_prefix))
-            rh.run()
-            halos_pf = RockstarDataset('{0}/rockstar_halos/halos_0.0.bin'.format(self.output_prefix))
-            halos_pf.create_field_info()
-        else:
-            raise RuntimeError("finder_method must be 'fof', 'hop', or 'rockstar'")
-
-        for attr in ["current_redshift", "current_time",
-                     "domain_dimensions",
-                     "cosmological_simulation", "omega_lambda",
-                     "omega_matter", "hubble_constant"]:
-            attr_val = getattr(self.data_pf, attr)
-            setattr(halos_pf, attr, attr_val)
-        halos_pf.current_time = halos_pf.current_time.in_cgs()
-
-        return halos_pf
-
-    def _parse_old_halo_list(self, halo_list):
-
-
-        data_pf = self.data_pf
-        num_halos = len(halo_list)
-
-        # Set up fields that we want to pull from identified halos and their units
-        new_fields = ['particle_identifier', 'particle_mass', 'particle_position_x', 
-            'particle_position_y','particle_position_z',
-            'virial_radius']
-        new_units = [ '', 'g', 'cm', 'cm','cm','cm']
-
-        # Set up a dictionary based on those fields 
-        # with empty arrays where we will fill in their values
-        halo_properties = { f : (np.zeros(num_halos),unit) \
-            for f, unit in zip(new_fields,new_units)}
-
-        # Iterate through the halos pulling out their positions and virial quantities
-        # and filling in the properties dictionary
-        for i,halo in enumerate(halo_list):
-            halo_properties['particle_identifier'][0][i] = i
-            halo_properties['particle_mass'][0][i] = halo.virial_mass().in_cgs()
-            halo_properties['virial_radius'][0][i] = halo.virial_radius().in_cgs()
-
-            com = halo.center_of_mass().in_cgs()
-            halo_properties['particle_position_x'][0][i] = com[0]
-            halo_properties['particle_position_y'][0][i] = com[1]
-            halo_properties['particle_position_z'][0][i] = com[2]
-
-        # Define a bounding box based on original data pf
-        bbox = np.array([data_pf.domain_left_edge.in_cgs(),
-                data_pf.domain_right_edge.in_cgs()]).T
-
-        # Create a pf with the halos as particles
-        particle_pf = load_particles(halo_properties, 
-                bbox=bbox, length_unit = 1, mass_unit=1)
-
-        # Create the field info dictionary so we can reference those fields
-        particle_pf.create_field_info()
-
-        return particle_pf
-
-
     def save_catalog(self):
         "Write out hdf5 file with all halo quantities."
 
@@ -513,4 +429,3 @@
         self.add_quantity("particle_position_z", field_type=field_type)
         self.add_quantity("virial_radius", field_type=field_type)
 
-
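
With finder_method now resolved through finding_method_registry, halo finding is selected by name at construction time. A minimal usage sketch, assuming HaloCatalog is exported through the module's api (the dataset path is hypothetical):

    import yt
    from yt.analysis_modules.halo_analysis.api import HaloCatalog

    data_pf = yt.load("enzo_tiny_cosmology/DD0046/DD0046")  # hypothetical path
    hc = HaloCatalog(data_pf=data_pf, finder_method="fof")
    # quantity_value is the filter registered in halo_filters.py
    hc.add_filter("quantity_value", "particle_mass", ">", 1e13, "Msun")
    hc.create()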

diff -r f1032b474045bcc6f5cbe14350d306d4cc09e1cd -r 57804d7b8b3a3f9b24b25201c61e6cc0da39f896 yt/analysis_modules/halo_analysis/halo_filters.py
--- a/yt/analysis_modules/halo_analysis/halo_filters.py
+++ b/yt/analysis_modules/halo_analysis/halo_filters.py
@@ -13,6 +13,10 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
+import numpy as np
+
+from yt.utilities.spatial import KDTree
+
 from .halo_callbacks import HaloCallback
 from .operator_registry import filter_registry
 
@@ -58,3 +62,45 @@
     return eval("%s %s %s" % (h_value, operator, value))
 
 add_filter("quantity_value", quantity_value)
+
+def _not_subhalo(halo, field_type="halos"):
+    """
+    Only return true if this halo is not a subhalo.
+    
+    This is used for halo finders such as Rockstar that output parent
+    and subhalos together.
+    """
+
+    if not hasattr(halo.halo_catalog, "parent_dict"):
+        setattr(halo.halo_catalog, "parent_dict",
+                create_parent_dict(halo.halo_catalog.data_source,
+                                   ptype=field_type))
+    return halo.halo_catalog.parent_dict[int(halo.quantities["particle_identifier"])] == -1
+add_filter("not_subhalo", _not_subhalo)
+
+def create_parent_dict(data_source, ptype="halos"):
+    """
+    Create a dictionary of halo parents to allow for filtering of subhalos.
+
+    For a pair of halos whose distance is smaller than the radius of at least 
+    one of the halos, the parent is defined as the halo with the larger radius.
+    Parent halos (halos with no parents of their own) have parent index values of -1.
+    """
+    pos = np.rollaxis(
+        np.array([data_source[ptype, "particle_position_x"].in_units("Mpc"),
+                  data_source[ptype, "particle_position_y"].in_units("Mpc"),
+                  data_source[ptype, "particle_position_z"].in_units("Mpc")]), 1)
+    rad = data_source[ptype, "virial_radius"].in_units("Mpc").to_ndarray()
+    ids = data_source[ptype, "particle_identifier"].to_ndarray().astype("int")
+    parents = -1 * np.ones_like(ids, dtype="int")
+    my_tree = KDTree(pos)
+
+    for i in xrange(ids.size):
+        neighbors = np.array(
+            my_tree.query_ball_point(pos[i], rad[i], p=2))
+        if neighbors.size > 1:
+            parents[neighbors] = ids[neighbors[np.argmax(rad[neighbors])]]
+
+    parents[ids == parents] = -1
+    parent_dict = dict(zip(ids, parents))
+    return parent_dict
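
The rule in create_parent_dict — a halo lying within another halo's radius gets the larger halo as its parent — can be checked on toy data. A sketch, substituting SciPy's cKDTree for yt.utilities.spatial (an assumption; query_ball_point behaves equivalently here):

    import numpy as np
    from scipy.spatial import cKDTree

    pos = np.array([[0.0, 0.0, 0.0],    # large halo
                    [0.1, 0.0, 0.0],    # small halo inside the large one
                    [5.0, 5.0, 5.0]])   # isolated halo
    rad = np.array([1.0, 0.2, 0.5])
    ids = np.arange(pos.shape[0])

    parents = -1 * np.ones_like(ids)
    tree = cKDTree(pos)
    for i in range(ids.size):
        # all halos within rad[i] of halo i, including i itself
        neighbors = np.array(tree.query_ball_point(pos[i], rad[i], p=2))
        if neighbors.size > 1:
            parents[neighbors] = ids[neighbors[np.argmax(rad[neighbors])]]
    parents[ids == parents] = -1
    print(dict(zip(ids, parents)))   # {0: -1, 1: 0, 2: -1}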

diff -r f1032b474045bcc6f5cbe14350d306d4cc09e1cd -r 57804d7b8b3a3f9b24b25201c61e6cc0da39f896 yt/analysis_modules/halo_analysis/halo_finding_methods.py
--- /dev/null
+++ b/yt/analysis_modules/halo_analysis/halo_finding_methods.py
@@ -0,0 +1,132 @@
+"""
+Halo Finding methods
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+
+from yt.analysis_modules.halo_finding.halo_objects import \
+    FOFHaloFinder, HOPHaloFinder
+from yt.frontends.halo_catalogs.halo_catalog.data_structures import \
+    HaloCatalogDataset
+from yt.frontends.stream.data_structures import \
+    load_particles
+
+from .operator_registry import \
+    finding_method_registry
+
+def add_finding_method(name, function):
+    finding_method_registry[name] = HaloFindingMethod(function)
+    
+class HaloFindingMethod(object):
+    r"""
+    A halo finding method is a callback that performs halo finding on a 
+    dataset and returns a new dataset that is the loaded halo finder output.
+    """
+    def __init__(self, function, args=None, kwargs=None):
+        self.function = function
+        self.args = args
+        if self.args is None: self.args = []
+        self.kwargs = kwargs
+        if self.kwargs is None: self.kwargs = {}
+
+    def __call__(self, ds):
+        return self.function(ds, *self.args, **self.kwargs)
+
+def _hop_method(pf):
+    r"""
+    Run the Hop halo finding method.
+    """
+    
+    halo_list = HOPHaloFinder(pf)
+    halos_pf = _parse_old_halo_list(pf, halo_list)
+    return halos_pf
+add_finding_method("hop", _hop_method)
+
+def _fof_method(pf):
+    r"""
+    Run the FoF halo finding method.
+    """
+
+    halo_list = FOFHaloFinder(pf)
+    halos_pf = _parse_old_halo_list(pf, halo_list)
+    return halos_pf
+add_finding_method("fof", _fof_method)
+
+def _rockstar_method(pf):
+    r"""
+    Run the Rockstar halo finding method.
+    """
+
+    from yt.frontends.halo_catalogs.rockstar.data_structures import \
+     RockstarDataset
+    from yt.analysis_modules.halo_finding.rockstar.api import \
+     RockstarHaloFinder
+    
+    rh = RockstarHaloFinder(pf)
+    rh.run()
+    halos_pf = RockstarDataset("rockstar_halos/halos_0.0.bin")
+    halos_pf.create_field_info()
+    return halos_pf
+add_finding_method("rockstar", _rockstar_method)
+
+def _parse_old_halo_list(data_pf, halo_list):
+    r"""
+    Convert the halo list into a loaded dataset.
+    """
+
+    num_halos = len(halo_list)
+
+    # Set up fields that we want to pull from identified halos and their units
+    new_fields = ['particle_identifier', 'particle_mass', 'particle_position_x', 
+        'particle_position_y','particle_position_z',
+        'virial_radius']
+    new_units = [ '', 'g', 'cm', 'cm','cm','cm']
+
+    # Set up a dictionary based on those fields 
+    # with empty arrays where we will fill in their values
+    halo_properties = { f : (np.zeros(num_halos),unit) \
+        for f, unit in zip(new_fields,new_units)}
+
+    # Iterate through the halos pulling out their positions and virial quantities
+    # and filling in the properties dictionary
+    for i,halo in enumerate(halo_list):
+        halo_properties['particle_identifier'][0][i] = i
+        halo_properties['particle_mass'][0][i] = halo.virial_mass().in_cgs()
+        halo_properties['virial_radius'][0][i] = halo.virial_radius().in_cgs()
+
+        com = halo.center_of_mass().in_cgs()
+        halo_properties['particle_position_x'][0][i] = com[0]
+        halo_properties['particle_position_y'][0][i] = com[1]
+        halo_properties['particle_position_z'][0][i] = com[2]
+
+    # Define a bounding box based on original data pf
+    bbox = np.array([data_pf.domain_left_edge.in_cgs(),
+            data_pf.domain_right_edge.in_cgs()]).T
+
+    # Create a pf with the halos as particles
+    particle_pf = load_particles(halo_properties, 
+            bbox=bbox, length_unit = 1, mass_unit=1)
+
+    # Create the field info dictionary so we can reference those fields
+    particle_pf.create_field_info()
+
+    for attr in ["current_redshift", "current_time",
+                 "domain_dimensions",
+                 "cosmological_simulation", "omega_lambda",
+                 "omega_matter", "hubble_constant"]:
+        attr_val = getattr(data_pf, attr)
+        setattr(particle_pf, attr, attr_val)
+    particle_pf.current_time = particle_pf.current_time.in_cgs()
+    
+    return particle_pf
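
Since a finding method is just a callable taking a dataset and returning a loaded halo dataset, new finders can be registered alongside hop/fof/rockstar. A sketch wrapping FOFHaloFinder with a shorter linking length (the link keyword is assumed from the finder's signature; _parse_old_halo_list is the module-private helper above):

    from yt.analysis_modules.halo_analysis.halo_finding_methods import \
        add_finding_method, _parse_old_halo_list
    from yt.analysis_modules.halo_finding.halo_objects import FOFHaloFinder

    def _fof_short_link(pf):
        # FoF with a tighter linking length than the default
        halo_list = FOFHaloFinder(pf, link=0.1)
        return _parse_old_halo_list(pf, halo_list)
    add_finding_method("fof_short", _fof_short_link)

    # HaloCatalog(data_pf=pf, finder_method="fof_short") will now resolve
    # it via finding_method_registry.find().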

diff -r f1032b474045bcc6f5cbe14350d306d4cc09e1cd -r 57804d7b8b3a3f9b24b25201c61e6cc0da39f896 yt/analysis_modules/halo_analysis/operator_registry.py
--- a/yt/analysis_modules/halo_analysis/operator_registry.py
+++ b/yt/analysis_modules/halo_analysis/operator_registry.py
@@ -27,5 +27,5 @@
 
 callback_registry = OperatorRegistry()
 filter_registry = OperatorRegistry()
-hf_registry = OperatorRegistry()
+finding_method_registry = OperatorRegistry()
 quantity_registry = OperatorRegistry()


https://bitbucket.org/yt_analysis/yt/commits/fbfe6843e42d/
Changeset:   fbfe6843e42d
Branch:      yt-3.0
User:        brittonsmith
Date:        2014-04-19 14:43:48
Summary:     Merging.
Affected #:  15 files

diff -r 57804d7b8b3a3f9b24b25201c61e6cc0da39f896 -r fbfe6843e42d81494db808ae11541d6daa5b5684 yt/__init__.py
--- a/yt/__init__.py
+++ b/yt/__init__.py
@@ -154,6 +154,9 @@
 from yt.convenience import \
     load, simulation
 
+from yt.testing import \
+    run_nose
+
 # Import some helpful math utilities
 from yt.utilities.math_utils import \
     ortho_find, quartiles, periodic_position
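
This re-export makes the test entry point available at the top level, e.g. (keyword arguments omitted, since run_nose's signature is not shown here):

    import yt
    yt.run_nose()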

diff -r 57804d7b8b3a3f9b24b25201c61e6cc0da39f896 -r fbfe6843e42d81494db808ae11541d6daa5b5684 yt/analysis_modules/halo_analysis/halo_filters.py
--- a/yt/analysis_modules/halo_analysis/halo_filters.py
+++ b/yt/analysis_modules/halo_analysis/halo_filters.py
@@ -72,9 +72,8 @@
     """
 
     if not hasattr(halo.halo_catalog, "parent_dict"):
-        setattr(halo.halo_catalog, "parent_dict",
-                create_parent_dict(halo.halo_catalog.data_source,
-                                   ptype=field_type))
+        halo.halo_catalog.parent_dict = \
+          create_parent_dict(halo.halo_catalog.data_source, ptype=field_type)
     return halo.halo_catalog.parent_dict[int(halo.quantities["particle_identifier"])] == -1
 add_filter("not_subhalo", _not_subhalo)
 

diff -r 57804d7b8b3a3f9b24b25201c61e6cc0da39f896 -r fbfe6843e42d81494db808ae11541d6daa5b5684 yt/analysis_modules/halo_finding/halo_objects.py
--- a/yt/analysis_modules/halo_finding/halo_objects.py
+++ b/yt/analysis_modules/halo_finding/halo_objects.py
@@ -110,7 +110,9 @@
         if self._name == "RockstarHalo":
             ds = self.pf.sphere(self.CoM, self._radjust * self.max_radius)
         elif self._name == "LoadedHalo":
-            ds = self.pf.sphere(self.CoM, self._radjust * self.max_radius)
+            ds = self.pf.sphere(self.CoM, np.maximum(self._radjust * \
+	    self.pf.quan(self.max_radius, 'code_length'), \
+	    self.pf.index.get_smallest_dx()))
         sp_pid = ds['particle_index']
         self._ds_sort = sp_pid.argsort()
         sp_pid = sp_pid[self._ds_sort]
@@ -217,7 +219,7 @@
         vx = (self["particle_velocity_x"] * pm).sum()
         vy = (self["particle_velocity_y"] * pm).sum()
         vz = (self["particle_velocity_z"] * pm).sum()
-        return np.array([vx, vy, vz]) / pm.sum()
+        return self.pf.arr([vx, vy, vz], vx.units) / pm.sum()
 
     def rms_velocity(self):
         r"""Returns the mass-weighted RMS velocity for the halo
@@ -331,9 +333,11 @@
         handle.create_group("/%s" % gn)
         for field in ["particle_position_%s" % ax for ax in 'xyz'] \
                    + ["particle_velocity_%s" % ax for ax in 'xyz'] \
-                   + ["particle_index"] + ["particle_mass"].in_units('Msun'):
+                   + ["particle_index"]:
             handle.create_dataset("/%s/%s" % (gn, field), data=self[field])
-        if 'creation_time' in self.data.pf.field_list:
+	handle.create_dataset("/%s/particle_mass" % gn,
+		data=self["particle_mass"].in_units('Msun'))
+        if ('io','creation_time') in self.data.pf.field_list:
             handle.create_dataset("/%s/creation_time" % gn,
                 data=self['creation_time'])
         n = handle["/%s" % gn]
@@ -848,6 +852,7 @@
         self._saved_fields = {}
         self._ds_sort = None
         self._particle_mask = None
+	self._pid_sort = None
 
 
     def __getitem__(self, key):
@@ -865,14 +870,28 @@
             self.size, key)
         if field_data is not None:
             if key == 'particle_index':
-                field_data = field_data[field_data.argsort()]
+                #this is an index for turning data sorted by particle index 
+		#into the same order as the fields on disk
+		self._pid_sort = field_data.argsort().argsort()
+	    #convert to YTArray using the data from disk
+	    if key == 'particle_mass':
+		field_data = self.pf.arr(field_data, 'Msun')
+	    else:
+	        field_data = self.pf.arr(field_data, 
+		    self.pf._get_field_info('unknown',key).units)
             self._saved_fields[key] = field_data
             return self._saved_fields[key]
         # We won't store this field below in saved_fields because
         # that would mean keeping two copies of it, one in the yt
         # machinery and one here.
-        ds = self.pf.sphere(self.CoM, 1.05 * self.max_radius)
-        return np.take(ds[key][self._ds_sort], self.particle_mask)
+        ds = self.pf.sphere(self.CoM, np.maximum(self._radjust * \
+	    self.pf.quan(self.max_radius, 'code_length'), \
+	    self.pf.index.get_smallest_dx()))
+	# If particle_mask hasn't been called once then _ds_sort won't have
+	# the proper values set yet
+        if self._particle_mask is None:
+	    self.particle_mask
+        return ds[key][self._ds_sort][self.particle_mask][self._pid_sort]
 
     def _get_particle_data(self, halo, fnames, size, field):
         # Given a list of file names, a halo, its size, and the desired field,
@@ -1087,10 +1106,10 @@
         gc.collect()
 
     def _get_dm_indices(self):
-        if 'creation_time' in self._data_source.index.field_list:
+        if ('io','creation_time') in self._data_source.index.field_list:
             mylog.debug("Differentiating based on creation time")
             return (self._data_source["creation_time"] <= 0)
-        elif 'particle_type' in self._data_source.index.field_list:
+        elif ('io','particle_type') in self._data_source.index.field_list:
             mylog.debug("Differentiating based on particle type")
             return (self._data_source["particle_type"] == 1)
         else:
@@ -2141,7 +2160,7 @@
         elif fancy_padding and self._distributed:
             LE_padding = np.empty(3, dtype='float64')
             RE_padding = np.empty(3, dtype='float64')
-            avg_spacing = (float(vol) / data.size) ** (1. / 3.)
+            avg_spacing = (vol / data.size) ** (1. / 3.)
             base_padding = (self.num_neighbors) ** (1. / 3.) * self.safety * \
                 avg_spacing
             for dim in xrange(3):
@@ -2388,7 +2407,7 @@
                 total_mass = \
                     self.comm.mpi_allreduce((self._data_source['all', "particle_mass"][select].in_units('Msun')).sum(dtype='float64'), op='sum')
             else:
-                total_mass = self.comm.mpi_allreduce(self._data_source.quantities["TotalQuantity"]("particle_mass")[0].in_units('Msun'), op='sum')
+                total_mass = self.comm.mpi_allreduce(self._data_source.quantities["TotalQuantity"]("particle_mass").in_units('Msun'), op='sum')
         # MJT: Note that instead of this, if we are assuming that the particles
         # are all on different processors, we should instead construct an
         # object representing the entire domain and sum it "lazily" with
@@ -2412,7 +2431,7 @@
             sub_mass = self._data_source["particle_mass"][select].in_units('Msun').sum(dtype='float64')
         else:
             sub_mass = \
-                self._data_source.quantities["TotalQuantity"]("particle_mass")[0].in_units('Msun')
+                self._data_source.quantities["TotalQuantity"]("particle_mass").in_units('Msun')
         HOPHaloList.__init__(self, self._data_source,
             threshold * total_mass / sub_mass, dm_only)
         self._parse_halolist(total_mass / sub_mass)
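
An aside on the double argsort in __getitem__ above: field_data.argsort().argsort() gives each particle's rank, i.e. the permutation that maps index-sorted data back to on-disk order. A quick demonstration:

    import numpy as np

    pid = np.array([42, 7, 19])      # particle indices in on-disk order
    rank = pid.argsort().argsort()   # [2, 0, 1]: rank of each pid
    sorted_by_pid = np.sort(pid)     # data ordered by particle index
    print(sorted_by_pid[rank])       # [42  7 19]: disk order recovered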

diff -r 57804d7b8b3a3f9b24b25201c61e6cc0da39f896 -r fbfe6843e42d81494db808ae11541d6daa5b5684 yt/analysis_modules/halo_finding/parallel_hop/parallel_hop_interface.py
--- a/yt/analysis_modules/halo_finding/parallel_hop/parallel_hop_interface.py
+++ b/yt/analysis_modules/halo_finding/parallel_hop/parallel_hop_interface.py
@@ -53,7 +53,7 @@
         self.zpos = particle_fields.pop("particle_position_z")
         self.real_size = len(self.xpos)
         self.index = particle_fields.pop("particle_index")
-        self.mass = particle_fields.pop("ParticleMassMsun")
+        self.mass = particle_fields.pop("particle_mass")
         self.padded_particles = []
         self.nMerge = 4
         self.tree = tree

diff -r 57804d7b8b3a3f9b24b25201c61e6cc0da39f896 -r fbfe6843e42d81494db808ae11541d6daa5b5684 yt/data_objects/construction_data_containers.py
--- a/yt/data_objects/construction_data_containers.py
+++ b/yt/data_objects/construction_data_containers.py
@@ -478,9 +478,14 @@
         return tuple(self.ActiveDimensions.tolist())
 
     def _setup_data_source(self):
-        self._data_source = self.pf.region(self.center,
-            self.left_edge - self.base_dds,
-            self.right_edge + self.base_dds)
+        LE = self.left_edge - self.base_dds
+        RE = self.right_edge + self.base_dds
+        if not all(self.pf.periodicity):
+            for i in range(3):
+                if self.pf.periodicity[i]: continue
+                LE[i] = max(LE[i], self.pf.domain_left_edge[i])
+                RE[i] = min(RE[i], self.pf.domain_right_edge[i])
+        self._data_source = self.pf.region(self.center, LE, RE)
         self._data_source.min_level = 0
         self._data_source.max_level = self.level
         self._pdata_source = self.pf.region(self.center,

diff -r 57804d7b8b3a3f9b24b25201c61e6cc0da39f896 -r fbfe6843e42d81494db808ae11541d6daa5b5684 yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -624,7 +624,7 @@
         self.unit_registry.modify("code_length", self.length_unit)
         self.unit_registry.modify("code_mass", self.mass_unit)
         self.unit_registry.modify("code_time", self.time_unit)
-        vel_unit = getattr(self, "code_velocity",
+        vel_unit = getattr(self, "velocity_unit",
                     self.length_unit / self.time_unit)
         self.unit_registry.modify("code_velocity", vel_unit)
         # domain_width does not yet exist

diff -r 57804d7b8b3a3f9b24b25201c61e6cc0da39f896 -r fbfe6843e42d81494db808ae11541d6daa5b5684 yt/frontends/_skeleton/api.py
--- a/yt/frontends/_skeleton/api.py
+++ b/yt/frontends/_skeleton/api.py
@@ -20,7 +20,7 @@
 
 from .fields import \
       SkeletonFieldInfo, \
-      add_flash_field
+      add_skeleton_field
 
 from .io import \
       IOHandlerSkeleton

diff -r 57804d7b8b3a3f9b24b25201c61e6cc0da39f896 -r fbfe6843e42d81494db808ae11541d6daa5b5684 yt/frontends/_skeleton/data_structures.py
--- a/yt/frontends/_skeleton/data_structures.py
+++ b/yt/frontends/_skeleton/data_structures.py
@@ -13,36 +13,30 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import h5py
-import stat
 import numpy as np
-import weakref
 
-from yt.funcs import *
 from yt.data_objects.grid_patch import \
     AMRGridPatch
-from yt.data_objects.index import \
-    AMRHierarchy
+from yt.data_objects.grid_patch import \
+    AMRGridPatch
+from yt.geometry.grid_geometry_handler import \
+    GridIndex
 from yt.data_objects.static_output import \
     Dataset
-from yt.utilities.definitions import \
-    mpc_conversion, sec_conversion
-from yt.utilities.io_handler import \
-    io_registry
-from yt.utilities.physical_constants import cm_per_mpc
-from .fields import SkeletonFieldInfo, add_flash_field, KnownSkeletonFields
-from yt.fields.field_info_container import \
-    FieldInfoContainer, NullFunc, ValidateDataField, TranslationFunc
+from yt.utilities.lib.misc_utilities import \
+    get_box_grids_level
 
 class SkeletonGrid(AMRGridPatch):
     _id_offset = 0
-    #__slots__ = ["_level_id", "stop_index"]
-    def __init__(self, id, index, level):
-        AMRGridPatch.__init__(self, id, filename = index.index_filename,
-                              index = index)
-        self.Parent = None
+    def __init__(self, id, index, level, start, dimensions):
+        AMRGridPatch.__init__(self, id, filename=index.index_filename,
+                              index=index)
+        self.Parent = []
         self.Children = []
         self.Level = level
+        self.start_index = start.copy()
+        self.stop_index = self.start_index + dimensions
+        self.ActiveDimensions = dimensions.copy()
 
     def __repr__(self):
         return "SkeletonGrid_%04i (%s)" % (self.id, self.ActiveDimensions)
@@ -50,7 +44,6 @@
 class SkeletonHierarchy(AMRHierarchy):
 
     grid = SkeletonGrid
-    float_type = np.float64
     
     def __init__(self, pf, dataset_type='skeleton'):
         self.dataset_type = dataset_type
@@ -66,6 +59,10 @@
     def _detect_output_fields(self):
         # This needs to set a self.field_list that contains all the available,
         # on-disk fields.
+        # NOTE: Each should be a tuple, where the first element is the on-disk
+        # fluid type or particle type.  Convention suggests that the on-disk
+        # fluid type is usually the dataset_type and the on-disk particle type
+        # (for a single population of particles) is "io".
         pass
     
     def _count_grids(self):
@@ -96,30 +93,34 @@
 
 class SkeletonDataset(Dataset):
     _index_class = SkeletonHierarchy
-    _fieldinfo_fallback = SkeletonFieldInfo
-    _fieldinfo_known = KnownSkeletonFields
-    _handle = None
+    _field_info_class = SkeletonFieldInfo
     
-    def __init__(self, filename, dataset_type='skeleton',
-                 storage_filename = None,
-                 conversion_override = None):
-
-        if conversion_override is None: conversion_override = {}
-        self._conversion_override = conversion_override
-
+    def __init__(self, filename, dataset_type='skeleton'):
+        self.fluid_types += ('skeleton',)
         Dataset.__init__(self, filename, dataset_type)
         self.storage_filename = storage_filename
 
-    def _set_units(self):
-        # This needs to set up the dictionaries that convert from code units to
-        # CGS.  The needed items are listed in the second entry:
-        #   self.time_units         <= sec_conversion
-        #   self.conversion_factors <= mpc_conversion
-        #   self.units              <= On-disk fields
+    def _set_code_unit_attributes(self):
+        # This is where quantities are created that represent the various
+        # on-disk units.  These are the currently available quantities which
+        # should be set, along with examples of how to set them to standard
+        # values.
+        #
+        # self.length_unit = self.quan(1.0, "cm")
+        # self.mass_unit = self.quan(1.0, "g")
+        # self.time_unit = self.quan(1.0, "s")
+        # self.time_unit = self.quan(1.0, "s")
+        #
+        # These can also be set:
+        # self.velocity_unit = self.quan(1.0, "cm/s")
+        # self.magnetic_unit = self.quan(1.0, "gauss")
         pass
 
     def _parse_parameter_file(self):
-        # This needs to set up the following items:
+        # This needs to set up the following items.  Note that these are all
+        # assumed to be in code units; domain_left_edge and domain_right_edge
+        # will be updated to be in code units at a later time.  This includes
+        # the cosmological parameters.
         #
         #   self.unique_identifier
         #   self.parameters             <= full of code-specific items of use

diff -r 57804d7b8b3a3f9b24b25201c61e6cc0da39f896 -r fbfe6843e42d81494db808ae11541d6daa5b5684 yt/frontends/_skeleton/definitions.py
--- a/yt/frontends/_skeleton/definitions.py
+++ b/yt/frontends/_skeleton/definitions.py
@@ -0,0 +1,1 @@
+# This file is often empty.  It can hold definitions related to a frontend.

diff -r 57804d7b8b3a3f9b24b25201c61e6cc0da39f896 -r fbfe6843e42d81494db808ae11541d6daa5b5684 yt/frontends/_skeleton/fields.py
--- a/yt/frontends/_skeleton/fields.py
+++ b/yt/frontends/_skeleton/fields.py
@@ -13,79 +13,35 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
+import numpy as np
+from yt.funcs import mylog
 from yt.fields.field_info_container import \
-    FieldInfoContainer, \
-    NullFunc, \
-    TranslationFunc, \
-    FieldInfo, \
-    ValidateParameter, \
-    ValidateDataField, \
-    ValidateProperty, \
-    ValidateSpatial, \
-    ValidateGridType
-from yt.utilities.physical_constants import \
-    kboltz
+    FieldInfoContainer
 
-# The first field container is where any fields that exist on disk go, along
-# with their conversion factors, display names, etc.
+# We need to specify which fields we might have in our dataset.  The field info
+# container subclass here will define which fields it knows about.  There are
+# optionally methods on it that get called which can be subclassed.
 
-KnownSkeletonFields = FieldInfoContainer()
-add_skeleton_field = KnownSkeletonFields.add_field
+class SkeletonFieldInfo(FieldInfoContainer):
+    known_other_fields = (
+        # Each entry here is of the form
+        # ( "name", ("units", ["fields", "to", "alias"], # "display_name")),
+    )
 
-SkeletonFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
-add_field = SkeletonFieldInfo.add_field
+    known_particle_fields = (
+        # Identical form to above
+        # ( "name", ("units", ["fields", "to", "alias"], # "display_name")),
+    )
 
-# Often, we want to translate between fields on disk and fields in yt.  This
-# construct shows how to do that.  Note that we use TranslationFunc.
+    def __init__(self, pf):
+        super(SkeletonFieldInfo, self).__init__(pf)
+        # If you want, you can check self.field_list
 
-translation_dict = {"x-velocity": "velx",
-                    "y-velocity": "vely",
-                    "z-velocity": "velz",
-                    "Density": "dens",
-                    "Temperature": "temp",
-                    "Pressure" : "pres", 
-                    "Grav_Potential" : "gpot",
-                    "particle_position_x" : "particle_posx",
-                    "particle_position_y" : "particle_posy",
-                    "particle_position_z" : "particle_posz",
-                    "particle_velocity_x" : "particle_velx",
-                    "particle_velocity_y" : "particle_vely",
-                    "particle_velocity_z" : "particle_velz",
-                    "particle_index" : "particle_tag",
-                    "Electron_Fraction" : "elec",
-                    "HI_Fraction" : "h   ",
-                    "HD_Fraction" : "hd  ",
-                    "HeI_Fraction": "hel ",
-                    "HeII_Fraction": "hep ",
-                    "HeIII_Fraction": "hepp",
-                    "HM_Fraction": "hmin",
-                    "HII_Fraction": "hp  ",
-                    "H2I_Fraction": "htwo",
-                    "H2II_Fraction": "htwp",
-                    "DI_Fraction": "deut",
-                    "DII_Fraction": "dplu",
-                    "ParticleMass": "particle_mass",
-                    "Flame_Fraction": "flam"}
+    def setup_fluid_fields(self):
+        # Here we do anything that might need info about the parameter file.
+        # You can use self.alias, self.add_output_field and self.add_field .
+        pass
 
-for f,v in translation_dict.items():
-    if v not in KnownSkeletonFields:
-        pfield = v.startswith("particle")
-        add_skeleton_field(v, function=NullFunc, take_log=False,
-                  validators = [ValidateDataField(v)],
-                  particle_type = pfield)
-    if f.endswith("_Fraction") :
-        dname = "%s\/Fraction" % f.split("_")[0]
-    else :
-        dname = f                    
-    ff = KnownSkeletonFields[v]
-    pfield = f.startswith("particle")
-    add_field(f, TranslationFunc(v),
-              take_log=KnownSkeletonFields[v].take_log,
-              units = ff.units, display_name=dname,
-              particle_type = pfield)
-
-# Here's an example of adding a new field:
-
-add_skeleton_field("dens", function=NullFunc, take_log=True,
-                convert_function=_get_convert("dens"),
-                units=r"g / cm**3")
+    def setup_particle_fields(self, ptype):
+        # This will get called for every particle type.
+        pass
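
For a concrete (illustrative, not from this commit) filling of those tuples, a frontend whose on-disk density field is called "dens" might declare:

    from yt.fields.field_info_container import FieldInfoContainer

    class MyCodeFieldInfo(FieldInfoContainer):
        known_other_fields = (
            # on-disk name, (units, [aliases], display_name)
            ("dens", ("g/cm**3", ["density"], None)),
        )
        known_particle_fields = (
            ("particle_mass", ("g", [], None)),
        )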

diff -r 57804d7b8b3a3f9b24b25201c61e6cc0da39f896 -r fbfe6843e42d81494db808ae11541d6daa5b5684 yt/frontends/_skeleton/io.py
--- a/yt/frontends/_skeleton/io.py
+++ b/yt/frontends/_skeleton/io.py
@@ -23,12 +23,31 @@
     _particle_reader = False
     _dataset_type = "skeleton"
 
-    def _read_data(self, grid, field):
-        # This must return the array, of size/shape grid.ActiveDimensions, that
-        # corresponds to 'field'.
+    def _read_particle_coords(self, chunks, ptf):
+        # This needs to *yield* a series of tuples of (ptype, (x, y, z)).
+        # chunks is a list of chunks, and ptf is a dict where the keys are
+        # ptypes and the values are lists of fields.
         pass
 
-    def _read_data_slice(self, grid, field, axis, coord):
-        # If this is not implemented, the IO handler will just slice a
-        # _read_data item.
+    def _read_particle_fields(self, chunks, ptf, selector):
+        # This gets called after the arrays have been allocated.  It needs to
+        # yield ((ptype, field), data) where data is the masked results of
+        # reading ptype, field and applying the selector to the data read in.
+        # Selector objects have a .select_points(x,y,z) that returns a mask, so
+        # you need to do your masking here.
         pass
+
+
+    def _read_fluid_selection(self, chunks, selector, fields, size):
+        # This needs to allocate a set of arrays inside a dictionary, where the
+        # keys are the (ftype, fname) tuples and the values are arrays that
+        # have been masked using whatever selector method is appropriate.  The
+        # dict gets returned at the end and it should be flat, with selected
+        # data.  Note that if you're reading grid data, you might need to
+        # special-case a grid selector object.
+        pass
+
+    def _read_chunk_data(self, chunk, fields):
+        # This reads the data from a single chunk, and is only used for
+        # caching.
+        pass
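
A hedged sketch of the _read_fluid_selection contract for a frontend that keeps whole-grid arrays in memory (self._grid_arrays is hypothetical; g.select filling only selector-matched cells follows the grid-patch interface):

    import numpy as np

    def _read_fluid_selection(self, chunks, selector, fields, size):
        chunks = list(chunks)   # may be a generator; we loop over it per field
        rv = dict((field, np.empty(size, dtype="float64")) for field in fields)
        for field in fields:
            ftype, fname = field
            ind = 0
            for chunk in chunks:
                for g in chunk.objs:
                    data = self._grid_arrays[g.id][fname]  # hypothetical cache
                    ind += g.select(selector, data, rv[field], ind)
        return rv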

diff -r 57804d7b8b3a3f9b24b25201c61e6cc0da39f896 -r fbfe6843e42d81494db808ae11541d6daa5b5684 yt/frontends/boxlib/fields.py
--- a/yt/frontends/boxlib/fields.py
+++ b/yt/frontends/boxlib/fields.py
@@ -243,12 +243,17 @@
                 self.add_field(name = ("gas", "%s_density" % nice_name),
                                function = func,
                                units = "g/cm**3")
-                # We know this will either have one letter, or two.
-                if field[3] in string.letters:
-                    element, weight = field[2:4], field[4:-1]
-                else:
-                    element, weight = field[2:3], field[3:-1]
-                weight = int(weight)
+                # Most of the time our species will be of the form
+                # element name + atomic weight (e.g. C12), but
+                # sometimes we make up descriptive names (e.g. ash)
+                if any(char.isdigit() for char in field):
+                    # We know this will either have one letter, or two.
+                    if field[3] in string.letters:
+                        element, weight = field[2:4], field[4:-1]
+                    else:
+                        element, weight = field[2:3], field[3:-1]
+                    weight = int(weight)
+
                 # Here we can, later, add number density.
             if field.startswith("omegadot("):
                 nice_name = field[9:-1]
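
The species-name rule above can be exercised on its own: field names of the form "X(He4)" split into element and weight, while purely descriptive names pass through (names here are illustrative, and string.letters matches the Python 2 idiom used in the diff):

    import string

    def parse_species(field):
        if not any(char.isdigit() for char in field):
            return None                      # descriptive name, e.g. "X(ash)"
        if field[3] in string.letters:       # two-letter element, e.g. He
            element, weight = field[2:4], field[4:-1]
        else:                                # one-letter element, e.g. C
            element, weight = field[2:3], field[3:-1]
        return element, int(weight)

    print(parse_species("X(He4)"))   # ('He', 4)
    print(parse_species("X(C12)"))   # ('C', 12)
    print(parse_species("X(ash)"))   # None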

diff -r 57804d7b8b3a3f9b24b25201c61e6cc0da39f896 -r fbfe6843e42d81494db808ae11541d6daa5b5684 yt/units/unit_lookup_table.py
--- a/yt/units/unit_lookup_table.py
+++ b/yt/units/unit_lookup_table.py
@@ -94,6 +94,7 @@
 # Add LaTeX representations for units with trivial representations.
 latex_symbol_lut = {
     "unitary" : "",
+    "dimensionless" : "",
     "code_length" : "\\rm{code}\/\\rm{length}",
     "code_time" : "\\rm{code}\/\\rm{time}",
     "code_mass" : "\\rm{code}\/\\rm{mass}",

diff -r 57804d7b8b3a3f9b24b25201c61e6cc0da39f896 -r fbfe6843e42d81494db808ae11541d6daa5b5684 yt/units/unit_registry.py
--- a/yt/units/unit_registry.py
+++ b/yt/units/unit_registry.py
@@ -47,20 +47,18 @@
 
         # Validate
         if not isinstance(cgs_value, float):
-            raise UnitParseError("cgs_value must be a float, got a %s." \
+            raise UnitParseError("cgs_value must be a float, got a %s."
                                  % type(cgs_value))
-        
+
         validate_dimensions(dimensions)
 
         # Add to symbol lut
         if tex_repr is None:
-            latex_symbol_lut[symbol] = "\\rm{" + symbol + "}"
-        else:
-            latex_symbol_lut[symbol] = tex_repr
+            tex_repr = "\\rm{" + symbol + "}"
+        latex_symbol_lut.setdefault(symbol, tex_repr)
 
         # Add to lut
-        if tex_repr is None: tex_repr = symbol
-        self.lut.update( {symbol: (cgs_value, dimensions)} )
+        self.lut.update({symbol: (cgs_value, dimensions)})
 
     def remove(self, symbol):
         """

diff -r 57804d7b8b3a3f9b24b25201c61e6cc0da39f896 -r fbfe6843e42d81494db808ae11541d6daa5b5684 yt/visualization/plot_container.py
--- a/yt/visualization/plot_container.py
+++ b/yt/visualization/plot_container.py
@@ -251,10 +251,15 @@
         # Left blank to be overriden in subclasses
         pass
 
-    def _switch_pf(self, new_pf):
+    def _switch_pf(self, new_pf, data_source=None):
         ds = self.data_source
         name = ds._type_name
         kwargs = dict((n, getattr(ds, n)) for n in ds._con_args)
+        if data_source is not None:
+            if name != "proj":
+                raise RuntimeError("The data_source keyword argument "
+                                   "is only defined for projections.")
+            kwargs['data_source'] = data_source
         new_ds = getattr(new_pf, name)(**kwargs)
         self.pf = new_pf
         self.data_source = new_ds
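
A usage sketch (names hypothetical; _switch_pf is internal API, typically driven when stepping a plot through a time series):

    from yt.visualization.plot_window import ProjectionPlot

    p = ProjectionPlot(pf, "z", "density")
    sp = new_pf.sphere("c", (10.0, "Mpc"))
    p._switch_pf(new_pf, data_source=sp)   # RuntimeError for non-projections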


https://bitbucket.org/yt_analysis/yt/commits/e194bdae23c0/
Changeset:   e194bdae23c0
Branch:      yt-3.0
User:        brittonsmith
Date:        2014-04-21 10:48:03
Summary:     Merging.
Affected #:  0 files

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.


