[yt-svn] commit/yt: 4 new changesets
commits-noreply at bitbucket.org
Tue Jul 15 09:13:00 PDT 2014
4 new commits in yt:
https://bitbucket.org/yt_analysis/yt/commits/d24a8070639d/
Changeset: d24a8070639d
Branch: yt-3.0
User: jzuhone
Date: 2014-07-11 18:01:00
Summary: Removing analysis modules as per Trello card
Affected #: 16 files
diff -r 14dfbdf28b07dd18e112abafce364d04a4864829 -r d24a8070639df84aa70a4c1b72bb150a56805b5d yt/analysis_modules/api.py
--- a/yt/analysis_modules/api.py
+++ b/yt/analysis_modules/api.py
@@ -16,9 +16,6 @@
from .absorption_spectrum.api import \
AbsorptionSpectrum
-from .coordinate_transformation.api import \
- spherical_regrid
-
from .cosmological_observation.api import \
CosmologySplice, \
LightCone, \
@@ -58,11 +55,6 @@
MergerTreeDotOutput, \
MergerTreeTextOutput
-from .halo_profiler.api import \
- VirialFilter, \
- HaloProfiler, \
- FakeProfile
-
from .level_sets.api import \
identify_contours, \
Clump, \
@@ -80,9 +72,6 @@
recursive_bottom_clumps, \
clump_list_sort
-from .radial_column_density.api import \
- RadialColumnDensity
-
from .spectral_integrator.api import \
add_xray_emissivity_field
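
A note for downstream users: once this changeset lands, scripts that import the deleted names from yt.analysis_modules.api will raise ImportError. A minimal, hypothetical guard (not part of the patch) looks like:

    # Hypothetical sketch: probe for the removed entry points instead of
    # failing at import time. Nothing here is part of this changeset.
    try:
        from yt.analysis_modules.api import HaloProfiler, VirialFilter
    except ImportError:
        HaloProfiler = VirialFilter = None  # removed in yt-3.0
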
diff -r 14dfbdf28b07dd18e112abafce364d04a4864829 -r d24a8070639df84aa70a4c1b72bb150a56805b5d yt/analysis_modules/coordinate_transformation/api.py
--- a/yt/analysis_modules/coordinate_transformation/api.py
+++ /dev/null
@@ -1,17 +0,0 @@
-"""
-API for coordinate_transformation
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from .transforms import \
- spherical_regrid
diff -r 14dfbdf28b07dd18e112abafce364d04a4864829 -r d24a8070639df84aa70a4c1b72bb150a56805b5d yt/analysis_modules/coordinate_transformation/setup.py
--- a/yt/analysis_modules/coordinate_transformation/setup.py
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
-
-import os.path
-
-
-def configuration(parent_package='', top_path=None):
- from numpy.distutils.misc_util import Configuration
- config = Configuration('coordinate_transformation',
- parent_package, top_path)
- config.make_config_py() # installs __config__.py
- #config.make_svn_version_py()
- return config
diff -r 14dfbdf28b07dd18e112abafce364d04a4864829 -r d24a8070639df84aa70a4c1b72bb150a56805b5d yt/analysis_modules/coordinate_transformation/transforms.py
--- a/yt/analysis_modules/coordinate_transformation/transforms.py
+++ /dev/null
@@ -1,117 +0,0 @@
-"""
-Transformations between coordinate systems
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-import numpy as np
-from yt.funcs import *
-
-from yt.utilities.linear_interpolators import \
- TrilinearFieldInterpolator
-
-def spherical_regrid(pf, nr, ntheta, nphi, rmax, fields,
- center=None, smoothed=True):
- """
- This function takes a parameter file (*pf*) along with the *nr*, *ntheta*
- and *nphi* points to generate out to *rmax*, and it grids *fields* onto
- those points and returns a dict. *center* if supplied will be the center,
- otherwise the most dense point will be chosen. *smoothed* governs whether
- regular covering grids or smoothed covering grids will be used.
- """
- mylog.warning("This code may produce some artifacts of interpolation")
- mylog.warning("See yt/extensions/coordinate_transforms.py for plotting information")
- if center is None: center = pf.h.find_max("Density")[1]
- fields = ensure_list(fields)
- r,theta,phi = np.mgrid[0:rmax:nr*1j,
- 0:np.pi:ntheta*1j,
- 0:2*np.pi:nphi*1j]
- new_grid = dict(r=r, theta=theta, phi=phi)
- new_grid['x'] = r*np.sin(theta)*np.cos(phi) + center[0]
- new_grid['y'] = r*np.sin(theta)*np.sin(phi) + center[1]
- new_grid['z'] = r*np.cos(theta) + center[2]
- sphere = pf.sphere(center, rmax)
- return arbitrary_regrid(new_grid, sphere, fields, smoothed)
-
-def arbitrary_regrid(new_grid, data_source, fields, smoothed=True):
- """
- This function accepts a dict of points 'x', 'y' and 'z' and a data source
- from which to interpolate new points, along with a list of fields it needs
- to regrid onto those xyz points. It then returns interpolated points.
- This has not been well-tested other than for regular spherical regridding.
- """
- fields = ensure_list(fields)
- new_grid['handled'] = np.zeros(new_grid['x'].shape, dtype='bool')
- for field in fields:
- new_grid[field] = np.zeros(new_grid['x'].shape, dtype='float64')
- grid_order = np.argsort(data_source.grid_levels[:,0])
- ng = len(data_source._grids)
-
- for i,grid in enumerate(data_source._grids[grid_order][::-1]):
- mylog.info("Regridding grid % 4i / % 4i (%s - %s)", i, ng, grid.id, grid.Level)
- cg = grid.retrieve_ghost_zones(1, fields, smoothed=smoothed)
-
- # makes x0,x1,y0,y1,z0,z1
- bounds = np.concatenate(zip(cg.left_edge, cg.right_edge))
-
-
- # Now we figure out which of our points are inside this grid
- # Note that we're only looking at the grid, not the grid-with-ghost-zones
- point_ind = np.ones(new_grid['handled'].shape, dtype='bool') # everything at first
- for i,ax in enumerate('xyz'): # i = 0,1,2 ; ax = x, y, z
- # &= does a logical_and on the array
- point_ind &= ( ( grid.LeftEdge[i] <= new_grid[ax] )
- & ( new_grid[ax] <= grid.RightEdge[i] ) )
- point_ind &= (new_grid['handled'] == False) # only want unhandled points
-
- # If we don't have any, we can just leave
- if point_ind.sum() == 0: continue
-
- # because of the funky way the interpolator takes points, we have to make a
- # new dict of just the points inside this grid
- point_grid = {'x' : new_grid['x'][point_ind],
- 'y' : new_grid['y'][point_ind],
- 'z' : new_grid['z'][point_ind]}
-
- # Now we know which of the points in new_grid are inside this grid
- for field in fields:
- interpolator = TrilinearFieldInterpolator(
- cg[field],bounds,['x','y','z'])
- new_grid[field][point_ind] = interpolator(point_grid)
-
- new_grid['handled'][point_ind] = True
-
- mylog.info("Finished with %s dangling points",
- new_grid['handled'].size - new_grid['handled'].sum())
-
- return new_grid
-
-"""
-# The following will work to plot through different slices:
-
-import pylab
-for i in range(n_theta):
- print "Doing % 3i / % 3i" % (i, n_theta)
- pylab.clf()
- ax=pylab.subplot(1,1,1, projection="polar", aspect=1.)
- ax.pcolormesh(phi[:,i,:], r[:,i,:],
- np.log10(sph_grid[field][:,i,:]))
- pylab.savefig("polar/latitude_%03i.png" % i)
-
-for i in range(n_phi):
- print "Doing % 3i / % 3i" % (i, n_phi)
- pylab.clf()
- ax=pylab.subplot(1,1,1, projection="polar", aspect=1.)
- ax.pcolormesh(theta[:,:,i], r[:,:,i],
- np.log10(sph_grid[field][:,:,i]))
- pylab.savefig("polar/longitude_%03i.png" % i)
-"""
diff -r 14dfbdf28b07dd18e112abafce364d04a4864829 -r d24a8070639df84aa70a4c1b72bb150a56805b5d yt/analysis_modules/halo_profiler/api.py
--- a/yt/analysis_modules/halo_profiler/api.py
+++ /dev/null
@@ -1,22 +0,0 @@
-"""
-API for halo_profiler
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from .halo_filters import \
- VirialFilter
-
-from .multi_halo_profiler import \
- HaloProfiler, \
- FakeProfile, \
- standard_fields
diff -r 14dfbdf28b07dd18e112abafce364d04a4864829 -r d24a8070639df84aa70a4c1b72bb150a56805b5d yt/analysis_modules/halo_profiler/centering_methods.py
--- a/yt/analysis_modules/halo_profiler/centering_methods.py
+++ /dev/null
@@ -1,107 +0,0 @@
-"""
-HaloProfiler re-centering functions.
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-import numpy as np
-
-from yt.funcs import *
-
-from yt.fields.local_fields import \
- add_field
-
-centering_registry = {}
-
-def add_function(name):
- def wrapper(func):
- centering_registry[name] = func
- return func
- return wrapper
-
-#### Dark Matter Density ####
-
- at add_function("Min_Dark_Matter_Density")
-def find_minimum_dm_density(data):
- ma, maxi, mx, my, mz, mg = data.quantities['MinLocation']('Dark_Matter_Density',
- preload=False)
- return (mx, my, mz)
-
- at add_function("Max_Dark_Matter_Density")
-def find_maximum_dm_density(data):
- ma, maxi, mx, my, mz, mg = data.quantities['MaxLocation']('Dark_Matter_Density',
- preload=False)
- return (mx, my, mz)
-
- at add_function("CoM_Dark_Matter_Density")
-def find_CoM_dm_density(data):
- dc_x, dc_y, dc_z = data.quantities['CenterOfMass'](use_cells=False,
- use_particles=True,
- preload=False)
- return (dc_x, dc_y, dc_z)
-
-#### Gas Density ####
-
- at add_function("Min_Gas_Density")
-def find_minimum_gas_density(data):
- ma, maxi, mx, my, mz, mg = data.quantities['MinLocation']('Density',
- preload=False)
- return (mx, my, mz)
-
- at add_function("Max_Gas_Density")
-def find_maximum_gas_density(data):
- ma, maxi, mx, my, mz, mg = data.quantities['MaxLocation']('Density',
- preload=False)
- return (mx, my, mz)
-
- at add_function("CoM_Gas_Density")
-def find_CoM_gas_density(data):
- dc_x, dc_y, dc_z = data.quantities['CenterOfMass'](use_cells=True,
- use_particles=False,
- preload=False)
- return (dc_x, dc_y, dc_z)
-
-#### Total Density ####
-
- at add_function("Min_Total_Density")
-def find_minimum_total_density(data):
- ma, maxi, mx, my, mz, mg = data.quantities['MinLocation']('Matter_Density',
- preload=False)
- return (mx, my, mz)
-
- at add_function("Max_Total_Density")
-def find_maximum_total_density(data):
- ma, maxi, mx, my, mz, mg = data.quantities['MaxLocation']('Matter_Density',
- preload=False)
- return (mx, my, mz)
-
- at add_function("CoM_Total_Density")
-def find_CoM_total_density(data):
- dc_x, dc_y, dc_z = data.quantities['CenterOfMass'](use_cells=True,
- use_particles=True,
- preload=False)
- return (dc_x, dc_y, dc_z)
-
-#### Temperature ####
-
- at add_function("Min_Temperature")
-def find_minimum_temperature(data):
- ma, mini, mx, my, mz, mg = data.quantities['MinLocation']('Temperature',
- preload=False)
- return (mx, my, mz)
-
- at add_function("Max_Temperature")
-def find_maximum_temperature(data):
- ma, maxi, mx, my, mz, mg = data.quantities['MaxLocation']('Temperature',
- preload=False)
- return (mx, my, mz)
-
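
The module above is a small decorator registry: add_function("Name") stores each centering function in centering_registry, which the HaloProfiler recenter keyword later looks up by name. A hypothetical custom method would be registered the same way (the 'Entropy' field is a placeholder):

    # Sketch, assuming the same module scope as centering_methods.py above:
    @add_function("Max_Entropy")
    def find_maximum_entropy(data):
        ma, maxi, mx, my, mz, mg = data.quantities['MaxLocation']('Entropy',
                                                                  preload=False)
        return (mx, my, mz)
    # centering_registry["Max_Entropy"] now maps to find_maximum_entropy,
    # so HaloProfiler(..., recenter="Max_Entropy") can find it.
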
diff -r 14dfbdf28b07dd18e112abafce364d04a4864829 -r d24a8070639df84aa70a4c1b72bb150a56805b5d yt/analysis_modules/halo_profiler/halo_filters.py
--- a/yt/analysis_modules/halo_profiler/halo_filters.py
+++ /dev/null
@@ -1,153 +0,0 @@
-"""
-Halo filters to be used with the HaloProfiler.
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from copy import deepcopy
-import numpy as np
-
-from yt.funcs import *
-from yt.utilities.physical_constants import TINY
-
-def VirialFilter(profile, overdensity_field='ActualOverdensity',
- virial_overdensity=200., must_be_virialized=True,
- virial_filters=[['TotalMassMsun', '>=','1e14']],
- virial_quantities=['TotalMassMsun', 'RadiusMpc'],
- virial_index=None, use_log=False):
- r"""Filter halos by virial quantities.
-
- Return values are a True or False whether the halo passed the filter,
- along with a dictionary of virial quantities for the fields specified in
- the virial_quantities keyword. Thresholds for virial quantities are
- given with the virial_filters keyword in the following way:
- [field, condition, value].
-
- This is typically used as part of a call to `add_halo_filter`.
-
- Parameters
- ----------
- overdensity_field : string
- The field used for interpolation with the
- specified critical value given with 'virial_overdensity'.
- Default='ActualOverdensity'.
- virial_overdensity : float
- The value used to determine the outer radius of the virialized halo.
- Default: 200.
- must_be_virialized : bool
- If no values in the profile are above the
- value of virial_overdensity, the halo does not pass the filter.
- Default: True.
- virial_filters : array_like
- Conditional filters based on virial quantities
- given in the following way: [field, condition, value].
- Default: [['TotalMassMsun', '>=','1e14']].
- virial_quantities : array_like
- Fields for which interpolated values should
- be calculated and returned. Default: ['TotalMassMsun', 'RadiusMpc'].
- virial_index : array_like
- If given as a list, the index of the radial profile
- which is used for interpolation is placed here. Default: None.
- use_log : bool
- If True, interpolation is done in log space.
- Default: False.
-
- Examples
- --------
- >>> hp.add_halo_filter(HP.VirialFilter, must_be_virialized=True,
- overdensity_field='ActualOverdensity',
- virial_overdensity=200,
- virial_filters=[['TotalMassMsun','>=','1e14']],
- virial_quantities=['TotalMassMsun','RadiusMpc'])
-
- """
-
- fields = deepcopy(virial_quantities)
- if virial_filters is None: virial_filters = []
- for vfilter in virial_filters:
- if not vfilter[0] in fields:
- fields.append(vfilter[0])
-
- overDensity = []
- temp_profile = dict((field, []) for field in fields)
-
- for q in range(len(profile[overdensity_field])):
- good = True
- if (profile[overdensity_field][q] != profile[overdensity_field][q]):
- good = False
- continue
- for field in fields:
- if (profile[field][q] != profile[field][q]):
- good = False
- break
- if good:
- overDensity.append(profile[overdensity_field][q])
- for field in fields:
- temp_profile[field].append(profile[field][q])
-
- if use_log:
- for field in temp_profile.keys():
- temp_profile[field] = np.log10(np.clip(temp_profile[field], TINY,
- max(temp_profile[field])))
-
- virial = dict((field, 0.0) for field in fields)
-
- if (not (np.array(overDensity) >= virial_overdensity).any()) and \
- must_be_virialized:
- mylog.debug("This halo is not virialized!")
- return [False, {}]
-
- if (len(overDensity) < 2):
- mylog.debug("Skipping halo with no valid points in profile.")
- return [False, {}]
-
- if (overDensity[1] <= virial_overdensity):
- index = 0
- elif (overDensity[-1] >= virial_overdensity):
- index = -2
- else:
- for q in (np.arange(len(overDensity),0,-1)-1):
- if (overDensity[q] < virial_overdensity) and (overDensity[q-1] >= virial_overdensity):
- index = q - 1
- break
-
- if type(virial_index) is list:
- virial_index.append(index)
-
- for field in fields:
- if (overDensity[index+1] - overDensity[index]) == 0:
- mylog.debug("Overdensity profile has slope of zero.")
- return [False, {}]
- else:
- slope = (temp_profile[field][index+1] - temp_profile[field][index]) / \
- (overDensity[index+1] - overDensity[index])
- value = slope * (virial_overdensity - overDensity[index]) + \
- temp_profile[field][index]
- virial[field] = value
-
- if use_log:
- for field in virial.keys():
- virial[field] = np.power(10, virial[field])
-
- for vfilter in virial_filters:
- if eval("%s %s %s" % (virial[vfilter[0]],vfilter[1],vfilter[2])):
- mylog.debug("(%s %s %s) returned True for %s." % \
- (vfilter[0],vfilter[1],vfilter[2],virial[vfilter[0]]))
- continue
- else:
- mylog.debug("(%s %s %s) returned False for %s." % \
- (vfilter[0],vfilter[1],vfilter[2],virial[vfilter[0]]))
- return [False, {}]
-
- return [True, dict((("%s_%s" % (q, virial_overdensity)), virial[q])
- for q in virial_quantities)]
-
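
The virial quantities above come out of a linear interpolation across the radial bin where the overdensity profile crosses virial_overdensity; a standalone sketch of that step with invented numbers:

    # Sketch of VirialFilter's interpolation, with made-up bracketing bins:
    od_lo, od_hi = 250.0, 180.0   # overdensity straddles the target of 200
    m_lo, m_hi = 8.0e13, 1.1e14   # TotalMassMsun in the same two bins
    slope = (m_hi - m_lo) / (od_hi - od_lo)
    mass_200 = slope * (200.0 - od_lo) + m_lo
    # mass_200 is ~1.01e14, reported under the key "TotalMassMsun_200.0"
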
diff -r 14dfbdf28b07dd18e112abafce364d04a4864829 -r d24a8070639df84aa70a4c1b72bb150a56805b5d yt/analysis_modules/halo_profiler/multi_halo_profiler.py
--- a/yt/analysis_modules/halo_profiler/multi_halo_profiler.py
+++ /dev/null
@@ -1,1368 +0,0 @@
-"""
-HaloProfiler class and member functions.
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-import gc
-import numpy as np
-import os
-import h5py
-import types
-
-from yt.funcs import *
-from yt.utilities.math_utils import periodic_dist
-
-from yt.convenience import \
- load
-from yt.data_objects.profiles import \
- BinnedProfile1D, YTEmptyProfileData
-from yt.analysis_modules.halo_finding.api import *
-from .halo_filters import \
- VirialFilter
-from .centering_methods import \
- centering_registry
-from yt.fields.local_fields import \
- add_field
-from yt.data_objects.static_output import \
- Dataset
-
-from yt.utilities.exceptions import \
- YTException
-from yt.utilities.parallel_tools.parallel_analysis_interface import \
- ParallelAnalysisInterface, \
- parallel_blocking_call, \
- parallel_root_only, \
- parallel_objects
-from yt.utilities.physical_constants import \
- mass_sun_cgs
-from yt.utilities.physical_ratios import \
- rho_crit_g_cm3_h2
-from yt.visualization.fixed_resolution import \
- FixedResolutionBuffer
-from yt.visualization.image_writer import write_image
-
-PROFILE_RADIUS_THRESHOLD = 2
-
-class HaloProfiler(ParallelAnalysisInterface):
- r"""Initialize a Halo Profiler object.
-
- In order to run the halo profiler, the Halo Profiler object must be
- instantiated. At the minimum, the path to a parameter file
- must be provided as the first term.
-
- Parameters
- ----------
-
- dataset : string, required
- The path to the parameter file for the dataset to be analyzed.
- output_dir : string, optional
- If specified, all output will be put into this path instead of
- in the dataset directories. Default: None.
- halos : {"multiple", "single"}, optional
- For profiling more than one halo. In this mode halos are read in
- from a list or identified with a halo finder. In "single" mode,
- the one and only halo
- center is identified automatically as the location of the peak
- in the density field.
- Default: "multiple".
- halo_list_file : string, optional
- The name of a file containing the list of halos. The HaloProfiler
- will look for this file in the data directory.
- Default: "HopAnalysis.out".
- halo_list_format : {string, dict}
- The format of the halo list file. "yt_hop" for the format
- given by yt's halo finders. "enzo_hop" for the format written
- by enzo_hop. "p-groupfinder" for P-Groupfinder. This keyword
- can also be given in the form of a dictionary specifying the
- column in which various properties can be found.
- For example, {"id": 0, "center": [1, 2, 3], "mass": 4, "radius": 5}.
- Default: "yt_hop".
- halo_finder_function : function
- If halos is set to multiple and the file given by
- halo_list_file does not exist, the halo finding function
- specified here will be called.
- Default: HaloFinder (yt_hop).
- halo_finder_args : tuple
- Args given with call to halo finder function. Default: None.
- halo_finder_kwargs : dict
- kwargs given with call to halo finder function. Default: None.
- recenter : {string, function}
- The exact location of the sphere center can significantly affect
- radial profiles. The halo center loaded by the HaloProfiler will
- typically be the dark matter center of mass calculated by a halo
- finder. However, this may not be the best location for centering
- profiles of baryon quantities. For example, one may want to center
- on the maximum density.
- If recenter is given as a string, one of the existing recentering
- functions will be used:
-
- * Min_Dark_Matter_Density : location of minimum dark matter density
- * Max_Dark_Matter_Density : location of maximum dark matter density
- * CoM_Dark_Matter_Density : dark matter center of mass
- * Min_Gas_Density : location of minimum gas density
- * Max_Gas_Density : location of maximum gas density
- * CoM_Gas_Density : gas center of mass
- * Min_Total_Density : location of minimum total density
- * Max_Total_Density : location of maximum total density
- * CoM_Total_Density : total center of mass
- * Min_Temperature : location of minimum temperature
- * Max_Temperature : location of maximum temperature
-
- Alternately, a function can be supplied for custom recentering.
- The function should take only one argument, a sphere object. Example
- function::
-
- def my_center_of_mass(data):
- my_x, my_y, my_z = data.quantities['CenterOfMass']()
- return (my_x, my_y, my_z)
-
- Default: None.
- halo_radius : float
- If no halo radii are provided in the halo list file, this
- parameter is used to specify the radius out to which radial
- profiles will be made. This keyword is also
- used when halos is set to single. Default: 0.1.
- radius_units : string
- The units of halo_radius. Default: "1" (code units).
- n_profile_bins : int
- The number of bins in the radial profiles. Default: 50.
- profile_output_dir : str
- The subdirectory, inside the data directory, in which radial profile
- output files will be created. The directory will be created if it does
- not exist. Default: "radial_profiles".
- projection_output_dir : str
- The subdirectory, inside the data directory, in which projection
- output files will be created. The directory will be created if it does
- not exist. Default: "projections".
- projection_width : float
- The width of halo projections. Default: 8.0.
- projection_width_units : string
- The units of projection_width. Default: "mpc".
- project_at_level : {"max", int}
- The maximum refinement level to be included in projections.
- Default: "max" (maximum level within the dataset).
- velocity_center : array_like
- The method in which the halo bulk velocity is calculated (used for
- calculation of radial and tangential velocities). Valid options are:
-
- * ["bulk", "halo"] (Default): the velocity provided in
- the halo list
- * ["bulk", "sphere"]: the bulk velocity of the sphere
- centered on the halo center.
- * ["max", field]: the velocity of the cell that is the
- location of the maximum of the field specified.
-
- filter_quantities : array_like
- Quantities from the original halo list file to be written out in the
- filtered list file. Default: ['id','center'].
- use_critical_density : bool
- If True, the definition of overdensity for virial quantities
- is calculated with respect to the critical density.
- If False, overdensity is with respect to mean matter density,
- which is lower by a factor of Omega_M. Default: False.
-
- Examples
- --------
-
- >>> from yt.analysis_modules.halo_profiler.api import *
- >>> hp = HaloProfiler("RedshiftOutput0005/RD0005")
-
- """
- def __init__(self, dataset, output_dir=None,
- halos='multiple', halo_list_file='HopAnalysis.out',
- halo_list_format='yt_hop', halo_finder_function=parallelHF,
- halo_finder_args=None,
- halo_finder_kwargs=dict(threshold=160.0, safety=1.5,
- dm_only=False, resize=True,
- fancy_padding=True, rearrange=True),
- halo_radius=None, radius_units='1', n_profile_bins=50,
- recenter=None,
- profile_output_dir='radial_profiles', projection_output_dir='projections',
- projection_width=8.0, projection_width_units='mpc', project_at_level='max',
- velocity_center=['bulk', 'halo'], filter_quantities=['id', 'center', 'r_max'],
- use_critical_density=False):
- ParallelAnalysisInterface.__init__(self)
-
- self.dataset = dataset
- self.n_profile_bins = n_profile_bins
- self.projection_width = projection_width
- self.projection_width_units = projection_width_units
- self.project_at_level = project_at_level
- self.filter_quantities = filter_quantities
- if self.filter_quantities is None: self.filter_quantities = []
- self.use_critical_density = use_critical_density
-
- self.profile_fields = []
- self.projection_fields = []
-
- self._halo_filters = []
- self.all_halos = []
- self.filtered_halos = []
-
- # Set halo finder function and parameters, if needed.
- self.halo_finder_function = halo_finder_function
- self.halo_finder_args = halo_finder_args
- if self.halo_finder_args is None: self.halo_finder_args = ()
- self.halo_finder_kwargs = halo_finder_kwargs
- if self.halo_finder_kwargs is None: self.halo_finder_kwargs = {}
-
- # Set option to get halos from hop or single halo at density maximum.
- # multiple: get halos from hop
- # single: get single halo from density maximum
- self.halos = halos
- if not(self.halos is 'multiple' or self.halos is 'single'):
- mylog.error("Keyword, halos, must be either 'single' or 'multiple'.")
- return None
-
- # Set halo list format.
- # 'yt_hop': yt hop output.
- # 'enzo_hop': enzo_hop output.
- # dictionary: a dictionary containing fields and their corresponding columns.
- self.halo_list_file = halo_list_file
- if halo_list_format == 'yt_hop':
- self.halo_list_format = {'id':0, 'mass':1, 'np': 2,
- 'center':[7, 8, 9], 'velocity':[10, 11, 12], 'r_max':13}
- elif halo_list_format == 'enzo_hop':
- self.halo_list_format = {'id':0, 'center':[4, 5, 6]}
- elif halo_list_format == 'p-groupfinder':
- self.halo_list_format = {'id':3, 'mass':5, 'center':[0, 1, 2], 'r200kpc':8}
- elif isinstance(halo_list_format, types.DictType):
- self.halo_list_format = halo_list_format
- else:
- mylog.error("Keyword, halo_list_format, must be 'yt_hop', 'enzo_hop', 'p-groupfinder', or a dictionary of custom settings.")
- return None
-
- # Option to recenter sphere someplace else.
- self.recenter = recenter
-
- # Flag for whether calculating halo bulk velocity is necessary.
- self._need_bulk_velocity = False
-
- # Check validity for VelocityCenter parameter which toggles how the
- # velocity is zeroed out for radial velocity profiles.
- self.velocity_center = velocity_center[:]
- if self.velocity_center[0] == 'bulk':
- if self.velocity_center[1] == 'halo' and \
- self.halos is 'single':
- mylog.error("Parameter, VelocityCenter, must be set to 'bulk sphere' or 'max <field>' with halos flag set to 'single'.")
- return None
- if self.velocity_center[1] == 'halo' and \
- self.halo_list_format is 'enzo_hop':
- mylog.error("Parameter, VelocityCenter, must be 'bulk sphere' for old style hop output files.")
- return None
- if not(self.velocity_center[1] == 'halo' or
- self.velocity_center[1] == 'sphere'):
- mylog.error("Second value of VelocityCenter must be either 'halo' or 'sphere' if first value is 'bulk'.")
- return None
- elif self.velocity_center[0] == 'max':
- mylog.info('Using position of max %s for velocity center.' % self.velocity_center[1])
- else:
- mylog.error("First value of parameter, VelocityCenter, must be either 'bulk' or 'max'.")
- return None
-
- # Create dataset object.
- if isinstance(self.dataset, Dataset):
- self.pf = self.dataset
- else:
- self.pf = load(self.dataset)
- self.pf.h
-
- # Create output directories.
- self.output_dir = output_dir
- if output_dir is None:
- self.output_dir = self.pf.fullpath
- else:
- self.__check_directory(output_dir)
- self.output_dir = os.path.join(output_dir, self.pf.directory)
- self.__check_directory(self.output_dir)
- self.profile_output_dir = os.path.join(self.output_dir, profile_output_dir)
- self.projection_output_dir = os.path.join(self.output_dir, projection_output_dir)
-
- # Figure out what max radius to use for profiling.
- if halo_radius is not None:
- self.halo_radius = halo_radius / self.pf[radius_units]
- elif self.halos is 'single' or not 'r_max' in self.halo_list_format:
- self.halo_radius = 0.1
- else:
- self.halo_radius = None
-
- # Get halo(s).
- if self.halos is 'single':
- v, center = self.pf.h.find_max('Density')
- singleHalo = {}
- singleHalo['center'] = center
- singleHalo['r_max'] = self.halo_radius * self.pf.units['mpc']
- singleHalo['id'] = 0
- self.all_halos.append(singleHalo)
- elif self.halos is 'multiple':
- # Get hop data.
- self._load_halo_data()
- if len(self.all_halos) == 0:
- mylog.error("No halos loaded, there will be nothing to do.")
- return None
- else:
- mylog.error("Keyword, halos, must be either 'single' or 'multiple'.")
- return None
-
- def add_halo_filter(self, function, *args, **kwargs):
- r"""Filters can be added to create a refined list of halos based on
- their profiles or to avoid profiling halos altogether based on
- information given in the halo list file.
-
- It is often the case that one is looking to identify halos with a
- specific set of properties. This can be accomplished through the
- creation of filter functions. A filter function can take as many args
- and kwargs as you like, as long as the first argument is a profile
- object, or at least a dictionary which contains the profile arrays
- for each field. Filter functions must return a list of two things.
- The first is a True or False indicating whether the halo passed the
- filter. The second is a dictionary containing quantities calculated
- for that halo that will be written to a file if the halo passes the
- filter. A sample filter function based on virial quantities can be
- found in yt/analysis_modules/halo_profiler/halo_filters.py.
-
- Parameters
- ----------
- function : function
- The name of a halo filter function.
- args : values
- Arguments passed to the halo filter function.
- kwargs : values
- Arguments passed to the halo filter function.
-
- Examples
- -------
- >>> hp.add_halo_filter(HP.VirialFilter, must_be_virialized=True,
- overdensity_field='ActualOverdensity',
- virial_overdensity=200,
- virial_filters=[['TotalMassMsun','>=','1e14']],
- virial_quantities=['TotalMassMsun','RadiusMpc'])
-
- """
-
- self._halo_filters.append({'function':function, 'args':args, 'kwargs':kwargs})
-
- def add_profile(self, field, weight_field=None, accumulation=False):
- r"""Add a field for profiling.
-
- Once the halo profiler object has been instantiated,
- fields can be added for profiling using this function. This function
- may be called multiple times, once per field to be added.
-
- Parameters
- ----------
- field : string
- The name of the field.
- weight_field : {None, string}, optional
- The field that will be used to weight the field `field` when
- the radial binning is done. Default: None.
- accumulation : bool
- Whether or not the `field` values should be accumulated with
- increasing radius of the profile. Default: False.
-
- Examples
- --------
- >>> hp.add_profile('CellVolume', weight_field=None, accumulation=True)
- >>> hp.add_profile('TotalMassMsun', weight_field=None, accumulation=True)
- >>> hp.add_profile('Density', weight_field=None, accumulation=False)
- >>> hp.add_profile('Temperature', weight_field='CellMassMsun', accumulation=False)
-
- """
-
- # Check for any field that might need to have the bulk velocity set.
- if 'Velocity' in field or 'Mach' in field:
- self._need_bulk_velocity = True
-
- self.profile_fields.append({'field':field, 'weight_field':weight_field,
- 'accumulation':accumulation})
-
- def add_projection(self, field, weight_field=None, cmap='algae'):
- r"""Make a projection of the specified field.
-
- For the given field, a projection will be produced that can be saved
- to HDF5 or image format. See `make_projections`.
-
- Parameters
- ----------
- field : string
- The name of the field.
- weight_field : string
- The field that will be used to weight the field `field` when
- the projection is done. Default: None.
- cmap : string
- The name of the matplotlib color map that will be used if an
- image is made from the projection. Default="algae".
-
- Examples
- --------
- >>> hp.add_projection('Density', weight_field=None)
- >>> hp.add_projection('Temperature', weight_field='Density')
- >>> hp.add_projection('Metallicity', weight_field='Density')
-
- """
-
- # Check for any field that might need to have the bulk velocity set.
- if 'Velocity' in field or 'Mach' in field:
- self._need_bulk_velocity = True
-
- self.projection_fields.append({'field':field, 'weight_field':weight_field,
- 'cmap': cmap})
-
- @parallel_blocking_call
- def make_profiles(self, filename=None, prefilters=None, njobs=-1,
- dynamic=False, profile_format='ascii'):
- r"""Make radial profiles for all halos in the list.
-
- After all the calls to `add_profile`, this will trigger the actual
- calculations and output the profiles to disk.
-
- Parameters
- ----------
-
- filename : str
- If set, a file will be written with all of the filtered halos
- and the quantities returned by the filter functions.
- Default: None.
- prefilters : array_like
- A single dataset can contain thousands or tens of thousands of
- halos. Significant time can be saved by not profiling halos
- that are certain to not pass any filter functions in place.
- Simple filters based on quantities provided in the initial
- halo list can be used to filter out unwanted halos using this
- parameter.
- Default: None.
- njobs : int
- The number of jobs over which to split the profiling. Set
- to -1 so that each halo is done by a single processor.
- Default: -1.
- dynamic : bool
- If True, distribute halos using a task queue. If False,
- distribute halos evenly over all jobs.
- Default: False.
- profile_format : str
- The file format for the radial profiles, 'ascii' or 'hdf5'.
- Default: 'ascii'.
-
- Examples
- --------
-
- >>> hp.make_profiles(filename="FilteredQuantities.out",
- prefilters=["halo['mass'] > 1e13"])
-
- """
-
- extension_map = {'ascii': 'dat',
- 'hdf5': 'h5'}
- if not profile_format in extension_map:
- mylog.error("Invalid profile_format: %s. Valid options are %s." %
- (profile_format, ", ".join(extension_map.keys())))
- raise YTException(pf=self.pf)
-
- if len(self.all_halos) == 0:
- mylog.error("Halo list is empty, returning.")
- return None
-
- # Reset filtered halo list.
- self.filtered_halos = []
-
- # Check to see if the VirialFilter has been added to the filter list.
- # If a lower mass cutoff is being used, use it to make a pre-filter.
- if prefilters is None: prefilters = []
- virial_prefilter = None
- virial_prefilter_safety_factor = 0.5
- all_filter_functions = [hf['function'] for hf in self._halo_filters]
- virial_filter = VirialFilter in all_filter_functions
- if 'mass' in self.halo_list_format and VirialFilter in all_filter_functions:
- vFilter = self._halo_filters[all_filter_functions.index(VirialFilter)]
- if vFilter['kwargs'].has_key('virial_filters') and \
- vFilter['kwargs']['virial_filters'] is not None:
- all_vqFilters = [vqf[0] for vqf in vFilter['kwargs']['virial_filters']]
- if 'TotalMassMsun' in all_vqFilters:
- mass_filter = vFilter['kwargs']['virial_filters'][all_vqFilters.index('TotalMassMsun')]
- if '>' in mass_filter[1]:
- virial_prefilter = "halo['mass'] %s %f * %s" % \
- (mass_filter[1], virial_prefilter_safety_factor, mass_filter[2])
- prefilters.append(virial_prefilter)
- elif '<' in mass_filter[1]:
- virial_prefilter = "halo['mass'] %s %f * %s" % \
- (mass_filter[1], (1./virial_prefilter_safety_factor), mass_filter[2])
- prefilters.append(virial_prefilter)
-
- # Add profile fields necessary for calculating virial quantities.
- if virial_filter: self._check_for_needed_profile_fields()
-
- # Create output directory.
- self.__check_directory(self.profile_output_dir)
-
- # Profile all halos.
- updated_halos = []
- for halo in parallel_objects(self.all_halos, njobs=njobs, dynamic=dynamic):
- # Apply prefilters to avoid profiling unwanted halos.
- filter_result = True
- haloQuantities = {}
- if prefilters is not None:
- for prefilter in prefilters:
- if not eval(prefilter):
- filter_result = False
- break
-
- if filter_result and len(self.profile_fields) > 0:
-
- profile_filename = "%s/Halo_%04d_profile.%s" % \
- (self.profile_output_dir, halo['id'], extension_map[profile_format])
-
- profiledHalo = self._get_halo_profile(halo, profile_filename,
- virial_filter=virial_filter)
-
- if profiledHalo is None:
- continue
-
- # Apply filter and keep track of the quantities that are returned.
- for hFilter in self._halo_filters:
- filter_result, filterQuantities = hFilter['function'](profiledHalo, *hFilter['args'],
- **hFilter['kwargs'])
-
- if not filter_result: break
-
- if filterQuantities is not None:
- haloQuantities.update(filterQuantities)
-
- if filter_result:
- for quantity in self.filter_quantities:
- if halo.has_key(quantity): haloQuantities[quantity] = halo[quantity]
-
- only_on_root(self.filtered_halos.append, haloQuantities)
-
- # If we've gotten this far down, this halo is good and we want
- # to keep it. But we need to communicate the recentering changes
- # to all processors (the root one in particular) without having
- # one task clobber the other.
- only_on_root(updated_halos.append, halo)
-
- # And here is where we bring it all together.
- updated_halos = self.comm.par_combine_object(updated_halos,
- datatype="list", op="cat")
- updated_halos.sort(key=lambda a:a['id'])
- self.all_halos = updated_halos
-
- self.filtered_halos = self.comm.par_combine_object(self.filtered_halos,
- datatype="list", op="cat")
- self.filtered_halos.sort(key=lambda a:a['id'])
-
- if filename is not None:
- self._write_filtered_halo_list(os.path.join(self.output_dir, filename))
-
- def _get_halo_profile(self, halo, filename, virial_filter=True,
- force_write=False):
- r"""Profile a single halo and write profile data to a file.
- If file already exists, read profile data from file.
- Return a dictionary of id, center, and virial quantities if virial_filter is True.
- """
-
- # Read profile from file if it already exists.
- # If not, profile will be None.
- profile = self._read_profile(filename)
-
- # Make profile if necessary.
- newProfile = profile is None
- if newProfile:
-
- r_min = 2 * self.pf.index.get_smallest_dx() * self.pf['mpc']
- if (halo['r_max'] / r_min < PROFILE_RADIUS_THRESHOLD):
- mylog.debug("Skipping halo with r_max / r_min = %f." % (halo['r_max']/r_min))
- return None
-
- # get a sphere object to profile
- sphere = self._get_halo_sphere(halo)
- if sphere is None: return None
-
- try:
- profile = BinnedProfile1D(sphere, self.n_profile_bins, "RadiusMpc",
- r_min, halo['r_max'],
- log_space=True, end_collect=True)
- except YTEmptyProfileData:
- mylog.error("Caught EmptyProfileData exception, returning None for this halo.")
- return None
- # Figure out which fields to add simultaneously
- field_groupings = defaultdict(lambda: defaultdict(list))
- for hp in self.profile_fields:
- field_groupings[hp['weight_field']][hp['accumulation']].append(hp['field'])
- for weight_field in field_groupings:
- for accum, fields in field_groupings[weight_field].items():
- profile.add_fields(fields, weight=weight_field,
- accumulation=accum)
-
- if virial_filter:
- self._add_actual_overdensity(profile)
-
- if newProfile:
- mylog.info("Writing halo %d" % halo['id'])
- if os.path.exists(filename): os.remove(filename)
- if filename.endswith('.h5'):
- profile.write_out_h5(filename)
- else:
- profile.write_out(filename, format='%0.6e')
- # profile will have N+1 bins so remove the last one
- for field in profile.keys():
- profile[field] = profile[field][:-1]
- elif force_write:
- mylog.info("Re-writing halo %d" % halo['id'])
- self._write_profile(profile, filename, format='%0.6e')
-
- return profile
-
- def _get_halo_sphere(self, halo):
- """
- Returns a sphere object for a given halo, performs the recentering,
- and calculates bulk velocities.
- """
-
- sphere = self.pf.sphere(halo['center'], halo['r_max']/self.pf.units['mpc'])
- new_sphere = False
-
- if self.recenter:
- old = halo['center']
- if self.recenter in centering_registry:
- new_x, new_y, new_z = \
- centering_registry[self.recenter](sphere)
- else:
- # user supplied function
- new_x, new_y, new_z = self.recenter(sphere)
- if new_x < self.pf.domain_left_edge[0] or \
- new_y < self.pf.domain_left_edge[1] or \
- new_z < self.pf.domain_left_edge[2]:
- mylog.info("Recentering rejected, skipping halo %d" % \
- halo['id'])
- return None
- halo['center'] = [new_x, new_y, new_z]
- d = self.pf['kpc'] * periodic_dist(old, halo['center'],
- self.pf.domain_right_edge - self.pf.domain_left_edge)
- mylog.info("Recentered halo %d %1.3e kpc away." % (halo['id'], d))
- # Expand the halo to account for recentering.
- halo['r_max'] += d / 1000. # d is in kpc -> want mpc
- new_sphere = True
-
- if new_sphere:
- sphere = self.pf.sphere(halo['center'], halo['r_max']/self.pf.units['mpc'])
-
- if self._need_bulk_velocity:
- # Set bulk velocity to zero out radial velocity profiles.
- if self.velocity_center[0] == 'bulk':
- if self.velocity_center[1] == 'halo':
- sphere.set_field_parameter('bulk_velocity', halo['velocity'])
- elif self.velocity_center[1] == 'sphere':
- mylog.info('Calculating sphere bulk velocity.')
- sphere.set_field_parameter('bulk_velocity',
- sphere.quantities['BulkVelocity']())
- else:
- mylog.error("Invalid parameter: velocity_center.")
- return None
- elif self.velocity_center[0] == 'max':
- mylog.info('Setting bulk velocity with value at max %s.' % self.velocity_center[1])
- max_val, maxi, mx, my, mz, mg = sphere.quantities['MaxLocation'](self.velocity_center[1])
-
- max_grid = self.pf.index.grids[mg]
- max_cell = np.unravel_index(maxi, max_grid.ActiveDimensions)
- sphere.set_field_parameter('bulk_velocity', [max_grid['x-velocity'][max_cell],
- max_grid['y-velocity'][max_cell],
- max_grid['z-velocity'][max_cell]])
- mylog.info('Bulk velocity set.')
-
- return sphere
-
- @parallel_blocking_call
- def make_projections(self, axes=[0, 1, 2], halo_list='filtered',
- save_images=False, save_cube=True, njobs=-1,
- dynamic=False):
- r"""Make projections of all halos using specified fields.
-
- After adding fields using `add_projection`, this starts the actual
- calculations and saves the output to disk.
-
- Parameters
- ----------
-
- axes : array_like
- A list of the axes to project along, using the usual 0,1,2
- convention. Default=[0,1,2]
- halo_list : {'filtered', 'all'}
- Which set of halos to make profiles of, either ones passed by the
- halo filters (if enabled/added), or all halos.
- Default='filtered'.
- save_images : bool
- Whether or not to save images of the projections. Default=False.
- save_cube : bool
- Whether or not to save the HDF5 files of the halo projections.
- Default=True.
- njobs : int
- The number of jobs over which to split the projections. Set
- to -1 so that each halo is done by a single processor. Halo
- projections do not currently work in parallel, so this must
- be set to -1.
- Default: -1.
- dynamic : bool
- If True, distribute halos using a task queue. If False,
- distribute halos evenly over all jobs.
- Default: False.
-
- Examples
- --------
-
- >>> hp.make_projections(axes=[0, 1, 2], save_cube=True,
- save_images=True, halo_list="filtered")
-
- """
-
- # Halo projections cannot run in parallel because they are done by
- # giving a data source to the projection object.
- if njobs > 0:
- mylog.warn("Halo projections cannot use more than one processor per halo, setting njobs to -1.")
- njobs = -1
-
- # Get list of halos for projecting.
- if halo_list == 'filtered':
- halo_projection_list = self.filtered_halos
- elif halo_list == 'all':
- halo_projection_list = self.all_halos
- elif isinstance(halo_list, types.StringType):
- halo_projection_list = self._read_halo_list(halo_list)
- elif isinstance(halo_list, types.ListType):
- halo_projection_list = halo_list
- else:
- mylog.error("Keyword, halo_list', must be 'filtered', 'all', a filename, or an actual list.")
- return
-
- if len(halo_projection_list) == 0:
- mylog.error("Halo list for projections is empty.")
- return
-
- # Set resolution for fixed resolution output.
- if self.project_at_level == 'max':
- proj_level = self.pf.h.max_level
- else:
- proj_level = int(self.project_at_level)
- proj_dx = self.pf.units[self.projection_width_units] / \
- self.pf.parameters['TopGridDimensions'][0] / \
- (self.pf.parameters['RefineBy']**proj_level)
- projectionResolution = int(self.projection_width / proj_dx)
-
- # Create output directory.
- self.__check_directory(self.projection_output_dir)
-
- center = [0.5 * (self.pf.parameters['DomainLeftEdge'][w] +
- self.pf.parameters['DomainRightEdge'][w])
- for w in range(self.pf.parameters['TopGridRank'])]
-
- for halo in parallel_objects(halo_projection_list, njobs=njobs, dynamic=dynamic):
- if halo is None:
- continue
- # Check if region will overlap domain edge.
- # Using non-periodic regions is faster than using periodic ones.
- leftEdge = [(halo['center'][w] -
- 0.5 * self.projection_width/self.pf.units[self.projection_width_units])
- for w in range(len(halo['center']))]
- rightEdge = [(halo['center'][w] +
- 0.5 * self.projection_width/self.pf.units[self.projection_width_units])
- for w in range(len(halo['center']))]
-
- mylog.info("Projecting halo %04d in region: [%f, %f, %f] to [%f, %f, %f]." %
- (halo['id'], leftEdge[0], leftEdge[1], leftEdge[2],
- rightEdge[0], rightEdge[1], rightEdge[2]))
-
- need_per = False
- for w in range(len(halo['center'])):
- if ((leftEdge[w] < self.pf.parameters['DomainLeftEdge'][w]) or
- (rightEdge[w] > self.pf.parameters['DomainRightEdge'][w])):
- need_per = True
- break
-
- # We use the same type of region regardless. The selection will be
- # correct, but we need the need_per variable for projection
- # shifting.
- region = self.pf.region(halo['center'], leftEdge, rightEdge)
-
- # Make projections.
- if not isinstance(axes, types.ListType): axes = list([axes])
- for w in axes:
- projections = []
- # YT projections do not follow the right-hand rule.
- coords = range(3)
- del coords[w]
- x_axis = coords[0]
- y_axis = coords[1]
-
- for hp in self.projection_fields:
- projections.append(self.pf.proj(hp['field'], w,
- weight_field=hp['weight_field'],
- data_source=region,
- center=halo['center']))
-
- # Set x and y limits, shift image if it overlaps domain boundary.
- if need_per:
- pw = self.projection_width/self.pf.units[self.projection_width_units]
- _shift_projections(self.pf, projections, halo['center'], center, w)
- # Projection has now been shifted to center of box.
- proj_left = [center[x_axis]-0.5*pw, center[y_axis]-0.5*pw]
- proj_right = [center[x_axis]+0.5*pw, center[y_axis]+0.5*pw]
- else:
- proj_left = [leftEdge[x_axis], leftEdge[y_axis]]
- proj_right = [rightEdge[x_axis], rightEdge[y_axis]]
-
- # Save projection data to hdf5 file.
- if save_cube or save_images:
- axis_labels = ['x', 'y', 'z']
-
- if save_cube:
- dataFilename = "%s/Halo_%04d_%s_data.h5" % \
- (self.projection_output_dir, halo['id'], axis_labels[w])
- mylog.info("Saving projection data to %s." % dataFilename)
- output = h5py.File(dataFilename, "a")
-
- # Create fixed resolution buffer for each projection and write them out.
- for e, hp in enumerate(self.projection_fields):
- frb = FixedResolutionBuffer(projections[e], (proj_left[0], proj_right[0],
- proj_left[1], proj_right[1]),
- (projectionResolution, projectionResolution),
- antialias=False)
- dataset_name = "%s_%s" % (hp['field'], hp['weight_field'])
- if save_cube:
- if dataset_name in output: del output[dataset_name]
- output.create_dataset(dataset_name, data=frb[hp['field']])
- if save_images:
- filename = "%s/Halo_%04d_%s_%s.png" % \
- (self.projection_output_dir, halo['id'],
- dataset_name, axis_labels[w])
- if (frb[hp['field']] != 0).any():
- write_image(np.log10(frb[hp['field']]), filename, cmap_name=hp['cmap'])
- else:
- mylog.info('Projection of %s for halo %d is all zeros, skipping image.' %
- (hp['field'], halo['id']))
- if save_cube: output.close()
-
- del region
-
- @parallel_blocking_call
- def analyze_halo_spheres(self, analysis_function, halo_list='filtered',
- analysis_output_dir=None, njobs=-1, dynamic=False):
- r"""Perform custom analysis on all halos.
-
- This will loop through all halo on the HaloProfiler's list,
- creating a sphere object for each halo and passing that sphere
- to the provided analysis function.
-
- Parameters
- ----------
-
- analysis_function : function
- A function taking two arguments, the halo dictionary, and a
- sphere object.
- Example function to calculate total mass of halo::
-
- def my_analysis(halo, sphere):
- total_mass = sphere.quantities['TotalMass']()
- print total_mass
-
- halo_list : {'filtered', 'all'}
- Which set of halos to make profiles of, either ones passed by the
- halo filters (if enabled/added), or all halos.
- Default='filtered'.
- analysis_output_dir : string, optional
- If specified, this directory will be created within the dataset to
- contain any output from the analysis function. Default: None.
- njobs : int
- The number of jobs over which to split the analysis. Set
- to -1 so that each halo is done by a single processor.
- Default: -1.
- dynamic : bool
- If True, distribute halos using a task queue. If False,
- distribute halos evenly over all jobs.
- Default: False.
-
- Examples
- --------
-
- >>> hp.analyze_halo_spheres(my_analysis, halo_list="filtered",
- analysis_output_dir='special_analysis')
-
- """
-
- # Get list of halos for projecting.
- if halo_list == 'filtered':
- halo_analysis_list = self.filtered_halos
- elif halo_list == 'all':
- halo_analysis_list = self.all_halos
- elif isinstance(halo_list, types.StringType):
- halo_analysis_list = self._read_halo_list(halo_list)
- elif isinstance(halo_list, types.ListType):
- halo_analysis_list = halo_list
- else:
- mylog.error("Keyword, halo_list', must be 'filtered', 'all', a filename, or an actual list.")
- return
-
- if len(halo_analysis_list) == 0:
- mylog.error("Halo list for analysis is empty.")
- return
-
- # Create output directory.
- if analysis_output_dir is not None:
- my_output_dir = os.path.join(self.output_dir, analysis_output_dir)
- self.__check_directory(my_output_dir)
-
- for halo in parallel_objects(halo_analysis_list, njobs=njobs, dynamic=dynamic):
- if halo is None: continue
-
- # Get a sphere object to analyze.
- sphere = self._get_halo_sphere(halo)
- if sphere is None: continue
-
- # Call the given analysis function.
- analysis_function(halo, sphere)
-
- def _add_actual_overdensity(self, profile):
- "Calculate overdensity from TotalMassMsun and CellVolume fields."
-
- if 'ActualOverdensity' in profile.keys():
- return
-
- rhocritnow = rho_crit_g_cm3_h2 * self.pf.hubble_constant**2 # g cm^-3
- rho_crit = rhocritnow * ((1.0 + self.pf.current_redshift)**3.0)
- if not self.use_critical_density: rho_crit *= self.pf.omega_matter
-
- profile['ActualOverdensity'] = (mass_sun_cgs * profile['TotalMassMsun']) / \
- profile['CellVolume'] / rho_crit
-
- def _check_for_needed_profile_fields(self):
- "Make sure CellVolume and TotalMass fields are added so virial quantities can be calculated."
- all_profile_fields = [hp['field'] for hp in self.profile_fields]
- if not 'CellVolume' in all_profile_fields:
- mylog.info("Adding CellVolume field to so virial quantities can be calculated")
- self.add_profile('CellVolume', weight_field=None, accumulation=True)
- if not 'TotalMassMsun' in all_profile_fields:
- mylog.info("Adding TotalMassMsun field to so virial quantities can be calculated")
- self.add_profile('TotalMassMsun', weight_field=None, accumulation=True)
-
- def _load_halo_data(self, filename=None):
- "Read hop output file or run hop if it doesn't exist."
-
- # Don't run if hop data already loaded.
- if self.all_halos:
- return
-
- if filename is None:
- filename = self.halo_list_file
-
- hop_file = os.path.join(self.output_dir, filename)
-
- if not(os.path.exists(hop_file)):
- mylog.info("Halo finder file not found, running halo finder to get halos.")
- self._run_hop(hop_file)
-
- self.all_halos = self._read_halo_list(hop_file)
-
- def _read_halo_list(self, listFile):
- """
- Read halo list from a text file.
- Allow for columnar data in varying formats.
- """
-
- def __isE(arg):
- parts = arg.lower().split('e')
- if len(parts) != 2: return False
- return not (True in [q.isalpha() for q in ''.join(parts)])
-
- def __get_num(arg):
- if __isE(arg):
- return float(arg)
- if arg != arg.swapcase():
- return arg
- return float(arg)
-
- mylog.info("Reading halo information from %s." % listFile)
- haloList = []
- listLines = file(listFile)
-
- fields = self.halo_list_format.keys()
- getID = not 'id' in fields
- has_rmax = 'r_max' in fields
- has_r200kpc = 'r200kpc' in fields
-
- for line in listLines:
- line = line.strip()
- if len(line) > 0 and not line.startswith('#') and not line[0].isalpha():
- halo = {}
- onLine = line.split()
- for field in fields:
- if isinstance(self.halo_list_format[field], types.ListType):
- halo[field] = [__get_num(onLine[q]) for q in self.halo_list_format[field]]
- else:
- halo[field] = __get_num(onLine[self.halo_list_format[field]])
- if getID: halo['id'] = len(haloList)
- if self.halo_radius is not None:
- halo['r_max'] = self.halo_radius * self.pf.units['mpc']
- elif has_rmax:
- halo['r_max'] *= self.pf.units['mpc']
- elif has_r200kpc:
- # If P-Groupfinder used, r_200 [kpc] is calculated.
- # set r_max as 50% past r_200.
- halo['r_max'] = 1.5 * halo['r200kpc'] / 1000.
- else:
- mylog.error("HaloProfiler has no way to get halo radius.")
- return None
- haloList.append(halo)
-
- mylog.info("Loaded %d halos." % (len(haloList)))
- return haloList
-
- def _read_profile(self, profileFile):
- "Read radial profile from file. Return None if it doesn't have all the fields requested."
-
- # Check to see if file exists.
- if not os.path.exists(profileFile):
- return None
-
- if profileFile.endswith('.h5'):
- return self._read_profile_hdf5(profileFile)
- else:
- return self._read_profile_ascii(profileFile)
-
- def _read_profile_ascii(self, profileFile):
- "Read radial profile from file. Return None if it doesn't have all the fields requested."
-
- f = open(profileFile, 'r')
- lines = f.readlines()
- f.close()
-
- if not lines:
- return None
-
- # Get fields from header.
- header = lines.pop(0)
- header = header.strip()
- fields = header.split()
- # First string is '#'.
- fields.pop(0)
-
- profile = {}
- profile_obj = FakeProfile(self.pf)
- for field in fields:
- profile[field] = []
-
- # Check if all fields needed are present.
- all_profile_fields = [hp['field'] for hp in self.profile_fields]
- for field in all_profile_fields:
- if not field in profile:
- return None
-
- # Fill profile fields, skip bad values.
- for line in lines:
- line = line.strip()
- onLine = line.split()
- lineOK = True
- for value in onLine:
- if value.isalpha():
- lineOK = False
- break
- if lineOK:
- for q, field in enumerate(fields):
- profile[field].append(float(onLine[q]))
-
- for field in fields:
- profile[field] = np.array(profile[field])
-
- profile_obj._data = profile
-
- if len(profile[fields[0]]) > 1:
- return profile_obj
- else:
- return None
-
- def _read_profile_hdf5(self, profileFile):
- "Read radial profile from file. Return None if it doesn't have all the fields requested."
-
- profile = {}
- try:
- in_file = h5py.File(profileFile, 'r')
- except IOError:
- return None
- if not 'RadiusMpc-1d' in in_file:
- return None
- my_group = in_file['RadiusMpc-1d']
- if not 'x-axis-RadiusMpc' in my_group.attrs:
- return None
- profile['RadiusMpc'] = my_group.attrs['x-axis-RadiusMpc']
- fields = my_group.keys()
-
- # Check if all fields needed are present.
- all_profile_fields = [hp['field'] for hp in self.profile_fields]
- for field in all_profile_fields:
- if not field in fields:
- in_file.close()
- return None
-
- for field in fields:
- profile[field] = my_group[field][:]
- in_file.close()
-
- profile_obj = FakeProfile(self.pf)
- profile_obj._data = profile
- return profile_obj
-
- @parallel_blocking_call
- def _run_hop(self, hop_file):
- "Run hop to get halos."
-
- hop_results = self.halo_finder_function(self.pf, *self.halo_finder_args,
- **self.halo_finder_kwargs)
- hop_results.write_out(hop_file)
-
- del hop_results
- self.pf.h.clear_all_data()
-
- @parallel_root_only
- def _write_filtered_halo_list(self, filename, format="%s"):
- """
- Write out list of filtered halos along with any quantities
- picked up during the filtering process.
- """
-
- if filename.endswith('.h5'):
- self._write_filtered_halo_list_h5(filename)
- else:
- self._write_filtered_halo_list_ascii(filename, format=format)
-
- def _write_filtered_halo_list_ascii(self, filename, format="%s"):
- """
- Write out list of filtered halos along with any quantities
- picked up during the filtering process.
- """
-
- if len(self.filtered_halos) == 0:
- mylog.error("No halos in filtered list.")
- return
-
- mylog.info("Writing filtered halo list to %s." % filename)
- out_file = open(filename, "w")
- fields = [field for field in sorted(self.filtered_halos[0])]
- halo_fields = []
- for halo_field in self.filter_quantities:
- if halo_field in fields:
- fields.remove(halo_field)
- halo_fields.append(halo_field)
- # Make it so number of fields in header is same as number of data columns.
- header_fields = []
- for halo_field in halo_fields:
- if isinstance(self.filtered_halos[0][halo_field], types.ListType):
- header_fields.extend(["%s[%d]" % (halo_field, q)
- for q in range(len(self.filtered_halos[0][halo_field]))])
- else:
- header_fields.append(halo_field)
- out_file.write("# ")
- out_file.write("\t".join(header_fields + fields + ["\n"]))
-
- for halo in self.filtered_halos:
- for halo_field in halo_fields:
- if isinstance(halo[halo_field], types.ListType):
- field_data = np.array(halo[halo_field])
- field_data.tofile(out_file, sep="\t", format=format)
- else:
- if halo_field == 'id':
- out_file.write("%04d" % halo[halo_field])
- else:
- out_file.write("%s" % halo[halo_field])
- out_file.write("\t")
- field_data = np.array([halo[field] for field in fields])
- field_data.tofile(out_file, sep="\t", format=format)
- out_file.write("\n")
- out_file.close()
-
- def _write_filtered_halo_list_h5(self, filename):
- """
- Write out list of filtered halos along with any quantities
- picked up during the filtering process.
- """
-
- if len(self.filtered_halos) == 0:
- mylog.error("No halos in filtered list.")
- return
-
- mylog.info("Writing filtered halo list to %s." % filename)
- out_file = h5py.File(filename, "w")
- fields = [field for field in sorted(self.filtered_halos[0])]
- halo_fields = []
- for halo_field in self.filter_quantities:
- if halo_field in fields:
- fields.remove(halo_field)
- halo_fields.append(halo_field)
-
- for halo_field in halo_fields + fields:
- value_list = []
- for halo in self.filtered_halos:
- value_list.append(halo[halo_field])
- value_list = np.array(value_list)
- out_file.create_dataset(halo_field, data=value_list)
- out_file.close()
-
- def _write_profile(self, profile, filename, format="%0.16e"):
- fid = open(filename, "w")
- fields = [field for field in sorted(profile.keys()) if field != "UsedBins"]
- fid.write("\t".join(["#"] + fields + ["\n"]))
- field_data = np.array([profile[field] for field in fields])
- for line in range(field_data.shape[1]):
- field_data[:, line].tofile(fid, sep="\t", format=format)
- fid.write("\n")
- fid.close()
-
- @parallel_root_only
- def __check_directory(self, my_output_dir):
- if (os.path.exists(my_output_dir)):
- if not(os.path.isdir(my_output_dir)):
- mylog.error("Output directory exists, but is not a directory: %s." % my_output_dir)
- raise IOError(my_output_dir)
- else:
- os.makedirs(my_output_dir)
-
-def _shift_projections(pf, projections, oldCenter, newCenter, axis):
- """
- Shift projection data around.
-    This is necessary when projecting a periodic region.
- """
- offset = [newCenter[q]-oldCenter[q] for q in range(len(oldCenter))]
- width = [pf.parameters['DomainRightEdge'][q]-pf.parameters['DomainLeftEdge'][q] \
- for q in range(len(oldCenter))]
-
- del offset[axis]
- del width[axis]
-
- for plot in projections:
- # Get name of data field.
- other_fields = {'px':True, 'py':True, 'pdx':True, 'pdy':True, 'weight_field':True}
- for pfield in plot.field_data.keys():
- if not(other_fields.has_key(pfield)):
- field = pfield
- break
-
- # Shift x and y positions.
- plot['px'] += offset[0]
- plot['py'] += offset[1]
-
- # Wrap off-edge cells back around to other side (periodic boundary conditions).
- plot['px'][plot['px'] < 0] += width[0]
- plot['py'][plot['py'] < 0] += width[1]
- plot['px'][plot['px'] > width[0]] -= width[0]
- plot['py'][plot['py'] > width[1]] -= width[1]
-
- # After shifting, some cells have fractional coverage on both sides of the box.
- # Find those cells and make copies to be placed on the other side.
-
- # Cells hanging off the right edge.
- add_x_px = plot['px'][plot['px'] + 0.5 * plot['pdx'] > width[0]]
- add_x_px -= width[0]
- add_x_py = plot['py'][plot['px'] + 0.5 * plot['pdx'] > width[0]]
- add_x_pdx = plot['pdx'][plot['px'] + 0.5 * plot['pdx'] > width[0]]
- add_x_pdy = plot['pdy'][plot['px'] + 0.5 * plot['pdx'] > width[0]]
- add_x_field = plot[field][plot['px'] + 0.5 * plot['pdx'] > width[0]]
- add_x_weight_field = plot['weight_field'][plot['px'] + 0.5 * plot['pdx'] > width[0]]
-
- # Cells hanging off the left edge.
- add2_x_px = plot['px'][plot['px'] - 0.5 * plot['pdx'] < 0]
- add2_x_px += width[0]
- add2_x_py = plot['py'][plot['px'] - 0.5 * plot['pdx'] < 0]
- add2_x_pdx = plot['pdx'][plot['px'] - 0.5 * plot['pdx'] < 0]
- add2_x_pdy = plot['pdy'][plot['px'] - 0.5 * plot['pdx'] < 0]
- add2_x_field = plot[field][plot['px'] - 0.5 * plot['pdx'] < 0]
- add2_x_weight_field = plot['weight_field'][plot['px'] - 0.5 * plot['pdx'] < 0]
-
- # Cells hanging off the top edge.
- add_y_px = plot['px'][plot['py'] + 0.5 * plot['pdy'] > width[1]]
- add_y_py = plot['py'][plot['py'] + 0.5 * plot['pdy'] > width[1]]
- add_y_py -= width[1]
- add_y_pdx = plot['pdx'][plot['py'] + 0.5 * plot['pdy'] > width[1]]
- add_y_pdy = plot['pdy'][plot['py'] + 0.5 * plot['pdy'] > width[1]]
- add_y_field = plot[field][plot['py'] + 0.5 * plot['pdy'] > width[1]]
- add_y_weight_field = plot['weight_field'][plot['py'] + 0.5 * plot['pdy'] > width[1]]
-
- # Cells hanging off the bottom edge.
- add2_y_px = plot['px'][plot['py'] - 0.5 * plot['pdy'] < 0]
- add2_y_py = plot['py'][plot['py'] - 0.5 * plot['pdy'] < 0]
- add2_y_py += width[1]
- add2_y_pdx = plot['pdx'][plot['py'] - 0.5 * plot['pdy'] < 0]
- add2_y_pdy = plot['pdy'][plot['py'] - 0.5 * plot['pdy'] < 0]
- add2_y_field = plot[field][plot['py'] - 0.5 * plot['pdy'] < 0]
- add2_y_weight_field = plot['weight_field'][plot['py'] - 0.5 * plot['pdy'] < 0]
-
- # Add the hanging cells back to the projection data.
- plot.field_data['px'] = np.concatenate([plot['px'], add_x_px, add_y_px,
- add2_x_px, add2_y_px])
- plot.field_data['py'] = np.concatenate([plot['py'], add_x_py, add_y_py,
- add2_x_py, add2_y_py])
- plot.field_data['pdx'] = np.concatenate([plot['pdx'], add_x_pdx, add_y_pdx,
- add2_x_pdx, add2_y_pdx])
- plot.field_data['pdy'] = np.concatenate([plot['pdy'], add_x_pdy, add_y_pdy,
- add2_x_pdy, add2_y_pdy])
- plot.field_data[field] = np.concatenate([plot[field], add_x_field, add_y_field,
- add2_x_field, add2_y_field])
- plot.field_data['weight_field'] = np.concatenate([plot['weight_field'],
- add_x_weight_field, add_y_weight_field,
- add2_x_weight_field, add2_y_weight_field])
-
- # Delete original copies of hanging cells.
- del add_x_px, add_y_px, add2_x_px, add2_y_px
- del add_x_py, add_y_py, add2_x_py, add2_y_py
- del add_x_pdx, add_y_pdx, add2_x_pdx, add2_y_pdx
- del add_x_pdy, add_y_pdy, add2_x_pdy, add2_y_pdy
- del add_x_field, add_y_field, add2_x_field, add2_y_field
- del add_x_weight_field, add_y_weight_field, add2_x_weight_field, add2_y_weight_field
-
-class FakeProfile(ParallelAnalysisInterface):
- """
- This is used to mimic a profile object when reading profile data from disk.
- """
- def __init__(self, pf):
- ParallelAnalysisInterface.__init__(self)
- self.pf = pf
- self._data = {}
-
- def __getitem__(self, key):
- return self._data[key]
-
- def keys(self):
- return self._data.keys()
-
-standard_fields = [
- ("Density", "CellMassMsun", False),
- ("Temperature", "CellMassMsun", False),
- ("VelocityMagnitude", "CellMassMsun", False),
- ("Ones", None, False),
- ("Entropy", "CellMassMsun", False),
- ("RadialVelocity", "CellMassMsun", False),
- ("SpecificAngularMomentumX", "CellMassMsun", False),
- ("SpecificAngularMomentumY", "CellMassMsun", False),
- ("SpecificAngularMomentumZ", "CellMassMsun", False),
- ("CoolingTime", "CellMassMsun", False),
- ("DynamicalTime", "CellMassMsun", False),
- ("CellMassMsun", None, True),
- ("TotalMassMsun", None, True),
- ("Dark_Matter_Density", "CellMassMsun", False),
- #("ParticleSpecificAngularMomentumX", "ParticleMassMsun"),
- #("ParticleSpecificAngularMomentumY", "ParticleMassMsun"),
- #("ParticleSpecificAngularMomentumZ", "ParticleMassMsun"),
- ("OverDensity", "CellMassMsun", False),
- #("ParticleMassMsun", None),
- ("StarParticleDensity", "StarParticleMassMsun", False), # How do we weight this?
- #("StarParticleMassMsun", None),
- ("StarParticleDensity", "StarParticleMassMsun", False), # How do we weight this?
-]
-
-standard_fields += [("%s_Fraction" % (s), "CellMassMsun", False)
- for s in ["HI","HII","HeI","HeII","HeIII","H2I","H2II",
- "HM","Electron", "DI","DII","HDI","Metal"]
-]
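For reference, the core of the removed _shift_projections helper is a
periodic wrap of projected cell positions. A minimal sketch of that wrap
step, using made-up position arrays and a domain width in place of a real
projection object:

    import numpy as np

    # Made-up cell positions along one plot axis; width is the periodic
    # domain width along that axis.
    px = np.array([-0.1, 0.2, 0.7, 1.05])
    width = 1.0

    # Wrap off-edge cells back around to the other side, as the removed
    # helper does for both px and py after shifting.
    px[px < 0] += width
    px[px > width] -= width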
diff -r 14dfbdf28b07dd18e112abafce364d04a4864829 -r d24a8070639df84aa70a4c1b72bb150a56805b5d yt/analysis_modules/halo_profiler/setup.py
--- a/yt/analysis_modules/halo_profiler/setup.py
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
-
-
-def configuration(parent_package='', top_path=None):
- from numpy.distutils.misc_util import Configuration
- config = Configuration('halo_profiler', parent_package, top_path)
- config.make_config_py() # installs __config__.py
- #config.make_svn_version_py()
- return config
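The per-module setup.py files removed in this changeset follow the standard
numpy.distutils pattern. A parent package built the same way would register
each subpackage with add_subpackage, so removing a module also means
dropping that registration line; a sketch of the assumed parent-side
layout (not shown in this diff):

    def configuration(parent_package='', top_path=None):
        from numpy.distutils.misc_util import Configuration
        config = Configuration('analysis_modules', parent_package, top_path)
        # Each surviving analysis module stays registered; the removed
        # ones (e.g. halo_profiler) lose their add_subpackage lines.
        config.add_subpackage('absorption_spectrum')
        return config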
diff -r 14dfbdf28b07dd18e112abafce364d04a4864829 -r d24a8070639df84aa70a4c1b72bb150a56805b5d yt/analysis_modules/halo_profiler/standard_analysis.py
--- a/yt/analysis_modules/halo_profiler/standard_analysis.py
+++ /dev/null
@@ -1,75 +0,0 @@
-"""
-This module contains a near-replacement for enzo_anyl
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-import os
-
-from yt.data_objects.profiles import BinnedProfile1D
-from yt.funcs import *
-
-class StandardRadialAnalysis(object):
- def __init__(self, pf, center, radius, n_bins = 128, inner_radius = None):
- raise NotImplementedError # see TODO
- self.pf = pf
- # We actually don't want to replicate the handling of setting the
- # center here, so we will pass it to the sphere creator.
- # Note also that the sphere can handle (val, unit) for radius, so we
- # will grab that from the sphere as well
- self.obj = pf.sphere(center, radius)
- if inner_radius is None:
- inner_radius = pf.index.get_smallest_dx() * pf['cm']
- self.inner_radius = inner_radius
- self.outer_radius = self.obj.radius * pf['cm']
- self.n_bins = n_bins
-
- def setup_field_parameters(self):
- # First the bulk velocity
- bv = self.obj.quantities["BulkVelocity"]()
- self.obj.set_field_parameter("bulk_velocity", bv)
-
- def construct_profile(self):
- # inner_bound in cm, outer_bound in same
- # Note that in some cases, we will need to massage this object.
- prof = BinnedProfile1D(self.obj, self.n_bins, "Radius",
- self.inner_radius, self.outer_radius)
- by_weights = defaultdict(list)
- # TODO: analysis_field_list is undefined
- for fspec in analysis_field_list:
- if isinstance(fspec, types.TupleType) and len(fspec) == 2:
- field, weight = fspec
- else:
- field, weight = fspec, "CellMassMsun"
- by_weights[weight].append(field)
- known_fields = set(self.pf.field_list + self.pf.derived_field_list)
- for weight, fields in by_weights.items():
- fields = set(fields)
- fields.intersection_update(known_fields)
- prof.add_fields(list(fields), weight=weight)
- self.prof = prof
-
- def plot_everything(self, dirname = None):
- if not dirname:
- dirname = "%s_profile_plots/" % (self.pf)
- if not os.path.isdir(dirname):
- os.makedirs(dirname)
- import matplotlib; matplotlib.use("Agg")
- import pylab
- for field in self.prof.keys():
- if field in ("UsedBins", "Radius"): continue
- pylab.clf()
- pylab.loglog(self.prof["Radius"], self.prof[field], '-x')
- pylab.xlabel("Radius [cm]")
- pylab.ylabel("%s" % field)
- pylab.savefig(os.path.join(
- dirname, "Radius_%s.png" % (field.replace(" ","_"))))
diff -r 14dfbdf28b07dd18e112abafce364d04a4864829 -r d24a8070639df84aa70a4c1b72bb150a56805b5d yt/analysis_modules/radial_column_density/api.py
--- a/yt/analysis_modules/radial_column_density/api.py
+++ /dev/null
@@ -1,16 +0,0 @@
-"""
-API for radial_column_density
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from .radial_column_density import RadialColumnDensity
This diff is so big that we needed to truncate the remainder.
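The truncated remainder includes the profile I/O shown earlier:
_write_profile emits a '#'-prefixed header of field names over
tab-separated numeric columns, and _read_profile_ascii reads it back. A
minimal standalone reader for that layout (a sketch, assuming a
well-formed file):

    import numpy as np

    def read_profile_ascii(filename):
        # The header looks like "# field1<TAB>field2...", followed by
        # one tab-separated row of values per radial bin.
        with open(filename) as f:
            fields = f.readline().split()[1:]  # drop the leading '#'
        data = np.loadtxt(filename)  # '#' header is skipped as a comment
        return dict((field, data[:, q]) for q, field in enumerate(fields))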
https://bitbucket.org/yt_analysis/yt/commits/0495a0da7d35/
Changeset: 0495a0da7d35
Branch: yt-3.0
User: jzuhone
Date: 2014-07-11 18:54:19
Summary: Remove this file
Affected #: 1 file
diff -r d24a8070639df84aa70a4c1b72bb150a56805b5d -r 0495a0da7d35bb301db2a612c6ebe584086a613a yt/analysis_modules/api.py
--- a/yt/analysis_modules/api.py
+++ /dev/null
@@ -1,106 +0,0 @@
-"""
-API for yt.analysis_modules
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from .absorption_spectrum.api import \
- AbsorptionSpectrum
-
-from .cosmological_observation.api import \
- CosmologySplice, \
- LightCone, \
- find_unique_solutions, \
- project_unique_light_cones, \
- LightRay
-
-from .halo_finding.api import \
- Halo, \
- HOPHalo, \
- parallelHOPHalo, \
- LoadedHalo, \
- FOFHalo, \
- HaloList, \
- HOPHaloList, \
- FOFHaloList, \
- parallelHOPHaloList, \
- LoadedHaloList, \
- GenericHaloFinder, \
- parallelHF, \
- HOPHaloFinder, \
- FOFHaloFinder, \
- HaloFinder, \
- LoadHaloes
-
-from .halo_mass_function.api import \
- HaloMassFcn, \
- TransferFunction, \
- integrate_inf
-
-from .halo_merger_tree.api import \
- DatabaseFunctions, \
- MergerTree, \
- MergerTreeConnect, \
- Node, \
- Link, \
- MergerTreeDotOutput, \
- MergerTreeTextOutput
-
-from .level_sets.api import \
- identify_contours, \
- Clump, \
- find_clumps, \
- get_lowest_clumps, \
- write_clump_index, \
- write_clumps, \
- write_old_clump_index, \
- write_old_clumps, \
- write_old_clump_info, \
- _DistanceToMainClump, \
- recursive_all_clumps, \
- return_all_clumps, \
- return_bottom_clumps, \
- recursive_bottom_clumps, \
- clump_list_sort
-
-from .spectral_integrator.api import \
- add_xray_emissivity_field
-
-from .star_analysis.api import \
- StarFormationRate, \
- SpectrumBuilder
-
-from .two_point_functions.api import \
- TwoPointFunctions, \
- FcnSet
-
-from .sunyaev_zeldovich.api import SZProjection
-
-from .radmc3d_export.api import \
- RadMC3DWriter
-
-from .particle_trajectories.api import \
- ParticleTrajectories
-
-from .photon_simulator.api import \
- PhotonList, \
- EventList, \
- SpectralModel, \
- XSpecThermalModel, \
- XSpecAbsorbModel, \
- TableApecModel, \
- TableAbsorbModel, \
- PhotonModel, \
- ThermalPhotonModel
-
-from .ppv_cube.api import \
- PPVCube
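With the aggregate yt.analysis_modules.api module gone, user scripts import
from each module's own api file instead, which is exactly what the
documentation updates in the next changeset do:

    # No longer available after this changeset:
    # from yt.analysis_modules.api import PPVCube, SZProjection

    # Import from the per-module api files instead:
    from yt.analysis_modules.ppv_cube.api import PPVCube
    from yt.analysis_modules.sunyaev_zeldovich.api import SZProjection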
https://bitbucket.org/yt_analysis/yt/commits/7f05dc865802/
Changeset: 7f05dc865802
Branch: yt-3.0
User: jzuhone
Date: 2014-07-14 22:15:29
Summary: Removing references to global analysis modules api from the docs
Affected #: 6 files
diff -r 0495a0da7d35bb301db2a612c6ebe584086a613a -r 7f05dc865802e5ed16ff88941b386a0a944d43c2 doc/source/analyzing/analysis_modules/PPVCube.ipynb
--- a/doc/source/analyzing/analysis_modules/PPVCube.ipynb
+++ b/doc/source/analyzing/analysis_modules/PPVCube.ipynb
@@ -1,7 +1,7 @@
{
"metadata": {
"name": "",
- "signature": "sha256:3f810954006851303837edb8fd85ee6583a883122b0f4867903562546c4f19d2"
+ "signature": "sha256:ba8b6a53571695ae1d0c236ad43875823746e979a329a9d35ab0a8b899cebbba"
},
"nbformat": 3,
"nbformat_minor": 0,
@@ -21,7 +21,7 @@
"input": [
"%matplotlib inline\n",
"from yt.mods import *\n",
- "from yt.analysis_modules.api import PPVCube"
+ "from yt.analysis_modules.ppv_cube.api import PPVCube"
],
"language": "python",
"metadata": {},
diff -r 0495a0da7d35bb301db2a612c6ebe584086a613a -r 7f05dc865802e5ed16ff88941b386a0a944d43c2 doc/source/analyzing/analysis_modules/SZ_projections.ipynb
--- a/doc/source/analyzing/analysis_modules/SZ_projections.ipynb
+++ b/doc/source/analyzing/analysis_modules/SZ_projections.ipynb
@@ -1,7 +1,7 @@
{
"metadata": {
"name": "",
- "signature": "sha256:7fc053480ba7896bfa5905bd69f7b3dd326364fbab324975b76f79640f2e0adf"
+ "signature": "sha256:4745a15abb6512547b50280b92c22567f89255189fd968ca706ef7c39d48024f"
},
"nbformat": 3,
"nbformat_minor": 0,
@@ -91,7 +91,7 @@
"input": [
"%matplotlib inline\n",
"from yt.mods import *\n",
- "from yt.analysis_modules.api import SZProjection\n",
+ "from yt.analysis_modules.sunyaev_zeldovich.api import SZProjection\n",
"\n",
"ds = load(\"enzo_tiny_cosmology/DD0046/DD0046\")\n",
"\n",
diff -r 0495a0da7d35bb301db2a612c6ebe584086a613a -r 7f05dc865802e5ed16ff88941b386a0a944d43c2 doc/source/analyzing/analysis_modules/absorption_spectrum.rst
--- a/doc/source/analyzing/analysis_modules/absorption_spectrum.rst
+++ b/doc/source/analyzing/analysis_modules/absorption_spectrum.rst
@@ -35,7 +35,7 @@
.. code-block:: python
- from yt.analysis_modules.api import AbsorptionSpectrum
+ from yt.analysis_modules.absorption_spectrum.api import AbsorptionSpectrum
sp = AbsorptionSpectrum(900.0, 1800.0, 10000)
diff -r 0495a0da7d35bb301db2a612c6ebe584086a613a -r 7f05dc865802e5ed16ff88941b386a0a944d43c2 doc/source/analyzing/analysis_modules/light_ray_generator.rst
--- a/doc/source/analyzing/analysis_modules/light_ray_generator.rst
+++ b/doc/source/analyzing/analysis_modules/light_ray_generator.rst
@@ -25,7 +25,7 @@
.. code-block:: python
- from yt.analysis_modules.api import LightRay
+ from yt.analysis_modules.cosmological_observation.api import LightRay
lr = LightRay("enzo_tiny_cosmology/32Mpc_32.enzo",
'Enzo', 0.0, 0.1)
diff -r 0495a0da7d35bb301db2a612c6ebe584086a613a -r 7f05dc865802e5ed16ff88941b386a0a944d43c2 doc/source/analyzing/analysis_modules/photon_simulator.rst
--- a/doc/source/analyzing/analysis_modules/photon_simulator.rst
+++ b/doc/source/analyzing/analysis_modules/photon_simulator.rst
@@ -386,7 +386,7 @@
from yt.mods import *
from yt.utilities.physical_constants import cm_per_kpc, K_per_keV, mp
from yt.utilities.cosmology import Cosmology
- from yt.analysis_modules.api import *
+ from yt.analysis_modules.photon_simulator.api import *
import aplpy
R = 1000. # in kpc
diff -r 0495a0da7d35bb301db2a612c6ebe584086a613a -r 7f05dc865802e5ed16ff88941b386a0a944d43c2 doc/source/analyzing/analysis_modules/planning_cosmology_simulations.rst
--- a/doc/source/analyzing/analysis_modules/planning_cosmology_simulations.rst
+++ b/doc/source/analyzing/analysis_modules/planning_cosmology_simulations.rst
@@ -10,7 +10,7 @@
.. code-block:: python
- from yt.analysis_modules.api import CosmologySplice
+ from yt.analysis_modules.cosmological_observation.api import CosmologySplice
my_splice = CosmologySplice('enzo_tiny_cosmology/32Mpc_32.enzo', 'Enzo')
my_splice.plan_cosmology_splice(0.0, 0.1, filename='redshifts.out')
https://bitbucket.org/yt_analysis/yt/commits/7e7ef2aaa715/
Changeset: 7e7ef2aaa715
Branch: yt-3.0
User: ngoldbaum
Date: 2014-07-15 18:12:52
Summary: Merged in jzuhone/yt/yt-3.0 (pull request #1009)
Removing analysis modules as per Trello card
Affected #: 22 files
diff -r 2e2d9a9bc75add24c98e8fc13217e3033985e1f3 -r 7e7ef2aaa7159717cda503f44cdcf379f8060aa2 doc/source/analyzing/analysis_modules/PPVCube.ipynb
--- a/doc/source/analyzing/analysis_modules/PPVCube.ipynb
+++ b/doc/source/analyzing/analysis_modules/PPVCube.ipynb
@@ -1,7 +1,7 @@
{
"metadata": {
"name": "",
- "signature": "sha256:3f810954006851303837edb8fd85ee6583a883122b0f4867903562546c4f19d2"
+ "signature": "sha256:ba8b6a53571695ae1d0c236ad43875823746e979a329a9d35ab0a8b899cebbba"
},
"nbformat": 3,
"nbformat_minor": 0,
@@ -21,7 +21,7 @@
"input": [
"%matplotlib inline\n",
"from yt.mods import *\n",
- "from yt.analysis_modules.api import PPVCube"
+ "from yt.analysis_modules.ppv_cube.api import PPVCube"
],
"language": "python",
"metadata": {},
diff -r 2e2d9a9bc75add24c98e8fc13217e3033985e1f3 -r 7e7ef2aaa7159717cda503f44cdcf379f8060aa2 doc/source/analyzing/analysis_modules/SZ_projections.ipynb
--- a/doc/source/analyzing/analysis_modules/SZ_projections.ipynb
+++ b/doc/source/analyzing/analysis_modules/SZ_projections.ipynb
@@ -1,7 +1,7 @@
{
"metadata": {
"name": "",
- "signature": "sha256:7fc053480ba7896bfa5905bd69f7b3dd326364fbab324975b76f79640f2e0adf"
+ "signature": "sha256:4745a15abb6512547b50280b92c22567f89255189fd968ca706ef7c39d48024f"
},
"nbformat": 3,
"nbformat_minor": 0,
@@ -91,7 +91,7 @@
"input": [
"%matplotlib inline\n",
"from yt.mods import *\n",
- "from yt.analysis_modules.api import SZProjection\n",
+ "from yt.analysis_modules.sunyaev_zeldovich.api import SZProjection\n",
"\n",
"ds = load(\"enzo_tiny_cosmology/DD0046/DD0046\")\n",
"\n",
diff -r 2e2d9a9bc75add24c98e8fc13217e3033985e1f3 -r 7e7ef2aaa7159717cda503f44cdcf379f8060aa2 doc/source/analyzing/analysis_modules/absorption_spectrum.rst
--- a/doc/source/analyzing/analysis_modules/absorption_spectrum.rst
+++ b/doc/source/analyzing/analysis_modules/absorption_spectrum.rst
@@ -35,7 +35,7 @@
.. code-block:: python
- from yt.analysis_modules.api import AbsorptionSpectrum
+ from yt.analysis_modules.absorption_spectrum.api import AbsorptionSpectrum
sp = AbsorptionSpectrum(900.0, 1800.0, 10000)
diff -r 2e2d9a9bc75add24c98e8fc13217e3033985e1f3 -r 7e7ef2aaa7159717cda503f44cdcf379f8060aa2 doc/source/analyzing/analysis_modules/light_ray_generator.rst
--- a/doc/source/analyzing/analysis_modules/light_ray_generator.rst
+++ b/doc/source/analyzing/analysis_modules/light_ray_generator.rst
@@ -26,7 +26,7 @@
.. code-block:: python
- from yt.analysis_modules.api import LightRay
+ from yt.analysis_modules.cosmological_observation.api import LightRay
lr = LightRay("enzo_tiny_cosmology/32Mpc_32.enzo",
'Enzo', 0.0, 0.1)
diff -r 2e2d9a9bc75add24c98e8fc13217e3033985e1f3 -r 7e7ef2aaa7159717cda503f44cdcf379f8060aa2 doc/source/analyzing/analysis_modules/photon_simulator.rst
--- a/doc/source/analyzing/analysis_modules/photon_simulator.rst
+++ b/doc/source/analyzing/analysis_modules/photon_simulator.rst
@@ -386,7 +386,7 @@
from yt.mods import *
from yt.utilities.physical_constants import cm_per_kpc, K_per_keV, mp
from yt.utilities.cosmology import Cosmology
- from yt.analysis_modules.api import *
+ from yt.analysis_modules.photon_simulator.api import *
import aplpy
R = 1000. # in kpc
diff -r 2e2d9a9bc75add24c98e8fc13217e3033985e1f3 -r 7e7ef2aaa7159717cda503f44cdcf379f8060aa2 doc/source/analyzing/analysis_modules/planning_cosmology_simulations.rst
--- a/doc/source/analyzing/analysis_modules/planning_cosmology_simulations.rst
+++ b/doc/source/analyzing/analysis_modules/planning_cosmology_simulations.rst
@@ -10,7 +10,7 @@
.. code-block:: python
- from yt.analysis_modules.api import CosmologySplice
+ from yt.analysis_modules.cosmological_observation.api import CosmologySplice
my_splice = CosmologySplice('enzo_tiny_cosmology/32Mpc_32.enzo', 'Enzo')
my_splice.plan_cosmology_splice(0.0, 0.1, filename='redshifts.out')
diff -r 2e2d9a9bc75add24c98e8fc13217e3033985e1f3 -r 7e7ef2aaa7159717cda503f44cdcf379f8060aa2 yt/analysis_modules/api.py
--- a/yt/analysis_modules/api.py
+++ /dev/null
@@ -1,117 +0,0 @@
-"""
-API for yt.analysis_modules
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from .absorption_spectrum.api import \
- AbsorptionSpectrum
-
-from .coordinate_transformation.api import \
- spherical_regrid
-
-from .cosmological_observation.api import \
- CosmologySplice, \
- LightCone, \
- find_unique_solutions, \
- project_unique_light_cones, \
- LightRay
-
-from .halo_finding.api import \
- Halo, \
- HOPHalo, \
- parallelHOPHalo, \
- LoadedHalo, \
- FOFHalo, \
- HaloList, \
- HOPHaloList, \
- FOFHaloList, \
- parallelHOPHaloList, \
- LoadedHaloList, \
- GenericHaloFinder, \
- parallelHF, \
- HOPHaloFinder, \
- FOFHaloFinder, \
- HaloFinder, \
- LoadHaloes
-
-from .halo_mass_function.api import \
- HaloMassFcn, \
- TransferFunction, \
- integrate_inf
-
-from .halo_merger_tree.api import \
- DatabaseFunctions, \
- MergerTree, \
- MergerTreeConnect, \
- Node, \
- Link, \
- MergerTreeDotOutput, \
- MergerTreeTextOutput
-
-from .halo_profiler.api import \
- VirialFilter, \
- HaloProfiler, \
- FakeProfile
-
-from .level_sets.api import \
- identify_contours, \
- Clump, \
- find_clumps, \
- get_lowest_clumps, \
- write_clump_index, \
- write_clumps, \
- write_old_clump_index, \
- write_old_clumps, \
- write_old_clump_info, \
- _DistanceToMainClump, \
- recursive_all_clumps, \
- return_all_clumps, \
- return_bottom_clumps, \
- recursive_bottom_clumps, \
- clump_list_sort
-
-from .radial_column_density.api import \
- RadialColumnDensity
-
-from .spectral_integrator.api import \
- add_xray_emissivity_field
-
-from .star_analysis.api import \
- StarFormationRate, \
- SpectrumBuilder
-
-from .two_point_functions.api import \
- TwoPointFunctions, \
- FcnSet
-
-from .sunyaev_zeldovich.api import SZProjection
-
-from .radmc3d_export.api import \
- RadMC3DWriter
-
-from .particle_trajectories.api import \
- ParticleTrajectories
-
-from .photon_simulator.api import \
- PhotonList, \
- EventList, \
- SpectralModel, \
- XSpecThermalModel, \
- XSpecAbsorbModel, \
- TableApecModel, \
- TableAbsorbModel, \
- PhotonModel, \
- ThermalPhotonModel
-
-from .ppv_cube.api import \
- PPVCube
diff -r 2e2d9a9bc75add24c98e8fc13217e3033985e1f3 -r 7e7ef2aaa7159717cda503f44cdcf379f8060aa2 yt/analysis_modules/coordinate_transformation/api.py
--- a/yt/analysis_modules/coordinate_transformation/api.py
+++ /dev/null
@@ -1,17 +0,0 @@
-"""
-API for coordinate_transformation
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from .transforms import \
- spherical_regrid
diff -r 2e2d9a9bc75add24c98e8fc13217e3033985e1f3 -r 7e7ef2aaa7159717cda503f44cdcf379f8060aa2 yt/analysis_modules/coordinate_transformation/setup.py
--- a/yt/analysis_modules/coordinate_transformation/setup.py
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
-
-import os.path
-
-
-def configuration(parent_package='', top_path=None):
- from numpy.distutils.misc_util import Configuration
- config = Configuration('coordinate_transformation',
- parent_package, top_path)
- config.make_config_py() # installs __config__.py
- #config.make_svn_version_py()
- return config
diff -r 2e2d9a9bc75add24c98e8fc13217e3033985e1f3 -r 7e7ef2aaa7159717cda503f44cdcf379f8060aa2 yt/analysis_modules/coordinate_transformation/transforms.py
--- a/yt/analysis_modules/coordinate_transformation/transforms.py
+++ /dev/null
@@ -1,117 +0,0 @@
-"""
-Transformations between coordinate systems
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-import numpy as np
-from yt.funcs import *
-
-from yt.utilities.linear_interpolators import \
- TrilinearFieldInterpolator
-
-def spherical_regrid(pf, nr, ntheta, nphi, rmax, fields,
- center=None, smoothed=True):
- """
- This function takes a parameter file (*pf*) along with the *nr*, *ntheta*
- and *nphi* points to generate out to *rmax*, and it grids *fields* onto
- those points and returns a dict. *center* if supplied will be the center,
- otherwise the most dense point will be chosen. *smoothed* governs whether
- regular covering grids or smoothed covering grids will be used.
- """
- mylog.warning("This code may produce some artifacts of interpolation")
- mylog.warning("See yt/extensions/coordinate_transforms.py for plotting information")
- if center is None: center = pf.h.find_max("Density")[1]
- fields = ensure_list(fields)
- r,theta,phi = np.mgrid[0:rmax:nr*1j,
- 0:np.pi:ntheta*1j,
- 0:2*np.pi:nphi*1j]
- new_grid = dict(r=r, theta=theta, phi=phi)
- new_grid['x'] = r*np.sin(theta)*np.cos(phi) + center[0]
- new_grid['y'] = r*np.sin(theta)*np.sin(phi) + center[1]
- new_grid['z'] = r*np.cos(theta) + center[2]
- sphere = pf.sphere(center, rmax)
- return arbitrary_regrid(new_grid, sphere, fields, smoothed)
-
-def arbitrary_regrid(new_grid, data_source, fields, smoothed=True):
- """
- This function accepts a dict of points 'x', 'y' and 'z' and a data source
- from which to interpolate new points, along with a list of fields it needs
- to regrid onto those xyz points. It then returns interpolated points.
- This has not been well-tested other than for regular spherical regridding.
- """
- fields = ensure_list(fields)
- new_grid['handled'] = np.zeros(new_grid['x'].shape, dtype='bool')
- for field in fields:
- new_grid[field] = np.zeros(new_grid['x'].shape, dtype='float64')
- grid_order = np.argsort(data_source.grid_levels[:,0])
- ng = len(data_source._grids)
-
- for i,grid in enumerate(data_source._grids[grid_order][::-1]):
- mylog.info("Regridding grid % 4i / % 4i (%s - %s)", i, ng, grid.id, grid.Level)
- cg = grid.retrieve_ghost_zones(1, fields, smoothed=smoothed)
-
- # makes x0,x1,y0,y1,z0,z1
- bounds = np.concatenate(zip(cg.left_edge, cg.right_edge))
-
-
- # Now we figure out which of our points are inside this grid
- # Note that we're only looking at the grid, not the grid-with-ghost-zones
- point_ind = np.ones(new_grid['handled'].shape, dtype='bool') # everything at first
- for i,ax in enumerate('xyz'): # i = 0,1,2 ; ax = x, y, z
- # &= does a logical_and on the array
- point_ind &= ( ( grid.LeftEdge[i] <= new_grid[ax] )
- & ( new_grid[ax] <= grid.RightEdge[i] ) )
- point_ind &= (new_grid['handled'] == False) # only want unhandled points
-
- # If we don't have any, we can just leave
- if point_ind.sum() == 0: continue
-
- # because of the funky way the interpolator takes points, we have to make a
- # new dict of just the points inside this grid
- point_grid = {'x' : new_grid['x'][point_ind],
- 'y' : new_grid['y'][point_ind],
- 'z' : new_grid['z'][point_ind]}
-
- # Now we know which of the points in new_grid are inside this grid
- for field in fields:
- interpolator = TrilinearFieldInterpolator(
- cg[field],bounds,['x','y','z'])
- new_grid[field][point_ind] = interpolator(point_grid)
-
- new_grid['handled'][point_ind] = True
-
- mylog.info("Finished with %s dangling points",
- new_grid['handled'].size - new_grid['handled'].sum())
-
- return new_grid
-
-"""
-# The following will work to plot through different slices:
-
-import pylab
-for i in range(n_theta):
- print "Doing % 3i / % 3i" % (i, n_theta)
- pylab.clf()
- ax=pylab.subplot(1,1,1, projection="polar", aspect=1.)
- ax.pcolormesh(phi[:,i,:], r[:,i,:],
- np.log10(sph_grid[field][:,i,:]))
- pylab.savefig("polar/latitude_%03i.png" % i)
-
-for i in range(n_phi):
- print "Doing % 3i / % 3i" % (i, n_phi)
- pylab.clf()
- ax=pylab.subplot(1,1,1, projection="polar", aspect=1.)
- ax.pcolormesh(theta[:,:,i], r[:,:,i],
- np.log10(sph_grid[field][:,:,i]))
- pylab.savefig("polar/longitude_%03i.png" % i)
-"""
diff -r 2e2d9a9bc75add24c98e8fc13217e3033985e1f3 -r 7e7ef2aaa7159717cda503f44cdcf379f8060aa2 yt/analysis_modules/halo_profiler/api.py
--- a/yt/analysis_modules/halo_profiler/api.py
+++ /dev/null
@@ -1,22 +0,0 @@
-"""
-API for halo_profiler
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from .halo_filters import \
- VirialFilter
-
-from .multi_halo_profiler import \
- HaloProfiler, \
- FakeProfile, \
- standard_fields
diff -r 2e2d9a9bc75add24c98e8fc13217e3033985e1f3 -r 7e7ef2aaa7159717cda503f44cdcf379f8060aa2 yt/analysis_modules/halo_profiler/centering_methods.py
--- a/yt/analysis_modules/halo_profiler/centering_methods.py
+++ /dev/null
@@ -1,107 +0,0 @@
-"""
-HaloProfiler re-centering functions.
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-import numpy as np
-
-from yt.funcs import *
-
-from yt.fields.local_fields import \
- add_field
-
-centering_registry = {}
-
-def add_function(name):
- def wrapper(func):
- centering_registry[name] = func
- return func
- return wrapper
-
-#### Dark Matter Density ####
-
-@add_function("Min_Dark_Matter_Density")
-def find_minimum_dm_density(data):
- ma, maxi, mx, my, mz, mg = data.quantities['MinLocation']('Dark_Matter_Density',
- preload=False)
- return (mx, my, mz)
-
-@add_function("Max_Dark_Matter_Density")
-def find_maximum_dm_density(data):
- ma, maxi, mx, my, mz, mg = data.quantities['MaxLocation']('Dark_Matter_Density',
- preload=False)
- return (mx, my, mz)
-
-@add_function("CoM_Dark_Matter_Density")
-def find_CoM_dm_density(data):
- dc_x, dc_y, dc_z = data.quantities['CenterOfMass'](use_cells=False,
- use_particles=True,
- preload=False)
- return (dc_x, dc_y, dc_z)
-
-#### Gas Density ####
-
-@add_function("Min_Gas_Density")
-def find_minimum_gas_density(data):
- ma, maxi, mx, my, mz, mg = data.quantities['MinLocation']('Density',
- preload=False)
- return (mx, my, mz)
-
-@add_function("Max_Gas_Density")
-def find_maximum_gas_density(data):
- ma, maxi, mx, my, mz, mg = data.quantities['MaxLocation']('Density',
- preload=False)
- return (mx, my, mz)
-
-@add_function("CoM_Gas_Density")
-def find_CoM_gas_density(data):
- dc_x, dc_y, dc_z = data.quantities['CenterOfMass'](use_cells=True,
- use_particles=False,
- preload=False)
- return (dc_x, dc_y, dc_z)
-
-#### Total Density ####
-
-@add_function("Min_Total_Density")
-def find_minimum_total_density(data):
- ma, maxi, mx, my, mz, mg = data.quantities['MinLocation']('Matter_Density',
- preload=False)
- return (mx, my, mz)
-
-@add_function("Max_Total_Density")
-def find_maximum_total_density(data):
- ma, maxi, mx, my, mz, mg = data.quantities['MaxLocation']('Matter_Density',
- preload=False)
- return (mx, my, mz)
-
-@add_function("CoM_Total_Density")
-def find_CoM_total_density(data):
- dc_x, dc_y, dc_z = data.quantities['CenterOfMass'](use_cells=True,
- use_particles=True,
- preload=False)
- return (dc_x, dc_y, dc_z)
-
-#### Temperature ####
-
-@add_function("Min_Temperature")
-def find_minimum_temperature(data):
- ma, mini, mx, my, mz, mg = data.quantities['MinLocation']('Temperature',
- preload=False)
- return (mx, my, mz)
-
-@add_function("Max_Temperature")
-def find_maximum_temperature(data):
- ma, maxi, mx, my, mz, mg = data.quantities['MaxLocation']('Temperature',
- preload=False)
- return (mx, my, mz)
-
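The removed centering_methods module registers each centering function in a
module-level dict through a small decorator. That registry pattern in
isolation, with a stand-in body instead of the real data.quantities call:

    centering_registry = {}

    def add_function(name):
        # Decorator that records the wrapped function under `name`.
        def wrapper(func):
            centering_registry[name] = func
            return func
        return wrapper

    @add_function("Max_Gas_Density")
    def find_maximum_gas_density(data):
        # Stand-in body; the removed version queried data.quantities.
        return data["max_gas_density_location"]

    # Callers then look centering methods up by name:
    center_func = centering_registry["Max_Gas_Density"]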
diff -r 2e2d9a9bc75add24c98e8fc13217e3033985e1f3 -r 7e7ef2aaa7159717cda503f44cdcf379f8060aa2 yt/analysis_modules/halo_profiler/halo_filters.py
--- a/yt/analysis_modules/halo_profiler/halo_filters.py
+++ /dev/null
@@ -1,153 +0,0 @@
-"""
-Halo filters to be used with the HaloProfiler.
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-from copy import deepcopy
-import numpy as np
-
-from yt.funcs import *
-from yt.utilities.physical_constants import TINY
-
-def VirialFilter(profile, overdensity_field='ActualOverdensity',
- virial_overdensity=200., must_be_virialized=True,
- virial_filters=[['TotalMassMsun', '>=','1e14']],
- virial_quantities=['TotalMassMsun', 'RadiusMpc'],
- virial_index=None, use_log=False):
- r"""Filter halos by virial quantities.
-
- Return values are a True or False whether the halo passed the filter,
- along with a dictionary of virial quantities for the fields specified in
- the virial_quantities keyword. Thresholds for virial quantities are
- given with the virial_filters keyword in the following way:
- [field, condition, value].
-
- This is typically used as part of a call to `add_halo_filter`.
-
- Parameters
- ----------
- overdensity_field : string
- The field used for interpolation with the
- specified critical value given with 'virial_overdensity'.
- Default='ActualOverdensity'.
- virial_overdensity : float
- The value used to determine the outer radius of the virialized halo.
- Default: 200.
- must_be_virialized : bool
- If no values in the profile are above the
- value of virial_overdensity, the halo does not pass the filter.
- Default: True.
- virial_filters : array_like
- Conditional filters based on virial quantities
- given in the following way: [field, condition, value].
- Default: [['TotalMassMsun', '>=','1e14']].
- virial_quantities : array_like
- Fields for which interpolated values should
- be calculated and returned. Default: ['TotalMassMsun', 'RadiusMpc'].
- virial_index : array_like
- If given as a list, the index of the radial profile
- which is used for interpolation is placed here. Default: None.
- use_log : bool
- If True, interpolation is done in log space.
- Default: False.
-
- Examples
- --------
- >>> hp.add_halo_filter(HP.VirialFilter, must_be_virialized=True,
- overdensity_field='ActualOverdensity',
- virial_overdensity=200,
- virial_filters=[['TotalMassMsun','>=','1e14']],
- virial_quantities=['TotalMassMsun','RadiusMpc'])
-
- """
-
- fields = deepcopy(virial_quantities)
- if virial_filters is None: virial_filters = []
- for vfilter in virial_filters:
- if not vfilter[0] in fields:
- fields.append(vfilter[0])
-
- overDensity = []
- temp_profile = dict((field, []) for field in fields)
-
- for q in range(len(profile[overdensity_field])):
- good = True
- if (profile[overdensity_field][q] != profile[overdensity_field][q]):
- good = False
- continue
- for field in fields:
- if (profile[field][q] != profile[field][q]):
- good = False
- break
- if good:
- overDensity.append(profile[overdensity_field][q])
- for field in fields:
- temp_profile[field].append(profile[field][q])
-
- if use_log:
- for field in temp_profile.keys():
- temp_profile[field] = np.log10(np.clip(temp_profile[field], TINY,
- max(temp_profile[field])))
-
- virial = dict((field, 0.0) for field in fields)
-
- if (not (np.array(overDensity) >= virial_overdensity).any()) and \
- must_be_virialized:
- mylog.debug("This halo is not virialized!")
- return [False, {}]
-
- if (len(overDensity) < 2):
- mylog.debug("Skipping halo with no valid points in profile.")
- return [False, {}]
-
- if (overDensity[1] <= virial_overdensity):
- index = 0
- elif (overDensity[-1] >= virial_overdensity):
- index = -2
- else:
- for q in (np.arange(len(overDensity),0,-1)-1):
- if (overDensity[q] < virial_overdensity) and (overDensity[q-1] >= virial_overdensity):
- index = q - 1
- break
-
- if type(virial_index) is list:
- virial_index.append(index)
-
- for field in fields:
- if (overDensity[index+1] - overDensity[index]) == 0:
- mylog.debug("Overdensity profile has slope of zero.")
- return [False, {}]
- else:
- slope = (temp_profile[field][index+1] - temp_profile[field][index]) / \
- (overDensity[index+1] - overDensity[index])
- value = slope * (virial_overdensity - overDensity[index]) + \
- temp_profile[field][index]
- virial[field] = value
-
- if use_log:
- for field in virial.keys():
- virial[field] = np.power(10, virial[field])
-
- for vfilter in virial_filters:
- if eval("%s %s %s" % (virial[vfilter[0]],vfilter[1],vfilter[2])):
- mylog.debug("(%s %s %s) returned True for %s." % \
- (vfilter[0],vfilter[1],vfilter[2],virial[vfilter[0]]))
- continue
- else:
- mylog.debug("(%s %s %s) returned False for %s." % \
- (vfilter[0],vfilter[1],vfilter[2],virial[vfilter[0]]))
- return [False, {}]
-
- return [True, dict((("%s_%s" % (q, virial_overdensity)), virial[q])
- for q in virial_quantities)]
-
This diff is so big that we needed to truncate the remainder.
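For reference, the interpolation at the heart of the removed VirialFilter
locates the radial bin where the overdensity profile crosses the virial
threshold and interpolates each virial quantity linearly across it. A
minimal sketch with made-up profile values:

    import numpy as np

    # Made-up overdensity and cumulative mass profiles that cross the
    # threshold somewhere in the middle.
    over_density = np.array([500.0, 300.0, 150.0, 80.0])
    total_mass = np.array([1e13, 5e13, 1e14, 2e14])
    virial_overdensity = 200.0

    # Outermost bin still above the threshold, as the removed filter
    # located it.
    index = np.where(over_density >= virial_overdensity)[0][-1]

    # Linear interpolation between bins index and index + 1.
    slope = (total_mass[index + 1] - total_mass[index]) / \
            (over_density[index + 1] - over_density[index])
    virial_mass = slope * (virial_overdensity - over_density[index]) + \
                  total_mass[index]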
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the commit notification service enabled for
this repository.