[yt-svn] commit/cookbook: MatthewTurk: Moving some cookbooks to the docs

Bitbucket commits-noreply at bitbucket.org
Fri May 25 17:38:46 PDT 2012


1 new commit in cookbook:


https://bitbucket.org/yt_analysis/cookbook/changeset/a0aa0c76c5c0/
changeset:   a0aa0c76c5c0
user:        MatthewTurk
date:        2012-05-26 02:38:32
summary:     Moving some cookbooks to the docs
affected #:  10 files

diff -r a120514048dc1d257ee535fafd24131655c4f281 -r a0aa0c76c5c0c7220bc9449563b5520544648b3e recipes/average_value.py
--- a/recipes/average_value.py
+++ /dev/null
@@ -1,22 +0,0 @@
-"""
-This recipe finds the average value of a quantity over the entire box.  (See
-:ref:`derived-quantities`.)  Note that this recipe will take advantage of
-multiple CPUs if executed with mpirun and supplied with the --parallel
-command-line argument.
-"""
-from yt.mods import *
-
-fn = "RedshiftOutput0005" # parameter file to load
-pf = load(fn) # load data
-
-field = "Temperature"  # The field to average
-weight = "CellMassMsun" # The weight for the average
-
-dd = pf.h.all_data() # This is a region describing the entire box,
-                     # but note it doesn't read anything in yet!
-# We now use our 'quantities' call to get the average quantity
-average_value = dd.quantities["WeightedAverageQuantity"](
-        field, weight)
-
-print "Average %s (weighted by %s) is %0.5e" % (field, weight, average_value)
-

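The 'quantities' interface used above exposes other derived quantities on the
same object. A minimal sketch of two common ones, assuming the yt 2.x names
"Extrema" and "TotalQuantity" (not part of the deleted recipe):

    # Assumes pf and dd from the recipe above.
    # Get the (min, max) of a field over the region.
    mi, ma = dd.quantities["Extrema"]("Density")[0]
    # Sum a field over the region -- here, the total gas mass in solar masses.
    total_mass = dd.quantities["TotalQuantity"]("CellMassMsun")[0]
    print "Density runs from %0.5e to %0.5e" % (mi, ma)
    print "Total gas mass is %0.5e Msun" % total_mass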

diff -r a120514048dc1d257ee535fafd24131655c4f281 -r a0aa0c76c5c0c7220bc9449563b5520544648b3e recipes/boolean_data_objects.py
--- a/recipes/boolean_data_objects.py
+++ /dev/null
@@ -1,31 +0,0 @@
-"""
-This recipe shows the creation of a number of boolean data objects, which
-are built upon previously defined data objects. Boolean data objects can
-be used like any other data object, with a few exceptions. Please see
-:ref:`boolean_data_objects` for more information.
-"""
-from yt.mods import * # set up our namespace
-
-fn = "RedshiftOutput0005" # parameter file to load
-
-pf = load(fn) # load data
-# Make a few data objects to start.
-re1 = pf.h.region([0.5, 0.5, 0.5], [0.4, 0.4, 0.4], [0.6, 0.6, 0.6])
-re2 = pf.h.region([0.5, 0.5, 0.5], [0.5, 0.5, 0.5], [0.6, 0.6, 0.6])
-sp1 = pf.h.sphere([0.5, 0.5, 0.5], 0.05)
-sp2 = pf.h.sphere([0.1, 0.2, 0.3], 0.1)
-# The "AND" operator. This will make a region identical to re2.
-bool1 = pf.h.boolean([re1, "AND", re2])
-xp = bool1["particle_position_x"]
-# The "OR" operator. This will make a region identical to re1.
-bool2 = pf.h.boolean([re1, "OR", re2])
-# The "NOT" operator. This will make a region like re1, but with the corner
-# that re2 covers cut out.
-bool3 = pf.h.boolean([re1, "NOT", re2])
-# Disjoint regions can be combined with the "OR" operator.
-bool4 = pf.h.boolean([sp1, "OR", sp2])
-# Find oddly-shaped overlapping regions.
-bool5 = pf.h.boolean([re2, "AND", sp1])
-# Nested logic with parentheses.
-# This is re1 with the oddly-shaped overlap of re1 and sp1 cut out.
-bool6 = pf.h.boolean([re1, "NOT", "(", re1, "AND", sp1, ")"])

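Because boolean objects behave like any other data object, the derived-quantity
machinery works on them as well. A short sketch, assuming the "TotalQuantity"
derived quantity is available as in yt 2.x:

    # Assumes the objects defined in the recipe above.
    # Total gas mass in the two disjoint spheres combined.
    mass = bool4.quantities["TotalQuantity"]("CellMassMsun")[0]
    print "Mass in sp1 OR sp2: %0.5e Msun" % mass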

diff -r a120514048dc1d257ee535fafd24131655c4f281 -r a0aa0c76c5c0c7220bc9449563b5520544648b3e recipes/contours_on_slice.py
--- a/recipes/contours_on_slice.py
+++ /dev/null
@@ -1,14 +0,0 @@
-"""
-This is a simple recipe to show how to open a dataset, plot a slice
-through it, and add contours of another quantity on top.
-"""
-from yt.mods import * # set up our namespace
-
-fn = "RedshiftOutput0005" # parameter file to load
-
-pf = load(fn) # load data
-pc = PlotCollection(pf) # defaults to center at most dense point
-p = pc.add_slice("Density", 0) # 0 = x-axis
-p.modify["contour"]("Temperature")
-pc.set_width(1.5, 'mpc') # change width of all plots in pc
-pc.save(fn) # save all plots

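The modify dict used above hosts many other plot callbacks. A sketch adding one
more before saving, assuming yt 2.x registers a "velocity" callback that
overplots velocity vectors:

    # Assumes pf, pc, and p from the recipe above.
    p.modify["velocity"]()  # overplot velocity vectors on the slice
    pc.save(fn)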

diff -r a120514048dc1d257ee535fafd24131655c4f281 -r a0aa0c76c5c0c7220bc9449563b5520544648b3e recipes/find_clumps.py
--- a/recipes/find_clumps.py
+++ /dev/null
@@ -1,51 +0,0 @@
-"""
-This is a recipe to show how to find topologically connected sets of cells
-inside a dataset.  It returns these clumps, which can be inspected and
-visualized like any other data object.  More detail on this method can be
-found in arXiv:0806.1653.  For more information, see
-:ref:`methods-contours`.
-"""
-from yt.mods import * # set up our namespace
-
-fn = "RedshiftOutput0005" # parameter file to load
-field = "Density" # this is the field we look for contours over -- we could do
-                  # this over anything.  Other common choices are 'AveragedDensity'
-                  # and 'Dark_Matter_Density'.
-step = 10.0 # This is the multiplicative interval between contours.
-
-pf = load(fn) # load data
-
-# We want to find clumps over the entire dataset, so we'll just grab the whole
-# thing!  This is a convenience parameter that prepares an object that covers
-# the whole domain.  Note, though, that it will load on demand and not before!
-data_source = pf.h.all_data()
-
-# Now we set some sane min/max values between which we want to find contours.
-# This is how we tell the clump finder what to look for -- it won't look for
-# contours connected below or above these threshold values.
-c_min = 10**na.floor(na.log10(data_source[field]).min()  )
-c_max = 10**na.floor(na.log10(data_source[field]).max()+1)
-
-# Now find get our 'base' clump -- this one just covers the whole domain.
-master_clump = amods.level_sets.Clump(data_source, None, field)
-
-# This next command accepts our base clump and the range between which we
-# want to contour.  It recursively finds clumps within the master clump, at
-# intervals defined by the step size we feed it.  The current value is
-# *multiplied* by step size, rather than added to it -- so this means if you
-# want to look in log10 space intervals, you would supply step = 10.0.
-amods.level_sets.find_clumps(master_clump, c_min, c_max, step)
-
-# As it goes, it appends the information about all the sub-clumps to the
-master clump.  Among the different ways to examine it, there's a convenience
-# function for outputting the full hierarchy to a file.
-f = open('%s_clump_hierarchy.txt' % pf,'w')
-amods.level_sets.write_clump_hierarchy(master_clump,0,f)
-f.close()
-
-# We can also output some handy summary information.
-f = open('%s_clumps.txt' % pf,'w')
-amods.level_sets.write_clumps(master_clump,0,f)
-f.close()
-# If you'd like to visualize these clumps, a list of clumps can be supplied to
-# the "clumps" callback on a plot.

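The closing comment above refers to the "clumps" plot callback. A sketch of
that visualization step, assuming yt 2.x also provides get_lowest_clumps in the
level_sets module and registers a "clumps" callback on slice plots:

    # Assumes pf, field, and master_clump from the recipe above.
    # Traverse the hierarchy down to the 'leaf' clumps.
    leaf_clumps = amods.level_sets.get_lowest_clumps(master_clump)
    pc = PlotCollection(pf)
    p = pc.add_slice(field, 0)  # 0 = x-axis
    p.modify["clumps"](leaf_clumps)  # overplot the clump contours
    pc.save('%s_clumps' % pf)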

diff -r a120514048dc1d257ee535fafd24131655c4f281 -r a0aa0c76c5c0c7220bc9449563b5520544648b3e recipes/global_phase_plots.py
--- a/recipes/global_phase_plots.py
+++ /dev/null
@@ -1,24 +0,0 @@
-"""
-This is a simple recipe to show how to open a dataset and then plot a couple
-of phase diagrams, save them, and quit.  Note that this recipe will take
-advantage of multiple CPUs if executed with mpirun and supplied with
-the --parallel command-line argument.  See :ref:`methods-profiles`.
-"""
-from yt.mods import * # set up our namespace
-
-fn = "RedshiftOutput0005" # parameter file to load
-
-pf = load(fn) # load data
-dd = pf.h.all_data() # This is an object that describes the entire box
-pc = PlotCollection(pf) # defaults to center at most dense point
-
-# We plot the average x-velocity (mass-weighted) in our object as a function of
-# Density and Temperature
-plot=pc.add_phase_object(dd, ["Density","Temperature","x-velocity"])
-
-# We now plot the average value of x-velocity as a function of temperature
-plot=pc.add_profile_object(dd, ["Temperature", "x-velocity"])
-
-# Finally, the velocity magnitude as a function of density
-plot=pc.add_profile_object(dd, ["Density", "VelocityMagnitude"])
-pc.save() # save all plots

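The profile calls above use the default mass weighting. A one-line variant,
added before the pc.save() call and assuming add_profile_object accepts a
weight keyword as in yt 2.x:

    # An unweighted profile of Temperature versus Density; pass
    # weight=None instead of the default "CellMassMsun".
    plot = pc.add_profile_object(dd, ["Density", "Temperature"], weight=None)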

diff -r a120514048dc1d257ee535fafd24131655c4f281 -r a0aa0c76c5c0c7220bc9449563b5520544648b3e recipes/halo_finding.py
--- a/recipes/halo_finding.py
+++ /dev/null
@@ -1,11 +0,0 @@
-"""
-This script shows the simplest way of getting halo information.  For more
-information, see :ref:`halo_finding`.
-"""
-from yt.mods import * # set up our namespace
-
-fn = "RedshiftOutput0005" # parameter file to load
-
-pf = load(fn) # load data
-halos = HaloFinder(pf)
-halos.write_out("%s_halos.txt" % pf)

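Beyond write_out, each halo in the returned list carries its own properties. A
minimal sketch, assuming the yt 2.x Halo methods center_of_mass() and
total_mass():

    # Assumes pf and halos from the recipe above.
    halo = halos[0]  # halos are ordered, largest first
    com = halo.center_of_mass()
    print "Halo 0: center of mass %s, total mass %0.5e" % (com, halo.total_mass())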

diff -r a120514048dc1d257ee535fafd24131655c4f281 -r a0aa0c76c5c0c7220bc9449563b5520544648b3e recipes/halo_particle_plotting.py
--- a/recipes/halo_particle_plotting.py
+++ /dev/null
@@ -1,21 +0,0 @@
-"""
-This is a simple mechanism for overplotting the particles belonging only to
-halos.  For more information, see :ref:`halo_finding`.
-"""
-from yt.mods import * # set up our namespace
-
-fn = "RedshiftOutput0005" # parameter file to load
-
-pf = load(fn) # load data
-halos = HaloFinder(pf)
-
-pc = PlotCollection(pf, "c")
-p = pc.add_projection("Density", "x")
-p.modify["hop_circles"](halos) # We like the circles for framing
-
-# Only plot the first 100 halos.  Also, by default the particles are
-# semi-transparent, but the alpha parameter can be overriden to make them
-# darker.
-p.modify["hop_particles"](halos, max_number=100)
-
-pc.save()

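As the comment in the recipe notes, the default transparency can be overridden.
A one-line variant of the callback (the alpha value here is illustrative):

    # Darker particles: alpha runs from 0 (transparent) to 1 (opaque).
    p.modify["hop_particles"](halos, max_number=100, alpha=0.8)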

diff -r a120514048dc1d257ee535fafd24131655c4f281 -r a0aa0c76c5c0c7220bc9449563b5520544648b3e recipes/halo_plotting.py
--- a/recipes/halo_plotting.py
+++ /dev/null
@@ -1,16 +0,0 @@
-"""
-This is a mechanism for plotting circles representing identified particle halos
-on an image.  For more information, see :ref:`halo_finding`.
-"""
-from yt.mods import * # set up our namespace
-
-fn = "RedshiftOutput0005" # parameter file to load
-
-pf = load(fn) # load data
-halos = HaloFinder(pf)
-
-pc = PlotCollection(pf, "c")
-p = pc.add_projection("Density", "x")
-p.modify["hop_circles"](halos)
-
-pc.save()


diff -r a120514048dc1d257ee535fafd24131655c4f281 -r a0aa0c76c5c0c7220bc9449563b5520544648b3e recipes/make_light_cone.py
--- a/recipes/make_light_cone.py
+++ /dev/null
@@ -1,26 +0,0 @@
-"""
-The following recipe will make a light cone projection (see :ref:`light-cone-generator`) 
-of a single quantity over the redshift interval 0 to 0.4.
-"""
-from yt.mods import *
-from yt.analysis_modules.light_cone.api import *
-
-# All of the light cone parameters are given as keyword arguments at instantiation.
-lc = LightCone("128Mpc256grid_SFFB.param", initial_redshift=0.4, 
-               final_redshift=0.0, observer_redshift=0.0,
-               field_of_view_in_arcminutes=450.0, 
-               image_resolution_in_arcseconds=60.0,
-               use_minimum_datasets=True, deltaz_min=0.0, 
-               minimum_coherent_box_fraction=0.0,
-               output_dir='LC', output_prefix='LightCone')
-
-# Calculate a light cone solution and write out a text file with the details 
-# of the solution.
-lc.calculate_light_cone_solution(seed=123456789, filename='lightcone.dat')
-
-# This will be the field to be projected.
-field = 'SZY'
-
-# Make the light cone projection, save individual images of each slice 
-# and of the projection, as well as an HDF5 file with the full data cube.
-lc.project_light_cone(field, save_stack=True, save_slice_images=True)


diff -r a120514048dc1d257ee535fafd24131655c4f281 -r a0aa0c76c5c0c7220bc9449563b5520544648b3e recipes/make_light_ray.py
--- a/recipes/make_light_ray.py
+++ /dev/null
@@ -1,59 +0,0 @@
-"""
-This is a recipe to make a light ray through a simulation.
-"""
-import os
-import sys
-
-from yt.mods import *
-from yt.analysis_modules.halo_profiler.api import *
-from yt.analysis_modules.light_ray.api import *
-
-# Get the simulation parameter file from the command line.
-par_file = sys.argv[1]
-
-# Instantiate a ray object from z = 0 to z = 0.1 using the 
-# minimum number of datasets.
-lr = LightRay(par_file, 0.0, 0.1, use_minimum_datasets=True)
-
-# The next four variables are used when get_nearest_galaxy is set to True.
-# This option will calculate the distance and mass of the halo nearest to 
-# each element of the ray.
-# The light ray tool accomplishes this by using the HaloProfiler.
-# Here we are providing the LightRay with instructions to give the HaloProfiler.
-# This is a dictionary of standard halo profiler keyword arguments and values.
-halo_profiler_kwargs = {'halo_list_format': {'id':0, 'center':[4, 5, 6], 
-                                             'TotalMassMsun':1},
-                        'halo_list_file': 'HopAnalysis.out'}
-# This is a list of actions we want the HaloProfiler to perform.
-# Note that each list item is a dictionary with the following three 
-# entries: 'function', 'args', and 'kwargs'.
-# These are the function to be called, the arguments to that function, and 
-# any keyword arguments.
-halo_profiler_actions = [{'function': make_profiles,
-                          'args': None,
-                          'kwargs': {'filename': 'VirializedHalos.out'}},
-                         {'function': add_halo_filter,
-                          'args': VirialFilter,
-                          'kwargs': {'overdensity_field': 'ActualOverdensity',
-                                     'virial_overdensity': 200,
-                                     'virial_filters': [['TotalMassMsun','>=','1e14']],
-                                     'virial_quantities': ['TotalMassMsun','RadiusMpc']}}]
-# This option can only be 'all' or 'filtered' and tells the HaloProfiler to 
-# use either the full halo list or the filtered list made after calling make_profiles.
-halo_list = 'filtered'
-
-# This is the name of the field from the halo list that represents the halo mass.
-halo_mass_field = 'TotalMassMsun_200'
-
-# Make the ray and get the Density and Temperature fields, the nearest galaxy
-# information, and the line-of-sight velocity.
-lr.make_light_ray(seed=8675309, 
-                  solution_filename='lightraysolution.txt',
-                  data_filename='lightray.h5',
-                  fields=['Temperature', 'Density'],
-                  get_nearest_galaxy=True, 
-                  halo_profiler_kwargs=halo_profiler_kwargs,
-                  halo_profiler_actions=halo_profiler_actions, 
-                  halo_list=halo_list,
-                  halo_mass_field=halo_mass_field,
-                  get_los_velocity=True)

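The resulting lightray.h5 is an ordinary HDF5 file, so the ray data can be
inspected directly. A sketch using h5py, assuming the fields are stored as
top-level datasets keyed by field name:

    import h5py

    # Open the data file written by make_light_ray above.
    f = h5py.File('lightray.h5', 'r')
    print f.keys()  # the datasets stored along the ray
    temperature = f['Temperature'][:]  # assumed top-level field dataset
    f.close()
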
Repository URL: https://bitbucket.org/yt_analysis/cookbook/

--

This is a commit notification from bitbucket.org. You are receiving it
because you have the commit notification service enabled and are the
addressed recipient of this email.


