[Yt-svn] commit/cookbook: brittonsmith: Removed old scripts and updated light cone recipe.

Bitbucket commits-noreply at bitbucket.org
Mon Mar 14 17:12:34 PDT 2011


1 new changeset in cookbook:

http://bitbucket.org/yt_analysis/cookbook/changeset/3ec812af8bca/
changeset:   r39:3ec812af8bca
user:        brittonsmith
date:        2011-03-15 01:12:18
summary:     Removed old scripts and updated light cone recipe.
affected #:  3 files (302 bytes)

--- a/example_scripts/find_clumps_all_datasets.py	Tue Jan 11 16:42:25 2011 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,57 +0,0 @@
-# This is a wrapper for find_clump_dataset.py, allowing you to run the clump finder
-# over multiple datasets.
-# In this example, we are looking for clumps in a sphere of radius 10 pc surrounding 
-# the density maximum in each datadump.
-
-import yt.lagos as lagos
-from find_clumps_dataset import *
-
-data_dir = "/Users/britton/EnzoRuns/runs_08/cl-2.5/"
-#data_dir = "/Volumes/Turducken/EnzoRuns/runs_08/cl-5/"
-
-# Time datadumps.
-time_dumps = [34]
-time_dump_dir = "DataDir"
-time_dump_prefix = "DataDump"
-
-# Redshift datadumps.
-redshift_dumps = []
-redshift_dump_dir = "RedshiftDir"
-redshift_dump_prefix = "RedshiftDump"
-
-field = "Density"
-radius = 0.0001
-units = "pc"
-steps_per_dex = 4.
-step = 10**(1./steps_per_dex)
-
-minCells = 64 # not setting anything, only for file prefix
-
-# Prepare list of datasets.
-datasets = []
-
-for q in time_dumps:
-    datasets.append("%s%s%04d/%s%04d" % (data_dir,time_dump_dir,q,time_dump_prefix,q))
-for q in redshift_dumps:
-    datasets.append("%s%s%04d/%s%04d" % (data_dir,redshift_dump_dir,q,redshift_dump_prefix,q))
-
-for dataset in datasets:
-    print "Finding clumps in %s." % dataset
-
-    prefix = "%s_%.1e%s_spd%d_min%d" % (dataset,radius,units,steps_per_dex,minCells)
-
-    dataset_object = lagos.EnzoStaticOutput(dataset)
-
-    # Look for clumps in a sphere surrounding the density maximum.
-    v, c = dataset_object.h.find_max(field)
-    sphere = dataset_object.h.sphere(c, radius/dataset_object[units], [field]) # cache our field
-
-    print "Sphere is %s %s." % (radius,units)
-    print "Min %s: %e, Max %s: %e." % (field,sphere.data[field].min(),
-                                      field,sphere.data[field].max())
-
-    master = find_clumps_dataset(prefix,sphere,field,step)
-
-    del master
-    del sphere
-    del dataset_object


--- a/example_scripts/make_light_cone.py	Tue Jan 11 16:42:25 2011 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,47 +0,0 @@
-"""
-Light cone example.
-"""
-
-from yt.extensions.lightcone import *
-
-q = LightCone("128Mpc256grid_SFFB.param","lightcone.par")
-
-q.CalculateLightConeSolution()
-
-# If random seed was not provided in the parameter file, it can be given 
-# straight to the routine.
-q.CalculateLightConeSolution(seed=123456789)
-
-# Save a text file detailing the light cone solution.
-q.SaveLightConeSolution()
-
-# Make a density light cone.
-# The plot collection is returned so the final image can be
-# customized and remade.
-# Save the data to an hdf5 file and save images of the individual slices.
-pc = q.ProjectLightCone('Density',save_stack=True,save_slice_images=True)
-
-# Make a weighted light cone projection and save the stack to an hdf5 file.
-pc = q.ProjectLightCone('Temperature',weight_field='Density',save_stack=True)
-
-# Save the temperature stack to a different file.
-q.SaveLightConeStack(file='light_cone_temperature.h5')
-
-# Recycle current light cone solution by creating a new solution 
-# that only randomizes the lateral shifts.
-# This will allow the projection objects that have already been made 
-# to be re-used.
-# Just don't use the same random seed as the original.
-q.RerandomizeLightConeSolution(987654321,recycle=True)
-
-# Save the recycled solution.
-q.SaveLightConeSolution(file='light_cone_recycled.out')
-
-# Change the file prefix so that new light cones will not over-write the old ones.
-q.lightConeParameters['OutputPrefix'] = "LightCone_NewSeed"
-
-# Make new projection with the recycled solution.
-pc = q.ProjectLightCone('Density')
-
-# Rerandomize the light cone solution with an entirely new solution.
-q.RerandomizeLightConeSolution(8675309,recycle=False)


--- a/recipes/make_light_cone.py	Tue Jan 11 16:42:25 2011 -0500
+++ b/recipes/make_light_cone.py	Mon Mar 14 20:12:18 2011 -0400
@@ -3,16 +3,16 @@
 of a single quantity over the redshift interval 0 to 0.4.
 """
 from yt.mods import *
+from yt.analysis_modules.light_cone.api import *
 
 # All of the light cone parameters are given as keyword arguments at instantiation.
-lc = amods.light_cone.LightCone(
-                  "128Mpc256grid_SFFB.param", initial_redshift=0.4, 
-                  final_redshift=0.0, observer_redshift=0.0,
-                  field_of_view_in_arcminutes=450.0, 
-                  image_resolution_in_arcseconds=60.0,
-                  use_minimum_datasets=True, deltaz_min=0.0, 
-                  minimum_coherent_box_fraction=0.0,
-                  output_dir='LC', output_prefix='LightCone')
+lc = LightCone("128Mpc256grid_SFFB.param", initial_redshift=0.4, 
+               final_redshift=0.0, observer_redshift=0.0,
+               field_of_view_in_arcminutes=450.0, 
+               image_resolution_in_arcseconds=60.0,
+               use_minimum_datasets=True, deltaz_min=0.0, 
+               minimum_coherent_box_fraction=0.0,
+               output_dir='LC', output_prefix='LightCone')
 
 # Calculate a light cone solution and write out a text file with the details 
 # of the solution.
@@ -23,9 +23,4 @@
 
 # Make the light cone projection, save individual images of each slice 
 # and of the projection as well as an hdf5 file with the full data cube.
-# Add a label of the slice redshift to each individual image.
-# The return value is the PlotCollection that holds the image data for 
-# the final projection, allowing for additional customization of the 
-# final image.
-pc = lc.project_light_cone(field ,save_stack=True, save_slice_images=True, use_colorbar=False, 
-                           add_redshift_label=True)
+lc.project_light_cone(field ,save_stack=True, save_slice_images=True)

Repository URL: https://bitbucket.org/yt_analysis/cookbook/

--

This is a commit notification from bitbucket.org. You are receiving
this message because you have the notification service enabled and are
the addressed recipient of this email.



More information about the yt-svn mailing list