[Yt-svn] commit/yt-doc: 4 new changesets

Bitbucket <commits-noreply@bitbucket.org>
Tue Mar 22 11:25:47 PDT 2011


4 new changesets in yt-doc:

http://bitbucket.org/yt_analysis/yt-doc/changeset/5d7d2cac4d6f/
changeset:   r43:5d7d2cac4d6f
user:        MatthewTurk
date:        2011-03-22 16:02:36
summary:     Updating with most recent cookbook entries.
affected #:  16 files (1.8 KB)

--- a/source/conf.py	Thu Mar 10 13:04:20 2011 -0800
+++ b/source/conf.py	Tue Mar 22 08:02:36 2011 -0700
@@ -51,9 +51,9 @@
 # built documents.
 #
 # The short X.Y version.
-version = '2.0'
+version = '2.1'
 # The full version, including alpha/beta/rc tags.
-release = '2.0'
+release = '2.1beta'
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.


--- a/source/cookbook/arbitrary_vectors_on_slice.inc	Thu Mar 10 13:04:20 2011 -0800
+++ b/source/cookbook/arbitrary_vectors_on_slice.inc	Tue Mar 22 08:02:36 2011 -0700
@@ -19,14 +19,19 @@
    pf = load(fn) # load data
    pc = PlotCollection(pf) # defaults to center at most dense point
    p = pc.add_slice("Density", ax) 
-   v1 = "magnetic_field_%s" % (axis_names[x_dict[ax]])
-   v2 = "magnetic_field_%s" % (axis_names[y_dict[ax]])
-   p.modify["quiver"](v1, v2) # This takes a few arguments, but we'll use the defaults
-                              # here.  You can control the 'skip' factor in the
-                              # vectors.
+   v1 = "%s-velocity" % (axis_names[x_dict[ax]])
+   v2 = "%s-velocity" % (axis_names[y_dict[ax]])
+   # This takes a few arguments, but we'll use the defaults here.  The third
+   # argument is the 'skip' factor -- every how-many pixels to put a vector.
+   p.modify["quiver"](v1, v2, 16)
    pc.set_width(2.5, 'mpc') # change width of all plots in pc
    pc.save(fn) # save all plots
    
 
+.. rubric:: Sample Output
 
+.. image:: _arbitrary_vectors_on_slice/arbitrary_vectors_on_slice_RedshiftOutput0005_Slice_x_Density.png
+   :width: 240
+   :target: ../_images/arbitrary_vectors_on_slice_RedshiftOutput0005_Slice_x_Density.png
 
+


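For reference, here is a minimal end-to-end sketch of the recipe as it now reads (everything here is taken from the diff above; only the choice of ax = 0 is filled in for concreteness):

   from yt.mods import *  # set up our namespace

   fn = "RedshiftOutput0005"  # parameter file to load
   pf = load(fn)
   pc = PlotCollection(pf)    # defaults to center at most dense point
   ax = 0                     # slice along the x axis
   p = pc.add_slice("Density", ax)
   # Pick the two velocity components lying in the slice plane.
   v1 = "%s-velocity" % (axis_names[x_dict[ax]])
   v2 = "%s-velocity" % (axis_names[y_dict[ax]])
   p.modify["quiver"](v1, v2, 16)  # third argument is the 'skip' factor
   pc.set_width(2.5, 'mpc')        # change width of all plots in pc
   pc.save(fn)                     # save all plots
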
--- a/source/cookbook/find_clumps.inc	Thu Mar 10 13:04:20 2011 -0800
+++ b/source/cookbook/find_clumps.inc	Tue Mar 22 08:02:36 2011 -0700
@@ -35,25 +35,25 @@
    c_max = 10**na.floor(na.log10(data_source[field]).max()+1)
    
   # Now get our 'base' clump -- this one just covers the whole domain.
-   master_clump = Clump(data_source, None, field)
+   master_clump = amods.level_sets.Clump(data_source, None, field)
    
    # This next command accepts our base clump and we say the range between which
    # we want to contour.  It recursively finds clumps within the master clump, at
    # intervals defined by the step size we feed it.  The current value is
    # *multiplied* by step size, rather than added to it -- so this means if you
    # want to look in log10 space intervals, you would supply step = 10.0.
-   find_clumps(master_clump, c_min, c_max, step)
+   amods.level_sets.find_clumps(master_clump, c_min, c_max, step)
    
    # As it goes, it appends the information about all the sub-clumps to the
    # master-clump.  Among different ways we can examine it, there's a convenience
    # function for outputting the full hierarchy to a file.
    f = open('%s_clump_hierarchy.txt' % pf,'w')
-   write_clump_hierarchy(master_clump,0,f)
+   amods.level_sets.write_clump_hierarchy(master_clump,0,f)
    f.close()
    
    # We can also output some handy information, as well.
    f = open('%s_clumps.txt' % pf,'w')
-   write_clumps(master_clump,0,f)
+   amods.level_sets.write_clumps(master_clump,0,f)
    f.close()
    # If you'd like to visualize these clumps, a list of clumps can be supplied to
    # the "clumps" callback on a plot.


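The recipe above reaches the clump machinery through the lazy ``amods`` accessor.  If you prefer explicit imports, the same calls should be reachable through the analysis_modules API; the sketch below assumes ``yt.analysis_modules.level_sets.api`` exports these names (by analogy with the light cone API import shown later in this changeset) and uses an illustrative sphere as the data source:

   from yt.mods import *
   from yt.analysis_modules.level_sets.api import \
       Clump, find_clumps, write_clump_hierarchy, write_clumps

   pf = load("RedshiftOutput0005")                  # any dataset with "Density"
   data_source = pf.h.sphere([0.5, 0.5, 0.5], 0.1)  # illustrative region to search
   field = "Density"
   step = 10.0   # multiplicative step between contour levels
   c_min = 10**na.floor(na.log10(data_source[field]).min())
   c_max = 10**na.floor(na.log10(data_source[field]).max()+1)

   master_clump = Clump(data_source, None, field)
   find_clumps(master_clump, c_min, c_max, step)

   f = open('%s_clump_hierarchy.txt' % pf, 'w')
   write_clump_hierarchy(master_clump, 0, f)
   f.close()
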
--- a/source/cookbook/global_phase_plots.inc	Thu Mar 10 13:04:20 2011 -0800
+++ b/source/cookbook/global_phase_plots.inc	Tue Mar 22 08:02:36 2011 -0700
@@ -21,20 +21,30 @@
    pc = PlotCollection(pf) # defaults to center at most dense point
    
    # We plot the average x-velocity (mass-weighted) in our object as a function of
-   # Electron_Density and Temperature
-   plot=pc.add_phase_object(dd, ["Electron_Density","Temperature","x-velocity"]
+   # Density and Temperature
+   plot=pc.add_phase_object(dd, ["Density","Temperature","x-velocity"],
                    lazy_reader = True)
    
    # We now plot the average value of x-velocity as a function of temperature
    plot=pc.add_profile_object(dd, ["Temperature", "x-velocity"],
                    lazy_reader = True)
    
-   # Finally, the average electron density as a function of the magnitude of the
-   # velocity
-   plot=pc.add_profile_object(dd, ["Electron_Density", "VelocityMagnitude"],
+   # Finally, the velocity magnitude as a function of density
+   plot=pc.add_profile_object(dd, ["Density", "VelocityMagnitude"],
                    lazy_reader = True)
    pc.save() # save all plots
    
 
+.. rubric:: Sample Output
 
+.. image:: _global_phase_plots/global_phase_plots_RedshiftOutput0005_Profile1D_1_Temperature_x-velocity.png
+   :width: 240
+   :target: ../_images/global_phase_plots_RedshiftOutput0005_Profile1D_1_Temperature_x-velocity.png
+.. image:: _global_phase_plots/global_phase_plots_RedshiftOutput0005_Profile1D_2_Density_VelocityMagnitude.png
+   :width: 240
+   :target: ../_images/global_phase_plots_RedshiftOutput0005_Profile1D_2_Density_VelocityMagnitude.png
+.. image:: _global_phase_plots/global_phase_plots_RedshiftOutput0005_Profile2D_0_Density_Temperature_x-velocity.png
+   :width: 240
+   :target: ../_images/global_phase_plots_RedshiftOutput0005_Profile2D_0_Density_Temperature_x-velocity.png
 
+


--- a/source/cookbook/light_cone_halo_mask.inc	Thu Mar 10 13:04:20 2011 -0800
+++ b/source/cookbook/light_cone_halo_mask.inc	Tue Mar 22 08:02:36 2011 -0700
@@ -15,11 +15,11 @@
 .. code-block:: python
 
    
-   import yt.extensions.lightcone as LC
-   import yt.extensions.HaloProfiler as HP
+   from yt.mods import *
    
    # Instantiate a light cone object as usual.
-   lc = LC.LightCone("128Mpc256grid_SFFB.param", initial_redshift=0.4, 
+   lc = amods.light_cone.LightCone(
+                     "128Mpc256grid_SFFB.param", initial_redshift=0.4, 
                      final_redshift=0.0, observer_redshift=0.0,
                      field_of_view_in_arcminutes=600.0, 
                      image_resolution_in_arcseconds=60.0,
@@ -42,8 +42,8 @@
    # be called ("function"), the arguments of the function ("args"), and the 
    # keyword arguments of the function ("kwargs").
    # This item will add a virial filter.
-   halo_profiler_actions.append({'function': HP.HaloProfiler.add_halo_filter,
-                                 'args': [HP.VirialFilter],
+   halo_profiler_actions.append({'function': amods.halo_profiler.HaloProfiler.add_halo_filter,
+                                 'args': [amods.halo_profiler.VirialFilter],
                                  'kwargs': {'must_be_virialized':True, 
                                             'overdensity_field':'ActualOverdensity',
                                             'virial_overdensity':200,
@@ -51,7 +51,7 @@
                                             'virial_quantities':['TotalMassMsun','RadiusMpc']}})
    
    # This item will call the make_profile method to get the filtered halo list.
-   halo_profiler_actions.append({'function': HP.HaloProfiler.make_profiles,
+   halo_profiler_actions.append({'function': amods.halo_profiler.HaloProfiler.make_profiles,
                                  'kwargs': {'filename': "virial_filter.out"}})
    
    # Specify the desired halo list is the filtered list.
@@ -77,3 +77,4 @@
    
 
 
+


--- a/source/cookbook/make_light_cone.inc	Thu Mar 10 13:04:20 2011 -0800
+++ b/source/cookbook/make_light_cone.inc	Tue Mar 22 08:02:36 2011 -0700
@@ -10,16 +10,17 @@
 
 .. code-block:: python
 
-   import yt.extensions.lightcone as LC
+   from yt.mods import *
+   from yt.analysis_modules.light_cone.api import *
    
    # All of the light cone parameters are given as keyword arguments at instantiation.
-   lc = LC.LightCone("128Mpc256grid_SFFB.param", initial_redshift=0.4, 
-                     final_redshift=0.0, observer_redshift=0.0,
-                     field_of_view_in_arcminutes=450.0, 
-                     image_resolution_in_arcseconds=60.0,
-                     use_minimum_datasets=True, deltaz_min=0.0, 
-                     minimum_coherent_box_fraction=0.0,
-                     output_dir='LC', output_prefix='LightCone')
+   lc = LightCone("128Mpc256grid_SFFB.param", initial_redshift=0.4, 
+                  final_redshift=0.0, observer_redshift=0.0,
+                  field_of_view_in_arcminutes=450.0, 
+                  image_resolution_in_arcseconds=60.0,
+                  use_minimum_datasets=True, deltaz_min=0.0, 
+                  minimum_coherent_box_fraction=0.0,
+                  output_dir='LC', output_prefix='LightCone')
    
    # Calculate a light cone solution and write out a text file with the details 
    # of the solution.
@@ -30,12 +31,8 @@
    
    # Make the light cone projection, save individual images of each slice 
    # and of the projection as well as an hdf5 file with the full data cube.
-   # Add a label of the slice redshift to each individual image.
-   # The return value is the PlotCollection that holds the image data for 
-   # the final projection, allowing for additional customization of the 
-   # final image.
-   pc = lc.project_light_cone(field ,save_stack=True, save_slice_images=True, use_colorbar=False, 
-                              add_redshift_label=True)
+   lc.project_light_cone(field ,save_stack=True, save_slice_images=True)
    
 
 
+


--- a/source/cookbook/multi_plot.inc	Thu Mar 10 13:04:20 2011 -0800
+++ b/source/cookbook/multi_plot.inc	Tue Mar 22 08:02:36 2011 -0700
@@ -5,7 +5,7 @@
 
 This is a simple recipe to show how to open a dataset and then plot a slice
 through it, centered at its most dense point.  For more information, see
-:func:`~yt.raven.get_multi_plot`.
+:func:`~yt.visualization.plot_collection.get_multi_plot`.
 
 The latest version of this recipe can be downloaded here: http://hg.enzotools.org/cookbook/raw-file/tip/recipes/multi_plot.py .
 
@@ -26,7 +26,7 @@
    #   Number of plots on the x-axis, number of plots on the y-axis, and how we
    #   want our colorbars oriented.  (This governs where they will go, too.
    #   bw is the base-width in inches, but 4 is about right for most cases.
-   fig, axes, colorbars = raven.get_multi_plot( 2, 1, colorbar=orient, bw = 4)
+   fig, axes, colorbars = get_multi_plot( 2, 1, colorbar=orient, bw = 4)
    
    # We'll use a plot collection, just for convenience's sake
    pc = PlotCollection(pf, center=[0.5, 0.5, 0.5])


--- a/source/cookbook/multi_plot_3x2.inc	Thu Mar 10 13:04:20 2011 -0800
+++ b/source/cookbook/multi_plot_3x2.inc	Tue Mar 22 08:02:36 2011 -0700
@@ -5,7 +5,7 @@
 
 This is a simple recipe to show how to open a dataset and then plot a slice
 through it, centered at its most dense point.  For more information, see
-:func:`~yt.raven.get_multi_plot`.
+:func:`~yt.visualization.plot_collection.get_multi_plot`.
 
 The latest version of this recipe can be downloaded here: http://hg.enzotools.org/cookbook/raw-file/tip/recipes/multi_plot_3x2.py .
 
@@ -26,7 +26,7 @@
    #   Number of plots on the x-axis, number of plots on the y-axis, and how we
    #   want our colorbars oriented.  (This governs where they will go, too.
    #   bw is the base-width in inches, but 4 is about right for most cases.
-   fig, axes, colorbars = raven.get_multi_plot( 2, 3, colorbar=orient, bw = 4)
+   fig, axes, colorbars = get_multi_plot( 2, 3, colorbar=orient, bw = 4)
    
    # We'll use a plot collection, just for convenience's sake
    pc = PlotCollection(pf, center=[0.5, 0.5, 0.5])


--- a/source/cookbook/offaxis_projection.inc	Thu Mar 10 13:04:20 2011 -0800
+++ b/source/cookbook/offaxis_projection.inc	Tue Mar 22 08:02:36 2011 -0700
@@ -15,8 +15,6 @@
 .. code-block:: python
 
    from yt.mods import * # set up our namespace
-   import yt.extensions.volume_rendering as vr
-   import yt.extensions.image_writer as iw
    
    fn = "RedshiftOutput0005" # parameter file to load
    
@@ -24,7 +22,7 @@
    
    # This operates on a pass-through basis, so you should not need to specify
    # limits.
-   tf = vr.ProjectionTransferFunction()
+   tf = ProjectionTransferFunction()
    
    # We don't want to take the log of Density, so we need to disable that here.
    # Note that if using the Camera interface, this does not need to be done.
@@ -60,7 +58,7 @@
    image = na.log10(vp.image[:,:,0]) 
    
    # And now, we call our direct image saver.  We save the log of the result.
-   iw.write_image(image, "%s_offaxis_projection.png" % pf)
+   write_image(image, "%s_offaxis_projection.png" % pf)
    
 
 .. rubric:: Sample Output


--- a/source/cookbook/overplot_particles.inc	Thu Mar 10 13:04:20 2011 -0800
+++ b/source/cookbook/overplot_particles.inc	Tue Mar 22 08:02:36 2011 -0700
@@ -18,8 +18,7 @@
    pf = load(fn) # load data
    pc = PlotCollection(pf, center=[0.5,0.5,0.5]) # defaults to center at most dense point
    p = pc.add_projection("Density", 0) # 0 = x-axis
-   # "nparticles" is slightly more efficient than "particles"
-   p.modify["nparticles"](1.0) # 1.0 is the 'width' we want for our slab of
+   p.modify["particles"](1.0) # 1.0 is the 'width' we want for our slab of
                                # particles -- this governs the allowable locations
                                # of particles that show up on the image
                                # NOTE: we can also supply a *ptype* to cut based


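As the note in the recipe says, the ``particles`` callback can also cut on particle type.  A hypothetical call (assuming the callback accepts ``ptype`` as a keyword, and that type 1 corresponds to dark matter in this Enzo output) would look like:

   # Overlay only dark matter particles from a slab of width 1.0 (code units).
   p.modify["particles"](1.0, ptype=1)
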
--- a/source/cookbook/run_halo_profiler.inc	Thu Mar 10 13:04:20 2011 -0800
+++ b/source/cookbook/run_halo_profiler.inc	Tue Mar 22 08:02:36 2011 -0700
@@ -11,15 +11,15 @@
 
 .. code-block:: python
 
-   import yt.extensions.HaloProfiler as HP
+   from yt.mods import *
    
    # Instantiate HaloProfiler for this dataset.
-   hp = HP.HaloProfiler("DD0242/DD0242")
+   hp = amods.halo_profiler.HaloProfiler("DD0242/DD0242")
    
    # Add a filter to remove halos that have no profile points with overdensity 
    # above 200, and with virial masses less than 1e14 solar masses.
    # Also, return the virial mass and radius to be written out to a file.
-   hp.add_halo_filter(HP.VirialFilter,must_be_virialized=True,
+   hp.add_halo_filter(amods.halo_profiler.VirialFilter,must_be_virialized=True,
                       overdensity_field='ActualOverdensity',
                       virial_overdensity=200,
                       virial_filters=[['TotalMassMsun','>=','1e14']],
@@ -46,3 +46,4 @@
    
 
 
+


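Equivalently, if you would rather not go through ``amods``, the profiler should be importable directly; the module path below is an assumption based on the light cone API import shown earlier in this changeset:

   from yt.mods import *
   from yt.analysis_modules.halo_profiler.api import HaloProfiler, VirialFilter

   hp = HaloProfiler("DD0242/DD0242")
   hp.add_halo_filter(VirialFilter, must_be_virialized=True,
                      overdensity_field='ActualOverdensity',
                      virial_overdensity=200,
                      virial_filters=[['TotalMassMsun', '>=', '1e14']],
                      virial_quantities=['TotalMassMsun', 'RadiusMpc'])
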
--- a/source/cookbook/simple_pdf.inc	Thu Mar 10 13:04:20 2011 -0800
+++ b/source/cookbook/simple_pdf.inc	Tue Mar 22 08:02:36 2011 -0700
@@ -24,5 +24,10 @@
    pc.save(fn) # save all plots
    
 
+.. rubric:: Sample Output
 
+.. image:: _simple_pdf/simple_pdf_RedshiftOutput0005_Profile2D_0_Density_Temperature_CellMassMsun.png
+   :width: 240
+   :target: ../_images/simple_pdf_RedshiftOutput0005_Profile2D_0_Density_Temperature_CellMassMsun.png
 
+


--- a/source/cookbook/simple_projection.inc	Thu Mar 10 13:04:20 2011 -0800
+++ b/source/cookbook/simple_projection.inc	Tue Mar 22 08:02:36 2011 -0700
@@ -26,14 +26,14 @@
 
 .. rubric:: Sample Output
 
-.. image:: _simple_projection/simple_projection_RedshiftOutput0005_Projection_x_Density.png
+.. image:: _simple_projection/simple_projection_RedshiftOutput0005_Projection_x_Density_Density.png
    :width: 240
-   :target: ../_images/simple_projection_RedshiftOutput0005_Projection_x_Density.png
-.. image:: _simple_projection/simple_projection_RedshiftOutput0005_Projection_y_Density.png
+   :target: ../_images/simple_projection_RedshiftOutput0005_Projection_x_Density_Density.png
+.. image:: _simple_projection/simple_projection_RedshiftOutput0005_Projection_y_Density_Density.png
    :width: 240
-   :target: ../_images/simple_projection_RedshiftOutput0005_Projection_y_Density.png
-.. image:: _simple_projection/simple_projection_RedshiftOutput0005_Projection_z_Density.png
+   :target: ../_images/simple_projection_RedshiftOutput0005_Projection_y_Density_Density.png
+.. image:: _simple_projection/simple_projection_RedshiftOutput0005_Projection_z_Density_Density.png
    :width: 240
-   :target: ../_images/simple_projection_RedshiftOutput0005_Projection_z_Density.png
+   :target: ../_images/simple_projection_RedshiftOutput0005_Projection_z_Density_Density.png
 
 


--- a/source/cookbook/simple_volume_rendering.inc	Thu Mar 10 13:04:20 2011 -0800
+++ b/source/cookbook/simple_volume_rendering.inc	Tue Mar 22 08:02:36 2011 -0700
@@ -5,7 +5,9 @@
 
 This recipe shows how to volume render a dataset.  There are a number of
 twiddles, and rough edges, and the process is still very much in beta.
-See :ref:`volume_rendering` for more information.
+See :ref:`volume_rendering` for more information.  In particular, this
+interface will do some things very easily, but it provides almost no
+customizability.  The Camera interface is recommended.
 
 Additionally, for the purposes of the recipe, we have simplified the image
 considerably.
@@ -15,8 +17,6 @@
 .. code-block:: python
 
    from yt.mods import * # set up our namespace
-   import yt.extensions.volume_rendering as vr
-   import yt.extensions.image_writer as iw
    
    fn = "RedshiftOutput0005" # parameter file to load
    
@@ -29,7 +29,7 @@
    
    # We supply the min/max we want the function to cover, in log.
    # For this dataset it's -31 and -27.
-   tf = vr.ColorTransferFunction((na.log10(mi), na.log10(ma)))
+   tf = ColorTransferFunction((na.log10(mi), na.log10(ma)))
    
    # Now we add some Gaussians on.  Work is underway to transform this into a
    # graphical user interface, and the initial steps can be found in
@@ -66,7 +66,7 @@
    vp.ray_cast()
    
    # And now, we call our direct image saver.  
-   iw.write_bitmap(vp.image, "%s_volume_rendered.png" % pf)
+   write_bitmap(vp.image, "%s_volume_rendered.png" % pf)
    
 
 .. rubric:: Sample Output


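Since the updated text recommends the Camera interface over this recipe, here is a rough sketch of that route; the camera parameters, the number of transfer function layers, and the ``pf.h.camera`` accessor are illustrative assumptions rather than part of the commit:

   from yt.mods import *

   pf = load("RedshiftOutput0005")
   dd = pf.h.all_data()
   mi, ma = dd.quantities["Extrema"]("Density")[0]

   # Transfer function spanning the data range, in log space.
   tf = ColorTransferFunction((na.log10(mi), na.log10(ma)))
   tf.add_layers(8, w=0.01)   # a few Gaussian layers; width is illustrative

   c = [0.5, 0.5, 0.5]        # center of the volume rendering
   L = [1.0, 1.0, 1.0]        # viewing direction
   W = 0.5                    # width of the image in code units
   N = 512                    # pixels on a side

   cam = pf.h.camera(c, L, W, (N, N), tf)
   cam.snapshot("%s_camera_rendering.png" % pf)
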
--- a/source/cookbook/simulation_halo_profiler.inc	Thu Mar 10 13:04:20 2011 -0800
+++ b/source/cookbook/simulation_halo_profiler.inc	Tue Mar 22 08:02:36 2011 -0700
@@ -10,19 +10,19 @@
 
 .. code-block:: python
 
-   import yt.extensions.EnzoSimulation as ES
-   import yt.extensions.HaloProfiler as HP
+   from yt.mods import *
    
-   es = ES.EnzoSimulation("simulation_parameter_file", initial_redshift=10, final_redshift=0)
+   es = amods.simulation_handler.EnzoSimulation(
+           "simulation_parameter_file", initial_redshift=10, final_redshift=0)
    
    # Loop over all dataset in the requested time interval.
    for output in es.allOutputs:
    
        # Instantiate HaloProfiler for this dataset.
-       hp = HP.HaloProfiler(output['filename'])
+       hp = amods.halo_profiler.HaloProfiler(output['filename'])
        
        # Add a virialization filter.
-       hp.add_halo_filter(HP.VirialFilter,must_be_virialized=True,
+       hp.add_halo_filter(amods.halo_profiler.VirialFilter,must_be_virialized=True,
                           overdensity_field='ActualOverdensity',
                           virial_overdensity=200,
                           virial_filters=[['TotalMassMsun','>=','1e14']],
@@ -49,3 +49,4 @@
    
 
 
+


--- a/source/cookbook/unique_light_cones.inc	Thu Mar 10 13:04:20 2011 -0800
+++ b/source/cookbook/unique_light_cones.inc	Tue Mar 22 08:02:36 2011 -0700
@@ -10,10 +10,11 @@
 
 .. code-block:: python
 
-   import yt.extensions.lightcone as LC
+   from yt.mods import *
    
    # Instantiate a light cone object as usual.
-   lc = LC.LightCone("128Mpc256grid_SFFB.param", initial_redshift=0.4, 
+   lc = amods.light_cone.LightCone(
+                     "128Mpc256grid_SFFB.param", initial_redshift=0.4, 
                      final_redshift=0.0, observer_redshift=0.0,
                      field_of_view_in_arcminutes=120.0, 
                      image_resolution_in_arcseconds=60.0,
@@ -39,3 +40,4 @@
    
 
 
+


http://bitbucket.org/yt_analysis/yt-doc/changeset/c19764e2897f/
changeset:   r44:c19764e2897f
user:        MatthewTurk
date:        2011-03-22 16:03:17
summary:     Updating to 'python2.7' wherever 'python2.6' was used.
affected #:  5 files (0 bytes)

--- a/source/advanced/debugdrive.rst	Tue Mar 22 08:02:36 2011 -0700
+++ b/source/advanced/debugdrive.rst	Tue Mar 22 08:03:17 2011 -0700
@@ -56,7 +56,7 @@
 
 .. code-block:: bash
 
-   $ python2.6 some_problematic_script.py --paste
+   $ python2.7 some_problematic_script.py --paste
 
 The ``--paste`` option has to come after the name of the script.  When the
 script dies and prints its error, it will also submit that error to the
@@ -109,7 +109,7 @@
 
 .. code-block:: bash
 
-   $ mpirun -np 4 python2.6 some_script.py --parallel --rpdb
+   $ mpirun -np 4 python2.7 some_script.py --parallel --rpdb
 
 and it reaches an error or an exception, it will launch the debugger.
 Additionally, instructions will be printed for connecting to the debugger.


--- a/source/advanced/developing.rst	Tue Mar 22 08:02:36 2011 -0700
+++ b/source/advanced/developing.rst	Tue Mar 22 08:03:17 2011 -0700
@@ -57,7 +57,7 @@
 
 .. code-block:: bash
 
-   $ python2.6 setup.py develop
+   $ python2.7 setup.py develop
 
 This will rebuild all C modules as well.
 


--- a/source/advanced/external_analysis.rst	Tue Mar 22 08:02:36 2011 -0700
+++ b/source/advanced/external_analysis.rst	Tue Mar 22 08:03:17 2011 -0700
@@ -186,7 +186,7 @@
 
 .. code-block:: bash
 
-   $ python2.6 axes_calculator_setup.py build_ext -i
+   $ python2.7 axes_calculator_setup.py build_ext -i
 
 Note that since we don't yet have an ``axes_calculator.pyx``, this will fail.
 But once we have it, it ought to run.


--- a/source/advanced/installing.rst	Tue Mar 22 08:02:36 2011 -0700
+++ b/source/advanced/installing.rst	Tue Mar 22 08:03:17 2011 -0700
@@ -107,7 +107,7 @@
 
 .. code-block:: bash
 
-   $ python2.6 setup.py install
+   $ python2.7 setup.py install
 
 from the ``yt-hg`` directory.  Alternately, you can replace ``install`` with
 ``develop`` if you anticipate making any modifications to the code; ``develop``


--- a/source/advanced/parallel_computation.rst	Tue Mar 22 08:02:36 2011 -0700
+++ b/source/advanced/parallel_computation.rst	Tue Mar 22 08:03:17 2011 -0700
@@ -55,7 +55,7 @@
 
 .. code-block:: bash
 
-   $ mpirun -np 16 python2.6 my_script.py --parallel
+   $ mpirun -np 16 python2.7 my_script.py --parallel
 
 if you wanted it to run in parallel.  If you run into problems, the you can use
 :ref:`remote-debugging` to examine what went wrong.


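For concreteness, the ``my_script.py`` being launched there can be as simple as the sketch below; projections are among the operations yt decomposes across processors when run with ``--parallel`` (the script contents are illustrative, not part of the commit):

   from yt.mods import *

   pf = load("RedshiftOutput0005")
   pc = PlotCollection(pf, center=[0.5, 0.5, 0.5])
   pc.add_projection("Density", 0)   # computed cooperatively by all processors
   pc.save("parallel_run")           # images written once the projection completes
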
http://bitbucket.org/yt_analysis/yt-doc/changeset/7c1a717d5806/
changeset:   r45:7c1a717d5806
user:        MatthewTurk
date:        2011-03-22 17:04:14
summary:     Adding changelog.  Should probably be sorted.
affected #:  1 file (1.5 KB)

--- a/source/reference/changelog.rst	Tue Mar 22 08:03:17 2011 -0700
+++ b/source/reference/changelog.rst	Tue Mar 22 09:04:14 2011 -0700
@@ -3,6 +3,37 @@
 
 This is a non-comprehensive log of changes to the code.
 
+Version 2.1
+-----------
+
+ * HEALpix-based volume rendering for 4pi, allsky volume rendering
+ * libconfig is now included
+ * SQLite3 and Forthon now included by default in the install script
+ * Development guide has been lengthened substantially and a development
+   bootstrap script (:ref:`bootstrap-dev`) is now included.
+ * Installation script now installs Python 2.7 and HDF5 1.8.6
+ * iyt now tab-completes field names
+ * Halos can now be stored on-disk much more easily between HaloFinding runs.
+ * Halos found inline in Enzo can be loaded and merger trees calculated
+ * Support for CASTRO particles has been added
+ * Chombo support updated and fixed
+ * New code contributions 
+ * Contour finder has been sped up by a factor of a few
+ * Constrained two-point functions are now possible, for LOS power spectra
+ * Time series analysis (:ref:`time-series-analysis`) now much easier
+ * Stream Lines now a supported 1D data type (:class:`AMRStreamlineBase`)
+ * Stream Lines now able to be calculated and plotted (:ref:`streamlines-viz`)
+ * In situ Enzo visualization now much faster
+ * "gui" source directory reorganized and cleaned up
+ * Cython now a compile-time dependency, reducing the size of source tree
+   updates substantially
+ * ``yt-supplemental`` repository now checked out by default, containing
+   cookbook, documentation, handy mercurial extensions, and advanced plotting
+   examples and helper scripts.
+ * Pasteboards now supported and available (:ref:`pasteboards`)
+ * Parallel yt efficiency improved by removal of barriers and improvement of
+   collective operations
+
 Version 2.0
 -----------
 


http://bitbucket.org/yt_analysis/yt-doc/changeset/e8f532648a50/
changeset:   r46:e8f532648a50
user:        MatthewTurk
date:        2011-03-22 17:09:57
summary:     Adding a section about the bootstrap_dev command
affected #:  1 file (5.9 KB)

--- a/source/advanced/developing.rst	Tue Mar 22 09:04:14 2011 -0700
+++ b/source/advanced/developing.rst	Tue Mar 22 09:09:57 2011 -0700
@@ -34,6 +34,120 @@
 still want to contribute, just drop me a line and I'll put a link on the main
 wiki page to wherever you like!
 
+.. _bootstrap-dev:
+
+Bootstrapping Your Development Environment
+------------------------------------------
+
+Getting up and running with developing yt can be somewhat daunting.  To assist
+with that, yt provides a 'bootstrap' script that handles a couple of the more
+annoying items on the checklist -- getting set up on BitBucket, creating a
+pasteboard, and adding a couple handy extensions to Mercurial.  As time goes
+on, we hope that we will be able to use the extensions added during this
+process to both issue forks and pull requests to BitBucket, enabling much more
+rapid and easy development.  To run the script, on the command line type::
+
+   $ yt bootstrap_dev
+
+.. note:: Although the bootstrap script will manipulate and modify your
+   ``~/.hgrc`` and possibly your BitBucket repositories, it will ask before
+   doing anything.  You should feel free to Ctrl-C out at any time.  If you
+   wish to inspect the source code of the bootstrap script, it is located in
+   ``yt/utilities/command_line.py`` in the function ``do_bootstrap``.
+
+Here is the list of items that the script will attempt to accomplish, along
+with a brief motivation of each.  
+
+ #. **Ensure that the yt-supplemental repository is checked out into
+    ``YT_DEST``**.  To make sure that the extensions we're going to use to
+    facilitate mercurial usage are checked out and ready to go, we optionally
+    clone the repository here.  If you've run with a recent install script,
+    this won't be necessary.
+ #. **Create an ``~/.hgrc`` if it does not exist, and add your username**.
+    Because Mercurial's changesets are all signed with a username, we make sure
+    that your username is set in your ``~/.hgrc``.  The script will prompt you
+    for what you would like it to be.  When committing to yt, we strongly
+    prefer you set it to be of the form "Firstname Lastname
+    <email@address.com>".  If you want to skip this step, simply set the
+    configuration value yourself in ``~/.hgrc``.  Any of the above-listed
+    tutorials on hg can help with this.
+ #. **Create a BitBucket user if you do not have one**.  Because yt is developed
+    on the source code hosting site `BitBucket <http://bitbucket.org/>`_, we
+    make sure that you're set up to have a username there.  You should not feel
+    obliged to do this step if you do not want to, but it provides a much more
+    convenient mechanism for sharing changes, reporting issues, and
+    contributing to the yt wiki.  It also provides a location to host an
+    unlimited number of publicly accessible repositories, if you wish to share
+    other pieces of code with other users.  (See :ref:`included-hg-extensions`
+    for more information about this.)
+ #. **Turn on the ``hgbb`` and ``cedit`` extensions in ``~/.hgrc``**.  This sets
+    up these extensions, described below.  It amounts to adding them to the
+    ``[extensions]`` section and adding your BitBucket username to the ``[bb]``
+    section.
+ #. **Create a pasteboard repository**.  This is the step that is probably the
+    most fun.  yt now comes with pasteboard facilities.  A pasteboard is like a
+    pastebin, except designed to be more persistent -- it's a versioned
+    repository that contains scripts with descriptions, which are automatically
+    posted to the web.  You can download from your pasteboard programmatically
+    using the ``yt pasteboard`` command, and you can download from other
+    pasteboards using the ``yt pastegrab`` command.  For more information, see
+    :ref:`pasteboards`.  This repository will be created on BitBucket, and will
+    be of the name ``your_username.bitbucket.org``, which is also the web
+    address it will be hosted at.
+
+And that's it!  If you run into any trouble, please email ``yt-dev`` with your
+concerns, questions or error messages.  This should put you in a good place to
+start developing yt efficiently.
+
+.. _included-hg-extensions:
+ 
+Included hg Extensions
+^^^^^^^^^^^^^^^^^^^^^^
+
+Mercurial is written in Python, and as such is easily extensible by scripts.
+It comes with a number of extensions (descriptions of which you can find on the
+Mercurial wiki under `UsingExtensions
+<http://mercurial.selenic.com/wiki/UsingExtensions>`_.  Some of my favorites
+are transplant, extdiff, color and progress.) yt now comes bundled with a few
+additional extensions, which should make interacting with other repositories
+and BitBucket a bit easier.
+
+The first of these is ``hgbb``, which is a Mercurial extension that interacts
+with the public-facing BitBucket-API.  It adds several commands, and you can
+get information about these commands by typing: ::
+
+   $ hg help COMMANDNAME
+
+It also adds the URL-specifer ``bb://USERNAME/reponame`` for convenience; this
+means you can reference ``sskory/yt`` to see Stephen's yt fork, for instance.
+
+The most fun of these commands are:
+
+``bbcreate``
+   This creates a new repository on BitBucket and clones it locally.  This is
+   really cool and very convenient when developing.
+``bbforks``
+   This shows the status of all known forks of a given repository, and can show
+   the incoming and outgoing changesets.  You can use this to see what
+   changesets are different between yours and another repository.
+
+As time goes on, and as the BitBucket API is expanded to cover things like
+forking and pull requests, we hope that this extension will also expand.
+
+The other extension that is currently bundled with yt is the ``cedit``
+extension.  This adds the ability to add, remove and set configuration options
+from the command line.  This brings with it the ability to add new sources for
+Mercurial repositories -- for instance, if you become aware of a different
+source repository you want to be able to pull from, you can add it as a source
+and then pull from it directly.
+
+The new commands you may be interested in are:
+
+``cedit``
+   Set an option in either the local or the global configuration file.
+``addsource``
+   Add a mercurial repo to the ``[paths]`` section of the local repository.
+
 How To Get The Source Code
 --------------------------
 
@@ -248,6 +362,8 @@
  * Variable names should be short but descriptive.
  * No globals!
 
+.. _project-ideas:
+
 Project Ideas
 -------------

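To make the hgrc-related bootstrap steps above concrete, the configuration the script writes amounts to something like the following ``~/.hgrc``.  This is a hypothetical sketch: the extension paths are illustrative and will actually point into wherever your yt-supplemental checkout lives.

   [ui]
   username = Firstname Lastname <email@address.com>

   [extensions]
   # illustrative paths -- the real ones come from the yt-supplemental checkout
   hgbb = /path/to/yt-supplemental/hgbb/hgbb.py
   cedit = /path/to/yt-supplemental/cedit/cedit.py

   [bb]
   username = your_bitbucket_username
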
Repository URL: https://bitbucket.org/yt_analysis/yt-doc/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.


