[yt-users] enzo embedded parallel yt problem with gradient field

Bodo Schwabe Bodysseus at gmx.de
Tue Aug 30 06:07:31 PDT 2016


Hi all,

I am currently trying to run enzo with embedded parallelized python/yt with
the following user_script.py:

import yt

from yt.frontends.enzo.api import EnzoDatasetInMemory

from yt.fields.field_plugin_registry import \
    register_field_plugin
from yt.fields.fluid_fields import \
    setup_gradient_fields

@register_field_plugin
def setup_my_fields(registry, ftype="enzo", slice_info=None):
    setup_gradient_fields(registry, ('enzo','AxImaginary'), '',
                          slice_info)

yt.enable_parallelism()

def main():

    ds = EnzoDatasetInMemory()
    ds.index
    dd = ds.all_data()
    print dd.quantities.total_quantity('AxImaginary_gradient_z')


It works fine on a single processor (mpirun -n 1 ...) but gives me the
following error message when run on multiple processors:

/Global Dir set to .//
//ENZO_layout 1 x 1 x 2//
//Successfully read in parameter file Bosonstar.enzo.//
//INITIALIZATION TIME =   3.75330448e-02//
//yt : [INFO     ] 2016-08-30 15:02:40,868 Global parallel computation 
enabled: 0 / 2//
//yt : [INFO     ] 2016-08-30 15:02:40,868 Global parallel computation 
enabled: 1 / 2//
//Continuation Flag = 1//
//TopGrid dt = 2.000000e-05     time = 0    cycle = 0//
//P000 yt : [INFO     ] 2016-08-30 15:02:41,001 Parameters: 
current_time              = 2e-05//
//P000 yt : [INFO     ] 2016-08-30 15:02:41,001 Parameters: 
domain_dimensions         = [32 32 32]//
//P000 yt : [INFO     ] 2016-08-30 15:02:41,002 Parameters: 
domain_left_edge          = [-0.256 -0.256 -0.256]//
//P000 yt : [INFO     ] 2016-08-30 15:02:41,002 Parameters: 
domain_right_edge         = [ 0.256  0.256  0.256]//
//P000 yt : [INFO     ] 2016-08-30 15:02:41,003 Parameters: 
cosmological_simulation   = 0.0//
//P000 yt : [INFO     ] 2016-08-30 15:02:41,005 Gathering a field list 
(this may take a moment.)//
//  File "<string>", line 1, in <module>//
//  File "./user_script.py", line 21, in main//
//    print dd.quantities.total_quantity('AxImaginary_gradient_z')//
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/data_objects/derived_quantities.py", 
line 176, in __call__//
//    rv = super(TotalQuantity, self).__call__(fields)//
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/data_objects/derived_quantities.py", 
line 67, in __call__//
//    sto.result = self.process_chunk(ds, *args, **kwargs)//
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/data_objects/derived_quantities.py", 
line 182, in process_chunk//
//    for field in fields]//
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/data_objects/data_containers.py", 
line 279, in __getitem__//
//  File "<string>", line 1, in <module>//
//  File "./user_script.py", line 21, in main//
//    print dd.quantities.total_quantity('AxImaginary_gradient_z')//
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/data_objects/derived_quantities.py", 
line 176, in __call__//
//    self.get_data(f)//
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/data_objects/data_containers.py", 
line 1291, in get_data//
//    self._generate_fields(fields_to_generate)//
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/data_objects/data_containers.py", 
line 1311, in _generate_fields//
//    fd = self._generate_field(field)//
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/data_objects/data_containers.py", 
line 316, in _generate_field//
//    tr = self._generate_fluid_field(field)//
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/data_objects/data_containers.py", 
line 334, in _generate_fluid_field//
//    rv = self._generate_spatial_fluid(field, ngt_exception.ghost_zones)//
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/data_objects/data_containers.py", 
line 364, in _generate_spatial_fluid//
//    gz[field][ngz:-ngz, ngz:-ngz, ngz:-ngz],//
//    rv = super(TotalQuantity, self).__call__(fields)//
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/data_objects/derived_quantities.py", 
line 67, in __call__//
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/data_objects/data_containers.py", 
line 279, in __getitem__//
//    sto.result = self.process_chunk(ds, *args, **kwargs)//
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/data_objects/derived_quantities.py", 
line 182, in process_chunk//
//    for field in fields]//
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/data_objects/data_containers.py", 
line 279, in __getitem__//
//    self.get_data(f)//
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/data_objects/construction_data_containers.py", 
line 628, in get_data//
//    self.get_data(f)//
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/data_objects/data_containers.py", 
line 1291, in get_data//
//    self._generate_fields(fields_to_generate)//
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/data_objects/data_containers.py", 
line 1311, in _generate_fields//
//    fd = self._generate_field(field)//
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/data_objects/data_containers.py", 
line 316, in _generate_field//
//    tr = self._generate_fluid_field(field)//
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/data_objects/data_containers.py", 
line 334, in _generate_fluid_field//
//    rv = self._generate_spatial_fluid(field, ngt_exception.ghost_zones)//
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/data_objects/data_containers.py", 
line 364, in _generate_spatial_fluid//
//    if len(fill) > 0: self._fill_fields(fill)//
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/data_objects/construction_data_containers.py", 
line 947, in _fill_fields//
//    gz[field][ngz:-ngz, ngz:-ngz, ngz:-ngz],//
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/data_objects/data_containers.py", 
line 279, in __getitem__//
//    for chunk in ls.data_source.chunks(fields, "io")://
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/data_objects/data_containers.py", 
line 1190, in chunks//
//    self.get_data(f)//
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/data_objects/construction_data_containers.py", 
line 628, in get_data//
//    self.get_data(fields)//
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/data_objects/data_containers.py", 
line 1279, in get_data//
//    fluids, self, self._current_chunk)//
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/geometry/geometry_handler.py", 
line 245, in _read_fluid_fields//
//    chunk_size)//
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/frontends/enzo/io.py", 
line 380, in _read_fluid_selection//
//    if len(fill) > 0: self._fill_fields(fill)//
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/data_objects/construction_data_containers.py", 
line 947, in _fill_fields//
//    for chunk in ls.data_source.chunks(fields, "io")://
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/data_objects/data_containers.py", 
line 1190, in chunks//
//    self.get_data(fields)//
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/data_objects/data_containers.py", 
line 1279, in get_data//
//    fluids, self, self._current_chunk)//
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/geometry/geometry_handler.py", 
line 245, in _read_fluid_fields//
//    data_view = 
self.grids_in_memory[g.id][fname][self.my_slice].swapaxes(0,2)//
//P001 yt : [ERROR    ] 2016-08-30 15:02:41,783 KeyError: 1//
//    chunk_size)//
//  File 
"/gfs2/work/nipbschw/yt-shared-smp/src/yt-hg/yt/frontends/enzo/io.py", 
line 380, in _read_fluid_selection//
//    data_view = 
self.grids_in_memory[g.id][fname][self.my_slice].swapaxes(0,2)//
//P000 yt : [ERROR    ] 2016-08-30 15:02:41,784 KeyError: 2//
//[hsmp16:59147] 1 more process has sent help message 
help-mpi-btl-openib.txt / default subnet prefix//
//[hsmp16:59147] Set MCA parameter "orte_base_help_aggregate" to 0 to 
see all help / error messages//
//[hsmp16:59147] 1 more process has sent help message 
help-mpi-runtime.txt / mpi_init:warn-fork//
//[hsmp16:59147] 1 more process has sent help message help-mpi-api.txt / 
mpi-abort

Thank you very much for any helpful suggestions,
Bodo
-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://lists.spacepope.org/pipermail/yt-users-spacepope.org/attachments/20160830/d6bcc41f/attachment-0001.htm>


More information about the yt-users mailing list