[yt-svn] commit/yt-3.0: 2 new changesets
commits-noreply at bitbucket.org
commits-noreply at bitbucket.org
Sun Apr 28 17:12:34 PDT 2013
2 new commits in yt-3.0:
https://bitbucket.org/yt_analysis/yt-3.0/commits/00cce9aff2cf/
Changeset: 00cce9aff2cf
Branch: yt-3.0
User: ngoldbaum
Date: 2013-04-29 02:10:23
Summary: Fixing the docstring for the projection object's data_source keyword.
Affected #: 1 file
diff -r d48a016b4b8ce1c8326e23c308fd90789d0b4ec0 -r 00cce9aff2cff342f2971bee8f578fed9d1deab5 yt/data_objects/construction_data_containers.py
--- a/yt/data_objects/construction_data_containers.py
+++ b/yt/data_objects/construction_data_containers.py
@@ -190,7 +190,7 @@
center : array_like, optional
The 'center' supplied to fields that use it. Note that this does
not have to have `coord` as one value. Strictly optional.
- source : `yt.data_objects.api.AMRData`, optional
+ data_source : `yt.data_objects.api.AMRData`, optional
If specified, this will be the data source used for selecting
regions to project.
node_name: string, optional
https://bitbucket.org/yt_analysis/yt-3.0/commits/346c728780cb/
Changeset: 346c728780cb
Branch: yt-3.0
User: ngoldbaum
Date: 2013-04-29 02:12:13
Summary: Merging with bitbucket tip.
Affected #: 2 files
diff -r 00cce9aff2cff342f2971bee8f578fed9d1deab5 -r 346c728780cb0a4607a16608a8706178a51e1bf3 yt/config.py
--- a/yt/config.py
+++ b/yt/config.py
@@ -64,7 +64,8 @@
answer_testing_bitwise = 'False',
gold_standard_filename = 'gold006',
local_standard_filename = 'local001',
- sketchfab_api_key = 'None'
+ sketchfab_api_key = 'None',
+ thread_field_detection = 'False'
)
# Here is the upgrade. We're actually going to parse the file in its entirety
# here. Then, if it has any of the Forbidden Sections, it will be rewritten
diff -r 00cce9aff2cff342f2971bee8f578fed9d1deab5 -r 346c728780cb0a4607a16608a8706178a51e1bf3 yt/frontends/enzo/data_structures.py
--- a/yt/frontends/enzo/data_structures.py
+++ b/yt/frontends/enzo/data_structures.py
@@ -31,6 +31,9 @@
import string
import re
+from threading import Thread
+import Queue
+
from itertools import izip
from yt.funcs import *
@@ -57,6 +60,14 @@
from yt.utilities.parallel_tools.parallel_analysis_interface import \
parallel_blocking_call
+def get_field_names_helper(filename, id, results):
+ try:
+ names = hdf5_light_reader.ReadListOfDatasets(
+ filename, "/Grid%08i" % id)
+ results.put((names, "Grid %s has: %s" % (id, names)))
+ except (exceptions.KeyError, hdf5_light_reader.ReadingError):
+ results.put((None, "Grid %s is a bit funky?" % id))
+
class EnzoGrid(AMRGridPatch):
"""
Class representing a single Enzo Grid instance.
@@ -401,29 +412,21 @@
self.max_level = self.grid_levels.max()
def _detect_active_particle_fields(self):
- select_grids = np.zeros(len(self.grids), dtype='int32')
- for ptype in self.parameter_file["AppendActiveParticleType"]:
- select_grids += self.grid_active_particle_count[ptype].flat
- gs = self.grids[select_grids > 0]
- grids = sorted((g for g in gs), key = lambda a: a.filename)
- handle = last = None
ap_list = self.parameter_file["AppendActiveParticleType"]
_fields = dict((ap, []) for ap in ap_list)
fields = []
- for g in grids:
- # We inspect every grid, for now, until we have a list of
- # attributes in a defined location.
- if last != g.filename:
- if handle is not None: handle.close()
- handle = h5py.File(g.filename, "r")
+ for ptype in self.parameter_file["AppendActiveParticleType"]:
+ select_grids = self.grid_active_particle_count[ptype].flat
+ gs = self.grids[select_grids > 0]
+ g = gs[0]
+ handle = h5py.File(g.filename)
node = handle["/Grid%08i/Particles/" % g.id]
for ptype in (str(p) for p in node):
if ptype not in _fields: continue
for field in (str(f) for f in node[ptype]):
_fields[ptype].append(field)
fields += [(ptype, field) for field in _fields.pop(ptype)]
- if len(_fields) == 0: break
- if handle is not None: handle.close()
+ handle.close()
return set(fields)
def _setup_derived_fields(self):
@@ -448,15 +451,35 @@
mylog.info("Gathering a field list (this may take a moment.)")
field_list = set()
random_sample = self._generate_random_grids()
- for grid in random_sample:
- if not hasattr(grid, 'filename'): continue
- try:
- gf = self.io._read_field_names(grid)
- except self.io._read_exception:
- mylog.debug("Grid %s is a bit funky?", grid.id)
- continue
- mylog.debug("Grid %s has: %s", grid.id, gf)
- field_list = field_list.union(gf)
+ tothread = ytcfg.getboolean("yt","thread_field_detection")
+ if tothread:
+ jobs = []
+ result_queue = Queue.Queue()
+ # Start threads
+ for grid in random_sample:
+ if not hasattr(grid, 'filename'): continue
+ helper = Thread(target = get_field_names_helper,
+ args = (grid.filename, grid.id, result_queue))
+ jobs.append(helper)
+ helper.start()
+ # Here we make sure they're finished.
+ for helper in jobs:
+ helper.join()
+ for grid in random_sample:
+ res = result_queue.get()
+ mylog.debug(res[1])
+ if res[0] is not None:
+ field_list = field_list.union(res[0])
+ else:
+ for grid in random_sample:
+ if not hasattr(grid, 'filename'): continue
+ try:
+ gf = self.io._read_field_names(grid)
+ except self.io._read_exception:
+ mylog.debug("Grid %s is a bit funky?", grid.id)
+ continue
+ mylog.debug("Grid %s has: %s", grid.id, gf)
+ field_list = field_list.union(gf)
if "AppendActiveParticleType" in self.parameter_file.parameters:
ap_fields = self._detect_active_particle_fields()
field_list = list(set(field_list).union(ap_fields))
@@ -998,8 +1021,8 @@
for p, v in self._conversion_override.items():
self.conversion_factors[p] = v
self.refine_by = self.parameters["RefineBy"]
+ self.dimensionality = self.parameters["TopGridRank"]
self.periodicity = ensure_tuple(self.parameters["LeftFaceBoundaryCondition"] == 3)
- self.dimensionality = self.parameters["TopGridRank"]
self.domain_dimensions = self.parameters["TopGridDimensions"]
self.current_time = self.parameters["InitialTime"]
if "CurrentTimeIdentifier" in self.parameters:
Repository URL: https://bitbucket.org/yt_analysis/yt-3.0/
--
This is a commit notification from bitbucket.org. You are receiving
this message, addressed to the recipient of this email, because you
have the commit-notification service enabled.
More information about the yt-svn
mailing list