[yt-svn] commit/yt: 3 new changesets
commits-noreply at bitbucket.org
commits-noreply at bitbucket.org
Fri Mar 15 17:03:30 PDT 2013
3 new commits in yt:
https://bitbucket.org/yt_analysis/yt/commits/15d72caf0ea6/
changeset: 15d72caf0ea6
branch: yt
user: MatthewTurk
date: 2013-03-15 17:57:43
summary: Attempting to create a backwards-compatible pickle fix for boolean objects.
The comments explain some of the thought process.
affected #: 1 file
diff -r bda5c9c8fe1bfdda64bdd5f476690ad43fff9b4b -r 15d72caf0ea6927828285981b476fd91725432d9 yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -4606,22 +4606,38 @@
mylog.error("Problem uploading.")
return upload_id
+# Many of these items are set up specifically to ensure that
+# we are not breaking old pickle files. This means we must only call the
+# _reconstruct_object and that we cannot mandate any additional arguments to
+# the reconstruction function.
+#
+# In the future, this would be better off being set up to more directly
+# reference objects or retain state, perhaps with a context manager.
+#
+# One final detail: time series or multiple parameter files in a single pickle
+# seems problematic.
+
+class ReconstructedObject(tuple):
+ pass
+
+def _check_nested_args(arg, ref_pf):
+ if not isinstance(arg, (tuple, list, ReconstructedObject)):
+ return arg
+ elif isinstance(arg, ReconstructedObject) and ref_pf == arg[0]:
+ return arg[1]
+ narg = [_check_nested_args(a, ref_pf) for a in arg]
+ return narg
def _reconstruct_object(*args, **kwargs):
pfid = args[0]
dtype = args[1]
+ pfs = ParameterFileStore()
+ pf = pfs.get_pf_hash(pfid)
field_parameters = args[-1]
# will be much nicer when we can do pfid, *a, fp = args
- args, new_args = args[2:-1], []
- for arg in args:
- if iterable(arg) and len(arg) == 2 \
- and not isinstance(arg, types.DictType) \
- and isinstance(arg[1], AMRData):
- new_args.append(arg[1])
- else: new_args.append(arg)
- pfs = ParameterFileStore()
- pf = pfs.get_pf_hash(pfid)
+ args = args[2:-1]
+ new_args = [_check_nested_args(a, pf) for a in args]
cls = getattr(pf.h, dtype)
obj = cls(*new_args)
obj.field_parameters.update(field_parameters)
- return pf, obj
+ return ReconstructedObject((pf, obj))
https://bitbucket.org/yt_analysis/yt/commits/3c55a238d857/
changeset: 3c55a238d857
branch: yt
user: ejtasker
date: 2013-03-16 01:00:17
summary: added test for pickle
affected #: 1 file
diff -r ed10fc84bafcefebfdfd813ca59caa245f8a5ebd -r 3c55a238d8577874b0e0f48199dfeb4ae217dce0 yt/data_objects/tests/test_pickle.py
--- /dev/null
+++ b/yt/data_objects/tests/test_pickle.py
@@ -0,0 +1,36 @@
+from yt.testing import fake_random_pf, assert_equal
+from yt.analysis_modules.level_sets.api import identify_contours
+import cPickle
+import os
+
+def setup():
+ """Test specific setup."""
+ from yt.config import ytcfg
+ ytcfg["yt", "__withintesting"] = "True"
+
+
+def test_save_load_pickle():
+
+ test_pf = fake_random_pf(64)
+
+ # create extracted region from boolean (fairly complex object)
+ center = (test_pf.domain_left_edge + test_pf.domain_right_edge) / 2
+ sp_outer = test_pf.h.sphere(center, test_pf.domain_width[0])
+ sp_inner = test_pf.h.sphere(center, test_pf.domain_width[0]/10.0)
+ sp_boolean = test_pf.h.boolean([sp_outer, "NOT", sp_inner])
+
+ minv, maxv = sp_boolean.quantities["Extrema"]("Density")[0]
+ contour_threshold = min(minv*10, 0.9*maxv)
+
+ contours = sp_boolean.extract_connected_sets("Density", 1, contour_threshold, maxv+1, log_space=True, cache=True)
+
+ # save object
+ cPickle.dump(contours[1][0], open("myobject.cpkl", "wb"))
+
+ # load object
+ test_load = cPickle.load(open("myobject.cpkl", "rb"))
+
+ yield assert_equal, test_load is not None, True
+ yield assert_equal, len(contours[1][0]), len(test_load)
+
+ os.remove("myobject.cpkl")
https://bitbucket.org/yt_analysis/yt/commits/e59b164d12e5/
changeset: e59b164d12e5
branch: yt
user: ejtasker
date: 2013-03-16 01:03:12
summary: merged yt
affected #: 1 file
diff -r 3c55a238d8577874b0e0f48199dfeb4ae217dce0 -r e59b164d12e5fd6547e92a02f6333b4868bc4b87 yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -4606,22 +4606,38 @@
mylog.error("Problem uploading.")
return upload_id
+# Many of these items are set up specifically to ensure that
+# we are not breaking old pickle files. This means we must only call the
+# _reconstruct_object and that we cannot mandate any additional arguments to
+# the reconstruction function.
+#
+# In the future, this would be better off being set up to more directly
+# reference objects or retain state, perhaps with a context manager.
+#
+# One final detail: time series or multiple parameter files in a single pickle
+# seems problematic.
+
+class ReconstructedObject(tuple):
+ pass
+
+def _check_nested_args(arg, ref_pf):
+ if not isinstance(arg, (tuple, list, ReconstructedObject)):
+ return arg
+ elif isinstance(arg, ReconstructedObject) and ref_pf == arg[0]:
+ return arg[1]
+ narg = [_check_nested_args(a, ref_pf) for a in arg]
+ return narg
def _reconstruct_object(*args, **kwargs):
pfid = args[0]
dtype = args[1]
+ pfs = ParameterFileStore()
+ pf = pfs.get_pf_hash(pfid)
field_parameters = args[-1]
# will be much nicer when we can do pfid, *a, fp = args
- args, new_args = args[2:-1], []
- for arg in args:
- if iterable(arg) and len(arg) == 2 \
- and not isinstance(arg, types.DictType) \
- and isinstance(arg[1], AMRData):
- new_args.append(arg[1])
- else: new_args.append(arg)
- pfs = ParameterFileStore()
- pf = pfs.get_pf_hash(pfid)
+ args = args[2:-1]
+ new_args = [_check_nested_args(a, pf) for a in args]
cls = getattr(pf.h, dtype)
obj = cls(*new_args)
obj.field_parameters.update(field_parameters)
- return pf, obj
+ return ReconstructedObject((pf, obj))
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
More information about the yt-svn
mailing list