[Yt-svn] yt-commit r1476 - branches/yt-1.5/yt/fido branches/yt-1.5/yt/lagos branches/yt-1.5/yt/raven trunk/yt/fido trunk/yt/lagos trunk/yt/raven
mturk at wrangler.dreamhost.com
mturk at wrangler.dreamhost.com
Tue Oct 6 10:20:06 PDT 2009
Author: mturk
Date: Tue Oct 6 10:19:59 2009
New Revision: 1476
URL: http://yt.enzotools.org/changeset/1476
Log:
Updating docstrings for object and parameter file storage
Modified:
branches/yt-1.5/yt/fido/ParameterFileStorage.py
branches/yt-1.5/yt/lagos/BaseDataTypes.py
branches/yt-1.5/yt/lagos/HierarchyType.py
branches/yt-1.5/yt/raven/FixedResolution.py
trunk/yt/fido/ParameterFileStorage.py
trunk/yt/lagos/BaseDataTypes.py
trunk/yt/lagos/HierarchyType.py
trunk/yt/raven/FixedResolution.py
Modified: branches/yt-1.5/yt/fido/ParameterFileStorage.py
==============================================================================
--- branches/yt-1.5/yt/fido/ParameterFileStorage.py (original)
+++ branches/yt-1.5/yt/fido/ParameterFileStorage.py Tue Oct 6 10:19:59 2009
@@ -60,6 +60,13 @@
return self
def __init__(self, in_memory = False):
+ """
+ This class is designed to be a semi-persistent storage for parameter
+ files. By identifying each parameter file with a unique hash, objects
+ can be stored independently of parameter files -- when an object is
+ loaded, the parameter file is as well, based on the hash. For
+ storage concerns, only a few hundred will be retained in cache.
+ """
if ytcfg.getboolean("yt", "StoreParameterFiles"):
self._read_only = False
self.init_db()
@@ -70,6 +77,9 @@
@parallel_simple_proxy
def init_db(self):
+ """
+ This function ensures that the storage database exists and can be used.
+ """
dbn = self._get_db_name()
dbdir = os.path.dirname(dbn)
try:
@@ -87,9 +97,15 @@
return os.path.expanduser("~/.yt/%s" % base_file_name)
def get_pf_hash(self, hash):
+ """
+ This returns a parameter file based on a hash.
+ """
return self._convert_pf(self._records[hash])
def get_pf_ctid(self, ctid):
+ """
+ This returns a parameter file based on a CurrentTimeIdentifier.
+ """
for h in self._records:
if self._records[h]['ctid'] == ctid:
return self._convert_pf(self._records[h])
@@ -126,6 +142,11 @@
return pf
def check_pf(self, pf):
+ """
+ This will ensure that the parameter file (*pf*) handed to it is
+ recorded in the storage unit. In doing so, it will update path
+ and "last_seen" information.
+ """
hash = pf._hash()
if hash not in self._records:
self.insert_pf(pf)
@@ -138,15 +159,25 @@
self.insert_pf(pf)
def insert_pf(self, pf):
+ """
+ This will insert a new *pf* and flush the database to disk.
+ """
self._records[pf._hash()] = self._adapt_pf(pf)
self.flush_db()
def wipe_hash(self, hash):
+ """
+ This removes a *hash* corresponding to a parameter file from the
+ storage.
+ """
if hash not in self._records: return
del self._records[hash]
self.flush_db()
def flush_db(self):
+ """
+ This flushes the storage to disk.
+ """
if self._read_only: return
self._write_out()
self.read_db()
@@ -167,6 +198,9 @@
@parallel_simple_proxy
def read_db(self):
+ """
+ This will read the storage device from disk.
+ """
f=open(self._get_db_name(), 'rb')
vals = csv.DictReader(f, _field_names)
db = {}
Modified: branches/yt-1.5/yt/lagos/BaseDataTypes.py
==============================================================================
--- branches/yt-1.5/yt/lagos/BaseDataTypes.py (original)
+++ branches/yt-1.5/yt/lagos/BaseDataTypes.py Tue Oct 6 10:19:59 2009
@@ -233,6 +233,11 @@
fid.close()
def save_object(self, name, filename = None):
+ """
+ Save an object. If *filename* is supplied, it will be stored in
+ a :mod:`shelve` file of that name. Otherwise, it will be stored via
+ :meth:`yt.lagos.AMRHierarchy.save_object`.
+ """
if filename is not None:
ds = shelve.open(filename, protocol=-1)
if name in ds:
Modified: branches/yt-1.5/yt/lagos/HierarchyType.py
==============================================================================
--- branches/yt-1.5/yt/lagos/HierarchyType.py (original)
+++ branches/yt-1.5/yt/lagos/HierarchyType.py Tue Oct 6 10:19:59 2009
@@ -194,10 +194,18 @@
save_data = parallel_splitter(_save_data, _reload_data_file)
def save_object(self, obj, name):
+ """
+ Save an object (*obj*) to the data_file using the Pickle protocol,
+ under the name *name* on the node /Objects.
+ """
s = cPickle.dumps(obj, protocol=-1)
self.save_data(s, "/Objects", name, force = True)
def load_object(self, name):
+ """
+ Load and return an object from the data_file using the Pickle protocol,
+ under the name *name* on the node /Objects.
+ """
obj = self.get_data("/Objects", name)
if obj is None:
return
Modified: branches/yt-1.5/yt/raven/FixedResolution.py
==============================================================================
--- branches/yt-1.5/yt/raven/FixedResolution.py (original)
+++ branches/yt-1.5/yt/raven/FixedResolution.py Tue Oct 6 10:19:59 2009
@@ -59,6 +59,9 @@
self.data[item] = val
def convert_to_pixel(self, coords):
+ """
+ This converts a code-location to an image-location.
+ """
dpx = (self.bounds[1]-self.bounds[0])/self.buff_size[0]
dpy = (self.bounds[3]-self.bounds[2])/self.buff_size[1]
px = (coords[0] - self.bounds[0])/dpx
@@ -66,14 +69,24 @@
return (px, py)
def convert_distance_x(self, distance):
+ """
+ This converts a real distance to a pixel distance in x.
+ """
dpx = (self.bounds[1]-self.bounds[0])/self.buff_size[0]
return distance/dpx
def convert_distance_y(self, distance):
+ """
+ This converts a real distance to a pixel distance in y.
+ """
dpy = (self.bounds[3]-self.bounds[2])/self.buff_size[1]
return distance/dpy
def export_hdf5(self, filename, fields = None):
+ """
+ This function opens (append-mode) an HDF5 file and adds all of the
+ requested *fields* (default: All) to the top level of the data file.
+ """
import h5py
if fields is None: fields = self.data.keys()
output = h5py.File(filename, "a")
@@ -117,6 +130,10 @@
numdisplay.display(data)
class ObliqueFixedResolutionBuffer(FixedResolutionBuffer):
+ """
+ This object is a subclass of :class:`yt.raven.FixedResolution.FixedResolutionBuffer`
+ that supports non-aligned input data objects, primarily cutting planes.
+ """
def __getitem__(self, item):
if item in self.data: return self.data[item]
indices = na.argsort(self.data_source['dx'])[::-1]
@@ -168,4 +185,7 @@
self.data[item] = val
def sum(self, item):
+ """
+ Returns the sum of a given field.
+ """
return self[item].sum()
Modified: trunk/yt/fido/ParameterFileStorage.py
==============================================================================
--- trunk/yt/fido/ParameterFileStorage.py (original)
+++ trunk/yt/fido/ParameterFileStorage.py Tue Oct 6 10:19:59 2009
@@ -60,6 +60,13 @@
return self
def __init__(self, in_memory = False):
+ """
+ This class is designed to be a semi-persistent storage for parameter
+ files. By identifying each parameter file with a unique hash, objects
+ can be stored independently of parameter files -- when an object is
+ loaded, the parameter file is as well, based on the hash. For
+ storage concerns, only a few hundred will be retained in cache.
+ """
if ytcfg.getboolean("yt", "StoreParameterFiles"):
self._read_only = False
self.init_db()
@@ -70,6 +77,9 @@
@parallel_simple_proxy
def init_db(self):
+ """
+ This function ensures that the storage database exists and can be used.
+ """
dbn = self._get_db_name()
dbdir = os.path.dirname(dbn)
try:
@@ -87,9 +97,15 @@
return os.path.expanduser("~/.yt/%s" % base_file_name)
def get_pf_hash(self, hash):
+ """
+ This returns a parameter file based on a hash.
+ """
return self._convert_pf(self._records[hash])
def get_pf_ctid(self, ctid):
+ """
+ This returns a parameter file based on a CurrentTimeIdentifier.
+ """
for h in self._records:
if self._records[h]['ctid'] == ctid:
return self._convert_pf(self._records[h])
@@ -126,6 +142,11 @@
return pf
def check_pf(self, pf):
+ """
+ This will ensure that the parameter file (*pf*) handed to it is
+ recorded in the storage unit. In doing so, it will update path
+ and "last_seen" information.
+ """
hash = pf._hash()
if hash not in self._records:
self.insert_pf(pf)
@@ -138,15 +159,25 @@
self.insert_pf(pf)
def insert_pf(self, pf):
+ """
+ This will insert a new *pf* and flush the database to disk.
+ """
self._records[pf._hash()] = self._adapt_pf(pf)
self.flush_db()
def wipe_hash(self, hash):
+ """
+ This removes a *hash* corresponding to a parameter file from the
+ storage.
+ """
if hash not in self._records: return
del self._records[hash]
self.flush_db()
def flush_db(self):
+ """
+ This flushes the storage to disk.
+ """
if self._read_only: return
self._write_out()
self.read_db()
@@ -167,6 +198,9 @@
@parallel_simple_proxy
def read_db(self):
+ """
+ This will read the storage device from disk.
+ """
f=open(self._get_db_name(), 'rb')
vals = csv.DictReader(f, _field_names)
db = {}
Modified: trunk/yt/lagos/BaseDataTypes.py
==============================================================================
--- trunk/yt/lagos/BaseDataTypes.py (original)
+++ trunk/yt/lagos/BaseDataTypes.py Tue Oct 6 10:19:59 2009
@@ -260,6 +260,11 @@
fid.close()
def save_object(self, name, filename = None):
+ """
+ Save an object. If *filename* is supplied, it will be stored in
+ a :mod:`shelve` file of that name. Otherwise, it will be stored via
+ :meth:`yt.lagos.AMRHierarchy.save_object`.
+ """
if filename is not None:
ds = shelve.open(filename, protocol=-1)
if name in ds:
Modified: trunk/yt/lagos/HierarchyType.py
==============================================================================
--- trunk/yt/lagos/HierarchyType.py (original)
+++ trunk/yt/lagos/HierarchyType.py Tue Oct 6 10:19:59 2009
@@ -201,10 +201,18 @@
save_data = parallel_splitter(_save_data, _reload_data_file)
def save_object(self, obj, name):
+ """
+ Save an object (*obj*) to the data_file using the Pickle protocol,
+ under the name *name* on the node /Objects.
+ """
s = cPickle.dumps(obj, protocol=-1)
self.save_data(s, "/Objects", name, force = True)
def load_object(self, name):
+ """
+ Load and return an object from the data_file using the Pickle protocol,
+ under the name *name* on the node /Objects.
+ """
obj = self.get_data("/Objects", name)
if obj is None:
return
Modified: trunk/yt/raven/FixedResolution.py
==============================================================================
--- trunk/yt/raven/FixedResolution.py (original)
+++ trunk/yt/raven/FixedResolution.py Tue Oct 6 10:19:59 2009
@@ -59,6 +59,9 @@
self.data[item] = val
def convert_to_pixel(self, coords):
+ """
+ This converts a code-location to an image-location.
+ """
dpx = (self.bounds[1]-self.bounds[0])/self.buff_size[0]
dpy = (self.bounds[3]-self.bounds[2])/self.buff_size[1]
px = (coords[0] - self.bounds[0])/dpx
@@ -66,14 +69,24 @@
return (px, py)
def convert_distance_x(self, distance):
+ """
+ This converts a real distance to a pixel distance in x.
+ """
dpx = (self.bounds[1]-self.bounds[0])/self.buff_size[0]
return distance/dpx
def convert_distance_y(self, distance):
+ """
+ This converts a real distance to a pixel distance in y.
+ """
dpy = (self.bounds[3]-self.bounds[2])/self.buff_size[1]
return distance/dpy
def export_hdf5(self, filename, fields = None):
+ """
+ This function opens (append-mode) an HDF5 file and adds all of the
+ requested *fields* (default: All) to the top level of the data file.
+ """
import h5py
if fields is None: fields = self.data.keys()
output = h5py.File(filename, "a")
@@ -118,6 +131,10 @@
numdisplay.display(data)
class ObliqueFixedResolutionBuffer(FixedResolutionBuffer):
+ """
+ This object is a subclass of :class:`yt.raven.FixedResolution.FixedResolutionBuffer`
+ that supports non-aligned input data objects, primarily cutting planes.
+ """
def __getitem__(self, item):
if item in self.data: return self.data[item]
indices = na.argsort(self.data_source['dx'])[::-1]
@@ -169,4 +186,7 @@
self.data[item] = val
def sum(self, item):
+ """
+ Returns the sum of a given field.
+ """
return self[item].sum()
More information about the yt-svn
mailing list