[yt-svn] commit/yt: MatthewTurk: Merging from deliberate_fields to yt.

Bitbucket commits-noreply at bitbucket.org
Fri Nov 11 13:49:07 PST 2011


1 new commit in yt:


https://bitbucket.org/yt_analysis/yt/changeset/7bc2e29981d2/
changeset:   7bc2e29981d2
branch:      yt
user:        MatthewTurk
date:        2011-11-11 22:45:05
summary:     Merging from deliberate_fields to yt.
affected #:  53 files

diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c doc/coding_styleguide.txt
--- a/doc/coding_styleguide.txt
+++ b/doc/coding_styleguide.txt
@@ -21,6 +21,9 @@
    be "na.multiply(a, 3, a)".
  * In general, avoid all double-underscore method names: __something is usually
    unnecessary.
+ * When writing a subclass, use the super built-in to access the super class,
+   rather than explicitly. Ex: "super(SpecialGrid, self).__init__()" rather than
+   "SpecialGrid.__init__()".
  * Doc strings should describe input, output, behavior, and any state changes
    that occur on an object.  See the file `doc/docstring_example.txt` for a
    fiducial example of a docstring.


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c tests/runall.py
--- a/tests/runall.py
+++ b/tests/runall.py
@@ -1,7 +1,7 @@
 import matplotlib; matplotlib.use('Agg')
 from yt.config import ytcfg
-ytcfg["yt","loglevel"] = "50"
-ytcfg["yt","serialize"] = "False"
+ytcfg["yt", "loglevel"] = "50"
+ytcfg["yt", "serialize"] = "False"
 
 from yt.utilities.answer_testing.api import \
     RegressionTestRunner, clear_registry, create_test, \
@@ -58,23 +58,23 @@
         my_hash = "UNKNOWN%s" % (time.time())
     parser = optparse.OptionParser()
     parser.add_option("-f", "--parameter-file", dest="parameter_file",
-                      default = os.path.join(cwd, "DD0010/moving7_0010"),
-                      help = "The parameter file value to feed to 'load' to test against",
-                      )
+                      default=os.path.join(cwd, "DD0010/moving7_0010"),
+                      help="The parameter file value to feed to 'load' to test against")
     parser.add_option("-l", "--list", dest="list_tests", action="store_true",
-                      default = False, help = "List all tests and then exit")
+                      default=False, help="List all tests and then exit")
     parser.add_option("-t", "--tests", dest="test_pattern", default="*",
-                      help = "The test name pattern to match.  Can include wildcards.")
+                      help="The test name pattern to match.  Can include wildcards.")
     parser.add_option("-o", "--output", dest="storage_dir",
                       default=test_storage_directory,
-                      help = "Base directory for storing test output.")
+                      help="Base directory for storing test output.")
     parser.add_option("-c", "--compare", dest="compare_name",
                       default=None,
-                      help = "The name against which we will compare")
+                      help="The name against which we will compare")
     parser.add_option("-n", "--name", dest="this_name",
                       default=my_hash,
-                      help = "The name we'll call this set of tests")
+                      help="The name we'll call this set of tests")
     opts, args = parser.parse_args()
+
     if opts.list_tests:
         tests_to_run = []
         for m, vals in mapping.items():
@@ -86,10 +86,13 @@
         tests = list(set(tests_to_run))
         print "\n    ".join(tests)
         sys.exit(0)
+
+    # Load the test pf and make sure it's good.
     pf = load(opts.parameter_file)
     if pf is None:
         print "Couldn't load the specified parameter file."
         sys.exit(1)
+
     # Now we modify our compare name and self name to include the pf.
     compare_id = opts.compare_name
     watcher = None
@@ -97,14 +100,17 @@
         compare_id += "_%s_%s" % (pf, pf._hash())
         watcher = Xunit()
     this_id = opts.this_name + "_%s_%s" % (pf, pf._hash())
+
     rtr = RegressionTestRunner(this_id, compare_id,
-            results_path = opts.storage_dir,
-            compare_results_path = opts.storage_dir,
-            io_log = [opts.parameter_file])
+                               results_path=opts.storage_dir,
+                               compare_results_path=opts.storage_dir,
+                               io_log=[opts.parameter_file])
+
     rtr.watcher = watcher
     tests_to_run = []
     for m, vals in mapping.items():
         new_tests = fnmatch.filter(vals, opts.test_pattern)
+
         if len(new_tests) == 0: continue
         load_tests(m, cwd)
         keys = set(registry_entries())




diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/data_objects/api.py
--- a/yt/data_objects/api.py
+++ b/yt/data_objects/api.py
@@ -68,7 +68,6 @@
 from field_info_container import \
     FieldInfoContainer, \
     FieldInfo, \
-    CodeFieldInfoContainer, \
     NeedsGridType, \
     NeedsOriginalGrid, \
     NeedsDataField, \


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -3421,7 +3421,7 @@
     existing regions.
     """
     _type_name = "boolean"
-    _con_args = {"regions"}
+    _con_args = ("regions")
     def __init__(self, regions, fields = None, pf = None, **kwargs):
         """
         This will build a hybrid region based on the boolean logic


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/data_objects/field_info_container.py
--- a/yt/data_objects/field_info_container.py
+++ b/yt/data_objects/field_info_container.py
@@ -34,37 +34,17 @@
 
 from yt.funcs import *
 
-class FieldInfoContainer(object): # We are all Borg.
+class FieldInfoContainer(dict): # Resistance has utility
     """
     This is a generic field container.  It contains a list of potential derived
     fields, all of which know how to act on a data object and return a value.
     This object handles converting units as well as validating the availability
     of a given field.
+
     """
-    _shared_state = {}
-    _universal_field_list = {}
-    def __new__(cls, *args, **kwargs):
-        self = object.__new__(cls, *args, **kwargs)
-        self.__dict__ = cls._shared_state
-        return self
-    def __getitem__(self, key):
-        if key in self._universal_field_list:
-            return self._universal_field_list[key]
-        raise KeyError
-    def keys(self):
-        """ Return all the field names this object knows about. """
-        return self._universal_field_list.keys()
+    fallback = None
 
-    def __iter__(self):
-        return self._universal_field_list.iterkeys()
-
-    def __setitem__(self, key, val):
-        self._universal_field_list[key] = val
-
-    def has_key(self, key):
-        return key in self._universal_field_list
-
-    def add_field(self, name, function = None, **kwargs):
+    def add_field(self, name, function=None, **kwargs):
         """
         Add a new field, along with supplemental metadata, to the list of
         available fields.  This respects a number of arguments, all of which
@@ -79,6 +59,41 @@
             return create_function
         self[name] = DerivedField(name, function, **kwargs)
 
+    def has_key(self, key):
+        # This gets used a lot
+        if key in self: return True
+        if self.fallback is None: return False
+        return self.fallback.has_key(key)
+
+    def __missing__(self, key):
+        if self.fallback is None:
+            raise KeyError("No field named %s" % key)
+        return self.fallback[key]
+
+    @classmethod
+    def create_with_fallback(cls, fallback):
+        obj = cls()
+        obj.fallback = fallback
+        return obj
+
+    def __contains__(self, key):
+        if dict.__contains__(self, key): return True
+        if self.fallback is None: return False
+        return self.fallback.has_key(key)
+
+    def __iter__(self):
+        for f in dict.__iter__(self): yield f
+        if self.fallback:
+            for f in self.fallback: yield f
+
+def TranslationFunc(field_name):
+    def _TranslationFunc(field, data):
+        return data[field_name]
+    return _TranslationFunc
+
+def NullFunc(field, data):
+    return
+
 FieldInfo = FieldInfoContainer()
 add_field = FieldInfo.add_field
 
@@ -91,28 +106,6 @@
         return function
     return inner_decorator
 
-class CodeFieldInfoContainer(FieldInfoContainer):
-    def __setitem__(self, key, val):
-        self._field_list[key] = val
-
-    def __iter__(self):
-        return itertools.chain(self._field_list.iterkeys(),
-                               self._universal_field_list.iterkeys())
-
-    def keys(self):
-        return set(self._field_list.keys() + self._universal_field_list.keys())
-
-    def has_key(self, key):
-        return key in self._universal_field_list \
-            or key in self._field_list
-
-    def __getitem__(self, key):
-        if key in self._field_list:
-            return self._field_list[key]
-        if key in self._universal_field_list:
-            return self._universal_field_list[key]
-        raise KeyError(key)
-
 class ValidationException(Exception):
     pass
 
@@ -120,7 +113,6 @@
     def __init__(self, ghost_zones = 0, fields=None):
         self.ghost_zones = ghost_zones
         self.fields = fields
-
     def __str__(self):
         return "(%s, %s)" % (self.ghost_zones, self.fields)
 
@@ -131,21 +123,18 @@
 class NeedsDataField(ValidationException):
     def __init__(self, missing_fields):
         self.missing_fields = missing_fields
-
     def __str__(self):
         return "(%s)" % (self.missing_fields)
 
 class NeedsProperty(ValidationException):
     def __init__(self, missing_properties):
         self.missing_properties = missing_properties
-
     def __str__(self):
         return "(%s)" % (self.missing_properties)
 
 class NeedsParameter(ValidationException):
     def __init__(self, missing_parameters):
         self.missing_parameters = missing_parameters
-
     def __str__(self):
         return "(%s)" % (self.missing_parameters)
 
@@ -159,14 +148,16 @@
         self.nd = nd
         self.flat = flat
         self._spatial = not flat
-        self.ActiveDimensions = [nd, nd, nd]
+        self.ActiveDimensions = [nd,nd,nd]
         self.LeftEdge = [0.0, 0.0, 0.0]
         self.RightEdge = [1.0, 1.0, 1.0]
         self.dds = na.ones(3, "float64")
         self['dx'] = self['dy'] = self['dz'] = na.array([1.0])
         class fake_parameter_file(defaultdict):
             pass
-        if pf is None:  # setup defaults
+
+        if pf is None:
+            # required attrs
             pf = fake_parameter_file(lambda: 1)
             pf.current_redshift = pf.omega_lambda = pf.omega_matter = \
                 pf.hubble_constant = pf.cosmological_simulation = 0.0
@@ -174,6 +165,7 @@
             pf.domain_right_edge = na.ones(3, 'float64')
             pf.dimensionality = 3
         self.pf = pf
+
         class fake_hierarchy(object):
             class fake_io(object):
                 def _read_data_set(io_self, data, field):
@@ -194,47 +186,42 @@
             defaultdict.__init__(self, 
                 lambda: na.ones((nd * nd * nd), dtype='float64')
                 + 1e-4*na.random.random((nd * nd * nd)))
+
     def __missing__(self, item):
         FI = getattr(self.pf, "field_info", FieldInfo)
-        if FI.has_key(item) and \
-            FI[item]._function.func_name != '<lambda>':
+        if FI.has_key(item) and FI[item]._function.func_name != 'NullFunc':
             try:
                 vv = FI[item](self)
             except NeedsGridType as exc:
                 ngz = exc.ghost_zones
-                nfd = FieldDetector(self.nd+ngz*2)
+                nfd = FieldDetector(self.nd + ngz * 2)
                 nfd._num_ghost_zones = ngz
                 vv = FI[item](nfd)
                 if ngz > 0: vv = vv[ngz:-ngz, ngz:-ngz, ngz:-ngz]
-
                 for i in nfd.requested:
                     if i not in self.requested: self.requested.append(i)
-
                 for i in nfd.requested_parameters:
                     if i not in self.requested_parameters:
                         self.requested_parameters.append(i)
-
             if vv is not None:
                 if not self.flat: self[item] = vv
                 else: self[item] = vv.ravel()
                 return self[item]
-
         self.requested.append(item)
         return defaultdict.__missing__(self, item)
 
     def _read_data(self, field_name):
         self.requested.append(field_name)
         FI = getattr(self.pf, "field_info", FieldInfo)
-        if FI.has_key(field_name) and \
-           FI[field_name].particle_type:
+        if FI.has_key(field_name) and FI[field_name].particle_type:
             self.requested.append(field_name)
             return na.ones(self.NumberOfParticles)
         return defaultdict.__missing__(self, field_name)
 
     def get_field_parameter(self, param):
         self.requested_parameters.append(param)
-        if param in ['bulk_velocity','center','height_vector']:
-            return na.random.random(3)*1e-2
+        if param in ['bulk_velocity', 'center', 'height_vector']:
+            return na.random.random(3) * 1e-2
         else:
             return 0.0
     _num_ghost_zones = 0
@@ -258,40 +245,35 @@
         :param function: is a function handle that defines the field
         :param convert_function: must convert to CGS, if it needs to be done
         :param units: is a mathtext-formatted string that describes the field
-        :param projected_units: if we display a projection, what should the units be?
+        :param projected_units: if we display a projection, what should the
+                                units be?
         :param take_log: describes whether the field should be logged
         :param validators: is a list of :class:`FieldValidator` objects
         :param particle_type: is this field based on particles?
         :param vector_field: describes the dimensionality of the field
         :param display_field: governs its appearance in the dropdowns in reason
-        :param not_in_all: is used for baryon fields from the data that are not in
-                           all the grids
+        :param not_in_all: is used for baryon fields from the data that are not
+                           in all the grids
         :param display_name: a name used in the plots
         :param projection_conversion: which unit should we multiply by in a
                                       projection?
-
         """
         self.name = name
         self._function = function
-
         if validators:
             self.validators = ensure_list(validators)
         else:
             self.validators = []
-
         self.take_log = take_log
         self._units = units
         self._projected_units = projected_units
-
         if not convert_function:
             convert_function = lambda a: 1.0
         self._convert_function = convert_function
         self._particle_convert_function = particle_convert_function
-
         self.particle_type = particle_type
         self.vector_field = vector_field
         self.projection_conversion = projection_conversion
-
         self.display_field = display_field
         self.display_name = display_name
         self.not_in_all = not_in_all
@@ -300,7 +282,6 @@
         """
         This raises an exception of the appropriate type if the set of
         validation mechanisms are not met, and otherwise returns True.
-
         """
         for validator in self.validators:
             validator(data)
@@ -310,7 +291,6 @@
     def get_dependencies(self, *args, **kwargs):
         """
         This returns a list of names of fields that this field depends on.
-
         """
         e = FieldDetector(*args, **kwargs)
         if self._function.func_name == '<lambda>':
@@ -320,50 +300,43 @@
         return e
 
     def get_units(self):
-        """ Return a string describing the units.  """
+        """ Return a string describing the units. """
         return self._units
 
     def get_projected_units(self):
         """
         Return a string describing the units if the field has been projected.
-
         """
         return self._projected_units
 
     def __call__(self, data):
-        """ Return the value of the field in a given *data* object.  """
+        """ Return the value of the field in a given *data* object. """
         ii = self.check_available(data)
         original_fields = data.keys() # Copy
         dd = self._function(self, data)
         dd *= self._convert_function(data)
-
         for field_name in data.keys():
             if field_name not in original_fields:
                 del data[field_name]
-
         return dd
 
     def get_source(self):
         """
         Return a string containing the source of the function (if possible.)
-
         """
         return inspect.getsource(self._function)
 
     def get_label(self, projected=False):
         """
         Return a data label for the given field, inluding units.
-
         """
         name = self.name
         if self.display_name is not None: name = self.display_name
         data_label = r"$\rm{%s}" % name
-
         if projected: units = self.get_projected_units()
         else: units = self.get_units()
         if units != "": data_label += r"\/\/ (%s)" % (units)
         data_label += r"$"
-
         return data_label
 
     def particle_convert(self, data):
@@ -378,11 +351,9 @@
     def __init__(self, parameters):
         """
         This validator ensures that the parameter file has a given parameter.
-
         """
         FieldValidator.__init__(self)
         self.parameters = ensure_list(parameters)
-
     def __call__(self, data):
         doesnt_have = []
         for p in self.parameters:
@@ -395,13 +366,11 @@
 class ValidateDataField(FieldValidator):
     def __init__(self, field):
         """
-        This validator ensures that the output file has a given data field
-        stored in it.
-
+        This validator ensures that the output file has a given data field stored
+        in it.
         """
         FieldValidator.__init__(self)
         self.fields = ensure_list(field)
-
     def __call__(self, data):
         doesnt_have = []
         if isinstance(data, FieldDetector): return True
@@ -410,19 +379,15 @@
                 doesnt_have.append(f)
         if len(doesnt_have) > 0:
             raise NeedsDataField(doesnt_have)
-
         return True
 
 class ValidateProperty(FieldValidator):
     def __init__(self, prop):
         """
-        This validator ensures that the data object has a given python
-        attribute.
-
+        This validator ensures that the data object has a given python attribute.
         """
         FieldValidator.__init__(self)
         self.prop = ensure_list(prop)
-
     def __call__(self, data):
         doesnt_have = []
         for p in self.prop:
@@ -430,7 +395,6 @@
                 doesnt_have.append(p)
         if len(doesnt_have) > 0:
             raise NeedsProperty(doesnt_have)
-
         return True
 
 class ValidateSpatial(FieldValidator):
@@ -438,15 +402,13 @@
         """
         This validator ensures that the data handed to the field is of spatial
         nature -- that is to say, 3-D.
-
         """
         FieldValidator.__init__(self)
         self.ghost_zones = ghost_zones
         self.fields = fields
-
     def __call__(self, data):
-        # When we say spatial information, we really mean that it has a
-        # three-dimensional data structure
+        # When we say spatial information, we really mean
+        # that it has a three-dimensional data structure
         #if isinstance(data, FieldDetector): return True
         if not data._spatial:
             raise NeedsGridType(self.ghost_zones,self.fields)
@@ -459,10 +421,8 @@
         """
         This validator ensures that the data handed to the field is an actual
         grid patch, not a covering grid of any kind.
-
         """
         FieldValidator.__init__(self)
-
     def __call__(self, data):
         # We need to make sure that it's an actual AMR grid
         if isinstance(data, FieldDetector): return True


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/data_objects/grid_patch.py
--- a/yt/data_objects/grid_patch.py
+++ b/yt/data_objects/grid_patch.py
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt-project.org/
+Homepage: http://yt.enzotools.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
 
@@ -25,10 +25,12 @@
 
 import exceptions
 import pdb
-import numpy as na
 import weakref
 
+import numpy as na
+
 from yt.funcs import *
+from yt.utilities.definitions import x_dict, y_dict
 
 from yt.data_objects.data_containers import YTFieldData
 from yt.utilities.definitions import x_dict, y_dict
@@ -75,20 +77,21 @@
         if self.start_index is not None:
             return self.start_index
         if self.Parent == None:
-            iLE = self.LeftEdge - self.pf.domain_left_edge
-            start_index = iLE / self.dds
+            left = self.LeftEdge - self.pf.domain_left_edge
+            start_index = left / self.dds
             return na.rint(start_index).astype('int64').ravel()
+
         pdx = self.Parent.dds
         start_index = (self.Parent.get_global_startindex()) + \
-                       na.rint((self.LeftEdge - self.Parent.LeftEdge)/pdx)
-        self.start_index = (start_index*self.pf.refine_by).astype('int64').ravel()
+                       na.rint((self.LeftEdge - self.Parent.LeftEdge) / pdx)
+        self.start_index = (start_index * self.pf.refine_by).astype('int64').ravel()
         return self.start_index
 
-
     def get_field_parameter(self, name, default=None):
         """
-        This is typically only used by derived field functions, but
-        it returns parameters used to generate fields.
+        This is typically only used by derived field functions, but it returns
+        parameters used to generate fields.
+
         """
         if self.field_parameters.has_key(name):
             return self.field_parameters[name]
@@ -99,19 +102,19 @@
         """
         Here we set up dictionaries that get passed up and down and ultimately
         to derived fields.
+
         """
         self.field_parameters[name] = val
 
     def has_field_parameter(self, name):
-        """
-        Checks if a field parameter is set.
-        """
+        """ Checks if a field parameter is set. """
         return self.field_parameters.has_key(name)
 
     def convert(self, datatype):
         """
-        This will attempt to convert a given unit to cgs from code units.
-        It either returns the multiplicative factor or throws a KeyError.
+        This will attempt to convert a given unit to cgs from code units. It
+        either returns the multiplicative factor or throws a KeyError.
+
         """
         return self.pf[datatype]
 
@@ -119,7 +122,7 @@
         # We'll do this the slow way to be clear what's going on
         s = "%s (%s): " % (self.__class__.__name__, self.pf)
         s += ", ".join(["%s=%s" % (i, getattr(self,i))
-                       for i in self._con_args])
+                        for i in self._con_args])
         return s
 
     def _generate_field(self, field):
@@ -133,7 +136,7 @@
                 f_gz = ngt_exception.fields
                 gz_grid = self.retrieve_ghost_zones(n_gz, f_gz, smoothed=True)
                 temp_array = self.pf.field_info[field](gz_grid)
-                sl = [slice(n_gz,-n_gz)] * 3
+                sl = [slice(n_gz, -n_gz)] * 3
                 self[field] = temp_array[sl]
             else:
                 self[field] = self.pf.field_info[field](self)
@@ -196,14 +199,14 @@
 
     def _setup_dx(self):
         # So first we figure out what the index is.  We don't assume
-        # that dx=dy=dz , at least here.  We probably do elsewhere.
+        # that dx=dy=dz, at least here.  We probably do elsewhere.
         id = self.id - self._id_offset
         if self.Parent is not None:
             self.dds = self.Parent.dds / self.pf.refine_by
         else:
             LE, RE = self.hierarchy.grid_left_edge[id,:], \
                      self.hierarchy.grid_right_edge[id,:]
-            self.dds = na.array((RE-LE)/self.ActiveDimensions)
+            self.dds = na.array((RE - LE) / self.ActiveDimensions)
         if self.pf.dimensionality < 2: self.dds[1] = 1.0
         if self.pf.dimensionality < 3: self.dds[2] = 1.0
         self.field_data['dx'], self.field_data['dy'], self.field_data['dz'] = self.dds
@@ -226,6 +229,7 @@
         Generate a mask that shows which cells overlap with arbitrary arrays
         *LE* and *RE*) of edges, typically grids, along *axis*.
         Use algorithm described at http://www.gamedev.net/reference/articles/article735.asp
+
         """
         x = x_dict[axis]
         y = y_dict[axis]
@@ -243,8 +247,9 @@
 
     def clear_data(self):
         """
-        Clear out the following things: child_mask, child_indices,
-        all fields, all field parameters.
+        Clear out the following things: child_mask, child_indices, all fields,
+        all field parameters.
+
         """
         self._del_child_mask()
         self._del_child_indices()
@@ -255,9 +260,7 @@
         return self._child_mask, self._child_indices
 
     def _prepare_grid(self):
-        """
-        Copies all the appropriate attributes from the hierarchy
-        """
+        """ Copies all the appropriate attributes from the hierarchy. """
         # This is definitely the slowest part of generating the hierarchy
         # Now we give it pointers to all of its attributes
         # Note that to keep in line with Enzo, we have broken PEP-8
@@ -269,33 +272,27 @@
         h.grid_levels[my_ind, 0] = self.Level
         # This might be needed for streaming formats
         #self.Time = h.gridTimes[my_ind,0]
-        self.NumberOfParticles = h.grid_particle_count[my_ind,0]
+        self.NumberOfParticles = h.grid_particle_count[my_ind, 0]
 
     def __len__(self):
         return na.prod(self.ActiveDimensions)
 
     def find_max(self, field):
-        """
-        Returns value, index of maximum value of *field* in this gird
-        """
-        coord1d=(self[field]*self.child_mask).argmax()
-        coord=na.unravel_index(coord1d, self[field].shape)
+        """ Returns value, index of maximum value of *field* in this grid. """
+        coord1d = (self[field] * self.child_mask).argmax()
+        coord = na.unravel_index(coord1d, self[field].shape)
         val = self[field][coord]
         return val, coord
 
     def find_min(self, field):
-        """
-        Returns value, index of minimum value of *field* in this gird
-        """
-        coord1d=(self[field]*self.child_mask).argmin()
-        coord=na.unravel_index(coord1d, self[field].shape)
+        """ Returns value, index of minimum value of *field* in this grid. """
+        coord1d = (self[field] * self.child_mask).argmin()
+        coord = na.unravel_index(coord1d, self[field].shape)
         val = self[field][coord]
         return val, coord
 
     def get_position(self, index):
-        """
-        Returns center position of an *index*
-        """
+        """ Returns center position of an *index*. """
         pos = (index + 0.5) * self.dds + self.LeftEdge
         return pos
 
@@ -303,6 +300,7 @@
         """
         Clears all datafields from memory and calls
         :meth:`clear_derived_quantities`.
+
         """
         for key in self.keys():
             del self.field_data[key]
@@ -313,9 +311,7 @@
         self.clear_derived_quantities()
 
     def clear_derived_quantities(self):
-        """
-        Clears coordinates, child_indices, child_mask.
-        """
+        """ Clears coordinates, child_indices, child_mask. """
         # Access the property raw-values here
         del self.child_mask
         del self.child_ind
@@ -368,10 +364,10 @@
 
     #@time_execution
     def __fill_child_mask(self, child, mask, tofill):
-        rf = self.pf.refine_by**(child.Level - self.Level)
+        rf = self.pf.refine_by
         gi, cgi = self.get_global_startindex(), child.get_global_startindex()
-        startIndex = na.maximum(0, cgi/rf - gi)
-        endIndex = na.minimum( (cgi+child.ActiveDimensions)/rf - gi,
+        startIndex = na.maximum(0, cgi / rf - gi)
+        endIndex = na.minimum((cgi + child.ActiveDimensions) / rf - gi,
                               self.ActiveDimensions)
         endIndex += (startIndex == endIndex)
         mask[startIndex[0]:endIndex[0],
@@ -381,7 +377,8 @@
     def __generate_child_mask(self):
         """
         Generates self.child_mask, which is zero where child grids exist (and
-        thus, where higher resolution data is available.)
+        thus, where higher resolution data is available).
+
         """
         self._child_mask = na.ones(self.ActiveDimensions, 'int32')
         for child in self.Children:
@@ -396,6 +393,7 @@
         """
         Generates self.child_index_mask, which is -1 where there is no child,
         and otherwise has the ID of the grid that resides there.
+
         """
         self._child_index_mask = na.zeros(self.ActiveDimensions, 'int32') - 1
         for child in self.Children:
@@ -410,10 +408,10 @@
         if self.__coords == None: self._generate_coords()
         return self.__coords
 
-    def _set_coords(self, newC):
+    def _set_coords(self, new_c):
         if self.__coords != None:
             mylog.warning("Overriding coords attribute!  This is probably unwise!")
-        self.__coords = newC
+        self.__coords = new_c
 
     def _del_coords(self):
         del self.__coords
@@ -421,12 +419,12 @@
 
     def _generate_coords(self):
         """
-        Creates self.coords, which is of dimensions (3,ActiveDimensions)
+        Creates self.coords, which is of dimensions (3, ActiveDimensions)
+
         """
-        #print "Generating coords"
         ind = na.indices(self.ActiveDimensions)
-        LE = na.reshape(self.LeftEdge,(3,1,1,1))
-        self['x'], self['y'], self['z'] = (ind+0.5)*self.dds+LE
+        left_shaped = na.reshape(self.LeftEdge, (3, 1, 1, 1))
+        self['x'], self['y'], self['z'] = (ind + 0.5) * self.dds + left_shaped
 
     child_mask = property(fget=_get_child_mask, fdel=_del_child_mask)
     child_index_mask = property(fget=_get_child_index_mask, fdel=_del_child_index_mask)
@@ -437,9 +435,10 @@
         # We will attempt this by creating a datacube that is exactly bigger
         # than the grid by nZones*dx in each direction
         nl = self.get_global_startindex() - n_zones
-        nr = nl + self.ActiveDimensions + 2*n_zones
+        nr = nl + self.ActiveDimensions + 2 * n_zones
         new_left_edge = nl * self.dds + self.pf.domain_left_edge
         new_right_edge = nr * self.dds + self.pf.domain_left_edge
+
         # Something different needs to be done for the root grid, though
         level = self.Level
         if all_levels:
@@ -452,32 +451,17 @@
         # those of this grid.
         kwargs.update(self.field_parameters)
         if smoothed:
-            #cube = self.hierarchy.smoothed_covering_grid(
-            #    level, new_left_edge, new_right_edge, **kwargs)
             cube = self.hierarchy.smoothed_covering_grid(
                 level, new_left_edge, **kwargs)
         else:
-            cube = self.hierarchy.covering_grid(
-                level, new_left_edge, **kwargs)
+            cube = self.hierarchy.covering_grid(level, new_left_edge, **kwargs)
+
         return cube
 
-    def get_vertex_centered_data(self, field, smoothed=True,
-                                 no_ghost=False):
-        if not no_ghost:
-            cg = self.retrieve_ghost_zones(1, field, smoothed=smoothed)
-            # We have two extra zones in every direction
-            new_field = na.zeros(self.ActiveDimensions + 1, dtype='float64')
-            na.add(new_field, cg[field][1: ,1: ,1: ], new_field)
-            na.add(new_field, cg[field][:-1,1: ,1: ], new_field)
-            na.add(new_field, cg[field][1: ,:-1,1: ], new_field)
-            na.add(new_field, cg[field][1: ,1: ,:-1], new_field)
-            na.add(new_field, cg[field][:-1,1: ,:-1], new_field)
-            na.add(new_field, cg[field][1: ,:-1,:-1], new_field)
-            na.add(new_field, cg[field][:-1,:-1,1: ], new_field)
-            na.add(new_field, cg[field][:-1,:-1,:-1], new_field)
-            na.multiply(new_field, 0.125, new_field)
-        else:
-            new_field = na.zeros(self.ActiveDimensions + 1, dtype='float64')
+    def get_vertex_centered_data(self, field, smoothed=True, no_ghost=False):
+        new_field = na.zeros(self.ActiveDimensions + 1, dtype='float64')
+
+        if no_ghost:
             of = self[field]
             new_field[:-1,:-1,:-1] += of
             new_field[:-1,:-1,1:] += of
@@ -493,13 +477,23 @@
 
             new_field[:,:, -1] = 2.0*new_field[:,:,-2] - new_field[:,:,-3]
             new_field[:,:, 0]  = 2.0*new_field[:,:,1] - new_field[:,:,2]
-
             new_field[:,-1, :] = 2.0*new_field[:,-2,:] - new_field[:,-3,:]
             new_field[:,0, :]  = 2.0*new_field[:,1,:] - new_field[:,2,:]
-
             new_field[-1,:,:] = 2.0*new_field[-2,:,:] - new_field[-3,:,:]
             new_field[0,:,:]  = 2.0*new_field[1,:,:] - new_field[2,:,:]
+
             if self.pf.field_info[field].take_log:
                 na.power(10.0, new_field, new_field)
+        else:
+            cg = self.retrieve_ghost_zones(1, field, smoothed=smoothed)
+            na.add(new_field, cg[field][1: ,1: ,1: ], new_field)
+            na.add(new_field, cg[field][:-1,1: ,1: ], new_field)
+            na.add(new_field, cg[field][1: ,:-1,1: ], new_field)
+            na.add(new_field, cg[field][1: ,1: ,:-1], new_field)
+            na.add(new_field, cg[field][:-1,1: ,:-1], new_field)
+            na.add(new_field, cg[field][1: ,:-1,:-1], new_field)
+            na.add(new_field, cg[field][:-1,:-1,1: ], new_field)
+            na.add(new_field, cg[field][:-1,:-1,:-1], new_field)
+            na.multiply(new_field, 0.125, new_field)
+
         return new_field
-


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/data_objects/hierarchy.py
--- a/yt/data_objects/hierarchy.py
+++ b/yt/data_objects/hierarchy.py
@@ -35,12 +35,12 @@
 
 from yt.arraytypes import blankRecordArray
 from yt.config import ytcfg
+from yt.data_objects.field_info_container import NullFunc
 from yt.utilities.definitions import MAXLEVEL
 from yt.utilities.io_handler import io_registry
 from yt.utilities.parallel_tools.parallel_analysis_interface import \
     ParallelAnalysisInterface, parallel_splitter
-from object_finding_mixin import \
-    ObjectFindingMixin
+from object_finding_mixin import ObjectFindingMixin
 
 from .data_containers import data_object_registry
 
@@ -137,6 +137,32 @@
             self.proj = self.overlap_proj
         self.object_types.sort()
 
+    def _setup_unknown_fields(self):
+        known_fields = self.parameter_file._fieldinfo_known
+        for field in self.field_list:
+            # By allowing a backup, we don't mandate that it's found in our
+            # current field info.  This means we'll instead simply override
+            # it.
+            ff = self.parameter_file.field_info.pop(field, None)
+            if field not in known_fields:
+                rootloginfo("Adding unknown field %s to list of fields", field)
+                cf = None
+                if self.parameter_file.has_key(field):
+                    def external_wrapper(f):
+                        def _convert_function(data):
+                            return data.convert(f)
+                        return _convert_function
+                    cf = external_wrapper(field)
+                # Note that we call add_field on the field_info directly.  This
+                # will allow the same field detection mechanism to work for 1D, 2D
+                # and 3D fields.
+                self.pf.field_info.add_field(
+                        field, NullFunc,
+                        convert_function=cf, take_log=False, units=r"Unknown")
+            else:
+                mylog.debug("Adding known field %s to list of fields", field)
+                self.parameter_file.field_info[field] = known_fields[field]
+            
     # Now all the object related stuff
 
     def all_data(self, find_max=False):


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -35,6 +35,8 @@
     ParameterFileStore, \
     NoParameterShelf, \
     output_type_registry
+from yt.data_objects.field_info_container import \
+    FieldInfoContainer, NullFunc
 
 # We want to support the movie format in the future.
 # When such a thing comes to pass, I'll move all the stuff that is contant up
@@ -96,6 +98,8 @@
                 pass
         self.print_key_parameters()
 
+        self.create_field_info()
+
     def __reduce__(self):
         args = (self._hash(),)
         return (_reconstruct_pf, args)
@@ -189,6 +193,17 @@
                 v = getattr(self, a)
                 mylog.info("Parameters: %-25s = %s", a, v)
 
+    def create_field_info(self):
+        if getattr(self, "field_info", None) is None:
+            # The setting up of fields occurs in the hierarchy, which is only
+            # instantiated once.  So we have to double check to make sure that,
+            # in the event of double-loads of a parameter file, we do not blow
+            # away the exising field_info.
+            self.field_info = FieldInfoContainer.create_with_fallback(
+                                self._fieldinfo_fallback)
+
+        
+
 def _reconstruct_pf(*args, **kwargs):
     pfs = ParameterFileStore()
     pf = pfs.get_pf_hash(*args)


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/data_objects/universal_fields.py
--- a/yt/data_objects/universal_fields.py
+++ b/yt/data_objects/universal_fields.py
@@ -139,88 +139,6 @@
 add_field("SoundSpeed", function=_SoundSpeed,
           units=r"\rm{cm}/\rm{s}")
 
-def particle_func(p_field, dtype='float64'):
-    def _Particles(field, data):
-        io = data.hierarchy.io
-        if not data.NumberOfParticles > 0:
-            return na.array([], dtype=dtype)
-        try:
-            return io._read_data_set(data, p_field).astype(dtype)
-        except io._read_exception:
-            pass
-        # This is bad.  But it's the best idea I have right now.
-        return data._read_data(p_field.replace("_"," ")).astype(dtype)
-    return _Particles
-for pf in ["type", "mass"] + \
-          ["position_%s" % ax for ax in 'xyz']:
-    pfunc = particle_func("particle_%s" % (pf))
-    add_field("particle_%s" % pf, function=pfunc,
-              validators = [ValidateSpatial(0)],
-              particle_type=True)
-
-def _convRetainInt(data):
-    return 1
-add_field("particle_index", function=particle_func("particle_index", "int64"),
-          validators = [ValidateSpatial(0)], particle_type=True,
-          convert_function=_convRetainInt)
-
-def _get_vel_convert(ax):
-    def _convert_p_vel(data):
-        return data.convert("%s-velocity" % ax)
-    return _convert_p_vel
-for ax in 'xyz':
-    pf = "particle_velocity_%s" % ax
-    pfunc = particle_func(pf)
-    cfunc = _get_vel_convert(ax)
-    add_field(pf, function=pfunc, convert_function=cfunc,
-              validators = [ValidateSpatial(0)],
-              particle_type=True)
-
-for pf in ["creation_time", "dynamical_time", "metallicity_fraction"]:
-    pfunc = particle_func(pf)
-    add_field(pf, function=pfunc,
-              validators = [ValidateSpatial(0),
-                            ValidateDataField(pf)],
-              particle_type=True)
-add_field("particle_mass", function=particle_func("particle_mass"),
-          validators=[ValidateSpatial(0)], particle_type=True)
-
-def _ParticleAge(field, data):
-    current_time = data.pf.current_time
-    return (current_time - data["creation_time"])
-def _convertParticleAge(data):
-    return data.convert("years")
-add_field("ParticleAge", function=_ParticleAge,
-          validators=[ValidateDataField("creation_time")],
-          particle_type=True, convert_function=_convertParticleAge)
-
-def _ParticleMass(field, data):
-    particles = data["particle_mass"].astype('float64') * \
-                just_one(data["CellVolumeCode"].ravel())
-    # Note that we mandate grid-type here, so this is okay
-    return particles
-
-def _convertParticleMass(data):
-    return data.convert("Density")*(data.convert("cm")**3.0)
-def _IOLevelParticleMass(grid):
-    dd = dict(particle_mass = na.ones(1), CellVolumeCode=grid["CellVolumeCode"])
-    cf = (_ParticleMass(None, dd) * _convertParticleMass(grid))[0]
-    return cf
-def _convertParticleMassMsun(data):
-    return data.convert("Density")*((data.convert("cm")**3.0)/1.989e33)
-def _IOLevelParticleMassMsun(grid):
-    dd = dict(particle_mass = na.ones(1), CellVolumeCode=grid["CellVolumeCode"])
-    cf = (_ParticleMass(None, dd) * _convertParticleMassMsun(grid))[0]
-    return cf
-add_field("ParticleMass",
-          function=_ParticleMass, validators=[ValidateSpatial(0)],
-          particle_type=True, convert_function=_convertParticleMass,
-          particle_convert_function=_IOLevelParticleMass)
-add_field("ParticleMassMsun",
-          function=_ParticleMass, validators=[ValidateSpatial(0)],
-          particle_type=True, convert_function=_convertParticleMassMsun,
-          particle_convert_function=_IOLevelParticleMassMsun)
-
 def _RadialMachNumber(field, data):
     """M{|v|/t_sound}"""
     return na.abs(data["RadialVelocity"]) / data["SoundSpeed"]


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/art/api.py
--- a/yt/frontends/art/api.py
+++ b/yt/frontends/art/api.py
@@ -34,7 +34,6 @@
       ARTStaticOutput
 
 from .fields import \
-      ARTFieldContainer, \
       ARTFieldInfo, \
       add_art_field
 


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/art/data_structures.py
--- a/yt/frontends/art/data_structures.py
+++ b/yt/frontends/art/data_structures.py
@@ -37,8 +37,10 @@
       AMRHierarchy
 from yt.data_objects.static_output import \
       StaticOutput
-from .fields import ARTFieldContainer
-from .fields import add_field
+from yt.data_objects.field_info_container import \
+    FieldInfoContainer, NullFunc
+from .fields import \
+    ARTFieldInfo, add_art_field, KnownARTFields
 from yt.utilities.definitions import \
     mpc_conversion
 from yt.utilities.io_handler import \
@@ -113,7 +115,6 @@
     
     def __init__(self, pf, data_style='art'):
         self.data_style = data_style
-        self.field_info = ARTFieldContainer()
         self.parameter_file = weakref.proxy(pf)
         # for now, the hierarchy file is the parameter file!
         self.hierarchy_filename = self.parameter_file.parameter_filename
@@ -346,20 +347,6 @@
             g._setup_dx()
         self.max_level = self.grid_levels.max()
 
-    def _setup_unknown_fields(self):
-        for field in self.field_list:
-            if field in self.parameter_file.field_info: continue
-            mylog.info("Adding %s to list of fields", field)
-            cf = None
-            if self.parameter_file.has_key(field):
-                def external_wrapper(f):
-                    def _convert_function(data):
-                        return data.convert(f)
-                    return _convert_function
-                cf = external_wrapper(field)
-            add_field(field, lambda a, b: None,
-                      convert_function=cf, take_log=False)
-
     def _setup_derived_fields(self):
         self.derived_field_list = []
 
@@ -372,7 +359,8 @@
 
 class ARTStaticOutput(StaticOutput):
     _hierarchy_class = ARTHierarchy
-    _fieldinfo_class = ARTFieldContainer
+    _fieldinfo_fallback = ARTFieldInfo
+    _fieldinfo_known = KnownARTFields
     _handle = None
     
     def __init__(self, filename, data_style='art',
@@ -382,7 +370,6 @@
         StaticOutput.__init__(self, filename, data_style)
         self.storage_filename = storage_filename
         
-        self.field_info = self._fieldinfo_class()
         self.dimensionality = 3
         self.refine_by = 2
         self.parameters["HydroMethod"] = 'art'


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/art/fields.py
--- a/yt/frontends/art/fields.py
+++ b/yt/frontends/art/fields.py
@@ -24,7 +24,10 @@
 """
 
 from yt.data_objects.field_info_container import \
-    CodeFieldInfoContainer, \
+    FieldInfoContainer, \
+    FieldInfo, \
+    NullFunc, \
+    TranslationFunc, \
     ValidateParameter, \
     ValidateDataField, \
     ValidateProperty, \
@@ -34,15 +37,11 @@
 from yt.utilities.physical_constants import \
     boltzmann_constant_cgs, mass_hydrogen_cgs
 
-import pdb
+ARTFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
+add_field = ARTFieldInfo.add_field
 
-class ARTFieldContainer(CodeFieldInfoContainer):
-    _shared_state = {}
-    _field_list = {}
-ARTFieldInfo = ARTFieldContainer()
-add_art_field = ARTFieldInfo.add_field
-
-add_field = add_art_field
+KnownARTFields = FieldInfoContainer()
+add_art_field = KnownARTFields.add_field
 
 translation_dict = {"Density":"density",
                     "TotalEnergy":"TotalEnergy",
@@ -54,33 +53,28 @@
                     "GasEnergy":"GasEnergy"
                    }
 
-def _generate_translation(mine, theirs):
-    add_field(theirs, function=lambda a, b: b[mine], take_log=True)
-
 for f,v in translation_dict.items():
-    if v not in ARTFieldInfo:
-        add_field(v, function=lambda a,b: None, take_log=False,
+    add_art_field(v, function=NullFunc, take_log=False,
                   validators = [ValidateDataField(v)])
-    #print "Setting up translator from %s to %s" % (v, f)
-    _generate_translation(v, f)
+    add_art_field(f, function=TranslationFunc(v), take_log=True)
 
 #def _convertMetallicity(data):
 #    return data.convert("Metal_Density1")
-#ARTFieldInfo["Metal_Density1"]._units = r"1"
-#ARTFieldInfo["Metal_Density1"]._projected_units = r"1"
-#ARTFieldInfo["Metal_Density1"]._convert_function=_convertMetallicity
+#KnownARTFields["Metal_Density1"]._units = r"1"
+#KnownARTFields["Metal_Density1"]._projected_units = r"1"
+#KnownARTFields["Metal_Density1"]._convert_function=_convertMetallicity
 
 
 def _convertDensity(data):
     return data.convert("Density")
-ARTFieldInfo["Density"]._units = r"\rm{g}/\rm{cm}^3"
-ARTFieldInfo["Density"]._projected_units = r"\rm{g}/\rm{cm}^2"
-ARTFieldInfo["Density"]._convert_function=_convertDensity
+KnownARTFields["Density"]._units = r"\rm{g}/\rm{cm}^3"
+KnownARTFields["Density"]._projected_units = r"\rm{g}/\rm{cm}^2"
+KnownARTFields["Density"]._convert_function=_convertDensity
 
 def _convertEnergy(data):
     return data.convert("GasEnergy")
-ARTFieldInfo["GasEnergy"]._units = r"\rm{ergs}/\rm{g}"
-ARTFieldInfo["GasEnergy"]._convert_function=_convertEnergy
+KnownARTFields["GasEnergy"]._units = r"\rm{ergs}/\rm{g}"
+KnownARTFields["GasEnergy"]._convert_function=_convertEnergy
 
 def _Temperature(field, data):
     tr  = data["GasEnergy"] / data["Density"]
@@ -89,9 +83,9 @@
     return tr
 def _convertTemperature(data):
     return data.convert("Temperature")
-add_field("Temperature", function=_Temperature, units = r"\mathrm{K}")
-ARTFieldInfo["Temperature"]._units = r"\mathrm{K}"
-ARTFieldInfo["Temperature"]._convert_function=_convertTemperature
+add_art_field("Temperature", function=_Temperature, units = r"\mathrm{K}")
+KnownARTFields["Temperature"]._units = r"\mathrm{K}"
+KnownARTFields["Temperature"]._convert_function=_convertTemperature
 
 def _MetallicitySNII(field, data):
     #get the dimensionless mass fraction
@@ -99,8 +93,8 @@
     tr *= data.pf.conversion_factors["Density"]    
     return tr
     
-add_field("MetallicitySNII", function=_MetallicitySNII, units = r"\mathrm{K}")
-ARTFieldInfo["MetallicitySNII"]._units = r"\mathrm{K}"
+add_art_field("MetallicitySNII", function=_MetallicitySNII, units = r"\mathrm{K}")
+KnownARTFields["MetallicitySNII"]._units = r"\mathrm{K}"
 
 def _MetallicitySNIa(field, data):
     #get the dimensionless mass fraction
@@ -108,8 +102,8 @@
     tr *= data.pf.conversion_factors["Density"]    
     return tr
     
-add_field("MetallicitySNIa", function=_MetallicitySNIa, units = r"\mathrm{K}")
-ARTFieldInfo["MetallicitySNIa"]._units = r"\mathrm{K}"
+add_art_field("MetallicitySNIa", function=_MetallicitySNIa, units = r"\mathrm{K}")
+KnownARTFields["MetallicitySNIa"]._units = r"\mathrm{K}"
 
 def _Metallicity(field, data):
     #get the dimensionless mass fraction of the total metals
@@ -118,14 +112,14 @@
     tr *= data.pf.conversion_factors["Density"]    
     return tr
     
-add_field("Metallicity", function=_Metallicity, units = r"\mathrm{K}")
-ARTFieldInfo["Metallicity"]._units = r"\mathrm{K}"
+add_art_field("Metallicity", function=_Metallicity, units = r"\mathrm{K}")
+KnownARTFields["Metallicity"]._units = r"\mathrm{K}"
 
 def _Metal_Density(field,data):
     return data["Metal_DensitySNII"]+data["Metal_DensitySNIa"]
 def _convert_Metal_Density(data):
     return data.convert("Metal_Density")
 
-add_field("Metal_Density", function=_Metal_Density, units = r"\mathrm{K}")
-ARTFieldInfo["Metal_Density"]._units = r"\mathrm{K}"
-ARTFieldInfo["Metal_Density"]._convert_function=_convert_Metal_Density
+add_art_field("Metal_Density", function=_Metal_Density, units = r"\mathrm{K}")
+KnownARTFields["Metal_Density"]._units = r"\mathrm{K}"
+KnownARTFields["Metal_Density"]._convert_function=_convert_Metal_Density


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/castro/api.py
--- a/yt/frontends/castro/api.py
+++ b/yt/frontends/castro/api.py
@@ -34,7 +34,6 @@
       CastroStaticOutput
 
 from .fields import \
-      CastroFieldContainer, \
       CastroFieldInfo, \
       add_castro_field
 


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/castro/data_structures.py
--- a/yt/frontends/castro/data_structures.py
+++ b/yt/frontends/castro/data_structures.py
@@ -27,27 +27,19 @@
 import os
 import weakref
 import itertools
+from collections import defaultdict
+from string import strip, rstrip
+from stat import ST_CTIME
+
 import numpy as na
 
-from collections import \
-    defaultdict
-from string import \
-    strip, \
-    rstrip
-from stat import \
-    ST_CTIME
-
 from yt.funcs import *
-from yt.data_objects.grid_patch import \
-           AMRGridPatch
-from yt.data_objects.hierarchy import \
-           AMRHierarchy
-from yt.data_objects.static_output import \
-           StaticOutput
-from yt.utilities.definitions import \
-    mpc_conversion
-from yt.utilities.amr_utils import \
-    get_box_grids_level
+from yt.data_objects.field_info_container import FieldInfoContainer, NullFunc
+from yt.data_objects.grid_patch import AMRGridPatch
+from yt.data_objects.hierarchy import AMRHierarchy
+from yt.data_objects.static_output import StaticOutput
+from yt.utilities.definitions import mpc_conversion
+from yt.utilities.amr_utils import get_box_grids_level
 
 from .definitions import \
     castro2enzoDict, \
@@ -56,39 +48,40 @@
     castro_FAB_header_pattern, \
     castro_particle_field_names, \
     boxlib_bool_to_int
-
 from .fields import \
-    CastroFieldContainer, \
-    add_field
+    CastroFieldInfo, \
+    KnownCastroFields, \
+    add_castro_field
 
 
 class CastroGrid(AMRGridPatch):
     _id_offset = 0
-    def __init__(self, LeftEdge, RightEdge, index, level, filename, offset, dimensions, start, stop, paranoia=False,**kwargs):
-        AMRGridPatch.__init__(self, index,**kwargs)
+
+    def __init__(self, LeftEdge, RightEdge, index, level, filename, offset,
+                 dimensions, start, stop, paranoia=False, **kwargs):
+        super(CastroGrid, self).__init__(index, **kwargs)
         self.filename = filename
         self._offset = offset
-        self._paranoid = paranoia
+        self._paranoid = paranoia  # TODO: Factor this behavior out in tests
 
-        # should error check this
+        ### TODO: error check this (test)
         self.ActiveDimensions = (dimensions.copy()).astype('int32')#.transpose()
         self.start_index = start.copy()#.transpose()
         self.stop_index = stop.copy()#.transpose()
         self.LeftEdge  = LeftEdge.copy()
         self.RightEdge = RightEdge.copy()
         self.index = index
-        self.Level = level
+        self.Level = level
 
     def get_global_startindex(self):
         return self.start_index
 
     def _prepare_grid(self):
-        """
-        Copies all the appropriate attributes from the hierarchy
-        """
-        # This is definitely the slowest part of generating the hierarchy
+        """ Copies all the appropriate attributes from the hierarchy. """
+        # This is definitely the slowest part of generating the hierarchy.
         # Now we give it pointers to all of its attributes
         # Note that to keep in line with Enzo, we have broken PEP-8
+
         h = self.hierarchy # cache it
         #self.StartIndices = h.gridStartIndices[self.id]
         #self.EndIndices = h.gridEndIndices[self.id]
@@ -100,6 +93,7 @@
         self.field_indexes = h.field_indexes
         self.Children = h.gridTree[self.id]
         pIDs = h.gridReverseTree[self.id]
+
         if len(pIDs) > 0:
             self.Parent = [weakref.proxy(h.grids[pID]) for pID in pIDs]
         else:
@@ -115,6 +109,7 @@
             LE, RE = self.hierarchy.grid_left_edge[id,:], \
                      self.hierarchy.grid_right_edge[id,:]
             self.dds = na.array((RE-LE)/self.ActiveDimensions)
+
         if self.pf.dimensionality < 2: self.dds[1] = 1.0
         if self.pf.dimensionality < 3: self.dds[2] = 1.0
         self.field_data['dx'], self.field_data['dy'], self.field_data['dz'] = self.dds
@@ -124,86 +119,90 @@
 
 class CastroHierarchy(AMRHierarchy):
     grid = CastroGrid
+
     def __init__(self, pf, data_style='castro_native'):
-        self.field_info = CastroFieldContainer()
+        super(CastroHierarchy, self).__init__(pf, data_style)
+
         self.field_indexes = {}
         self.parameter_file = weakref.proxy(pf)
         header_filename = os.path.join(pf.fullplotdir, 'Header')
         self.directory = pf.fullpath
         self.data_style = data_style
-        #self._setup_classes()
 
         # This also sets up the grid objects
-        self.read_global_header(header_filename, self.parameter_file.paranoid_read)
+        self.read_global_header(header_filename,
+                                self.parameter_file.paranoid_read) 
         self.read_particle_header()
-        self.__cache_endianness(self.levels[-1].grids[-1])
-        AMRHierarchy.__init__(self, pf, self.data_style)
+        self._cache_endianness(self.levels[-1].grids[-1])
         self._setup_data_io()
         self._setup_field_list()
         self._populate_hierarchy()
 
     def read_global_header(self, filename, paranoid_read):
-        """
-        read the global header file for an Castro plotfile output.
-        """
+        """ Read the global header file for an Castro plotfile output. """
         counter = 0
-        header_file = open(filename,'r')
-        self.__global_header_lines = header_file.readlines()
+        header_file = open(filename, 'r')
+        self._global_header_lines = header_file.readlines()
 
         # parse the file
-        self.castro_version = self.__global_header_lines[0].rstrip()
-        self.n_fields      = int(self.__global_header_lines[1])
+        self.castro_version = self._global_header_lines[0].rstrip()
+        self.n_fields = int(self._global_header_lines[1])
 
-        counter = self.n_fields+2
+        counter = self.n_fields + 2
         self.field_list = []
-        for i, line in enumerate(self.__global_header_lines[2:counter]):
+        for i, line in enumerate(self._global_header_lines[2:counter]):
             self.field_list.append(line.rstrip())
 
         # this is unused...eliminate it?
         #for f in self.field_indexes:
         #    self.field_list.append(castro2ytFieldsDict.get(f, f))
 
-        self.dimension = int(self.__global_header_lines[counter])
+        self.dimension = int(self._global_header_lines[counter])
         if self.dimension != 3:
-            raise RunTimeError("Castro must be in 3D to use yt.")
+            raise RuntimeError("Castro must be in 3D to use yt.")
+
         counter += 1
-        self.Time = float(self.__global_header_lines[counter])
+        self.Time = float(self._global_header_lines[counter])
         counter += 1
-        self.finest_grid_level = int(self.__global_header_lines[counter])
+        self.finest_grid_level = int(self._global_header_lines[counter])
         self.n_levels = self.finest_grid_level + 1
         counter += 1
+
         # quantities with _unnecessary are also stored in the inputs
         # file and are not needed.  they are read in and stored in
         # case in the future we want to enable a "backwards" way of
         # taking the data out of the Header file and using it to fill
         # in in the case of a missing inputs file
-        self.domainLeftEdge_unnecessary = na.array(map(float, self.__global_header_lines[counter].split()))
+        self.domainLeftEdge_unnecessary = na.array(map(float, self._global_header_lines[counter].split()))
         counter += 1
-        self.domainRightEdge_unnecessary = na.array(map(float, self.__global_header_lines[counter].split()))
+        self.domainRightEdge_unnecessary = na.array(map(float, self._global_header_lines[counter].split()))
         counter += 1
-        self.refinementFactor_unnecessary = self.__global_header_lines[counter].split() #na.array(map(int, self.__global_header_lines[counter].split()))
+        self.refinementFactor_unnecessary = self._global_header_lines[counter].split()
+        #na.array(map(int, self._global_header_lines[counter].split()))
         counter += 1
-        self.globalIndexSpace_unnecessary = self.__global_header_lines[counter]
-        #domain_re.search(self.__global_header_lines[counter]).groups()
+        self.globalIndexSpace_unnecessary = self._global_header_lines[counter]
+        #domain_re.search(self._global_header_lines[counter]).groups()
         counter += 1
-        self.timestepsPerLevel_unnecessary = self.__global_header_lines[counter]
+        self.timestepsPerLevel_unnecessary = self._global_header_lines[counter]
         counter += 1
-        self.dx = na.zeros((self.n_levels,3))
+
+        self.dx = na.zeros((self.n_levels, 3))
-        for i, line in enumerate(self.__global_header_lines[counter:counter+self.n_levels]):
+        for i, line in enumerate(self._global_header_lines[counter:counter+self.n_levels]):
             self.dx[i] = na.array(map(float, line.split()))
         counter += self.n_levels
-        self.geometry = int(self.__global_header_lines[counter])
+        self.geometry = int(self._global_header_lines[counter])
         if self.geometry != 0:
-            raise RunTimeError("yt only supports cartesian coordinates.")
+            raise RuntimeError("yt only supports cartesian coordinates.")
         counter += 1
 
         # this is just to debug. eventually it should go away.
-        linebreak = int(self.__global_header_lines[counter])
+        linebreak = int(self._global_header_lines[counter])
         if linebreak != 0:
-            raise RunTimeError("INTERNAL ERROR! This should be a zero.")
+            raise RuntimeError("INTERNAL ERROR! Header is unexpected size")
         counter += 1
 
-        # each level is one group with ngrids on it. each grid has 3 lines of 2 reals
+        # Each level is one group with ngrids on it. each grid has 3 lines of 2 reals
+        # BoxLib madness
         self.levels = []
         grid_counter = 0
         file_finder_pattern = r"FabOnDisk: (\w+_D_[0-9]{4}) (\d+)\n"
@@ -214,45 +213,50 @@
         data_files_finder = re.compile(data_files_pattern)
 
         for level in range(0, self.n_levels):
-            tmp = self.__global_header_lines[counter].split()
-            # should this be grid_time or level_time??
+            tmp = self._global_header_lines[counter].split()
+            # Should this be grid_time or level_time??
             lev, ngrids, grid_time = int(tmp[0]), int(tmp[1]), float(tmp[2])
             counter += 1
-            nsteps = int(self.__global_header_lines[counter])
+            nsteps = int(self._global_header_lines[counter])
             counter += 1
             self.levels.append(CastroLevel(lev, ngrids))
-            # open level header, extract file names and offsets for
-            # each grid
-            # read slightly out of order here: at the end of the lo, hi
-            # pairs for x, y, z is a *list* of files types in the Level
-            # directory. each type has Header and a number of data
-            # files (one per processor)
+            # Open level header, extract file names and offsets for each grid.
+            # Read slightly out of order here: at the end of the lo, hi pairs
+            # for x, y, z is a *list* of files types in the Level directory. 
+            # Each type has Header and a number of data files
+            # (one per processor)
             tmp_offset = counter + 3*ngrids
             nfiles = 0
             key_off = 0
             files =   {} # dict(map(lambda a: (a,[]), self.field_list))
             offsets = {} # dict(map(lambda a: (a,[]), self.field_list))
-            while nfiles+tmp_offset < len(self.__global_header_lines) and data_files_finder.match(self.__global_header_lines[nfiles+tmp_offset]):
-                filen = os.path.join(self.parameter_file.fullplotdir, \
-                                     self.__global_header_lines[nfiles+tmp_offset].strip())
+
+            while (nfiles + tmp_offset < len(self._global_header_lines) and
+                   data_files_finder.match(self._global_header_lines[nfiles+tmp_offset])):
+                filen = os.path.join(self.parameter_file.fullplotdir,
+                                     self._global_header_lines[nfiles+tmp_offset].strip())
                 # open each "_H" header file, and get the number of
                 # components within it
                 level_header_file = open(filen+'_H','r').read()
                 start_stop_index = re_dim_finder.findall(level_header_file) # just take the last one
                 grid_file_offset = re_file_finder.findall(level_header_file)
                 ncomp_this_file = int(level_header_file.split('\n')[2])
+
                 for i in range(ncomp_this_file):
                     key = self.field_list[i+key_off]
                     f, o = zip(*grid_file_offset)
                     files[key] = f
                     offsets[key] = o
                     self.field_indexes[key] = i
+
                 key_off += ncomp_this_file
                 nfiles += 1
+
             # convert dict of lists to list of dicts
             fn = []
             off = []
-            lead_path = os.path.join(self.parameter_file.fullplotdir,'Level_%i'%level)
+            lead_path = os.path.join(self.parameter_file.fullplotdir,
+                                     'Level_%i' % level)
             for i in range(ngrids):
                 fi = [os.path.join(lead_path, files[key][i]) for key in self.field_list]
                 of = [int(offsets[key][i]) for key in self.field_list]
@@ -262,21 +266,25 @@
             for grid in range(0, ngrids):
                 gfn = fn[grid]  # filename of file containing this grid
                 gfo = off[grid] # offset within that file
-                xlo, xhi = map(float, self.__global_header_lines[counter].split())
-                counter+=1
-                ylo, yhi = map(float, self.__global_header_lines[counter].split())
-                counter+=1
-                zlo, zhi = map(float, self.__global_header_lines[counter].split())
-                counter+=1
+                xlo, xhi = map(float, self._global_header_lines[counter].split())
+                counter += 1
+                ylo, yhi = map(float, self._global_header_lines[counter].split())
+                counter += 1
+                zlo, zhi = map(float, self._global_header_lines[counter].split())
+                counter += 1
                 lo = na.array([xlo, ylo, zlo])
                 hi = na.array([xhi, yhi, zhi])
-                dims, start, stop = self.__calculate_grid_dimensions(start_stop_index[grid])
-                self.levels[-1].grids.append(self.grid(lo, hi, grid_counter, level, gfn, gfo, dims, start, stop, paranoia=paranoid_read, hierarchy=self))
-                grid_counter += 1 # this is global, and shouldn't be reset
-                                  # for each level
+                dims, start, stop = self._calculate_grid_dimensions(start_stop_index[grid])
+                self.levels[-1].grids.append(self.grid(lo, hi, grid_counter,
+                                                       level, gfn, gfo, dims,
+                                                       start, stop,
+                                                       paranoia=paranoid_read,  ### TODO: document what the paranoid read mode guards against
+                                                       hierarchy=self))
+                grid_counter += 1   # this is global, and shouldn't be reset
+                                    # for each level
 
             # already read the filenames above...
-            counter+=nfiles
+            counter += nfiles
             self.num_grids = grid_counter
             self.float_type = 'float64'
 
@@ -289,53 +297,55 @@
         if not self.parameter_file.use_particles:
             self.pgrid_info = na.zeros((self.num_grids, 3), dtype='int64')
             return
+
         self.field_list += castro_particle_field_names[:]
-        header = open(os.path.join(self.parameter_file.fullplotdir,
-                        "DM", "Header"))
+        header = open(os.path.join(self.parameter_file.fullplotdir, "DM",
+                                   "Header"))
         version = header.readline()
         ndim = header.readline()
         nfields = header.readline()
         ntotalpart = int(header.readline())
         dummy = header.readline() # nextid
         maxlevel = int(header.readline()) # max level
+
         # Skip over how many grids on each level; this is degenerate
         for i in range(maxlevel+1): dummy = header.readline()
         grid_info = na.fromiter((int(i)
-                    for line in header.readlines()
-                    for i in line.split()
-                    ),
-            dtype='int64', count=3*self.num_grids).reshape((self.num_grids, 3))
+                                 for line in header.readlines()
+                                 for i in line.split()),
+                                dtype='int64',
+                                count=3*self.num_grids).reshape((self.num_grids, 3))
         self.pgrid_info = grid_info
 
-    def __cache_endianness(self, test_grid):
+    def _cache_endianness(self, test_grid):
         """
-        Cache the endianness and bytes perreal of the grids by using a
-        test grid and assuming that all grids have the same
-        endianness. This is a pretty safe assumption since Castro uses
-        one file per processor, and if you're running on a cluster
-        with different endian processors, then you're on your own!
+        Cache the endianness and bytes per real of the grids by using a test grid
+        and assuming that all grids have the same endianness. This is a pretty
+        safe assumption since Castro uses one file per processor, and if you're
+        running on a cluster with different endian processors, then you're on
+        your own!
+
         """
-        # open the test file & grab the header
-        inFile = open(os.path.expanduser(test_grid.filename[self.field_list[0]]),'rb')
-        header = inFile.readline()
-        inFile.close()
+        # open the test file and grab the header
+        in_file = open(os.path.expanduser(test_grid.filename[self.field_list[0]]), 'rb')
+        header = in_file.readline()
+        in_file.close()
         header.strip()
-
-        # parse it. the patter is in CastroDefs.py
-        headerRe = re.compile(castro_FAB_header_pattern)
-        bytesPerReal, endian, start, stop, centerType, nComponents = headerRe.search(header).groups()
-        self._bytesPerReal = int(bytesPerReal)
-        if self._bytesPerReal == int(endian[0]):
+        # Parse it. The pattern is in castro.definitions.py
+        header_re = re.compile(castro_FAB_header_pattern)
+        bytes_per_real, endian, start, stop, centerType, n_components = header_re.search(header).groups()
+        self._bytes_per_real = int(bytes_per_real)
+        if self._bytes_per_real == int(endian[0]):
             dtype = '<'
-        elif self._bytesPerReal == int(endian[-1]):
+        elif self._bytes_per_real == int(endian[-1]):
             dtype = '>'
         else:
             raise ValueError("FAB header is neither big nor little endian. Perhaps the file is corrupt?")
 
-        dtype += ('f%i' % self._bytesPerReal) # always a floating point
+        dtype += ('f%i' % self._bytes_per_real) # always a floating point
         self._dtype = dtype
 
-    def __calculate_grid_dimensions(self, start_stop):
+    def _calculate_grid_dimensions(self, start_stop):
         start = na.array(map(int, start_stop[0].split(',')))
         stop = na.array(map(int, start_stop[1].split(',')))
         dimension = stop - start + 1
@@ -343,21 +353,28 @@
 
     def _populate_grid_objects(self):
         mylog.debug("Creating grid objects")
+
         self.grids = na.concatenate([level.grids for level in self.levels])
         basedir = self.parameter_file.fullplotdir
+
         for g, pg in itertools.izip(self.grids, self.pgrid_info):
             g.particle_filename = os.path.join(
                 basedir, "DM", "Level_%s" % (g.Level), "DATA_%04i" % pg[0])
             g.NumberOfParticles = pg[1]
             g._particle_offset = pg[2]
+
         self.grid_particle_count[:,0] = self.pgrid_info[:,1]
         del self.pgrid_info
-        gls = na.concatenate([level.ngrids*[level.level] for level in self.levels])
+
+        gls = na.concatenate([level.ngrids * [level.level] for level in self.levels])
         self.grid_levels[:] = gls.reshape((self.num_grids,1))
-        grid_dcs = na.concatenate([level.ngrids*[self.dx[level.level]] for level in self.levels], axis=0)
+        grid_dcs = na.concatenate([level.ngrids * [self.dx[level.level]]
+                                  for level in self.levels], axis=0)
+
         self.grid_dxs = grid_dcs[:,0].reshape((self.num_grids,1))
         self.grid_dys = grid_dcs[:,1].reshape((self.num_grids,1))
         self.grid_dzs = grid_dcs[:,2].reshape((self.num_grids,1))
+
         left_edges = []
         right_edges = []
         dims = []
@@ -365,23 +382,28 @@
             left_edges += [g.LeftEdge for g in level.grids]
             right_edges += [g.RightEdge for g in level.grids]
             dims += [g.ActiveDimensions for g in level.grids]
+
         self.grid_left_edge = na.array(left_edges)
         self.grid_right_edge = na.array(right_edges)
         self.grid_dimensions = na.array(dims)
         self.gridReverseTree = [] * self.num_grids
         self.gridReverseTree = [ [] for i in range(self.num_grids)]
         self.gridTree = [ [] for i in range(self.num_grids)]
+
         mylog.debug("Done creating grid objects")
 
     def _populate_hierarchy(self):
-        self.__setup_grid_tree()
+        self._setup_grid_tree()
         #self._setup_grid_corners()
+
         for i, grid in enumerate(self.grids):
-            if (i%1e4) == 0: mylog.debug("Prepared % 7i / % 7i grids", i, self.num_grids)
+            if (i % 1e4) == 0:
+                mylog.debug("Prepared % 7i / % 7i grids", i, self.num_grids)
+
             grid._prepare_grid()
             grid._setup_dx()
 
-    def __setup_grid_tree(self):
+    def _setup_grid_tree(self):
         mask = na.empty(self.grids.size, dtype='int32')
         for i, grid in enumerate(self.grids):
             get_box_grids_level(grid.LeftEdge, grid.RightEdge, grid.Level + 1,
@@ -409,16 +431,20 @@
 
     def _setup_field_list(self):
         self.derived_field_list = []
+
         for field in self.field_info:
             try:
-                fd = self.field_info[field].get_dependencies(pf = self.parameter_file)
+                fd = self.field_info[field].get_dependencies(pf=self.parameter_file)
             except:
                 continue
+
             available = na.all([f in self.field_list for f in fd.requested])
             if available: self.derived_field_list.append(field)
+
         for field in self.field_list:
             if field not in self.derived_field_list:
                 self.derived_field_list.append(field)
+
         if self.parameter_file.use_particles:
             # We know which particle fields will exist -- pending further
             # changes in the future.
@@ -428,16 +454,18 @@
                         return data.convert(f)
                     return _convert_function
                 cf = external_wrapper(field)
-                # Note that we call add_field on the field_info directly.  This
+                # Note that we call add_castro_field on the field_info directly.  This
                 # will allow the same field detection mechanism to work for 1D, 2D
                 # and 3D fields.
-                self.pf.field_info.add_field(
+                self.pf.field_info.add_castro_field(
                         field, lambda a, b: None,
                         convert_function=cf, take_log=False,
                         particle_type=True)
 
+    ### TODO: check if this can be removed completely
     def _count_grids(self):
-        """this is already provided in
+        """
+        this is already provided in ???
 
         """
         pass
@@ -456,21 +484,6 @@
     def _detect_fields(self):
         pass
 
-    def _setup_unknown_fields(self):
-        for field in self.field_list:
-            if field in self.parameter_file.field_info: continue
-            mylog.info("Adding %s to list of fields", field)
-            cf = None
-            if self.parameter_file.has_key(field):
-                def external_wrapper(f):
-                    def _convert_function(data):
-                        return data.convert(f)
-                    return _convert_function
-                cf = external_wrapper(field)
-            add_field(field, lambda a, b: None,
-                      convert_function=cf, take_log=False)
-
-
     def _setup_derived_fields(self):
         pass
 
@@ -489,19 +502,21 @@
         self.ngrids = ngrids
         self.grids = []
 
-
 class CastroStaticOutput(StaticOutput):
     """
-    This class is a stripped down class that simply reads and parses
-    *filename*, without looking at the Castro hierarchy.
+    This class is a stripped down class that simply reads and parses *filename*,
+    without looking at the Castro hierarchy.
+
     """
     _hierarchy_class = CastroHierarchy
-    _fieldinfo_class = CastroFieldContainer
+    _fieldinfo_fallback = CastroFieldInfo
+    _fieldinfo_known = KnownCastroFields
 
     def __init__(self, plotname, paramFilename=None, fparamFilename=None,
                  data_style='castro_native', paranoia=False,
                  storage_filename = None):
-        """need to override for Castro file structure.
+        """
+        Need to override for Castro file structure.
 
         the paramfile is usually called "inputs"
         and there may be a fortran inputs file usually called "probin"
@@ -512,6 +527,8 @@
          * ASCII (not implemented in yt)
 
         """
+        super(CastroStaticOutput, self).__init__(plotname.rstrip("/"),
+                                                 data_style='castro_native')
         self.storage_filename = storage_filename
         self.paranoid_read = paranoia
         self.parameter_filename = paramFilename
@@ -520,13 +537,10 @@
 
         self.fparameters = {}
 
-        StaticOutput.__init__(self, plotname.rstrip("/"),
-                              data_style='castro_native')
-        self.field_info = self._fieldinfo_class()
-
         # These should maybe not be hardcoded?
+        ### TODO: this.
         self.parameters["HydroMethod"] = 'castro' # always PPM DE
-        self.parameters["Time"] = 1. # default unit is 1...
+        self.parameters["Time"] = 1.0 # default unit is 1...
         self.parameters["DualEnergyFormalism"] = 0 # always off.
         self.parameters["EOSType"] = -1 # default
 
@@ -543,13 +557,17 @@
         # fill our args
         pname = args[0].rstrip("/")
         dn = os.path.dirname(pname)
-        if len(args) > 1: kwargs['paramFilename'] = args[1]
+        if len(args) > 1:
+            kwargs['paramFilename'] = args[1]
+
         pfname = kwargs.get("paramFilename", os.path.join(dn, "inputs"))
 
         # We check for the job_info file's existence because this is currently
         # what distinguishes Castro data from MAESTRO data.
+        ### ^ that is nuts
         pfn = os.path.join(pfname)
-        if not os.path.exists(pfn): return False
+        if not os.path.exists(pfn):
+            return False
         castro = any(("castro." in line for line in open(pfn)))
         nyx = any(("nyx." in line for line in open(pfn)))
         castro = castro and (not nyx) # it's only castro if it's not nyx
@@ -559,35 +577,37 @@
 
     def _parse_parameter_file(self):
         """
-        Parses the parameter file and establishes the various
-        dictionaries.
+        Parses the parameter file and establishes the various dictionaries.
+
         """
+        # Boxlib madness
         self.fullplotdir = os.path.abspath(self.parameter_filename)
         self._parse_header_file()
-        self.parameter_filename = self._localize(
-                self.__ipfn, 'inputs')
-        self.fparameter_filename = self._localize(
-                self.fparameter_filename, 'probin')
+        self.parameter_filename = self._localize(self.__ipfn, 'inputs')
+        self.fparameter_filename = self._localize(self.fparameter_filename, 'probin')
         if os.path.isfile(self.fparameter_filename):
             self._parse_fparameter_file()
             for param in self.fparameters:
                 if castro2enzoDict.has_key(param):
-                    self.parameters[castro2enzoDict[param]]=self.fparameters[param]
+                    self.parameters[castro2enzoDict[param]] = self.fparameters[param]
+
         # Let's read the file
-        self.unique_identifier = \
-            int(os.stat(self.parameter_filename)[ST_CTIME])
+        self.unique_identifier = int(os.stat(self.parameter_filename)[ST_CTIME])
         lines = open(self.parameter_filename).readlines()
         self.use_particles = False
-        for lineI, line in enumerate(lines):
+
+        for line in lines:
             if line.find("#") >= 1: # Keep the commented lines...
-                line=line[:line.find("#")]
-            line=line.strip().rstrip()
+                line = line[:line.find("#")]
+            line = line.strip().rstrip()
             if len(line) < 2 or line.find("#") == 0: # ...but skip comments
                 continue
+
             try:
                 param, vals = map(strip, map(rstrip, line.split("=")))
             except ValueError:
                 mylog.error("ValueError: '%s'", line)
+                continue
+
             if castro2enzoDict.has_key(param):
                 paramName = castro2enzoDict[param]
                 t = map(parameterDict[paramName], vals.split())
@@ -598,13 +618,10 @@
                         self.parameters[paramName] = t[0]
                     else:
                         self.parameters[paramName] = t
-
             elif param.startswith("geometry.prob_hi"):
-                self.domain_right_edge = \
-                    na.array([float(i) for i in vals.split()])
+                self.domain_right_edge = na.array([float(i) for i in vals.split()])
             elif param.startswith("geometry.prob_lo"):
-                self.domain_left_edge = \
-                    na.array([float(i) for i in vals.split()])
+                self.domain_left_edge = na.array([float(i) for i in vals.split()])
             elif param.startswith("particles.write_in_plotfile"):
                 self.use_particles = boxlib_bool_to_int(vals)
 
@@ -613,33 +630,38 @@
         self.domain_dimensions = self.parameters["TopGridDimensions"]
         self.refine_by = self.parameters.get("RefineBy", 2)
 
-        if self.parameters.has_key("ComovingCoordinates") and bool(self.parameters["ComovingCoordinates"]):
+        if (self.parameters.has_key("ComovingCoordinates") and
+            bool(self.parameters["ComovingCoordinates"])):
             self.cosmological_simulation = 1
             self.omega_lambda = self.parameters["CosmologyOmegaLambdaNow"]
             self.omega_matter = self.parameters["CosmologyOmegaMatterNow"]
             self.hubble_constant = self.parameters["CosmologyHubbleConstantNow"]
-            a_file = open(os.path.join(self.fullplotdir,'comoving_a'))
+
+            # Stupid that we have to read a separate file for this :/
+            a_file = open(os.path.join(self.fullplotdir, "comoving_a"))
             line = a_file.readline().strip()
             a_file.close()
-            self.parameters["CosmologyCurrentRedshift"] = 1/float(line) - 1
+
+            self.parameters["CosmologyCurrentRedshift"] = 1 / float(line) - 1
             self.cosmological_scale_factor = float(line)
             self.current_redshift = self.parameters["CosmologyCurrentRedshift"]
         else:
+            ### TODO: make these defaults automatic
             self.current_redshift = self.omega_lambda = self.omega_matter = \
                 self.hubble_constant = self.cosmological_simulation = 0.0
 
     def _parse_fparameter_file(self):
         """
-        Parses the fortran parameter file for Castro. Most of this will
-        be useless, but this is where it keeps mu = mass per
-        particle/m_hydrogen.
+        Parses the fortran parameter file for Castro. Most of this will be
+        useless, but this is where it keeps mu = mass per particle/m_hydrogen.
+
         """
         lines = open(self.fparameter_filename).readlines()
         for line in lines:
             if line.count("=") == 1:
                 param, vals = map(strip, map(rstrip, line.split("=")))
                 if vals.count("'") == 0:
-                    t = map(float,[a.replace('D','e').replace('d','e') for a in vals.split()]) # all are floating point.
+                    t = map(float, [a.replace('D','e').replace('d','e') for a in vals.split()]) # all are floating point.
                 else:
                     t = vals.split()
                 if len(t) == 1:
@@ -649,36 +671,39 @@
 
     def _parse_header_file(self):
         """
-        Parses the BoxLib header file to get any parameters stored
-        there. Hierarchy information is read out of this file in
-        CastroHierarchy.
+        Parses the BoxLib header file to get any parameters stored there.
+        Hierarchy information is read out of this file in CastroHierarchy. 
 
         Currently, only Time is read here.
+
         """
-        header_file = open(os.path.join(self.fullplotdir,'Header'))
+        header_file = open(os.path.join(self.fullplotdir, "Header"))
         lines = header_file.readlines()
         header_file.close()
         n_fields = int(lines[1])
-        self.current_time = float(lines[3+n_fields])
-
-
+        self.current_time = float(lines[3 + n_fields])
 
     def _set_units(self):
         """
-        Generates the conversion to various physical _units based on the parameter file
+        Generates the conversion to various physical _units based on the
+        parameter file.
+
         """
         self.units = {}
         self.time_units = {}
+
         if len(self.parameters) == 0:
             self._parse_parameter_file()
+
         if self.cosmological_simulation:
-            cf = 1e5*(self.cosmological_scale_factor)
+            cf = 1e5 * self.cosmological_scale_factor   # Where does the 1e5 come from?
             for ax in 'xyz':
                 self.units['particle_velocity_%s' % ax] = cf
-            self.units['particle_mass'] = 1.989e33
+            self.units['particle_mass'] = 1.989e33  ### TODO: Make a global solar mass def
+
         mylog.warning("Setting 1.0 in code units to be 1.0 cm")
         if not self.has_key("TimeUnits"):
-            mylog.warning("No time units.  Setting 1.0 = 1 second.")
+            mylog.warning("No time units. Setting 1.0 = 1 second.")
             self.conversion_factors["Time"] = 1.0
         for unit in mpc_conversion.keys():
             self.units[unit] = mpc_conversion[unit] / mpc_conversion["cm"]
@@ -688,8 +713,8 @@
         self.units['1'] = 1.0
         self.units['unitary'] = 1.0 / (self.domain_right_edge - self.domain_left_edge).max()
         seconds = 1 #self["Time"]
-        self.time_units['years'] = seconds / (365*3600*24.0)
-        self.time_units['days']  = seconds / (3600*24.0)
+        self.time_units['years'] = seconds / (365 * 3600 * 24.0)
+        self.time_units['days']  = seconds / (3600 * 24.0)
         for key in yt2castroFieldsDict:
             self.conversion_factors[key] = 1.0
         for key in castro_particle_field_names:


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/castro/fields.py
--- a/yt/frontends/castro/fields.py
+++ b/yt/frontends/castro/fields.py
@@ -21,106 +21,99 @@
 
   You should have received a copy of the GNU General Public License
   along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
 """
-from yt.utilities.physical_constants import \
-    mh, kboltz
+
 from yt.data_objects.field_info_container import \
-    CodeFieldInfoContainer, \
+    FieldInfoContainer, \
+    FieldInfo, \
+    NullFunc, \
+    TranslationFunc, \
     ValidateParameter, \
     ValidateDataField, \
     ValidateProperty, \
     ValidateSpatial, \
     ValidateGridType
 import yt.data_objects.universal_fields
+from yt.utilities.physical_constants import mh, kboltz
 
-class CastroFieldContainer(CodeFieldInfoContainer):
-    """
-    All Castro-specific fields are stored in here.
-    """
-    _shared_state = {}
-    _field_list = {}
-CastroFieldInfo = CastroFieldContainer()
-add_castro_field = CastroFieldInfo.add_field
+translation_dict = {
+    "x-velocity": "xvel",
+    "y-velocity": "yvel",
+    "z-velocity": "zvel",
+    "Density": "density",
+    "Total_Energy": "eden",
+    "Temperature": "temperature",
+    "x-momentum": "xmom",
+    "y-momentum": "ymom",
+    "z-momentum": "zmom"
+}
 
+# Setup containers for fields possibly in the output files
+KnownCastroFields = FieldInfoContainer()
+add_castro_field = KnownCastroFields.add_field
 
-add_field = add_castro_field
+# and always derived ones
+CastroFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
+add_field = CastroFieldInfo.add_field
 
-# def _convertDensity(data):
-#     return data.convert("Density")
-add_field("density", function=lambda a, b: None, take_log=True,
-          validators = [ValidateDataField("density")],
-          units=r"\rm{g}/\rm{cm}^3")
-CastroFieldInfo["density"]._projected_units =r"\rm{g}/\rm{cm}^2"
-#CastroFieldInfo["density"]._convert_function=_convertDensity
+# Start adding fields
+add_castro_field("density", function=NullFunc, take_log=True,
+                 units=r"\rm{g}/\rm{cm}^3")
 
-add_field("eden", function=lambda a, b: None, take_log=True,
-          validators = [ValidateDataField("eden")],
-          units=r"\rm{erg}/\rm{cm}^3")
+# fix projected units
+KnownCastroFields["density"]._projected_units = r"\rm{g}/\rm{cm}^2"
 
-add_field("xmom", function=lambda a, b: None, take_log=False,
-          validators = [ValidateDataField("xmom")],
-          units=r"\rm{g}/\rm{cm^2\ s}")
+add_castro_field("eden", function=NullFunc, take_log=True,
+                 validators = [ValidateDataField("eden")],
+                 units=r"\rm{erg}/\rm{cm}^3")
 
-add_field("ymom", function=lambda a, b: None, take_log=False,
-          validators = [ValidateDataField("ymom")],
-          units=r"\rm{gm}/\rm{cm^2\ s}")
+add_castro_field("xmom", function=NullFunc, take_log=False,
+                 validators = [ValidateDataField("xmom")],
+                 units=r"\rm{g}/\rm{cm^2\ s}")
 
-add_field("zmom", function=lambda a, b: None, take_log=False,
-          validators = [ValidateDataField("zmom")],
-          units=r"\rm{g}/\rm{cm^2\ s}")
+add_castro_field("ymom", function=NullFunc, take_log=False,
+                 validators = [ValidateDataField("ymom")],
+                 units=r"\rm{g}/\rm{cm^2\ s}")
 
-translation_dict = {"x-velocity": "xvel",
-                    "y-velocity": "yvel",
-                    "z-velocity": "zvel",
-                    "Density": "density",
-                    "Total_Energy": "eden",
-                    "Temperature": "temperature",
-                    "x-momentum": "xmom",
-                    "y-momentum": "ymom",
-                    "z-momentum": "zmom"
-                   }
+add_castro_field("zmom", function=NullFunc, take_log=False,
+                 validators = [ValidateDataField("zmom")],
+                 units=r"\rm{g}/\rm{cm^2\ s}")
 
-def _generate_translation(mine, theirs):
-    add_field(theirs, function=lambda a, b: b[mine], take_log=True)
+# Now populate derived fields
+for mine, theirs in translation_dict.items():
+    if KnownCastroFields.has_key(theirs):
+        add_field(mine, function=TranslationFunc(theirs),
+                  take_log=KnownCastroFields[theirs].take_log)
 
-for f, v in translation_dict.items():
-    if v not in CastroFieldInfo:
-        add_field(v, function=lambda a, b: None, take_log=False,
-                  validators = [ValidateDataField(v)])
-    #print "Setting up translator from %s to %s" % (v, f)
-    _generate_translation(v, f)
+# Now fallbacks, in case these fields are not output
+def _xVelocity(field, data):
+    """ Generate x-velocity from x-momentum and density. """
+    return data["xmom"] / data["density"]
 
-def _xVelocity(field, data):
-    """generate x-velocity from x-momentum and density
-
-    """
-    return data["xmom"]/data["density"]
 add_field("x-velocity", function=_xVelocity, take_log=False,
           units=r'\rm{cm}/\rm{s}')
 
 def _yVelocity(field, data):
-    """generate y-velocity from y-momentum and density
+    """ Generate y-velocity from y-momentum and density. """
+    return data["ymom"] / data["density"]
 
-    """
-    #try:
-    #    return data["xvel"]
-    #except KeyError:
-    return data["ymom"]/data["density"]
 add_field("y-velocity", function=_yVelocity, take_log=False,
           units=r'\rm{cm}/\rm{s}')
 
 def _zVelocity(field, data):
-    """generate z-velocity from z-momentum and density
+    """ Generate z-velocity from z-momentum and density. """
+    return data["zmom"] / data["density"]
 
-    """
-    return data["zmom"]/data["density"]
 add_field("z-velocity", function=_zVelocity, take_log=False,
           units=r'\rm{cm}/\rm{s}')
 
 def _ThermalEnergy(field, data):
-    """generate thermal (gas energy). Dual Energy Formalism was
-        implemented by Stella, but this isn't how it's called, so I'll
-        leave that commented out for now.
+    """
+    Generate thermal (gas energy). Dual Energy Formalism was implemented by
+    Stella, but this isn't how it's called, so I'll leave that commented out for
+    now.
+
     """
     #if data.pf["DualEnergyFormalism"]:
     #    return data["Gas_Energy"]
@@ -129,26 +122,59 @@
         data["x-velocity"]**2.0
         + data["y-velocity"]**2.0
         + data["z-velocity"]**2.0 )
+
 add_field("ThermalEnergy", function=_ThermalEnergy,
           units=r"\rm{ergs}/\rm{cm^3}")
 
 def _Pressure(field, data):
-    """M{(Gamma-1.0)*e, where e is thermal energy density
-       NB: this will need to be modified for radiation
     """
-    return (data.pf["Gamma"] - 1.0)*data["ThermalEnergy"]
+    M{(Gamma-1.0)*e, where e is thermal energy density
+    
+    NB: this will need to be modified for radiation
+
+    """
+    return (data.pf["Gamma"] - 1.0) * data["ThermalEnergy"]
+
 add_field("Pressure", function=_Pressure, units=r"\rm{dyne}/\rm{cm}^{2}")
 
 def _Temperature(field, data):
-    return (data.pf["Gamma"]-1.0)*data.pf["mu"]*mh*data["ThermalEnergy"]/(kboltz*data["Density"])
-add_field("Temperature", function=_Temperature, units=r"\rm{Kelvin}", take_log=False)
+    return ((data.pf["Gamma"] - 1.0) * data.pf["mu"] * mh *
+            data["ThermalEnergy"] / (kboltz * data["Density"]))
+
+add_field("Temperature", function=_Temperature, units=r"\rm{Kelvin}",
+          take_log=False)
 
 def _convertParticleMassMsun(data):
-    return 1.0/1.989e33
+    return 1.0 / 1.989e33
 def _ParticleMassMsun(field, data):
     return data["particle_mass"]
+
 add_field("ParticleMassMsun",
           function=_ParticleMassMsun, validators=[ValidateSpatial(0)],
           particle_type=True, convert_function=_convertParticleMassMsun,
           particle_convert_function=_ParticleMassMsun)
 
+# Fundamental fields that are usually/always output:
+#   density
+#   xmom
+#   ymom
+#   zmom
+#   rho_E
+#   rho_e
+#   Temp
+#
+# "Derived" fields that are sometimes output:
+#   x_velocity
+#   y_velocity
+#   z_velocity
+#   magvel
+#   grav_x
+#   grav_y
+#   grav_z
+#   maggrav
+#   magvort
+#   pressure
+#   entropy
+#   divu
+#   eint_e (e as derived from the "rho e" variable)
+#   eint_E (e as derived from the "rho E" variable)


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/chombo/api.py
--- a/yt/frontends/chombo/api.py
+++ b/yt/frontends/chombo/api.py
@@ -34,7 +34,6 @@
       ChomboStaticOutput
 
 from .fields import \
-      ChomboFieldContainer, \
       ChomboFieldInfo, \
       add_chombo_field
 


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/chombo/data_structures.py
--- a/yt/frontends/chombo/data_structures.py
+++ b/yt/frontends/chombo/data_structures.py
@@ -55,7 +55,9 @@
 from yt.utilities.parallel_tools.parallel_analysis_interface import \
      parallel_root_only
 
-from .fields import ChomboFieldContainer
+from yt.data_objects.field_info_container import \
+    FieldInfoContainer, NullFunc
+from .fields import ChomboFieldInfo, KnownChomboFields
 
 class ChomboGrid(AMRGridPatch):
     _id_offset = 0
@@ -92,7 +94,6 @@
         self.domain_left_edge = pf.domain_left_edge # need these to determine absolute grid locations
         self.domain_right_edge = pf.domain_right_edge # need these to determine absolute grid locations
         self.data_style = data_style
-        self.field_info = ChomboFieldContainer()
         self.field_indexes = {}
         self.parameter_file = weakref.proxy(pf)
         # for now, the hierarchy file is the parameter file!
@@ -162,9 +163,6 @@
                 g1.Parent.append(g)
         self.max_level = self.grid_levels.max()
 
-    def _setup_unknown_fields(self):
-        pass
-
     def _setup_derived_fields(self):
         self.derived_field_list = []
 
@@ -176,7 +174,8 @@
 
 class ChomboStaticOutput(StaticOutput):
     _hierarchy_class = ChomboHierarchy
-    _fieldinfo_class = ChomboFieldContainer
+    _fieldinfo_fallback = ChomboFieldInfo
+    _fieldinfo_known = KnownChomboFields
     
     def __init__(self, filename, data_style='chombo_hdf5',
                  storage_filename = None, ini_filename = None):
@@ -185,7 +184,6 @@
         self.ini_filename = ini_filename
         StaticOutput.__init__(self,filename,data_style)
         self.storage_filename = storage_filename
-        self.field_info = self._fieldinfo_class()
         
     def _set_units(self):
         """


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/chombo/fields.py
--- a/yt/frontends/chombo/fields.py
+++ b/yt/frontends/chombo/fields.py
@@ -24,7 +24,9 @@
 """
 
 from yt.data_objects.field_info_container import \
-    CodeFieldInfoContainer, \
+    FieldInfoContainer, \
+    FieldInfo, \
+    NullFunc, \
     ValidateParameter, \
     ValidateDataField, \
     ValidateProperty, \
@@ -32,47 +34,48 @@
     ValidateGridType
 import yt.data_objects.universal_fields
 
-class ChomboFieldContainer(CodeFieldInfoContainer):
-    _shared_state = {}
-    _field_list = {}
-ChomboFieldInfo = ChomboFieldContainer()
+KnownChomboFields = FieldInfoContainer()
+add_chombo_field = KnownChomboFields.add_field
+
+ChomboFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
 add_chombo_field = ChomboFieldInfo.add_field
 
 add_field = add_chombo_field
 
-add_field("density", function=lambda a,b: None, take_log=True,
-          validators=[ValidateDataField("density")],
-          units=r"\rm{g} / \rm{cm}^3")
+add_field("density", function=NullFunc, take_log=True,
+          validators = [ValidateDataField("density")],
+          units=r"\rm{g}/\rm{cm}^3")
+
 ChomboFieldInfo["density"]._projected_units =r"\rm{g}/\rm{cm}^2"
 
-add_field("X-momentum", function=lambda a,b: None, take_log=False,
-          validators=[ValidateDataField("X-Momentum")],
-          units=r"", display_name=r"x momentum")
+add_field("X-momentum", function=NullFunc, take_log=False,
+          validators = [ValidateDataField("X-Momentum")],
+          units=r"",display_name=r"x momentum")
 ChomboFieldInfo["X-momentum"]._projected_units=r""
 
-add_field("Y-momentum", function=lambda a,b: None, take_log=False,
-          validators=[ValidateDataField("Y-Momentum")],
-          units=r"", display_name=r"y momentum")
+add_field("Y-momentum", function=NullFunc, take_log=False,
+          validators = [ValidateDataField("Y-Momentum")],
+          units=r"",display_name=r"y momentum")
 ChomboFieldInfo["Y-momentum"]._projected_units=r""
 
-add_field("Z-momentum", function=lambda a,b: None, take_log=False,
-          validators=[ValidateDataField("Z-Momentum")],
-          units=r"", display_name=r"z momentum")
+add_field("Z-momentum", function=NullFunc, take_log=False,
+          validators = [ValidateDataField("Z-Momentum")],
+          units=r"",display_name=r"z momentum")
 ChomboFieldInfo["Z-momentum"]._projected_units=r""
 
-add_field("X-magnfield", function=lambda a,b: None, take_log=False,
-          validators=[ValidateDataField("X-Magnfield")],
-          units=r"", display_name=r"B_x")
+add_field("X-magnfield", function=NullFunc, take_log=False,
+          validators = [ValidateDataField("X-Magnfield")],
+          units=r"",display_name=r"B_x")
 ChomboFieldInfo["X-magnfield"]._projected_units=r""
 
-add_field("Y-magnfield", function=lambda a,b: None, take_log=False,
-          validators=[ValidateDataField("Y-Magnfield")],
-          units=r"", display_name=r"B_y")
+add_field("Y-magnfield", function=NullFunc, take_log=False,
+          validators = [ValidateDataField("Y-Magnfield")],
+          units=r"",display_name=r"B_y")
 ChomboFieldInfo["Y-magnfield"]._projected_units=r""
 
-add_field("Z-magnfield", function=lambda a,b: None, take_log=False,
-          validators=[ValidateDataField("Z-Magnfield")],
-          units=r"", display_name=r"B_z")
+add_field("Z-magnfield", function=NullFunc, take_log=False,
+          validators = [ValidateDataField("Z-Magnfield")],
+          units=r"",display_name=r"B_z")
 ChomboFieldInfo["Z-magnfield"]._projected_units=r""
 
 def _MagneticEnergy(field,data):


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/enzo/api.py
--- a/yt/frontends/enzo/api.py
+++ b/yt/frontends/enzo/api.py
@@ -39,8 +39,9 @@
       EnzoStaticOutputInMemory
 
 from .fields import \
-      EnzoFieldContainer, \
       EnzoFieldInfo, \
+      Enzo2DFieldInfo, \
+      Enzo1DFieldInfo, \
       add_enzo_field, \
       add_enzo_1d_field, \
       add_enzo_2d_field


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/enzo/data_structures.py
--- a/yt/frontends/enzo/data_structures.py
+++ b/yt/frontends/enzo/data_structures.py
@@ -45,13 +45,17 @@
     AMRHierarchy
 from yt.data_objects.static_output import \
     StaticOutput
+from yt.data_objects.field_info_container import \
+    FieldInfoContainer, NullFunc
 from yt.utilities.definitions import mpc_conversion
 from yt.utilities import hdf5_light_reader
 from yt.utilities.logger import ytLogger as mylog
 
 from .definitions import parameterDict
-from .fields import EnzoFieldContainer, Enzo1DFieldContainer, \
-    Enzo2DFieldContainer, add_enzo_field
+from .fields import \
+    EnzoFieldInfo, Enzo2DFieldInfo, Enzo1DFieldInfo, \
+    add_enzo_field, add_enzo_2d_field, add_enzo_1d_field, \
+    KnownEnzoFields
 
 from yt.utilities.parallel_tools.parallel_analysis_interface import \
     parallel_blocking_call
@@ -462,25 +466,6 @@
         self.save_data(list(field_list),"/","DataFields",passthrough=True)
         self.field_list = list(field_list)
 
-    def _setup_unknown_fields(self):
-        for field in self.field_list:
-            if field in self.parameter_file.field_info: continue
-            mylog.info("Adding %s to list of fields", field)
-            cf = None
-            if self.parameter_file.has_key(field):
-                def external_wrapper(f):
-                    def _convert_function(data):
-                        return data.convert(f)
-                    return _convert_function
-                cf = external_wrapper(field)
-            # Note that we call add_field on the field_info directly.  This
-            # will allow the same field detection mechanism to work for 1D, 2D
-            # and 3D fields.
-            self.pf.field_info.add_field(
-                    field, lambda a, b: None,
-                    convert_function=cf, take_log=False)
-            
-
     def _setup_derived_fields(self):
         self.derived_field_list = []
         for field in self.parameter_file.field_info:
@@ -687,7 +672,8 @@
     Enzo-specific output, set at a fixed time.
     """
     _hierarchy_class = EnzoHierarchy
-    _fieldinfo_class = EnzoFieldContainer
+    _fieldinfo_fallback = EnzoFieldInfo
+    _fieldinfo_known = KnownEnzoFields
     def __init__(self, filename, data_style=None,
                  file_style = None,
                  parameter_override = None,
@@ -730,11 +716,9 @@
         if self["TopGridRank"] == 1: self._setup_1d()
         elif self["TopGridRank"] == 2: self._setup_2d()
 
-        self.field_info = self._fieldinfo_class()
-
     def _setup_1d(self):
         self._hierarchy_class = EnzoHierarchy1D
-        self._fieldinfo_class = Enzo1DFieldContainer
+        self._fieldinfo_fallback = Enzo1DFieldInfo
         self.domain_left_edge = \
             na.concatenate([[self.domain_left_edge], [0.0, 0.0]])
         self.domain_right_edge = \
@@ -742,7 +726,7 @@
 
     def _setup_2d(self):
         self._hierarchy_class = EnzoHierarchy2D
-        self._fieldinfo_class = Enzo2DFieldContainer
+        self._fieldinfo_fallback = Enzo2DFieldInfo
         self.domain_left_edge = \
             na.concatenate([self["DomainLeftEdge"], [0.0]])
         self.domain_right_edge = \
@@ -994,8 +978,6 @@
 
         StaticOutput.__init__(self, "InMemoryParameterFile", self._data_style)
 
-        self.field_info = self._fieldinfo_class()
-
     def _parse_parameter_file(self):
         enzo = self._obtain_enzo()
         self.basename = "cycle%08i" % (


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/enzo/fields.py
--- a/yt/frontends/enzo/fields.py
+++ b/yt/frontends/enzo/fields.py
@@ -26,7 +26,10 @@
 import numpy as na
 
 from yt.data_objects.field_info_container import \
-    CodeFieldInfoContainer, \
+    FieldInfoContainer, \
+    NullFunc, \
+    TranslationFunc, \
+    FieldInfo, \
     ValidateParameter, \
     ValidateDataField, \
     ValidateProperty, \
@@ -35,42 +38,43 @@
 import yt.data_objects.universal_fields
 from yt.utilities.physical_constants import \
     mh
+from yt.funcs import *
+
 import yt.utilities.amr_utils as amr_utils
 
-class EnzoFieldContainer(CodeFieldInfoContainer):
-    """
-    This is a container for Enzo-specific fields.
-    """
-    _shared_state = {}
-    _field_list = {}
-EnzoFieldInfo = EnzoFieldContainer()
-add_enzo_field = EnzoFieldInfo.add_field
+EnzoFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
+add_field = EnzoFieldInfo.add_field
 
-add_field = add_enzo_field
+KnownEnzoFields = FieldInfoContainer()
+add_enzo_field = KnownEnzoFields.add_field
 
-_speciesList = ["HI","HII","Electron",
-               "HeI","HeII","HeIII",
-               "H2I","H2II","HM",
-               "DI","DII","HDI","Metal","PreShock"]
-_speciesMass = {"HI":1.0,"HII":1.0,"Electron":1.0,
-                "HeI":4.0,"HeII":4.0,"HeIII":4.0,
-                "H2I":2.0,"H2II":2.0,"HM":1.0,
-                "DI":2.0,"DII":2.0,"HDI":3.0}
+_speciesList = ["HI", "HII", "Electron",
+                "HeI", "HeII", "HeIII",
+                "H2I", "H2II", "HM",
+                "DI", "DII", "HDI", "Metal", "PreShock"]
+_speciesMass = {"HI": 1.0, "HII": 1.0, "Electron": 1.0,
+                "HeI": 4.0, "HeII": 4.0, "HeIII": 4.0,
+                "H2I": 2.0, "H2II": 2.0, "HM": 1.0,
+                "DI": 2.0, "DII": 2.0, "HDI": 3.0}
 
 def _SpeciesComovingDensity(field, data):
     sp = field.name.split("_")[0] + "_Density"
     ef = (1.0 + data.pf.current_redshift)**3.0
-    return data[sp]/ef
+    return data[sp] / ef
+
 def _SpeciesFraction(field, data):
     sp = field.name.split("_")[0] + "_Density"
-    return data[sp]/data["Density"]
+    return data[sp] / data["Density"]
+
 def _SpeciesMass(field, data):
     sp = field.name.split("_")[0] + "_Density"
     return data[sp] * data["CellVolume"]
+
 def _SpeciesNumberDensity(field, data):
     species = field.name.split("_")[0]
     sp = field.name.split("_")[0] + "_Density"
-    return data[sp]/_speciesMass[species]
+    return data[sp] / _speciesMass[species]
+
 def _convertCellMassMsun(data):
     return 5.027854e-34 # g^-1
 def _ConvertNumberDensity(data):
@@ -118,10 +122,10 @@
           validators=ValidateDataField("SN_Colour"),
           projection_conversion="1")
 
-add_field("Cooling_Time", units=r"\rm{s}",
-          function=lambda a, b: None,
-          validators=ValidateDataField("Cooling_Time"),
-          projection_conversion="1")
+add_enzo_field("Cooling_Time", units=r"\rm{s}",
+               function=NullFunc,
+               validators=ValidateDataField("Cooling_Time"),
+               projection_conversion="1")
 
 def _ThermalEnergy(field, data):
     if data.pf["HydroMethod"] == 2:
@@ -154,7 +158,9 @@
 def _convertEnergy(data):
     return data.convert("x-velocity")**2.0
 
-add_field("GasEnergy", function=lambda a, b: None,
+add_enzo_field("GasEnergy", function=NullFunc,
+          units=r"\rm{ergs}/\rm{g}", convert_function=_convertEnergy)
+add_enzo_field("Gas_Energy", function=NullFunc,
           units=r"\rm{ergs}/\rm{g}", convert_function=_convertEnergy)
 
 def _Gas_Energy(field, data):
@@ -162,7 +168,12 @@
 add_field("Gas_Energy", function=_Gas_Energy,
           units=r"\rm{ergs}/\rm{g}", convert_function=_convertEnergy)
 
-add_field("TotalEnergy", function=lambda a, b: None,
+# We set up fields for both TotalEnergy and Total_Energy in the known fields
+# lists.  Note that this does not mean these will be the used definitions.
+add_enzo_field("TotalEnergy", function=NullFunc,
+          display_name = "\mathrm{Total}\/\mathrm{Energy}",
+          units=r"\rm{ergs}/\rm{g}", convert_function=_convertEnergy)
+add_enzo_field("Total_Energy", function=NullFunc,
           display_name = "\mathrm{Total}\/\mathrm{Energy}",
           units=r"\rm{ergs}/\rm{g}", convert_function=_convertEnergy)
 
@@ -221,38 +232,46 @@
 
 for field in _default_fields:
     dn = field.replace("_","\/")
-    add_field(field, function=lambda a, b: None, take_log=True,
+    add_enzo_field(field, function=NullFunc, take_log=True,
               display_name = dn,
-              validators=[ValidateDataField(field)], units=r"\rm{g}/\rm{cm}^3")
-EnzoFieldInfo["x-velocity"].projection_conversion='1'
-EnzoFieldInfo["y-velocity"].projection_conversion='1'
-EnzoFieldInfo["z-velocity"].projection_conversion='1'
+              validators=[ValidateDataField(field)], units=r"Unknown")
+KnownEnzoFields["x-velocity"].projection_conversion='1'
+KnownEnzoFields["y-velocity"].projection_conversion='1'
+KnownEnzoFields["z-velocity"].projection_conversion='1'
+
+def _convertBfield(data): 
+    return na.sqrt(4*na.pi*data.convert("Density")*data.convert("x-velocity")**2)
+for field in ['Bx','By','Bz']:
+    f = KnownEnzoFields[field]
+    f._convert_function=_convertBfield
+    f._units=r"\mathrm{Gauss}"
+    f.take_log=False
 
 # Now we override
 
 def _convertDensity(data):
     return data.convert("Density")
 for field in ["Density"] + [ "%s_Density" % sp for sp in _speciesList ]:
-    EnzoFieldInfo[field]._units = r"\rm{g}/\rm{cm}^3"
-    EnzoFieldInfo[field]._projected_units = r"\rm{g}/\rm{cm}^2"
-    EnzoFieldInfo[field]._convert_function=_convertDensity
+    KnownEnzoFields[field]._units = r"\rm{g}/\rm{cm}^3"
+    KnownEnzoFields[field]._projected_units = r"\rm{g}/\rm{cm}^2"
+    KnownEnzoFields[field]._convert_function=_convertDensity
 
-add_field("Dark_Matter_Density", function=lambda a,b: None,
+add_enzo_field("Dark_Matter_Density", function=NullFunc,
           convert_function=_convertDensity,
           validators=[ValidateDataField("Dark_Matter_Density"),
                       ValidateSpatial(0)],
           display_name = "Dark\ Matter\ Density",
           not_in_all = True)
 
-EnzoFieldInfo["Temperature"]._units = r"\rm{K}"
-EnzoFieldInfo["Temperature"].units = r"K"
-EnzoFieldInfo["Dust_Temperature"]._units = r"\rm{K}"
-EnzoFieldInfo["Dust_Temperature"].units = r"K"
+KnownEnzoFields["Temperature"]._units = r"\rm{K}"
+KnownEnzoFields["Temperature"].units = r"K"
+KnownEnzoFields["Dust_Temperature"]._units = r"\rm{K}"
+KnownEnzoFields["Dust_Temperature"].units = r"K"
 
 def _convertVelocity(data):
     return data.convert("x-velocity")
 for ax in ['x','y','z']:
-    f = EnzoFieldInfo["%s-velocity" % ax]
+    f = KnownEnzoFields["%s-velocity" % ax]
     f._units = r"\rm{cm}/\rm{s}"
     f._convert_function = _convertVelocity
     f.take_log = False
@@ -378,7 +397,7 @@
 def _convertBfield(data): 
     return na.sqrt(4*na.pi*data.convert("Density")*data.convert("x-velocity")**2)
 for field in ['Bx','By','Bz']:
-    f = EnzoFieldInfo[field]
+    f = KnownEnzoFields[field]
     f._convert_function=_convertBfield
     f._units=r"\mathrm{Gauss}"
     f.take_log=False
@@ -390,17 +409,95 @@
 
 add_field("Bmag", function=_Bmag,display_name=r"|B|",units=r"\mathrm{Gauss}")
 
+# Particle functions
+
+def particle_func(p_field, dtype='float64'):
+    def _Particles(field, data):
+        io = data.hierarchy.io
+        if not data.NumberOfParticles > 0:
+            return na.array([], dtype=dtype)
+        try:
+            return io._read_data_set(data, p_field).astype(dtype)
+        except io._read_exception:
+            pass
+        # This is bad.  But it's the best idea I have right now.
+        return data._read_data(p_field.replace("_"," ")).astype(dtype)
+    return _Particles
+for pf in ["type", "mass"] + \
+          ["position_%s" % ax for ax in 'xyz']:
+    pfunc = particle_func("particle_%s" % (pf))
+    add_enzo_field("particle_%s" % pf, function=pfunc,
+              validators = [ValidateSpatial(0)],
+              particle_type=True)
     
+def _convRetainInt(data):
+    return 1
+add_enzo_field("particle_index", function=particle_func("particle_index", "int64"),
+          validators = [ValidateSpatial(0)], particle_type=True,
+          convert_function=_convRetainInt)
+
+def _get_vel_convert(ax):
+    def _convert_p_vel(data):
+        return data.convert("%s-velocity" % ax)
+    return _convert_p_vel
+for ax in 'xyz':
+    pf = "particle_velocity_%s" % ax
+    pfunc = particle_func(pf)
+    cfunc = _get_vel_convert(ax)
+    add_enzo_field(pf, function=pfunc, convert_function=cfunc,
+              validators = [ValidateSpatial(0)],
+              particle_type=True)
+
+for pf in ["creation_time", "dynamical_time", "metallicity_fraction"]:
+    pfunc = particle_func(pf)
+    add_enzo_field(pf, function=pfunc,
+              validators = [ValidateSpatial(0),
+                            ValidateDataField(pf)],
+              particle_type=True)
+add_field("particle_mass", function=particle_func("particle_mass"),
+          validators=[ValidateSpatial(0)], particle_type=True)
+
+def _ParticleAge(field, data):
+    current_time = data.pf.current_time
+    return (current_time - data["creation_time"])
+def _convertParticleAge(data):
+    return data.convert("years")
+add_field("ParticleAge", function=_ParticleAge,
+          validators=[ValidateDataField("creation_time")],
+          particle_type=True, convert_function=_convertParticleAge)
+
+def _ParticleMass(field, data):
+    particles = data["particle_mass"].astype('float64') * \
+                just_one(data["CellVolumeCode"].ravel())
+    # Note that we mandate grid-type here, so this is okay
+    return particles
+
+def _convertParticleMass(data):
+    return data.convert("Density")*(data.convert("cm")**3.0)
+def _IOLevelParticleMass(grid):
+    dd = dict(particle_mass = na.ones(1), CellVolumeCode=grid["CellVolumeCode"])
+    cf = (_ParticleMass(None, dd) * _convertParticleMass(grid))[0]
+    return cf
+def _convertParticleMassMsun(data):
+    return data.convert("Density")*((data.convert("cm")**3.0)/1.989e33)
+def _IOLevelParticleMassMsun(grid):
+    dd = dict(particle_mass = na.ones(1), CellVolumeCode=grid["CellVolumeCode"])
+    cf = (_ParticleMass(None, dd) * _convertParticleMassMsun(grid))[0]
+    return cf
+add_field("ParticleMass",
+          function=_ParticleMass, validators=[ValidateSpatial(0)],
+          particle_type=True, convert_function=_convertParticleMass,
+          particle_convert_function=_IOLevelParticleMass)
+add_field("ParticleMassMsun",
+          function=_ParticleMass, validators=[ValidateSpatial(0)],
+          particle_type=True, convert_function=_convertParticleMassMsun,
+          particle_convert_function=_IOLevelParticleMassMsun)
+
 #
 # Now we do overrides for 2D fields
 #
 
-class Enzo2DFieldContainer(CodeFieldInfoContainer):
-    _shared_state = {}
-    _field_list = EnzoFieldContainer._field_list.copy()
-# We make a copy of the dict from the other, so we
-# can now update it...
-Enzo2DFieldInfo = Enzo2DFieldContainer()
+Enzo2DFieldInfo = FieldInfoContainer.create_with_fallback(EnzoFieldInfo)
 add_enzo_2d_field = Enzo2DFieldInfo.add_field
 
 def _CellArea(field, data):
@@ -438,12 +535,7 @@
 # Now we do overrides for 1D fields
 #
 
-class Enzo1DFieldContainer(CodeFieldInfoContainer):
-    _shared_state = {}
-    _field_list = EnzoFieldContainer._field_list.copy()
-# We make a copy of the dict from the other, so we
-# can now update it...
-Enzo1DFieldInfo = Enzo1DFieldContainer()
+Enzo1DFieldInfo = FieldInfoContainer.create_with_fallback(EnzoFieldInfo)
 add_enzo_1d_field = Enzo1DFieldInfo.add_field
 
 def _CellLength(field, data):
@@ -474,7 +566,7 @@
 def _convertBfield(data): 
     return na.sqrt(4*na.pi*data.convert("Density")*data.convert("x-velocity")**2)
 for field in ['Bx','By','Bz']:
-    f = EnzoFieldInfo[field]
+    f = KnownEnzoFields[field]
     f._convert_function=_convertBfield
     f._units=r"\mathrm{Gauss}"
     f.take_log=False


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/flash/api.py
--- a/yt/frontends/flash/api.py
+++ b/yt/frontends/flash/api.py
@@ -34,7 +34,6 @@
       FLASHStaticOutput
 
 from .fields import \
-      FLASHFieldContainer, \
       FLASHFieldInfo, \
       add_flash_field
 


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/flash/data_structures.py
--- a/yt/frontends/flash/data_structures.py
+++ b/yt/frontends/flash/data_structures.py
@@ -40,9 +40,8 @@
 from yt.utilities.io_handler import \
     io_registry
 
-from .fields import \
-    FLASHFieldContainer, \
-    add_field
+from .fields import FLASHFieldInfo, add_flash_field, KnownFLASHFields
+from yt.data_objects.field_info_container import FieldInfoContainer, NullFunc
 
 class FLASHGrid(AMRGridPatch):
     _id_offset = 1
@@ -63,7 +62,6 @@
     
     def __init__(self,pf,data_style='flash_hdf5'):
         self.data_style = data_style
-        self.field_info = FLASHFieldContainer()
         self.field_indexes = {}
         self.parameter_file = weakref.proxy(pf)
         # for now, the hierarchy file is the parameter file!
@@ -148,22 +146,6 @@
             g._setup_dx()
         self.max_level = self.grid_levels.max()
 
-    def _setup_unknown_fields(self):
-        for field in self.field_list:
-            if field in self.parameter_file.field_info: continue
-            pfield = field.startswith("particle_")
-            mylog.info("Adding %s to list of fields", field)
-            cf = None
-            if self.parameter_file.has_key(field):
-                def external_wrapper(f):
-                    def _convert_function(data):
-                        return data.convert(f)
-                    return _convert_function
-                cf = external_wrapper(field)
-            add_field(field, lambda a, b: None,
-                      convert_function=cf, take_log=False,
-                      particle_type=pfield)
-
     def _setup_derived_fields(self):
         self.derived_field_list = []
         for field in self.parameter_file.field_info:
@@ -183,7 +165,8 @@
 
 class FLASHStaticOutput(StaticOutput):
     _hierarchy_class = FLASHHierarchy
-    _fieldinfo_class = FLASHFieldContainer
+    _fieldinfo_fallback = FLASHFieldInfo
+    _fieldinfo_known = KnownFLASHFields
     _handle = None
     
     def __init__(self, filename, data_style='flash_hdf5',
@@ -197,7 +180,6 @@
         StaticOutput.__init__(self, filename, data_style)
         self.storage_filename = storage_filename
 
-        self.field_info = self._fieldinfo_class()
         # These should be explicitly obtained from the file, but for now that
         # will wait until a reorganization of the source tree and better
         # generalization.


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/flash/fields.py
--- a/yt/frontends/flash/fields.py
+++ b/yt/frontends/flash/fields.py
@@ -24,7 +24,8 @@
 """
 
 from yt.data_objects.field_info_container import \
-    CodeFieldInfoContainer, \
+    FieldInfoContainer, \
+    FieldInfo, \
     ValidateParameter, \
     ValidateDataField, \
     ValidateProperty, \
@@ -32,13 +33,12 @@
     ValidateGridType
 import yt.data_objects.universal_fields
 
-class FLASHFieldContainer(CodeFieldInfoContainer):
-    _shared_state = {}
-    _field_list = {}
-FLASHFieldInfo = FLASHFieldContainer()
-add_flash_field = FLASHFieldInfo.add_field
 
-add_field = add_flash_field
+KnownFLASHFields = FieldInfoContainer()
+add_flash_field = KnownFLASHFields.add_field
+
+FLASHFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
+add_field = FLASHFieldInfo.add_field
 
 # Common fields in FLASH: (Thanks to John ZuHone for this list)
 #


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/gadget/api.py
--- a/yt/frontends/gadget/api.py
+++ b/yt/frontends/gadget/api.py
@@ -34,7 +34,6 @@
       GadgetStaticOutput
 
 from .fields import \
-      GadgetFieldContainer, \
       GadgetFieldInfo, \
       add_gadget_field
 


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/gadget/data_structures.py
--- a/yt/frontends/gadget/data_structures.py
+++ b/yt/frontends/gadget/data_structures.py
@@ -37,7 +37,9 @@
 from yt.data_objects.static_output import \
     StaticOutput
 
-from .fields import GadgetFieldContainer
+from .fields import GadgetFieldInfo, KnownGadgetFields
+from yt.data_objects.field_info_container import \
+    FieldInfoContainer, NullFunc
 
 class GadgetGrid(AMRGridPatch):
     _id_offset = 0
@@ -69,7 +71,6 @@
     grid = GadgetGrid
 
     def __init__(self, pf, data_style='gadget_hdf5'):
-        self.field_info = GadgetFieldContainer()
         self.filename = pf.filename
         self.directory = os.path.dirname(pf.filename)
         self.data_style = data_style
@@ -135,19 +136,16 @@
             g._prepare_grid()
             g._setup_dx()
             
-        
-    def _setup_unknown_fields(self):
-        pass
-
     def _setup_derived_fields(self):
         self.derived_field_list = []
 
 class GadgetStaticOutput(StaticOutput):
     _hierarchy_class = GadgetHierarchy
-    _fieldinfo_class = GadgetFieldContainer
+    _fieldinfo_fallback = GadgetFieldInfo
+    _fieldinfo_known = KnownGadgetFields
+
     def __init__(self, filename,storage_filename=None) :
         self.storage_filename = storage_filename
-        self.field_info = self._fieldinfo_class()
         self.filename = filename
         
         StaticOutput.__init__(self, filename, 'gadget_infrastructure')


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/gadget/fields.py
--- a/yt/frontends/gadget/fields.py
+++ b/yt/frontends/gadget/fields.py
@@ -27,7 +27,8 @@
 
 from yt.funcs import *
 from yt.data_objects.field_info_container import \
-    CodeFieldInfoContainer, \
+    FieldInfoContainer, \
+    FieldInfo, \
     ValidateParameter, \
     ValidateDataField, \
     ValidateProperty, \
@@ -35,10 +36,7 @@
     ValidateGridType
 import yt.data_objects.universal_fields
 
-class GadgetFieldContainer(CodeFieldInfoContainer):
-    _shared_state = {}
-    _field_list = {}
-GadgetFieldInfo = GadgetFieldContainer()
+GadgetFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
 add_gadget_field = GadgetFieldInfo.add_field
 
 add_field = add_gadget_field


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/gdf/api.py
--- a/yt/frontends/gdf/api.py
+++ b/yt/frontends/gdf/api.py
@@ -27,17 +27,17 @@
   along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
 """
-
 from .data_structures import \
       GDFGrid, \
       GDFHierarchy, \
       GDFStaticOutput
 
 from .fields import \
-      GDFFieldContainer, \
       GDFFieldInfo, \
+      KnownGDFFields, \
       add_gdf_field
 
 from .io import \
       IOHandlerGDFHDF5
 
+


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/gdf/data_structures.py
--- a/yt/frontends/gdf/data_structures.py
+++ b/yt/frontends/gdf/data_structures.py
@@ -35,7 +35,9 @@
 from yt.data_objects.static_output import \
            StaticOutput
 
-from .fields import GDFFieldContainer
+from .fields import GDFFieldInfo, KnownGDFFields
+from yt.data_objects.field_info_container import \
+    FieldInfoContainer, NullFunc
 import pdb
 
 class GDFGrid(AMRGridPatch):
@@ -142,14 +144,14 @@
 
 class GDFStaticOutput(StaticOutput):
     _hierarchy_class = GDFHierarchy
-    _fieldinfo_class = GDFFieldContainer
+    _fieldinfo_fallback = GDFFieldInfo
+    _fieldinfo_known = KnownGDFFields
     
     def __init__(self, filename, data_style='grid_data_format',
                  storage_filename = None):
         StaticOutput.__init__(self, filename, data_style)
         self.storage_filename = storage_filename
         self.filename = filename
-        self.field_info = self._fieldinfo_class()        
         
     def _set_units(self):
         """
@@ -170,6 +172,7 @@
         self._handle = h5py.File(self.parameter_filename, "r")
         for field_name in self._handle["/field_types"]:
             self.units[field_name] = self._handle["/field_types/%s" % field_name].attrs['field_to_cgs']
+        self._handle.close()
         del self._handle
         
     def _parse_parameter_file(self):
@@ -196,7 +199,7 @@
             self.current_redshift = self.omega_lambda = self.omega_matter = \
                 self.hubble_constant = self.cosmological_simulation = 0.0
         self.parameters["HydroMethod"] = 0 # Hardcode for now until field staggering is supported.
-        
+        self._handle.close()
         del self._handle
             
     @classmethod






diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/maestro/api.py
--- a/yt/frontends/maestro/api.py
+++ b/yt/frontends/maestro/api.py
@@ -36,7 +36,6 @@
       MaestroStaticOutput
 
 from .fields import \
-      MaestroFieldContainer, \
       MaestroFieldInfo, \
       add_maestro_field
 


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/maestro/data_structures.py
--- a/yt/frontends/maestro/data_structures.py
+++ b/yt/frontends/maestro/data_structures.py
@@ -54,9 +54,12 @@
     yt2maestroFieldsDict, \
     maestro_FAB_header_pattern
 
+from yt.data_objects.field_info_container import \
+    FieldInfoContainer, NullFunc
 from .fields import \
-    MaestroFieldContainer, \
-    add_field
+    MaestroFieldInfo, \
+    add_maestro_field, \
+    KnownMaestroFields
 
 
 class MaestroGrid(AMRGridPatch):
@@ -118,7 +121,6 @@
 class MaestroHierarchy(AMRHierarchy):
     grid = MaestroGrid
     def __init__(self, pf, data_style='maestro'):
-        self.field_info = MaestroFieldContainer()
         self.field_indexes = {}
         self.parameter_file = weakref.proxy(pf)
         header_filename = os.path.join(pf.fullplotdir,'Header')
@@ -391,21 +393,6 @@
     def _detect_fields(self):
         pass
 
-    def _setup_unknown_fields(self):
-        for field in self.field_list:
-            if field in self.parameter_file.field_info: continue
-            mylog.info("Adding %s to list of fields", field)
-            cf = None
-            if self.parameter_file.has_key(field):
-                def external_wrapper(f):
-                    def _convert_function(data):
-                        return data.convert(f)
-                    return _convert_function
-                cf = external_wrapper(field)
-            add_field(field, lambda a, b: None,
-                      convert_function=cf, take_log=False)
-
-
     def _setup_derived_fields(self):
         pass
 
@@ -431,7 +418,8 @@
     *filename*, without looking at the Maestro hierarchy.
     """
     _hierarchy_class = MaestroHierarchy
-    _fieldinfo_class = MaestroFieldContainer
+    _fieldinfo_fallback = MaestroFieldInfo
+    _fieldinfo_known = KnownMaestroFields
 
     def __init__(self, plotname, paramFilename=None, 
                  data_style='maestro', paranoia=False,
@@ -455,7 +443,6 @@
         # this is the unit of time; NOT the current time
         self.parameters["Time"] = 1 # second
 
-        self.field_info = self._fieldinfo_class()
         self._parse_header_file()
 
 


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/maestro/fields.py
--- a/yt/frontends/maestro/fields.py
+++ b/yt/frontends/maestro/fields.py
@@ -27,7 +27,8 @@
 from yt.utilities.physical_constants import \
     mh, kboltz
 from yt.data_objects.field_info_container import \
-    CodeFieldInfoContainer, \
+    FieldInfoContainer, \
+    FieldInfo, \
     ValidateParameter, \
     ValidateDataField, \
     ValidateProperty, \
@@ -35,17 +36,11 @@
     ValidateGridType
 import yt.data_objects.universal_fields
 
-class MaestroFieldContainer(CodeFieldInfoContainer):
-    """
-    All Maestro-specific fields are stored in here.
-    """
-    _shared_state = {}
-    _field_list = {}
-MaestroFieldInfo = MaestroFieldContainer()
-add_maestro_field = MaestroFieldInfo.add_field
+KnownMaestroFields = FieldInfoContainer()
+add_maestro_field = KnownMaestroFields.add_field
 
-
-add_field = add_maestro_field
+MaestroFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
+add_field = MaestroFieldInfo.add_field
 
 add_field("density", function=lambda a,b: None, take_log=True,
           validators = [ValidateDataField("density")],


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/nyx/api.py
--- a/yt/frontends/nyx/api.py
+++ b/yt/frontends/nyx/api.py
@@ -25,5 +25,5 @@
 """
 
 from .data_structures import NyxGrid, NyxHierarchy, NyxStaticOutput
-from .fields import NyxFieldContainer, nyx_fields, add_nyx_field
+from .fields import NyxFieldInfo, KnownNyxFields, add_nyx_field
 from .io import IOHandlerNative


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/nyx/data_structures.py
--- a/yt/frontends/nyx/data_structures.py
+++ b/yt/frontends/nyx/data_structures.py
@@ -41,13 +41,15 @@
 from yt.data_objects.grid_patch import AMRGridPatch
 from yt.data_objects.hierarchy import AMRHierarchy
 from yt.data_objects.static_output import StaticOutput
+from yt.data_objects.field_info_container import \
+    FieldInfoContainer, NullFunc
 from yt.utilities.amr_utils import get_box_grids_level
 from yt.utilities.definitions import mpc_conversion
 
 from .definitions import parameter_type_dict, nyx_to_enzo_dict, \
                          fab_header_pattern, nyx_particle_field_names
 from .utils import boxlib_bool_to_int
-from .fields import NyxFieldContainer, add_field
+from .fields import NyxFieldInfo, add_nyx_field, KnownNyxFields
 
 
 class NyxGrid(AMRGridPatch):
@@ -118,7 +120,6 @@
     grid = NyxGrid
 
     def __init__(self, pf, data_style="nyx_native"):
-        self.field_info = NyxFieldContainer()
         self.field_indexes = {}
         self.parameter_file = weakref.proxy(pf)
         self.directory = pf.path
@@ -420,20 +421,6 @@
         return self.grids[mask]
 
     def _setup_field_list(self):
-        self.derived_field_list = []
-
-        for field in self.field_info:
-            try:
-                fd = self.field_info[field].get_dependencies(pf=self.parameter_file)
-            except:
-                continue
-            available = na.all([f in self.field_list for f in fd.requested])
-            if available: self.derived_field_list.append(field)
-
-        for field in self.field_list:
-            if field not in self.derived_field_list:
-                self.derived_field_list.append(field)
-
         if self.parameter_file.use_particles:
             # We know which particle fields will exist -- pending further
             # changes in the future.
@@ -446,7 +433,7 @@
                 # Note that we call add_field on the field_info directly.  This
                 # will allow the same field detection mechanism to work for 1D,
                 # 2D and 3D fields.
-                self.pf.field_info.add_field(field, lambda a, b: None,
+                self.pf.field_info.add_field(field, NullFunc,
                                              convert_function=cf,
                                              take_log=False, particle_type=True)
 
@@ -468,23 +455,19 @@
     def _detect_fields(self):
         pass
 
-    def _setup_unknown_fields(self):
-        # not sure what the case for this is.
+    def _setup_derived_fields(self):
+        self.derived_field_list = []
+        for field in self.parameter_file.field_info:
+            try:
+                fd = self.parameter_file.field_info[field].get_dependencies(
+                            pf = self.parameter_file)
+            except:
+                continue
+            available = na.all([f in self.field_list for f in fd.requested])
+            if available: self.derived_field_list.append(field)
         for field in self.field_list:
-            if field in self.parameter_file.field_info: continue
-            mylog.info("Adding %s to list of fields", field)
-            cf = None
-            if self.parameter_file.has_key(field):
-                def external_wrapper(f):
-                    def _convert_function(data):
-                        return data.convert(f)
-                    return _convert_function
-                cf = external_wrapper(field)
-            add_field(field, lambda a, b: None, convert_function=cf,
-                      take_log=False)
-
-    def _setup_derived_fields(self):
-        pass
+            if field not in self.derived_field_list:
+                self.derived_field_list.append(field)
 
     def _initialize_state_variables(self):
         """
@@ -509,7 +492,8 @@
 
     """
     _hierarchy_class = NyxHierarchy
-    _fieldinfo_class = NyxFieldContainer
+    _fieldinfo_fallback = NyxFieldInfo
+    _fieldinfo_known = KnownNyxFields
 
     @classmethod
     def _is_valid(cls, *args, **kwargs):
@@ -569,9 +553,6 @@
         # ``self.print_key_parameters()``
         StaticOutput.__init__(self, plotname.rstrip("/"), data_style=data_style)
 
-        # @todo: field pruning should happen here
-        self.field_info = self._fieldinfo_class()
-
         # @todo: check all of these and hopefully factor out of the constructor.
         # These should maybe not be hardcoded?
         self.parameters["HydroMethod"] = "nyx"  # always PPM DE


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/nyx/fields.py
--- a/yt/frontends/nyx/fields.py
+++ b/yt/frontends/nyx/fields.py
@@ -29,30 +29,28 @@
 
 import yt.data_objects.universal_fields
 
-from yt.data_objects.field_info_container import CodeFieldInfoContainer, \
+from yt.data_objects.field_info_container import FieldInfoContainer, \
+    NullFunc, TranslationFunc, FieldInfo, \
     ValidateParameter, ValidateDataField, ValidateProperty, ValidateSpatial, \
     ValidateGridType
 from yt.utilities.physical_constants import mh, kboltz
 
-class NyxFieldContainer(CodeFieldInfoContainer):
-    """ All nyx-specific fields are stored in here. """
-    _shared_state = {}
-    _field_list = {}
+NyxFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
+add_field = NyxFieldInfo.add_field
 
-nyx_fields = NyxFieldContainer()
-add_field = nyx_fields.add_field
-add_nyx_field = add_field  # alias for API
+KnownNyxFields = FieldInfoContainer()
+add_nyx_field = KnownNyxFields.add_field 
 
 # Density
-add_field("density", function=lambda a, b: None, take_log=True,
+add_nyx_field("density", function=lambda a, b: None, take_log=True,
           validators=[ValidateDataField("density")],
-          units=r"\rm{g} / \rm{cm}^3",
-          projected_units =r"\rm{g} / \rm{cm}^2")
-nyx_fields["density"]._projected_units =r"\rm{g} / \rm{cm}^2"
+          units=r"\rm{g}} / \rm{cm}^3",
+          projected_units =r"\rm{g}} / \rm{cm}^2")
+KnownNyxFields["density"]._projected_units =r"\rm{g}} / \rm{cm}^2"
 
-add_field("Density", function=lambda a, b: b["density"], take_log=True,
-          units=r"\rm{g} / \rm{cm}^3",
-          projected_units =r"\rm{g} / \rm{cm}^2")
+add_field("Density", function=TranslationFunc("density"), take_log=True,
+          units=r"\rm{g}} / \rm{cm}^3",
+          projected_units =r"\rm{g}} / \rm{cm}^2")
 
 # Particle mass in units of $ M_{\odot}
 def _convertParticleMassMsun(data):
@@ -61,28 +59,30 @@
     return data["particle_mass"]
 add_field("ParticleMassMsun", function=_particle_mass_m_sun,
           validators=[ValidateSpatial(0), ValidateDataField("particle_mass")],
-          particle_type=True, convert_function=_convertParticleMassMsun, take_log=True, units=r"\rm{M_{\odot}}")
+          particle_type=True, convert_function=_convertParticleMassMsun,
+          take_log=True, units=r"\rm{M_{\odot}}")
           
-add_field("Dark_Matter_Density", function=lambda a, b: b["particle_mass_density"], take_log=True,
-          units=r"\rm{g} / \rm{cm}^3",particle_type=True,
-          projected_units =r"\rm{g} / \rm{cm}^2")
+add_nyx_field("Dark_Matter_Density", function=TranslationFunc("particle_mass_density"),
+          take_log=True,
+          units=r"\rm{g}} / \rm{cm}^3",particle_type=True,
+          projected_units =r"\rm{g}} / \rm{cm}^2")
 
 
 # Energy Density
 # @todo: ``energy_density``
-add_field("total_energy", function=lambda a, b: None, take_log=True,
+add_nyx_field("total_energy", function=lambda a, b: None, take_log=True,
           validators=[ValidateDataField("total_energy")],
           units=r"\rm{M_{\odot}} (\rm{km} / \rm{s})^2")
 
 # Momentum in each dimension.
 # @todo: ``momentum_x``
-add_field("x-momentum", function=lambda a, b: None, take_log=False,
+add_nyx_field("x-momentum", function=lambda a, b: None, take_log=False,
           validators=[ValidateDataField("x-momentum")],
           units=r"\rm{M_{\odot}} \rm{km} / \rm{s}")
-add_field("y-momentum", function=lambda a, b: None, take_log=False,
+add_nyx_field("y-momentum", function=lambda a, b: None, take_log=False,
           validators=[ValidateDataField("y-momentum")],
           units=r"\rm{M_{\odot}} \rm{km} / \rm{s}")
-add_field("z-momentum", function=lambda a, b: None, take_log=False,
+add_nyx_field("z-momentum", function=lambda a, b: None, take_log=False,
           validators=[ValidateDataField("z-momentum")],
           units=r"\rm{M_{\odot}} \rm{km} / \rm{s}")
 


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/orion/api.py
--- a/yt/frontends/orion/api.py
+++ b/yt/frontends/orion/api.py
@@ -34,7 +34,6 @@
       OrionStaticOutput
 
 from .fields import \
-      OrionFieldContainer, \
       OrionFieldInfo, \
       add_orion_field
 


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/orion/data_structures.py
--- a/yt/frontends/orion/data_structures.py
+++ b/yt/frontends/orion/data_structures.py
@@ -23,46 +23,41 @@
   along with this program.  If not, see <http://www.gnu.org/licenses/>.
 """
 
+import os
 import re
-import os
 import weakref
+
+from collections import defaultdict
+from string import strip, rstrip
+from stat import ST_CTIME
+
 import numpy as na
 
-from collections import \
-    defaultdict
-from string import \
-    strip, \
-    rstrip
-from stat import \
-    ST_CTIME
-
 from yt.funcs import *
-from yt.data_objects.grid_patch import \
-           AMRGridPatch
-from yt.data_objects.hierarchy import \
-           AMRHierarchy
-from yt.data_objects.static_output import \
-           StaticOutput
-from yt.utilities.definitions import \
-    mpc_conversion
+from yt.data_objects.field_info_container import FieldInfoContainer, NullFunc
+from yt.data_objects.grid_patch import AMRGridPatch
+from yt.data_objects.hierarchy import AMRHierarchy
+from yt.data_objects.static_output import StaticOutput
+from yt.utilities.definitions import mpc_conversion
 from yt.utilities.parallel_tools.parallel_analysis_interface import \
-     parallel_root_only
+    parallel_root_only
 
 from .definitions import \
     orion2enzoDict, \
     parameterDict, \
     yt2orionFieldsDict, \
     orion_FAB_header_pattern
-
 from .fields import \
-    OrionFieldContainer, \
-    add_field
+    OrionFieldInfo, \
+    add_orion_field, \
+    KnownOrionFields
 
 
 class OrionGrid(AMRGridPatch):
     _id_offset = 0
-    def __init__(self, LeftEdge, RightEdge, index, level, filename, offset, dimensions,start,stop,paranoia=False,**kwargs):
-        AMRGridPatch.__init__(self, index,**kwargs)
+    def __init__(self, LeftEdge, RightEdge, index, level, filename, offset,
+                 dimensions, start, stop, paranoia=False, **kwargs):
+        AMRGridPatch.__init__(self, index, **kwargs)
         self.filename = filename
         self._offset = offset
         self._paranoid = paranoia
@@ -122,7 +117,6 @@
 class OrionHierarchy(AMRHierarchy):
     grid = OrionGrid
     def __init__(self, pf, data_style='orion_native'):
-        self.field_info = OrionFieldContainer()
         self.field_indexes = {}
         self.parameter_file = weakref.proxy(pf)
         header_filename = os.path.join(pf.fullplotdir,'Header')
@@ -399,21 +393,6 @@
     def _detect_fields(self):
         pass
 
-    def _setup_unknown_fields(self):
-        for field in self.field_list:
-            if field in self.parameter_file.field_info: continue
-            mylog.info("Adding %s to list of fields", field)
-            cf = None
-            if self.parameter_file.has_key(field):
-                def external_wrapper(f):
-                    def _convert_function(data):
-                        return data.convert(f)
-                    return _convert_function
-                cf = external_wrapper(field)
-            add_field(field, lambda a, b: None,
-                      convert_function=cf, take_log=False)
-
-
     def _setup_derived_fields(self):
         pass
 
@@ -439,7 +418,8 @@
     *filename*, without looking at the Orion hierarchy.
     """
     _hierarchy_class = OrionHierarchy
-    _fieldinfo_class = OrionFieldContainer
+    _fieldinfo_fallback = OrionFieldInfo
+    _fieldinfo_known = KnownOrionFields
 
     def __init__(self, plotname, paramFilename=None, fparamFilename=None,
                  data_style='orion_native', paranoia=False,
@@ -461,7 +441,6 @@
 
         StaticOutput.__init__(self, plotname.rstrip("/"),
                               data_style='orion_native')
-        self.field_info = self._fieldinfo_class()
 
         # These should maybe not be hardcoded?
         self.parameters["HydroMethod"] = 'orion' # always PPM DE


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/orion/fields.py
--- a/yt/frontends/orion/fields.py
+++ b/yt/frontends/orion/fields.py
@@ -25,7 +25,8 @@
 from yt.utilities.physical_constants import \
     mh, kboltz
 from yt.data_objects.field_info_container import \
-    CodeFieldInfoContainer, \
+    FieldInfoContainer, \
+    FieldInfo, \
     ValidateParameter, \
     ValidateDataField, \
     ValidateProperty, \
@@ -33,25 +34,17 @@
     ValidateGridType
 import yt.data_objects.universal_fields
 
-class OrionFieldContainer(CodeFieldInfoContainer):
-    """
-    All Orion-specific fields are stored in here.
-    """
-    _shared_state = {}
-    _field_list = {}
-OrionFieldInfo = OrionFieldContainer()
-add_orion_field = OrionFieldInfo.add_field
 
+KnownOrionFields = FieldInfoContainer()
+add_orion_field = KnownOrionFields.add_field
 
-add_field = add_orion_field
+OrionFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
+add_field = OrionFieldInfo.add_field
 
-# def _convertDensity(data):
-#     return data.convert("Density")
 add_field("density", function=lambda a,b: None, take_log=True,
           validators = [ValidateDataField("density")],
           units=r"\rm{g}/\rm{cm}^3")
 OrionFieldInfo["density"]._projected_units =r"\rm{g}/\rm{cm}^2"
-#OrionFieldInfo["density"]._convert_function=_convertDensity
 
 add_field("eden", function=lambda a,b: None, take_log=True,
           validators = [ValidateDataField("eden")],


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/ramses/api.py
--- a/yt/frontends/ramses/api.py
+++ b/yt/frontends/ramses/api.py
@@ -34,7 +34,6 @@
       RAMSESStaticOutput
 
 from .fields import \
-      RAMSESFieldContainer, \
       RAMSESFieldInfo, \
       add_ramses_field
 


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/ramses/data_structures.py
--- a/yt/frontends/ramses/data_structures.py
+++ b/yt/frontends/ramses/data_structures.py
@@ -39,13 +39,15 @@
     import _ramses_reader
 except ImportError:
     _ramses_reader = None
-from .fields import RAMSESFieldContainer
+from .fields import RAMSESFieldInfo, KnownRAMSESFields
 from yt.utilities.definitions import \
     mpc_conversion
 from yt.utilities.amr_utils import \
     get_box_grids_level
 from yt.utilities.io_handler import \
     io_registry
+from yt.data_objects.field_info_container import \
+    FieldInfoContainer, NullFunc
 
 def num_deep_inc(f):
     def wrap(self, *args, **kwargs):
@@ -108,7 +110,6 @@
     
     def __init__(self,pf,data_style='ramses'):
         self.data_style = data_style
-        self.field_info = RAMSESFieldContainer()
         self.parameter_file = weakref.proxy(pf)
         # for now, the hierarchy file is the parameter file!
         self.hierarchy_filename = self.parameter_file.parameter_filename
@@ -265,20 +266,6 @@
             g._setup_dx()
         self.max_level = self.grid_levels.max()
 
-    def _setup_unknown_fields(self):
-        for field in self.field_list:
-            if field in self.parameter_file.field_info: continue
-            mylog.info("Adding %s to list of fields", field)
-            cf = None
-            if self.parameter_file.has_key(field):
-                def external_wrapper(f):
-                    def _convert_function(data):
-                        return data.convert(f)
-                    return _convert_function
-                cf = external_wrapper(field)
-            add_field(field, lambda a, b: None,
-                      convert_function=cf, take_log=False)
-
     def _setup_derived_fields(self):
         self.derived_field_list = []
 
@@ -287,7 +274,8 @@
 
 class RAMSESStaticOutput(StaticOutput):
     _hierarchy_class = RAMSESHierarchy
-    _fieldinfo_class = RAMSESFieldContainer
+    _fieldinfo_fallback = RAMSESFieldInfo
+    _fieldinfo_known = KnownRAMSESFields
     _handle = None
     
     def __init__(self, filename, data_style='ramses',
@@ -297,8 +285,6 @@
         StaticOutput.__init__(self, filename, data_style)
         self.storage_filename = storage_filename
 
-        self.field_info = self._fieldinfo_class()
-
     def __repr__(self):
         return self.basename.rsplit(".", 1)[0]
         


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/ramses/fields.py
--- a/yt/frontends/ramses/fields.py
+++ b/yt/frontends/ramses/fields.py
@@ -24,7 +24,8 @@
 """
 
 from yt.data_objects.field_info_container import \
-    CodeFieldInfoContainer, \
+    FieldInfoContainer, \
+    FieldInfo, \
     ValidateParameter, \
     ValidateDataField, \
     ValidateProperty, \
@@ -32,13 +33,12 @@
     ValidateGridType
 import yt.data_objects.universal_fields
 
-class RAMSESFieldContainer(CodeFieldInfoContainer):
-    _shared_state = {}
-    _field_list = {}
-RAMSESFieldInfo = RAMSESFieldContainer()
-add_ramses_field = RAMSESFieldInfo.add_field
 
-add_field = add_ramses_field
+KnownRAMSESFields = FieldInfoContainer()
+add_ramses_field = KnownRAMSESFields.add_field
+
+RAMSESFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
+add_field = RAMSESFieldInfo.add_field
 
 known_ramses_fields = [
     "Density",


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/stream/data_structures.py
--- a/yt/frontends/stream/data_structures.py
+++ b/yt/frontends/stream/data_structures.py
@@ -36,12 +36,15 @@
 from yt.data_objects.static_output import \
     StaticOutput
 from yt.utilities.logger import ytLogger as mylog
+from yt.data_objects.field_info_container import \
+    FieldInfoContainer, NullFunc
 from yt.utilities.amr_utils import \
     get_box_grids_level
 
 from .fields import \
     StreamFieldContainer, \
-    add_stream_field
+    add_stream_field, \
+    KnownStreamFields
 
 class StreamGrid(AMRGridPatch):
     """
@@ -244,6 +247,7 @@
 class StreamStaticOutput(StaticOutput):
     _hierarchy_class = StreamHierarchy
     _fieldinfo_class = StreamFieldContainer
+    _fieldinfo_known = KnownStreamFields
     _data_style = 'stream'
 
     def __init__(self, stream_handler):
@@ -255,7 +259,6 @@
         self.stream_handler = stream_handler
         StaticOutput.__init__(self, "InMemoryParameterFile", self._data_style)
 
-        self.field_info = self._fieldinfo_class()
         self.units = {}
         self.time_units = {}
 


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/tiger/api.py
--- a/yt/frontends/tiger/api.py
+++ b/yt/frontends/tiger/api.py
@@ -34,7 +34,6 @@
       TigerStaticOutput
 
 from .fields import \
-      TigerFieldContainer, \
       TigerFieldInfo, \
       add_tiger_field
 


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/tiger/data_structures.py
--- a/yt/frontends/tiger/data_structures.py
+++ b/yt/frontends/tiger/data_structures.py
@@ -31,7 +31,9 @@
 from yt.data_objects.static_output import \
            StaticOutput
 
-from .fields import TigerFieldContainer
+from yt.data_objects.field_info_container import \
+    FieldInfoContainer, NullFunc
+from .fields import TigerFieldInfo, KnownTigerFields
 
 class TigerGrid(AMRGridPatch):
     _id_offset = 0
@@ -126,16 +128,13 @@
     def field_list(self):
         return self.file_mapping.keys()
 
-    def _setup_unknown_fields(self):
-        for field in self.field_list:
-            add_tiger_field(field, lambda a, b: None)
-
     def _setup_derived_fields(self):
         self.derived_field_list = []
 
 class TigerStaticOutput(StaticOutput):
     _hierarchy_class = TigerHierarchy
-    _fieldinfo_class = TigerFieldContainer
+    _fieldinfo_fallback = TigerFieldInfo
+    _fieldinfo_known = KnownTigerFields
 
     def __init__(self, rhobname, root_size, max_grid_size=128,
                  data_style='tiger', storage_filename = None):
@@ -151,7 +150,8 @@
         if not iterable(max_grid_size): max_grid_size = (max_grid_size,) * 3
         self.max_grid_size = max_grid_size
 
-        self.field_info = self._fieldinfo_class()
+        self.field_info = FieldInfoContainer.create_with_fallback(
+                            self._fieldinfo_fallback)
 
         # We assume that we have basename + "rhob" and basename + "temp"
         # to get at our various parameters.


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/frontends/tiger/fields.py
--- a/yt/frontends/tiger/fields.py
+++ b/yt/frontends/tiger/fields.py
@@ -24,7 +24,8 @@
 """
 
 from yt.data_objects.field_info_container import \
-    CodeFieldInfoContainer, \
+    FieldInfoContainer, \
+    FieldInfo, \
     ValidateParameter, \
     ValidateDataField, \
     ValidateProperty, \
@@ -32,12 +33,9 @@
     ValidateGridType
 import yt.data_objects.universal_fields
 
-class TigerFieldContainer(CodeFieldInfoContainer):
-    """
-    This is a container for Tiger-specific fields.
-    """
-    _shared_state = {}
-    _field_list = {}
-TigerFieldInfo = TigerFieldContainer()
-add_tiger_field = TigerFieldInfo.add_field
+KnownTigerFields = FieldInfoContainer()
+add_tiger_field = KnownTigerFields.add_field
 
+TigerFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
+add_field = TigerFieldInfo.add_field
+


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/funcs.py
--- a/yt/funcs.py
+++ b/yt/funcs.py
@@ -195,6 +195,11 @@
         return func(*args, **kwargs)
     return check_parallel_rank
 
+def rootloginfo(*args):
+    from yt.config import ytcfg
+    if ytcfg.getint("yt", "__topcomm_parallel_rank") > 0: return
+    mylog.info(*args)
+
 def deprecate(func):
     """
     This decorator issues a deprecation warning.


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/mods.py
--- a/yt/mods.py
+++ b/yt/mods.py
@@ -66,7 +66,7 @@
     CastroStaticOutput, CastroFieldInfo, add_castro_field
 
 from yt.frontends.nyx.api import \
-    NyxStaticOutput, nyx_fields, add_nyx_field
+    NyxStaticOutput, NyxFieldInfo, add_nyx_field
 
 from yt.frontends.orion.api import \
     OrionStaticOutput, OrionFieldInfo, add_orion_field


diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/utilities/command_line.py
--- a/yt/utilities/command_line.py
+++ b/yt/utilities/command_line.py
@@ -1058,6 +1058,19 @@
             print
             loki = raw_input("Press enter to go on, Ctrl-C to exit.")
             cedit.config.setoption(uu, hgrc_path, "bb.username=%s" % bbusername)
+        bb_fp = "81:2b:08:90:dc:d3:71:ee:e0:7c:b4:75:ce:9b:6c:48:94:56:a1:fe"
+        if uu.config("hostfingerprints", "bitbucket.org", None) is None:
+            print "Let's also add bitbucket.org to the known hosts, so hg"
+            print "doesn't warn us about bitbucket."
+            print "We will add this:"
+            print
+            print "   [hostfingerprints]"
+            print "   bitbucket.org = %s" % (bb_fp)
+            print
+            loki = raw_input("Press enter to go on, Ctrl-C to exit.")
+            cedit.config.setoption(uu, hgrc_path,
+                                   "hostfingerprints.bitbucket.org=%s" % bb_fp)
+
         # We now reload the UI's config file so that it catches the [bb]
         # section changes.
         uu.readconfig(hgrc_path[0])




diff -r 4b786c964c3def97346ec511eec326e84196c31e -r 7bc2e29981d219621ac68998415248880a9ee71c yt/visualization/volume_rendering/camera.py
--- a/yt/visualization/volume_rendering/camera.py
+++ b/yt/visualization/volume_rendering/camera.py
@@ -856,5 +856,5 @@
         image *= dl
     else:
         image /= vals[:,:,1]
-        pf.field_info._field_list.pop("temp_weightfield")
+        pf.field_info.pop("temp_weightfield")
     return image

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.



More information about the yt-svn mailing list