[Yt-svn] yt-commit r935 - in branches/yt-non-3d/yt: . lagos
mturk at wrangler.dreamhost.com
Sat Nov 15 14:20:58 PST 2008
Author: mturk
Date: Sat Nov 15 14:20:57 2008
New Revision: 935
URL: http://yt.spacepope.org/changeset/935
Log:
Adding in the FieldInfoContainer, and changing some stuff around to use it.
This is probably not 100% functional at this time.
Added:
branches/yt-non-3d/yt/lagos/EnzoFields.py
branches/yt-non-3d/yt/lagos/FieldInfoContainer.py
branches/yt-non-3d/yt/lagos/UniversalFields.py
Modified:
branches/yt-non-3d/yt/lagos/BaseDataTypes.py
branches/yt-non-3d/yt/lagos/HierarchyType.py
branches/yt-non-3d/yt/lagos/OutputTypes.py
branches/yt-non-3d/yt/lagos/__init__.py
branches/yt-non-3d/yt/mods.py
Modified: branches/yt-non-3d/yt/lagos/BaseDataTypes.py
==============================================================================
--- branches/yt-non-3d/yt/lagos/BaseDataTypes.py (original)
+++ branches/yt-non-3d/yt/lagos/BaseDataTypes.py Sat Nov 15 14:20:57 2008
@@ -315,16 +315,16 @@
temp = grid[field]
def _generate_field(self, field):
- if fieldInfo.has_key(field):
+ if self.pf.field_info.has_key(field):
# First we check the validator
try:
- fieldInfo[field].check_available(self)
+ self.pf.field_info[field].check_available(self)
except NeedsGridType, ngt_exception:
# We leave this to be implementation-specific
self._generate_field_in_grids(field, ngt_exception.ghost_zones)
return False
else:
- self[field] = fieldInfo[field](self)
+ self[field] = self.pf.field_info[field](self)
return True
else: # Can't find the field, try as it might
raise exceptions.KeyError(field)
@@ -497,16 +497,16 @@
def _generate_field(self, field):
- if fieldInfo.has_key(field):
+ if self.pf.field_info.has_key(field):
# First we check the validator
try:
- fieldInfo[field].check_available(self)
+ self.pf.field_info[field].check_available(self)
except NeedsGridType, ngt_exception:
# We leave this to be implementation-specific
self._generate_field_in_grids(field, ngt_exception.ghost_zones)
return False
else:
- self[field] = fieldInfo[field](self)
+ self[field] = self.pf.field_info[field](self)
return True
else: # Can't find the field, try as it might
raise exceptions.KeyError(field)
@@ -674,14 +674,14 @@
sl = [slice(None), slice(None), slice(None)]
sl[self.axis] = slice(wantedIndex, wantedIndex + 1)
sl = tuple(sl)
- if fieldInfo.has_key(field) and fieldInfo[field].particle_type:
+ if self.pf.field_info.has_key(field) and self.pf.field_info[field].particle_type:
return grid[field]
- elif field in fieldInfo and fieldInfo[field].not_in_all:
+ elif field in self.pf.field_info and self.pf.field_info[field].not_in_all:
dv = grid[field][sl]
elif not grid.has_key(field):
conv_factor = 1.0
- if fieldInfo.has_key(field):
- conv_factor = fieldInfo[field]._convert_function(self)
+ if self.pf.field_info.has_key(field):
+ conv_factor = self.pf.field_info[field]._convert_function(self)
dv = self._read_data_slice(grid, field, self.axis, wantedIndex) * conv_factor
else:
dv = grid[field]
@@ -797,7 +797,7 @@
return na.array(coords).swapaxes(0,1)
def _get_data_from_grid(self, grid, field):
- if not fieldInfo[field].particle_type:
+ if not self.pf.field_info[field].particle_type:
pointI = self._get_point_indices(grid)
if grid[field].size == 1: # dx, dy, dz, cellvolume
t = grid[field] * na.ones(grid.ActiveDimensions)
@@ -914,7 +914,7 @@
for field in fields + [self._weight]:
if field is None: continue
dls.append(just_one(grid['d%s' % axis_names[self.axis]]))
- convs.append(self.pf.units[fieldInfo[field].projection_conversion])
+ convs.append(self.pf.units[self.pf.field_info[field].projection_conversion])
return na.array(dls), na.array(convs)
def __project_level(self, level, fields):
@@ -1026,7 +1026,7 @@
#@time_execution
def get_data(self, fields = None):
- if fields is None: fields = self.fields
+ if fields is None: fields = ensure_list(self.fields)[:]
fields = ensure_list(fields)
coord_data = []
field_data = []
@@ -1205,11 +1205,11 @@
@restore_grid_state
def _get_data_from_grid(self, grid, field):
- if field in fieldInfo and fieldInfo[field].particle_type:
+ if field in self.pf.field_info and self.pf.field_info[field].particle_type:
if grid.NumberOfParticles == 0: return na.array([])
pointI = self._get_particle_indices(grid)
return grid[field][pointI].ravel()
- if field in fieldInfo and fieldInfo[field].vector_field:
+ if field in self.pf.field_info and self.pf.field_info[field].vector_field:
pointI = self._get_point_indices(grid)
f = grid[field]
return na.array([f[i,:][pointI] for i in range(3)])
@@ -1237,16 +1237,16 @@
i += np
def _generate_field(self, field):
- if fieldInfo.has_key(field):
+ if self.pf.field_info.has_key(field):
# First we check the validator
try:
- fieldInfo[field].check_available(self)
+ self.pf.field_info[field].check_available(self)
except NeedsGridType, ngt_exception:
# We leave this to be implementation-specific
self._generate_field_in_grids(field, ngt_exception.ghost_zones)
return False
else:
- self[field] = fieldInfo[field](self)
+ self[field] = self.pf.field_info[field](self)
return True
else: # Can't find the field, try as it might
raise exceptions.KeyError(field)
@@ -1772,7 +1772,7 @@
for ax in 'xyz': self['cd%s'%ax] = fake_grid['d%s'%ax]
for field in fields:
# Generate the new grid field
- if field in fieldInfo and fieldInfo[field].take_log:
+ if field in self.pf.field_info and self.pf.field_info[field].take_log:
interpolator = TrilinearFieldInterpolator(
na.log10(self[field]), bounds, ['x','y','z'],
truncate = True)
Added: branches/yt-non-3d/yt/lagos/EnzoFields.py
==============================================================================
--- (empty file)
+++ branches/yt-non-3d/yt/lagos/EnzoFields.py Sat Nov 15 14:20:57 2008
@@ -0,0 +1,164 @@
+"""
+Fields applicable only to Enzo
+
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: KIPAC/SLAC/Stanford
+Homepage: http://yt.enzotools.org/
+License:
+ Copyright (C) 2008 Matthew Turk. All Rights Reserved.
+
+ This file is part of yt.
+
+ yt is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+from UniversalFields import *
+
+add_field = add_enzo_field
+
+_speciesList = ["HI","HII","Electron",
+ "HeI","HeII","HeIII",
+ "H2I","H2II","HM",
+ "DI","DII","HDI","Metal"]
+def _SpeciesFraction(field, data):
+ sp = field.name.split("_")[0] + "_Density"
+ return data[sp]/data["Density"]
+for species in _speciesList:
+ add_field("%s_Fraction" % species,
+ function=_SpeciesFraction,
+ validators=ValidateDataField("%s_Density" % species))
+
+def _Metallicity(field, data):
+ return data["Metal_Fraction"] / 0.0204
+add_field("Metallicity", units=r"Z_{\rm{Solar}}",
+ function=_Metallicity,
+ validators=ValidateDataField("Metal_Density"),
+ projection_conversion="1")
+
+def _ThermalEnergy(field, data):
+ if data.pf["HydroMethod"] == 2:
+ return data["Total_Energy"]
+ else:
+ if data.pf["DualEnergyFormalism"]:
+ return data["Gas_Energy"]
+ else:
+ return data["Total_Energy"] - 0.5*(
+ data["x-velocity"]**2.0
+ + data["y-velocity"]**2.0
+ + data["z-velocity"]**2.0 )
+add_field("ThermalEnergy", function=_ThermalEnergy,
+ units=r"\rm{ergs}/\rm{cm^3}")
+
+def _NumberDensity(field, data):
+ # We can assume that we at least have Density
+ # We should actually be guaranteeing the presence of a .shape attribute,
+ # but I am not currently implementing that
+ fieldData = na.zeros(data["Density"].shape,
+ dtype = data["Density"].dtype)
+ if data.pf["MultiSpecies"] == 0:
+ if data.has_field_parameter("mu"):
+ mu = data.get_field_parameter("mu")
+ else:
+ mu = 0.6
+ fieldData += data["Density"] * mu
+ if data.pf["MultiSpecies"] > 0:
+ fieldData += data["HI_Density"] / 1.0
+ fieldData += data["HII_Density"] / 1.0
+ fieldData += data["HeI_Density"] / 4.0
+ fieldData += data["HeII_Density"] / 4.0
+ fieldData += data["HeIII_Density"] / 4.0
+ fieldData += data["Electron_Density"] / 1.0
+ if data.pf["MultiSpecies"] > 1:
+ fieldData += data["HM_Density"] / 1.0
+ fieldData += data["H2I_Density"] / 2.0
+ fieldData += data["H2II_Density"] / 2.0
+ if data.pf["MultiSpecies"] > 2:
+ fieldData += data["DI_Density"] / 2.0
+ fieldData += data["DII_Density"] / 2.0
+ fieldData += data["HDI_Density"] / 3.0
+ return fieldData
+def _ConvertNumberDensity(data):
+ return 1.0/mh
+add_field("NumberDensity", units=r"\rm{cm}^{-3}",
+ function=_NumberDensity,
+ convert_function=_ConvertNumberDensity)
+
+def Overdensity(field,data):
+ return (data['Density'] + data['particle_density']) / \
+ (rho_crit_now * ((1+data.pf['CosmologyCurrentRedshift'])**3))
+add_field("Overdensity",function=Overdensity,units=r"")
+
+# Now we add all the fields that we want to control, but we give a null function
+# This is every Enzo field we can think of. This will be installation-dependent,
+#if data.pf["HydroMethod"] == 'orion':
+_default_fields = ["Density","Temperature","Gas_Energy","Total_Energy",
+ "x-velocity","y-velocity","z-velocity",
+ "x-momentum","y-momentum","z-momentum"]
+# else:
+# _default_fields = ["Density","Temperature","Gas_Energy","Total_Energy",
+# "x-velocity","y-velocity","z-velocity"]
+_default_fields += [ "%s_Density" % sp for sp in _speciesList ]
+
+for field in _default_fields:
+ add_field(field, function=lambda a, b: None, take_log=True,
+ validators=[ValidateDataField(field)], units=r"\rm{g}/\rm{cm}^3")
+EnzoFieldInfo["x-velocity"].projection_conversion='1'
+EnzoFieldInfo["y-velocity"].projection_conversion='1'
+EnzoFieldInfo["z-velocity"].projection_conversion='1'
+
+# Now we override
+
+def _convertDensity(data):
+ return data.convert("Density")
+for field in ["Density"] + [ "%s_Density" % sp for sp in _speciesList ]:
+ EnzoFieldInfo[field]._units = r"\rm{g}/\rm{cm}^3"
+ EnzoFieldInfo[field]._projected_units = r"\rm{g}/\rm{cm}^2"
+ EnzoFieldInfo[field]._convert_function=_convertDensity
+
+add_field("Dark_Matter_Density", function=lambda a,b: None,
+ convert_function=_convertDensity,
+ validators=[ValidateDataField("Dark_Matter_Density"),
+ ValidateSpatial(0)],
+ not_in_all = True)
+
+def _convertEnergy(data):
+ return data.convert("x-velocity")**2.0
+EnzoFieldInfo["Gas_Energy"]._units = r"\rm{ergs}/\rm{g}"
+EnzoFieldInfo["Gas_Energy"]._convert_function = _convertEnergy
+EnzoFieldInfo["Total_Energy"]._units = r"\rm{ergs}/\rm{g}"
+EnzoFieldInfo["Total_Energy"]._convert_function = _convertEnergy
+EnzoFieldInfo["Temperature"]._units = r"\rm{K}"
+
+def _convertVelocity(data):
+ return data.convert("x-velocity")
+for ax in ['x','y','z']:
+ f = EnzoFieldInfo["%s-velocity" % ax]
+ f._units = r"\rm{cm}/\rm{s}"
+ f._convert_function = _convertVelocity
+ f.take_log = False
+
+def _pdensity(field, data):
+ blank = na.zeros(data.ActiveDimensions, dtype='float32', order="FORTRAN")
+ if data.NumberOfParticles == 0: return blank
+ cic_deposit.cic_deposit(data["particle_position_x"],
+ data["particle_position_y"],
+ data["particle_position_z"], 3,
+ data["particle_mass"],
+ blank, data.LeftEdge, data.dx)
+ return blank
+add_field("particle_density", function=_pdensity,
+ validators=[ValidateSpatial(0)], convert_function=_convertDensity)
+
+EnzoFieldInfo["Temperature"].units = r"K"
+
Added: branches/yt-non-3d/yt/lagos/FieldInfoContainer.py
==============================================================================
--- (empty file)
+++ branches/yt-non-3d/yt/lagos/FieldInfoContainer.py Sat Nov 15 14:20:57 2008
@@ -0,0 +1,290 @@
+"""
+The basic field info container resides here. These classes, code specific and
+universal, are the means by which we access fields across YT, both derived and
+native.
+
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: KIPAC/SLAC/Stanford
+Homepage: http://yt.enzotools.org/
+License:
+ Copyright (C) 2008 Matthew Turk. All Rights Reserved.
+
+ This file is part of yt.
+
+ yt is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+import types
+import numpy as na
+import inspect
+import copy
+import itertools
+
+from yt.funcs import *
+
+class FieldInfoContainer(object): # We are all Borg.
+ _shared_state = {}
+ _universal_field_list = {}
+ def __new__(cls, *args, **kwargs):
+ self = object.__new__(cls, *args, **kwargs)
+ self.__dict__ = cls._shared_state
+ return self
+ def __getitem__(self, key):
+ if key in self._universal_field_list:
+ return self._universal_field_list[key]
+ raise KeyError
+ def keys(self):
+ return self._universal_field_list.keys()
+ def __iter__(self):
+ return self._universal_field_list.iterkeys()
+ def __setitem__(self, key, val):
+ self._universal_field_list[key] = val
+ def has_key(self, key):
+ return key in self._universal_field_list
+ def add_field(self, name, function = None, **kwargs):
+ if function == None:
+ if kwargs.has_key("function"):
+ function = kwargs.pop("function")
+ else:
+ # This will fail if it does not exist,
+ # which is our desired behavior
+ function = eval("_%s" % name)
+ self[name] = DerivedField(name, function, **kwargs)
+FieldInfo = FieldInfoContainer()
+add_field = FieldInfo.add_field
+
+class CodeFieldInfoContainer(FieldInfoContainer):
+ def __setitem__(self, key, val):
+ self._field_list[key] = val
+ def __iter__(self):
+ return itertools.chain(self._field_list.iterkeys(),
+ self._universal_field_list.iterkeys())
+ def keys(self):
+ return set(self._field_list.keys() + self._universal_field_list.keys())
+ def has_key(self, key):
+ return key in self._universal_field_list \
+ or key in self._field_list
+ def __getitem__(self, key):
+ if key in self._field_list:
+ return self._field_list[key]
+ if key in self._universal_field_list:
+ return self._universal_field_list[key]
+ raise KeyError
+
+class EnzoFieldContainer(CodeFieldInfoContainer):
+ _shared_state = {}
+ _field_list = {}
+EnzoFieldInfo = EnzoFieldContainer()
+add_enzo_field = EnzoFieldInfo.add_field
+
+class OrionFieldContainer(CodeFieldInfoContainer):
+ _shared_state = {}
+ _field_list = {}
+OrionFieldInfo = OrionFieldContainer()
+add_orion_field = OrionFieldInfo.add_field
+
+class ValidationException(Exception):
+ pass
+
+class NeedsGridType(ValidationException):
+ def __init__(self, ghost_zones = 0, fields=None):
+ self.ghost_zones = ghost_zones
+ self.fields = fields
+
+class NeedsDataField(ValidationException):
+ def __init__(self, missing_fields):
+ self.missing_fields = missing_fields
+
+class NeedsProperty(ValidationException):
+ def __init__(self, missing_properties):
+ self.missing_properties = missing_properties
+
+class NeedsParameter(ValidationException):
+ def __init__(self, missing_parameters):
+ self.missing_parameters = missing_parameters
+
+class FieldDetector(defaultdict):
+ Level = 1
+ NumberOfParticles = 0
+ def __init__(self, nd = 16, pf = None):
+ self.nd = nd
+ self.ActiveDimensions = [nd,nd,nd]
+ self.LeftEdge = [0.0,0.0,0.0]
+ self.RightEdge = [1.0,1.0,1.0]
+ self.dx = self.dy = self.dz = na.array([1.0])
+ self.fields = []
+ if pf is None:
+ pf = defaultdict(lambda: 1)
+ self.pf = pf
+ self.requested = []
+ self.requested_parameters = []
+ defaultdict.__init__(self, lambda: na.ones((nd,nd,nd)))
+ def __missing__(self, item):
+ if FieldInfo.has_key(item) and \
+ FieldInfo[item]._function.func_name != '<lambda>':
+ try:
+ vv = FieldInfo[item](self)
+ except NeedsGridType, exc:
+ ngz = exc.ghost_zones
+ nfd = FieldDetector(self.nd+ngz*2)
+ vv = FieldInfo[item](nfd)[ngz:-ngz,ngz:-ngz,ngz:-ngz]
+ for i in vv.requested:
+ if i not in self.requested: self.requested.append(i)
+ for i in vv.requested_parameters:
+ if i not in self.requested_parameters: self.requested_parameters.append(i)
+ if vv is not None:
+ self[item] = vv
+ return self[item]
+ self.requested.append(item)
+ return defaultdict.__missing__(self, item)
+ def get_field_parameter(self, param):
+ self.requested_parameters.append(param)
+ if param in ['bulk_velocity','center','height_vector']:
+ return na.array([0,0,0])
+ else:
+ return 0.0
+ _spatial = True
+ _num_ghost_zones = 0
+ id = 1
+ def has_field_parameter(self, param): return True
+ def convert(self, item): return 1
+
+class DerivedField:
+ def __init__(self, name, function,
+ convert_function = None,
+ units = "", projected_units = "",
+ take_log = True, validators = None,
+ particle_type = False, vector_field=False,
+ display_field = True, not_in_all=False,
+ projection_conversion = "cm"):
+ """
+ This is the base class used to describe a cell-by-cell derived field.
+
+ :param name: is the name of the field.
+ :param function: is a function handle that defines the field
+ :param convert_function: must convert to CGS, if it needs to be done
+ :param units: is a mathtext-formatted string that describes the field
+ :param projected_units: if we display a projection, what should the units be?
+ :param take_log: describes whether the field should be logged
+ :param validators: is a list of :class:`FieldValidator` objects
+ :param particle_type: is this field based on particles?
+ :param vector_field: describes the dimensionality of the field
+ :param display_field: governs its appearance in the dropdowns in reason
+ :param not_in_all: is used for baryon fields from the data that are not in
+ all the grids
+ :param projection_conversion: which unit should we multiply by in a
+ projection?
+ """
+ self.name = name
+ self._function = function
+ if validators:
+ self.validators = ensure_list(validators)
+ else:
+ self.validators = []
+ self.take_log = take_log
+ self._units = units
+ self._projected_units = projected_units
+ if not convert_function:
+ convert_function = lambda a: 1.0
+ self._convert_function = convert_function
+ self.particle_type = particle_type
+ self.vector_field = vector_field
+ self.projection_conversion = projection_conversion
+ self.display_field = display_field
+ self.not_in_all = not_in_all
+ def check_available(self, data):
+ for validator in self.validators:
+ validator(data)
+ # If we don't get an exception, we're good to go
+ return True
+ def get_dependencies(self, *args, **kwargs):
+ e = FieldDetector(*args, **kwargs)
+ if self._function.func_name == '<lambda>':
+ e.requested.append(self.name)
+ else:
+ self(e)
+ return e
+ def get_units(self):
+ return self._units
+ def get_projected_units(self):
+ return self._projected_units
+ def __call__(self, data):
+ ii = self.check_available(data)
+ original_fields = data.fields[:] # Copy
+ dd = self._function(self, data)
+ dd *= self._convert_function(data)
+ for field_name in data.fields:
+ if field_name not in original_fields:
+ del data[field_name]
+ return dd
+ def get_source(self):
+ return inspect.getsource(self._function)
+
+class FieldValidator(object):
+ pass
+
+class ValidateParameter(FieldValidator):
+ def __init__(self, parameters):
+ FieldValidator.__init__(self)
+ self.parameters = ensure_list(parameters)
+ def __call__(self, data):
+ doesnt_have = []
+ for p in self.parameters:
+ if not data.has_field_parameter(p):
+ doesnt_have.append(p)
+ if len(doesnt_have) > 0:
+ raise NeedsParameter(doesnt_have)
+ return True
+
+class ValidateDataField(FieldValidator):
+ def __init__(self, field):
+ FieldValidator.__init__(self)
+ self.fields = ensure_list(field)
+ def __call__(self, data):
+ doesnt_have = []
+ if isinstance(data, FieldDetector): return True
+ for f in self.fields:
+ if f not in data.hierarchy.field_list:
+ doesnt_have.append(f)
+ if len(doesnt_have) > 0:
+ raise NeedsDataField(doesnt_have)
+ return True
+
+class ValidateProperty(FieldValidator):
+ def __init__(self, prop):
+ FieldValidator.__init__(self)
+ self.prop = ensure_list(prop)
+ def __call__(self, data):
+ doesnt_have = []
+ for p in self.prop:
+ if not hasattr(data,p):
+ doesnt_have.append(p)
+ if len(doesnt_have) > 0:
+ raise NeedsProperty(doesnt_have)
+ return True
+
+class ValidateSpatial(FieldValidator):
+ def __init__(self, ghost_zones = 0, fields=None):
+ FieldValidator.__init__(self)
+ self.ghost_zones = ghost_zones
+ self.fields = fields
+ def __call__(self, data):
+ # When we say spatial information, we really mean
+ # that it has a three-dimensional data structure
+ if isinstance(data, FieldDetector): return True
+ if not data._spatial:
+ raise NeedsGridType(self.ghost_zones,self.fields)
+ if self.ghost_zones == data._num_ghost_zones:
+ return True
+ raise NeedsGridType(self.ghost_zones)
Modified: branches/yt-non-3d/yt/lagos/HierarchyType.py
==============================================================================
--- branches/yt-non-3d/yt/lagos/HierarchyType.py (original)
+++ branches/yt-non-3d/yt/lagos/HierarchyType.py Sat Nov 15 14:20:57 2008
@@ -838,7 +838,7 @@
def _setup_field_lists(self):
field_list = self.get_data("/", "DataFields")
- if field_list == None:
+ if field_list is None:
mylog.info("Gathering a field list (this may take a moment.)")
field_list = sets.Set()
random_sample = self._generate_random_grids()
@@ -854,16 +854,16 @@
self.save_data(list(field_list),"/","DataFields")
self.field_list = list(field_list)
for field in self.field_list:
- if field in fieldInfo: continue
+ if field in self.pf.field_info: continue
mylog.info("Adding %s to list of fields", field)
cf = None
if self.parameter_file.has_key(field):
cf = lambda d: d.convert(field)
add_field(field, lambda a, b: None, convert_function=cf)
self.derived_field_list = []
- for field in fieldInfo:
+ for field in self.pf.field_info:
try:
- fd = fieldInfo[field].get_dependencies(pf = self.parameter_file)
+ fd = self.pf.field_info[field].get_dependencies(pf = self.parameter_file)
except:
continue
available = na.all([f in self.field_list for f in fd.requested])
Modified: branches/yt-non-3d/yt/lagos/OutputTypes.py
==============================================================================
--- branches/yt-non-3d/yt/lagos/OutputTypes.py (original)
+++ branches/yt-non-3d/yt/lagos/OutputTypes.py Sat Nov 15 14:20:57 2008
@@ -114,6 +114,7 @@
Enzo-specific output, set at a fixed time.
"""
_hierarchy_class = EnzoHierarchy
+ _fieldinfo_class = EnzoFieldContainer
def __init__(self, filename, data_style=None,
parameter_override = None,
conversion_override = None):
@@ -138,6 +139,7 @@
cp = os.path.join(self.directory, "cool_rates.out")
if os.path.exists(cp):
self.cool = EnzoTable(cp, cool_out_key)
+ self.field_info = self._fieldinfo_class()
# Now fixes for different types of Hierarchies
if self["TopGridRank"] == 2: self._setup_2d()
Added: branches/yt-non-3d/yt/lagos/UniversalFields.py
==============================================================================
--- (empty file)
+++ branches/yt-non-3d/yt/lagos/UniversalFields.py Sat Nov 15 14:20:57 2008
@@ -0,0 +1,663 @@
+"""
+The basic field info container resides here. These classes, code specific and
+universal, are the means by which we access fields across YT, both derived and
+native.
+
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: KIPAC/SLAC/Stanford
+Homepage: http://yt.enzotools.org/
+License:
+ Copyright (C) 2008 Matthew Turk. All Rights Reserved.
+
+ This file is part of yt.
+
+ yt is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+import types
+import numpy as na
+import inspect
+import copy
+
+# All our math stuff here:
+try:
+ import scipy.signal
+except ImportError:
+ pass
+
+from math import pi
+
+from yt.funcs import *
+from FieldInfoContainer import *
+
+mh = 1.67e-24 # g
+me = 9.11e-28 # g
+sigma_thompson = 6.65e-25 # cm^2
+clight = 3.0e10 # cm/s
+kboltz = 1.38e-16 # erg K^-1
+G = 6.67e-8 # cm^3 g^-1 s^-2
+
+
+
+# Note that, despite my newfound efforts to comply with PEP-8,
+# I violate it here in order to keep the name/func_name relationship
+
+def _dx(field, data):
+ return data.dx
+ return na.ones(data.ActiveDimensions, dtype='float64') * data.dx
+add_field('dx', function=_dx, display_field=False,
+ validators=[ValidateSpatial(0)])
+
+def _dy(field, data):
+ return data.dy
+ return na.ones(data.ActiveDimensions, dtype='float64') * data.dy
+add_field('dy', function=_dy, display_field=False,
+ validators=[ValidateSpatial(0)])
+
+def _dz(field, data):
+ return data.dz
+ return na.ones(data.ActiveDimensions, dtype='float64') * data.dz
+add_field('dz', function=_dz,
+ display_field=False, validators=[ValidateSpatial(0)])
+
+def _coordX(field, data):
+ dim = data.ActiveDimensions[0]
+ return (na.ones(data.ActiveDimensions, dtype='float64')
+ * na.arange(data.ActiveDimensions[0]).reshape(dim,1,1)
+ +0.5) * data['dx'] + data.LeftEdge[0]
+add_field('x', function=_coordX, display_field=False,
+ validators=[ValidateSpatial(0)])
+
+def _coordY(field, data):
+ dim = data.ActiveDimensions[1]
+ return (na.ones(data.ActiveDimensions, dtype='float64')
+ * na.arange(data.ActiveDimensions[1]).reshape(1,dim,1)
+ +0.5) * data['dy'] + data.LeftEdge[1]
+add_field('y', function=_coordY, display_field=False,
+ validators=[ValidateSpatial(0)])
+
+def _coordZ(field, data):
+ dim = data.ActiveDimensions[2]
+ return (na.ones(data.ActiveDimensions, dtype='float64')
+ * na.arange(data.ActiveDimensions[2]).reshape(1,1,dim)
+ +0.5) * data['dz'] + data.LeftEdge[2]
+add_field('z', function=_coordZ, display_field=False,
+ validators=[ValidateSpatial(0)])
+
+def _GridLevel(field, data):
+ return na.ones(data["Density"].shape)*(data.Level)
+add_field("GridLevel", function=_GridLevel,
+ validators=[#ValidateProperty('Level'),
+ ValidateSpatial(0)])
+
+def _GridIndices(field, data):
+ return na.ones(data["Density"].shape)*(data.id-data._id_offset)
+add_field("GridIndices", function=_GridIndices,
+ validators=[#ValidateProperty('id'),
+ ValidateSpatial(0)], take_log=False)
+
+def _OnesOverDx(field, data):
+ return na.ones(data["Density"].shape,
+ dtype=data["Density"].dtype)/data['dx']
+add_field("OnesOverDx", function=_OnesOverDx,
+ display_field=False)
+
+def _Ones(field, data):
+ return na.ones(data.ActiveDimensions, dtype='float64')
+add_field("Ones", function=_Ones,
+ validators=[ValidateSpatial(0)],
+ projection_conversion="unitary",
+ display_field = False)
+add_field("CellsPerBin", function=_Ones, validators=[ValidateSpatial(0)],
+ display_field = False)
+
+def _SoundSpeed(field, data):
+ return ( data.pf["Gamma"]*data["Pressure"] / \
+ data["Density"] )**(1.0/2.0)
+add_field("SoundSpeed", function=_SoundSpeed,
+ units=r"\rm{cm}/\rm{s}")
+
+def particle_func(p_field):
+ def _Particles(field, data):
+ if not data.NumberOfParticles > 0:
+ return na.array([], dtype='float64')
+ try:
+ return data._read_data(p_field).astype('float64')
+ except data._read_exception:
+ pass
+ # This is bad. But it's the best idea I have right now.
+ return data._read_data(p_field.replace("_"," ")).astype('float64')
+ return _Particles
+for pf in ["index", "type", "mass"] + \
+ ["velocity_%s" % ax for ax in 'xyz'] + \
+ ["position_%s" % ax for ax in 'xyz']:
+ pfunc = particle_func("particle_%s" % (pf))
+ add_field("particle_%s" % pf, function=pfunc,
+ validators = [ValidateSpatial(0)],
+ particle_type=True)
+for pf in ["creation_time", "dynamical_time", "metallicity_fraction"]:
+ pfunc = particle_func(pf)
+ add_field(pf, function=pfunc,
+ validators = [ValidateSpatial(0),
+ ValidateDataField(pf)],
+ particle_type=True)
+add_field("particle mass", function=particle_func("particle_mass"),
+ validators=[ValidateSpatial(0)], particle_type=True)
+
+add_field("Dark matter density", function=lambda a,b: None,
+ validators=[ValidateDataField("Dark matter density"),
+ ValidateSpatial(0)],
+ not_in_all = True)
+
+def _ParticleMass(field, data):
+ particles = data["particle_mass"].astype('float64') * \
+ just_one(data["CellVolumeCode"].ravel())
+ # Note that we mandate grid-type here, so this is okay
+ return particles
+def _convertParticleMass(data):
+ return data.convert("Density")*(data.convert("cm")**3.0)
+def _convertParticleMassMsun(data):
+ return data.convert("Density")*((data.convert("cm")**3.0)/1.989e33)
+add_field("ParticleMass",
+ function=_ParticleMass, validators=[ValidateSpatial(0)],
+ particle_type=True, convert_function=_convertParticleMass)
+add_field("ParticleMassMsun",
+ function=_ParticleMass, validators=[ValidateSpatial(0)],
+ particle_type=True, convert_function=_convertParticleMassMsun)
+
+def _MachNumber(field, data):
+ """M{|v|/t_sound}"""
+ return data["VelocityMagnitude"] / data["SoundSpeed"]
+add_field("MachNumber", function=_MachNumber)
+
+def _CourantTimeStep(field, data):
+ t1 = data['dx'] / (
+ data["SoundSpeed"] + \
+ abs(data["x-velocity"]))
+ t2 = data['dy'] / (
+ data["SoundSpeed"] + \
+ abs(data["y-velocity"]))
+ t3 = data['dz'] / (
+ data["SoundSpeed"] + \
+ abs(data["z-velocity"]))
+ return na.minimum(na.minimum(t1,t2),t3)
+def _convertCourantTimeStep(data):
+ # SoundSpeed and z-velocity are in cm/s, dx is in code
+ return data.convert("cm")
+add_field("CourantTimeStep", function=_CourantTimeStep,
+ convert_function=_convertCourantTimeStep,
+ units=r"$\rm{s}$")
+
+def _VelocityMagnitude(field, data):
+ """M{|v|}"""
+ bulk_velocity = data.get_field_parameter("bulk_velocity")
+ if bulk_velocity == None:
+ bulk_velocity = na.zeros(3)
+ return ( (data["x-velocity"]-bulk_velocity[0])**2.0 + \
+ (data["y-velocity"]-bulk_velocity[1])**2.0 + \
+ (data["z-velocity"]-bulk_velocity[2])**2.0 )**(1.0/2.0)
+add_field("VelocityMagnitude", function=_VelocityMagnitude,
+ take_log=False, units=r"\rm{cm}/\rm{s}")
+
+def _TangentialOverVelocityMagnitude(field, data):
+ return na.abs(data["TangentialVelocity"])/na.abs(data["VelocityMagnitude"])
+add_field("TangentialOverVelocityMagnitude",
+ function=_TangentialOverVelocityMagnitude,
+ take_log=False)
+
+def _TangentialVelocity(field, data):
+ return na.sqrt(data["VelocityMagnitude"]**2.0
+ - data["RadialVelocity"]**2.0)
+add_field("TangentialVelocity",
+ function=_TangentialVelocity,
+ take_log=False, units=r"\rm{cm}/\rm{s}")
+
+def _Pressure(field, data):
+ """M{(Gamma-1.0)*rho*E}"""
+ return (data.pf["Gamma"] - 1.0) * \
+ data["Density"] * data["ThermalEnergy"]
+add_field("Pressure", function=_Pressure, units=r"\rm{dyne}/\rm{cm}^{2}")
+
+def _Entropy(field, data):
+ return data["Density"]**(-2./3.) * \
+ data["Temperature"]
+add_field("Entropy", function=_Entropy, units="WhoKnows")
+
+def _Height(field, data):
+    # We take the dot product of the radius vector with the height-vector:
+    # each cell's offset from the "center" parameter is projected onto the
+    # normalized "height_vector" parameter; the absolute value is returned.
+    center = data.get_field_parameter("center")
+    r_vec = na.array([data["x"] - center[0],
+                      data["y"] - center[1],
+                      data["z"] - center[2]])
+    h_vec = na.array(data.get_field_parameter("height_vector"))
+    # Normalize the height vector so the dot product is a length.
+    h_vec = h_vec / na.sqrt(h_vec[0]**2.0+
+                            h_vec[1]**2.0+
+                            h_vec[2]**2.0)
+    height = r_vec[0,:] * h_vec[0] \
+           + r_vec[1,:] * h_vec[1] \
+           + r_vec[2,:] * h_vec[2]
+    return na.abs(height)
+def _convertHeight(data):
+    # Code length -> centimeters.
+    return data.convert("cm")
+def _convertHeightAU(data):
+    # Code length -> astronomical units.
+    return data.convert("au")
+add_field("Height", function=_Height,
+          convert_function=_convertHeight,
+          validators=[ValidateParameter("height_vector")],
+          units=r"cm", display_field=False)
+add_field("HeightAU", function=_Height,
+          convert_function=_convertHeightAU,
+          validators=[ValidateParameter("height_vector")],
+          units=r"AU", display_field=False)
+
+def _DiskAngle(field, data):
+    # Angle (radians) between each cell's radius vector and the
+    # "height_vector" parameter.  We make both r_vec and h_vec into unit
+    # vectors so arccos of their dot product gives the angle.
+    center = data.get_field_parameter("center")
+    r_vec = na.array([data["x"] - center[0],
+                      data["y"] - center[1],
+                      data["z"] - center[2]])
+    r_vec = r_vec/na.sqrt((r_vec**2.0).sum(axis=0))
+    h_vec = na.array(data.get_field_parameter("height_vector"))
+    dp = r_vec[0,:] * h_vec[0] \
+       + r_vec[1,:] * h_vec[1] \
+       + r_vec[2,:] * h_vec[2]
+    # NOTE(review): unlike _Height, h_vec is NOT normalized here, and dp
+    # is not clipped to [-1, 1]; roundoff or an unnormalized parameter
+    # could make arccos return NaN -- TODO confirm with callers.
+    return na.arccos(dp)
+add_field("DiskAngle", function=_DiskAngle,
+          take_log=False,
+          validators=[ValidateParameter("height_vector"),
+                      ValidateParameter("center")],
+          display_field=False)
+
+def _DynamicalTime(field, data):
+    """
+    The formulation for the dynamical time is:
+    M{sqrt(3pi/(16*G*rho))} or M{sqrt(3pi/(16G))*rho^-(1/2)}
+    Note that we return in our natural units already
+    """
+    return data["Density"]**(-1./2.)
+def _ConvertDynamicalTime(data):
+    # Fold the sqrt(3*pi/(16*G)) prefactor and the code-time unit into
+    # the conversion so the field function stays a pure power of rho.
+    G = data.pf["GravitationalConstant"]
+    t_dyn_coeff = (3*pi/(16*G))**0.5 \
+                * data.convert("Time")
+    return t_dyn_coeff
+add_field("DynamicalTime", function=_DynamicalTime,
+          units=r"\rm{s}",
+          convert_function=_ConvertDynamicalTime)
+
+def JeansMassMsun(field,data):
+    # Jeans mass in solar masses: MJ_constant * (T/mu)^(3/2) * rho^(-1/2).
+    # NOTE(review): relies on a module-level MJ_constant -- TODO confirm it
+    # is defined at import time; _JeansMassMsun further down computes the
+    # constant locally and re-registers this same field name, overriding
+    # this definition.
+    return (MJ_constant *
+            ((data["Temperature"]/data["MeanMolecularWeight"])**(1.5)) *
+            (data["Density"]**(-0.5)))
+add_field("JeansMassMsun",function=JeansMassMsun,units=r"\rm{Msun}")
+
+def _CellMass(field, data):
+    # Gas mass per cell: density times cell volume.
+    return data["Density"] * data["CellVolume"]
+def _convertCellMassMsun(data):
+    # 1 / 1.989e33: grams -> solar masses.
+    return 5.027854e-34 # g^-1
+add_field("CellMass", function=_CellMass, units=r"\rm{g}")
+add_field("CellMassMsun", units=r"M_{\odot}",
+          function=_CellMass,
+          convert_function=_convertCellMassMsun)
+
+def _CellMassCode(field, data):
+    # Cell mass in code units: code density times code volume.
+    return data["Density"] * data["CellVolumeCode"]
+def _convertCellMassCode(data):
+    # Undo the CGS density conversion to keep the result in code units.
+    return 1.0/data.convert("Density")
+add_field("CellMassCode",
+          function=_CellMassCode,
+          convert_function=_convertCellMassCode)
+
+def _CellVolume(field, data):
+ if data['dx'].size == 1:
+ try:
+ return data['dx']*data['dy']*data['dx']*\
+ na.ones(data.ActiveDimensions, dtype='float64')
+ except AttributeError:
+ return data['dx']*data['dy']*data['dx']
+ return data["dx"]*data["dy"]*data["dz"]
+def _ConvertCellVolumeMpc(data):
+ return data.convert("mpc")**3.0
+def _ConvertCellVolumeCGS(data):
+ return data.convert("cm")**3.0
+add_field("CellVolumeCode", units=r"\rm{BoxVolume}^3",
+ function=_CellVolume)
+add_field("CellVolumeMpc", units=r"\rm{Mpc}^3",
+ function=_CellVolume,
+ convert_function=_ConvertCellVolumeMpc)
+add_field("CellVolume", units=r"\rm{cm}^3",
+ function=_CellVolume,
+ convert_function=_ConvertCellVolumeCGS)
+
+def _XRayEmissivity(field, data):
+    # Emissivity proxy rho^2 * T^(1/2); cast to float64 to avoid
+    # overflow when squaring single-precision densities.
+    return ((data["Density"].astype('float64')**2.0) \
+            *data["Temperature"]**0.5)
+def _convertXRayEmissivity(data):
+    return 2.168e60
+add_field("XRayEmissivity", function=_XRayEmissivity,
+          convert_function=_convertXRayEmissivity,
+          projection_conversion="1")
+
+def _SZKinetic(field, data):
+    # Kinetic Sunyaev-Zel'dovich integrand along the projection axis
+    # given by the 'axis' field parameter (0=x, 1=y, 2=z).
+    vel_axis = data.get_field_parameter('axis')
+    if vel_axis > 2:
+        raise NeedsParameter(['axis'])
+    vel = data["%s-velocity" % ({0:'x',1:'y',2:'z'}[vel_axis])]
+    return (vel*data["Density"])
+def _convertSZKinetic(data):
+    # sigma_T / (m_H c), scaled by 0.88 (presumably the electron
+    # fraction per hydrogen mass -- confirm).
+    return 0.88*((sigma_thompson/mh)/clight)
+add_field("SZKinetic", function=_SZKinetic,
+          convert_function=_convertSZKinetic,
+          validators=[ValidateParameter('axis')])
+
+def _SZY(field, data):
+    # Thermal SZ Compton-y integrand: rho*T, with constants folded into
+    # the conversion below.
+    return (data["Density"]*data["Temperature"])
+def _convertSZY(data):
+    conv = (0.88/mh) * (kboltz)/(me * clight*clight) * sigma_thompson
+    return conv
+add_field("SZY", function=_SZY, convert_function=_convertSZY)
+
+def _AveragedDensity(field, data):
+    # Mass-weighted average of Density over each cell's 3x3x3 neighborhood.
+    # Requires one ghost zone (ValidateSpatial(1) below); the outermost
+    # layer of the returned array is left zero.
+    nx, ny, nz = data["Density"].shape
+    new_field = na.zeros((nx-2,ny-2,nz-2), dtype='float64')
+    weight_field = na.zeros((nx-2,ny-2,nz-2), dtype='float64')
+    # Enumerate the 27 stencil offsets.
+    i_i, j_i, k_i = na.mgrid[0:3,0:3,0:3]
+    for i,j,k in zip(i_i.ravel(),j_i.ravel(),k_i.ravel()):
+        sl = [slice(i,nx-(2-i)),slice(j,ny-(2-j)),slice(k,nz-(2-k))]
+        new_field += data["Density"][sl] * data["CellMass"][sl]
+        weight_field += data["CellMass"][sl]
+    # Now some fancy footwork: re-embed the interior average into a
+    # full-size array so the shape matches the input grid.
+    new_field2 = na.zeros((nx,ny,nz))
+    new_field2[1:-1,1:-1,1:-1] = new_field/weight_field
+    return new_field2
+add_field("AveragedDensity",
+          function=_AveragedDensity,
+          validators=[ValidateSpatial(1)])
+
+def _DivV(field, data):
+    # Velocity divergence by finite differences; requires one ghost zone.
+    # We need to set up stencils: for HydroMethod == 2 a one-cell offset
+    # divided by dx is used (presumably because that solver stores
+    # face-centered velocities -- confirm); otherwise a centered
+    # difference over 2*dx.
+    if data.pf["HydroMethod"] == 2:
+        sl_left = slice(None,-2,None)
+        sl_right = slice(1,-1,None)
+        div_fac = 1.0
+    else:
+        sl_left = slice(None,-2,None)
+        sl_right = slice(2,None,None)
+        div_fac = 2.0
+    div_x = (data["x-velocity"][sl_right,1:-1,1:-1] -
+             data["x-velocity"][sl_left,1:-1,1:-1]) \
+          / (div_fac*data["dx"].flat[0])
+    div_y = (data["y-velocity"][1:-1,sl_right,1:-1] -
+             data["y-velocity"][1:-1,sl_left,1:-1]) \
+          / (div_fac*data["dy"].flat[0])
+    div_z = (data["z-velocity"][1:-1,1:-1,sl_right] -
+             data["z-velocity"][1:-1,1:-1,sl_left]) \
+          / (div_fac*data["dz"].flat[0])
+    new_field = na.zeros(data["x-velocity"].shape)
+    new_field[1:-1,1:-1,1:-1] = div_x+div_y+div_z
+    # NOTE(review): returns the magnitude |div v|, not the signed value.
+    return na.abs(new_field)
+def _convertDivV(data):
+    # Divergence scales as 1/length: code units -> 1/cm (1/s with cm/s v).
+    return data.convert("cm")**-1.0
+add_field("DivV", function=_DivV,
+          validators=[ValidateSpatial(1,
+                      ["x-velocity","y-velocity","z-velocity"])],
+          units=r"\rm{s}^{-1}",
+          convert_function=_convertDivV)
+
+def _Contours(field, data):
+    # Scratch field initialized to -1 everywhere; presumably filled in
+    # later by contour identification -- confirm with callers.
+    return na.ones(data["Density"].shape)*-1
+add_field("Contours", validators=[ValidateSpatial(0)], take_log=False,
+          display_field=False, function=_Contours)
+add_field("tempContours", function=_Contours, validators=[ValidateSpatial(0)],
+          take_log=False, display_field=False)
+
+def _SpecificAngularMomentum(field, data):
+    """
+    Calculate the specific angular momentum (r x v) of each cell about
+    the "center" field parameter.  Returns a vector for each cell.
+    """
+    if data.has_field_parameter("bulk_velocity"):
+        bv = data.get_field_parameter("bulk_velocity")
+    else: bv = na.zeros(3, dtype='float64')
+    xv = data["x-velocity"] - bv[0]
+    yv = data["y-velocity"] - bv[1]
+    zv = data["z-velocity"] - bv[2]
+    center = data.get_field_parameter('center')
+    coords = na.array([data['x'],data['y'],data['z']], dtype='float64')
+    # Broadcast the center against however many trailing axes coords has.
+    new_shape = tuple([3] + [1]*(len(coords.shape)-1))
+    r_vec = coords - na.reshape(center,new_shape)
+    v_vec = na.array([xv,yv,zv], dtype='float64')
+    return na.cross(r_vec, v_vec, axis=0)
+def _convertSpecificAngularMomentum(data):
+    # r is in code units; converting to cm gives cm^2/s with cm/s velocities.
+    return data.convert("cm")
+add_field("SpecificAngularMomentum",
+          function=_SpecificAngularMomentum,
+          convert_function=_convertSpecificAngularMomentum, vector_field=True,
+          units=r"\rm{cm}^2/\rm{s}", validators=[ValidateParameter('center')])
+def _convertSpecificAngularMomentumKMSMPC(data):
+    # Length to Mpc, velocity from cm/s to km/s (the 1e5).
+    return data.convert("mpc")/1e5
+add_field("SpecificAngularMomentumKMSMPC",
+          function=_SpecificAngularMomentum,
+          convert_function=_convertSpecificAngularMomentumKMSMPC, vector_field=True,
+          units=r"\rm{km}\rm{Mpc}/\rm{s}", validators=[ValidateParameter('center')])
+def _AngularMomentum(field, data):
+ return data["CellMass"] * data["SpecificAngularMomentum"]
+add_field("AngularMomentum", function=_AngularMomentum,
+ units=r"\rm{g}\/\rm{cm}^2/\rm{s}")
+def _AngularMomentumMSUNKMSMPC(field, data):
+ return data["CellMassMsun"] * data["SpecificAngularMomentumKMSMPC"]
+add_field("AngularMomentumMSUNKMSMPC", function=_AngularMomentum,
+ units=r"M_{\odot}\rm{km}\rm{Mpc}/\rm{s}")
+
+def _ParticleSpecificAngularMomentum(field, data):
+ """
+ Calculate the angular of a particle velocity. Returns a vector for each
+ particle.
+ """
+ if data.has_field_parameter("bulk_velocity"):
+ bv = data.get_field_parameter("bulk_velocity")
+ else: bv = na.zeros(3, dtype='float64')
+ xv = data["particle_velocity_x"] - bv[0]
+ yv = data["particle_velocity_y"] - bv[1]
+ zv = data["particle_velocity_z"] - bv[2]
+ center = data.get_field_parameter('center')
+ coords = na.array([data['particle_position_x'],
+ data['particle_position_y'],
+ data['particle_position_z']], dtype='float64')
+ new_shape = tuple([3] + [1]*(len(coords.shape)-1))
+ r_vec = coords - na.reshape(center,new_shape)
+ v_vec = na.array([xv,yv,zv], dtype='float64')
+ return na.cross(r_vec, v_vec, axis=0)
+add_field("ParticleSpecificAngularMomentum",
+ function=_ParticleSpecificAngularMomentum,
+ convert_function=_convertSpecificAngularMomentum, vector_field=True,
+ units=r"\rm{cm}^2/\rm{s}", validators=[ValidateParameter('center')])
+def _convertSpecificAngularMomentumKMSMPC(data):
+ return data.convert("mpc")/1e5
+add_field("ParticleSpecificAngularMomentumKMSMPC",
+ function=_ParticleSpecificAngularMomentum,
+ convert_function=_convertSpecificAngularMomentumKMSMPC, vector_field=True,
+ units=r"\rm{km}\rm{Mpc}/\rm{s}", validators=[ValidateParameter('center')])
+def _ParticleAngularMomentum(field, data):
+    # Particle angular momentum (CGS): mass times specific ang. momentum.
+    return data["ParticleMass"] * data["ParticleSpecificAngularMomentum"]
+add_field("ParticleAngularMomentum",
+          function=_ParticleAngularMomentum, units=r"\rm{g}\/\rm{cm}^2/\rm{s}")
+def _ParticleAngularMomentumMSUNKMSMPC(field, data):
+    # NOTE(review): uses ParticleMass here while the cell-based analogue
+    # uses CellMassMsun -- the M_sun in the units label may be off by the
+    # grams-per-Msun factor; confirm what units ParticleMass carries.
+    return data["ParticleMass"] * data["ParticleSpecificAngularMomentumKMSMPC"]
+add_field("ParticleAngularMomentumMSUNKMSMPC",
+          function=_ParticleAngularMomentumMSUNKMSMPC,
+          units=r"M_{\odot}\rm{km}\rm{Mpc}/\rm{s}")
+
+
+def _ParticleRadius(field, data):
+    # Distance of each particle from the "center" field parameter.
+    center = data.get_field_parameter("center")
+    radius = na.sqrt((data["particle_position_x"] - center[0])**2.0 +
+                     (data["particle_position_y"] - center[1])**2.0 +
+                     (data["particle_position_z"] - center[2])**2.0)
+    return radius
+def _Radius(field, data):
+    # Distance of each cell from the "center" field parameter.
+    center = data.get_field_parameter("center")
+    radius = na.sqrt((data["x"] - center[0])**2.0 +
+                     (data["y"] - center[1])**2.0 +
+                     (data["z"] - center[2])**2.0)
+    return radius
+def _ConvertRadiusCGS(data):
+    return data.convert("cm")
+add_field("ParticleRadius", function=_ParticleRadius,
+          validators=[ValidateParameter("center")],
+          convert_function = _ConvertRadiusCGS, units=r"\rm{cm}",
+          particle_type = True)
+add_field("Radius", function=_Radius,
+          validators=[ValidateParameter("center")],
+          convert_function = _ConvertRadiusCGS, units=r"\rm{cm}")
+
+def _ConvertRadiusMpc(data):
+    return data.convert("mpc")
+add_field("RadiusMpc", function=_Radius,
+          validators=[ValidateParameter("center")],
+          convert_function = _ConvertRadiusMpc, units=r"\rm{Mpc}")
+add_field("ParticleRadiusMpc", function=_ParticleRadius,
+          validators=[ValidateParameter("center")],
+          convert_function = _ConvertRadiusMpc, units=r"\rm{Mpc}",
+          particle_type=True)
+
+def _ConvertRadiuskpc(data):
+    return data.convert("kpc")
+add_field("ParticleRadiuskpc", function=_ParticleRadius,
+          validators=[ValidateParameter("center")],
+          convert_function = _ConvertRadiuskpc, units=r"\rm{kpc}",
+          particle_type=True)
+add_field("Radiuskpc", function=_Radius,
+          validators=[ValidateParameter("center")],
+          convert_function = _ConvertRadiuskpc, units=r"\rm{kpc}")
+
+def _ConvertRadiuskpch(data):
+ return data.convert("kpch")
+add_field("ParticleRadiuskpch", function=_ParticleRadius,
+ validators=[ValidateParameter("center")],
+ convert_function = _ConvertRadiuskpc, units=r"\rm{kpc}/\rm{h}",
+ particle_type=True)
+add_field("Radiuskpch", function=_Radius,
+ validators=[ValidateParameter("center")],
+ convert_function = _ConvertRadiuskpc, units=r"\rm{kpc}/\rm{h}")
+
+def _ConvertRadiuspc(data):
+    return data.convert("pc")
+add_field("ParticleRadiuspc", function=_ParticleRadius,
+          validators=[ValidateParameter("center")],
+          convert_function = _ConvertRadiuspc, units=r"\rm{pc}",
+          particle_type=True)
+add_field("Radiuspc", function=_Radius,
+          validators=[ValidateParameter("center")],
+          convert_function = _ConvertRadiuspc, units=r"\rm{pc}")
+
+def _ConvertRadiusAU(data):
+    return data.convert("au")
+add_field("ParticleRadiusAU", function=_ParticleRadius,
+          validators=[ValidateParameter("center")],
+          convert_function = _ConvertRadiusAU, units=r"\rm{AU}",
+          particle_type=True)
+add_field("RadiusAU", function=_Radius,
+          validators=[ValidateParameter("center")],
+          convert_function = _ConvertRadiusAU, units=r"\rm{AU}")
+
+# Code-unit radii: no convert_function, so values stay in code units.
+add_field("ParticleRadiusCode", function=_ParticleRadius,
+          validators=[ValidateParameter("center")],
+          particle_type=True)
+add_field("RadiusCode", function=_Radius,
+          validators=[ValidateParameter("center")])
+
+def _RadialVelocity(field, data):
+ center = data.get_field_parameter("center")
+ bulk_velocity = data.get_field_parameter("bulk_velocity")
+ if bulk_velocity == None:
+ bulk_velocity = na.zeros(3)
+ new_field = ( (data['x']-center[0])*(data["x-velocity"]-bulk_velocity[0])
+ + (data['y']-center[1])*(data["y-velocity"]-bulk_velocity[1])
+ + (data['z']-center[2])*(data["z-velocity"]-bulk_velocity[2])
+ )/data["RadiusCode"]
+ return new_field
+def _RadialVelocityABS(field, data):
+ return na.abs(_RadialVelocity(field, data))
+def _ConvertRadialVelocityKMS(data):
+ return 1e-5
+add_field("RadialVelocity", function=_RadialVelocity,
+ units=r"\rm{cm}/\rm{s}",
+ validators=[ValidateParameter("center"),
+ ValidateParameter("bulk_velocity")])
+add_field("RadialVelocityABS", function=_RadialVelocityABS,
+ units=r"\rm{cm}/\rm{s}",
+ validators=[ValidateParameter("center"),
+ ValidateParameter("bulk_velocity")])
+add_field("RadialVelocityKMS", function=_RadialVelocity,
+ convert_function=_ConvertRadialVelocityKMS, units=r"\rm{km}/\rm{s}",
+ validators=[ValidateParameter("center"),
+ ValidateParameter("bulk_velocity")])
+
+def _CuttingPlaneVelocityX(field, data):
+ x_vec, y_vec, z_vec = [data.get_field_parameter("cp_%s_vec" % (ax))
+ for ax in 'xyz']
+ bulk_velocity = data.get_field_parameter("bulk_velocity")
+ if bulk_velocity == None:
+ bulk_velocity = na.zeros(3)
+ v_vec = na.array([data["%s-velocity" % ax] for ax in 'xyz']) \
+ - bulk_velocity[...,na.newaxis]
+ return na.dot(x_vec, v_vec)
+add_field("CuttingPlaneVelocityX",
+ function=_CuttingPlaneVelocityX,
+ validators=[ValidateParameter("cp_%s_vec" % ax)
+ for ax in 'xyz'], units=r"\rm{km}/\rm{s}")
+def _CuttingPlaneVelocityY(field, data):
+ x_vec, y_vec, z_vec = [data.get_field_parameter("cp_%s_vec" % (ax))
+ for ax in 'xyz']
+ bulk_velocity = data.get_field_parameter("bulk_velocity")
+ if bulk_velocity == None:
+ bulk_velocity = na.zeros(3)
+ v_vec = na.array([data["%s-velocity" % ax] for ax in 'xyz']) \
+ - bulk_velocity[...,na.newaxis]
+ return na.dot(y_vec, v_vec)
+add_field("CuttingPlaneVelocityY",
+ function=_CuttingPlaneVelocityY,
+ validators=[ValidateParameter("cp_%s_vec" % ax)
+ for ax in 'xyz'], units=r"\rm{km}/\rm{s}")
+
+def _MeanMolecularWeight(field,data):
+    # mu = rho / (m_H * n): dimensionless mean molecular weight from the
+    # mass density and the (separately derived) NumberDensity field.
+    return (data["Density"] / (mh *data["NumberDensity"]))
+add_field("MeanMolecularWeight",function=_MeanMolecularWeight,units=r"")
+
+def _JeansMassMsun(field,data):
+    # Jeans mass in solar masses; the / 1.989e33 divides out grams per
+    # Msun.  NOTE(review): this add_field call re-registers
+    # "JeansMassMsun" and thus overrides the earlier definition above
+    # that relied on a module-level MJ_constant.
+    MJ_constant = (((5*kboltz)/(G*mh))**(1.5)) * \
+    (3/(4*3.1415926535897931))**(0.5) / 1.989e33
+
+    return (MJ_constant *
+            ((data["Temperature"]/data["MeanMolecularWeight"])**(1.5)) *
+            (data["Density"]**(-0.5)))
+add_field("JeansMassMsun",function=_JeansMassMsun,
+          units=r"\rm{M_{\odot}}")
+
+#def _convertMomentum(data):
+# return data.convert("x-velocity")*data.convert("Density")*1e5 # want this in cm/s not km/s
+#for ax in ['x','y','z']:
+# f = fieldInfo["%s-momentum" % ax]
+# f._units = r"\rm{erg\ s}/\rm{cm^3}"
+# f._convert_function = _convertMomentum
+# f.take_log = False
+#
+#fieldInfo["Temperature"].units = r"K"
+#
+#if __name__ == "__main__":
+# k = fieldInfo.keys()
+# k.sort()
+# for f in k:
+# e = FieldDetector()
+# fieldInfo[f](e)
+# print f + ":", ", ".join(e.requested)
Modified: branches/yt-non-3d/yt/lagos/__init__.py
==============================================================================
--- branches/yt-non-3d/yt/lagos/__init__.py (original)
+++ branches/yt-non-3d/yt/lagos/__init__.py Sat Nov 15 14:20:57 2008
@@ -77,8 +77,14 @@
import PointCombine
import HDF5LightReader
from EnzoDefs import *
-from DerivedFields import *
+
from ParallelTools import *
+# Now our fields
+#from DerivedFields import *
+from FieldInfoContainer import *
+from UniversalFields import *
+from EnzoFields import *
+
from DerivedQuantities import DerivedQuantityCollection, GridChildMaskWrapper
from DataReadingFuncs import *
from ClusterFiles import *
Modified: branches/yt-non-3d/yt/mods.py
==============================================================================
--- branches/yt-non-3d/yt/mods.py (original)
+++ branches/yt-non-3d/yt/mods.py Sat Nov 15 14:20:57 2008
@@ -40,7 +40,7 @@
# Now individual component imports from lagos
from yt.lagos import EnzoStaticOutput, \
BinnedProfile1D, BinnedProfile2D, BinnedProfile3D, \
- add_field, fieldInfo, \
+ add_field, \
Clump, write_clump_hierarchy, find_clumps, write_clumps
# Now individual component imports from raven
More information about the yt-svn
mailing list