[yt-svn] commit/yt: ngoldbaum: Merged in jzuhone/yt (pull request #1879)
commits-noreply at bitbucket.org
commits-noreply at bitbucket.org
Mon Nov 23 11:23:38 PST 2015
1 new commit in yt:
https://bitbucket.org/yt_analysis/yt/commits/cd3c48fde0b2/
Changeset: cd3c48fde0b2
Branch: yt
User: ngoldbaum
Date: 2015-11-23 19:23:28+00:00
Summary: Merged in jzuhone/yt (pull request #1879)
[bugfix] field_units kwarg to load_amr_grids doesn't do anything
Affected #: 5 files
diff -r 2aee7c831d41161f3f9420a676df3f4926994a4f -r cd3c48fde0b29dd566bfebe749215d9071df8791 doc/source/examining/Loading_Generic_Array_Data.ipynb
--- a/doc/source/examining/Loading_Generic_Array_Data.ipynb
+++ b/doc/source/examining/Loading_Generic_Array_Data.ipynb
@@ -1,7 +1,7 @@
{
"metadata": {
"name": "",
- "signature": "sha256:0d8d5fd49877ae68c53b6efec37e2c41a66935f70e5bb77065fe55fa9e82309b"
+ "signature": "sha256:5a62a9f151e691e242c1f5043e9211e166d70fd35a83f61278083c361fb07f12"
},
"nbformat": 3,
"nbformat_minor": 0,
@@ -558,7 +558,8 @@
"cell_type": "code",
"collapsed": false,
"input": [
- "for g in grid_data: g[\"density\"] = np.random.random(g[\"dimensions\"]) * 2**g[\"level\"]"
+ "for g in grid_data: \n",
+ " g[\"density\"] = (np.random.random(g[\"dimensions\"]) * 2**g[\"level\"], \"g/cm**3\")"
],
"language": "python",
"metadata": {},
@@ -577,33 +578,13 @@
"collapsed": false,
"input": [
"grid_data[0][\"number_of_particles\"] = 0 # Set no particles in the top-level grid\n",
- "grid_data[0][\"particle_position_x\"] = np.array([]) # No particles, so set empty arrays\n",
- "grid_data[0][\"particle_position_y\"] = np.array([])\n",
- "grid_data[0][\"particle_position_z\"] = np.array([])\n",
+ "grid_data[0][\"particle_position_x\"] = (np.array([]), \"code_length\") # No particles, so set empty arrays\n",
+ "grid_data[0][\"particle_position_y\"] = (np.array([]), \"code_length\")\n",
+ "grid_data[0][\"particle_position_z\"] = (np.array([]), \"code_length\")\n",
"grid_data[1][\"number_of_particles\"] = 1000\n",
- "grid_data[1][\"particle_position_x\"] = np.random.uniform(low=0.25, high=0.75, size=1000)\n",
- "grid_data[1][\"particle_position_y\"] = np.random.uniform(low=0.25, high=0.75, size=1000)\n",
- "grid_data[1][\"particle_position_z\"] = np.random.uniform(low=0.25, high=0.75, size=1000)"
- ],
- "language": "python",
- "metadata": {},
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "We need to specify the field units in a `field_units` dict:"
- ]
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "field_units = {\"density\":\"code_mass/code_length**3\",\n",
- " \"particle_position_x\":\"code_length\",\n",
- " \"particle_position_y\":\"code_length\",\n",
- " \"particle_position_z\":\"code_length\",}"
+ "grid_data[1][\"particle_position_x\"] = (np.random.uniform(low=0.25, high=0.75, size=1000), \"code_length\")\n",
+ "grid_data[1][\"particle_position_y\"] = (np.random.uniform(low=0.25, high=0.75, size=1000), \"code_length\")\n",
+ "grid_data[1][\"particle_position_z\"] = (np.random.uniform(low=0.25, high=0.75, size=1000), \"code_length\")"
],
"language": "python",
"metadata": {},
@@ -620,7 +601,7 @@
"cell_type": "code",
"collapsed": false,
"input": [
- "ds = yt.load_amr_grids(grid_data, [32, 32, 32], field_units=field_units)"
+ "ds = yt.load_amr_grids(grid_data, [32, 32, 32])"
],
"language": "python",
"metadata": {},
@@ -669,4 +650,4 @@
"metadata": {}
}
]
-}
+}
\ No newline at end of file
diff -r 2aee7c831d41161f3f9420a676df3f4926994a4f -r cd3c48fde0b29dd566bfebe749215d9071df8791 yt/frontends/stream/data_structures.py
--- a/yt/frontends/stream/data_structures.py
+++ b/yt/frontends/stream/data_structures.py
@@ -367,9 +367,10 @@
return fields
def update_field_names(data):
- orig_names = data.keys()
+ orig_names = list(data.keys())
for k in orig_names:
- if isinstance(k, tuple): continue
+ if isinstance(k, tuple):
+ continue
s = getattr(data[k], "shape", ())
if len(s) == 1:
field = ("io", k)
@@ -381,22 +382,18 @@
raise NotImplementedError
data[field] = data.pop(k)
-def set_particle_types(data) :
-
+def set_particle_types(data):
particle_types = {}
-
- for key in data.keys() :
-
- if key == "number_of_particles": continue
-
+ for key in data.keys():
+ if key == "number_of_particles":
+ continue
if len(data[key].shape) == 1:
particle_types[key] = True
- else :
+ else:
particle_types[key] = False
-
return particle_types
-def assign_particle_data(ds, pdata) :
+def assign_particle_data(ds, pdata):
"""
Assign particle data to the grids using MatchPointsToGrids. This
@@ -519,11 +516,12 @@
data = new_data
return field_units, data
+
def load_uniform_grid(data, domain_dimensions, length_unit=None, bbox=None,
nprocs=1, sim_time=0.0, mass_unit=None, time_unit=None,
velocity_unit=None, magnetic_unit=None,
periodicity=(True, True, True),
- geometry = "cartesian"):
+ geometry="cartesian"):
r"""Load a uniform grid of data into yt as a
:class:`~yt.frontends.stream.data_structures.StreamHandler`.
@@ -538,7 +536,7 @@
Particle fields are detected as one-dimensional fields. The number of
particles is set by the "number_of_particles" key in data.
-
+
Parameters
----------
data : dict
@@ -580,19 +578,19 @@
>>> bbox = np.array([[0., 1.0], [-1.5, 1.5], [1.0, 2.5]])
>>> arr = np.random.random((128, 128, 128))
- >>> data = dict(density = arr)
+ >>> data = dict(density=arr)
>>> ds = load_uniform_grid(data, arr.shape, length_unit='cm',
- bbox=bbox, nprocs=12)
+ ... bbox=bbox, nprocs=12)
>>> dd = ds.all_data()
- >>> dd['Density']
+ >>> dd['density']
#FIXME
YTArray[123.2856, 123.854, ..., 123.456, 12.42] (code_mass/code_length^3)
- >>> data = dict(density = (arr, 'g/cm**3'))
+ >>> data = dict(density=(arr, 'g/cm**3'))
>>> ds = load_uniform_grid(data, arr.shape, 3.03e24, bbox=bbox, nprocs=12)
>>> dd = ds.all_data()
- >>> dd['Density']
+ >>> dd['density']
#FIXME
YTArray[123.2856, 123.854, ..., 123.456, 12.42] (g/cm**3)
@@ -706,11 +704,12 @@
return sds
+
def load_amr_grids(grid_data, domain_dimensions,
- field_units=None, bbox=None, sim_time=0.0, length_unit=None,
+ bbox=None, sim_time=0.0, length_unit=None,
mass_unit=None, time_unit=None, velocity_unit=None,
magnetic_unit=None, periodicity=(True, True, True),
- geometry = "cartesian", refine_by=2):
+ geometry="cartesian", refine_by=2):
r"""Load a set of grids of data into yt as a
:class:`~yt.frontends.stream.data_structures.StreamHandler`.
This should allow a sequence of grids of varying resolution of data to be
@@ -735,9 +734,6 @@
modified in place and can't be assumed to be static.
domain_dimensions : array_like
This is the domain dimensions of the grid
- field_units : dict
- A dictionary mapping string field names to string unit specifications. The field
- names must correspond to the fields in grid_data.
length_unit : string or float
Unit to use for lengths. Defaults to unitless. If set to be a string, the bbox
dimensions are assumed to be in the corresponding units. If set to a float, the
@@ -785,11 +781,9 @@
... ]
...
>>> for g in grid_data:
- ... g["density"] = np.random.random(g["dimensions"]) * 2**g["level"]
+ ... g["density"] = (np.random.random(g["dimensions"])*2**g["level"], "g/cm**3")
...
- >>> units = dict(density='g/cm**3')
- >>> ds = load_amr_grids(grid_data, [32, 32, 32], field_units=units,
- ... length_unit=1.0)
+ >>> ds = load_amr_grids(grid_data, [32, 32, 32], length_unit=1.0)
"""
domain_dimensions = np.array(domain_dimensions)
@@ -810,10 +804,11 @@
grid_right_edges[i,:] = g.pop("right_edge")
grid_dimensions[i,:] = g.pop("dimensions")
grid_levels[i,:] = g.pop("level")
- if "number_of_particles" in g :
- number_of_particles[i,:] = g.pop("number_of_particles")
- update_field_names(g)
- sfh[i] = g
+ if "number_of_particles" in g:
+ number_of_particles[i,:] = g.pop("number_of_particles")
+ field_units, data = unitify_data(g)
+ update_field_names(data)
+ sfh[i] = data
# We now reconstruct our parent ids, so that our particle assignment can
# proceed.
@@ -828,10 +823,6 @@
for ci in ids:
parent_ids[ci] = gi
- for i, g_data in enumerate(grid_data):
- field_units, data = unitify_data(g_data)
- grid_data[i] = data
-
if length_unit is None:
length_unit = 'code_length'
if mass_unit is None:
@@ -845,7 +836,7 @@
particle_types = {}
- for grid in grid_data:
+ for grid in sfh.values():
particle_types.update(set_particle_types(grid))
handler = StreamHandler(
@@ -859,7 +850,8 @@
sfh,
field_units,
(length_unit, mass_unit, time_unit, velocity_unit, magnetic_unit),
- particle_types=particle_types
+ particle_types=particle_types,
+ periodicity=periodicity
)
handler.name = "AMRGridData"
@@ -871,11 +863,12 @@
handler.simulation_time = sim_time
handler.cosmology_simulation = 0
- sds = StreamDataset(handler, geometry = geometry)
+ sds = StreamDataset(handler, geometry=geometry)
return sds
+
def refine_amr(base_ds, refinement_criteria, fluid_operators, max_level,
- callback = None):
+ callback=None):
r"""Given a base dataset, repeatedly apply refinement criteria and
fluid operators until a maximum level is reached.
@@ -911,9 +904,9 @@
number_of_particles = np.sum([grid.NumberOfParticles
for grid in base_ds.index.grids])
- if number_of_particles > 0 :
+ if number_of_particles > 0:
pdata = {}
- for field in base_ds.field_list :
+ for field in base_ds.field_list:
if not isinstance(field, tuple):
field = ("unknown", field)
fi = base_ds._get_field_info(*field)
@@ -921,12 +914,12 @@
pdata[field] = uconcatenate([grid[field]
for grid in base_ds.index.grids])
pdata["number_of_particles"] = number_of_particles
-
+
last_gc = base_ds.index.num_grids
cur_gc = -1
- ds = base_ds
- bbox = np.array( [ (ds.domain_left_edge[i], ds.domain_right_edge[i])
- for i in range(3) ])
+ ds = base_ds
+ bbox = np.array([(ds.domain_left_edge[i], ds.domain_right_edge[i])
+ for i in range(3)])
while ds.index.max_level < max_level and last_gc != cur_gc:
mylog.info("Refining another level. Current max level: %s",
ds.index.max_level)
@@ -962,9 +955,9 @@
if not fi.particle_type :
gd[field] = grid[field]
grid_data.append(gd)
-
- ds = load_amr_grids(grid_data, ds.domain_dimensions, 1.0,
- bbox = bbox)
+
+ ds = load_amr_grids(grid_data, ds.domain_dimensions, bbox=bbox)
+
if number_of_particles > 0:
if ("io", "particle_position_x") not in pdata:
pdata_ftype = {}
@@ -978,8 +971,6 @@
# We need to reassign the field list here.
cur_gc = ds.index.num_grids
- # Now reassign particle data to grids
-
return ds
class StreamParticleIndex(ParticleIndex):
diff -r 2aee7c831d41161f3f9420a676df3f4926994a4f -r cd3c48fde0b29dd566bfebe749215d9071df8791 yt/frontends/stream/tests/test_stream_particles.py
--- a/yt/frontends/stream/tests/test_stream_particles.py
+++ b/yt/frontends/stream/tests/test_stream_particles.py
@@ -8,7 +8,7 @@
# Field information
-def test_stream_particles() :
+def test_stream_particles():
num_particles = 100000
domain_dims = (64, 64, 64)
dens = np.random.random(domain_dims)
@@ -23,30 +23,28 @@
fo.append(ic.TopHatSphere(0.1, [0.2,0.3,0.4],{"density": 2.0}))
fo.append(ic.TopHatSphere(0.05, [0.7,0.4,0.75],{"density": 20.0}))
rc = [fm.flagging_method_registry["overdensity"](1.0)]
-
+
# Check that all of this runs ok without particles
-
+
ug0 = load_uniform_grid({"density": dens}, domain_dims, 1.0, nprocs=8)
amr0 = refine_amr(ug0, rc, fo, 3)
grid_data = []
-
+
for grid in amr0.index.grids :
-
+
data = dict(left_edge = grid.LeftEdge,
right_edge = grid.RightEdge,
level = grid.Level,
dimensions = grid.ActiveDimensions,
number_of_particles = grid.NumberOfParticles)
-
- for field in amr0.field_list :
-
+
+ for field in amr0.field_list:
data[field] = grid[field]
-
grid_data.append(data)
- amr0 = load_amr_grids(grid_data, domain_dims, 1.0)
-
+ amr0 = load_amr_grids(grid_data, domain_dims)
+
# Now add particles
fields1 = {"density": dens,
@@ -65,12 +63,12 @@
number_of_particles1 = np.sum([grid.NumberOfParticles for grid in ug1.index.grids])
number_of_particles2 = np.sum([grid.NumberOfParticles for grid in ug2.index.grids])
-
+
yield assert_equal, number_of_particles1, num_particles
yield assert_equal, number_of_particles1, number_of_particles2
# Check to make sure the fields have been defined correctly
-
+
for ptype in ("all", "io"):
assert ug1._get_field_info(ptype, "particle_position_x").particle_type
assert ug1._get_field_info(ptype, "particle_position_y").particle_type
@@ -84,45 +82,45 @@
assert ug2._get_field_info(ptype, "particle_position_z").particle_type
assert ug2._get_field_info(ptype, "particle_mass").particle_type
assert not ug2._get_field_info("gas", "density").particle_type
-
+
# Now refine this
amr1 = refine_amr(ug1, rc, fo, 3)
for field in sorted(ug1.field_list):
yield assert_equal, (field in amr1.field_list), True
-
+
grid_data = []
-
- for grid in amr1.index.grids :
-
+
+ for grid in amr1.index.grids:
+
data = dict(left_edge = grid.LeftEdge,
right_edge = grid.RightEdge,
level = grid.Level,
dimensions = grid.ActiveDimensions,
number_of_particles = grid.NumberOfParticles)
- for field in amr1.field_list :
+ for field in amr1.field_list:
data[field] = grid[field]
-
+
grid_data.append(data)
-
- amr2 = load_amr_grids(grid_data, domain_dims, 1.0)
+
+ amr2 = load_amr_grids(grid_data, domain_dims)
# Check everything again
number_of_particles1 = [grid.NumberOfParticles for grid in amr1.index.grids]
number_of_particles2 = [grid.NumberOfParticles for grid in amr2.index.grids]
-
+
yield assert_equal, np.sum(number_of_particles1), num_particles
yield assert_equal, number_of_particles1, number_of_particles2
-
+
assert amr1._get_field_info("all", "particle_position_x").particle_type
assert amr1._get_field_info("all", "particle_position_y").particle_type
assert amr1._get_field_info("all", "particle_position_z").particle_type
assert amr1._get_field_info("all", "particle_mass").particle_type
assert not amr1._get_field_info("gas", "density").particle_type
-
+
assert amr2._get_field_info("all", "particle_position_x").particle_type
assert amr2._get_field_info("all", "particle_position_y").particle_type
assert amr2._get_field_info("all", "particle_position_z").particle_type
diff -r 2aee7c831d41161f3f9420a676df3f4926994a4f -r cd3c48fde0b29dd566bfebe749215d9071df8791 yt/geometry/tests/test_grid_container.py
--- a/yt/geometry/tests/test_grid_container.py
+++ b/yt/geometry/tests/test_grid_container.py
@@ -42,8 +42,8 @@
for grid in grid_data:
grid["density"] = \
- np.random.random(grid["dimensions"]) * 2 ** grid["level"]
- return load_amr_grids(grid_data, [16, 16, 16], 1.0)
+ (np.random.random(grid["dimensions"]) * 2 ** grid["level"], "g/cm**3")
+ return load_amr_grids(grid_data, [16, 16, 16])
def test_grid_tree():
diff -r 2aee7c831d41161f3f9420a676df3f4926994a4f -r cd3c48fde0b29dd566bfebe749215d9071df8791 yt/testing.py
--- a/yt/testing.py
+++ b/yt/testing.py
@@ -231,7 +231,7 @@
gdata[f] = np.random.random(dims)
data.append(gdata)
bbox = np.array([LE, RE]).T
- return load_amr_grids(data, [32, 32, 32], 1.0, geometry=geometry, bbox=bbox)
+ return load_amr_grids(data, [32, 32, 32], geometry=geometry, bbox=bbox)
def fake_particle_ds(
fields = ("particle_position_x",
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you have the commit-notification service enabled
for this repository.
More information about the yt-svn
mailing list is available from the list information page.