[yt-svn] commit/yt: 16 new changesets
Bitbucket
commits-noreply at bitbucket.org
Sat Sep 1 10:21:30 PDT 2012
16 new commits in yt:
https://bitbucket.org/yt_analysis/yt/changeset/83d3ca350ab3/
changeset: 83d3ca350ab3
branch: yt
user: jzuhone
date: 2012-08-08 19:42:49
summary: First pass at implementing ParticleIO for FLASH
affected #: 1 file
diff -r a0d43ccae65d9bfe84532a9fb563ae0ab341c8c8 -r 83d3ca350ab3def502b3aea3526f3e8b14faaf1d yt/frontends/flash/io.py
--- a/yt/frontends/flash/io.py
+++ b/yt/frontends/flash/io.py
@@ -29,6 +29,77 @@
from yt.utilities.io_handler import \
BaseIOHandler
+def particles_validator_region(x, y, z, args) :
+
+ left_edge = args[0]
+ right_edge = args[1]
+ periodic = args[2]
+ DLE = args[3]
+ DRE = args[4]
+
+ xx = x
+ yy = y
+ zz = z
+
+ if periodic == 1 :
+
+ DW = DRE - DLE
+ xx[x < left_edge[0]] = x + DW[0]
+ xx[x > right_edge[0]] = x - DW[0]
+ yy[y < left_edge[1]] = y + DW[1]
+ yy[y > right_edge[1]] = y - DW[1]
+ zz[z < left_edge[2]] = z + DW[2]
+ zz[z > right_edge[2]] = z - DW[2]
+
+ idxx = na.logical_and(xx >= left_edge[0], xx <= right_edge[0])
+ idxy = na.logical_and(yy >= left_edge[1], yy <= right_edge[1])
+ idxz = na.logical_and(zz >= left_edge[2], zz <= right_edge[2])
+
+ idxs = na.logical_and(idxx, idyy)
+ idxs = na.logical_and(idxz, idxs)
+
+ return idxs
+
+def particles_validator_sphere(x, y, z, args) :
+
+ center = args[0]
+ radius = args[1]
+ periodic = args[2]
+ DLE = args[3]
+ DRE = args[4]
+
+ xx = na.abs(x-center[0])
+ yy = na.abs(y-center[1])
+ zz = na.abs(z-center[2])
+
+ if periodic == 1 :
+
+ DW = DRE - DLE
+
+ xx = na.minimum(xx,DW[0]-xx)
+ yy = na.minimum(yy,DW[1]-yy)
+ zz = na.minimum(zz,DW[2]-zz)
+
+ r = na.sqrt(xx*xx+yy*yy+zz*zz)
+
+ return r <= radius
+
+def particles_validator_disk(x, y, z, args) :
+
+ center = args[0]
+ normal = args[1]
+ radius = args[2]
+ height = args[3]
+
+ d = -na.dot(normal*center)
+
+ ph = na.abs(x*normal[0] + y*normal[1] + z*normal[2] + d)
+ pd2 = (x-center[0])**2+(y-center[1])**2+(z-center[2])**2
+
+ pr = na.sqrt(pd2-ph*ph)
+
+ return na.logical_and(pr <= radius, ph <= height)
+
class IOHandlerFLASH(BaseIOHandler):
_particle_reader = False
_data_style = "flash_hdf5"
@@ -49,8 +120,26 @@
def _read_particles(self, fields_to_read, type, args, grid_list,
count_list, conv_factors):
- pass
+ f = self._handle
+ _particles = []
+ fx = self._particle_fields["particle_posx"]
+ fy = self._particle_fields["particle_posy"]
+ fz = self._particle_fields["particle_posz"]
+ posx = f["/tracer particles"][:,fx]
+ posy = f["/tracer particles"][:,fy]
+ posz = f["/tracer particles"][:,fz]
+ if type == 0 :
+ idxs = particles_validator_region(posx,posy,posz,args)
+ elif type == 1 :
+ idxs = particles_validator_sphere(posx,posy,posz,args)
+ elif type == 2 :
+ idxs = particles_validator_disk(posx,posy,posz,args)
+ for field in fields_to_read :
+ fi = self._particle_fields[field]
+ _particles.append(f["/tracer particles"][idxs,fi])
+ return _particles
+ """
def _select_particles(self, grid, field):
f = self._handle
npart = f["/tracer particles"].shape[0]
@@ -67,6 +156,7 @@
tr.append(f["/tracer particles"][gi,fi])
start = end
return na.concatenate(tr)
+ """
def _read_data_set(self, grid, field):
f = self._handle
https://bitbucket.org/yt_analysis/yt/changeset/84700323b1c2/
changeset: 84700323b1c2
branch: yt
user: jzuhone
date: 2012-08-08 20:19:00
summary: ParticleIO now runs correctly, but it's still kind of slow. Probably will have to rewrite this in Cython.
affected #: 1 file
diff -r 83d3ca350ab3def502b3aea3526f3e8b14faaf1d -r 84700323b1c2a0488488ca0b3851414ad2686e16 yt/frontends/flash/io.py
--- a/yt/frontends/flash/io.py
+++ b/yt/frontends/flash/io.py
@@ -30,7 +30,7 @@
BaseIOHandler
def particles_validator_region(x, y, z, args) :
-
+
left_edge = args[0]
right_edge = args[1]
periodic = args[2]
@@ -55,7 +55,7 @@
idxy = na.logical_and(yy >= left_edge[1], yy <= right_edge[1])
idxz = na.logical_and(zz >= left_edge[2], zz <= right_edge[2])
- idxs = na.logical_and(idxx, idyy)
+ idxs = na.logical_and(idxx, idxy)
idxs = na.logical_and(idxz, idxs)
return idxs
@@ -101,7 +101,7 @@
return na.logical_and(pr <= radius, ph <= height)
class IOHandlerFLASH(BaseIOHandler):
- _particle_reader = False
+ _particle_reader = True
_data_style = "flash_hdf5"
def __init__(self, pf, *args, **kwargs):
@@ -121,13 +121,14 @@
def _read_particles(self, fields_to_read, type, args, grid_list,
count_list, conv_factors):
f = self._handle
- _particles = []
+ particles = []
+ _particles = f["/tracer particles"][:,:]
fx = self._particle_fields["particle_posx"]
fy = self._particle_fields["particle_posy"]
fz = self._particle_fields["particle_posz"]
- posx = f["/tracer particles"][:,fx]
- posy = f["/tracer particles"][:,fy]
- posz = f["/tracer particles"][:,fz]
+ posx = _particles[:,fx]
+ posy = _particles[:,fy]
+ posz = _particles[:,fz]
if type == 0 :
idxs = particles_validator_region(posx,posy,posz,args)
elif type == 1 :
@@ -136,8 +137,9 @@
idxs = particles_validator_disk(posx,posy,posz,args)
for field in fields_to_read :
fi = self._particle_fields[field]
- _particles.append(f["/tracer particles"][idxs,fi])
- return _particles
+ particles.append(_particles[idxs,fi])
+ del _particles
+ return particles
"""
def _select_particles(self, grid, field):
https://bitbucket.org/yt_analysis/yt/changeset/b086152fcd63/
changeset: b086152fcd63
branch: yt
user: jzuhone
date: 2012-08-08 20:33:04
summary: Removing _select_particles as it is deprecated (and very wrong, actually).
affected #: 1 file
diff -r 84700323b1c2a0488488ca0b3851414ad2686e16 -r b086152fcd63edc6ef507935adff0482482422e2 yt/frontends/flash/io.py
--- a/yt/frontends/flash/io.py
+++ b/yt/frontends/flash/io.py
@@ -141,25 +141,6 @@
del _particles
return particles
- """
- def _select_particles(self, grid, field):
- f = self._handle
- npart = f["/tracer particles"].shape[0]
- total_selected = 0
- start = 0
- stride = 1e6
- blki = self._particle_fields["particle_blk"]
- bi = grid.id - grid._id_offset
- fi = self._particle_fields[field]
- tr = []
- while start < npart:
- end = min(start + stride - 1, npart)
- gi = f["/tracer particles"][start:end,blki] == bi
- tr.append(f["/tracer particles"][gi,fi])
- start = end
- return na.concatenate(tr)
- """
-
def _read_data_set(self, grid, field):
f = self._handle
if field in self._particle_fields:
https://bitbucket.org/yt_analysis/yt/changeset/3cb50470c165/
changeset: 3cb50470c165
branch: yt
user: jzuhone
date: 2012-08-08 23:31:36
summary: Cythonized the bulk of _read_particles. Still too slow...
affected #: 2 files
diff -r b086152fcd63edc6ef507935adff0482482422e2 -r 3cb50470c16598aeb048b216d6ae734aedb6ee0f yt/frontends/flash/io.py
--- a/yt/frontends/flash/io.py
+++ b/yt/frontends/flash/io.py
@@ -25,6 +25,7 @@
import numpy as na
import h5py
+from yt.frontends.flash import _flash_particle_reader
from yt.utilities.io_handler import \
BaseIOHandler
@@ -120,6 +121,7 @@
def _read_particles(self, fields_to_read, type, args, grid_list,
count_list, conv_factors):
+ """
f = self._handle
particles = []
_particles = f["/tracer particles"][:,:]
@@ -139,7 +141,18 @@
fi = self._particle_fields[field]
particles.append(_particles[idxs,fi])
del _particles
- return particles
+ """
+ fx = self._particle_fields["particle_posx"]
+ fy = self._particle_fields["particle_posy"]
+ fz = self._particle_fields["particle_posz"]
+ field_indices = na.array([self._particle_fields[field]
+ for field in fields_to_read],
+ dtype='int32')
+ return _flash_particle_reader.read_particles(self._handle.fid,
+ fx, fy, fz,
+ len(fields_to_read),
+ type, args,
+ field_indices)
def _read_data_set(self, grid, field):
f = self._handle
diff -r b086152fcd63edc6ef507935adff0482482422e2 -r 3cb50470c16598aeb048b216d6ae734aedb6ee0f yt/frontends/flash/setup.py
--- a/yt/frontends/flash/setup.py
+++ b/yt/frontends/flash/setup.py
@@ -8,6 +8,10 @@
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('flash', parent_package, top_path)
+ config.add_extension("_flash_particle_reader",
+ ["yt/frontends/flash/_flash_particle_reader.pyx"],
+ language="c"
+ )
config.make_config_py() # installs __config__.py
#config.make_svn_version_py()
return config
https://bitbucket.org/yt_analysis/yt/changeset/9fe563700715/
changeset: 9fe563700715
branch: yt
user: jzuhone
date: 2012-08-08 23:33:14
summary: Removing old python code we moved to cython
affected #: 1 file
diff -r 3cb50470c16598aeb048b216d6ae734aedb6ee0f -r 9fe5637007154ad1fecdac000c83904cfc16100f yt/frontends/flash/io.py
--- a/yt/frontends/flash/io.py
+++ b/yt/frontends/flash/io.py
@@ -30,77 +30,6 @@
from yt.utilities.io_handler import \
BaseIOHandler
-def particles_validator_region(x, y, z, args) :
-
- left_edge = args[0]
- right_edge = args[1]
- periodic = args[2]
- DLE = args[3]
- DRE = args[4]
-
- xx = x
- yy = y
- zz = z
-
- if periodic == 1 :
-
- DW = DRE - DLE
- xx[x < left_edge[0]] = x + DW[0]
- xx[x > right_edge[0]] = x - DW[0]
- yy[y < left_edge[1]] = y + DW[1]
- yy[y > right_edge[1]] = y - DW[1]
- zz[z < left_edge[2]] = z + DW[2]
- zz[z > right_edge[2]] = z - DW[2]
-
- idxx = na.logical_and(xx >= left_edge[0], xx <= right_edge[0])
- idxy = na.logical_and(yy >= left_edge[1], yy <= right_edge[1])
- idxz = na.logical_and(zz >= left_edge[2], zz <= right_edge[2])
-
- idxs = na.logical_and(idxx, idxy)
- idxs = na.logical_and(idxz, idxs)
-
- return idxs
-
-def particles_validator_sphere(x, y, z, args) :
-
- center = args[0]
- radius = args[1]
- periodic = args[2]
- DLE = args[3]
- DRE = args[4]
-
- xx = na.abs(x-center[0])
- yy = na.abs(y-center[1])
- zz = na.abs(z-center[2])
-
- if periodic == 1 :
-
- DW = DRE - DLE
-
- xx = na.minimum(xx,DW[0]-xx)
- yy = na.minimum(yy,DW[1]-yy)
- zz = na.minimum(zz,DW[2]-zz)
-
- r = na.sqrt(xx*xx+yy*yy+zz*zz)
-
- return r <= radius
-
-def particles_validator_disk(x, y, z, args) :
-
- center = args[0]
- normal = args[1]
- radius = args[2]
- height = args[3]
-
- d = -na.dot(normal*center)
-
- ph = na.abs(x*normal[0] + y*normal[1] + z*normal[2] + d)
- pd2 = (x-center[0])**2+(y-center[1])**2+(z-center[2])**2
-
- pr = na.sqrt(pd2-ph*ph)
-
- return na.logical_and(pr <= radius, ph <= height)
-
class IOHandlerFLASH(BaseIOHandler):
_particle_reader = True
_data_style = "flash_hdf5"
@@ -121,27 +50,6 @@
def _read_particles(self, fields_to_read, type, args, grid_list,
count_list, conv_factors):
- """
- f = self._handle
- particles = []
- _particles = f["/tracer particles"][:,:]
- fx = self._particle_fields["particle_posx"]
- fy = self._particle_fields["particle_posy"]
- fz = self._particle_fields["particle_posz"]
- posx = _particles[:,fx]
- posy = _particles[:,fy]
- posz = _particles[:,fz]
- if type == 0 :
- idxs = particles_validator_region(posx,posy,posz,args)
- elif type == 1 :
- idxs = particles_validator_sphere(posx,posy,posz,args)
- elif type == 2 :
- idxs = particles_validator_disk(posx,posy,posz,args)
- for field in fields_to_read :
- fi = self._particle_fields[field]
- particles.append(_particles[idxs,fi])
- del _particles
- """
fx = self._particle_fields["particle_posx"]
fy = self._particle_fields["particle_posy"]
fz = self._particle_fields["particle_posz"]
https://bitbucket.org/yt_analysis/yt/changeset/2a4951ccdade/
changeset: 2a4951ccdade
branch: yt
user: jzuhone
date: 2012-08-08 23:37:46
summary: Forgot to add this in
affected #: 2 files
diff -r 9fe5637007154ad1fecdac000c83904cfc16100f -r 2a4951ccdade83639f4179c09016cf3955f51862 yt/data_objects/particle_trajectories.py
--- a/yt/data_objects/particle_trajectories.py
+++ b/yt/data_objects/particle_trajectories.py
@@ -23,6 +23,7 @@
from yt.data_objects.data_containers import YTFieldData
from yt.data_objects.time_series import TimeSeriesData
from yt.utilities.lib import sample_field_at_positions
+from yt.convenience import load
from yt.funcs import *
import numpy as na
@@ -79,7 +80,8 @@
indices.sort() # Just in case the caller wasn't careful
self.field_data = YTFieldData()
- self.pfs = TimeSeriesData.from_filenames(filenames)
+ #self.pfs = TimeSeriesData.from_filenames(filenames)
+ self.pfs = [load(fn) for fn in filenames]
self.masks = []
self.sorts = []
self.indices = indices
@@ -112,14 +114,14 @@
for pf in self.pfs :
dd = pf.h.all_data()
newtags = dd["particle_index"].astype("int")
- if not na.all(na.in1d(indices, newtags, assume_unique=True)) :
- print "Not all requested particle ids contained in this file!"
- raise IndexError
- mask = na.in1d(newtags, indices, assume_unique=True)
- sorts = na.argsort(newtags[mask])
- self.masks.append(mask)
- self.sorts.append(sorts)
- self.times.append(pf.current_time)
+ #if not na.all(na.in1d(indices, newtags, assume_unique=True)) :
+ # print "Not all requested particle ids contained in this file!"
+ # raise IndexError
+ #mask = na.in1d(newtags, indices, assume_unique=True)
+ #sorts = na.argsort(newtags[mask])
+ #self.masks.append(mask)
+ #self.sorts.append(sorts)
+ #self.times.append(pf.current_time)
self.times = na.array(self.times)
diff -r 9fe5637007154ad1fecdac000c83904cfc16100f -r 2a4951ccdade83639f4179c09016cf3955f51862 yt/frontends/flash/_flash_particle_reader.pyx
--- /dev/null
+++ b/yt/frontends/flash/_flash_particle_reader.pyx
@@ -0,0 +1,232 @@
+import numpy as np
+cimport numpy as np
+cimport cython
+import h5py
+
+cdef particles_validator_region(np.ndarray[np.float64_t, ndim=1] x,
+ np.ndarray[np.float64_t, ndim=1] y,
+ np.ndarray[np.float64_t, ndim=1] z,
+ np.ndarray[np.float64_t, ndim=1] left_edge,
+ np.ndarray[np.float64_t, ndim=1] right_edge,
+ np.int32_t periodic,
+ np.ndarray[np.float64_t, ndim=1] DLE,
+ np.ndarray[np.float64_t, ndim=1] DRE) :
+
+ cdef np.ndarray[np.uint8_t, cast=True, ndim=1] idxs
+ cdef np.ndarray[np.uint8_t, cast=True, ndim=1] idxx
+ cdef np.ndarray[np.uint8_t, cast=True, ndim=1] idxy
+ cdef np.ndarray[np.uint8_t, cast=True, ndim=1] idxz
+
+ cdef np.ndarray[np.float64_t, ndim=1] xx
+ cdef np.ndarray[np.float64_t, ndim=1] yy
+ cdef np.ndarray[np.float64_t, ndim=1] zz
+
+ cdef np.ndarray[np.float64_t, ndim=1] DW
+
+ idxs = np.zeros(x.shape[0], 'bool')
+ idxx = np.zeros(x.shape[0], 'bool')
+ idxy = np.zeros(x.shape[0], 'bool')
+ idxz = np.zeros(x.shape[0], 'bool')
+
+ xx = np.zeros(x.shape[0], 'float64')
+ yy = np.zeros(x.shape[0], 'float64')
+ zz = np.zeros(x.shape[0], 'float64')
+
+ DW = np.zeros(3, 'float64')
+
+ xx = x
+ yy = y
+ zz = z
+
+ if periodic == 1 :
+
+ DW = DRE - DLE
+ xx[x < left_edge[0]] = x + DW[0]
+ xx[x > right_edge[0]] = x - DW[0]
+ yy[y < left_edge[1]] = y + DW[1]
+ yy[y > right_edge[1]] = y - DW[1]
+ zz[z < left_edge[2]] = z + DW[2]
+ zz[z > right_edge[2]] = z - DW[2]
+
+ idxx = np.logical_and(xx >= left_edge[0], xx <= right_edge[0])
+ idxy = np.logical_and(yy >= left_edge[1], yy <= right_edge[1])
+ idxz = np.logical_and(zz >= left_edge[2], zz <= right_edge[2])
+
+ idxs = np.logical_and(idxx, idxy)
+ idxs = np.logical_and(idxz, idxs)
+
+ return idxs
+
+cdef particles_validator_sphere(np.ndarray[np.float64_t, ndim=1] x,
+ np.ndarray[np.float64_t, ndim=1] y,
+ np.ndarray[np.float64_t, ndim=1] z,
+ np.ndarray[np.float64_t, ndim=1] center,
+ np.float64_t radius,
+ np.int32_t periodic,
+ np.ndarray[np.float64_t, ndim=1] DLE,
+ np.ndarray[np.float64_t, ndim=1] DRE) :
+
+ cdef np.ndarray[np.uint8_t, cast=True, ndim=1] idxs
+
+ cdef np.ndarray[np.float64_t, ndim=1] r
+ cdef np.ndarray[np.float64_t, ndim=1] xx
+ cdef np.ndarray[np.float64_t, ndim=1] yy
+ cdef np.ndarray[np.float64_t, ndim=1] zz
+
+ cdef np.ndarray[np.float64_t, ndim=1] DW
+
+ idxs = np.zeros(x.shape[0], 'bool')
+
+ r = np.zeros(x.shape[0], 'float64')
+ xx = np.zeros(x.shape[0], 'float64')
+ yy = np.zeros(x.shape[0], 'float64')
+ zz = np.zeros(x.shape[0], 'float64')
+
+ DW = np.zeros(3, 'float64')
+
+ xx = np.abs(x-center[0])
+ yy = np.abs(y-center[1])
+ zz = np.abs(z-center[2])
+
+ if periodic == 1 :
+
+ DW = DRE - DLE
+
+ xx = np.minimum(xx,DW[0]-xx)
+ yy = np.minimum(yy,DW[1]-yy)
+ zz = np.minimum(zz,DW[2]-zz)
+
+ r = np.sqrt(xx*xx+yy*yy+zz*zz)
+
+ idxs = np.array(r <= radius)
+
+ return idxs
+
+cdef particles_validator_disk(np.ndarray[np.float64_t, ndim=1] x,
+ np.ndarray[np.float64_t, ndim=1] y,
+ np.ndarray[np.float64_t, ndim=1] z,
+ np.ndarray[np.float64_t, ndim=1] center,
+ np.ndarray[np.float64_t, ndim=1] normal,
+ np.float64_t radius, np.float64_t height) :
+
+ cdef np.float64_t d
+
+ cdef np.ndarray[np.uint8_t, cast=True, ndim=1] idxs
+
+ cdef np.ndarray[np.float64_t, ndim=1] ph
+ cdef np.ndarray[np.float64_t, ndim=1] pd2
+ cdef np.ndarray[np.float64_t, ndim=1] pr
+
+ idxs = np.zeros(x.shape[0], 'bool')
+
+ ph = np.zeros(x.shape[0], 'float64')
+ pd2 = np.zeros(x.shape[0], 'float64')
+ pr = np.zeros(x.shape[0], 'float64')
+
+ d = -np.dot(normal*center)
+
+ ph = np.abs(x*normal[0] + y*normal[1] + z*normal[2] + d)
+ pd2 = (x-center[0])**2+(y-center[1])**2+(z-center[2])**2
+
+ pr = np.sqrt(pd2-ph*ph)
+
+ idxs = np.logical_and(pr <= radius, ph <= height)
+
+ return idxs
+
+@cython.boundscheck(False)
+@cython.wraparound(False)
+def read_particles(file_id, int x_index, int y_index, int z_index,
+ int num_fields, int rtype, args,
+ np.ndarray[np.int32_t, ndim=1] field_indices) :
+
+ cdef np.ndarray[np.uint8_t, cast=True, ndim=1] idxs
+ cdef int i
+ cdef int num_particles
+ cdef np.int32_t periodic
+ cdef np.ndarray[np.float64_t, ndim=1] left_edge
+ cdef np.ndarray[np.float64_t, ndim=1] right_edge
+ cdef np.ndarray[np.float64_t, ndim=1] DLE
+ cdef np.ndarray[np.float64_t, ndim=1] DRE
+ cdef np.float64_t radius
+ cdef np.float64_t height
+ cdef np.ndarray[np.float64_t, ndim=1] normal
+ cdef np.ndarray[np.float64_t, ndim=1] center
+ cdef np.ndarray[np.float64_t, ndim=1] particle_field
+ cdef np.ndarray[np.float64_t, ndim=1] posx
+ cdef np.ndarray[np.float64_t, ndim=1] posy
+ cdef np.ndarray[np.float64_t, ndim=1] posz
+
+ left_edge = np.zeros(3, 'float64')
+ right_edge = np.zeros(3, 'float64')
+ DLE = np.zeros(3, 'float64')
+ DRE = np.zeros(3, 'float64')
+ normal = np.zeros(3, 'float64')
+ center = np.zeros(3, 'float64')
+
+ dataset = h5py.h5d.open(file_id, "tracer particles")
+ dataspace = dataset.get_space()
+ rank = dataspace.get_simple_extent_dims()
+ memspace = h5py.h5s.create_simple((rank[0],))
+
+ num_particles = rank[0]
+ count = (num_particles,1)
+
+ posx = np.zeros(num_particles, 'float64')
+ posy = np.zeros(num_particles, 'float64')
+ posz = np.zeros(num_particles, 'float64')
+
+ start = (0,x_index)
+ dataspace.select_hyperslab(start,count)
+ dataset.read(memspace, dataspace, posx)
+
+ start = (0,y_index)
+ dataspace.select_hyperslab(start,count)
+ dataset.read(memspace, dataspace, posy)
+
+ start = (0,z_index)
+ dataspace.select_hyperslab(start,count)
+ dataset.read(memspace, dataspace, posz)
+
+ idxs = np.zeros(num_particles, 'bool')
+
+ particle_field = np.zeros(num_particles, 'float64')
+
+ if rtype == 0 :
+ left_edge = args[0]
+ right_edge = args[1]
+ periodic = args[2]
+ DLE = args[3]
+ DRE = args[4]
+ idxs = particles_validator_region(posx,posy,posz,
+ left_edge,right_edge,
+ periodic,DLE,DRE)
+ elif rtype == 1:
+ center = args[0]
+ radius = args[1]
+ periodic = args[2]
+ DLE = args[3]
+ DRE = args[4]
+ idxs = particles_validator_sphere(posx,posy,posz,
+ center,radius,
+ periodic,DLE,DRE)
+ elif rtype == 2:
+ center = args[0]
+ normal = args[1]
+ radius = args[2]
+ height = args[3]
+ idxs = particles_validator_disk(posx,posy,posz,
+ center,normal,
+ radius,height)
+
+ _particles = []
+
+ for i in range(num_fields) :
+
+ start = (0,field_indices[i])
+ dataspace.select_hyperslab(start,count)
+ dataset.read(memspace, dataspace, particle_field)
+ _particles.append(particle_field[idxs])
+
+ return _particles
+
https://bitbucket.org/yt_analysis/yt/changeset/479bd429db27/
changeset: 479bd429db27
branch: yt
user: MatthewTurk
date: 2012-08-09 00:01:05
summary: Unrolling a bunch of loops in Cython for the FLASH particle reader for regions
affected #: 1 file
diff -r 2a4951ccdade83639f4179c09016cf3955f51862 -r 479bd429db270b6ca60e2b7be32e763593d07a28 yt/frontends/flash/_flash_particle_reader.pyx
--- a/yt/frontends/flash/_flash_particle_reader.pyx
+++ b/yt/frontends/flash/_flash_particle_reader.pyx
@@ -12,50 +12,34 @@
np.ndarray[np.float64_t, ndim=1] DLE,
np.ndarray[np.float64_t, ndim=1] DRE) :
- cdef np.ndarray[np.uint8_t, cast=True, ndim=1] idxs
- cdef np.ndarray[np.uint8_t, cast=True, ndim=1] idxx
- cdef np.ndarray[np.uint8_t, cast=True, ndim=1] idxy
- cdef np.ndarray[np.uint8_t, cast=True, ndim=1] idxz
-
- cdef np.ndarray[np.float64_t, ndim=1] xx
- cdef np.ndarray[np.float64_t, ndim=1] yy
- cdef np.ndarray[np.float64_t, ndim=1] zz
+ cdef np.ndarray[np.uint8_t, cast=True, ndim=1] mask
+ cdef int i, ax
- cdef np.ndarray[np.float64_t, ndim=1] DW
-
- idxs = np.zeros(x.shape[0], 'bool')
- idxx = np.zeros(x.shape[0], 'bool')
- idxy = np.zeros(x.shape[0], 'bool')
- idxz = np.zeros(x.shape[0], 'bool')
-
- xx = np.zeros(x.shape[0], 'float64')
- yy = np.zeros(x.shape[0], 'float64')
- zz = np.zeros(x.shape[0], 'float64')
+ mask = np.zeros(x.shape[0], 'bool')
DW = np.zeros(3, 'float64')
- xx = x
- yy = y
- zz = z
+ if periodic == 1:
+ DW = DRE - DLE
- if periodic == 1 :
+ cdef np.float64_t pos[3]
+ cdef int inside
+ for i in range(x.shape[0]):
+ pos[0] = x[i]
+ pos[1] = y[i]
+ pos[2] = z[i]
+ inside = 1
+ for ax in range(3):
+ if pos[ax] < left_edge[ax]: pos[ax] += DW[ax]
+ if pos[ax] > right_edge[ax]: pos[ax] -= DW[ax]
+ for ax in range(3):
+ if pos[ax] < left_edge[ax] or pos[ax] > right_edge[ax]:
+ inside = 0
+ break
+ if inside == 1:
+ mask[i] = 1
- DW = DRE - DLE
- xx[x < left_edge[0]] = x + DW[0]
- xx[x > right_edge[0]] = x - DW[0]
- yy[y < left_edge[1]] = y + DW[1]
- yy[y > right_edge[1]] = y - DW[1]
- zz[z < left_edge[2]] = z + DW[2]
- zz[z > right_edge[2]] = z - DW[2]
-
- idxx = np.logical_and(xx >= left_edge[0], xx <= right_edge[0])
- idxy = np.logical_and(yy >= left_edge[1], yy <= right_edge[1])
- idxz = np.logical_and(zz >= left_edge[2], zz <= right_edge[2])
-
- idxs = np.logical_and(idxx, idxy)
- idxs = np.logical_and(idxz, idxs)
-
- return idxs
+ return mask
cdef particles_validator_sphere(np.ndarray[np.float64_t, ndim=1] x,
np.ndarray[np.float64_t, ndim=1] y,
https://bitbucket.org/yt_analysis/yt/changeset/298587cec10f/
changeset: 298587cec10f
branch: yt
user: MatthewTurk
date: 2012-08-09 00:04:14
summary: Another minor speedup.
affected #: 1 file
diff -r 479bd429db270b6ca60e2b7be32e763593d07a28 -r 298587cec10f5fae81862ab241b7ea4944ba9841 yt/frontends/flash/_flash_particle_reader.pyx
--- a/yt/frontends/flash/_flash_particle_reader.pyx
+++ b/yt/frontends/flash/_flash_particle_reader.pyx
@@ -3,6 +3,9 @@
cimport cython
import h5py
+@cython.boundscheck(False)
+@cython.wraparound(False)
+@cython.cdivision(True)
cdef particles_validator_region(np.ndarray[np.float64_t, ndim=1] x,
np.ndarray[np.float64_t, ndim=1] y,
np.ndarray[np.float64_t, ndim=1] z,
@@ -17,10 +20,10 @@
mask = np.zeros(x.shape[0], 'bool')
- DW = np.zeros(3, 'float64')
+ cdef np.ndarray[np.float64_t, ndim=1] DW = np.zeros(3, 'float64')
if periodic == 1:
- DW = DRE - DLE
+ DW[:] = DRE - DLE
cdef np.float64_t pos[3]
cdef int inside
https://bitbucket.org/yt_analysis/yt/changeset/c03e94c86cd5/
changeset: c03e94c86cd5
branch: yt
user: jzuhone
date: 2012-08-09 06:57:14
summary: Merging
affected #: 6 files
diff -r 298587cec10f5fae81862ab241b7ea4944ba9841 -r c03e94c86cd5ec73681871f49360a998edc50740 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -417,6 +417,7 @@
echo 'fb3cf421b2dc48c31956b3e3ee4ab6ebc743deec3bf626c2238a1996c8c51be87260bd6aa662793a1f0c34dcda9b3146763777bb162dfad6fec4ca7acc403b2e zeromq-2.2.0.tar.gz' > zeromq-2.2.0.tar.gz.sha512
echo 'd761b492352841cdc125d9f0c99ee6d6c435812472ea234728b7f0fb4ad1048e1eec9b399df2081fbc926566f333f7780fedd0ce23255a6633fe5c60ed15a6af pyzmq-2.1.11.tar.gz' > pyzmq-2.1.11.tar.gz.sha512
echo '57fa5e57dfb98154a42d2d477f29401c2260ae7ad3a8128a4098b42ee3b35c54367b1a3254bc76b9b3b14b4aab7c3e1135858f68abc5636daedf2f01f9b8a3cf tornado-2.2.tar.gz' > tornado-2.2.tar.gz.sha512
+echo '1332e3d5465ca249c357314cf15d2a4e5e83a941841021b8f6a17a107dce268a7a082838ade5e8db944ecde6bfb111211ab218aa414ee90aafbb81f1491b3b93 Forthon-0.8.10.tar.gz' > Forthon-0.8.10.tar.gz.sha512
# Individual processes
[ -z "$HDF5_DIR" ] && get_ytproject hdf5-1.8.7.tar.gz
@@ -437,6 +438,7 @@
get_ytproject h5py-2.0.1.tar.gz
get_ytproject Cython-0.16.tar.gz
get_ytproject reason-js-20120623.zip
+get_ytproject Forthon-0.8.10.tar.gz
if [ $INST_BZLIB -eq 1 ]
then
@@ -674,6 +676,7 @@
do_setup_py ipython-0.13
do_setup_py h5py-2.0.1
do_setup_py Cython-0.16
+do_setup_py Forthon-0.8.10
[ $INST_PYX -eq 1 ] && do_setup_py PyX-0.11.1
echo "Doing yt update, wiping local changes and updating to branch ${BRANCH}"
diff -r 298587cec10f5fae81862ab241b7ea4944ba9841 -r c03e94c86cd5ec73681871f49360a998edc50740 yt/frontends/stream/api.py
--- a/yt/frontends/stream/api.py
+++ b/yt/frontends/stream/api.py
@@ -28,7 +28,8 @@
StreamGrid, \
StreamHierarchy, \
StreamStaticOutput, \
- StreamHandler
+ StreamHandler, \
+ load_uniform_grid
from .fields import \
KnownStreamFields, \
diff -r 298587cec10f5fae81862ab241b7ea4944ba9841 -r c03e94c86cd5ec73681871f49360a998edc50740 yt/frontends/stream/data_structures.py
--- a/yt/frontends/stream/data_structures.py
+++ b/yt/frontends/stream/data_structures.py
@@ -40,6 +40,8 @@
FieldInfoContainer, NullFunc
from yt.utilities.lib import \
get_box_grids_level
+from yt.utilities.definitions import \
+ mpc_conversion, sec_conversion
from .fields import \
StreamFieldInfo, \
@@ -288,3 +290,89 @@
@classmethod
def _is_valid(cls, *args, **kwargs):
return False
+
+class StreamDictFieldHandler(dict):
+
+ @property
+ def all_fields(self): return self[0].keys()
+
+def load_uniform_grid(data, domain_dimensions, domain_size_in_cm):
+ r"""Load a uniform grid of data into yt as a
+ :class:`~yt.frontends.stream.data_structures.StreamHandler`.
+
+ This should allow a uniform grid of data to be loaded directly into yt and
+ analyzed as would any others. This comes with several caveats:
+ * Units will be incorrect unless the data has already been converted to
+ cgs.
+ * Some functions may behave oddly, and parallelism will be
+ disappointing or non-existent in most cases.
+ * Particles may be difficult to integrate.
+
+ Parameters
+ ----------
+ data : dict
+ This is a dict of numpy arrays, where the keys are the field names.
+ domain_dimensiosn : array_like
+ This is the domain dimensions of the grid
+ domain_size_in_cm : float
+ The size of the domain, in centimeters
+
+ Examples
+ --------
+
+ >>> arr = na.random.random((256, 256, 256))
+ >>> data = dict(Density = arr)
+ >>> pf = load_uniform_grid(data, [256, 256, 256], 3.08e24)
+ """
+ sfh = StreamDictFieldHandler()
+ sfh.update({0:data})
+ domain_dimensions = na.array(domain_dimensions)
+ if na.unique(domain_dimensions).size != 1:
+ print "We don't support variably sized domains yet."
+ raise RuntimeError
+ domain_left_edge = na.zeros(3, 'float64')
+ domain_right_edge = na.ones(3, 'float64')
+ grid_left_edges = na.zeros(3, "int64").reshape((1,3))
+ grid_right_edges = na.array(domain_dimensions, "int64").reshape((1,3))
+
+ grid_levels = na.array([0], dtype='int32').reshape((1,1))
+ grid_dimensions = grid_right_edges - grid_left_edges
+
+ grid_left_edges = grid_left_edges.astype("float64")
+ grid_left_edges /= domain_dimensions*2**grid_levels
+ grid_left_edges *= domain_right_edge - domain_left_edge
+ grid_left_edges += domain_left_edge
+
+ grid_right_edges = grid_right_edges.astype("float64")
+ grid_right_edges /= domain_dimensions*2**grid_levels
+ grid_right_edges *= domain_right_edge - domain_left_edge
+ grid_right_edges += domain_left_edge
+
+ handler = StreamHandler(
+ grid_left_edges,
+ grid_right_edges,
+ grid_dimensions,
+ grid_levels,
+ na.array([-1], dtype='int64'),
+ na.zeros(1, dtype='int64').reshape((1,1)),
+ na.zeros(1).reshape((1,1)),
+ sfh,
+ )
+
+ handler.name = "UniformGridData"
+ handler.domain_left_edge = domain_left_edge
+ handler.domain_right_edge = domain_right_edge
+ handler.refine_by = 2
+ handler.dimensionality = 3
+ handler.domain_dimensions = domain_dimensions
+ handler.simulation_time = 0.0
+ handler.cosmology_simulation = 0
+
+ spf = StreamStaticOutput(handler)
+ spf.units["cm"] = domain_size_in_cm
+ spf.units['1'] = 1.0
+ spf.units["unitary"] = 1.0
+ box_in_mpc = domain_size_in_cm / mpc_conversion['cm']
+ for unit in mpc_conversion.keys():
+ spf.units[unit] = mpc_conversion[unit] * box_in_mpc
+ return spf
diff -r 298587cec10f5fae81862ab241b7ea4944ba9841 -r c03e94c86cd5ec73681871f49360a998edc50740 yt/utilities/kdtree/Makefile
--- a/yt/utilities/kdtree/Makefile
+++ b/yt/utilities/kdtree/Makefile
@@ -9,9 +9,10 @@
endif
fKD: fKD.f90 fKD.v fKD_source.f90
-# Forthon --compile_first fKD_source --no2underscores --with-numpy -g fKD fKD.f90 fKD_source.f90
+# Forthon --compile_first fKD_source --no2underscores -g fKD fKD.f90 fKD_source.f90
@echo "Using $(FORTHON) ($(FORTHON_EXE))"
- $(FORTHON) -F gfortran --compile_first fKD_source --no2underscores --with-numpy --fopt "-O3" fKD fKD_source.f90
+ $(FORTHON) -F gfortran --compile_first fKD_source --no2underscores --fopt "-O3" fKD fKD_source.f90
+ mv build/lib*/fKDpy.so .
clean:
rm -rf build fKDpy.a fKDpy.so
diff -r 298587cec10f5fae81862ab241b7ea4944ba9841 -r c03e94c86cd5ec73681871f49360a998edc50740 yt/visualization/plot_window.py
--- a/yt/visualization/plot_window.py
+++ b/yt/visualization/plot_window.py
@@ -458,10 +458,15 @@
Log on/off.
"""
- if log:
- self._field_transform[field] = log_transform
+ if field == 'all':
+ fields = self.plots.keys()
else:
- self._field_transform[field] = linear_transform
+ fields = [field]
+ for field in fields:
+ if log:
+ self._field_transform[field] = log_transform
+ else:
+ self._field_transform[field] = linear_transform
@invalidate_plot
def set_transform(self, field, name):
@@ -472,34 +477,70 @@
@invalidate_plot
def set_cmap(self, field, cmap_name):
"""set the colormap for one of the fields
-
+
Parameters
----------
field : string
- the field to set a transform
+ the field to set the colormap
+ if field == 'all', applies to all plots.
cmap_name : string
name of the colormap
"""
- self._colorbar_valid = False
- self._colormaps[field] = cmap_name
+
+ if field is 'all':
+ fields = self.plots.keys()
+ else:
+ fields = [field]
+ for field in fields:
+ self._colorbar_valid = False
+ self._colormaps[field] = cmap_name
@invalidate_plot
- def set_zlim(self, field, zmin, zmax):
+ def set_zlim(self, field, zmin, zmax, dynamic_range=None):
"""set the scale of the colormap
-
+
Parameters
----------
field : string
- the field to set a transform
+ the field to set a colormap scale
+ if field == 'all', applies to all plots.
zmin : float
- the new minimum of the colormap scale
+ the new minimum of the colormap scale. If 'min', will
+ set to the minimum value in the current view.
zmax : float
- the new maximum of the colormap scale
+ the new maximum of the colormap scale. If 'max', will
+ set to the maximum value in the current view.
+
+ Keyword Parameters
+ ------------------
+ dyanmic_range : float (default: None)
+ The dynamic range of the image.
+ If zmin == None, will set zmin = zmax / dynamic_range
+ If zmax == None, will set zmax = zmin * dynamic_range
+ When dynamic_range is specified, defaults to setting
+ zmin = zmax / dynamic_range.
"""
- self.plots[field].zmin = zmin
- self.plots[field].zmax = zmax
+ if field is 'all':
+ fields = self.plots.keys()
+ else:
+ fields = [field]
+ for field in fields:
+ myzmin = zmin
+ myzmax = zmax
+ if zmin == 'min':
+ myzmin = self.plots[field].image._A.min()
+ if zmax == 'max':
+ myzmax = self.plots[field].image._A.max()
+ if dynamic_range is not None:
+ if zmax is None:
+ myzmax = myzmin * dynamic_range
+ else:
+ myzmin = myzmax / dynamic_range
+
+ self.plots[field].zmin = myzmin
+ self.plots[field].zmax = myzmax
def setup_callbacks(self):
for key in callback_registry:
@@ -512,7 +553,7 @@
callback = invalidate_plot(apply_callback(CallbackMaker))
callback.__doc__ = CallbackMaker.__init__.__doc__
self.__dict__['annotate_'+cbname] = types.MethodType(callback,self)
-
+
def get_metadata(self, field, strip_mathml = True, return_string = True):
fval = self._frb[field]
mi = fval.min()
@@ -651,25 +692,32 @@
@invalidate_plot
def set_cmap(self, field, cmap):
"""set the colormap for one of the fields
-
+
Parameters
----------
field : string
the field to set a transform
+ if field == 'all', applies to all plots.
cmap_name : string
name of the colormap
"""
- self._colorbar_valid = False
- self._colormaps[field] = cmap
- if isinstance(cmap, types.StringTypes):
- if str(cmap) in yt_colormaps:
- cmap = yt_colormaps[str(cmap)]
- elif hasattr(matplotlib.cm, cmap):
- cmap = getattr(matplotlib.cm, cmap)
- if not is_colormap(cmap) and cmap is not None:
- raise RuntimeError("Colormap '%s' does not exist!" % str(cmap))
- self.plots[field].image.set_cmap(cmap)
+ if field == 'all':
+ fields = self.plots.keys()
+ else:
+ fields = [field]
+
+ for field in fields:
+ self._colorbar_valid = False
+ self._colormaps[field] = cmap
+ if isinstance(cmap, types.StringTypes):
+ if str(cmap) in yt_colormaps:
+ cmap = yt_colormaps[str(cmap)]
+ elif hasattr(matplotlib.cm, cmap):
+ cmap = getattr(matplotlib.cm, cmap)
+ if not is_colormap(cmap) and cmap is not None:
+ raise RuntimeError("Colormap '%s' does not exist!" % str(cmap))
+ self.plots[field].image.set_cmap(cmap)
def save(self,name=None):
"""saves the plot to disk.
@@ -762,7 +810,7 @@
the image centers on the location of the maximum density
cell. If set to 'c' or 'center', the plot is centered on
the middle of the domain.
- width : tuple or a float.
+ width : tuple or a float.
Width can have four different formats to support windows with variable
x and y widths. They are:
@@ -781,7 +829,7 @@
the y axis. In the other two examples, code units are assumed, for example
(0.2, 0.3) requests a plot that has an x width of 0.2 and a y width of 0.3
in code units.
- origin : string
+ origin : string
The location of the origin of the plot coordinate system.
Currently, can be set to three options: 'left-domain', corresponding
to the bottom-left hand corner of the simulation domain, 'center-domain',
@@ -830,7 +878,7 @@
the image centers on the location of the maximum density
cell. If set to 'c' or 'center', the plot is centered on
the middle of the domain.
- width : tuple or a float.
+ width : tuple or a float.
Width can have four different formats to support windows with variable
x and y widths. They are:
https://bitbucket.org/yt_analysis/yt/changeset/166ef5a2bbab/
changeset: 166ef5a2bbab
branch: yt
user: jzuhone
date: 2012-08-10 01:53:27
summary: Adding "sim_time" and "number_of_particles" as optional arguments to load_uniform_grid. With these we can add a simulation time and particles to the uniform grid.
affected #: 1 file
diff -r c03e94c86cd5ec73681871f49360a998edc50740 -r 166ef5a2bbab3c3fedab52c16a615d4735fd35a6 yt/frontends/stream/data_structures.py
--- a/yt/frontends/stream/data_structures.py
+++ b/yt/frontends/stream/data_structures.py
@@ -296,7 +296,8 @@
@property
def all_fields(self): return self[0].keys()
-def load_uniform_grid(data, domain_dimensions, domain_size_in_cm):
+def load_uniform_grid(data, domain_dimensions, domain_size_in_cm,
+ sim_time=0.0, number_of_particles=0):
r"""Load a uniform grid of data into yt as a
:class:`~yt.frontends.stream.data_structures.StreamHandler`.
@@ -316,13 +317,18 @@
This is the domain dimensions of the grid
domain_size_in_cm : float
The size of the domain, in centimeters
-
+ sim_time : float, optional
+ The simulation time in seconds
+ number_of_particles : int, optional
+ If particle fields are included, set this to the number of particles
+
Examples
--------
>>> arr = na.random.random((256, 256, 256))
>>> data = dict(Density = arr)
>>> pf = load_uniform_grid(data, [256, 256, 256], 3.08e24)
+
"""
sfh = StreamDictFieldHandler()
sfh.update({0:data})
@@ -354,7 +360,7 @@
grid_dimensions,
grid_levels,
na.array([-1], dtype='int64'),
- na.zeros(1, dtype='int64').reshape((1,1)),
+ number_of_particles*na.ones(1, dtype='int64').reshape((1,1)),
na.zeros(1).reshape((1,1)),
sfh,
)
@@ -365,7 +371,7 @@
handler.refine_by = 2
handler.dimensionality = 3
handler.domain_dimensions = domain_dimensions
- handler.simulation_time = 0.0
+ handler.simulation_time = sim_time
handler.cosmology_simulation = 0
spf = StreamStaticOutput(handler)
https://bitbucket.org/yt_analysis/yt/changeset/f5b355b6ad0d/
changeset: f5b355b6ad0d
branch: yt
user: jzuhone
date: 2012-09-01 05:09:57
summary: Getting rid of FLASH ParticleIO for now since it may be reincarnated in yt 3.0
affected #: 3 files
diff -r 166ef5a2bbab3c3fedab52c16a615d4735fd35a6 -r f5b355b6ad0d34f8bcc2d9f890114aae3ed56f60 yt/frontends/flash/_flash_particle_reader.pyx
--- a/yt/frontends/flash/_flash_particle_reader.pyx
+++ /dev/null
@@ -1,219 +0,0 @@
-import numpy as np
-cimport numpy as np
-cimport cython
-import h5py
-
- at cython.boundscheck(False)
- at cython.wraparound(False)
- at cython.cdivision(True)
-cdef particles_validator_region(np.ndarray[np.float64_t, ndim=1] x,
- np.ndarray[np.float64_t, ndim=1] y,
- np.ndarray[np.float64_t, ndim=1] z,
- np.ndarray[np.float64_t, ndim=1] left_edge,
- np.ndarray[np.float64_t, ndim=1] right_edge,
- np.int32_t periodic,
- np.ndarray[np.float64_t, ndim=1] DLE,
- np.ndarray[np.float64_t, ndim=1] DRE) :
-
- cdef np.ndarray[np.uint8_t, cast=True, ndim=1] mask
- cdef int i, ax
-
- mask = np.zeros(x.shape[0], 'bool')
-
- cdef np.ndarray[np.float64_t, ndim=1] DW = np.zeros(3, 'float64')
-
- if periodic == 1:
- DW[:] = DRE - DLE
-
- cdef np.float64_t pos[3]
- cdef int inside
- for i in range(x.shape[0]):
- pos[0] = x[i]
- pos[1] = y[i]
- pos[2] = z[i]
- inside = 1
- for ax in range(3):
- if pos[ax] < left_edge[ax]: pos[ax] += DW[ax]
- if pos[ax] > right_edge[ax]: pos[ax] -= DW[ax]
- for ax in range(3):
- if pos[ax] < left_edge[ax] or pos[ax] > right_edge[ax]:
- inside = 0
- break
- if inside == 1:
- mask[i] = 1
-
- return mask
-
-cdef particles_validator_sphere(np.ndarray[np.float64_t, ndim=1] x,
- np.ndarray[np.float64_t, ndim=1] y,
- np.ndarray[np.float64_t, ndim=1] z,
- np.ndarray[np.float64_t, ndim=1] center,
- np.float64_t radius,
- np.int32_t periodic,
- np.ndarray[np.float64_t, ndim=1] DLE,
- np.ndarray[np.float64_t, ndim=1] DRE) :
-
- cdef np.ndarray[np.uint8_t, cast=True, ndim=1] idxs
-
- cdef np.ndarray[np.float64_t, ndim=1] r
- cdef np.ndarray[np.float64_t, ndim=1] xx
- cdef np.ndarray[np.float64_t, ndim=1] yy
- cdef np.ndarray[np.float64_t, ndim=1] zz
-
- cdef np.ndarray[np.float64_t, ndim=1] DW
-
- idxs = np.zeros(x.shape[0], 'bool')
-
- r = np.zeros(x.shape[0], 'float64')
- xx = np.zeros(x.shape[0], 'float64')
- yy = np.zeros(x.shape[0], 'float64')
- zz = np.zeros(x.shape[0], 'float64')
-
- DW = np.zeros(3, 'float64')
-
- xx = np.abs(x-center[0])
- yy = np.abs(y-center[1])
- zz = np.abs(z-center[2])
-
- if periodic == 1 :
-
- DW = DRE - DLE
-
- xx = np.minimum(xx,DW[0]-xx)
- yy = np.minimum(yy,DW[1]-yy)
- zz = np.minimum(zz,DW[2]-zz)
-
- r = np.sqrt(xx*xx+yy*yy+zz*zz)
-
- idxs = np.array(r <= radius)
-
- return idxs
-
-cdef particles_validator_disk(np.ndarray[np.float64_t, ndim=1] x,
- np.ndarray[np.float64_t, ndim=1] y,
- np.ndarray[np.float64_t, ndim=1] z,
- np.ndarray[np.float64_t, ndim=1] center,
- np.ndarray[np.float64_t, ndim=1] normal,
- np.float64_t radius, np.float64_t height) :
-
- cdef np.float64_t d
-
- cdef np.ndarray[np.uint8_t, cast=True, ndim=1] idxs
-
- cdef np.ndarray[np.float64_t, ndim=1] ph
- cdef np.ndarray[np.float64_t, ndim=1] pd2
- cdef np.ndarray[np.float64_t, ndim=1] pr
-
- idxs = np.zeros(x.shape[0], 'bool')
-
- ph = np.zeros(x.shape[0], 'float64')
- pd2 = np.zeros(x.shape[0], 'float64')
- pr = np.zeros(x.shape[0], 'float64')
-
- d = -np.dot(normal*center)
-
- ph = np.abs(x*normal[0] + y*normal[1] + z*normal[2] + d)
- pd2 = (x-center[0])**2+(y-center[1])**2+(z-center[2])**2
-
- pr = np.sqrt(pd2-ph*ph)
-
- idxs = np.logical_and(pr <= radius, ph <= height)
-
- return idxs
-
- at cython.boundscheck(False)
- at cython.wraparound(False)
-def read_particles(file_id, int x_index, int y_index, int z_index,
- int num_fields, int rtype, args,
- np.ndarray[np.int32_t, ndim=1] field_indices) :
-
- cdef np.ndarray[np.uint8_t, cast=True, ndim=1] idxs
- cdef int i
- cdef int num_particles
- cdef np.int32_t periodic
- cdef np.ndarray[np.float64_t, ndim=1] left_edge
- cdef np.ndarray[np.float64_t, ndim=1] right_edge
- cdef np.ndarray[np.float64_t, ndim=1] DLE
- cdef np.ndarray[np.float64_t, ndim=1] DRE
- cdef np.float64_t radius
- cdef np.float64_t height
- cdef np.ndarray[np.float64_t, ndim=1] normal
- cdef np.ndarray[np.float64_t, ndim=1] center
- cdef np.ndarray[np.float64_t, ndim=1] particle_field
- cdef np.ndarray[np.float64_t, ndim=1] posx
- cdef np.ndarray[np.float64_t, ndim=1] posy
- cdef np.ndarray[np.float64_t, ndim=1] posz
-
- left_edge = np.zeros(3, 'float64')
- right_edge = np.zeros(3, 'float64')
- DLE = np.zeros(3, 'float64')
- DRE = np.zeros(3, 'float64')
- normal = np.zeros(3, 'float64')
- center = np.zeros(3, 'float64')
-
- dataset = h5py.h5d.open(file_id, "tracer particles")
- dataspace = dataset.get_space()
- rank = dataspace.get_simple_extent_dims()
- memspace = h5py.h5s.create_simple((rank[0],))
-
- num_particles = rank[0]
- count = (num_particles,1)
-
- posx = np.zeros(num_particles, 'float64')
- posy = np.zeros(num_particles, 'float64')
- posz = np.zeros(num_particles, 'float64')
-
- start = (0,x_index)
- dataspace.select_hyperslab(start,count)
- dataset.read(memspace, dataspace, posx)
-
- start = (0,y_index)
- dataspace.select_hyperslab(start,count)
- dataset.read(memspace, dataspace, posy)
-
- start = (0,z_index)
- dataspace.select_hyperslab(start,count)
- dataset.read(memspace, dataspace, posz)
-
- idxs = np.zeros(num_particles, 'bool')
-
- particle_field = np.zeros(num_particles, 'float64')
-
- if rtype == 0 :
- left_edge = args[0]
- right_edge = args[1]
- periodic = args[2]
- DLE = args[3]
- DRE = args[4]
- idxs = particles_validator_region(posx,posy,posz,
- left_edge,right_edge,
- periodic,DLE,DRE)
- elif rtype == 1:
- center = args[0]
- radius = args[1]
- periodic = args[2]
- DLE = args[3]
- DRE = args[4]
- idxs = particles_validator_sphere(posx,posy,posz,
- center,radius,
- periodic,DLE,DRE)
- elif rtype == 2:
- center = args[0]
- normal = args[1]
- radius = args[2]
- height = args[3]
- idxs = particles_validator_disk(posx,posy,posz,
- center,normal,
- radius,height)
-
- _particles = []
-
- for i in range(num_fields) :
-
- start = (0,field_indices[i])
- dataspace.select_hyperslab(start,count)
- dataset.read(memspace, dataspace, particle_field)
- _particles.append(particle_field[idxs])
-
- return _particles
-
diff -r 166ef5a2bbab3c3fedab52c16a615d4735fd35a6 -r f5b355b6ad0d34f8bcc2d9f890114aae3ed56f60 yt/frontends/flash/io.py
--- a/yt/frontends/flash/io.py
+++ b/yt/frontends/flash/io.py
@@ -25,13 +25,12 @@
import numpy as na
import h5py
-from yt.frontends.flash import _flash_particle_reader
from yt.utilities.io_handler import \
BaseIOHandler
class IOHandlerFLASH(BaseIOHandler):
- _particle_reader = True
+ _particle_reader = False
_data_style = "flash_hdf5"
def __init__(self, pf, *args, **kwargs):
@@ -50,17 +49,7 @@
def _read_particles(self, fields_to_read, type, args, grid_list,
count_list, conv_factors):
- fx = self._particle_fields["particle_posx"]
- fy = self._particle_fields["particle_posy"]
- fz = self._particle_fields["particle_posz"]
- field_indices = na.array([self._particle_fields[field]
- for field in fields_to_read],
- dtype='int32')
- return _flash_particle_reader.read_particles(self._handle.fid,
- fx, fy, fz,
- len(fields_to_read),
- type, args,
- field_indices)
+ pass
def _read_data_set(self, grid, field):
f = self._handle
diff -r 166ef5a2bbab3c3fedab52c16a615d4735fd35a6 -r f5b355b6ad0d34f8bcc2d9f890114aae3ed56f60 yt/frontends/flash/setup.py
--- a/yt/frontends/flash/setup.py
+++ b/yt/frontends/flash/setup.py
@@ -8,10 +8,6 @@
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('flash', parent_package, top_path)
- config.add_extension("_flash_particle_reader",
- ["yt/frontends/flash/_flash_particle_reader.pyx"],
- language="c"
- )
config.make_config_py() # installs __config__.py
#config.make_svn_version_py()
return config
https://bitbucket.org/yt_analysis/yt/changeset/b0690ab390d0/
changeset: b0690ab390d0
branch: yt
user: jzuhone
date: 2012-09-01 05:15:23
summary: Changed the order of units setup and comoving units setup so that the latter always goes last and doesn't get overridden.
Fixing the conversion for velocity units as well.
affected #: 1 file
diff -r f5b355b6ad0d34f8bcc2d9f890114aae3ed56f60 -r b0690ab390d0128e3ddc0456b313448eca70a54c yt/frontends/flash/data_structures.py
--- a/yt/frontends/flash/data_structures.py
+++ b/yt/frontends/flash/data_structures.py
@@ -229,13 +229,13 @@
self.conversion_factors = defaultdict(lambda: 1.0)
if "EOSType" not in self.parameters:
self.parameters["EOSType"] = -1
- if self.cosmological_simulation == 1:
- self._setup_comoving_units()
if "pc_unitsbase" in self.parameters:
if self.parameters["pc_unitsbase"] == "CGS":
self._setup_cgs_units()
else:
self._setup_nounits_units()
+ if self.cosmological_simulation == 1:
+ self._setup_comoving_units()
self.time_units['1'] = 1
self.units['1'] = 1.0
self.units['unitary'] = 1.0 / \
@@ -252,10 +252,10 @@
self.conversion_factors['eint'] = (1.0 + self.current_redshift)**-2.0
self.conversion_factors['ener'] = (1.0 + self.current_redshift)**-2.0
self.conversion_factors['temp'] = (1.0 + self.current_redshift)**-2.0
- self.conversion_factors['velx'] = (1.0 + self.current_redshift)
+ self.conversion_factors['velx'] = (1.0 + self.current_redshift)**-1.0
self.conversion_factors['vely'] = self.conversion_factors['velx']
self.conversion_factors['velz'] = self.conversion_factors['velx']
- self.conversion_factors['particle_velx'] = (1.0 + self.current_redshift)
+ self.conversion_factors['particle_velx'] = (1.0 + self.current_redshift)**-1.0
self.conversion_factors['particle_vely'] = \
self.conversion_factors['particle_velx']
self.conversion_factors['particle_velz'] = \
https://bitbucket.org/yt_analysis/yt/changeset/68e12d11d7a4/
changeset: 68e12d11d7a4
branch: yt
user: jzuhone
date: 2012-09-01 07:21:23
summary: Applying the scale factor to the units so that any units other than '1' refer to proper distances.
Also, changed the units of the hubble parameter from H0 in s^-1 to h.
affected #: 1 file
diff -r b0690ab390d0128e3ddc0456b313448eca70a54c -r 68e12d11d7a473853913fd25ce422e0fd42ccacf yt/frontends/flash/data_structures.py
--- a/yt/frontends/flash/data_structures.py
+++ b/yt/frontends/flash/data_structures.py
@@ -39,7 +39,7 @@
mpc_conversion, sec_conversion
from yt.utilities.io_handler import \
io_registry
-
+from yt.utilities.physical_constants import cm_per_mpc
from .fields import FLASHFieldInfo, add_flash_field, KnownFLASHFields
from yt.data_objects.field_info_container import FieldInfoContainer, NullFunc, \
ValidateDataField
@@ -265,7 +265,8 @@
self.conversion_factors["Time"] = 1.0
for unit in mpc_conversion.keys():
self.units[unit] = mpc_conversion[unit] / mpc_conversion["cm"]
-
+ self.units[unit] /= (1.0+self.current_redshift)
+
def _setup_cgs_units(self):
self.conversion_factors['dens'] = 1.0
self.conversion_factors['pres'] = 1.0
@@ -407,6 +408,7 @@
self.omega_lambda = self.parameters['cosmologicalconstant']
self.omega_matter = self.parameters['omegamatter']
self.hubble_constant = self.parameters['hubbleconstant']
+ self.hubble_constant *= cm_per_mpc * 1.0e-5 * 1.0e-2 # convert to 'h'
except:
self.current_redshift = self.omega_lambda = self.omega_matter = \
self.hubble_constant = self.cosmological_simulation = 0.0
https://bitbucket.org/yt_analysis/yt/changeset/e0af089357f3/
changeset: e0af089357f3
branch: yt
user: jzuhone
date: 2012-09-01 07:35:23
summary: Reverting my changes which I never intended to commit
affected #: 1 file
diff -r 68e12d11d7a473853913fd25ce422e0fd42ccacf -r e0af089357f3f5360d748620a098b330443056b9 yt/data_objects/particle_trajectories.py
--- a/yt/data_objects/particle_trajectories.py
+++ b/yt/data_objects/particle_trajectories.py
@@ -80,8 +80,7 @@
indices.sort() # Just in case the caller wasn't careful
self.field_data = YTFieldData()
- #self.pfs = TimeSeriesData.from_filenames(filenames)
- self.pfs = [load(fn) for fn in filenames]
+ self.pfs = TimeSeriesData.from_filenames(filenames)
self.masks = []
self.sorts = []
self.indices = indices
@@ -114,14 +113,14 @@
for pf in self.pfs :
dd = pf.h.all_data()
newtags = dd["particle_index"].astype("int")
- #if not na.all(na.in1d(indices, newtags, assume_unique=True)) :
- # print "Not all requested particle ids contained in this file!"
- # raise IndexError
- #mask = na.in1d(newtags, indices, assume_unique=True)
- #sorts = na.argsort(newtags[mask])
- #self.masks.append(mask)
- #self.sorts.append(sorts)
- #self.times.append(pf.current_time)
+ if not na.all(na.in1d(indices, newtags, assume_unique=True)) :
+ print "Not all requested particle ids contained in this file!"
+ raise IndexError
+ mask = na.in1d(newtags, indices, assume_unique=True)
+ sorts = na.argsort(newtags[mask])
+ self.masks.append(mask)
+ self.sorts.append(sorts)
+ self.times.append(pf.current_time)
self.times = na.array(self.times)
https://bitbucket.org/yt_analysis/yt/changeset/51341cebfb77/
changeset: 51341cebfb77
branch: yt
user: jzuhone
date: 2012-09-01 07:36:23
summary: And another
affected #: 1 file
diff -r e0af089357f3f5360d748620a098b330443056b9 -r 51341cebfb77402c1386aafb2a56704e8137741d yt/data_objects/particle_trajectories.py
--- a/yt/data_objects/particle_trajectories.py
+++ b/yt/data_objects/particle_trajectories.py
@@ -23,7 +23,6 @@
from yt.data_objects.data_containers import YTFieldData
from yt.data_objects.time_series import TimeSeriesData
from yt.utilities.lib import sample_field_at_positions
-from yt.convenience import load
from yt.funcs import *
import numpy as na
https://bitbucket.org/yt_analysis/yt/changeset/4561e3ff9d02/
changeset: 4561e3ff9d02
branch: yt
user: MatthewTurk
date: 2012-09-01 19:21:28
summary: Merged in jzuhone/yt (pull request #260)
affected #: 5 files
diff -r b768f6d5c7756bd548c02011354f86675ffb21c1 -r 4561e3ff9d02043ae20e66fc2623c82b69f5ef42 yt/frontends/flash/data_structures.py
--- a/yt/frontends/flash/data_structures.py
+++ b/yt/frontends/flash/data_structures.py
@@ -39,7 +39,7 @@
mpc_conversion, sec_conversion
from yt.utilities.io_handler import \
io_registry
-
+from yt.utilities.physical_constants import cm_per_mpc
from .fields import FLASHFieldInfo, add_flash_field, KnownFLASHFields
from yt.data_objects.field_info_container import FieldInfoContainer, NullFunc, \
ValidateDataField
@@ -229,13 +229,13 @@
self.conversion_factors = defaultdict(lambda: 1.0)
if "EOSType" not in self.parameters:
self.parameters["EOSType"] = -1
- if self.cosmological_simulation == 1:
- self._setup_comoving_units()
if "pc_unitsbase" in self.parameters:
if self.parameters["pc_unitsbase"] == "CGS":
self._setup_cgs_units()
else:
self._setup_nounits_units()
+ if self.cosmological_simulation == 1:
+ self._setup_comoving_units()
self.time_units['1'] = 1
self.units['1'] = 1.0
self.units['unitary'] = 1.0 / \
@@ -252,10 +252,10 @@
self.conversion_factors['eint'] = (1.0 + self.current_redshift)**-2.0
self.conversion_factors['ener'] = (1.0 + self.current_redshift)**-2.0
self.conversion_factors['temp'] = (1.0 + self.current_redshift)**-2.0
- self.conversion_factors['velx'] = (1.0 + self.current_redshift)
+ self.conversion_factors['velx'] = (1.0 + self.current_redshift)**-1.0
self.conversion_factors['vely'] = self.conversion_factors['velx']
self.conversion_factors['velz'] = self.conversion_factors['velx']
- self.conversion_factors['particle_velx'] = (1.0 + self.current_redshift)
+ self.conversion_factors['particle_velx'] = (1.0 + self.current_redshift)**-1.0
self.conversion_factors['particle_vely'] = \
self.conversion_factors['particle_velx']
self.conversion_factors['particle_velz'] = \
@@ -265,7 +265,8 @@
self.conversion_factors["Time"] = 1.0
for unit in mpc_conversion.keys():
self.units[unit] = mpc_conversion[unit] / mpc_conversion["cm"]
-
+ self.units[unit] /= (1.0+self.current_redshift)
+
def _setup_cgs_units(self):
self.conversion_factors['dens'] = 1.0
self.conversion_factors['pres'] = 1.0
@@ -407,6 +408,7 @@
self.omega_lambda = self.parameters['cosmologicalconstant']
self.omega_matter = self.parameters['omegamatter']
self.hubble_constant = self.parameters['hubbleconstant']
+ self.hubble_constant *= cm_per_mpc * 1.0e-5 * 1.0e-2 # convert to 'h'
except:
self.current_redshift = self.omega_lambda = self.omega_matter = \
self.hubble_constant = self.cosmological_simulation = 0.0
diff -r b768f6d5c7756bd548c02011354f86675ffb21c1 -r 4561e3ff9d02043ae20e66fc2623c82b69f5ef42 yt/frontends/flash/io.py
--- a/yt/frontends/flash/io.py
+++ b/yt/frontends/flash/io.py
@@ -51,23 +51,6 @@
count_list, conv_factors):
pass
- def _select_particles(self, grid, field):
- f = self._handle
- npart = f["/tracer particles"].shape[0]
- total_selected = 0
- start = 0
- stride = 1e6
- blki = self._particle_fields["particle_blk"]
- bi = grid.id - grid._id_offset
- fi = self._particle_fields[field]
- tr = []
- while start < npart:
- end = min(start + stride - 1, npart)
- gi = f["/tracer particles"][start:end,blki] == bi
- tr.append(f["/tracer particles"][gi,fi])
- start = end
- return na.concatenate(tr)
-
def _read_data_set(self, grid, field):
f = self._handle
if field in self._particle_fields:
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
More information about the yt-svn
mailing list