[yt-svn] commit/yt: 3 new changesets
Bitbucket
commits-noreply at bitbucket.org
Fri Feb 24 06:44:42 PST 2012
3 new commits in yt:
https://bitbucket.org/yt_analysis/yt/changeset/63be95c6afcc/
changeset: 63be95c6afcc
branch: yt
user: alrosen
date: 2012-02-03 02:23:35
summary: Orion frontend now looks for sink particles as well as star particles
affected #: 1 file
diff -r 9d72d2c6f211dd5342291506a799b297942eae1b -r 63be95c6afcc9cd5ff8109a5adb377adda5f5931 yt/frontends/orion/data_structures.py
--- a/yt/frontends/orion/data_structures.py
+++ b/yt/frontends/orion/data_structures.py
@@ -135,31 +135,59 @@
"""
self.grid_particle_count = na.zeros(len(self.grids))
- fn = self.pf.fullplotdir + "/StarParticles"
- with open(fn, 'r') as f:
- lines = f.readlines()
- self.num_stars = int(lines[0].strip())
- for line in lines[1:]:
- particle_position_x = float(line.split(' ')[1])
- particle_position_y = float(line.split(' ')[2])
- particle_position_z = float(line.split(' ')[3])
- coord = [particle_position_x, particle_position_y, particle_position_z]
- # for each particle, determine which grids contain it
- # copied from object_finding_mixin.py
- mask=na.ones(self.num_grids)
- for i in xrange(len(coord)):
- na.choose(na.greater(self.grid_left_edge[:,i],coord[i]), (mask,0), mask)
- na.choose(na.greater(self.grid_right_edge[:,i],coord[i]), (0,mask), mask)
- ind = na.where(mask == 1)
- selected_grids = self.grids[ind]
- # in orion, particles always live on the finest level.
- # so, we want to assign the particle to the finest of
- # the grids we just found
- if len(selected_grids) != 0:
- grid = sorted(selected_grids, key=lambda grid: grid.Level)[-1]
- ind = na.where(self.grids == grid)[0][0]
- self.grid_particle_count[ind] += 1
- self.grids[ind].NumberOfParticles += 1
+ try:
+ fn = self.pf.fullplotdir + "/StarParticles"
+ with open(fn, 'r') as f:
+ print 'You do not appear to have a star particle file :(\n'
+ lines = f.readlines()
+ self.num_stars = int(lines[0].strip())
+ for line in lines[1:]:
+ particle_position_x = float(line.split(' ')[1])
+ particle_position_y = float(line.split(' ')[2])
+ particle_position_z = float(line.split(' ')[3])
+ coord = [particle_position_x, particle_position_y, particle_position_z]
+ # for each particle, determine which grids contain it
+ # copied from object_finding_mixin.py
+ mask=na.ones(self.num_grids)
+ for i in xrange(len(coord)):
+ na.choose(na.greater(self.grid_left_edge[:,i],coord[i]), (mask,0), mask)
+ na.choose(na.greater(self.grid_right_edge[:,i],coord[i]), (0,mask), mask)
+ ind = na.where(mask == 1)
+ selected_grids = self.grids[ind]
+ # in orion, particles always live on the finest level.
+ # so, we want to assign the particle to the finest of
+ # the grids we just found
+ if len(selected_grids) != 0:
+ grid = sorted(selected_grids, key=lambda grid: grid.Level)[-1]
+ ind = na.where(self.grids == grid)[0][0]
+ self.grid_particle_count[ind] += 1
+ self.grids[ind].NumberOfParticles += 1
+ except IOError:
+ fn = self.pf.fullplotdir + "/SinkParticles"
+ with open(fn, 'r') as f:
+ lines = f.readlines()
+ self.num_stars = int(lines[0].strip())
+ for line in lines[1:]:
+ particle_position_x = float(line.split(' ')[1])
+ particle_position_y = float(line.split(' ')[2])
+ particle_position_z = float(line.split(' ')[3])
+ coord = [particle_position_x, particle_position_y, particle_position_z]
+ # for each particle, determine which grids contain it
+ # copied from object_finding_mixin.py
+ mask=na.ones(self.num_grids)
+ for i in xrange(len(coord)):
+ na.choose(na.greater(self.grid_left_edge[:,i],coord[i]), (mask,0), mask)
+ na.choose(na.greater(self.grid_right_edge[:,i],coord[i]), (0,mask), mask)
+ ind = na.where(mask == 1)
+ selected_grids = self.grids[ind]
+ # in orion, particles always live on the finest level.
+ # so, we want to assign the particle to the finest of
+ # the grids we just found
+ if len(selected_grids) != 0:
+ grid = sorted(selected_grids, key=lambda grid: grid.Level)[-1]
+ ind = na.where(self.grids == grid)[0][0]
+ self.grid_particle_count[ind] += 1
+ self.grids[ind].NumberOfParticles += 1
def readGlobalHeader(self,filename,paranoid_read):
"""
https://bitbucket.org/yt_analysis/yt/changeset/c1135a8515ea/
changeset: c1135a8515ea
branch: yt
user: alrosen
date: 2012-02-21 23:05:13
summary: Altered Orion frontend to find the StarParticles or SinkParticles file. If neither exists, the IOError is skipped.
affected #: 1 file
diff -r 63be95c6afcc9cd5ff8109a5adb377adda5f5931 -r c1135a8515eaef292fe5f45b971043db1968c846 yt/frontends/orion/data_structures.py
--- a/yt/frontends/orion/data_structures.py
+++ b/yt/frontends/orion/data_structures.py
@@ -138,7 +138,6 @@
try:
fn = self.pf.fullplotdir + "/StarParticles"
with open(fn, 'r') as f:
- print 'You do not appear to have a star particle file :(\n'
lines = f.readlines()
self.num_stars = int(lines[0].strip())
for line in lines[1:]:
@@ -163,32 +162,35 @@
self.grid_particle_count[ind] += 1
self.grids[ind].NumberOfParticles += 1
except IOError:
- fn = self.pf.fullplotdir + "/SinkParticles"
- with open(fn, 'r') as f:
- lines = f.readlines()
- self.num_stars = int(lines[0].strip())
- for line in lines[1:]:
- particle_position_x = float(line.split(' ')[1])
- particle_position_y = float(line.split(' ')[2])
- particle_position_z = float(line.split(' ')[3])
- coord = [particle_position_x, particle_position_y, particle_position_z]
- # for each particle, determine which grids contain it
- # copied from object_finding_mixin.py
- mask=na.ones(self.num_grids)
- for i in xrange(len(coord)):
- na.choose(na.greater(self.grid_left_edge[:,i],coord[i]), (mask,0), mask)
- na.choose(na.greater(self.grid_right_edge[:,i],coord[i]), (0,mask), mask)
- ind = na.where(mask == 1)
- selected_grids = self.grids[ind]
- # in orion, particles always live on the finest level.
- # so, we want to assign the particle to the finest of
- # the grids we just found
- if len(selected_grids) != 0:
- grid = sorted(selected_grids, key=lambda grid: grid.Level)[-1]
- ind = na.where(self.grids == grid)[0][0]
- self.grid_particle_count[ind] += 1
- self.grids[ind].NumberOfParticles += 1
-
+ try:
+ fn = self.pf.fullplotdir + "/SinkParticles"
+ with open(fn, 'r') as f:
+ lines = f.readlines()
+ self.num_stars = int(lines[0].strip())
+ for line in lines[1:]:
+ particle_position_x = float(line.split(' ')[1])
+ particle_position_y = float(line.split(' ')[2])
+ particle_position_z = float(line.split(' ')[3])
+ coord = [particle_position_x, particle_position_y, particle_position_z]
+ # for each particle, determine which grids contain it
+ # copied from object_finding_mixin.py
+ mask=na.ones(self.num_grids)
+ for i in xrange(len(coord)):
+ na.choose(na.greater(self.grid_left_edge[:,i],coord[i]), (mask,0), mask)
+ na.choose(na.greater(self.grid_right_edge[:,i],coord[i]), (0,mask), mask)
+ ind = na.where(mask == 1)
+ selected_grids = self.grids[ind]
+ # in orion, particles always live on the finest level.
+ # so, we want to assign the particle to the finest of
+ # the grids we just found
+ if len(selected_grids) != 0:
+ grid = sorted(selected_grids, key=lambda grid: grid.Level)[-1]
+ ind = na.where(self.grids == grid)[0][0]
+ self.grid_particle_count[ind] += 1
+ self.grids[ind].NumberOfParticles += 1
+ except IOError:
+ pass
+
def readGlobalHeader(self,filename,paranoid_read):
"""
read the global header file for an Orion plotfile output.
https://bitbucket.org/yt_analysis/yt/changeset/628a21c5a006/
changeset: 628a21c5a006
branch: yt
user: MatthewTurk
date: 2012-02-24 15:43:41
summary: Merging from Anna, reverting to Jeff's changed routine.
affected #: 31 files
diff -r c1135a8515eaef292fe5f45b971043db1968c846 -r 628a21c5a006843c58c4c3ee893f2c1d2366be69 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -15,7 +15,7 @@
# And, feel free to drop me a line: matthewturk at gmail.com
#
-DEST_SUFFIX="yt-`uname -p`"
+DEST_SUFFIX="yt-`uname -m`"
DEST_DIR="`pwd`/${DEST_SUFFIX/ /}" # Installation location
BRANCH="yt" # This is the branch to which we will forcibly update.
@@ -40,7 +40,6 @@
INST_FTYPE=1 # Install FreeType2 locally?
INST_ENZO=0 # Clone a copy of Enzo?
INST_SQLITE3=1 # Install a local version of SQLite3?
-INST_FORTHON=1 # Install Forthon?
INST_PYX=0 # Install PyX? Sometimes PyX can be problematic without a
# working TeX installation.
@@ -204,10 +203,6 @@
get_willwont ${INST_SQLITE3}
echo "be installing SQLite3"
-printf "%-15s = %s so I " "INST_FORTHON" "${INST_FORTHON}"
-get_willwont ${INST_FORTHON}
-echo "be installing Forthon (for Halo Finding, etc)"
-
printf "%-15s = %s so I " "INST_HG" "${INST_HG}"
get_willwont ${INST_HG}
echo "be installing Mercurial"
@@ -323,7 +318,6 @@
# Now we dump all our SHA512 files out.
echo '8da1b0af98203254a1cf776d73d09433f15b5090871f9fd6d712cea32bcd44446b7323ae1069b28907d2728e77944a642825c61bc3b54ceb46c91897cc4f6051 Cython-0.15.1.tar.gz' > Cython-0.15.1.tar.gz.sha512
-echo '2564011f64cd7ea24d49c6103603ced857bcb79a3837032b959005b64f9da226a08c95d920ae59034ca2c5957a45c99949811649de9e5e73cdbb23396e11f756 Forthon-0.8.5.tar.gz' > Forthon-0.8.5.tar.gz.sha512
echo 'b8a12bf05b3aafa71135e47da81440fd0f16a4bd91954bc5615ad3d3b7f9df7d5a7d5620dc61088dc6b04952c5c66ebda947a4cfa33ed1be614c8ca8c0f11dff PhiloGL-1.4.2.zip' > PhiloGL-1.4.2.zip.sha512
echo '44eea803870a66ff0bab08d13a8b3388b5578ebc1c807d1d9dca0a93e6371e91b15d02917a00b3b20dc67abb5a21dabaf9b6e9257a561f85eeff2147ac73b478 PyX-0.11.1.tar.gz' > PyX-0.11.1.tar.gz.sha512
echo '1a754d560bfa433f0960ab3b5a62edb5f291be98ec48cf4e5941fa5b84139e200b87a52efbbd6fa4a76d6feeff12439eed3e7a84db4421940d1bbb576f7a684e Python-2.7.2.tgz' > Python-2.7.2.tgz.sha512
@@ -361,7 +355,6 @@
get_enzotools ipython-0.10.tar.gz
get_enzotools h5py-2.0.1.tar.gz
get_enzotools Cython-0.15.1.tar.gz
-get_enzotools Forthon-0.8.5.tar.gz
get_enzotools ext-3.3.2.zip
get_enzotools ext-slate-110328.zip
get_enzotools PhiloGL-1.4.2.zip
@@ -577,7 +570,6 @@
do_setup_py ipython-0.10
do_setup_py h5py-2.0.1
do_setup_py Cython-0.15.1
-[ $INST_FORTHON -eq 1 ] && do_setup_py Forthon-0.8.5
[ $INST_PYX -eq 1 ] && do_setup_py PyX-0.11.1
echo "Doing yt update, wiping local changes and updating to branch ${BRANCH}"
@@ -589,7 +581,6 @@
echo $HDF5_DIR > hdf5.cfg
[ $INST_PNG -eq 1 ] && echo $PNG_DIR > png.cfg
[ $INST_FTYPE -eq 1 ] && echo $FTYPE_DIR > freetype.cfg
-[ $INST_FORTHON -eq 1 ] && ( ( cd yt/utilities/kdtree && FORTHON_EXE=${DEST_DIR}/bin/Forthon make 2>&1 ) 1>> ${LOG_FILE} )
( ${DEST_DIR}/bin/python2.7 setup.py develop 2>&1 ) 1>> ${LOG_FILE} || do_exit
touch done
cd $MY_PWD
diff -r c1135a8515eaef292fe5f45b971043db1968c846 -r 628a21c5a006843c58c4c3ee893f2c1d2366be69 yt/analysis_modules/halo_finding/halo_objects.py
--- a/yt/analysis_modules/halo_finding/halo_objects.py
+++ b/yt/analysis_modules/halo_finding/halo_objects.py
@@ -2243,7 +2243,7 @@
total_mass = \
self.comm.mpi_allreduce((self._data_source["ParticleMassMsun"][select]).sum(dtype='float64'), op='sum')
else:
- total_mass = self.comm.mpi_allreduce(self._data_source["ParticleMassMsun"].sum(dtype='float64'), op='sum')
+ total_mass = self.comm.mpi_allreduce(self._data_source.quantities["TotalQuantity"]("ParticleMassMsun")[0], op='sum')
# MJT: Note that instead of this, if we are assuming that the particles
# are all on different processors, we should instead construct an
# object representing the entire domain and sum it "lazily" with
@@ -2255,14 +2255,17 @@
self.partition_hierarchy_3d(ds=self._data_source,
padding=self.padding)
self.bounds = (LE, RE)
- # reflect particles around the periodic boundary
- #self._reposition_particles((LE, RE))
- if dm_only:
+ # sub_mass can be skipped if subvolume is not used and this is not
+ # parallel.
+ if subvolume is None and \
+ ytcfg.getint("yt", "__topcomm_parallel_size") == 1:
+ sub_mass = total_mass
+ elif dm_only:
select = self._get_dm_indices()
sub_mass = self._data_source["ParticleMassMsun"][select].sum(dtype='float64')
else:
sub_mass = \
- self._data_source["ParticleMassMsun"].sum(dtype='float64')
+ self._data_source.quantities["TotalQuantity"]("ParticleMassMsun")[0]
HOPHaloList.__init__(self, self._data_source,
threshold * total_mass / sub_mass, dm_only)
self._parse_halolist(total_mass / sub_mass)
diff -r c1135a8515eaef292fe5f45b971043db1968c846 -r 628a21c5a006843c58c4c3ee893f2c1d2366be69 yt/analysis_modules/halo_merger_tree/enzofof_merger_tree.py
--- a/yt/analysis_modules/halo_merger_tree/enzofof_merger_tree.py
+++ b/yt/analysis_modules/halo_merger_tree/enzofof_merger_tree.py
@@ -295,6 +295,7 @@
Maximum number of child halos each leaf can have.
"""
self.halonum = halonum
+ self.max_children = max_children
self.output_numbers = sorted(self.relationships, reverse=True)
self.levels = {}
trunk = self.output_numbers[0]
@@ -376,7 +377,7 @@
print "--> Most massive progenitor == Halo %d" % \
(br.progenitor)
for i,c in enumerate(br.children):
- if i > max_child: break
+ if i > self.max_children: break
print "--> Halo %8.8d :: fraction = %g" % (c[0], c[1])
def write_dot(self, filename=None):
diff -r c1135a8515eaef292fe5f45b971043db1968c846 -r 628a21c5a006843c58c4c3ee893f2c1d2366be69 yt/config.py
--- a/yt/config.py
+++ b/yt/config.py
@@ -52,6 +52,8 @@
pasteboard_repo = '',
test_storage_dir = '/does/not/exist',
enzo_db = '',
+ hub_url = 'https://127.0.0.1:5000/',
+ hub_api_key = '',
)
# Here is the upgrade. We're actually going to parse the file in its entirety
# here. Then, if it has any of the Forbidden Sections, it will be rewritten
diff -r c1135a8515eaef292fe5f45b971043db1968c846 -r 628a21c5a006843c58c4c3ee893f2c1d2366be69 yt/data_objects/api.py
--- a/yt/data_objects/api.py
+++ b/yt/data_objects/api.py
@@ -82,3 +82,6 @@
ValidateGridType, \
add_field, \
derived_field
+
+from particle_trajectories import \
+ ParticleTrajectoryCollection
diff -r c1135a8515eaef292fe5f45b971043db1968c846 -r 628a21c5a006843c58c4c3ee893f2c1d2366be69 yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -3483,16 +3483,62 @@
self._base_dx = (
(self.pf.domain_right_edge - self.pf.domain_left_edge) /
self.pf.domain_dimensions.astype("float64"))
+ self.global_endindex = None
AMRCoveringGridBase.__init__(self, *args, **kwargs)
self._final_start_index = self.global_startindex
def _get_list_of_grids(self):
if self._grids is not None: return
- buffer = ((self.pf.domain_right_edge - self.pf.domain_left_edge)
- / self.pf.domain_dimensions).max()
- AMRCoveringGridBase._get_list_of_grids(self, buffer)
- # We reverse the order to ensure that coarse grids are first
- self._grids = self._grids[::-1]
+ # Check for ill-behaved AMR schemes (Enzo) where we may have
+ # root-tile-boundary issues. This is specific to the root tiles not
+ # allowing grids to cross them and also allowing > 1 level of
+ # difference between neighboring areas.
+ nz = 0
+ buf = 0.0
+ self.min_level = 0
+ dl = ((self.global_startindex.astype("float64") + 1)
+ / (self.pf.refine_by**self.level))
+ dr = ((self.global_startindex.astype("float64")
+ + self.ActiveDimensions - 1)
+ / (self.pf.refine_by**self.level))
+ if na.any(dl == na.rint(dl)) or na.any(dr == na.rint(dr)):
+ nz = 2 * self.pf.refine_by**self.level
+ buf = self._base_dx
+ if nz <= self.pf.refine_by**3: # delta level of 3
+ last_buf = [None,None,None]
+ count = 0
+ # Repeat until no more grids are covered (up to a delta level of 3)
+ while na.any(buf != last_buf) or count == 3:
+ cg = self.pf.h.covering_grid(self.level,
+ self.left_edge - buf, self.ActiveDimensions + nz)
+ cg._use_pbar = False
+ count = cg.ActiveDimensions.prod()
+ for g in cg._grids:
+ count -= cg._get_data_from_grid(g, [])
+ if count <= 0:
+ self.min_level = g.Level
+ break
+ last_buf = buf
+ # Increase box by 2 cell widths at the min covering level
+ buf = 2*self._base_dx / self.pf.refine_by**self.min_level
+ nz += 4 * self.pf.refine_by**(self.level-self.min_level)
+ count += 1
+ else:
+ nz = buf = 0
+ self.min_level = 0
+ # This should not cost substantial additional time.
+ BLE = self.left_edge - buf
+ BRE = self.right_edge + buf
+ if na.any(BLE < self.pf.domain_left_edge) or \
+ na.any(BRE > self.pf.domain_right_edge):
+ grids,ind = self.pf.hierarchy.get_periodic_box_grids_below_level(
+ BLE, BRE, self.level, self.min_level)
+ else:
+ grids,ind = self.pf.hierarchy.get_box_grids_below_level(
+ BLE, BRE, self.level,
+ min(self.level, self.min_level))
+ sort_ind = na.argsort(self.pf.h.grid_levels.ravel()[ind])
+ self._grids = self.pf.hierarchy.grids[ind][(sort_ind,)]
def get_data(self, field=None):
self._get_list_of_grids()
@@ -3508,11 +3554,11 @@
# We jump-start our task here
mylog.debug("Getting fields %s from %s possible grids",
fields_to_get, len(self._grids))
- self._update_level_state(0, fields_to_get)
+ self._update_level_state(self.min_level, fields_to_get, initialize=True)
if self._use_pbar: pbar = \
get_pbar('Searching grids for values ', len(self._grids))
# The grids are assumed to be pre-sorted
- last_level = 0
+ last_level = self.min_level
for gi, grid in enumerate(self._grids):
if self._use_pbar: pbar.update(gi)
if grid.Level > last_level and grid.Level <= self.level:
@@ -3530,27 +3576,31 @@
raise KeyError(n_bad)
if self._use_pbar: pbar.finish()
- def _update_level_state(self, level, fields = None):
+ def _update_level_state(self, level, fields = None, initialize=False):
dx = self._base_dx / self.pf.refine_by**level
self.field_data['cdx'] = dx[0]
self.field_data['cdy'] = dx[1]
self.field_data['cdz'] = dx[2]
LL = self.left_edge - self.pf.domain_left_edge
+ RL = self.right_edge - self.pf.domain_left_edge
self._old_global_startindex = self.global_startindex
- self.global_startindex = na.rint(LL / dx).astype('int64') - 1
+ self._old_global_endindex = self.global_endindex
+ # We use one grid cell at LEAST, plus one buffer on all sides
+ self.global_startindex = na.floor(LL / dx).astype('int64') - 1
+ self.global_endindex = na.ceil(RL / dx).astype('int64') + 1
self.domain_width = na.rint((self.pf.domain_right_edge -
self.pf.domain_left_edge)/dx).astype('int64')
- if level == 0 and self.level > 0:
- # We use one grid cell at LEAST, plus one buffer on all sides
- idims = na.rint((self.right_edge-self.left_edge)/dx).astype('int64') + 2
+ if (level == 0 or initialize) and self.level > 0:
+ idims = self.global_endindex - self.global_startindex
fields = ensure_list(fields)
for field in fields:
self.field_data[field] = na.zeros(idims,dtype='float64')-999
self._cur_dims = idims.astype("int32")
- elif level == 0 and self.level == 0:
+ elif (level == 0 or initialize) and self.level == 0:
DLE = self.pf.domain_left_edge
self.global_startindex = na.array(na.floor(LL/ dx), dtype='int64')
idims = na.rint((self.right_edge-self.left_edge)/dx).astype('int64')
+ #idims = self.global_endindex - self.global_startindex
fields = ensure_list(fields)
for field in fields:
self.field_data[field] = na.zeros(idims,dtype='float64')-999
@@ -3559,15 +3609,16 @@
def _refine(self, dlevel, fields):
rf = float(self.pf.refine_by**dlevel)
- input_left = (self._old_global_startindex + 0.5) * rf
- dx = na.fromiter((self['cd%s' % ax] for ax in 'xyz'), count=3, dtype='float64')
- output_dims = na.rint((self.right_edge-self.left_edge)/dx).astype('int32') + 2
+ input_left = (self._old_global_startindex + 0.5) * rf
+ input_right = (self._old_global_endindex - 0.5) * rf
+ output_left = self.global_startindex + 0.5
+ output_right = self.global_endindex - 0.5
+ output_dims = (output_right - output_left + 1).astype('int32')
self._cur_dims = output_dims
for field in fields:
output_field = na.zeros(output_dims, dtype="float64")
- output_left = self.global_startindex + 0.5
ghost_zone_interpolate(rf, self[field], input_left,
output_field, output_left)
self.field_data[field] = output_field
@@ -3641,7 +3692,8 @@
def _make_overlaps(self):
# Using the processed cut_masks, we'll figure out what grids
# are left in the hybrid region.
- for region in self._all_regions:
+ pbar = get_pbar("Building boolean", len(self._all_regions))
+ for i, region in enumerate(self._all_regions):
try:
region._get_list_of_grids()
alias = region
@@ -3668,6 +3720,8 @@
# Some of local is in overall
self._some_overlap.append(grid)
continue
+ pbar.update(i)
+ pbar.finish()
def __repr__(self):
# We'll do this the slow way to be clear what's going on
diff -r c1135a8515eaef292fe5f45b971043db1968c846 -r 628a21c5a006843c58c4c3ee893f2c1d2366be69 yt/data_objects/particle_trajectories.py
--- /dev/null
+++ b/yt/data_objects/particle_trajectories.py
@@ -0,0 +1,387 @@
+"""
+Author: John ZuHone <jzuhone at gmail.com>
+Affiliation: NASA/GSFC
+License:
+ Copyright (C) 2012 John ZuHone All Rights Reserved.
+
+ This file is part of yt.
+
+ yt is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+from yt.data_objects.data_containers import YTFieldData
+from yt.data_objects.time_series import TimeSeriesData
+from yt.utilities.amr_utils import sample_field_at_positions
+from yt.funcs import *
+
+import numpy as na
+import h5py
+
+class ParticleTrajectoryCollection(object) :
+
+ r"""A collection of particle trajectories in time over a series of
+ parameter files.
+
+ The ParticleTrajectoryCollection object contains a collection of
+ particle trajectories for a specified set of particle indices.
+
+ Parameters
+ ----------
+ filenames : list of strings
+ A time-sorted list of filenames to construct the TimeSeriesData
+ object.
+ indices : array_like
+ An integer array of particle indices whose trajectories we
+ want to track. If they are not sorted they will be sorted.
+ fields : list of strings, optional
+ A set of fields that is retrieved when the trajectory
+ collection is instantiated.
+ Default : None (will default to the fields 'particle_position_x',
+ 'particle_position_y', 'particle_position_z')
+
+ Examples
+ ________
+ >>> from yt.mods import *
+ >>> my_fns = glob.glob("orbit_hdf5_chk_00[0-9][0-9]")
+ >>> my_fns.sort()
+ >>> fields = ["particle_position_x", "particle_position_y",
+ >>> "particle_position_z", "particle_velocity_x",
+ >>> "particle_velocity_y", "particle_velocity_z"]
+ >>> pf = load(my_fns[0])
+ >>> init_sphere = pf.h.sphere(pf.domain_center, (.5, "unitary"))
+ >>> indices = init_sphere["particle_index"].astype("int")
+ >>> trajs = ParticleTrajectoryCollection(my_fns, indices, fields=fields)
+ >>> for t in trajs :
+ >>> print t["particle_velocity_x"].max(), t["particle_velocity_x"].min()
+
+ Notes
+ -----
+ As of this time only particle trajectories that are complete over the
+ set of specified parameter files are supported. If any particle's history
+ ends for some reason (e.g. leaving the simulation domain or being actively
+ destroyed), the whole trajectory collection of which it is a set must end
+ at or before the particle's last timestep. This is a limitation we hope to
+ lift at some point in the future.
+ """
+ def __init__(self, filenames, indices, fields = None) :
+
+ indices.sort() # Just in case the caller wasn't careful
+
+ self.field_data = YTFieldData()
+ self.pfs = TimeSeriesData.from_filenames(filenames)
+ self.masks = []
+ self.sorts = []
+ self.indices = indices
+ self.num_indices = len(indices)
+ self.num_steps = len(filenames)
+ self.times = []
+
+ # Default fields
+
+ if fields is None : fields = []
+
+ # Must ALWAYS have these fields
+
+ fields = fields + ["particle_position_x",
+ "particle_position_y",
+ "particle_position_z"]
+
+ """
+ The following loops through the parameter files
+ and performs two tasks. The first is to isolate
+ the particles with the correct indices, and the
+ second is to create a sorted list of these particles.
+ We also make a list of the current time from each file.
+ Right now, the code assumes (and checks for) the
+ particle indices existing in each file, a limitation I
+ would like to lift at some point since some codes
+ (e.g., FLASH) destroy particles leaving the domain.
+ """
+
+ for pf in self.pfs :
+ dd = pf.h.all_data()
+ newtags = dd["particle_index"].astype("int")
+ if not na.all(na.in1d(indices, newtags, assume_unique=True)) :
+ print "Not all requested particle ids contained in this file!"
+ raise IndexError
+ mask = na.in1d(newtags, indices, assume_unique=True)
+ sorts = na.argsort(newtags[mask])
+ self.masks.append(mask)
+ self.sorts.append(sorts)
+ self.times.append(pf.current_time)
+
+ self.times = na.array(self.times)
+
+ # Set up the derived field list and the particle field list
+ # so that if the requested field is a particle field, we'll
+ # just copy the field over, but if the field is a grid field,
+ # we will first copy the field over to the particle positions
+ # and then return the field.
+
+ self.derived_field_list = self.pfs[0].h.derived_field_list
+ self.particle_fields = [field for field in self.derived_field_list
+ if self.pfs[0].field_info[field].particle_type]
+
+ # Now instantiate the requested fields
+ for field in fields :
+
+ self._get_data(field)
+
+ def has_key(self, key) :
+
+ return (key in self.field_data)
+
+ def keys(self) :
+
+ return self.field_data.keys()
+
+ def __getitem__(self, key) :
+ """
+ Get the field associated with key,
+ checking to make sure it is a particle field.
+ """
+
+ if not self.field_data.has_key(key) :
+
+ self._get_data(key)
+
+ return self.field_data[key]
+
+ def __setitem__(self, key, val):
+ """
+ Sets a field to be some other value.
+ """
+ self.field_data[key] = val
+
+ def __delitem__(self, key) :
+ """
+ Delete the field from the trajectory
+ """
+ del self.field_data[key]
+
+ def __iter__(self) :
+
+ """
+ This iterates over the trajectories for
+ the different particles, returning dicts
+ of fields for each trajectory
+ """
+ for idx in xrange(self.num_indices) :
+ traj = {}
+ traj["particle_index"] = self.indices[idx]
+ traj["particle_time"] = self.times
+ for field in self.field_data.keys() :
+ traj[field] = self[field][idx,:]
+ yield traj
+
+ def __len__(self) :
+
+ """
+ The number of individual trajectories
+ """
+ return self.num_indices
+
+ def add_fields(self, fields) :
+
+ """
+ Add a list of fields to an existing trajectory
+
+ Parameters
+ ----------
+ fields : list of strings
+ A list of fields to be added to the current trajectory
+ collection.
+
+ Examples
+ ________
+ >>> from yt.mods import *
+ >>> trajs = ParticleTrajectoryCollection(my_fns, indices)
+ >>> trajs.add_fields(["particle_mass", "particle_gpot"])
+ """
+
+ for field in fields :
+
+ if not self.field_data.has_key(field):
+
+ self._get_data(field)
+
+ def _get_data(self, field) :
+
+ """
+ Get a field to include in the trajectory collection.
+ The trajectory collection itself is a dict of 2D numpy arrays,
+ with shape (num_indices, num_steps)
+ """
+
+ if not self.field_data.has_key(field):
+
+ particles = na.empty((0))
+
+ step = int(0)
+
+ for pf, mask, sort in zip(self.pfs, self.masks, self.sorts) :
+
+ if field in self.particle_fields :
+
+ # This is easy... just get the particle fields
+
+ dd = pf.h.all_data()
+ pfield = dd[field][mask]
+ particles = na.append(particles, pfield[sort])
+
+ else :
+
+ # This is hard... must loop over grids
+
+ pfield = na.zeros((self.num_indices))
+ x = self["particle_position_x"][:,step]
+ y = self["particle_position_y"][:,step]
+ z = self["particle_position_z"][:,step]
+
+ leaf_grids = [g for g in pf.h.grids if len(g.Children) == 0]
+
+ for grid in leaf_grids :
+
+ pfield += sample_field_at_positions(grid[field],
+ grid.LeftEdge,
+ grid.RightEdge,
+ x, y, z)
+
+ particles = na.append(particles, pfield)
+
+ step += 1
+
+ self[field] = particles.reshape(self.num_steps,
+ self.num_indices).transpose()
+
+ return self.field_data[field]
+
+ def trajectory_from_index(self, index) :
+
+ """
+ Retrieve a single trajectory corresponding to a specific particle
+ index
+
+ Parameters
+ ----------
+ index : int
+ This defines which particle trajectory from the
+ ParticleTrajectoryCollection object will be returned.
+
+ Returns
+ -------
+ A dictionary corresponding to the particle's trajectory and the
+ fields along that trajectory
+
+ Examples
+ --------
+ >>> from yt.mods import *
+ >>> import matplotlib.pylab as pl
+ >>> trajs = ParticleTrajectoryCollection(my_fns, indices)
+ >>> traj = trajs.trajectory_from_index(indices[0])
+ >>> pl.plot(traj["particle_time"], traj["particle_position_x"], "-x")
+ >>> pl.savefig("orbit")
+ """
+
+ mask = na.in1d(self.indices, (index,), assume_unique=True)
+
+ if not na.any(mask) :
+ print "The particle index %d is not in the list!" % (index)
+ raise IndexError
+
+ fields = [field for field in sorted(self.field_data.keys())]
+
+ traj = {}
+
+ traj["particle_time"] = self.times
+ traj["particle_index"] = index
+
+ for field in fields :
+
+ traj[field] = self[field][mask,:][0]
+
+ return traj
+
+ def write_out(self, filename_base) :
+
+ """
+ Write out particle trajectories to tab-separated ASCII files (one
+ for each trajectory) with the field names in the file header. Each
+ file is named with a basename and the index number.
+
+ Parameters
+ ----------
+ filename_base : string
+ The prefix for the outputted ASCII files.
+
+ Examples
+ --------
+ >>> from yt.mods import *
+ >>> trajs = ParticleTrajectoryCollection(my_fns, indices)
+ >>> trajs.write_out("orbit_trajectory")
+ """
+
+ fields = [field for field in sorted(self.field_data.keys())]
+
+ num_fields = len(fields)
+
+ first_str = "# particle_time\t" + "\t".join(fields)+"\n"
+
+ template_str = "%g\t"*num_fields+"%g\n"
+
+ for ix in xrange(self.num_indices) :
+
+ outlines = [first_str]
+
+ for it in xrange(self.num_steps) :
+ outlines.append(template_str %
+ tuple([self.times[it]]+[self[field][ix,it] for field in fields]))
+
+ fid = open(filename_base + "_%d.dat" % self.indices[ix], "w")
+ fid.writelines(outlines)
+ fid.close()
+ del fid
+
+ def write_out_h5(self, filename) :
+
+ """
+ Write out all the particle trajectories to a single HDF5 file
+ that contains the indices, the times, and the 2D array for each
+ field individually
+
+ Parameters
+ ---------
+ filename : string
+ The output filename for the HDF5 file
+
+ Examples
+ --------
+ >>> from yt.mods import *
+ >>> trajs = ParticleTrajectoryCollection(my_fns, indices)
+ >>> trajs.write_out_h5("orbit_trajectories")
+ """
+
+ fid = h5py.File(filename, "w")
+
+ fields = [field for field in sorted(self.field_data.keys())]
+
+ fid.create_dataset("particle_indices", dtype=na.int32,
+ data=self.indices)
+ fid.create_dataset("particle_time", data=self.times)
+
+ for field in fields :
+
+ fid.create_dataset("%s" % field, data=self[field])
+
+ fid.close()
diff -r c1135a8515eaef292fe5f45b971043db1968c846 -r 628a21c5a006843c58c4c3ee893f2c1d2366be69 yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -91,6 +91,7 @@
self._parse_parameter_file()
self._set_units()
+ self._set_derived_attrs()
# Because we need an instantiated class to check the pf's existence in
# the cache, we move that check to here from __new__. This avoids
@@ -103,6 +104,10 @@
self.create_field_info()
+ def _set_derived_attrs(self):
+ self.domain_center = 0.5 * (self.domain_right_edge + self.domain_left_edge)
+ self.domain_width = self.domain_right_edge - self.domain_left_edge
+
def __reduce__(self):
args = (self._hash(),)
return (_reconstruct_pf, args)
@@ -200,16 +205,23 @@
v = getattr(self, a)
mylog.info("Parameters: %-25s = %s", a, v)
+ _field_info = None
def create_field_info(self):
- if getattr(self, "field_info", None) is None:
+ if getattr(self, "_field_info", None) is None:
# The setting up of fields occurs in the hierarchy, which is only
# instantiated once. So we have to double check to make sure that,
# in the event of double-loads of a parameter file, we do not blow
# away the exising field_info.
- self.field_info = FieldInfoContainer.create_with_fallback(
+ self._field_info = FieldInfoContainer.create_with_fallback(
self._fieldinfo_fallback)
-
+ _get_hierarchy = True
+ @property
+ def field_info(self):
+ if self._get_hierarchy:
+ self._get_hierarchy=False
+ self.hierarchy
+ return self._field_info
def _reconstruct_pf(*args, **kwargs):
pfs = ParameterFileStore()
diff -r c1135a8515eaef292fe5f45b971043db1968c846 -r 628a21c5a006843c58c4c3ee893f2c1d2366be69 yt/data_objects/universal_fields.py
--- a/yt/data_objects/universal_fields.py
+++ b/yt/data_objects/universal_fields.py
@@ -701,7 +701,7 @@
return data.convert("kpch")
add_field("ParticleRadiuskpch", function=_ParticleRadius,
validators=[ValidateParameter("center")],
- convert_function = _ConvertRadiuskpc, units=r"\rm{kpc}/\rm{h}",
+ convert_function = _ConvertRadiuskpch, units=r"\rm{kpc}/\rm{h}",
particle_type=True,
display_name = "Particle Radius")
add_field("Radiuskpch", function=_Radius,
diff -r c1135a8515eaef292fe5f45b971043db1968c846 -r 628a21c5a006843c58c4c3ee893f2c1d2366be69 yt/frontends/flash/data_structures.py
--- a/yt/frontends/flash/data_structures.py
+++ b/yt/frontends/flash/data_structures.py
@@ -226,6 +226,8 @@
if len(self.parameters) == 0:
self._parse_parameter_file()
self.conversion_factors = defaultdict(lambda: 1.0)
+ if "EOSType" not in self.parameters:
+ self.parameters["EOSType"] = -1
if self.cosmological_simulation == 1:
self._setup_comoving_units()
else:
@@ -315,16 +317,19 @@
nxb = self._find_parameter("integer", "nxb", scalar = True)
nyb = self._find_parameter("integer", "nyb", scalar = True)
nzb = self._find_parameter("integer", "nzb", scalar = True)
- dimensionality = self._find_parameter("integer", "dimensionality",
- scalar = True)
except KeyError:
nxb, nyb, nzb = [int(self._handle["/simulation parameters"]['n%sb' % ax])
- for ax in 'xyz']
+ for ax in 'xyz'] # FLASH2 only!
+ try:
+ dimensionality = self._find_parameter("integer", "dimensionality",
+ scalar = True)
+ except KeyError:
dimensionality = 3
if nzb == 1: dimensionality = 2
if nyb == 1: dimensionality = 1
if dimensionality < 3:
mylog.warning("Guessing dimensionality as %s", dimensionality)
+
nblockx = self._find_parameter("integer", "nblockx")
nblocky = self._find_parameter("integer", "nblocky")
nblockz = self._find_parameter("integer", "nblockz")
@@ -332,6 +337,11 @@
self.domain_dimensions = \
na.array([nblockx*nxb,nblocky*nyb,nblockz*nzb])
+ try:
+ self.parameters['Gamma'] = self._find_parameter("real", "gamma")
+ except KeyError:
+ pass
+
if self._flash_version == 7:
self.current_time = float(
self._handle["simulation parameters"][:]["time"])
diff -r c1135a8515eaef292fe5f45b971043db1968c846 -r 628a21c5a006843c58c4c3ee893f2c1d2366be69 yt/frontends/flash/fields.py
--- a/yt/frontends/flash/fields.py
+++ b/yt/frontends/flash/fields.py
@@ -148,6 +148,9 @@
add_flash_field("temp", function=NullFunc, take_log=True,
convert_function=_get_convert("temp"),
units=r"\rm{K}")
+add_flash_field("tele", function=NullFunc, take_log=True,
+ convert_function=_get_convert("tele"),
+ units = r"\rm{K}")
add_flash_field("pres", function=NullFunc, take_log=True,
convert_function=_get_convert("pres"),
units=r"\rm{erg}\//\/\rm{cm}^{3}")
@@ -196,9 +199,11 @@
else :
dname = f
ff = KnownFLASHFields[v]
+ pfield = f.startswith("particle")
add_field(f, TranslationFunc(v),
take_log=KnownFLASHFields[v].take_log,
- units = ff._units, display_name=dname)
+ units = ff._units, display_name=dname,
+ particle_type = pfield)
def _convertParticleMassMsun(data):
return 1.0/1.989e33
diff -r c1135a8515eaef292fe5f45b971043db1968c846 -r 628a21c5a006843c58c4c3ee893f2c1d2366be69 yt/frontends/gdf/data_structures.py
--- a/yt/frontends/gdf/data_structures.py
+++ b/yt/frontends/gdf/data_structures.py
@@ -43,6 +43,11 @@
FieldInfoContainer, NullFunc
import pdb
+def _get_convert(fname):
+ def _conv(data):
+ return data.convert(fname)
+ return _conv
+
class GDFGrid(AMRGridPatch):
_id_offset = 0
def __init__(self, id, hierarchy, level, start, dimensions):
@@ -174,10 +179,18 @@
# This should be improved.
self._handle = h5py.File(self.parameter_filename, "r")
for field_name in self._handle["/field_types"]:
+ current_field = self._handle["/field_types/%s" % field_name]
try:
- self.units[field_name] = self._handle["/field_types/%s" % field_name].attrs['field_to_cgs']
+ self.units[field_name] = current_field.attrs['field_to_cgs']
except:
self.units[field_name] = 1.0
+ try:
+ current_fields_unit = current_field.attrs['field_units'][0]
+ except:
+ current_fields_unit = ""
+ self._fieldinfo_known.add_field(field_name, function=NullFunc, take_log=False,
+ units=current_fields_unit, projected_units="",
+ convert_function=_get_convert(field_name))
self._handle.close()
del self._handle
diff -r c1135a8515eaef292fe5f45b971043db1968c846 -r 628a21c5a006843c58c4c3ee893f2c1d2366be69 yt/frontends/orion/data_structures.py
--- a/yt/frontends/orion/data_structures.py
+++ b/yt/frontends/orion/data_structures.py
@@ -131,12 +131,27 @@
def _read_particles(self):
"""
- reads in particles and assigns them to grids
+ reads in particles and assigns them to grids. Will search for
+ Star particles, then sink particles if no star particle file
+ is found, and finally will simply note that no particles are
+ found if neither works. To add a new Orion particle type,
+ simply add it to the if/elif/else block.
"""
self.grid_particle_count = na.zeros(len(self.grids))
+
+ if self._readOrionParticleFile(self.pf.fullplotdir + "/StarParticles"):
+ pass
+ elif self._readOrionParticleFile(self.pf.fullplotdir + "/SinkParticles"):
+ pass
+ else:
+ mylog.warning("No particles found.")
+
+ def _readOrionParticleFile(self, fn):
+ """actually reads the orion particle data file itself.
+
+ """
try:
- fn = self.pf.fullplotdir + "/StarParticles"
with open(fn, 'r') as f:
lines = f.readlines()
self.num_stars = int(lines[0].strip())
@@ -152,44 +167,19 @@
na.choose(na.greater(self.grid_left_edge[:,i],coord[i]), (mask,0), mask)
na.choose(na.greater(self.grid_right_edge[:,i],coord[i]), (0,mask), mask)
ind = na.where(mask == 1)
- selected_grids = self.grids[ind]
- # in orion, particles always live on the finest level.
- # so, we want to assign the particle to the finest of
- # the grids we just found
- if len(selected_grids) != 0:
- grid = sorted(selected_grids, key=lambda grid: grid.Level)[-1]
- ind = na.where(self.grids == grid)[0][0]
- self.grid_particle_count[ind] += 1
- self.grids[ind].NumberOfParticles += 1
+ selected_grids = self.grids[ind]
+ # in orion, particles always live on the finest level.
+ # so, we want to assign the particle to the finest of
+ # the grids we just found
+ if len(selected_grids) != 0:
+ grid = sorted(selected_grids, key=lambda grid: grid.Level)[-1]
+ ind = na.where(self.grids == grid)[0][0]
+ self.grid_particle_count[ind] += 1
+ self.grids[ind].NumberOfParticles += 1
except IOError:
- try:
- fn = self.pf.fullplotdir + "/SinkParticles"
- with open(fn, 'r') as f:
- lines = f.readlines()
- self.num_stars = int(lines[0].strip())
- for line in lines[1:]:
- particle_position_x = float(line.split(' ')[1])
- particle_position_y = float(line.split(' ')[2])
- particle_position_z = float(line.split(' ')[3])
- coord = [particle_position_x, particle_position_y, particle_position_z]
- # for each particle, determine which grids contain it
- # copied from object_finding_mixin.py
- mask=na.ones(self.num_grids)
- for i in xrange(len(coord)):
- na.choose(na.greater(self.grid_left_edge[:,i],coord[i]), (mask,0), mask)
- na.choose(na.greater(self.grid_right_edge[:,i],coord[i]), (0,mask), mask)
- ind = na.where(mask == 1)
- selected_grids = self.grids[ind]
- # in orion, particles always live on the finest level.
- # so, we want to assign the particle to the finest of
- # the grids we just found
- if len(selected_grids) != 0:
- grid = sorted(selected_grids, key=lambda grid: grid.Level)[-1]
- ind = na.where(self.grids == grid)[0][0]
- self.grid_particle_count[ind] += 1
- self.grids[ind].NumberOfParticles += 1
- except IOError:
- pass
+ return None
+
+ return True
def readGlobalHeader(self,filename,paranoid_read):
"""
diff -r c1135a8515eaef292fe5f45b971043db1968c846 -r 628a21c5a006843c58c4c3ee893f2c1d2366be69 yt/gui/reason/extdirect_repl.py
--- a/yt/gui/reason/extdirect_repl.py
+++ b/yt/gui/reason/extdirect_repl.py
@@ -130,7 +130,6 @@
def execute_one(self, code, hide):
self.repl.executed_cell_texts.append(code)
-
result = ProgrammaticREPL.execute(self.repl, code)
if self.repl.debug:
print "==================== Cell Execution ===================="
@@ -562,10 +561,12 @@
_tfield_list = list(set(_tpf.h.field_list + _tpf.h.derived_field_list))
_tfield_list.sort()
_tcb = _tpw._get_cbar_image()
+ _ttrans = _tpw._field_transform[_tpw._current_field].name
_twidget_data = {'fields': _tfield_list,
'initial_field': _tfield,
'title': "%%s Slice" %% (_tpf),
- 'colorbar': _tcb}
+ 'colorbar': _tcb,
+ 'initial_transform' : _ttrans}
""" % dict(pfname = pfname,
center_string = center_string,
axis = inv_axis_names[axis],
diff -r c1135a8515eaef292fe5f45b971043db1968c846 -r 628a21c5a006843c58c4c3ee893f2c1d2366be69 yt/gui/reason/html/js/functions.js
--- a/yt/gui/reason/html/js/functions.js
+++ b/yt/gui/reason/html/js/functions.js
@@ -65,7 +65,6 @@
repl_input.get("input_line").setValue("");
}
if (OutputContainer.items.length > 1) {
- examine = cell;
OutputContainer.body.dom.scrollTop =
OutputContainer.body.dom.scrollHeight -
cell.body.dom.scrollHeight - 20;
@@ -142,7 +141,6 @@
iconCls: 'pf_icon'}));
this_pf = treePanel.root.lastChild
Ext.each(pf.objects, function(obj, obj_index) {
- examine = this_pf;
this_pf.appendChild(new Ext.tree.TreeNode(
{text: obj.name,
leaf: true,
diff -r c1135a8515eaef292fe5f45b971043db1968c846 -r 628a21c5a006843c58c4c3ee893f2c1d2366be69 yt/gui/reason/html/js/widget_plotwindow.js
--- a/yt/gui/reason/html/js/widget_plotwindow.js
+++ b/yt/gui/reason/html/js/widget_plotwindow.js
@@ -43,7 +43,78 @@
}
this.widget_keys = new Ext.KeyMap(document, [
- {key: 'z', fn: function(){control_panel.get("zoom10x").handler();}}
+ {key: 'z',
+ shift: false,
+ fn: function(){
+ control_panel.get("zoom2x").handler();
+ }
+ },
+ {key: 'Z',
+ shift: true,
+ fn: function(){
+ control_panel.get("zoom10x").handler();
+ }
+ },
+ {key: 'x',
+ shift: false,
+ fn: function(){
+ control_panel.get("zoomout2x").handler();
+ }
+ },
+ {key: 'X',
+ shift: true,
+ fn: function(){
+ control_panel.get("zoomout10x").handler();
+ }
+ },
+ {key: 'k',
+ shift: false,
+ fn: function(){
+ control_panel.get("singleuparrow").handler();
+ }
+ },
+ {key: 'j',
+ shift: false,
+ fn: function(){
+ control_panel.get("singledownarrow").handler();
+ }
+ },
+ {key: 'h',
+ shift: false,
+ fn: function(){
+ control_panel.get("singleleftarrow").handler();
+ }
+ },
+ {key: 'l',
+ shift: false,
+ fn: function(){
+ control_panel.get("singlerightarrow").handler();
+ }
+ },
+ {key: 'K',
+ shift: true,
+ fn: function(){
+ control_panel.get("doubleuparrow").handler();
+ }
+ },
+ {key: 'J',
+ shift: true,
+ fn: function(){
+ control_panel.get("doubledownarrow").handler();
+ }
+ },
+ {key: 'H',
+ shift: true,
+ fn: function(){
+ control_panel.get("doubleleftarrow").handler();
+ }
+ },
+ {key: 'L',
+ shift: true,
+ fn: function(){
+ control_panel.get("doublerightarrow").handler();
+ }
+ },
]);
var widget_keys = this.widget_keys;
widget_keys.disable();
@@ -159,6 +230,7 @@
/* the single buttons for 10% pan*/
xtype:'button',
iconCls: 'singleuparrow',
+ id: 'singleuparrow',
//text: 'North',
x: 40,
y: 10,
@@ -170,6 +242,7 @@
}, {
xtype:'button',
iconCls: 'singlerightarrow',
+ id: 'singlerightarrow',
//text:'East',
x : 60,
y : 30,
@@ -182,6 +255,7 @@
}, {
xtype:'button',
iconCls: 'singledownarrow',
+ id: 'singledownarrow',
//text: 'South',
x: 40,
y: 50,
@@ -194,6 +268,7 @@
}, {
xtype: 'button',
iconCls: 'singleleftarrow',
+ id: 'singleleftarrow',
//text: 'West',
x: 20,
y: 30,
@@ -208,6 +283,7 @@
{
xtype:'button',
iconCls: 'doubleuparrow',
+ id:'doubleuparrow',
//text: 'North',
x: 40,
y: 80,
@@ -219,6 +295,7 @@
}, {
xtype:'button',
iconCls: 'doublerightarrow',
+ id:'doublerightarrow',
//text:'East',
x : 60,
y : 100,
@@ -232,6 +309,7 @@
xtype:'button',
iconCls: 'doubledownarrow',
//text: 'South',
+ id: 'doubledownarrow',
x: 40,
y: 120,
handler: function(b,e) {
@@ -243,6 +321,7 @@
}, {
xtype: 'button',
iconCls: 'doubleleftarrow',
+ id: 'doubleleftarrow',
//text: 'West',
x: 20,
y: 100,
@@ -270,6 +349,7 @@
},{
xtype: 'button',
text: 'Zoom In 2x',
+ id: "zoom2x",
x: 10,
y: 185,
width: 80,
@@ -282,6 +362,7 @@
},{
xtype: 'button',
text: 'Zoom Out 2x',
+ id:'zoomout2x',
x: 10,
y: 210,
width: 80,
@@ -294,6 +375,7 @@
},{
xtype: 'button',
text: 'Zoom Out 10x',
+ id:'zoomout10x',
x: 10,
y: 235,
width: 80,
@@ -365,11 +447,168 @@
html: 'Welcome to the Plot Window.',
height: 200,
}, {
+ xtype: 'tabpanel',
+ id: 'editor_panel',
+ flex: 1,
+ activeTab: 0,
+ items: [
+ {
xtype: 'panel',
title: 'Plot Editor',
id: 'plot_edit',
+ style: {fontFamily: '"Inconsolata", monospace'},
+ layout: 'absolute',
flex: 1,
- }]
+ items : [
+ {
+ x: 10,
+ y: 20,
+ width: 70,
+ xtype: 'label',
+ text: 'Display',
+ },
+ {
+ x: 80,
+ y: 20,
+ width : 80,
+ xtype: 'combo',
+ editable: false,
+ triggerAction: 'all',
+ validateOnBlur: false,
+ store: ['log10', 'linear'],
+ value: widget_data['initial_transform'],
+ listeners: {select: function(combo, record, index){
+ var newValue = '"' + record.data['field1'] + '"';
+ yt_rpc.ExtDirectREPL.execute(
+ {code:python_varname + '.set_transform('
+ + python_varname + '._current_field, '
+ + newValue + ')', hide:false},
+ cell_finished);
+ }}
+ },
+ {
+ x: 10,
+ y: 60,
+ width: 70,
+ xtype: 'label',
+ text: 'Colormap',
+ },
+ {
+ x: 80,
+ y: 60,
+ width : 140,
+ xtype: 'combo',
+ editable: false,
+ triggerAction: 'all',
+ validateOnBlur: false,
+ store: ['algae', 'RdBu', 'gist_stern',
+ 'hot', 'jet', 'kamae',
+ 'B-W LINEAR', 'BLUE',
+ 'GRN-RED-BLU-WHT', 'RED TEMPERATURE',
+ 'BLUE', 'STD GAMMA-II', 'PRISM',
+ 'RED-PURPLE', 'GREEN', 'GRN',
+ 'GREEN-PINK', 'BLUE-RED', '16 LEVEL',
+ 'RAINBOW', 'STEPS', 'STERN SPECIAL',
+ 'Haze', 'Blue - Pastel - Red',
+ 'Pastels', 'Hue Sat Lightness 1',
+ 'Hue Sat Lightness 2', 'Hue Sat Value 1',
+ 'Hue Sat Value 2', 'Purple-Red + Stripes',
+ 'Beach', 'Mac Style', 'Eos A', 'Eos B',
+ 'Hardcandy', 'Nature', 'Ocean', 'Peppermint',
+ 'Plasma', 'Blue-Red', 'Rainbow', 'Blue Waves',
+ 'Volcano', 'Waves', 'Rainbow18',
+ 'Rainbow + white', 'Rainbow + black'],
+ value: 'algae',
+ listeners: {select: function(combo, record, index){
+ var newValue = '"' + record.data['field1'] + '"';
+ yt_rpc.ExtDirectREPL.execute(
+ {code:python_varname + '.set_cmap('
+ + python_varname + '._current_field, '
+ + newValue + ')', hide:false},
+ cell_finished);
+ }}
+ }
+ ]
+ }, {
+ xtype: 'panel',
+ title: 'Contours',
+ id: 'contour_edit',
+ style: {fontFamily: '"Inconsolata", monospace'},
+ layout: 'absolute',
+ flex: 1,
+ items : [
+ {
+ x: 10,
+ y: 20,
+ width: 70,
+ xtype: 'label',
+ text: 'Field',
+ },
+ {
+ x: 80,
+ y: 20,
+ width : 160,
+ xtype: 'combo',
+ editable: false,
+ id: 'field',
+ triggerAction: 'all',
+ validateOnBlur: false,
+ value:widget_data['initial_field'],
+ store: widget_data['fields'],
+ }, {
+ x: 10,
+ y: 60,
+ width: 70,
+ xtype: 'label',
+ text: 'Levels',
+ }, {
+ x: 80,
+ y: 60,
+ width : 160,
+ xtype: 'slider',
+ id: 'ncont',
+ minValue: 0,
+ maxValue: 10,
+ value: 5,
+ increment: 1,
+ plugins: new Ext.slider.Tip(),
+ }, {
+ x: 10,
+ y: 100,
+ width: 70,
+ xtype: 'label',
+ text: 'Logspaced',
+ }, {
+ x: 80,
+ y: 100,
+ width : 160,
+ xtype: 'checkbox',
+ id: 'logit',
+ checked: true,
+ }, {
+ x: 10,
+ y: 180,
+ width: 80,
+ xtype: 'button',
+ text: 'Apply',
+ handler: function(b, e) {
+ field = contour_window.get('field').getValue();
+ ncont = contour_window.get('ncont').getValue();
+ logit = contour_window.get('logit').getValue();
+ if (logit == false) logit = 'False';
+ else if (logit == true) logit = 'True';
+ yt_rpc.ExtDirectREPL.execute(
+ {code:python_varname
+ + '.set_contour_info("' + field + '", '
+ + ncont + ', ' + logit + ')',
+ hide:false},
+ cell_finished);
+ }
+ }
+ ]
+ }
+ ] } /* tabpanel items and entry */
+ ]
}
]
}
@@ -384,8 +623,12 @@
this.image_panel = this.panel.get("image_panel_"+python_varname);
this.ticks = this.panel.get("ticks_"+python_varname);
var ticks = this.ticks;
+ var colorbar = this.panel.get("colorbar_"+python_varname);
this.metadata_panel = this.panel.get("rhs_panel_" + python_varname).get("metadata_" + python_varname);
this.zoom_scroll = this.panel.get("slider_" + python_varname);
+ var contour_window = this.panel.get("rhs_panel_" + python_varname);
+ contour_window = contour_window.get("editor_panel");
+ contour_window = contour_window.get("contour_edit");
var image_dom = this.image_panel.el.dom;
var control_panel = this.panel;
var metadata_string;
@@ -393,12 +636,10 @@
this.accept_results = function(payload) {
this.image_panel.el.dom.src = "data:image/png;base64," + payload['image_data'];
this.zoom_scroll.setValue(0, payload['zoom'], true);
- examine = this.metadata_panel;
this.metadata_panel.update(payload['metadata_string']);
metadata_string = payload['metadata_string'];
ticks.removeAll();
Ext.each(payload['ticks'], function(tick, index) {
- console.log(tick);
ticks.add({xtype:'panel',
width: 10, height:1,
style: 'background-color: #000000;',
@@ -411,9 +652,11 @@
'font-size: 12px;',
html: '' + tick[2] + '',
x:12, y: 4 + tick[0]});
- examine = tick;
});
- examine = payload['ticks'];
+ if (payload['colorbar_image'] != null) {
+ colorbar.el.dom.src = "data:image/png;base64," +
+ payload['colorbar_image'];
+ }
ticks.doLayout();
}
diff -r c1135a8515eaef292fe5f45b971043db1968c846 -r 628a21c5a006843c58c4c3ee893f2c1d2366be69 yt/mods.py
--- a/yt/mods.py
+++ b/yt/mods.py
@@ -60,7 +60,8 @@
derived_field, add_field, FieldInfo, \
ValidateParameter, ValidateDataField, ValidateProperty, \
ValidateSpatial, ValidateGridType, \
- TimeSeriesData, AnalysisTask, analysis_task
+ TimeSeriesData, AnalysisTask, analysis_task, \
+ ParticleTrajectoryCollection
from yt.data_objects.derived_quantities import \
add_quantity, quantity_info
diff -r c1135a8515eaef292fe5f45b971043db1968c846 -r 628a21c5a006843c58c4c3ee893f2c1d2366be69 yt/utilities/_amr_utils/CICDeposit.pyx
--- a/yt/utilities/_amr_utils/CICDeposit.pyx
+++ b/yt/utilities/_amr_utils/CICDeposit.pyx
@@ -78,3 +78,32 @@
field[i1 ,j1-1,k1 ] += mass[n] * dx2 * dy * dz2
field[i1-1,j1 ,k1 ] += mass[n] * dx * dy2 * dz2
field[i1 ,j1 ,k1 ] += mass[n] * dx2 * dy2 * dz2
+
+@cython.boundscheck(False)
+@cython.wraparound(False)
+@cython.cdivision(True)
+def sample_field_at_positions(np.ndarray[np.float64_t, ndim=3] arr,
+ np.ndarray[np.float64_t, ndim=1] left_edge,
+ np.ndarray[np.float64_t, ndim=1] right_edge,
+ np.ndarray[np.float64_t, ndim=1] pos_x,
+ np.ndarray[np.float64_t, ndim=1] pos_y,
+ np.ndarray[np.float64_t, ndim=1] pos_z):
+ cdef np.float64_t idds[3], pp[3]
+ cdef int dims[3], npart, ind[3]
+ cdef int i, j
+ npart = pos_x.shape[0]
+ cdef np.ndarray[np.float64_t, ndim=1] sample
+ sample = np.zeros(npart, dtype='float64')
+ for i in range(3):
+ dims[i] = arr.shape[i]
+ idds[i] = (<np.float64_t> dims[i]) / (right_edge[i] - left_edge[i])
+ for i in range(npart):
+ if not ((left_edge[0] <= pos_x[i] <= right_edge[0]) and
+ (left_edge[1] <= pos_y[i] <= right_edge[1]) and
+ (left_edge[2] <= pos_z[i] <= right_edge[2])):
+ continue
+ ind[0] = <int> ((pos_x[i] - left_edge[0]) * idds[0])
+ ind[1] = <int> ((pos_y[i] - left_edge[1]) * idds[1])
+ ind[2] = <int> ((pos_z[i] - left_edge[2]) * idds[2])
+ sample[i] = arr[ind[0], ind[1], ind[2]]
+ return sample
diff -r c1135a8515eaef292fe5f45b971043db1968c846 -r 628a21c5a006843c58c4c3ee893f2c1d2366be69 yt/utilities/command_line.py
--- a/yt/utilities/command_line.py
+++ b/yt/utilities/command_line.py
@@ -47,10 +47,11 @@
def _add_arg(sc, arg):
if isinstance(arg, types.StringTypes):
arg = _common_options[arg].copy()
+ argc = dict(arg.items())
argnames = []
- if "short" in arg: argnames.append(arg.pop('short'))
- if "long" in arg: argnames.append(arg.pop('long'))
- sc.add_argument(*argnames, **arg)
+ if "short" in argc: argnames.append(argc.pop('short'))
+ if "long" in argc: argnames.append(argc.pop('long'))
+ sc.add_argument(*argnames, **argc)
class YTCommand(object):
args = ()
@@ -63,12 +64,14 @@
def __init__(cls, name, b, d):
type.__init__(cls, name, b, d)
if cls.name is not None:
- sc = subparsers.add_parser(cls.name,
- description = cls.description,
- help = cls.description)
- sc.set_defaults(func=cls.run)
- for arg in cls.args:
- _add_arg(sc, arg)
+ names = ensure_list(cls.name)
+ for name in names:
+ sc = subparsers.add_parser(name,
+ description = cls.description,
+ help = cls.description)
+ sc.set_defaults(func=cls.run)
+ for arg in cls.args:
+ _add_arg(sc, arg)
@classmethod
def run(cls, args):
@@ -1140,76 +1143,6 @@
if not os.path.isdir(args.output): os.makedirs(args.output)
pc.save(os.path.join(args.output,"%s" % (pf)))
-class YTReasonCmd(YTCommand):
- name = "reason"
- args = (
- dict(short="-o", long="--open-browser", action="store_true",
- default = False, dest='open_browser',
- help="Open a web browser."),
- dict(short="-p", long="--port", action="store",
- default = 0, dest='port',
- help="Port to listen on"),
- dict(short="-f", long="--find", action="store_true",
- default = False, dest="find",
- help="At startup, find all *.hierarchy files in the CWD"),
- dict(short="-d", long="--debug", action="store_true",
- default = False, dest="debug",
- help="Add a debugging mode for cell execution")
- )
- description = \
- """
- Run the Web GUI Reason
- """
-
- def __call__(self, args):
- # We have to do a couple things.
- # First, we check that YT_DEST is set.
- if "YT_DEST" not in os.environ:
- print
- print "*** You must set the environment variable YT_DEST ***"
- print "*** to point to the installation location! ***"
- print
- sys.exit(1)
- if args.port == 0:
- # This means, choose one at random. We do this by binding to a
- # socket and allowing the OS to choose the port for that socket.
- import socket
- sock = socket.socket()
- sock.bind(('', 0))
- args.port = sock.getsockname()[-1]
- del sock
- elif args.port == '-1':
- port = raw_input("Desired yt port? ")
- try:
- args.port = int(port)
- except ValueError:
- print "Please try a number next time."
- return 1
- base_extjs_path = os.path.join(os.environ["YT_DEST"], "src")
- if not os.path.isfile(os.path.join(base_extjs_path, "ext-resources", "ext-all.js")):
- print
- print "*** You are missing the ExtJS support files. You ***"
- print "*** You can get these by either rerunning the ***"
- print "*** install script installing, or downloading ***"
- print "*** them manually. ***"
- print
- sys.exit(1)
- from yt.config import ytcfg;ytcfg["yt","__withinreason"]="True"
- import yt.utilities.bottle as bottle
- from yt.gui.reason.extdirect_repl import ExtDirectREPL
- from yt.gui.reason.bottle_mods import uuid_serve_functions, PayloadHandler
- hr = ExtDirectREPL(base_extjs_path)
- hr.debug = PayloadHandler.debug = args.debug
- if args.find:
- # We just have to find them and store references to them.
- command_line = ["pfs = []"]
- for fn in sorted(glob.glob("*/*.hierarchy")):
- command_line.append("pfs.append(load('%s'))" % fn[:-10])
- hr.execute("\n".join(command_line))
- bottle.debug()
- uuid_serve_functions(open_browser=args.open_browser,
- port=int(args.port), repl=hr)
-
class YTRenderCmd(YTCommand):
args = ("width", "unit", "center","enhance",'outputfn',
@@ -1312,8 +1245,8 @@
import rpdb
rpdb.run_rpdb(int(task))
-class YTServeCmd(YTCommand):
- name = "serve"
+class YTGUICmd(YTCommand):
+ name = ["serve", "reason"]
args = (
dict(short="-o", long="--open-browser", action="store_true",
default = False, dest='open_browser',
diff -r c1135a8515eaef292fe5f45b971043db1968c846 -r 628a21c5a006843c58c4c3ee893f2c1d2366be69 yt/utilities/exceptions.py
--- a/yt/utilities/exceptions.py
+++ b/yt/utilities/exceptions.py
@@ -48,3 +48,13 @@
def __str__(self):
return "The supplied axes are not orthogonal. %s" % (self.axes)
+
+class YTNoDataInObjectError(YTException):
+ def __init__(self, obj):
+ self.obj_type = getattr(obj, "_type_name", "")
+
+ def __str__(self):
+ s = "The object requested has no data included in it."
+ if self.obj_type == "slice":
+ s += " It may lie on a grid face. Try offsetting slightly."
+ return s
diff -r c1135a8515eaef292fe5f45b971043db1968c846 -r 628a21c5a006843c58c4c3ee893f2c1d2366be69 yt/utilities/minimal_representation.py
--- a/yt/utilities/minimal_representation.py
+++ b/yt/utilities/minimal_representation.py
@@ -23,7 +23,29 @@
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
+import numpy as na
import abc
+import json
+import urllib2
+from tempfile import TemporaryFile
+from yt.config import ytcfg
+from yt.funcs import *
+
+try:
+ from poster.streaminghttp import register_openers
+ from poster.encode import multipart_encode
+ register_openers()
+except ImportError:
+ pass
+
+class UploaderBar(object):
+ pbar = None
+ def __call__(self, name, prog, total):
+ if self.pbar is None:
+ self.pbar = get_pbar("Uploading %s" % name, total)
+ self.pbar.update(prog)
+ if prog == total:
+ self.pbar.finish()
class ContainerClass(object):
pass
@@ -67,6 +89,45 @@
setattr(cc, a, v)
return cls(cc)
+ def upload(self):
+ api_key = ytcfg.get("yt","hub_api_key")
+ url = ytcfg.get("yt","hub_url")
+ metadata, (final_name, chunks) = self._generate_post()
+ for i in metadata:
+ if isinstance(metadata[i], na.ndarray):
+ metadata[i] = metadata[i].tolist()
+ metadata['obj_type'] = self.type
+ if len(chunks) == 0:
+ chunk_info = {'chunks': []}
+ else:
+ chunk_info = {'final_name' : final_name, 'chunks': []}
+ for cn, cv in chunks:
+ chunk_info['chunks'].append((cn, cv.size * cv.itemsize))
+ metadata = json.dumps(metadata)
+ chunk_info = json.dumps(chunk_info)
+ datagen, headers = multipart_encode({'metadata' : metadata,
+ 'chunk_info' : chunk_info,
+ 'api_key' : api_key})
+ request = urllib2.Request(url, datagen, headers)
+ # Actually do the request, and get the response
+ rv = urllib2.urlopen(request).read()
+ uploader_info = json.loads(rv)
+ new_url = url + "/handler/%s" % uploader_info['handler_uuid']
+ for cn, cv in chunks:
+ remaining = cv.size * cv.itemsize
+ f = TemporaryFile()
+ na.save(f, cv)
+ f.seek(0)
+ pbar = UploaderBar()
+ datagen, headers = multipart_encode({'chunk_data' : f}, cb = pbar)
+ request = urllib2.Request(new_url, datagen, headers)
+ rv = urllib2.urlopen(request).read()
+
+ datagen, headers = multipart_encode({'status' : 'FINAL'})
+ request = urllib2.Request(new_url, datagen, headers)
+ rv = urllib2.urlopen(request).read()
+ return json.loads(rv)
+
class FilteredRepresentation(MinimalRepresentation):
def _generate_post(self):
raise RuntimeError
@@ -77,6 +138,7 @@
"unique_identifier", "current_redshift", "output_hash",
"cosmological_simulation", "omega_matter", "omega_lambda",
"hubble_constant", "name")
+ type = 'simulation_output'
def __init__(self, obj):
super(MinimalStaticOutput, self).__init__(obj)
@@ -86,7 +148,7 @@
def _generate_post(self):
metadata = self._attrs
chunks = []
- return metadata, chunks
+ return (metadata, (None, chunks))
class MinimalMappableData(MinimalRepresentation):
@@ -97,10 +159,7 @@
nobj = self._return_filtered_object(("field_data",))
metadata = nobj._attrs
chunks = [(arr, self.field_data[arr]) for arr in self.field_data]
- return (metadata, chunks)
+ return (metadata, ('field_data', chunks))
class MinimalProjectionData(MinimalMappableData):
-
- def __init__(self, obj):
- super(MinimalProjectionData, self).__init__(obj)
- self.type = "proj"
+ type = 'proj'
diff -r c1135a8515eaef292fe5f45b971043db1968c846 -r 628a21c5a006843c58c4c3ee893f2c1d2366be69 yt/utilities/parallel_tools/parallel_analysis_interface.py
--- a/yt/utilities/parallel_tools/parallel_analysis_interface.py
+++ b/yt/utilities/parallel_tools/parallel_analysis_interface.py
@@ -349,7 +349,7 @@
else:
yield obj
if parallel_capable:
- communication_system.communicators.pop()
+ communication_system.pop()
if storage is not None:
# Now we have to broadcast it
new_storage = my_communicator.par_combine_object(
diff -r c1135a8515eaef292fe5f45b971043db1968c846 -r 628a21c5a006843c58c4c3ee893f2c1d2366be69 yt/visualization/_colormap_data.py
--- a/yt/visualization/_colormap_data.py
+++ b/yt/visualization/_colormap_data.py
@@ -7757,3 +7757,44 @@
1.0, 1.0, 1.0, 1.0, 1.0, 1.0]),
)
+color_map_luts['B-W LINEAR'] = color_map_luts['idl00']
+color_map_luts['BLUE'] = color_map_luts['idl01']
+color_map_luts['GRN-RED-BLU-WHT'] = color_map_luts['idl02']
+color_map_luts['RED TEMPERATURE'] = color_map_luts['idl03']
+color_map_luts['BLUE'] = color_map_luts['idl04']
+color_map_luts['STD GAMMA-II'] = color_map_luts['idl05']
+color_map_luts['PRISM'] = color_map_luts['idl06']
+color_map_luts['RED-PURPLE'] = color_map_luts['idl07']
+color_map_luts['GREEN'] = color_map_luts['idl08']
+color_map_luts['GRN'] = color_map_luts['idl09']
+color_map_luts['GREEN-PINK'] = color_map_luts['idl10']
+color_map_luts['BLUE-RED'] = color_map_luts['idl11']
+color_map_luts['16 LEVEL'] = color_map_luts['idl12']
+color_map_luts['RAINBOW'] = color_map_luts['idl13']
+color_map_luts['STEPS'] = color_map_luts['idl14']
+color_map_luts['STERN SPECIAL'] = color_map_luts['idl15']
+color_map_luts['Haze'] = color_map_luts['idl16']
+color_map_luts['Blue - Pastel - Red'] = color_map_luts['idl17']
+color_map_luts['Pastels'] = color_map_luts['idl18']
+color_map_luts['Hue Sat Lightness 1'] = color_map_luts['idl19']
+color_map_luts['Hue Sat Lightness 2'] = color_map_luts['idl20']
+color_map_luts['Hue Sat Value 1'] = color_map_luts['idl21']
+color_map_luts['Hue Sat Value 2'] = color_map_luts['idl22']
+color_map_luts['Purple-Red + Stripes'] = color_map_luts['idl23']
+color_map_luts['Beach'] = color_map_luts['idl24']
+color_map_luts['Mac Style'] = color_map_luts['idl25']
+color_map_luts['Eos A'] = color_map_luts['idl26']
+color_map_luts['Eos B'] = color_map_luts['idl27']
+color_map_luts['Hardcandy'] = color_map_luts['idl28']
+color_map_luts['Nature'] = color_map_luts['idl29']
+color_map_luts['Ocean'] = color_map_luts['idl30']
+color_map_luts['Peppermint'] = color_map_luts['idl31']
+color_map_luts['Plasma'] = color_map_luts['idl32']
+color_map_luts['Blue-Red'] = color_map_luts['idl33']
+color_map_luts['Rainbow'] = color_map_luts['idl34']
+color_map_luts['Blue Waves'] = color_map_luts['idl35']
+color_map_luts['Volcano'] = color_map_luts['idl36']
+color_map_luts['Waves'] = color_map_luts['idl37']
+color_map_luts['Rainbow18'] = color_map_luts['idl38']
+color_map_luts['Rainbow + white'] = color_map_luts['idl39']
+color_map_luts['Rainbow + black'] = color_map_luts['idl40']
diff -r c1135a8515eaef292fe5f45b971043db1968c846 -r 628a21c5a006843c58c4c3ee893f2c1d2366be69 yt/visualization/eps_writer.py
--- a/yt/visualization/eps_writer.py
+++ b/yt/visualization/eps_writer.py
@@ -293,6 +293,7 @@
_xlabel = ""
_ylabel = ""
else:
+ units = units.replace('mpc', 'Mpc')
_xlabel = '%s (%s)' % (x_names[plot.data.axis], units)
_ylabel = '%s (%s)' % (y_names[plot.data.axis], units)
_tickcolor = pyx.color.cmyk.white
diff -r c1135a8515eaef292fe5f45b971043db1968c846 -r 628a21c5a006843c58c4c3ee893f2c1d2366be69 yt/visualization/plot_collection.py
--- a/yt/visualization/plot_collection.py
+++ b/yt/visualization/plot_collection.py
@@ -940,7 +940,7 @@
x_bins, fields[0], x_min, x_max, x_log,
lazy_reader)
if len(fields) > 1:
- profile.add_fields(fields[1], weight=weight, accumulation=accumulation)
+ profile.add_fields(fields[1:], weight=weight, accumulation=accumulation)
if id is None: id = self._get_new_id()
p = self._add_plot(Profile1DPlot(profile, fields, id,
axes=axes, figure=figure))
@@ -1148,13 +1148,15 @@
x_bins, fields[0], x_min, x_max, x_log,
y_bins, fields[1], y_min, y_max, y_log,
lazy_reader)
+ # This will add all the fields to the profile object
+ if len(fields)>2:
+ profile.add_fields(fields[2:], weight=weight,
+ accumulation=accumulation, fractional=fractional)
+
if id is None: id = self._get_new_id()
p = self._add_plot(PhasePlot(profile, fields,
id, cmap=cmap,
figure=figure, axes=axes))
- if len(fields) > 2:
- # This will add all the fields to the profile object
- p.switch_z(fields[2], weight=weight, accumulation=accumulation, fractional=fractional)
return p
def add_phase_sphere(self, radius, unit, fields, center = None, cmap=None,
diff -r c1135a8515eaef292fe5f45b971043db1968c846 -r 628a21c5a006843c58c4c3ee893f2c1d2366be69 yt/visualization/plot_types.py
--- a/yt/visualization/plot_types.py
+++ b/yt/visualization/plot_types.py
@@ -36,6 +36,7 @@
y_dict, \
axis_names
from .color_maps import yt_colormaps, is_colormap
+from yt.utilities.exceptions import YTNoDataInObjectError
class CallbackRegistryHandler(object):
def __init__(self, plot):
@@ -379,6 +380,8 @@
def _redraw_image(self, *args):
buff = self._get_buff()
+ if self[self.axis_names["Z"]].size == 0:
+ raise YTNoDataInObjectError(self.data)
mylog.debug("Received buffer of min %s and max %s (data: %s %s)",
na.nanmin(buff), na.nanmax(buff),
self[self.axis_names["Z"]].min(),
diff -r c1135a8515eaef292fe5f45b971043db1968c846 -r 628a21c5a006843c58c4c3ee893f2c1d2366be69 yt/visualization/plot_window.py
--- a/yt/visualization/plot_window.py
+++ b/yt/visualization/plot_window.py
@@ -24,6 +24,7 @@
"""
import base64
import matplotlib.pyplot
+import cStringIO
from functools import wraps
import numpy as na
@@ -36,27 +37,59 @@
from yt.funcs import *
from yt.utilities.amr_utils import write_png_to_string
+from yt.utilities.definitions import \
+ x_dict, x_names, \
+ y_dict, y_names, \
+ axis_names, \
+ axis_labels
def invalidate_data(f):
@wraps(f)
def newfunc(*args, **kwargs):
- f(*args, **kwargs)
+ rv = f(*args, **kwargs)
args[0]._data_valid = False
args[0]._plot_valid = False
args[0]._recreate_frb()
if args[0]._initfinished:
args[0]._setup_plots()
+ return rv
return newfunc
def invalidate_plot(f):
@wraps(f)
def newfunc(*args, **kwargs):
+ rv = f(*args, **kwargs)
args[0]._plot_valid = False
args[0]._setup_plots()
- return f(*args, **kwargs)
+ return rv
return newfunc
+field_transforms = {}
+
+class FieldTransform(object):
+ def __init__(self, name, func, locator):
+ self.name = name
+ self.func = func
+ self.locator = locator
+ field_transforms[name] = self
+
+ def __call__(self, *args, **kwargs):
+ return self.func(*args, **kwargs)
+
+ def ticks(self, mi, ma):
+ try:
+ ticks = self.locator(mi, ma)
+ except:
+ ticks = []
+ return ticks
+
+log_transform = FieldTransform('log10', na.log10, LogLocator())
+linear_transform = FieldTransform('linear', lambda x: x, LinearLocator())
+
class PlotWindow(object):
+ _plot_valid = False
+ _colorbar_valid = False
+ _contour_info = None
def __init__(self, data_source, bounds, buff_size=(800,800), antialias = True, periodic = True):
r"""
PlotWindow(data_source, bounds, buff_size=(800,800), antialias = True)
@@ -232,6 +265,14 @@
def set_antialias(self,aa):
self.antialias = aa
+ @invalidate_plot
+ def set_contour_info(self, field_name, n_cont = 8, colors = None,
+ logit = True):
+ if field_name == "None" or n_cont == 0:
+ self._contour_info = None
+ return
+ self._contour_info = (field_name, n_cont, colors, logit)
+
class PWViewer(PlotWindow):
"""A viewer for PlotWindows.
@@ -240,16 +281,17 @@
setup = kwargs.pop("setup", True)
PlotWindow.__init__(self, *args,**kwargs)
self._field_transform = {}
+ self._colormaps = defaultdict(lambda: 'algae')
for field in self._frb.data.keys():
if self._frb.pf.field_info[field].take_log:
- self._field_transform[field] = na.log
+ self._field_transform[field] = log_transform
else:
- self._field_transform[field] = lambda x: x
+ self._field_transform[field] = linear_transform
if setup: self._setup_plots()
@invalidate_plot
- def set_log(self,field,log):
+ def set_log(self, field, log):
"""set a field to log or linear.
Parameters
@@ -261,16 +303,20 @@
"""
if log:
- self._field_transform[field] = na.log
+ self._field_transform[field] = log_transform
else:
- self._field_transform[field] = lambda x: x
-
- def set_transform(self, field, func):
- self._field_transform[field] = func
+ self._field_transform[field] = linear_transform
@invalidate_plot
- def set_cmap(self):
- pass
+ def set_transform(self, field, name):
+ if name not in field_transforms:
+ raise KeyError(name)
+ self._field_transform[field] = field_transforms[name]
+
+ @invalidate_plot
+ def set_cmap(self, field, cmap_name):
+ self._colorbar_valid = False
+ self._colormaps[field] = cmap_name
@invalidate_plot
def set_zlim(self):
@@ -309,7 +355,11 @@
<br>
Field of View: %(x_width)0.3f %(unit)s<br>
Minimum Value: %(mi)0.3e %(units)s<br>
-Maximum Value: %(ma)0.3e %(units)s
+Maximum Value: %(ma)0.3e %(units)s<br>
+Central Point: (data coords)<br>
+ %(xc)0.14f<br>
+ %(yc)0.14f<br>
+ %(zc)0.14f
"""
class PWViewerExtJS(PWViewer):
@@ -319,7 +369,6 @@
_ext_widget_id = None
_current_field = None
_widget_name = "plot_window"
- cmap = 'algae'
def _setup_plots(self):
from yt.gui.reason.bottle_mods import PayloadHandler
@@ -332,18 +381,21 @@
else:
fields = self._frb.data.keys()
addl_keys = {}
+ if self._colorbar_valid == False:
+ addl_keys['colorbar_image'] = self._get_cbar_image()
+ self._colorbar_valid = True
min_zoom = 200*self._frb.pf.h.get_smallest_dx() * self._frb.pf['unitary']
for field in fields:
- to_plot = apply_colormap(self._frb[field], func = self._field_transform[field])
- pngs = write_png_to_string(to_plot)
+ to_plot = apply_colormap(self._frb[field],
+ func = self._field_transform[field],
+ cmap_name = self._colormaps[field])
+ pngs = self._apply_modifications(to_plot)
img_data = base64.b64encode(pngs)
# We scale the width between 200*min_dx and 1.0
x_width = self.xlim[1] - self.xlim[0]
zoom_fac = na.log10(x_width*self._frb.pf['unitary'])/na.log10(min_zoom)
zoom_fac = 100.0*max(0.0, zoom_fac)
- ticks = self.get_ticks(self._frb[field].min(),
- self._frb[field].max(),
- take_log = self._frb.pf.field_info[field].take_log)
+ ticks = self.get_ticks(field)
payload = {'type':'png_string',
'image_data':img_data,
'metadata_string': self.get_metadata(field),
@@ -352,34 +404,64 @@
payload.update(addl_keys)
ph.add_payload(payload)
- def get_ticks(self, mi, ma, height = 400, take_log = False):
+ def _apply_modifications(self, img):
+ if self._contour_info is None:
+ return write_png_to_string(img)
+ from matplotlib.figure import Figure
+ from yt.visualization._mpl_imports import \
+ FigureCanvasAgg, FigureCanvasPdf, FigureCanvasPS
+ from yt.utilities.delaunay.triangulate import Triangulation as triang
+ plot_args = {}
+ field, number, colors, logit = self._contour_info
+ if colors is not None: plot_args['colors'] = colors
+
+ vi, vj, vn = img.shape
+
+ # Now we need to get our field values
+ raw_data = self._frb.data_source
+ b = self._frb.bounds
+ xi, yi = na.mgrid[b[0]:b[1]:(vi / 8) * 1j,
+ b[2]:b[3]:(vj / 8) * 1j]
+ x = raw_data['px']
+ y = raw_data['py']
+ z = raw_data[field]
+ if logit: z = na.log10(z)
+ fvals = triang(x,y).nn_interpolator(z)(xi,yi).transpose()
+
+ fig = Figure((vi/100.0, vj/100.0), dpi = 100)
+ fig.figimage(img)
+ # Add our contour
+ ax = fig.add_axes([0.0, 0.0, 1.0, 1.0], frameon=False)
+ ax.patch.set_alpha(0.0)
+
+ # Now contour it
+ ax.contour(fvals, number, colors='w')
+ canvas = FigureCanvasAgg(fig)
+ f = cStringIO.StringIO()
+ canvas.print_figure(f)
+ f.seek(0)
+ img = f.read()
+ return img
+
+ def get_ticks(self, field, height = 400):
# This will eventually change to work with non-logged fields
ticks = []
- if take_log and mi > 0.0 and ma > 0.0:
- ll = LogLocator()
- tick_locs = ll(mi, ma)
- mi = na.log10(mi)
- ma = na.log10(ma)
- for v1,v2 in zip(tick_locs, na.log10(tick_locs)):
- if v2 < mi or v2 > ma: continue
- p = height - height * (v2 - mi)/(ma - mi)
- ticks.append((p,v1,v2))
- #print v1, v2, mi, ma, height, p
- else:
- ll = LinearLocator()
- tick_locs = ll(mi, ma)
- for v in tick_locs:
- p = height - height * (v - mi)/(ma-mi)
- ticks.append((p,v,"%0.3e" % (v)))
-
+ transform = self._field_transform[field]
+ mi, ma = self._frb[field].min(), self._frb[field].max()
+ tick_locs = transform.ticks(mi, ma)
+ mi, ma = transform((mi, ma))
+ for v1,v2 in zip(tick_locs, transform(tick_locs)):
+ if v2 < mi or v2 > ma: continue
+ p = height - height * (v2 - mi)/(ma - mi)
+ ticks.append((p,v1,v2))
return ticks
- def _get_cbar_image(self, height = 400, width = 40):
- # Right now there's just the single 'cmap', but that will eventually
- # change. I think?
+ def _get_cbar_image(self, height = 400, width = 40, field = None):
+ if field is None: field = self._current_field
+ cmap_name = self._colormaps[field]
vals = na.mgrid[1:0:height * 1j] * na.ones(width)[:,None]
vals = vals.transpose()
- to_plot = apply_colormap(vals)
+ to_plot = apply_colormap(vals, cmap_name = cmap_name)
pngs = write_png_to_string(to_plot)
img_data = base64.b64encode(pngs)
return img_data
@@ -402,11 +484,21 @@
y_width = self.ylim[1] - self.ylim[0]
unit = get_smallest_appropriate_unit(x_width, self._frb.pf)
units = self.get_field_units(field)
+ center = getattr(self._frb.data_source, "center", None)
+ if center is None or self._frb.axis == 4:
+ xc, yc, zc = -999, -999, -999
+ else:
+ center[x_dict[self._frb.axis]] = 0.5 * (
+ self.xlim[0] + self.xlim[1])
+ center[y_dict[self._frb.axis]] = 0.5 * (
+ self.ylim[0] + self.ylim[1])
+ xc, yc, zc = center
md = _metadata_template % dict(
pf = self._frb.pf,
x_width = x_width*self._frb.pf[unit],
y_width = y_width*self._frb.pf[unit],
- unit = unit, units = units, mi = mi, ma = ma)
+ unit = unit, units = units, mi = mi, ma = ma,
+ xc = xc, yc = yc, zc = zc)
return md
def image_recenter(self, img_x, img_y, img_size_x, img_size_y):
@@ -422,9 +514,9 @@
self._current_field = field
self._frb[field]
if self._frb.pf.field_info[field].take_log:
- self._field_transform[field] = na.log
+ self._field_transform[field] = log_transform
else:
- self._field_transform[field] = lambda x: x
+ self._field_transform[field] = linear_transform
def get_field_units(self, field, strip_mathml = True):
ds = self._frb.data_source
@@ -439,7 +531,6 @@
units = units.replace(r"\rm{", "").replace("}","")
return units
-
class YtPlot(object):
"""A base class for all yt plots. It should abstract the actual
plotting engine completely, allowing plotting even without matplotlib.
@@ -474,7 +565,6 @@
class Yt2DPlot(YtPlot):
zmin = None
zmax = None
- cmap = 'algae'
zlabel = None
# def __init__(self, data):
@@ -485,17 +575,14 @@
self.zmin = zmin
self.zmax = zmax
- @invalidate_plot
- def set_cmap(self,cmap):
- self.cmap = cmap
-
class YtWindowPlot(Yt2DPlot):
def __init__(self, data, size=(10,8)):
YtPlot.__init__(self, data, size)
self.__init_image(data)
def __init_image(self, data):
- self.image = self.axes.imshow(data,cmap=self.cmap)
+ #self.image = self.axes.imshow(data, cmap=self.cmap)
+ pass
class YtProfilePlot(Yt2DPlot):
def __init__(self):
diff -r c1135a8515eaef292fe5f45b971043db1968c846 -r 628a21c5a006843c58c4c3ee893f2c1d2366be69 yt/visualization/profile_plotter.py
--- a/yt/visualization/profile_plotter.py
+++ b/yt/visualization/profile_plotter.py
@@ -340,6 +340,8 @@
func = na.log10
else:
func = lambda a: a
+ raw_data = na.repeat(raw_data, 3, axis=0)
+ raw_data = na.repeat(raw_data, 3, axis=1)
to_plot = apply_colormap(raw_data, self.plot.cbar.bounds,
self.plot.cbar.cmap, func)
if self.plot.cbar.scale == 'log':
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
this message because you have the commit-notification service enabled
and are the designated recipient of this email.
More information about the yt-svn
mailing list