[yt-svn] commit/yt: 2 new changesets
Bitbucket
commits-noreply at bitbucket.org
Mon Mar 26 11:58:04 PDT 2012
2 new commits in yt:
https://bitbucket.org/yt_analysis/yt/changeset/2f021f47d6d2/
changeset: 2f021f47d6d2
branch: yt
user: MatthewTurk
date: 2012-03-26 20:57:35
summary: Adding cython annotations to bin_profile1d, which should speed it up
considerably.
affected #: 1 file
diff -r 9df7dbbc3c4720f6758435ee621005cc6f1a7b5e -r 2f021f47d6d27d9d01963dc9866b5d0b9d1962c1 yt/utilities/_amr_utils/misc_utilities.pyx
--- a/yt/utilities/_amr_utils/misc_utilities.pyx
+++ b/yt/utilities/_amr_utils/misc_utilities.pyx
@@ -42,8 +42,7 @@
np.ndarray[np.float64_t, ndim=1] mresult,
np.ndarray[np.float64_t, ndim=1] qresult,
np.ndarray[np.float64_t, ndim=1] used):
- cdef int n
- cdef np.int64_t bin
+ cdef int n, bin
cdef np.float64_t wval, bval
for n in range(bins_x.shape[0]):
bin = bins_x[n]
@@ -55,6 +54,7 @@
bresult[bin] += wval*bval
mresult[bin] += wval * (bval - mresult[bin]) / wresult[bin]
used[bin] = 1
+ return
@cython.boundscheck(False)
@cython.wraparound(False)
@@ -68,7 +68,7 @@
np.ndarray[np.float64_t, ndim=2] mresult,
np.ndarray[np.float64_t, ndim=2] qresult,
np.ndarray[np.float64_t, ndim=2] used):
- cdef int n
+ cdef int n, bini, binj
cdef np.int64_t bin
cdef np.float64_t wval, bval
for n in range(bins_x.shape[0]):
@@ -82,6 +82,7 @@
bresult[bini, binj] += wval*bval
mresult[bini, binj] += wval * (bval - mresult[bini, binj]) / wresult[bini, binj]
used[bini, binj] = 1
+ return
@cython.boundscheck(False)
@cython.wraparound(False)
@@ -96,7 +97,7 @@
np.ndarray[np.float64_t, ndim=3] mresult,
np.ndarray[np.float64_t, ndim=3] qresult,
np.ndarray[np.float64_t, ndim=3] used):
- cdef int n
+ cdef int n, bini, binj, bink
cdef np.int64_t bin
cdef np.float64_t wval, bval
for n in range(bins_x.shape[0]):
@@ -111,6 +112,7 @@
bresult[bini, binj, bink] += wval*bval
mresult[bini, binj, bink] += wval * (bval - mresult[bini, binj, bink]) / wresult[bini, binj, bink]
used[bini, binj, bink] = 1
+ return
@cython.boundscheck(False)
@cython.wraparound(False)
https://bitbucket.org/yt_analysis/yt/changeset/495e3a9c0b06/
changeset: 495e3a9c0b06
branch: yt
user: MatthewTurk
date: 2012-03-26 20:57:52
summary: Merging
affected #: 1 file
diff -r 2f021f47d6d27d9d01963dc9866b5d0b9d1962c1 -r 495e3a9c0b067e11ce218fdb447065092a362175 yt/analysis_modules/halo_merger_tree/merger_tree.py
--- a/yt/analysis_modules/halo_merger_tree/merger_tree.py
+++ b/yt/analysis_modules/halo_merger_tree/merger_tree.py
@@ -86,6 +86,22 @@
"ChildHaloID3", "ChildHaloFrac3",
"ChildHaloID4", "ChildHaloFrac4"]
+# Below we make the SQL command that creates the table "Halos" in the
+# database. This table is where all the data is stored.
+# Each column of data is named and its datatype is specified.
+# The GlobalHaloID is given the PRIMARY KEY property, which means that
+# the SQLite machinery assigns a consecutive and unique integer value
+# to that field automatically as each new entry is entered (that is,
+# if GlobalHaloID isn't specified already).
+create_db_line = "CREATE TABLE Halos ("
+for i, col in enumerate(columns):
+ if i == 0:
+ create_db_line += "%s %s PRIMARY KEY," % (col, column_types[col])
+ else:
+ create_db_line += " %s %s," % (col, column_types[col])
+# Clean of trailing comma, and closing stuff.
+create_db_line = create_db_line[:-1] + ");"
+
NumNeighbors = 15
NumDB = 5
@@ -162,6 +178,8 @@
self.FOF_link_length= FOF_link_length # For FOF
self.dm_only = dm_only
self.refresh = refresh
+ self.index = index
+ self.zs = {}
# MPI stuff
if self.comm.rank is None:
self.comm.rank = 0
@@ -173,12 +191,19 @@
os.unlink(self.database)
except:
pass
- self._open_create_database()
- self._create_halo_table()
+ if self.comm.rank == 0:
+ self._open_create_database()
+ self._create_halo_table()
self._run_halo_finder_add_to_db()
# Find the h5 file names for all the halos.
for snap in self.restart_files:
self._build_h5_refs(snap)
+ # Find out how much work is already stored in the database.
+ if self.comm.rank == 0:
+ z_progress = self._find_progress()
+ else:
+ z_progress = None
+ z_progress = self.comm.mpi_bcast(z_progress)
# Loop over the pairs of snapshots to locate likely neighbors, and
# then use those likely neighbors to compute fractional contributions.
last = None
@@ -187,16 +212,22 @@
for snap, pair in enumerate(zip(self.restart_files[:-1], self.restart_files[1:])):
if not self.with_halos[snap] or not self.with_halos[snap+1]:
continue
+ if self.zs[pair[0]] > z_progress:
+ continue
self._find_likely_children(pair[0], pair[1])
# last is the data for the parent dataset, which can be supplied
# as the child from the previous round for all but the first loop.
last = self._compute_child_fraction(pair[0], pair[1], last)
+ if self.comm.rank == 0:
+ mylog.info("Updating database with parent-child relationships.")
+ self._copy_and_update_db()
+ # This has to happen because we delete the old database above.
+ self._open_create_database()
del last
- # Now update the database with all the writes.
- mylog.info("Updating database with parent-child relationships.")
if self.comm.rank == 0:
- self._copy_and_update_db()
- self.comm.barrier()
+ if self.index:
+ self._write_index()
+ self._close_database()
self.comm.barrier()
mylog.info("Done!")
@@ -210,6 +241,7 @@
for cycle, file in enumerate(self.restart_files):
gc.collect()
pf = load(file)
+ self.zs[file] = pf.current_redshift
self.period = pf.domain_right_edge - pf.domain_left_edge
# If the halos are already found, skip this data step, unless
# refresh is True.
@@ -281,30 +313,16 @@
def _open_create_database(self):
# open the database. This creates the database file on disk if it
# doesn't already exist. Open it on root only.
- if self.comm.rank == 0:
- self.conn = sql.connect(self.database)
- self.cursor = self.conn.cursor()
+ self.conn = sql.connect(self.database)
+ self.cursor = self.conn.cursor()
def _create_halo_table(self):
- if self.comm.rank == 0:
- # Handle the error if it already exists.
- try:
- # Create the table that will store the halo data.
- line = "CREATE TABLE Halos (GlobalHaloID INTEGER PRIMARY KEY,\
- SnapCurrentTimeIdentifier INTEGER, SnapZ FLOAT, SnapHaloID INTEGER, \
- HaloMass FLOAT,\
- NumPart INTEGER, CenMassX FLOAT, CenMassY FLOAT,\
- CenMassZ FLOAT, BulkVelX FLOAT, BulkVelY FLOAT, BulkVelZ FLOAT,\
- MaxRad FLOAT,\
- ChildHaloID0 INTEGER, ChildHaloFrac0 FLOAT, \
- ChildHaloID1 INTEGER, ChildHaloFrac1 FLOAT, \
- ChildHaloID2 INTEGER, ChildHaloFrac2 FLOAT, \
- ChildHaloID3 INTEGER, ChildHaloFrac3 FLOAT, \
- ChildHaloID4 INTEGER, ChildHaloFrac4 FLOAT);"
- self.cursor.execute(line)
- self.conn.commit()
- except sql.OperationalError:
- pass
+ # Handle the error if the table already exists by doing nothing.
+ try:
+ self.cursor.execute(create_db_line)
+ self.conn.commit()
+ except sql.OperationalError:
+ pass
def _find_likely_children(self, parentfile, childfile):
# For each halo in the parent list, identify likely children in the
@@ -680,16 +698,33 @@
for insert in to_write:
temp_cursor.execute(line, insert)
temp_conn.commit()
+ temp_cursor.close()
+ temp_conn.close()
+ self._close_database()
+ os.rename(temp_name, self.database)
+
+ def _write_index(self):
mylog.info("Creating database index.")
line = "CREATE INDEX IF NOT EXISTS HalosIndex ON Halos ("
for name in columns:
line += name +","
line = line[:-1] + ");"
- temp_cursor.execute(line)
- temp_cursor.close()
- temp_conn.close()
- self._close_database()
- os.rename(temp_name, self.database)
+ self.cursor.execute(line)
+
+ def _find_progress(self):
+ # This queries the database to see how far along work has already come
+ # to identify parent->child relationships.
+ line = """SELECT ChildHaloID0, SnapZ from halos WHERE SnapHaloID = 0
+ ORDER BY SnapZ DESC;"""
+ self.cursor.execute(line)
+ results = self.cursor.fetchone()
+ while results:
+ results = list(results)
+ if results[0] == -1:
+ # We've hit a dump that does not have relationships. Save this.
+ return results[1] # the SnapZ.
+ results = self.cursor.fetchone()
+ return 0.
class MergerTreeConnect(DatabaseFunctions):
def __init__(self, database='halos.db'):
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
More information about the yt-svn
mailing list