[yt-svn] commit/yt: 3 new changesets

Bitbucket commits-noreply at bitbucket.org
Wed Jul 11 15:09:42 PDT 2012


3 new commits in yt:


https://bitbucket.org/yt_analysis/yt/changeset/4cdf6fc1e890/
changeset:   4cdf6fc1e890
branch:      yt
user:        Christopher Moody
date:        2012-07-11 23:47:00
summary:     Added is_valid for ART. now works with load.
affected #:  1 file

diff -r 6d45f89c2d8609edf398edf11cbbc731332da964 -r 4cdf6fc1e89044ec0deedcbaf9744fb96ba6e8ef yt/frontends/art/data_structures.py
--- a/yt/frontends/art/data_structures.py
+++ b/yt/frontends/art/data_structures.py
@@ -944,5 +944,14 @@
 
     @classmethod
     def _is_valid(self, *args, **kwargs):
-        return False # We make no effort to auto-detect ART data
+        """
+        Defined for Daniel Ceverino's file naming scheme.
+        This could differ for other formats.
+        """
+        fn = ("%s" % (os.path.basename(args[0])))
+        f = ("%s" % args[0])
+        if fn.endswith(".d") and fn.startswith('10Mpc') and\
+                os.path.exists(f): 
+                return True
+        return False
 

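For context on "now works with load": the check above keys entirely off Daniel Ceverino's file naming scheme, as the docstring says. A stand-alone sketch of the same test outside the class (the helper name and sample filename are illustrative, not part of the commit):

    import os

    def looks_like_ceverino_art(path):
        # Mirrors _is_valid above: a '10Mpc*.d' filename that exists on disk.
        fn = os.path.basename(path)
        return fn.startswith("10Mpc") and fn.endswith(".d") and os.path.exists(path)

    looks_like_ceverino_art("10MpcBox_csf512_a0.300.d")  # True only if the file is present

Outputs with other naming conventions would not be auto-detected by this check.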


https://bitbucket.org/yt_analysis/yt/changeset/82ec87ff6c9b/
changeset:   82ec87ff6c9b
branch:      yt
user:        Christopher Moody
date:        2012-07-12 00:07:01
summary:     fixes to rockstar
affected #:  1 file

diff -r 4cdf6fc1e89044ec0deedcbaf9744fb96ba6e8ef -r 82ec87ff6c9b8b35795ab6a93a8e3f1214935ef8 yt/analysis_modules/halo_finding/rockstar/rockstar_interface.pyx
--- a/yt/analysis_modules/halo_finding/rockstar/rockstar_interface.pyx
+++ b/yt/analysis_modules/halo_finding/rockstar/rockstar_interface.pyx
@@ -238,11 +238,9 @@
 
 cdef class RockstarInterface
 
-cdef RockstarInterface rh
 cdef void rh_read_particles(char *filename, particle **p, np.int64_t *num_p):
-    cdef int i, fi, npart, tnpart
-    cdef np.float64_t conv[6], left_edge[6], right_edge[3]
-    dd = rh.data_source
+    print 'reading from particle filename %s'%filename # should print ./inline.0
+    cdef np.float64_t conv[6], left_edge[6]
     cdef np.ndarray[np.int64_t, ndim=1] arri
     cdef np.ndarray[np.float64_t, ndim=1] arr
     block = int(str(filename).rsplit(".")[-1])
@@ -257,12 +255,9 @@
     #print "Loading indices: size = ", tnpart
     conv[0] = conv[1] = conv[2] = rh.pf["mpchcm"]
     conv[3] = conv[4] = conv[5] = 1e-5
-    left_edge[0] = rh.le[0]
-    left_edge[1] = rh.le[1]
-    left_edge[2] = rh.le[2]
-    right_edge[0] = rh.re[0]
-    right_edge[1] = rh.re[1]
-    right_edge[2] = rh.re[2]
+    left_edge[0] = rh.pf.domain_left_edge[0]
+    left_edge[1] = rh.pf.domain_left_edge[1]
+    left_edge[2] = rh.pf.domain_left_edge[2]
     left_edge[3] = left_edge[4] = left_edge[5] = 0.0
     pi = 0
     for g in grids:
@@ -277,9 +272,6 @@
                       "particle_velocity_z"]:
             arr = dd._get_data_from_grid(g, field).astype("float64")
             for i in range(npart):
-                if fi<3: 
-                    if  left_edge[i] > arr[i]: continue
-                    if right_edge[i] < arr[i]: continue
                 p[0][i+pi].pos[fi] = (arr[i]-left_edge[fi])*conv[fi]
             fi += 1
         pi += npart
@@ -304,8 +296,9 @@
                        int parallel = False, int num_readers = 1,
                        int num_writers = 1,
                        int writing_port = -1, int block_ratio = 1,
-                       int periodic = 1, int min_halo_size = 20,
-                       char *outbase = 'None'):
+                       int periodic = 1, int num_snaps = 1,
+                       int min_halo_size = 25,
+                       int periodic = 1, 
         global PARALLEL_IO, PARALLEL_IO_SERVER_ADDRESS, PARALLEL_IO_SERVER_PORT
         global FILENAME, FILE_FORMAT, NUM_SNAPS, STARTING_SNAP, h0, Ol, Om
         global BOX_SIZE, PERIODIC, PARTICLE_MASS, NUM_BLOCKS, NUM_READERS
@@ -324,10 +317,11 @@
         FILENAME = "inline.<block>"
         FILE_FORMAT = "GENERIC"
         OUTPUT_FORMAT = "ASCII"
+        NUM_SNAPS = num_snaps
+        NUM_READERS = num_readers
         NUM_SNAPS = 1
-        NUM_READERS = num_readers
-        NUM_BLOCKS = num_readers * block_ratio
         NUM_WRITERS = num_writers
+        MIN_HALO_OUTPUT_SIZE=min_halo_size
         self.block_ratio = block_ratio
 
         h0 = self.pf.hubble_constant

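A note on the unit handling in rh_read_particles after this change: positions are offset by the domain left edge and scaled by rh.pf["mpchcm"] into comoving Mpc/h, velocities use the 1e-5 factor with a zero offset, and the old per-particle left_edge/right_edge clipping is gone, so the full domain is handed to Rockstar. A rough NumPy sketch of the position half (the function name and array shapes are illustrative, not part of the patch):

    import numpy as np

    def positions_for_rockstar(pos_code, domain_left_edge, mpchcm):
        # pos_code: (N, 3) particle positions in code units.
        # mpchcm:   code-length -> comoving Mpc/h factor (rh.pf["mpchcm"] above).
        pos = np.asarray(pos_code, dtype="float64")
        left = np.asarray(domain_left_edge, dtype="float64")
        return (pos - left) * mpchcm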


https://bitbucket.org/yt_analysis/yt/changeset/d7b6451bdf7b/
changeset:   d7b6451bdf7b
branch:      yt
user:        MatthewTurk
date:        2012-07-12 00:09:02
summary:     Merging
affected #:  2 files

diff -r 720605eff77a78c3d17763ea647b874ff7fca6d6 -r d7b6451bdf7bdf4d17e0408ef600dd9cd4203c9b yt/analysis_modules/halo_finding/rockstar/rockstar_interface.pyx
--- a/yt/analysis_modules/halo_finding/rockstar/rockstar_interface.pyx
+++ b/yt/analysis_modules/halo_finding/rockstar/rockstar_interface.pyx
@@ -238,11 +238,9 @@
 
 cdef class RockstarInterface
 
-cdef RockstarInterface rh
 cdef void rh_read_particles(char *filename, particle **p, np.int64_t *num_p):
-    cdef int i, fi, npart, tnpart
-    cdef np.float64_t conv[6], left_edge[6], right_edge[3]
-    dd = rh.data_source
+    print 'reading from particle filename %s'%filename # should print ./inline.0
+    cdef np.float64_t conv[6], left_edge[6]
     cdef np.ndarray[np.int64_t, ndim=1] arri
     cdef np.ndarray[np.float64_t, ndim=1] arr
     block = int(str(filename).rsplit(".")[-1])
@@ -257,12 +255,9 @@
     #print "Loading indices: size = ", tnpart
     conv[0] = conv[1] = conv[2] = rh.pf["mpchcm"]
     conv[3] = conv[4] = conv[5] = 1e-5
-    left_edge[0] = rh.le[0]
-    left_edge[1] = rh.le[1]
-    left_edge[2] = rh.le[2]
-    right_edge[0] = rh.re[0]
-    right_edge[1] = rh.re[1]
-    right_edge[2] = rh.re[2]
+    left_edge[0] = rh.pf.domain_left_edge[0]
+    left_edge[1] = rh.pf.domain_left_edge[1]
+    left_edge[2] = rh.pf.domain_left_edge[2]
     left_edge[3] = left_edge[4] = left_edge[5] = 0.0
     pi = 0
     for g in grids:
@@ -277,9 +272,6 @@
                       "particle_velocity_z"]:
             arr = dd._get_data_from_grid(g, field).astype("float64")
             for i in range(npart):
-                if fi<3: 
-                    if  left_edge[i] > arr[i]: continue
-                    if right_edge[i] < arr[i]: continue
                 p[0][i+pi].pos[fi] = (arr[i]-left_edge[fi])*conv[fi]
             fi += 1
         pi += npart
@@ -304,8 +296,9 @@
                        int parallel = False, int num_readers = 1,
                        int num_writers = 1,
                        int writing_port = -1, int block_ratio = 1,
-                       int periodic = 1, int min_halo_size = 20,
-                       char *outbase = 'None'):
+                       int periodic = 1, int num_snaps = 1,
+                       int min_halo_size = 25,
+                       int periodic = 1, 
         global PARALLEL_IO, PARALLEL_IO_SERVER_ADDRESS, PARALLEL_IO_SERVER_PORT
         global FILENAME, FILE_FORMAT, NUM_SNAPS, STARTING_SNAP, h0, Ol, Om
         global BOX_SIZE, PERIODIC, PARTICLE_MASS, NUM_BLOCKS, NUM_READERS
@@ -324,10 +317,11 @@
         FILENAME = "inline.<block>"
         FILE_FORMAT = "GENERIC"
         OUTPUT_FORMAT = "ASCII"
+        NUM_SNAPS = num_snaps
+        NUM_READERS = num_readers
         NUM_SNAPS = 1
-        NUM_READERS = num_readers
-        NUM_BLOCKS = num_readers * block_ratio
         NUM_WRITERS = num_writers
+        MIN_HALO_OUTPUT_SIZE=min_halo_size
         self.block_ratio = block_ratio
 
         h0 = self.pf.hubble_constant


diff -r 720605eff77a78c3d17763ea647b874ff7fca6d6 -r d7b6451bdf7bdf4d17e0408ef600dd9cd4203c9b yt/frontends/art/data_structures.py
--- a/yt/frontends/art/data_structures.py
+++ b/yt/frontends/art/data_structures.py
@@ -944,5 +944,14 @@
 
     @classmethod
     def _is_valid(self, *args, **kwargs):
-        return False # We make no effort to auto-detect ART data
+        """
+        Defined for Daniel Ceverino's file naming scheme.
+        This could differ for other formats.
+        """
+        fn = ("%s" % (os.path.basename(args[0])))
+        f = ("%s" % args[0])
+        if fn.endswith(".d") and fn.startswith('10Mpc') and\
+                os.path.exists(f): 
+                return True
+        return False

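The merge brings both pieces together: ART outputs matching the naming scheme can now be picked up by load()'s frontend auto-detection, which asks each registered frontend whether the path is valid. A hedged sketch of that pattern, assuming yt 2.x's output_type_registry (the helper name is illustrative, not part of these commits):

    from yt.utilities.parameter_file_storage import output_type_registry

    def candidate_frontends(path):
        # Names of every registered frontend whose _is_valid classmethod
        # claims this path; load() resolves among such candidates.
        return [name for name, cls in output_type_registry.items()
                if cls._is_valid(path)]
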
Repository URL: https://bitbucket.org/yt_analysis/yt/



