[Yt-svn] yt: 2 new changesets

hg at spacepope.org hg at spacepope.org
Fri Jan 28 07:22:49 PST 2011


hg Repository: yt
details:   yt/rev/5a76ebe4f642
changeset: 3693:5a76ebe4f642
user:      Matthew Turk <matthewturk at gmail.com>
date:
Fri Jan 28 10:22:27 2011 -0500
description:
Important fix: non-uniform dx domains were receiving incorrect slices.  Thanks
to JC Passy for bringing this to my attention.

hg Repository: yt
details:   yt/rev/bd48b060d52e
changeset: 3694:bd48b060d52e
user:      Matthew Turk <matthewturk at gmail.com>
date:
Fri Jan 28 10:22:27 2011 -0500
description:
Important fix: non-uniform dx domains were receiving incorrect slices.  Thanks
to JC Passy for bringing this to my attention.

diffstat:

 .hgtags                                                     |      2 +
 README                                                      |      2 +-
 doc/install_script.sh                                       |      2 +-
 scripts/iyt                                                 |     48 +-
 setup.py                                                    |     68 +-
 yt/analysis_modules/halo_merger_tree/enzofof_merger_tree.py |     42 +-
 yt/data_objects/data_containers.py                          |      2 +-
 yt/frontends/ramses/_ramses_reader.cpp                      |  11579 +++
 yt/frontends/ramses/setup.py                                |      9 +-
 yt/utilities/_libconfig/AUTHORS                             |      5 -
 yt/utilities/_libconfig/COPYING.LIB                         |    510 -
 yt/utilities/_libconfig/README                              |     58 -
 yt/utilities/_libconfig/README.yt                           |     20 -
 yt/utilities/_libconfig/grammar.c                           |   2001 -
 yt/utilities/_libconfig/grammar.h                           |    113 -
 yt/utilities/_libconfig/libconfig.c                         |   1585 -
 yt/utilities/_libconfig/libconfig.h                         |    309 -
 yt/utilities/_libconfig/parsectx.h                          |     48 -
 yt/utilities/_libconfig/scanctx.c                           |    171 -
 yt/utilities/_libconfig/scanctx.h                           |     62 -
 yt/utilities/_libconfig/scanner.c                           |   2342 -
 yt/utilities/_libconfig/scanner.h                           |    326 -
 yt/utilities/_libconfig/scanner.l                           |    188 -
 yt/utilities/_libconfig/strbuf.c                            |     58 -
 yt/utilities/_libconfig/strbuf.h                            |     40 -
 yt/utilities/_libconfig/wincompat.h                         |     90 -
 yt/utilities/amr_utils.c                                    |  35083 ++++++++++
 yt/utilities/logger.py                                      |     20 +-
 yt/utilities/parallel_tools/parallel_analysis_interface.py  |      5 +-
 yt/utilities/setup.py                                       |     22 +-
 30 files changed, 46684 insertions(+), 8126 deletions(-)

diffs (truncated from 55053 to 300 lines):

diff -r ece55615d3ec -r bd48b060d52e .hgtags
--- a/.hgtags	Fri Jan 28 00:09:01 2011 -0500
+++ b/.hgtags	Fri Jan 28 10:22:27 2011 -0500
@@ -5152,3 +5152,5 @@
 0000000000000000000000000000000000000000 svn.993
 fff7118f00e25731ccf37cba3082b8fcb73cf90e svn.371
 0000000000000000000000000000000000000000 svn.371
+ca6e536c15a60070e6988fd472dc771a1897e170 yt-2.0
+882c41eed5dd4a3cdcbb567bcb79b833e46b1f42 yt-2.0.1
diff -r ece55615d3ec -r bd48b060d52e README
--- a/README	Fri Jan 28 00:09:01 2011 -0500
+++ b/README	Fri Jan 28 10:22:27 2011 -0500
@@ -1,4 +1,4 @@
-Hi there!  You've just downloaded yt, an analysis tool for 3D Enzo adaptive
+Hi there!  You've just downloaded yt, an analysis tool for Enzo adaptive
 mesh refinement datasets.  It's written in python and based on the NumPy and
 Matplotlib components.
 
diff -r ece55615d3ec -r bd48b060d52e doc/install_script.sh
--- a/doc/install_script.sh	Fri Jan 28 00:09:01 2011 -0500
+++ b/doc/install_script.sh	Fri Jan 28 10:22:27 2011 -0500
@@ -17,7 +17,7 @@
 
 DEST_SUFFIX="yt-`uname -p`"
 DEST_DIR="`pwd`/${DEST_SUFFIX/ /}"   # Installation location
-BRANCH="yt" # This is the branch to which we will forcibly update.
+BRANCH="stable" # This is the branch to which we will forcibly update.
 
 # Here's where you put the HDF5 path if you like; otherwise it'll download it
 # and install it on its own
diff -r ece55615d3ec -r bd48b060d52e scripts/iyt
--- a/scripts/iyt	Fri Jan 28 00:09:01 2011 -0500
+++ b/scripts/iyt	Fri Jan 28 10:22:27 2011 -0500
@@ -1,7 +1,6 @@
 #!python
-import os, re
+import os
 from yt.mods import *
-from yt.data_objects.data_containers import AMRData
 namespace = locals().copy()
 
 doc = """\
@@ -266,49 +265,4 @@
 
 #main()
 
-
-# Now we add some tab completers, in the vein of:
-# http://pymel.googlecode.com/svn/trunk/tools/ipymel.py
-# We'll start with some fields.
-
-def yt_fieldname_completer(self, event):
-    """Match dictionary completions"""
-    #print "python_matches", event.symbol
-    #text = event.symbol # Not sure why this no longer works
-    text = event.line
-    #print repr(text)
-    # Another option, seems to work great. Catches things like ''.<tab>
-    #print repr(text), dir(text)
-    #m = re.match(r"(\S+(\.\w+)*)\.(\w*)$", text)
-    m = re.match(r"(\S+(\.\w+)*)\[[\'\\\"](\w*)$", text)
-
-    if not m:
-        raise IPython.ipapi.TryNext 
-    
-    expr, attr = m.group(1, 3)
-    #print "COMPLETING ON ", expr, attr
-    #print type(self.Completer), dir(self.Completer)
-    #print self.Completer.namespace
-    #print self.Completer.global_namespace
-    try:
-        obj = eval(expr, self.Completer.namespace)
-    except:
-        try:
-            obj = eval(expr, self.Completer.global_namespace)
-        except:
-            raise IPython.ipapi.TryNext 
-        
-    if isinstance(obj, (AMRData, ) ):
-        #print "COMPLETING ON THIS THING"
-        all_fields = [f for f in sorted(
-                obj.pf.h.field_list + obj.pf.h.derived_field_list)]
-        #matches = self.Completer.python_matches(text)
-        #print "RETURNING ", all_fields
-        return all_fields
-
-
-    raise IPython.ipapi.TryNext 
-
-ip.set_hook('complete_command', yt_fieldname_completer , re_key = ".*" )
-
 ip_shell.mainloop(sys_exit=1,banner=doc)
diff -r ece55615d3ec -r bd48b060d52e setup.py
--- a/setup.py	Fri Jan 28 00:09:01 2011 -0500
+++ b/setup.py	Fri Jan 28 10:22:27 2011 -0500
@@ -5,76 +5,10 @@
 import distribute_setup
 distribute_setup.use_setuptools()
 
-from numpy.distutils.misc_util import appendpath
-from numpy.distutils import log
-
-# Verify that we have Cython installed
-try:
-    import Cython
-except ImportError as e:
-    print "Received error on importing Cython:"
-    print e
-    print "Now attempting to install Cython"
-    import pip
-    rv = pip.main(["install",
-              "http://yt.enzotools.org/dependencies/Cython-latest.tar.gz"])
-    if rv == 1:
-        print "Unable to install Cython.  Please report this bug to yt-users."
-        sys.exit(1)
-
-######
-# This next bit comes from Matthew Brett, to get Cython working with NumPy
-# distutils.  I added a bit to get C++ Cython working.
-from os.path import join as pjoin, dirname
-from distutils.dep_util import newer_group
-from distutils.errors import DistutilsError
-
-
-def generate_a_pyrex_source(self, base, ext_name, source, extension):
-    ''' Monkey patch for numpy build_src.build_src method
-
-    Uses Cython instead of Pyrex.
-
-    Assumes Cython is present
-    '''
-    if self.inplace:
-        target_dir = dirname(base)
-    else:
-        target_dir = appendpath(self.build_src, dirname(base))
-    if extension.language == "c++":
-        cplus = True
-        file_ext = ".cpp"
-    else:
-        cplus = False
-        file_ext = ".c"
-    target_file = pjoin(target_dir, ext_name + file_ext)
-    depends = [source] + extension.depends
-    if self.force or newer_group(depends, target_file, 'newer'):
-        import Cython.Compiler.Main
-        log.info("cythonc:> %s" % (target_file))
-        self.mkpath(target_dir)
-        options = Cython.Compiler.Main.CompilationOptions(
-            defaults=Cython.Compiler.Main.default_options,
-            include_path=extension.include_dirs,
-            language=extension.language, cplus = cplus,
-            output_file=target_file)
-        cython_result = Cython.Compiler.Main.compile(source,
-                                                   options=options)
-        if cython_result.num_errors != 0:
-            raise DistutilsError("%d errors while compiling %r with Cython" \
-                  % (cython_result.num_errors, source))
-    return target_file
-
-
-from numpy.distutils.command import build_src
-build_src.build_src.generate_a_pyrex_source = generate_a_pyrex_source
-# End snippet
-######
-
 import setuptools
 
 DATA_FILES = []
-VERSION = "2.1dev"
+VERSION = "2.0stable"
 
 if os.path.exists('MANIFEST'): os.remove('MANIFEST')
 
diff -r ece55615d3ec -r bd48b060d52e yt/analysis_modules/halo_merger_tree/enzofof_merger_tree.py
--- a/yt/analysis_modules/halo_merger_tree/enzofof_merger_tree.py	Fri Jan 28 00:09:01 2011 -0500
+++ b/yt/analysis_modules/halo_merger_tree/enzofof_merger_tree.py	Fri Jan 28 10:22:27 2011 -0500
@@ -171,8 +171,7 @@
         return of_child_from_me, of_mine_from_me
 
 class EnzoFOFMergerBranch(object):
-    def __init__(self, tree, output_num, halo_id, max_children,
-                 min_relation=0.25):
+    def __init__(self, tree, output_num, halo_id, max_children):
         self.output_num = output_num
         self.halo_id = halo_id
         self.npart = tree.relationships[output_num][halo_id]["NumberOfParticles"]
@@ -184,7 +183,7 @@
         for k in sorted_keys:
             if not str(k).isdigit(): continue
             v = tree.relationships[output_num][halo_id][k]
-            if v[1] > min_relation and halo_count < max_children:
+            if v[1] != 0.0 and halo_count < max_children:
                 halo_count += 1
                 self.children.append((k,v[1],v[2]))
                 if v[1] > max_relationship:
@@ -325,43 +324,6 @@
                     this_halos.append(c[0])
             self.filter_small_halos(this, min_particles)
 
-    def get_massive_progenitors(self, halonum, min_relation=0.25):
-        r"""Returns a list of the most massive progenitor halos.
-
-        This routine walks down the tree, following the most massive
-        progenitor on each node.
-
-        Parameters
-        ----------
-        halonum : int
-            Halo number at the last output to trace.
-
-        Output
-        ------
-        output : dict
-            Dictionary of redshifts, cycle numbers, and halo numbers
-            of the most massive progenitor.  keys = {redshift, cycle,
-            halonum}
-        """
-        output = {"redshift": [], "cycle": [], "halonum": []}
-        # First (lowest redshift) node in tree
-        halo0 = halonum
-        for cycle in sorted(self.numbers, reverse=True):
-            if cycle not in self.relationships: break
-            if halo0 not in self.relationships[cycle]: break
-            node = self.relationships[cycle][halo0]
-            output["redshift"].append(self.redshifts[cycle])
-            output["cycle"].append(cycle)
-            output["halonum"].append(halo0)
-            # Find progenitor
-            max_rel = 0.0
-            for k,v in node.items():
-                if not str(k).isdigit(): continue
-                if v[1] > max_rel and v[1] > min_relation:
-                    halo0 = k
-                    max_rel = v[1]
-        return output
-
     def print_tree(self):
         r"""Prints the merger tree to stdout.
         """
diff -r ece55615d3ec -r bd48b060d52e yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py	Fri Jan 28 00:09:01 2011 -0500
+++ b/yt/data_objects/data_containers.py	Fri Jan 28 10:22:27 2011 -0500
@@ -800,7 +800,7 @@
         xaxis = x_dict[self.axis]
         yaxis = y_dict[self.axis]
         ds, dx, dy = grid.dds[self.axis], grid.dds[xaxis], grid.dds[yaxis]
-        sl_ind = int((self.coord-self.pf.domain_left_edge[self.axis])/dx) - \
+        sl_ind = int((self.coord-self.pf.domain_left_edge[self.axis])/ds) - \
                      grid.get_global_startindex()[self.axis]
         sl = [slice(None), slice(None), slice(None)]
         sl[self.axis] = slice(sl_ind, sl_ind + 1)
diff -r ece55615d3ec -r bd48b060d52e yt/frontends/ramses/_ramses_reader.cpp
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/frontends/ramses/_ramses_reader.cpp	Fri Jan 28 10:22:27 2011 -0500
@@ -0,0 +1,11579 @@
+/* Generated by Cython 0.13.beta0 on Wed Dec  1 13:02:43 2010 */
+
+#define PY_SSIZE_T_CLEAN
+#include "Python.h"
+#ifndef Py_PYTHON_H
+    #error Python headers needed to compile C extensions, please install development version of Python.
+#else
+
+#include <stddef.h> /* For offsetof */
+#ifndef offsetof
+#define offsetof(type, member) ( (size_t) & ((type*)0) -> member )
+#endif
+
+#if !defined(WIN32) && !defined(MS_WINDOWS)
+  #ifndef __stdcall
+    #define __stdcall
+  #endif
+  #ifndef __cdecl
+    #define __cdecl
+  #endif
+  #ifndef __fastcall
+    #define __fastcall
+  #endif
+#endif
+
+#ifndef DL_IMPORT
+  #define DL_IMPORT(t) t
+#endif
+#ifndef DL_EXPORT
+  #define DL_EXPORT(t) t
+#endif
+
+#ifndef PY_LONG_LONG
+  #define PY_LONG_LONG LONG_LONG
+#endif
+
+#if PY_VERSION_HEX < 0x02040000
+  #define METH_COEXIST 0
+  #define PyDict_CheckExact(op) (Py_TYPE(op) == &PyDict_Type)
+  #define PyDict_Contains(d,o)   PySequence_Contains(d,o)
+#endif
+
+#if PY_VERSION_HEX < 0x02050000
+  typedef int Py_ssize_t;
+  #define PY_SSIZE_T_MAX INT_MAX



More information about the yt-svn mailing list