[yt-svn] commit/yt: 2 new changesets

commits-noreply at bitbucket.org
Mon Jul 13 07:45:40 PDT 2015


2 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/9dba2e28c5ca/
Changeset:   9dba2e28c5ca
Branch:      yt
User:        jzuhone
Date:        2015-07-13 00:24:19+00:00
Summary:     Got this completely backwards
Affected #:  1 file

diff -r 4a6c9dac6675fc9c37acb478efde3717651b1e0b -r 9dba2e28c5cab042fbe7a16fda4afc9853288c18 yt/frontends/athena/data_structures.py
--- a/yt/frontends/athena/data_structures.py
+++ b/yt/frontends/athena/data_structures.py
@@ -28,7 +28,7 @@
     get_box_grids_level
 from yt.geometry.geometry_handler import \
     YTDataChunk
-from yt.extern.six import PY3
+from yt.extern.six import PY2, PY3
 
 from .fields import AthenaFieldInfo
 from yt.units.yt_array import YTQuantity
@@ -36,13 +36,13 @@
     decompose_array, get_psize
 
 def chk23(strin):
-    if PY3:
+    if PY2:
         return strin
     else:
         return strin.encode('utf-8')
 
 def str23(strin):
-    if PY3:
+    if PY2:
         return strin
     else:
         if isinstance(strin, list):
@@ -137,7 +137,7 @@
     elif chk23("time") in splitup:
         time_index = splitup.index(chk23("time"))
         grid['time'] = float(str23(splitup[time_index+1]))
-    
+
 class AthenaHierarchy(GridIndex):
 
     grid = AthenaGrid
@@ -150,7 +150,7 @@
         self.dataset_type = dataset_type
         # for now, the index file is the dataset!
         self.index_filename = os.path.join(os.getcwd(), self.dataset.filename)
-        if PY3:
+        if PY2:
             self._fhandle = file(self.index_filename,'rb')
         else:
             self._fhandle = open(self.index_filename,'rb')
@@ -232,7 +232,7 @@
             line = f.readline()
         f.close()
 
-        # It seems some datasets have a mismatch between ncells and 
+        # It seems some datasets have a mismatch between ncells and
         # the actual grid dimensions.
         if np.prod(grid['dimensions']) != grid['ncells']:
             grid['dimensions'] -= 1
@@ -315,9 +315,9 @@
             j=j+1
 
         gres = glis + gdims*gdds
-        # Now we convert the glis, which were left edges (floats), to indices 
+        # Now we convert the glis, which were left edges (floats), to indices
         # from the domain left edge.  Then we do a bunch of fixing now that we
-        # know the extent of all the grids. 
+        # know the extent of all the grids.
         glis = np.round((glis - self.dataset.domain_left_edge.ndarray_view())/gdds).astype('int')
         new_dre = np.max(gres,axis=0)
         self.dataset.domain_right_edge[:] = np.round(new_dre, decimals=12)[:]
@@ -473,8 +473,8 @@
             storage_filename = '%s.yt' % filename.split('/')[-1]
         self.storage_filename = storage_filename
         self.backup_filename = self.filename[:-4] + "_backup.gdf"
-        # Unfortunately we now have to mandate that the index gets 
-        # instantiated so that we can make sure we have the correct left 
+        # Unfortunately we now have to mandate that the index gets
+        # instantiated so that we can make sure we have the correct left
         # and right domain edges.
         self.index
 
@@ -563,7 +563,7 @@
         if dataset_dir.endswith("id0"):
             dname = "id0/"+dname
             dataset_dir = dataset_dir[:-3]
-            
+
         gridlistread = glob.glob(os.path.join(dataset_dir, 'id*/%s-id*%s' % (dname[4:-9],dname[-9:])))
         if 'id0' in dname :
             gridlistread += glob.glob(os.path.join(dataset_dir, 'id*/lev*/%s*-lev*%s' % (dname[4:-9],dname[-9:])))
@@ -571,7 +571,7 @@
             gridlistread += glob.glob(os.path.join(dataset_dir, 'lev*/%s*-lev*%s' % (dname[:-9],dname[-9:])))
         ndots = dname.count(".")
         gridlistread = [fn for fn in gridlistread if os.path.basename(fn).count(".") == ndots]
-        self.nvtk = len(gridlistread)+1 
+        self.nvtk = len(gridlistread)+1
 
         self.current_redshift = self.omega_lambda = self.omega_matter = \
             self.hubble_constant = self.cosmological_simulation = 0.0
@@ -599,4 +599,3 @@
 
     def __repr__(self):
         return self.basename.rsplit(".", 1)[0]
-
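
The fix above inverts the Python-version check rather than the conversion itself: the Athena reader opens its index file in binary mode, so on Python 2, where str is already bytes, the tokens can pass through unchanged, and only on Python 3 do they need an explicit encode/decode. A minimal standalone sketch of the corrected helpers (PY2 is derived from sys.version_info here instead of being imported from yt.extern.six, and the body of str23's list branch, which the diff truncates, is an assumed element-wise decode):

    import sys

    PY2 = sys.version_info[0] == 2  # the yt source imports PY2 from yt.extern.six

    def chk23(strin):
        # Make a literal comparable against tokens read from a file opened in 'rb' mode.
        if PY2:
            return strin                  # str is already bytes on Python 2
        else:
            return strin.encode('utf-8')  # Python 3: str -> bytes

    def str23(strin):
        # Turn data read in 'rb' mode back into native strings.
        if PY2:
            return strin                  # already a native str on Python 2
        elif isinstance(strin, list):
            return [s.decode('utf-8') for s in strin]  # assumed: decode each element
        else:
            return strin.decode('utf-8')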


https://bitbucket.org/yt_analysis/yt/commits/c014ad925d83/
Changeset:   c014ad925d83
Branch:      yt
User:        jzuhone
Date:        2015-07-13 13:33:47+00:00
Summary:     Prevent test failures on Python 3 by making all of these field names tuples
Affected #:  1 file

diff -r 9dba2e28c5cab042fbe7a16fda4afc9853288c18 -r c014ad925d83b6f1a7d2124cd941861a87e20d19 yt/fields/tests/test_fields.py
--- a/yt/fields/tests/test_fields.py
+++ b/yt/fields/tests/test_fields.py
@@ -222,12 +222,12 @@
     def unitless_data(field, data):
             return np.ones(data['density'].shape)
 
-    ds.add_field('density_alias_no_units', function=density_alias)
-    ds.add_field('density_alias_auto', function=density_alias,
+    ds.add_field(('gas','density_alias_no_units'), function=density_alias)
+    ds.add_field(('gas','density_alias_auto'), function=density_alias,
                  units='auto')
-    ds.add_field('density_alias_wrong_units', function=density_alias,
+    ds.add_field(('gas','density_alias_wrong_units'), function=density_alias,
                  units='m/s')
-    ds.add_field('density_alias_unparseable_units', function=density_alias,
+    ds.add_field(('gas','density_alias_unparseable_units'), function=density_alias,
                  units='dragons')
 
     assert_raises(YTFieldUnitError, get_data, ds, 'density_alias_no_units')
@@ -238,11 +238,11 @@
     dens = ad['density_alias_auto']
     assert_equal(str(dens.units), 'g/cm**3')
 
-    ds.add_field('dimensionless', function=unitless_data)
-    ds.add_field('dimensionless_auto', function=unitless_data,
+    ds.add_field(('gas','dimensionless'), function=unitless_data)
+    ds.add_field(('gas','dimensionless_auto'), function=unitless_data,
                  units='auto')
-    ds.add_field('dimensionless_explicit', function=unitless_data, units='')
-    ds.add_field('dimensionful', function=unitless_data, units='g/cm**3')
+    ds.add_field(('gas','dimensionless_explicit'), function=unitless_data, units='')
+    ds.add_field(('gas','dimensionful'), function=unitless_data, units='g/cm**3')
 
     assert_equal(str(ad['dimensionless'].units), 'dimensionless')
     assert_equal(str(ad['dimensionless_auto'].units), 'dimensionless')
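
For context on the change above: yt addresses a field by a (field type, field name) tuple, and passing bare strings to add_field leaves the field type implicit, which is what the commit summary points to as the source of the Python 3 test failures. A minimal usage sketch of the explicit tuple form (the derived field is hypothetical and built on fake_random_ds from yt.testing rather than the fixtures in test_fields.py):

    from yt.testing import fake_random_ds

    def _double_density(field, data):
        # hypothetical derived field: twice the gas density, same units
        return 2.0 * data['gas', 'density']

    ds = fake_random_ds(16)
    ds.add_field(('gas', 'double_density'),   # explicit (field type, field name) tuple
                 function=_double_density,
                 units='g/cm**3')

    ad = ds.all_data()
    print(ad['gas', 'double_density'].units)  # g/cm**3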

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.


