[yt-svn] commit/yt: 13 new changesets

commits-noreply at bitbucket.org
Mon Nov 17 12:34:22 PST 2014


13 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/50bf74645806/
Changeset:   50bf74645806
Branch:      yt
User:        cosmosquark
Date:        2014-11-11 00:40:52+00:00
Summary:     RAMSESIndex.min_level and .max_level now correspond to the minimum and maximum spatial resolution levels in RAMSES. Checks like get_smallest_dx() should now work correctly when analyzing RAMSES data.
Affected #:  1 file

diff -r feba284df7fea6ac46d0329b9ed2a0bfde6487a2 -r 50bf74645806154a408decb98faf00795aa6e225 yt/frontends/ramses/data_structures.py
--- a/yt/frontends/ramses/data_structures.py
+++ b/yt/frontends/ramses/data_structures.py
@@ -161,8 +161,10 @@
                 ("particle_identifier", "I"),
                 ("particle_refinement_level", "I")]
         if hvals["nstar_tot"] > 0:
-            particle_fields += [("particle_age", "d"),
+                particle_fields += [("particle_age", "d"),
                                 ("particle_metallicity", "d")]
+
+             
         field_offsets = {}
         _pfields = {}
         for field, vtype in particle_fields:
@@ -224,7 +226,7 @@
                                 self.amr_header['nboundary']*l]
             return ng
         min_level = self.ds.min_level
-        max_level = min_level
+        max_level = self.ds.max_level
         nx, ny, nz = (((i-1.0)/2.0) for i in self.amr_header['nx'])
         for level in range(self.amr_header['nlevelmax']):
             # Easier if do this 1-indexed
@@ -331,6 +333,7 @@
 class RAMSESIndex(OctreeIndex):
 
     def __init__(self, ds, dataset_type='ramses'):
+        self._ds = ds # TODO: Figure out the class composition better!
         self.fluid_field_list = ds._fields_in_file
         self.dataset_type = dataset_type
         self.dataset = weakref.proxy(ds)
@@ -348,6 +351,7 @@
                          for dom in self.domains)
         self.max_level = max(dom.max_level for dom in self.domains)
         self.num_grids = total_octs
+#	print self.max_level
 
     def _detect_output_fields(self):
         # Do we want to attempt to figure out what the fields are in the file?
@@ -369,12 +373,12 @@
         
 
         # TODO: copy/pasted from DomainFile; needs refactoring!
-        num = os.path.basename(self.dataset.parameter_filename).split("."
+        num = os.path.basename(self._ds.parameter_filename).split("."
                 )[0].split("_")[1]
         testdomain = 1 # Just pick the first domain file to read
         basename = "%s/%%s_%s.out%05i" % (
             os.path.abspath(
-              os.path.dirname(self.dataset.parameter_filename)),
+              os.path.dirname(self._ds.parameter_filename)),
             num, testdomain)
         hydro_fn = basename % "hydro"
         # Do we have a hydro file?
@@ -422,6 +426,7 @@
                       "Pressure","Metallicity"]
         while len(fields) < nvar:
             fields.append("var"+str(len(fields)))
+
         mylog.debug("No fields specified by user; automatically setting fields array to %s", str(fields))
         self.fluid_field_list = fields
 
@@ -455,14 +460,23 @@
         for subset in oobjs:
             yield YTDataChunk(dobj, "io", [subset], None, cache = cache)
 
+    # since RAMSES works in indexing of 1, we want 2**self.max_level rather than 2**self.max_level+1
+    def get_smallest_dx(self):
+        """
+        Returns (in code units) the smallest cell size in the simulation.
+        """
+        return (self.dataset.domain_width /
+                (2**(self.max_level))).min()    
+
+
+
 class RAMSESDataset(Dataset):
     _index_class = RAMSESIndex
     _field_info_class = RAMSESFieldInfo
     gamma = 1.4 # This will get replaced on hydro_fn open
     
     def __init__(self, filename, dataset_type='ramses',
-                 fields = None, storage_filename = None,
-                 units_override=None):
+                 fields = None, storage_filename = None):
         # Here we want to initiate a traceback, if the reader is not built.
         if isinstance(fields, types.StringTypes):
             fields = field_aliases[fields]
@@ -472,7 +486,7 @@
         '''
         self.fluid_types += ("ramses",)
         self._fields_in_file = fields
-        Dataset.__init__(self, filename, dataset_type, units_override=units_override)
+        Dataset.__init__(self, filename, dataset_type)
         self.storage_filename = storage_filename
 
     def __repr__(self):
@@ -528,7 +542,7 @@
         # one-indexed, but it also does refer to the *oct* dimensions -- so
         # this means that a levelmin of 1 would have *1* oct in it.  So a
         # levelmin of 2 would have 8 octs at the root mesh level.
-        self.min_level = rheader['levelmin'] - 1
+        self.min_level = rheader['levelmin']
         # Now we read the hilbert indices
         self.hilbert_indices = {}
         if rheader['ordering type'] == "hilbert":
@@ -540,7 +554,7 @@
         self.current_time = self.parameters['time'] * self.parameters['unit_t']
         self.domain_left_edge = np.zeros(3, dtype='float64')
         self.domain_dimensions = np.ones(3, dtype='int32') * \
-                        2**(self.min_level+1)
+                        2**(self.min_level)
         self.domain_right_edge = np.ones(3, dtype='float64')
         # This is likely not true, but I am not sure how to otherwise
         # distinguish them.
@@ -551,8 +565,8 @@
         self.omega_lambda = rheader["omega_l"]
         self.omega_matter = rheader["omega_m"]
         self.hubble_constant = rheader["H0"] / 100.0 # This is H100
-        self.max_level = rheader['levelmax'] - self.min_level
-        f.close()
+#        self.max_level = rheader['levelmax'] - self.min_level
+	self.max_level = rheader['levelmax'] ### change
 
     @classmethod
     def _is_valid(self, *args, **kwargs):
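
For context, a minimal sketch of the cell-size arithmetic this changeset targets, using assumed header values (levelmin = 7, levelmax = 12) rather than anything read from a real dataset. RAMSES reports levelmin/levelmax 1-indexed, and with the convention in this changeset (max_level set to levelmax) the overridden get_smallest_dx() reduces to boxlen / 2**levelmax:

# Illustrative sketch only, not yt code: assumed RAMSES header values for a
# unit box with levelmin = 7 (root grid) and levelmax = 12 (finest refinement).
import numpy as np

levelmin, levelmax = 7, 12        # hypothetical values from an info_*.txt header
domain_width = np.ones(3)         # unit box, in code units

# With max_level == levelmax (1-indexed, as in this changeset), the overridden
# get_smallest_dx() becomes domain_width / 2**max_level:
smallest_dx = (domain_width / 2**levelmax).min()
print(smallest_dx)                # 1/4096 = 0.000244140625, i.e. boxlen / 2**levelmax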


https://bitbucket.org/yt_analysis/yt/commits/721fcda4c526/
Changeset:   721fcda4c526
Branch:      yt
User:        cosmosquark
Date:        2014-11-11 00:48:47+00:00
Summary:     updating
Affected #:  1 file

diff -r 50bf74645806154a408decb98faf00795aa6e225 -r 721fcda4c52628894c3326be860acdc9770fea72 yt/frontends/ramses/data_structures.py
--- a/yt/frontends/ramses/data_structures.py
+++ b/yt/frontends/ramses/data_structures.py
@@ -161,10 +161,9 @@
                 ("particle_identifier", "I"),
                 ("particle_refinement_level", "I")]
         if hvals["nstar_tot"] > 0:
-                particle_fields += [("particle_age", "d"),
+             particle_fields += [("particle_age", "d"),
                                 ("particle_metallicity", "d")]
 
-             
         field_offsets = {}
         _pfields = {}
         for field, vtype in particle_fields:
@@ -333,7 +332,6 @@
 class RAMSESIndex(OctreeIndex):
 
     def __init__(self, ds, dataset_type='ramses'):
-        self._ds = ds # TODO: Figure out the class composition better!
         self.fluid_field_list = ds._fields_in_file
         self.dataset_type = dataset_type
         self.dataset = weakref.proxy(ds)
@@ -351,7 +349,6 @@
                          for dom in self.domains)
         self.max_level = max(dom.max_level for dom in self.domains)
         self.num_grids = total_octs
-#	print self.max_level
 
     def _detect_output_fields(self):
         # Do we want to attempt to figure out what the fields are in the file?
@@ -373,12 +370,12 @@
         
 
         # TODO: copy/pasted from DomainFile; needs refactoring!
-        num = os.path.basename(self._ds.parameter_filename).split("."
+        num = os.path.basename(self.dataset.parameter_filename).split("."
                 )[0].split("_")[1]
         testdomain = 1 # Just pick the first domain file to read
         basename = "%s/%%s_%s.out%05i" % (
             os.path.abspath(
-              os.path.dirname(self._ds.parameter_filename)),
+              os.path.dirname(self.dataset.parameter_filename)),
             num, testdomain)
         hydro_fn = basename % "hydro"
         # Do we have a hydro file?
@@ -426,7 +423,6 @@
                       "Pressure","Metallicity"]
         while len(fields) < nvar:
             fields.append("var"+str(len(fields)))
-
         mylog.debug("No fields specified by user; automatically setting fields array to %s", str(fields))
         self.fluid_field_list = fields
 
@@ -476,7 +472,8 @@
     gamma = 1.4 # This will get replaced on hydro_fn open
     
     def __init__(self, filename, dataset_type='ramses',
-                 fields = None, storage_filename = None):
+                 fields = None, storage_filename = None,
+                 units_override=None):
         # Here we want to initiate a traceback, if the reader is not built.
         if isinstance(fields, types.StringTypes):
             fields = field_aliases[fields]
@@ -486,7 +483,7 @@
         '''
         self.fluid_types += ("ramses",)
         self._fields_in_file = fields
-        Dataset.__init__(self, filename, dataset_type)
+        Dataset.__init__(self, filename, dataset_type, units_override=units_override)
         self.storage_filename = storage_filename
 
     def __repr__(self):
@@ -565,8 +562,7 @@
         self.omega_lambda = rheader["omega_l"]
         self.omega_matter = rheader["omega_m"]
         self.hubble_constant = rheader["H0"] / 100.0 # This is H100
-#        self.max_level = rheader['levelmax'] - self.min_level
-	self.max_level = rheader['levelmax'] ### change
+	self.max_level = rheader['levelmax']
 
     @classmethod
     def _is_valid(self, *args, **kwargs):


https://bitbucket.org/yt_analysis/yt/commits/e04971a65679/
Changeset:   e04971a65679
Branch:      yt
User:        cosmosquark
Date:        2014-11-11 01:09:35+00:00
Summary:     data_structures.py edited online with Bitbucket
Affected #:  1 file

diff -r 721fcda4c52628894c3326be860acdc9770fea72 -r e04971a65679e6cd97f3cdc6738e414cea76c9e4 yt/frontends/ramses/data_structures.py
--- a/yt/frontends/ramses/data_structures.py
+++ b/yt/frontends/ramses/data_structures.py
@@ -161,7 +161,7 @@
                 ("particle_identifier", "I"),
                 ("particle_refinement_level", "I")]
         if hvals["nstar_tot"] > 0:
-             particle_fields += [("particle_age", "d"),
+            particle_fields += [("particle_age", "d"),
                                 ("particle_metallicity", "d")]
 
         field_offsets = {}


https://bitbucket.org/yt_analysis/yt/commits/691538887db4/
Changeset:   691538887db4
Branch:      yt
User:        cosmosquark
Date:        2014-11-11 01:30:46+00:00
Summary:     data_structures.py edited online with Bitbucket
Affected #:  1 file

diff -r e04971a65679e6cd97f3cdc6738e414cea76c9e4 -r 691538887db40b91e68ae8b2c322f035d99fd4b9 yt/frontends/ramses/data_structures.py
--- a/yt/frontends/ramses/data_structures.py
+++ b/yt/frontends/ramses/data_structures.py
@@ -558,11 +558,12 @@
         mylog.warning("RAMSES frontend assumes all simulations are cosmological!")
         self.cosmological_simulation = 1
         self.periodicity = (True, True, True)
-        self.current_redshift = (1.0 / rheader["aexp"]) - 1.0
+        self.current_redshift = (1.0 / r.header["aexp"]) - 1.0
         self.omega_lambda = rheader["omega_l"]
         self.omega_matter = rheader["omega_m"]
         self.hubble_constant = rheader["H0"] / 100.0 # This is H100
-	self.max_level = rheader['levelmax']
+        self.max_level = rheader['levelmax']
+        f.close()
 
     @classmethod
     def _is_valid(self, *args, **kwargs):


https://bitbucket.org/yt_analysis/yt/commits/611013af00d6/
Changeset:   611013af00d6
Branch:      yt
User:        cosmosquark
Date:        2014-11-11 02:08:33+00:00
Summary:     adjusting RAMSES level indices so that they are 0-based for the octree within yt
Affected #:  1 file

diff -r 691538887db40b91e68ae8b2c322f035d99fd4b9 -r 611013af00d6d8123ef2e164b3ac9fb4e3ec2438 yt/frontends/ramses/data_structures.py
--- a/yt/frontends/ramses/data_structures.py
+++ b/yt/frontends/ramses/data_structures.py
@@ -18,7 +18,6 @@
 import stat
 import weakref
 import cStringIO
-
 from yt.funcs import *
 from yt.geometry.oct_geometry_handler import \
     OctreeIndex
@@ -456,16 +455,6 @@
         for subset in oobjs:
             yield YTDataChunk(dobj, "io", [subset], None, cache = cache)
 
-    # since RAMSES works in indexing of 1, we want 2**self.max_level rather than 2**self.max_level+1
-    def get_smallest_dx(self):
-        """
-        Returns (in code units) the smallest cell size in the simulation.
-        """
-        return (self.dataset.domain_width /
-                (2**(self.max_level))).min()    
-
-
-
 class RAMSESDataset(Dataset):
     _index_class = RAMSESIndex
     _field_info_class = RAMSESFieldInfo
@@ -539,7 +528,7 @@
         # one-indexed, but it also does refer to the *oct* dimensions -- so
         # this means that a levelmin of 1 would have *1* oct in it.  So a
         # levelmin of 2 would have 8 octs at the root mesh level.
-        self.min_level = rheader['levelmin']
+        self.min_level = rheader['levelmin'] - 1
         # Now we read the hilbert indices
         self.hilbert_indices = {}
         if rheader['ordering type'] == "hilbert":
@@ -551,7 +540,7 @@
         self.current_time = self.parameters['time'] * self.parameters['unit_t']
         self.domain_left_edge = np.zeros(3, dtype='float64')
         self.domain_dimensions = np.ones(3, dtype='int32') * \
-                        2**(self.min_level)
+                        2**(self.min_level + 1)
         self.domain_right_edge = np.ones(3, dtype='float64')
         # This is likely not true, but I am not sure how to otherwise
         # distinguish them.
@@ -562,7 +551,7 @@
         self.omega_lambda = rheader["omega_l"]
         self.omega_matter = rheader["omega_m"]
         self.hubble_constant = rheader["H0"] / 100.0 # This is H100
-        self.max_level = rheader['levelmax']
+        self.max_level = rheader['levelmax'] - 1
         f.close()
 
     @classmethod
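
As a hedged illustration of the 0-based bookkeeping this changeset applies, with assumed levelmin/levelmax values (the attribute names follow the diff above):

# Sketch of the level mapping in this changeset, assumed header values only.
levelmin, levelmax = 7, 12                 # 1-indexed values from the RAMSES header

min_level = levelmin - 1                   # 0-based coarse level used inside yt  -> 6
domain_dimensions = 2 ** (min_level + 1)   # == 2**levelmin root cells per axis   -> 128
max_level = levelmax - 1                   # 0-based finest level, per this commit -> 11

Later changesets in this series revise max_level again (ending at levelmax - min_level - 1 in the merge), while the min_level and domain_dimensions mapping shown here is what gets merged.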


https://bitbucket.org/yt_analysis/yt/commits/4d341d969fd9/
Changeset:   4d341d969fd9
Branch:      yt
User:        cosmosquark
Date:        2014-11-11 02:09:20+00:00
Summary:     slight bugfix
Affected #:  1 file

diff -r 611013af00d6d8123ef2e164b3ac9fb4e3ec2438 -r 4d341d969fd92486a440ed391b55274fb06e5648 yt/frontends/ramses/data_structures.py
--- a/yt/frontends/ramses/data_structures.py
+++ b/yt/frontends/ramses/data_structures.py
@@ -547,7 +547,7 @@
         mylog.warning("RAMSES frontend assumes all simulations are cosmological!")
         self.cosmological_simulation = 1
         self.periodicity = (True, True, True)
-        self.current_redshift = (1.0 / r.header["aexp"]) - 1.0
+        self.current_redshift = (1.0 / rheader["aexp"]) - 1.0
         self.omega_lambda = rheader["omega_l"]
         self.omega_matter = rheader["omega_m"]
         self.hubble_constant = rheader["H0"] / 100.0 # This is H100


https://bitbucket.org/yt_analysis/yt/commits/5528cae2b7a2/
Changeset:   5528cae2b7a2
Branch:      yt
User:        cosmosquark
Date:        2014-11-11 15:46:41+00:00
Summary:     data_structures.py edited online with Bitbucket
Affected #:  1 file

diff -r 4d341d969fd92486a440ed391b55274fb06e5648 -r 5528cae2b7a20298179cdfae2db5c2d5edd43e55 yt/frontends/ramses/data_structures.py
--- a/yt/frontends/ramses/data_structures.py
+++ b/yt/frontends/ramses/data_structures.py
@@ -551,7 +551,7 @@
         self.omega_lambda = rheader["omega_l"]
         self.omega_matter = rheader["omega_m"]
         self.hubble_constant = rheader["H0"] / 100.0 # This is H100
-        self.max_level = rheader['levelmax'] - 1
+        self.max_level = rheader['levelmax'] - self.min_level
         f.close()
 
     @classmethod


https://bitbucket.org/yt_analysis/yt/commits/aca76eb4847f/
Changeset:   aca76eb4847f
Branch:      yt
User:        cosmosquark
Date:        2014-11-11 15:47:37+00:00
Summary:     data_structures.py edited online with Bitbucket
Affected #:  1 file

diff -r 5528cae2b7a20298179cdfae2db5c2d5edd43e55 -r aca76eb4847fdb9146237c8893e5ab07175d680f yt/frontends/ramses/data_structures.py
--- a/yt/frontends/ramses/data_structures.py
+++ b/yt/frontends/ramses/data_structures.py
@@ -224,7 +224,7 @@
                                 self.amr_header['nboundary']*l]
             return ng
         min_level = self.ds.min_level
-        max_level = self.ds.max_level
+        max_level = min_level
         nx, ny, nz = (((i-1.0)/2.0) for i in self.amr_header['nx'])
         for level in range(self.amr_header['nlevelmax']):
             # Easier if do this 1-indexed
@@ -540,7 +540,7 @@
         self.current_time = self.parameters['time'] * self.parameters['unit_t']
         self.domain_left_edge = np.zeros(3, dtype='float64')
         self.domain_dimensions = np.ones(3, dtype='int32') * \
-                        2**(self.min_level + 1)
+                        2**(self.min_level+1)
         self.domain_right_edge = np.ones(3, dtype='float64')
         # This is likely not true, but I am not sure how to otherwise
         # distinguish them.


https://bitbucket.org/yt_analysis/yt/commits/e8d103c80063/
Changeset:   e8d103c80063
Branch:      yt
User:        cosmosquark
Date:        2014-11-11 15:49:24+00:00
Summary:     needs the domain_dimensions in the get_smallest_dx calculation
Affected #:  1 file

diff -r aca76eb4847fdb9146237c8893e5ab07175d680f -r e8d103c800632624e899b30361d872500f939299 yt/geometry/oct_geometry_handler.py
--- a/yt/geometry/oct_geometry_handler.py
+++ b/yt/geometry/oct_geometry_handler.py
@@ -45,7 +45,7 @@
         Returns (in code units) the smallest cell size in the simulation.
         """
         return (self.dataset.domain_width /
-                (2**(self.max_level+1))).min()
+                (self.dataset.domain_dimensions * 2**(self.max_level))).min()
 
     def convert(self, unit):
         return self.dataset.conversion_factors[unit]
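
A worked check of the revised formula, with assumed values and the level mapping as it stands at the end of this series (min_level = levelmin - 1, domain_dimensions = 2**levelmin per axis, max_level = levelmax - levelmin): the new expression reduces to boxlen / 2**levelmax, the finest RAMSES cell size.

# Illustrative check with assumed values only (not read from a dataset).
import numpy as np

levelmin, levelmax = 7, 12
domain_width = np.ones(3)                                    # unit box, code units
domain_dimensions = np.ones(3, dtype='int32') * 2**levelmin  # 128 root cells per axis
max_level = levelmax - levelmin                              # yt's extra refinement levels

dx_new = (domain_width / (domain_dimensions * 2**max_level)).min()
assert dx_new == 1.0 / 2**levelmax                           # both are exactly 1/4096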


https://bitbucket.org/yt_analysis/yt/commits/7d390496848a/
Changeset:   7d390496848a
Branch:      yt
User:        cosmosquark
Date:        2014-11-12 14:43:30+00:00
Summary:     final edit, going to double check things and then we are good to pull
Affected #:  1 file

diff -r e8d103c800632624e899b30361d872500f939299 -r 7d390496848a01fa113d24d1af249fac14c759a7 yt/frontends/ramses/data_structures.py
--- a/yt/frontends/ramses/data_structures.py
+++ b/yt/frontends/ramses/data_structures.py
@@ -224,7 +224,7 @@
                                 self.amr_header['nboundary']*l]
             return ng
         min_level = self.ds.min_level
-        max_level = min_level
+        max_level = 0 # yt max level is not the same as the RAMSES one.. it is the maximum extra refinement levels ... so for a uni grid run with no refinement, it would be 0... so we start off assuming that
         nx, ny, nz = (((i-1.0)/2.0) for i in self.amr_header['nx'])
         for level in range(self.amr_header['nlevelmax']):
             # Easier if do this 1-indexed
@@ -551,7 +551,7 @@
         self.omega_lambda = rheader["omega_l"]
         self.omega_matter = rheader["omega_m"]
         self.hubble_constant = rheader["H0"] / 100.0 # This is H100
-        self.max_level = rheader['levelmax'] - self.min_level
+        self.max_level = rheader['levelmax'] - self.min_level - 1
         f.close()
 
     @classmethod


https://bitbucket.org/yt_analysis/yt/commits/fd1d74a9b28f/
Changeset:   fd1d74a9b28f
Branch:      yt
User:        cosmosquark
Date:        2014-11-13 13:46:42+00:00
Summary:     updated comment in accordance with PEP 8
Affected #:  1 file

diff -r 7d390496848a01fa113d24d1af249fac14c759a7 -r fd1d74a9b28fa5c1ae38f67e9822dcb6118a1eec yt/frontends/ramses/data_structures.py
--- a/yt/frontends/ramses/data_structures.py
+++ b/yt/frontends/ramses/data_structures.py
@@ -224,7 +224,9 @@
                                 self.amr_header['nboundary']*l]
             return ng
         min_level = self.ds.min_level
-        max_level = 0 # yt max level is not the same as the RAMSES one.. it is the maximum extra refinement levels ... so for a uni grid run with no refinement, it would be 0... so we start off assuming that
+        # yt max level is not the same as the RAMSES one.
+        # yt max level is the maximum number of additional refinement levels
+        # so for a uni grid run with no refinement, it would be 0. So we initially assume that.
         nx, ny, nz = (((i-1.0)/2.0) for i in self.amr_header['nx'])
         for level in range(self.amr_header['nlevelmax']):
             # Easier if do this 1-indexed


https://bitbucket.org/yt_analysis/yt/commits/64e3fa56068d/
Changeset:   64e3fa56068d
Branch:      yt
User:        cosmosquark
Date:        2014-11-13 13:47:33+00:00
Summary:     updated again
Affected #:  1 file

diff -r fd1d74a9b28fa5c1ae38f67e9822dcb6118a1eec -r 64e3fa56068d40f931e4668dff0d6c66ab8cc6e6 yt/frontends/ramses/data_structures.py
--- a/yt/frontends/ramses/data_structures.py
+++ b/yt/frontends/ramses/data_structures.py
@@ -226,7 +226,9 @@
         min_level = self.ds.min_level
         # yt max level is not the same as the RAMSES one.
         # yt max level is the maximum number of additional refinement levels
-        # so for a uni grid run with no refinement, it would be 0. So we initially assume that.
+        # so for a uni grid run with no refinement, it would be 0. 
+        # So we initially assume that.
+        max_level = 0
         nx, ny, nz = (((i-1.0)/2.0) for i in self.amr_header['nx'])
         for level in range(self.amr_header['nlevelmax']):
             # Easier if do this 1-indexed


https://bitbucket.org/yt_analysis/yt/commits/6b9ada3a86ad/
Changeset:   6b9ada3a86ad
Branch:      yt
User:        ngoldbaum
Date:        2014-11-17 20:34:14+00:00
Summary:     Merged in cosmosquark/yt (pull request #1302)

RAMSES frontend improvement (min_level and max_level consistency) + slight octree geometry change
Affected #:  2 files

diff -r e8fb57e66ca42e26052dadf054a5c782740abec9 -r 6b9ada3a86ad7c86171132650c032e4d157f8f79 yt/frontends/ramses/data_structures.py
--- a/yt/frontends/ramses/data_structures.py
+++ b/yt/frontends/ramses/data_structures.py
@@ -18,7 +18,6 @@
 import stat
 import weakref
 import cStringIO
-
 from yt.funcs import *
 from yt.geometry.oct_geometry_handler import \
     OctreeIndex
@@ -163,6 +162,7 @@
         if hvals["nstar_tot"] > 0:
             particle_fields += [("particle_age", "d"),
                                 ("particle_metallicity", "d")]
+
         field_offsets = {}
         _pfields = {}
         for field, vtype in particle_fields:
@@ -224,7 +224,11 @@
                                 self.amr_header['nboundary']*l]
             return ng
         min_level = self.ds.min_level
-        max_level = min_level
+        # yt max level is not the same as the RAMSES one.
+        # yt max level is the maximum number of additional refinement levels
+        # so for a uni grid run with no refinement, it would be 0. 
+        # So we initially assume that.
+        max_level = 0
         nx, ny, nz = (((i-1.0)/2.0) for i in self.amr_header['nx'])
         for level in range(self.amr_header['nlevelmax']):
             # Easier if do this 1-indexed
@@ -551,7 +555,7 @@
         self.omega_lambda = rheader["omega_l"]
         self.omega_matter = rheader["omega_m"]
         self.hubble_constant = rheader["H0"] / 100.0 # This is H100
-        self.max_level = rheader['levelmax'] - self.min_level
+        self.max_level = rheader['levelmax'] - self.min_level - 1
         f.close()
 
     @classmethod

diff -r e8fb57e66ca42e26052dadf054a5c782740abec9 -r 6b9ada3a86ad7c86171132650c032e4d157f8f79 yt/geometry/oct_geometry_handler.py
--- a/yt/geometry/oct_geometry_handler.py
+++ b/yt/geometry/oct_geometry_handler.py
@@ -45,7 +45,7 @@
         Returns (in code units) the smallest cell size in the simulation.
         """
         return (self.dataset.domain_width /
-                (2**(self.max_level+1))).min()
+                (self.dataset.domain_dimensions * 2**(self.max_level))).min()
 
     def convert(self, unit):
         return self.dataset.conversion_factors[unit]
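
A short usage sketch of the merged behaviour; the dataset path is an assumption (output_00080 is a commonly used RAMSES sample output), not something taken from this changeset:

import yt

ds = yt.load("output_00080/info_00080.txt")   # assumed sample RAMSES output
print(ds.min_level, ds.max_level)             # 0-based coarse level and extra refinement levels
print(ds.index.get_smallest_dx())             # domain_width / (domain_dimensions * 2**max_level)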

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.


