[yt-svn] commit/yt: 37 new changesets

commits-noreply at bitbucket.org
Mon Oct 12 11:29:59 PDT 2015


37 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/418fecc430b2/
Changeset:   418fecc430b2
Branch:      yt
User:        ngoldbaum
Date:        2015-09-14 22:17:21+00:00
Summary:     Linting frontends/setup.py
Affected #:  1 file

diff -r 43e11e9fc943815fd62611e5a0626a27982e24db -r 418fecc430b2d645421f659c1180360809812abc yt/frontends/setup.py
--- a/yt/frontends/setup.py
+++ b/yt/frontends/setup.py
@@ -1,5 +1,4 @@
 #!/usr/bin/env python
-import setuptools
 
 
 def configuration(parent_package='', top_path=None):
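
This changeset removes an unused `import setuptools` from a `numpy.distutils`-style setup script. For reference, a minimal sketch of what such a script looks like after linting; the body below is illustrative rather than the full yt version, with the subpackage line assumed:

    #!/usr/bin/env python


    def configuration(parent_package='', top_path=None):
        # numpy.distutils handles the build metadata; nothing from
        # setuptools is referenced, so importing it was dead code
        from numpy.distutils.misc_util import Configuration
        config = Configuration('frontends', parent_package, top_path)
        config.add_subpackage("_skeleton")  # one entry per frontend (illustrative)
        return config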


https://bitbucket.org/yt_analysis/yt/commits/fd6034f6fe7f/
Changeset:   fd6034f6fe7f
Branch:      yt
User:        ngoldbaum
Date:        2015-09-14 22:17:33+00:00
Summary:     Linting _skeleton frontend
Affected #:  3 files

diff -r 418fecc430b2d645421f659c1180360809812abc -r fd6034f6fe7fe02743d419f7725a6c1a5dc521c2 yt/frontends/_skeleton/data_structures.py
--- a/yt/frontends/_skeleton/data_structures.py
+++ b/yt/frontends/_skeleton/data_structures.py
@@ -13,6 +13,8 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
+import os
+
 from yt.data_objects.grid_patch import \
     AMRGridPatch
 from yt.geometry.grid_geometry_handler import \

diff -r 418fecc430b2d645421f659c1180360809812abc -r fd6034f6fe7fe02743d419f7725a6c1a5dc521c2 yt/frontends/_skeleton/fields.py
--- a/yt/frontends/_skeleton/fields.py
+++ b/yt/frontends/_skeleton/fields.py
@@ -13,8 +13,6 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import numpy as np
-from yt.funcs import mylog
 from yt.fields.field_info_container import \
     FieldInfoContainer
 

diff -r 418fecc430b2d645421f659c1180360809812abc -r fd6034f6fe7fe02743d419f7725a6c1a5dc521c2 yt/frontends/_skeleton/setup.py
--- a/yt/frontends/_skeleton/setup.py
+++ b/yt/frontends/_skeleton/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 
 
 def configuration(parent_package='', top_path=None):
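
The unused imports stripped above are exactly what a pyflakes pass reports. A small sketch, assuming pyflakes is installed and exposes `checkPath` in its public `api` module:

    from pyflakes.api import checkPath

    # prints warnings such as "'setuptools' imported but unused"
    # and returns the number of warnings found
    n_warnings = checkPath('yt/frontends/_skeleton/setup.py')
    print(n_warnings)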


https://bitbucket.org/yt_analysis/yt/commits/a8704e3b35f4/
Changeset:   a8704e3b35f4
Branch:      yt
User:        ngoldbaum
Date:        2015-09-14 22:56:02+00:00
Summary:     Linting the art frontend
Affected #:  5 files

diff -r fd6034f6fe7fe02743d419f7725a6c1a5dc521c2 -r a8704e3b35f4af5d510890caaeb75690a033d6c1 yt/frontends/art/data_structures.py
--- a/yt/frontends/art/data_structures.py
+++ b/yt/frontends/art/data_structures.py
@@ -9,48 +9,47 @@
 #
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
+import glob
 import numpy as np
 import os
 import stat
+import struct
 import weakref
-from yt.extern.six.moves import cStringIO
-import difflib
-import glob
 
-from yt.funcs import *
 from yt.geometry.oct_geometry_handler import \
     OctreeIndex
 from yt.geometry.geometry_handler import \
-    Index, YTDataChunk
+    YTDataChunk
 from yt.data_objects.static_output import \
     Dataset, ParticleFile
 from yt.data_objects.octree_subset import \
     OctreeSubset
+from yt.funcs import \
+    mylog
 from yt.geometry.oct_container import \
     ARTOctreeContainer
-from .fields import ARTFieldInfo
-from yt.utilities.io_handler import \
-    io_registry
-from yt.utilities.lib.misc_utilities import \
-    get_box_grids_level
+from yt.frontends.art.definitions import \
+    fluid_fields, \
+    particle_fields, \
+    filename_pattern, \
+    particle_header_struct, \
+    amr_header_struct, \
+    dmparticle_header_struct, \
+    constants, \
+    seek_extras
+from yt.frontends.art.fields import ARTFieldInfo
 from yt.data_objects.particle_unions import \
     ParticleUnion
 from yt.geometry.particle_geometry_handler import \
     ParticleIndex
-from yt.utilities.lib.geometry_utils import compute_morton
 
-from yt.frontends.art.definitions import *
 import yt.utilities.fortran_utils as fpu
-from .io import _read_art_level_info
-from .io import _read_child_level
-from .io import _read_root_level
-from .io import b2t
-from .io import a2b
-
-from yt.utilities.io_handler import \
-    io_registry
-from yt.fields.field_info_container import \
-    FieldInfoContainer, NullFunc
+from yt.frontends.art.io import \
+    _read_art_level_info, \
+    _read_child_level, \
+    _read_root_level, \
+    b2t, \
+    a2b
 
 
 class ARTIndex(OctreeIndex):
@@ -228,7 +227,6 @@
         aexpn = self.parameters["aexpn"]
 
         # all other units
-        wmu = self.parameters["wmu"]
         Om0 = self.parameters['Om0']
         ng = self.parameters['ng']
         boxh = self.parameters['boxh']
@@ -265,7 +263,7 @@
         with open(self._file_amr, 'rb') as f:
             amr_header_vals = fpu.read_attrs(f, amr_header_struct, '>')
             for to_skip in ['tl', 'dtl', 'tlold', 'dtlold', 'iSO']:
-                skipped = fpu.skip(f, endian='>')
+                fpu.skip(f, endian='>')
             (self.ncell) = fpu.read_vector(f, 'i', '>')[0]
             # Try to figure out the root grid dimensions
             est = int(np.rint(self.ncell**(1.0/3.0)))
@@ -380,7 +378,7 @@
             return False
         with open(f, 'rb') as fh:
             try:
-                amr_header_vals = fpu.read_attrs(fh, amr_header_struct, '>')
+                fpu.read_attrs(fh, amr_header_struct, '>')
                 return True
             except:
                 return False
@@ -467,7 +465,6 @@
         aexpn = self.parameters["aexpn"]
 
         # all other units
-        wmu = self.parameters["wmu"]
         Om0 = self.parameters['Om0']
         ng = self.parameters['ng']
         boxh = self.parameters['boxh']
@@ -630,32 +627,32 @@
             try:
                 seek = 4
                 fh.seek(seek)
-                headerstr = np.fromfile(fh, count=1, dtype=(str,45))
-                aexpn = np.fromfile(fh, count=1, dtype='>f4')
-                aexp0 = np.fromfile(fh, count=1, dtype='>f4')
-                amplt = np.fromfile(fh, count=1, dtype='>f4')
-                astep = np.fromfile(fh, count=1, dtype='>f4')
-                istep = np.fromfile(fh, count=1, dtype='>i4')
-                partw = np.fromfile(fh, count=1, dtype='>f4')
-                tintg = np.fromfile(fh, count=1, dtype='>f4')
-                ekin = np.fromfile(fh, count=1, dtype='>f4')
-                ekin1 = np.fromfile(fh, count=1, dtype='>f4')
-                ekin2 = np.fromfile(fh, count=1, dtype='>f4')
-                au0 = np.fromfile(fh, count=1, dtype='>f4')
-                aeu0 = np.fromfile(fh, count=1, dtype='>f4')
-                nrowc = np.fromfile(fh, count=1, dtype='>i4')
-                ngridc = np.fromfile(fh, count=1, dtype='>i4')
-                nspecs = np.fromfile(fh, count=1, dtype='>i4')
-                nseed = np.fromfile(fh, count=1, dtype='>i4')
-                Om0 = np.fromfile(fh, count=1, dtype='>f4')
-                Oml0 = np.fromfile(fh, count=1, dtype='>f4')
-                hubble = np.fromfile(fh, count=1, dtype='>f4')
-                Wp5 = np.fromfile(fh, count=1, dtype='>f4')
-                Ocurv = np.fromfile(fh, count=1, dtype='>f4')
-                wspecies = np.fromfile(fh, count=10, dtype='>f4')
-                lspecies = np.fromfile(fh, count=10, dtype='>i4')
-                extras = np.fromfile(fh, count=79, dtype='>f4')
-                boxsize = np.fromfile(fh, count=1, dtype='>f4')
+                headerstr = np.fromfile(fh, count=1, dtype=(str,45))  # NOQA
+                aexpn = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                aexp0 = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                amplt = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                astep = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                istep = np.fromfile(fh, count=1, dtype='>i4')  # NOQA
+                partw = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                tintg = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                ekin = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                ekin1 = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                ekin2 = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                au0 = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                aeu0 = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                nrowc = np.fromfile(fh, count=1, dtype='>i4')  # NOQA
+                ngridc = np.fromfile(fh, count=1, dtype='>i4')  # NOQA
+                nspecs = np.fromfile(fh, count=1, dtype='>i4')  # NOQA
+                nseed = np.fromfile(fh, count=1, dtype='>i4')  # NOQA
+                Om0 = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                Oml0 = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                hubble = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                Wp5 = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                Ocurv = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                wspecies = np.fromfile(fh, count=10, dtype='>f4')  # NOQA
+                lspecies = np.fromfile(fh, count=10, dtype='>i4')  # NOQA
+                extras = np.fromfile(fh, count=79, dtype='>f4')  # NOQA
+                boxsize = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
                 return True
             except:
                 return False
@@ -786,9 +783,7 @@
             Level[Lev], iNOLL[Lev], iHOLL[Lev] = fpu.read_vector(f, 'i', '>')
             # print 'Level %i : '%Lev, iNOLL
             # print 'offset after level record:',f.tell()
-            iOct = iHOLL[Lev] - 1
             nLevel = iNOLL[Lev]
-            nLevCells = nLevel * nchild
             ntot = ntot + nLevel
 
             # Skip all the oct hierarchy data
@@ -831,11 +826,9 @@
 
     def _read_amr_root(self, oct_handler):
         self.level_offsets
-        f = open(self.ds._file_amr, "rb")
         # add the root *cell* not *oct* mesh
         root_octs_side = self.ds.domain_dimensions[0]/2
         NX = np.ones(3)*root_octs_side
-        octs_side = NX*2 # Level == 0
         LE = np.array([0.0, 0.0, 0.0], dtype='float64')
         RE = np.array([1.0, 1.0, 1.0], dtype='float64')
         root_dx = (RE - LE) / NX
@@ -846,7 +839,7 @@
                            LL[1]:RL[1]:NX[1]*1j,
                            LL[2]:RL[2]:NX[2]*1j]
         root_fc = np.vstack([p.ravel() for p in root_fc]).T
-        nocts_check = oct_handler.add(self.domain_id, 0, root_fc)
+        oct_handler.add(self.domain_id, 0, root_fc)
         assert(oct_handler.nocts == root_fc.shape[0])
         mylog.debug("Added %07i octs on level %02i, cumulative is %07i",
                     root_octs_side**3, 0, oct_handler.nocts)

diff -r fd6034f6fe7fe02743d419f7725a6c1a5dc521c2 -r a8704e3b35f4af5d510890caaeb75690a033d6c1 yt/frontends/art/fields.py
--- a/yt/frontends/art/fields.py
+++ b/yt/frontends/art/fields.py
@@ -13,13 +13,8 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import numpy as np
-
 from yt.fields.field_info_container import \
     FieldInfoContainer
-from yt.units.yt_array import \
-    YTArray
-from yt.frontends.art.definitions import *
 
 b_units = "code_magnetic"
 ra_units = "code_length / code_time**2"
@@ -68,7 +63,7 @@
             tr *= data.ds.parameters['wmu'] * data.ds.parameters['Om0']
             tr *= (data.ds.parameters['gamma'] - 1.)
             tr /= data.ds.parameters['aexpn']**2
-            return  tr * data['art', 'GasEnergy'] / data['art', 'Density']
+            return tr * data['art', 'GasEnergy'] / data['art', 'Density']
         self.add_field(('gas', 'temperature'),
                        function=_temperature, 
                        units='K')

diff -r fd6034f6fe7fe02743d419f7725a6c1a5dc521c2 -r a8704e3b35f4af5d510890caaeb75690a033d6c1 yt/frontends/art/io.py
--- a/yt/frontends/art/io.py
+++ b/yt/frontends/art/io.py
@@ -15,24 +15,30 @@
 
 
 import numpy as np
-import struct
 import os
 import os.path
 import sys
+
+from collections import defaultdict
+
 if sys.version_info >= (3,0,0):
     long = int
-    
-from yt.funcs import *
+
+from yt.frontends.art.definitions import \
+    particle_star_fields, \
+    particle_fields, \
+    star_struct, \
+    hydro_struct
 from yt.utilities.io_handler import \
     BaseIOHandler
-from yt.utilities.fortran_utils import *
 from yt.utilities.logger import ytLogger as mylog
-from yt.frontends.art.definitions import *
-from yt.utilities.physical_constants import sec_per_year
 from yt.utilities.lib.geometry_utils import compute_morton
-from yt.geometry.oct_container import _ORDER_MAX
-from yt.units.yt_array import YTQuantity
-
+from yt.utilities.fortran_utils import \
+    read_vector, \
+    skip
+from yt.units.yt_array import \
+    YTQuantity, \
+    YTArray
 
 class IOHandlerART(BaseIOHandler):
     _dataset_type = "art"
@@ -80,7 +86,6 @@
         key = (selector, ftype)
         if key in self.masks.keys() and self.caching:
             return self.masks[key]
-        ds = self.ds
         pstr = 'particle_position_%s'
         x,y,z = [self._get_field((ftype, pstr % ax)) for ax in 'xyz']
         mask = selector.select_points(x, y, z, 0.0)
@@ -120,7 +125,7 @@
         tr = {}
         ftype, fname = field
         ptmax = self.ws[-1]
-        pbool, idxa, idxb = _determine_field_size(self.ds, ftype, 
+        pbool, idxa, idxb = _determine_field_size(self.ds, ftype,
                                                   self.ls, ptmax)
         npa = idxb - idxa
         sizes = np.diff(np.concatenate(([0], self.ls)))
@@ -178,7 +183,7 @@
             # dark_matter -- stars are regular matter.
             tr[field] /= self.ds.domain_dimensions.prod()
         if tr == {}:
-            tr = dict((f, np.array([])) for f in fields)
+            tr = dict((f, np.array([])) for f in [field])
         if self.caching:
             self.cache[field] = tr[field]
             return self.cache[field]
@@ -195,7 +200,6 @@
         count = data_file.ds.parameters['lspecies'][-1]
         DLE = data_file.ds.domain_left_edge
         DRE = data_file.ds.domain_right_edge
-        dx = (DRE - DLE) / 2**_ORDER_MAX
         with open(data_file.filename, "rb") as f:
             # The first total_particles * 3 values are positions
             pp = np.fromfile(f, dtype = '>f4', count = totcount*3)
@@ -209,7 +213,6 @@
 
     def _identify_fields(self, domain):
         field_list = []
-        tp = domain.total_particles
         self.particle_field_list = [f for f in particle_fields]
         for ptype in self.ds.particle_types_raw:
             for pfield in self.particle_field_list:
@@ -225,7 +228,7 @@
         tr = {}
         ftype, fname = field
         ptmax = self.ws[-1]
-        pbool, idxa, idxb = _determine_field_size(self.ds, ftype, 
+        pbool, idxa, idxb = _determine_field_size(self.ds, ftype,
                                                   self.ls, ptmax)
         npa = idxb - idxa
         sizes = np.diff(np.concatenate(([0], self.ls)))
@@ -258,17 +261,6 @@
                     data[a: a + size] = i
                     a += size
             tr[field] = data
-        if fname == "particle_creation_time":
-            self.tb, self.ages, data = interpolate_ages(
-                tr[field][-nstars:],
-                self.file_stars,
-                self.tb,
-                self.ages,
-                self.ds.current_time)
-            temp = tr.get(field, np.zeros(npa, 'f8'))
-            temp[-nstars:] = data
-            tr[field] = temp
-            del data
         # We check again, after it's been filled
         if fname.startswith("particle_mass"):
             # We now divide by NGrid in order to make this match up.  Note that
@@ -356,7 +348,6 @@
     # ioct always represents the index of the next variable
     # not the current, so shift forward one index
     # the last index isn't used
-    ioctso = iocts.copy()
     iocts[1:] = iocts[:-1]  # shift
     iocts = iocts[:nLevel]  # chop off the last, unused, index
     iocts[0] = iOct  # starting value
@@ -400,11 +391,11 @@
     # Posy   = d_x * (iOctPs(2,iO) + sign ( id , idelta(j,2) ))
     # Posz   = d_x * (iOctPs(3,iO) + sign ( id , idelta(j,3) ))
     # idelta = [[-1,  1, -1,  1, -1,  1, -1,  1],
-              #[-1, -1,  1,  1, -1, -1,  1,  1],
-              #[-1, -1, -1, -1,  1,  1,  1,  1]]
+    #           [-1, -1,  1,  1, -1, -1,  1,  1],
+    #           [-1, -1, -1, -1,  1,  1,  1,  1]]
     # idelta = np.array(idelta)
     # if ncell0 is None:
-        # ncell0 = coarse_grid**3
+    #     ncell0 = coarse_grid**3
     # nchild = 8
     # ndim = 3
     # nshift = nchild -1
@@ -424,15 +415,13 @@
     f.seek(pos)
     return unitary_center, fl, iocts, nLevel, root_level
 
-def get_ranges(skip, count, field, words=6, real_size=4, np_per_page=4096**2, 
+def get_ranges(skip, count, field, words=6, real_size=4, np_per_page=4096**2,
                   num_pages=1):
     #translate every particle index into a file position ranges
     ranges = []
     arr_size = np_per_page * real_size
-    page_size = words * np_per_page * real_size
     idxa, idxb = 0, 0
     posa, posb = 0, 0
-    left = count
     for page in range(num_pages):
         idxb += np_per_page
         for i, fname in enumerate(['x', 'y', 'z', 'vx', 'vy', 'vz']):
@@ -462,7 +451,7 @@
     num_pages = os.path.getsize(file)/(real_size*words*np_per_page)
     fh = open(file, 'r')
     skip, count = idxa, idxb - idxa
-    kwargs = dict(words=words, real_size=real_size, 
+    kwargs = dict(words=words, real_size=real_size,
                   np_per_page=np_per_page, num_pages=num_pages)
     arrs = []
     for field in fields:
@@ -495,7 +484,6 @@
 
 def _read_child_mask_level(f, level_child_offsets, level, nLevel, nhydro_vars):
     f.seek(level_child_offsets[level])
-    nvals = nLevel * (nhydro_vars + 6)  # 2 vars, 2 pads
     ioctch = np.zeros(nLevel, dtype='uint8')
     idc = np.zeros(nLevel, dtype='int32')
 
@@ -639,8 +627,6 @@
         return a2t(b2a(tb))
     if len(tb) < n:
         n = len(tb)
-    age_min = a2t(b2a(tb.max(), **kwargs), **kwargs)
-    age_max = a2t(b2a(tb.min(), **kwargs), **kwargs)
     tbs = -1.*np.logspace(np.log10(-tb.min()),
                           np.log10(-tb.max()), n)
     ages = []

diff -r fd6034f6fe7fe02743d419f7725a6c1a5dc521c2 -r a8704e3b35f4af5d510890caaeb75690a033d6c1 yt/frontends/art/setup.py
--- a/yt/frontends/art/setup.py
+++ b/yt/frontends/art/setup.py
@@ -1,6 +1,5 @@
 #!/usr/bin/env python
-import setuptools
-import os, sys, os.path
+
 
 def configuration(parent_package='',top_path=None):
     from numpy.distutils.misc_util import Configuration

diff -r fd6034f6fe7fe02743d419f7725a6c1a5dc521c2 -r a8704e3b35f4af5d510890caaeb75690a033d6c1 yt/frontends/art/tests/test_outputs.py
--- a/yt/frontends/art/tests/test_outputs.py
+++ b/yt/frontends/art/tests/test_outputs.py
@@ -17,7 +17,10 @@
 from yt.testing import \
     requires_file, \
     assert_equal, \
-    units_override_check
+    units_override_check, \
+    assert_almost_equal
+from yt.units.yt_array import \
+    YTQuantity
 from yt.utilities.answer_testing.framework import \
     requires_ds, \
     big_patch_amr, \
@@ -58,8 +61,8 @@
         ad[('specie2', 'particle_type')].size + \
         ad[('specie3', 'particle_type')].size, AnaNDM
 
-    AnaBoxSize = yt.units.yt_array.YTQuantity(7.1442196564,'Mpc')
-    AnaVolume = yt.units.yt_array.YTQuantity(364.640074656,'Mpc**3')
+    AnaBoxSize = YTQuantity(7.1442196564,'Mpc')
+    AnaVolume = YTQuantity(364.640074656,'Mpc**3')
     Volume = 1
     for i in ds.domain_width.in_units('Mpc'):
         yield assert_almost_equal, i, AnaBoxSize
@@ -69,23 +72,23 @@
     AnaNCells = 4087490
     yield assert_equal, len(ad[('index','cell_volume')]), AnaNCells
 
-    AnaTotDMMass = yt.units.yt_array.YTQuantity(1.01191786811e+14,'Msun')
+    AnaTotDMMass = YTQuantity(1.01191786811e+14,'Msun')
     yield assert_almost_equal, ad[('darkmatter','particle_mass')].sum()\
         .in_units('Msun'), AnaTotDMMass
 
-    AnaTotStarMass = yt.units.yt_array.YTQuantity(1776251.,'Msun')
+    AnaTotStarMass = YTQuantity(1776251.,'Msun')
     yield assert_almost_equal, ad[('stars','particle_mass')].sum()\
         .in_units('Msun'), AnaTotStarMass
 
-    AnaTotStarMassInitial = yt.units.yt_array.YTQuantity(2422854.,'Msun')
+    AnaTotStarMassInitial = YTQuantity(2422854.,'Msun')
     yield assert_almost_equal, ad[('stars','particle_mass_initial')].sum()\
-        .in_units('Msun'), AnaTotStarMass
+        .in_units('Msun'), AnaTotStarMassInitial
 
-    AnaTotGasMass = yt.units.yt_array.YTQuantity(1.781994e+13,'Msun')
+    AnaTotGasMass = YTQuantity(1.781994e+13,'Msun')
     yield assert_almost_equal, ad[('gas','cell_mass')].sum()\
         .in_units('Msun'), AnaTotGasMass
 
-    AnaTotTemp = yt.units.yt_array.YTQuantity(1.5019e11, 'K') #just leaves
+    AnaTotTemp = YTQuantity(1.5019e11, 'K') #just leaves
     yield assert_equal, ad[('gas','temperature')].sum(), AnaTotTemp
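
Several of the header fields in `_is_valid` above are read only to advance the file pointer, so the assignments look unused to a linter; the trailing `# NOQA` marker tells flake8 to skip that line. A minimal sketch of the pattern, with a hypothetical filename:

    import numpy as np

    with open('particle_header.bin', 'rb') as fh:  # hypothetical file
        fh.seek(4)
        # each read documents the record layout while advancing the
        # file position; '# NOQA' silences the unused-variable warning
        aexpn = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
        aexp0 = np.fromfile(fh, count=1, dtype='>f4')  # NOQA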
 
 


https://bitbucket.org/yt_analysis/yt/commits/26ee2fd54e4a/
Changeset:   26ee2fd54e4a
Branch:      yt
User:        ngoldbaum
Date:        2015-09-16 20:40:55+00:00
Summary:     Fix some issues in the NMSU Art answer test
Affected #:  2 files

diff -r a8704e3b35f4af5d510890caaeb75690a033d6c1 -r 26ee2fd54e4a5d631e9db6bd365bd68a4578f6b1 yt/frontends/art/data_structures.py
--- a/yt/frontends/art/data_structures.py
+++ b/yt/frontends/art/data_structures.py
@@ -696,7 +696,7 @@
         oct_handler.fill_level(0, levels, cell_inds, file_inds, tr, source)
         del source
         # Now we continue with the additional levels.
-        for level in range(1, self.ds.max_level + 1):
+        for level in range(1, self.ds.index.max_level + 1):
             no = self.domain.level_count[level]
             noct_range = [0, no]
             source = _read_child_level(

diff -r a8704e3b35f4af5d510890caaeb75690a033d6c1 -r 26ee2fd54e4a5d631e9db6bd365bd68a4578f6b1 yt/frontends/art/tests/test_outputs.py
--- a/yt/frontends/art/tests/test_outputs.py
+++ b/yt/frontends/art/tests/test_outputs.py
@@ -23,22 +23,25 @@
     YTQuantity
 from yt.utilities.answer_testing.framework import \
     requires_ds, \
-    big_patch_amr, \
+    FieldValuesTest, \
     PixelizedProjectionValuesTest, \
     data_dir_load
 from yt.frontends.art.api import ARTDataset
 
-_fields = ("density", "temperature", "particle_mass", ("all", "particle_position_x"))
+_fields = (
+    "density",
+    "temperature",
+    "particle_mass",
+    ("all", "particle_position_x")
+)
 
 d9p = "D9p_500/10MpcBox_HartGal_csf_a0.500.d"
 
 @requires_ds(d9p, big_data=True)
 def test_d9p():
     ds = data_dir_load(d9p)
-    yield assert_equal, str(ds), "10MpcBox_HartGal_csf_a0.500.d"
-    for test in big_patch_amr(d9p, _fields):
-        test_d9p.__name__ = test.description
-        yield test
+    ds.index
+    assert_equal(str(ds), "10MpcBox_HartGal_csf_a0.500.d")
     dso = [None, ("sphere", ("max", (0.1, 'unitary')))]
     for field in _fields:
         for axis in [0, 1, 2]:
@@ -47,22 +50,22 @@
                     yield PixelizedProjectionValuesTest(
                         d9p, axis, field, weight_field,
                         dobj_name)
-
+            yield FieldValuesTest(d9p, field, dobj_name)
 
     ad = ds.all_data()
     # 'Ana' variable values output from the ART Fortran 'ANA' analysis code
     AnaNStars = 6255
-    yield assert_equal, ad[('stars','particle_type')].size, AnaNStars
+    yield assert_equal, ad[('stars', 'particle_type')].size, AnaNStars
     yield assert_equal, ad[('specie4', 'particle_type')].size, AnaNStars
     AnaNDM = 2833405
-    yield assert_equal, ad[('darkmatter','particle_type')].size, AnaNDM
+    yield assert_equal, ad[('darkmatter', 'particle_type')].size, AnaNDM
     yield assert_equal, ad[('specie0', 'particle_type')].size + \
         ad[('specie1', 'particle_type')].size + \
         ad[('specie2', 'particle_type')].size + \
         ad[('specie3', 'particle_type')].size, AnaNDM
 
-    AnaBoxSize = YTQuantity(7.1442196564,'Mpc')
-    AnaVolume = YTQuantity(364.640074656,'Mpc**3')
+    AnaBoxSize = YTQuantity(7.1442196564, 'Mpc')
+    AnaVolume = YTQuantity(364.640074656, 'Mpc**3')
     Volume = 1
     for i in ds.domain_width.in_units('Mpc'):
         yield assert_almost_equal, i, AnaBoxSize
@@ -70,26 +73,28 @@
     yield assert_almost_equal, Volume, AnaVolume
 
     AnaNCells = 4087490
-    yield assert_equal, len(ad[('index','cell_volume')]), AnaNCells
+    yield assert_equal, len(ad[('index', 'cell_volume')]), AnaNCells
 
-    AnaTotDMMass = YTQuantity(1.01191786811e+14,'Msun')
-    yield assert_almost_equal, ad[('darkmatter','particle_mass')].sum()\
-        .in_units('Msun'), AnaTotDMMass
+    AnaTotDMMass = YTQuantity(1.01191786811e+14, 'Msun')
+    yield (assert_almost_equal,
+           ad[('darkmatter', 'particle_mass')].sum().in_units('Msun'),
+           AnaTotDMMass)
 
-    AnaTotStarMass = YTQuantity(1776251.,'Msun')
-    yield assert_almost_equal, ad[('stars','particle_mass')].sum()\
-        .in_units('Msun'), AnaTotStarMass
+    AnaTotStarMass = YTQuantity(1776251., 'Msun')
+    yield (assert_almost_equal,
+           ad[('stars', 'particle_mass')].sum().in_units('Msun'), AnaTotStarMass)
 
-    AnaTotStarMassInitial = YTQuantity(2422854.,'Msun')
-    yield assert_almost_equal, ad[('stars','particle_mass_initial')].sum()\
-        .in_units('Msun'), AnaTotStarMassInitial
+    AnaTotStarMassInitial = YTQuantity(2422854., 'Msun')
+    yield (assert_almost_equal,
+           ad[('stars', 'particle_mass_initial')].sum().in_units('Msun'),
+           AnaTotStarMassInitial)
 
-    AnaTotGasMass = YTQuantity(1.781994e+13,'Msun')
-    yield assert_almost_equal, ad[('gas','cell_mass')].sum()\
-        .in_units('Msun'), AnaTotGasMass
+    AnaTotGasMass = YTQuantity(1.781994e+13, 'Msun')
+    yield (assert_almost_equal, ad[('gas', 'cell_mass')].sum().in_units('Msun'),
+           AnaTotGasMass)
 
-    AnaTotTemp = YTQuantity(1.5019e11, 'K') #just leaves
-    yield assert_equal, ad[('gas','temperature')].sum(), AnaTotTemp
+    AnaTotTemp = YTQuantity(1.5019e11, 'K')  # just leaves
+    yield assert_equal, ad[('gas', 'temperature')].sum(), AnaTotTemp
 
 
 @requires_file(d9p)
@@ -100,4 +105,3 @@
 def test_units_override():
     for test in units_override_check(d9p):
         yield test
-
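
These answer tests rely on nose's generator protocol: a test function that yields `(callable, arg, ...)` tuples produces one collected test case per yield. A minimal sketch of the pattern:

    from yt.testing import assert_equal

    def test_pattern():
        # nose calls assert_equal(value, expected) once per yielded
        # tuple and reports each as a separate test
        for value, expected in [(1, 1), (2, 2)]:
            yield assert_equal, value, expected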


https://bitbucket.org/yt_analysis/yt/commits/dd8d23174899/
Changeset:   dd8d23174899
Branch:      yt
User:        ngoldbaum
Date:        2015-09-16 22:19:02+00:00
Summary:     Updating NMSU Art answer tests to run correctly
Affected #:  2 files

diff -r 26ee2fd54e4a5d631e9db6bd365bd68a4578f6b1 -r dd8d2317489904eb14b24cb63d7e8cfd0e32541f yt/frontends/art/data_structures.py
--- a/yt/frontends/art/data_structures.py
+++ b/yt/frontends/art/data_structures.py
@@ -180,8 +180,6 @@
         self.max_level = limit_level
         self.force_max_level = force_max_level
         self.spread_age = spread_age
-        self.domain_left_edge = np.zeros(3, dtype='float')
-        self.domain_right_edge = np.zeros(3, dtype='float')+1.0
         Dataset.__init__(self, filename, dataset_type,
                          units_override=units_override)
         self.storage_filename = storage_filename
@@ -250,6 +248,8 @@
         """
         Get the various simulation parameters & constants.
         """
+        self.domain_left_edge = np.zeros(3, dtype='float')
+        self.domain_right_edge = np.zeros(3, dtype='float')+1.0
         self.dimensionality = 3
         self.refine_by = 2
         self.periodicity = (True, True, True)
@@ -420,8 +420,6 @@
         self.parameter_filename = filename
         self.skip_stars = skip_stars
         self.spread_age = spread_age
-        self.domain_left_edge = np.zeros(3, dtype='float')
-        self.domain_right_edge = np.zeros(3, dtype='float')+1.0
         Dataset.__init__(self, filename, dataset_type)
         self.storage_filename = storage_filename
 
@@ -488,6 +486,8 @@
         """
         Get the various simulation parameters & constants.
         """
+        self.domain_left_edge = np.zeros(3, dtype='float')
+        self.domain_right_edge = np.zeros(3, dtype='float')+1.0
         self.dimensionality = 3
         self.refine_by = 2
         self.periodicity = (True, True, True)

diff -r 26ee2fd54e4a5d631e9db6bd365bd68a4578f6b1 -r dd8d2317489904eb14b24cb63d7e8cfd0e32541f yt/frontends/art/tests/test_outputs.py
--- a/yt/frontends/art/tests/test_outputs.py
+++ b/yt/frontends/art/tests/test_outputs.py
@@ -29,9 +29,9 @@
 from yt.frontends.art.api import ARTDataset
 
 _fields = (
-    "density",
-    "temperature",
-    "particle_mass",
+    ("gas", "density"),
+    ("gas", "temperature"),
+    ("all", "particle_mass"),
     ("all", "particle_position_x")
 )
 
@@ -41,15 +41,16 @@
 def test_d9p():
     ds = data_dir_load(d9p)
     ds.index
-    assert_equal(str(ds), "10MpcBox_HartGal_csf_a0.500.d")
+    yield assert_equal, str(ds), "10MpcBox_HartGal_csf_a0.500.d"
     dso = [None, ("sphere", ("max", (0.1, 'unitary')))]
     for field in _fields:
         for axis in [0, 1, 2]:
             for dobj_name in dso:
                 for weight_field in [None, "density"]:
-                    yield PixelizedProjectionValuesTest(
-                        d9p, axis, field, weight_field,
-                        dobj_name)
+                    if field[0] not in ds.particle_types:
+                        yield PixelizedProjectionValuesTest(
+                            d9p, axis, field, weight_field,
+                            dobj_name)
             yield FieldValuesTest(d9p, field, dobj_name)
 
     ad = ds.all_data()
@@ -57,12 +58,12 @@
     AnaNStars = 6255
     yield assert_equal, ad[('stars', 'particle_type')].size, AnaNStars
     yield assert_equal, ad[('specie4', 'particle_type')].size, AnaNStars
-    AnaNDM = 2833405
+    AnaNDM = 2833404
     yield assert_equal, ad[('darkmatter', 'particle_type')].size, AnaNDM
-    yield assert_equal, ad[('specie0', 'particle_type')].size + \
-        ad[('specie1', 'particle_type')].size + \
-        ad[('specie2', 'particle_type')].size + \
-        ad[('specie3', 'particle_type')].size, AnaNDM
+    yield assert_equal, (ad[('specie0', 'particle_type')].size +
+                         ad[('specie1', 'particle_type')].size +
+                         ad[('specie2', 'particle_type')].size +
+                         ad[('specie3', 'particle_type')].size), AnaNDM
 
     AnaBoxSize = YTQuantity(7.1442196564, 'Mpc')
     AnaVolume = YTQuantity(364.640074656, 'Mpc**3')
@@ -75,25 +76,26 @@
     AnaNCells = 4087490
     yield assert_equal, len(ad[('index', 'cell_volume')]), AnaNCells
 
-    AnaTotDMMass = YTQuantity(1.01191786811e+14, 'Msun')
+    AnaTotDMMass = YTQuantity(1.01191786808255e+14, 'Msun')
     yield (assert_almost_equal,
            ad[('darkmatter', 'particle_mass')].sum().in_units('Msun'),
            AnaTotDMMass)
 
-    AnaTotStarMass = YTQuantity(1776251., 'Msun')
+    AnaTotStarMass = YTQuantity(1776701.3990607238, 'Msun')
     yield (assert_almost_equal,
-           ad[('stars', 'particle_mass')].sum().in_units('Msun'), AnaTotStarMass)
+           ad[('stars', 'particle_mass')].sum().in_units('Msun'),
+           AnaTotStarMass)
 
-    AnaTotStarMassInitial = YTQuantity(2422854., 'Msun')
+    AnaTotStarMassInitial = YTQuantity(2423468.2801332865, 'Msun')
     yield (assert_almost_equal,
            ad[('stars', 'particle_mass_initial')].sum().in_units('Msun'),
            AnaTotStarMassInitial)
 
-    AnaTotGasMass = YTQuantity(1.781994e+13, 'Msun')
+    AnaTotGasMass = YTQuantity(1.7826982029216785e+13, 'Msun')
     yield (assert_almost_equal, ad[('gas', 'cell_mass')].sum().in_units('Msun'),
            AnaTotGasMass)
 
-    AnaTotTemp = YTQuantity(1.5019e11, 'K')  # just leaves
+    AnaTotTemp = YTQuantity(150219844793.39072, 'K')  # just leaves
     yield assert_equal, ad[('gas', 'temperature')].sum(), AnaTotTemp
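
The reference values above are `YTQuantity` instances, so the comparisons stay unit-aware and conversions go through `in_units`. A short sketch using the box size from this test:

    from yt.units.yt_array import YTQuantity

    box = YTQuantity(7.1442196564, 'Mpc')
    volume = box**3                   # units compose to Mpc**3
    print(volume.in_units('kpc**3'))  # explicit conversion on demand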
 
 


https://bitbucket.org/yt_analysis/yt/commits/6d69c5574ca5/
Changeset:   6d69c5574ca5
Branch:      yt
User:        ngoldbaum
Date:        2015-09-17 00:55:32+00:00
Summary:     Adding a comment explaining that one of the answers for art is wrong
Affected #:  1 file

diff -r dd8d2317489904eb14b24cb63d7e8cfd0e32541f -r 6d69c5574ca5d956a04bb7c92e3a57cb7b7fdfb2 yt/frontends/art/tests/test_outputs.py
--- a/yt/frontends/art/tests/test_outputs.py
+++ b/yt/frontends/art/tests/test_outputs.py
@@ -58,6 +58,10 @@
     AnaNStars = 6255
     yield assert_equal, ad[('stars', 'particle_type')].size, AnaNStars
     yield assert_equal, ad[('specie4', 'particle_type')].size, AnaNStars
+
+    # The *real* answer is 2833405, but yt misses one particle since it lives
+    # on a domain boundary. See issue 814. When that is fixed, this test
+    # will need to be updated.

     AnaNDM = 2833404
     yield assert_equal, ad[('darkmatter', 'particle_type')].size, AnaNDM
     yield assert_equal, (ad[('specie0', 'particle_type')].size +


https://bitbucket.org/yt_analysis/yt/commits/924044fa9386/
Changeset:   924044fa9386
Branch:      yt
User:        ngoldbaum
Date:        2015-09-18 19:27:59+00:00
Summary:     Linting the artio frontend
Affected #:  4 files

diff -r 6d69c5574ca5d956a04bb7c92e3a57cb7b7fdfb2 -r 924044fa93863868f92315e933cdb35802b081c9 yt/frontends/artio/data_structures.py
--- a/yt/frontends/artio/data_structures.py
+++ b/yt/frontends/artio/data_structures.py
@@ -13,26 +13,27 @@
 #
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
+
 import numpy as np
+import os
 import stat
 import weakref
-from yt.extern.six.moves import cStringIO
 
-from .definitions import ARTIOconstants
-from ._artio_caller import \
-    artio_is_valid, artio_fileset, ARTIOOctreeContainer, \
-    ARTIORootMeshContainer, ARTIOSFCRangeHandler
-from . import _artio_caller
-from yt.utilities.definitions import \
-    mpc_conversion, sec_conversion
-from .fields import \
+from collections import defaultdict
+
+from yt.frontends.artio._artio_caller import \
+    artio_is_valid, \
+    artio_fileset, \
+    ARTIOSFCRangeHandler
+from yt.frontends.artio import _artio_caller
+from yt.frontends.artio.fields import \
     ARTIOFieldInfo
-from yt.fields.particle_fields import \
-    standard_particle_fields
 
-from yt.funcs import *
+from yt.funcs import \
+    mylog
 from yt.geometry.geometry_handler import \
-    Index, YTDataChunk
+    Index, \
+    YTDataChunk
 import yt.geometry.particle_deposit as particle_deposit
 from yt.data_objects.static_output import \
     Dataset
@@ -40,9 +41,8 @@
     OctreeSubset
 from yt.data_objects.data_containers import \
     YTFieldData
-
-from yt.fields.field_info_container import \
-    FieldInfoContainer, NullFunc
+from yt.utilities.exceptions import \
+    YTParticleDepositionNotImplemented
 
 class ARTIOOctreeSubset(OctreeSubset):
     _domain_offset = 0
@@ -178,7 +178,7 @@
         """
         Returns (in code units) the smallest cell size in the simulation.
         """
-        return  1.0/(2**self.max_level)
+        return 1.0/(2**self.max_level)
 
     def convert(self, unit):
         return self.dataset.conversion_factors[unit]
@@ -345,7 +345,6 @@
         # hard-coded -- not provided by headers
         self.dimensionality = 3
         self.refine_by = 2
-        print(self.parameters)
         self.parameters["HydroMethod"] = 'artio'
         self.parameters["Time"] = 1.  # default unit is 1...
 

diff -r 6d69c5574ca5d956a04bb7c92e3a57cb7b7fdfb2 -r 924044fa93863868f92315e933cdb35802b081c9 yt/frontends/artio/fields.py
--- a/yt/frontends/artio/fields.py
+++ b/yt/frontends/artio/fields.py
@@ -14,9 +14,6 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import numpy as np
-
-from yt.funcs import mylog
 from yt.fields.field_info_container import \
     FieldInfoContainer
 from yt.fields.field_detector import \
@@ -25,8 +22,6 @@
     YTArray
 
 from yt.utilities.physical_constants import \
-    mh, \
-    mass_sun_cgs, \
     boltzmann_constant_cgs, \
     amu_cgs
 

diff -r 6d69c5574ca5d956a04bb7c92e3a57cb7b7fdfb2 -r 924044fa93863868f92315e933cdb35802b081c9 yt/frontends/artio/setup.py
--- a/yt/frontends/artio/setup.py
+++ b/yt/frontends/artio/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 import glob
 
 

diff -r 6d69c5574ca5d956a04bb7c92e3a57cb7b7fdfb2 -r 924044fa93863868f92315e933cdb35802b081c9 yt/frontends/artio/tests/test_outputs.py
--- a/yt/frontends/artio/tests/test_outputs.py
+++ b/yt/frontends/artio/tests/test_outputs.py
@@ -14,7 +14,10 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from yt.testing import *
+from yt.testing import \
+    assert_equal, \
+    requires_file, \
+    units_override_check
 from yt.utilities.answer_testing.framework import \
     requires_ds, \
     data_dir_load, \
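
A recurring change in these linting commits replaces wildcard imports such as `from yt.funcs import *` with explicit, one-name-per-line imports, which keeps the module namespace auditable by pyflakes. The style adopted here:

    # before: names arrive implicitly and a linter cannot tell
    # which of them the module actually uses
    # from yt.funcs import *

    # after: each imported name is explicit
    from yt.funcs import \
        mylog

    mylog.debug("only the names listed above enter the namespace")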


https://bitbucket.org/yt_analysis/yt/commits/38ea660ad2fa/
Changeset:   38ea660ad2fa
Branch:      yt
User:        ngoldbaum
Date:        2015-09-18 19:45:17+00:00
Summary:     Linting the athena frontend
Affected #:  5 files

diff -r 924044fa93863868f92315e933cdb35802b081c9 -r 38ea660ad2fabe9625c8b329ef40b7bfa9ed2fb0 yt/frontends/athena/data_structures.py
--- a/yt/frontends/athena/data_structures.py
+++ b/yt/frontends/athena/data_structures.py
@@ -13,25 +13,27 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import h5py
 import numpy as np
+import os
 import weakref
-import glob #ST 9/12
-from yt.funcs import *
+import glob
+
+from yt.funcs import \
+    mylog, \
+    ensure_tuple
 from yt.data_objects.grid_patch import \
-           AMRGridPatch
+    AMRGridPatch
 from yt.geometry.grid_geometry_handler import \
     GridIndex
 from yt.data_objects.static_output import \
-           Dataset
+    Dataset
 from yt.utilities.lib.misc_utilities import \
     get_box_grids_level
 from yt.geometry.geometry_handler import \
     YTDataChunk
-from yt.extern.six import PY2, PY3
+from yt.extern.six import PY2
 
 from .fields import AthenaFieldInfo
-from yt.units.yt_array import YTQuantity
 from yt.utilities.decompose import \
     decompose_array, get_psize
 
@@ -75,9 +77,9 @@
 
 class AthenaGrid(AMRGridPatch):
     _id_offset = 0
+
     def __init__(self, id, index, level, start, dimensions,
                  file_offset, read_dims):
-        df = index.dataset.filename[4:-4]
         gname = index.grid_filenames[id]
         AMRGridPatch.__init__(self, id, filename = gname,
                               index = index)
@@ -224,7 +226,6 @@
         grid = {}
         grid['read_field'] = None
         grid['read_type'] = None
-        table_read=False
         line = f.readline()
         while grid['read_field'] is None:
             parse_line(line, grid)
@@ -270,7 +271,6 @@
             gridread = {}
             gridread['read_field'] = None
             gridread['read_type'] = None
-            table_read=False
             line = f.readline()
             while gridread['read_field'] is None:
                 parse_line(line, gridread)
@@ -421,8 +421,6 @@
                                 self.grid_levels[i] + 1,
                                 self.grid_left_edge, self.grid_right_edge,
                                 self.grid_levels, mask)
-                #ids = np.where(mask.astype("bool")) # where is a tuple
-                #mask[ids] = True
             grid.Children = [g for g in self.grids[mask.astype("bool")] if g.Level == grid.Level + 1]
         mylog.debug("Second pass; identifying parents")
         for i, grid in enumerate(self.grids): # Second pass
@@ -436,7 +434,6 @@
         return [g for g in self.grids[mask] if g.Level == grid.Level + 1]
 
     def _chunk_io(self, dobj, cache = True, local_only = False):
-        gfiles = defaultdict(list)
         gobjs = getattr(dobj._current_chunk, "objs", dobj._chunk_info)
         for subset in gobjs:
             yield YTDataChunk(dobj, "io", [subset],

diff -r 924044fa93863868f92315e933cdb35802b081c9 -r 38ea660ad2fabe9625c8b329ef40b7bfa9ed2fb0 yt/frontends/athena/fields.py
--- a/yt/frontends/athena/fields.py
+++ b/yt/frontends/athena/fields.py
@@ -13,12 +13,10 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import numpy as np
 from yt.fields.field_info_container import \
     FieldInfoContainer
 from yt.utilities.physical_constants import \
-    kboltz,mh
-from yt.units.yt_array import YTArray
+    kboltz, mh
 
 b_units = "code_magnetic"
 pres_units = "code_mass/(code_length*code_time**2)"

diff -r 924044fa93863868f92315e933cdb35802b081c9 -r 38ea660ad2fabe9625c8b329ef40b7bfa9ed2fb0 yt/frontends/athena/io.py
--- a/yt/frontends/athena/io.py
+++ b/yt/frontends/athena/io.py
@@ -12,10 +12,11 @@
 #
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
+
 from yt.utilities.io_handler import \
-           BaseIOHandler
+    BaseIOHandler
 import numpy as np
-from yt.funcs import mylog, defaultdict
+from yt.funcs import mylog
 from .data_structures import chk23
 
 float_size = {"float":np.dtype(">f4").itemsize,

diff -r 924044fa93863868f92315e933cdb35802b081c9 -r 38ea660ad2fabe9625c8b329ef40b7bfa9ed2fb0 yt/frontends/athena/setup.py
--- a/yt/frontends/athena/setup.py
+++ b/yt/frontends/athena/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 
 
 def configuration(parent_package='', top_path=None):

diff -r 924044fa93863868f92315e933cdb35802b081c9 -r 38ea660ad2fabe9625c8b329ef40b7bfa9ed2fb0 yt/frontends/athena/tests/test_outputs.py
--- a/yt/frontends/athena/tests/test_outputs.py
+++ b/yt/frontends/athena/tests/test_outputs.py
@@ -13,11 +13,13 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from yt.testing import *
+from yt.testing import \
+    assert_equal, \
+    requires_file, \
+    assert_allclose_units
 from yt.utilities.answer_testing.framework import \
     requires_ds, \
     small_patch_amr, \
-    big_patch_amr, \
     data_dir_load
 from yt.frontends.athena.api import AthenaDataset
 from yt.config import ytcfg
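
Athena dumps are big-endian binaries, so the reader pins byte order in the dtype string rather than relying on the host's native order. A short illustration of the `float_size` lookup seen in io.py above (the "double" key is assumed for symmetry):

    import numpy as np

    # '>' forces big-endian interpretation regardless of host architecture
    float_size = {"float": np.dtype(">f4").itemsize,    # 4 bytes
                  "double": np.dtype(">f8").itemsize}   # 8 bytes (assumed key)
    print(float_size)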


https://bitbucket.org/yt_analysis/yt/commits/af1d10e72e29/
Changeset:   af1d10e72e29
Branch:      yt
User:        ngoldbaum
Date:        2015-09-18 19:45:29+00:00
Summary:     Linting the boxlib frontend
Affected #:  7 files

diff -r 38ea660ad2fabe9625c8b329ef40b7bfa9ed2fb0 -r af1d10e72e29454e07f2a6168712516ef6c9410f yt/frontends/boxlib/data_structures.py
--- a/yt/frontends/boxlib/data_structures.py
+++ b/yt/frontends/boxlib/data_structures.py
@@ -13,6 +13,7 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
+import inspect
 import os
 import re
 
@@ -20,7 +21,9 @@
 
 import numpy as np
 
-from yt.funcs import *
+from yt.funcs import \
+    mylog, \
+    ensure_tuple
 from yt.data_objects.grid_patch import AMRGridPatch
 from yt.extern.six.moves import zip as izip
 from yt.geometry.grid_geometry_handler import GridIndex
@@ -188,7 +191,7 @@
             vals = next(header_file).split()
             lev, ngrids = int(vals[0]), int(vals[1])
             assert(lev == level)
-            nsteps = int(next(header_file))
+            nsteps = int(next(header_file))  # NOQA
             for gi in range(ngrids):
                 xlo, xhi = [float(v) for v in next(header_file).split()]
                 if self.dimensionality > 1:
@@ -211,7 +214,7 @@
             next(level_header_file)
             next(level_header_file)
             # Now we get the number of components
-            ncomp_this_file = int(next(level_header_file))
+            ncomp_this_file = int(next(level_header_file))  # NOQA
             # Skip the next line, which contains the number of ghost zones
             next(level_header_file)
             # To decipher this next line, we expect something like:
@@ -889,15 +892,16 @@
                   ['particle_velocity_%s' % ax for ax in 'xyz']:
             self.field_list.append(("io", fn))
         header = open(os.path.join(self.ds.output_dir, "DM", "Header"))
-        version = header.readline()
-        ndim = header.readline()
-        nfields = header.readline()
-        ntotalpart = int(header.readline())
-        dummy = header.readline() # nextid
-        maxlevel = int(header.readline()) # max level
+        version = header.readline()  # NOQA
+        ndim = header.readline()  # NOQA
+        nfields = header.readline()  # NOQA
+        ntotalpart = int(header.readline())  # NOQA
+        nextid = header.readline()  # NOQA
+        maxlevel = int(header.readline())  # NOQA
 
         # Skip over how many grids on each level; this is degenerate
-        for i in range(maxlevel + 1): dummy = header.readline()
+        for i in range(maxlevel + 1):
+            header.readline()
 
         grid_info = np.fromiter((int(i) for line in header.readlines()
                                  for i in line.split()),
@@ -972,8 +976,9 @@
 
 def _guess_pcast(vals):
     # Now we guess some things about the parameter and its type
-    v = vals.split()[0] # Just in case there are multiple; we'll go
-                        # back afterward to using vals.
+    # Just in case there are multiple; we'll go
+    # back afterward to using vals.
+    v = vals.split()[0]
     try:
         float(v.upper().replace("D", "E"))
     except:
@@ -986,6 +991,7 @@
             pcast = float
         else:
             pcast = int
-    vals = [pcast(v) for v in vals.split()]
-    if len(vals) == 1: vals = vals[0]
+    vals = [pcast(value) for value in vals.split()]
+    if len(vals) == 1:
+        vals = vals[0]
     return vals
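
`_guess_pcast` has to accept Fortran-style numeric tokens, where 'D' marks a double-precision exponent that Python's `float()` does not understand; hence the `replace("D", "E")` before the trial conversion. For example:

    # Fortran writes 1.5d3 for 1.5e3; normalizing the exponent marker
    # first lets float() decide whether the token is numeric
    print(float("1.5D3".upper().replace("D", "E")))  # 1500.0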

diff -r 38ea660ad2fabe9625c8b329ef40b7bfa9ed2fb0 -r af1d10e72e29454e07f2a6168712516ef6c9410f yt/frontends/boxlib/definitions.py
--- a/yt/frontends/boxlib/definitions.py
+++ b/yt/frontends/boxlib/definitions.py
@@ -12,7 +12,7 @@
 #
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
-from yt.funcs import *
+
 
 # TODO: get rid of enzo parameters we do not need
 parameterDict = {"CosmologyCurrentRedshift": float,

diff -r 38ea660ad2fabe9625c8b329ef40b7bfa9ed2fb0 -r af1d10e72e29454e07f2a6168712516ef6c9410f yt/frontends/boxlib/fields.py
--- a/yt/frontends/boxlib/fields.py
+++ b/yt/frontends/boxlib/fields.py
@@ -184,9 +184,10 @@
                 if field[3] in string.ascii_letters:
                     element, weight = field[2:4], field[4:-1]
                 else:
-                    element, weight = field[2:3], field[3:-1]
+                    element, weight = field[2:3], field[3:-1]  # NOQA
 
-                # Here we can, later, add number density.
+                # Here we can, later, add number density
+                # right now element and weight inferred above are unused
 
 
 class MaestroFieldInfo(FieldInfoContainer):
@@ -280,10 +281,12 @@
                     if field[3] in string.ascii_letters:
                         element, weight = field[2:4], field[4:-1]
                     else:
-                        element, weight = field[2:3], field[3:-1]
+                        element, weight = field[2:3], field[3:-1]  # NOQA
                     weight = int(weight)
 
-                # Here we can, later, add number density.
+                # Here we can, later, add number density using 'element' and
+                # 'weight' inferred above
+
             elif field.startswith("omegadot("):
                 nice_name, tex_label = _nice_species_name(field)
                 display_name = r'\dot{\omega}\left[%s\right]' % tex_label

diff -r 38ea660ad2fabe9625c8b329ef40b7bfa9ed2fb0 -r af1d10e72e29454e07f2a6168712516ef6c9410f yt/frontends/boxlib/io.py
--- a/yt/frontends/boxlib/io.py
+++ b/yt/frontends/boxlib/io.py
@@ -15,10 +15,8 @@
 
 import os
 import numpy as np
-from yt.utilities.lib.fortran_reader import \
-    read_castro_particles
 from yt.utilities.io_handler import \
-           BaseIOHandler
+    BaseIOHandler
 from yt.funcs import mylog, defaultdict
 from yt.frontends.chombo.io import parse_orion_sinks
 
@@ -156,37 +154,3 @@
                     line = lines[num]
                     particles.append(read(line, field))
             return np.array(particles)
-
-
-class IOHandlerCastro(IOHandlerBoxlib):
-    _dataset_type = "castro_native"
-
-    def _read_particle_field(self, grid, field):
-        offset = grid._particle_offset
-        filen = os.path.expanduser(grid.particle_filename)
-        off = grid._particle_offset
-        tr = np.zeros(grid.NumberOfParticles, dtype='float64')
-        read_castro_particles(filen, off,
-            castro_particle_field_names.index(field),
-            len(castro_particle_field_names),
-            tr)
-        return tr
-
-nyx_particle_field_names = ['particle_position_%s' % ax for ax in 'xyz'] + \
-                           ['particle_mass'] +  \
-                           ['particle_velocity_%s' % ax for ax in 'xyz']
-
-class IOHandlerNyx(IOHandlerBoxlib):
-    _dataset_type = "nyx_native"
-
-    def _read_particle_coords(self, chunks, ptf):
-        offset = grid._particle_offset
-        filen = os.path.expanduser(grid.particle_filename)
-        off = grid._particle_offset
-        tr = np.zeros(grid.NumberOfParticles, dtype='float64')
-        read_castro_particles(filen, off,
-                            nyx_particle_field_names.index(field),
-                            len(nyx_particle_field_names), tr)
-
-    def _read_particle_fields(self, chunks, ptf, fields):
-        pass

diff -r 38ea660ad2fabe9625c8b329ef40b7bfa9ed2fb0 -r af1d10e72e29454e07f2a6168712516ef6c9410f yt/frontends/boxlib/setup.py
--- a/yt/frontends/boxlib/setup.py
+++ b/yt/frontends/boxlib/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 
 
 def configuration(parent_package='', top_path=None):

diff -r 38ea660ad2fabe9625c8b329ef40b7bfa9ed2fb0 -r af1d10e72e29454e07f2a6168712516ef6c9410f yt/frontends/boxlib/tests/test_orion.py
--- a/yt/frontends/boxlib/tests/test_orion.py
+++ b/yt/frontends/boxlib/tests/test_orion.py
@@ -13,11 +13,13 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from yt.testing import *
+from yt.testing import \
+    assert_equal, \
+    requires_file, \
+    units_override_check
 from yt.utilities.answer_testing.framework import \
     requires_ds, \
     small_patch_amr, \
-    big_patch_amr, \
     data_dir_load
 from yt.frontends.boxlib.api import OrionDataset
 

diff -r 38ea660ad2fabe9625c8b329ef40b7bfa9ed2fb0 -r af1d10e72e29454e07f2a6168712516ef6c9410f yt/utilities/lib/fortran_reader.pyx
--- a/yt/utilities/lib/fortran_reader.pyx
+++ b/yt/utilities/lib/fortran_reader.pyx
@@ -331,21 +331,3 @@
                 for z in range(2):
                     child_mask[lex+x,ley+y,lez+z] = art_child_masks[ioct,x,y,z]
 
-@cython.cdivision(True)
-@cython.boundscheck(False)
-@cython.wraparound(False)
-def read_castro_particles(char *fn, int offset, int fieldindex, int nfields,
-                          np.ndarray[np.float64_t, ndim=1] tofill):
-    cdef int nparticles = tofill.shape[0]
-    cdef int i
-    cdef startskip = fieldindex*8
-    cdef endskip = (nfields - 1 - fieldindex)*8
-    cdef np.float64_t temp
-    cdef FILE *f = fopen(fn, 'r')
-    fseek(f, offset + 5*nparticles*4, 0) # 4 bytes
-    for i in range(nparticles):
-        fseek(f, startskip, SEEK_CUR)
-        fread(&temp, 8, 1, f)
-        tofill[i] = temp
-        fseek(f, endskip, SEEK_CUR)
-    fclose(f)


https://bitbucket.org/yt_analysis/yt/commits/6fec1c84eb62/
Changeset:   6fec1c84eb62
Branch:      yt
User:        ngoldbaum
Date:        2015-09-18 20:31:50+00:00
Summary:     Linting chombo
Affected #:  3 files

diff -r af1d10e72e29454e07f2a6168712516ef6c9410f -r 6fec1c84eb622642ce5452ce3f09d400a87ea61e yt/frontends/chombo/data_structures.py
--- a/yt/frontends/chombo/data_structures.py
+++ b/yt/frontends/chombo/data_structures.py
@@ -19,10 +19,11 @@
 import weakref
 import numpy as np
 
+from six import string_types
 from stat import \
     ST_CTIME
 
-from yt.funcs import *
+from yt.funcs import mylog
 from yt.data_objects.grid_patch import \
     AMRGridPatch
 from yt.extern import six
@@ -30,8 +31,6 @@
     GridIndex
 from yt.data_objects.static_output import \
     Dataset
-from yt.utilities.definitions import \
-    mpc_conversion, sec_conversion
 from yt.utilities.file_handler import \
     HDF5FileHandler
 from yt.utilities.parallel_tools.parallel_analysis_interface import \
@@ -108,9 +107,10 @@
         self.directory = ds.fullpath
         self._handle = ds._handle
 
-        tr = self._handle['Chombo_global'].attrs.get("testReal", "float32")
+        self._levels = [
+            key for key in self._handle.keys() if key.startswith('level')
+        ]
 
-        self._levels = [key for key in self._handle.keys() if key.startswith('level')]
         GridIndex.__init__(self, ds, dataset_type)
 
         self._read_particles()
@@ -650,7 +650,7 @@
         pluto_ini_file_exists = False
         orion2_ini_file_exists = False
 
-        if type(args[0]) == type(""):
+        if isinstance(args[0], string_types):
             dir_name = os.path.dirname(os.path.abspath(args[0]))
             pluto_ini_filename = os.path.join(dir_name, "pluto.ini")
             orion2_ini_filename = os.path.join(dir_name, "orion2.ini")

diff -r af1d10e72e29454e07f2a6168712516ef6c9410f -r 6fec1c84eb622642ce5452ce3f09d400a87ea61e yt/frontends/chombo/io.py
--- a/yt/frontends/chombo/io.py
+++ b/yt/frontends/chombo/io.py
@@ -182,9 +182,9 @@
         offsets = np.append(np.array([0]), offsets)
         offsets = np.array(offsets, dtype=np.int64)
 
-        # convert between the global grid id and the id on this level            
+        # convert between the global grid id and the id on this level
         grid_levels = np.array([g.Level for g in self.ds.index.grids])
-        grid_ids = np.array([g.id    for g in self.ds.index.grids])
+        grid_ids = np.array([g.id for g in self.ds.index.grids])
         grid_level_offset = grid_ids[np.where(grid_levels == grid.Level)[0][0]]
         lo = grid.id - grid_level_offset
         hi = lo + 1

diff -r af1d10e72e29454e07f2a6168712516ef6c9410f -r 6fec1c84eb622642ce5452ce3f09d400a87ea61e yt/frontends/chombo/setup.py
--- a/yt/frontends/chombo/setup.py
+++ b/yt/frontends/chombo/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 
 
 def configuration(parent_package='', top_path=None):
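
The chombo change above swaps `type(args[0]) == type("")` for `isinstance(args[0], string_types)`: under Python 2 the old check rejects `unicode` paths, while six's `string_types` covers `str` and `basestring` alike. A minimal sketch:

    from six import string_types

    def looks_like_path(arg):
        # matches str on Python 3, and both str and unicode on Python 2
        return isinstance(arg, string_types)

    print(looks_like_path("pluto.ini"))  # True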


https://bitbucket.org/yt_analysis/yt/commits/602b07224bd0/
Changeset:   602b07224bd0
Branch:      yt
User:        ngoldbaum
Date:        2015-09-18 20:34:06+00:00
Summary:     Linting eagle
Affected #:  4 files

diff -r 6fec1c84eb622642ce5452ce3f09d400a87ea61e -r 602b07224bd0bc56006c86b754da23a15ae9c6f6 yt/frontends/eagle/data_structures.py
--- a/yt/frontends/eagle/data_structures.py
+++ b/yt/frontends/eagle/data_structures.py
@@ -17,7 +17,6 @@
 
 import h5py
 import numpy as np
-import types
 
 from yt.frontends.gadget.data_structures import \
     GadgetHDF5Dataset

diff -r 6fec1c84eb622642ce5452ce3f09d400a87ea61e -r 602b07224bd0bc56006c86b754da23a15ae9c6f6 yt/frontends/eagle/fields.py
--- a/yt/frontends/eagle/fields.py
+++ b/yt/frontends/eagle/fields.py
@@ -15,19 +15,12 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import os
-import numpy as np
-
-from yt.funcs import *
-
-from yt.fields.field_info_container import \
-    FieldInfoContainer
 from yt.frontends.owls.fields import \
     OWLSFieldInfo
-import yt.frontends.owls.owls_ion_tables as oit
 from yt.units.yt_array import YTQuantity
+from yt.utilities.periodic_table import periodic_table
 
-from .definitions import \
+from yt.frontends.eagle.definitions import \
     eaglenetwork_ion_lookup
 
 class EagleNetworkFieldInfo(OWLSFieldInfo):

diff -r 6fec1c84eb622642ce5452ce3f09d400a87ea61e -r 602b07224bd0bc56006c86b754da23a15ae9c6f6 yt/frontends/eagle/setup.py
--- a/yt/frontends/eagle/setup.py
+++ b/yt/frontends/eagle/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 
 
 def configuration(parent_package='', top_path=None):

diff -r 6fec1c84eb622642ce5452ce3f09d400a87ea61e -r 602b07224bd0bc56006c86b754da23a15ae9c6f6 yt/frontends/eagle/tests/test_outputs.py
--- a/yt/frontends/eagle/tests/test_outputs.py
+++ b/yt/frontends/eagle/tests/test_outputs.py
@@ -17,7 +17,6 @@
 from yt.testing import \
     requires_file
 from yt.utilities.answer_testing.framework import \
-    requires_ds, \
     data_dir_load
 from yt.frontends.eagle.api import EagleDataset
 


https://bitbucket.org/yt_analysis/yt/commits/886b237004db/
Changeset:   886b237004db
Branch:      yt
User:        ngoldbaum
Date:        2015-09-18 20:55:25+00:00
Summary:     Linting the fits frontend
Affected #:  5 files

diff -r 602b07224bd0bc56006c86b754da23a15ae9c6f6 -r 886b237004db1bd240b1dc128a226caf917ac3dd yt/frontends/fits/data_structures.py
--- a/yt/frontends/fits/data_structures.py
+++ b/yt/frontends/fits/data_structures.py
@@ -11,17 +11,22 @@
 #-----------------------------------------------------------------------------
 
 import stat
-import types
 import numpy as np
 import numpy.core.defchararray as np_char
+import os
+import re
+import time
+import uuid
 import weakref
 import warnings
-import re
-import uuid
 
-from yt.extern.six import iteritems
+
+from collections import defaultdict
+
 from yt.config import ytcfg
-from yt.funcs import *
+from yt.funcs import \
+    mylog, \
+    ensure_list
 from yt.data_objects.grid_patch import \
     AMRGridPatch
 from yt.geometry.grid_geometry_handler import \
@@ -42,7 +47,6 @@
     prefixable_units, \
     unit_prefixes
 from yt.units import dimensions
-from yt.units.yt_array import YTQuantity
 from yt.utilities.on_demand_imports import _astropy, NotAModule
 
 
@@ -137,10 +141,10 @@
                 mylog.info("Adding field %s to the list of fields." % (fname))
                 self.field_list.append(("io",fname))
                 if k in ["x","y"]:
-                    unit = "code_length"
+                    field_unit = "code_length"
                 else:
-                    unit = v
-                self.dataset.field_units[("io",fname)] = unit
+                    field_unit = v
+                self.dataset.field_units[("io",fname)] = field_unit
             return
         self._axis_map = {}
         self._file_map = {}
@@ -149,7 +153,9 @@
         dup_field_index = {}
         # Since FITS header keywords are case-insensitive, we only pick a subset of
         # prefixes, ones that we expect to end up in headers.
-        known_units = dict([(unit.lower(),unit) for unit in self.ds.unit_registry.lut])
+        known_units = dict(
+            [(unit.lower(), unit) for unit in self.ds.unit_registry.lut]
+        )
         for unit in list(known_units.values()):
             if unit in prefixable_units:
                 for p in ["n","u","m","c","k"]:
@@ -211,8 +217,7 @@
         self.num_grids = self.ds.parameters["nprocs"]
 
     def _parse_index(self):
-        f = self._handle # shortcut
-        ds = self.dataset # shortcut
+        ds = self.dataset
 
         # If nprocs > 1, decompose the domain into virtual grids
         if self.num_grids > 1:
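
The known_units table above maps lower-cased spellings back to canonical
unit names, since FITS header keywords are case-insensitive. A minimal
sketch with a stand-in list (the dict comprehension below is equivalent
to the dict([...]) form used in the diff):

    lut = ["Jy", "K", "cm"]  # stand-in for self.ds.unit_registry.lut
    known_units = {unit.lower(): unit for unit in lut}

    assert known_units["jy"] == "Jy"  # header spelling -> canonical unit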

diff -r 602b07224bd0bc56006c86b754da23a15ae9c6f6 -r 886b237004db1bd240b1dc128a226caf917ac3dd yt/frontends/fits/fields.py
--- a/yt/frontends/fits/fields.py
+++ b/yt/frontends/fits/fields.py
@@ -10,8 +10,6 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import numpy as np
-from yt.utilities.exceptions import *
 from yt.fields.field_info_container import \
     FieldInfoContainer
 

diff -r 602b07224bd0bc56006c86b754da23a15ae9c6f6 -r 886b237004db1bd240b1dc128a226caf917ac3dd yt/frontends/fits/misc.py
--- a/yt/frontends/fits/misc.py
+++ b/yt/frontends/fits/misc.py
@@ -14,8 +14,8 @@
 import base64
 from yt.extern.six import PY3
 from yt.fields.derived_field import ValidateSpatial
+from yt.funcs import mylog
 from yt.utilities.on_demand_imports import _astropy
-from yt.funcs import mylog, get_image_suffix
 from yt.visualization._mpl_imports import FigureCanvasAgg
 from yt.units.yt_array import YTQuantity, YTArray
 from yt.utilities.fits_image import FITSImageData
@@ -23,7 +23,7 @@
     from io import BytesIO as IO
 else:
     from yt.extern.six.moves import StringIO as IO
-    
+
 import os
 
 def _make_counts(emin, emax):

diff -r 602b07224bd0bc56006c86b754da23a15ae9c6f6 -r 886b237004db1bd240b1dc128a226caf917ac3dd yt/frontends/fits/setup.py
--- a/yt/frontends/fits/setup.py
+++ b/yt/frontends/fits/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 
 
 def configuration(parent_package='', top_path=None):

diff -r 602b07224bd0bc56006c86b754da23a15ae9c6f6 -r 886b237004db1bd240b1dc128a226caf917ac3dd yt/frontends/fits/tests/test_outputs.py
--- a/yt/frontends/fits/tests/test_outputs.py
+++ b/yt/frontends/fits/tests/test_outputs.py
@@ -13,7 +13,10 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from yt.testing import *
+from yt.testing import \
+    assert_equal, \
+    requires_file, \
+    units_override_check
 from yt.utilities.answer_testing.framework import \
     requires_ds, \
     small_patch_amr, \


https://bitbucket.org/yt_analysis/yt/commits/10c8b79be546/
Changeset:   10c8b79be546
Branch:      yt
User:        ngoldbaum
Date:        2015-09-18 21:49:17+00:00
Summary:     Linting FLASH
Affected #:  5 files

diff -r 886b237004db1bd240b1dc128a226caf917ac3dd -r 10c8b79be546851598d9ebe64f42f0dabac35fb7 yt/frontends/flash/data_structures.py
--- a/yt/frontends/flash/data_structures.py
+++ b/yt/frontends/flash/data_structures.py
@@ -13,27 +13,20 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import h5py
+import os
 import stat
 import numpy as np
 import weakref
 
-from yt.config import ytcfg
-from yt.funcs import *
+from yt.funcs import mylog
 from yt.data_objects.grid_patch import \
     AMRGridPatch
 from yt.geometry.grid_geometry_handler import \
     GridIndex
-from yt.geometry.geometry_handler import \
-    YTDataChunk
 from yt.data_objects.static_output import \
     Dataset
-from yt.utilities.definitions import \
-    mpc_conversion, sec_conversion
 from yt.utilities.file_handler import \
     HDF5FileHandler
-from yt.utilities.io_handler import \
-    io_registry
 from yt.utilities.physical_constants import cm_per_mpc
 from .fields import FLASHFieldInfo
 
@@ -71,7 +64,6 @@
         pass
 
     def _detect_output_fields(self):
-        ncomp = self._handle["/unknown names"].shape[0]
         self.field_list = [("flash", s.decode("ascii","ignore"))
                            for s in self._handle["/unknown names"][:].flat]
         if ("/particle names" in self._particle_handle):
@@ -160,8 +152,7 @@
 
     def _populate_grid_objects(self):
         # We only handle 3D data, so offset is 7 (nfaces+1)
-        
-        offset = 7
+
         ii = np.argsort(self.grid_levels.flat)
         gid = self._handle["/gid"][:]
         first_ind = -(self.dataset.refine_by**self.dataset.dimensionality)
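
The decode("ascii", "ignore") in _detect_output_fields above is needed
because h5py returns fixed-width byte strings on Python 3; "ignore"
silently drops undecodable bytes instead of raising. A sketch with
literal bytes standing in for the "/unknown names" dataset:

    names = [b"dens", b"temp", b"velx"]  # stand-in for the HDF5 dataset
    field_list = [("flash", s.decode("ascii", "ignore")) for s in names]

    assert field_list[0] == ("flash", "dens")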

diff -r 886b237004db1bd240b1dc128a226caf917ac3dd -r 10c8b79be546851598d9ebe64f42f0dabac35fb7 yt/frontends/flash/fields.py
--- a/yt/frontends/flash/fields.py
+++ b/yt/frontends/flash/fields.py
@@ -13,13 +13,10 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import numpy as np
 from yt.fields.field_info_container import \
     FieldInfoContainer
 from yt.utilities.physical_constants import \
-    kboltz, mh, Na
-from yt.units.yt_array import \
-    YTArray
+    Na
 
 # Common fields in FLASH: (Thanks to John ZuHone for this list)
 #

diff -r 886b237004db1bd240b1dc128a226caf917ac3dd -r 10c8b79be546851598d9ebe64f42f0dabac35fb7 yt/frontends/flash/io.py
--- a/yt/frontends/flash/io.py
+++ b/yt/frontends/flash/io.py
@@ -14,8 +14,6 @@
 #-----------------------------------------------------------------------------
 
 import numpy as np
-import h5py
-from yt.utilities.math_utils import prec_accum
 from itertools import groupby
 
 from yt.utilities.io_handler import \
@@ -110,7 +108,6 @@
         rv = {}
         for field in fields:
             ftype, fname = field
-            dt = f["/%s" % fname].dtype
             # Always use *native* 64-bit float.
             rv[field] = np.empty(size, dtype="=f8")
         ng = sum(len(c.objs) for c in chunks)
@@ -149,7 +146,6 @@
         for field in fluid_fields:
             ftype, fname = field
             ds = f["/%s" % fname]
-            ind = 0
             for gs in grid_sequences(chunk.objs):
                 start = gs[0].id - gs[0]._id_offset
                 end = gs[-1].id - gs[-1]._id_offset + 1
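
The dtype="=f8" spelling kept in the context above pins byte order
explicitly: NumPy dtype strings take an optional prefix ("=" native,
"<" little-endian, ">" big-endian), so "=f8" is a native-endian 64-bit
float regardless of what the file on disk uses. A sketch:

    import numpy as np

    native = np.empty(4, dtype="=f8")  # native-endian float64
    big = np.empty(4, dtype=">f8")     # big-endian float64

    assert native.dtype.itemsize == big.dtype.itemsize == 8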

diff -r 886b237004db1bd240b1dc128a226caf917ac3dd -r 10c8b79be546851598d9ebe64f42f0dabac35fb7 yt/frontends/flash/setup.py
--- a/yt/frontends/flash/setup.py
+++ b/yt/frontends/flash/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 
 
 def configuration(parent_package='', top_path=None):

diff -r 886b237004db1bd240b1dc128a226caf917ac3dd -r 10c8b79be546851598d9ebe64f42f0dabac35fb7 yt/frontends/flash/tests/test_outputs.py
--- a/yt/frontends/flash/tests/test_outputs.py
+++ b/yt/frontends/flash/tests/test_outputs.py
@@ -13,11 +13,13 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from yt.testing import *
+from yt.testing import \
+    assert_equal, \
+    requires_file, \
+    units_override_check
 from yt.utilities.answer_testing.framework import \
     requires_ds, \
     small_patch_amr, \
-    big_patch_amr, \
     data_dir_load
 from yt.frontends.flash.api import FLASHDataset
 


https://bitbucket.org/yt_analysis/yt/commits/063b9d2dd690/
Changeset:   063b9d2dd690
Branch:      yt
User:        ngoldbaum
Date:        2015-09-18 21:52:23+00:00
Summary:     Linting gadget
Affected #:  4 files

diff -r 10c8b79be546851598d9ebe64f42f0dabac35fb7 -r 063b9d2dd69013c61f9f69ad5eac994ffcec5b31 yt/frontends/gadget/data_structures.py
--- a/yt/frontends/gadget/data_structures.py
+++ b/yt/frontends/gadget/data_structures.py
@@ -20,7 +20,6 @@
 import stat
 import struct
 import os
-import types
 
 from yt.data_objects.static_output import \
     ParticleFile
@@ -30,8 +29,6 @@
     ParticleIndex
 from yt.utilities.cosmology import \
     Cosmology
-from yt.utilities.definitions import \
-    sec_conversion
 from yt.utilities.fortran_utils import read_record
 from yt.utilities.logger import ytLogger as mylog
 

diff -r 10c8b79be546851598d9ebe64f42f0dabac35fb7 -r 063b9d2dd69013c61f9f69ad5eac994ffcec5b31 yt/frontends/gadget/setup.py
--- a/yt/frontends/gadget/setup.py
+++ b/yt/frontends/gadget/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 
 
 def configuration(parent_package='', top_path=None):

diff -r 10c8b79be546851598d9ebe64f42f0dabac35fb7 -r 063b9d2dd69013c61f9f69ad5eac994ffcec5b31 yt/frontends/gadget/simulation_handling.py
--- a/yt/frontends/gadget/simulation_handling.py
+++ b/yt/frontends/gadget/simulation_handling.py
@@ -26,13 +26,15 @@
 from yt.units.unit_registry import \
     UnitRegistry
 from yt.units.yt_array import \
-    YTArray, YTQuantity
+    YTArray
 from yt.utilities.cosmology import \
     Cosmology
 from yt.utilities.exceptions import \
     InvalidSimulationTimeSeries, \
     MissingParameter, \
     NoStoppingCondition
+from yt.utilities.exceptions import \
+    YTOutputNotIdentified
 from yt.utilities.logger import ytLogger as \
     mylog
 from yt.utilities.parallel_tools.parallel_analysis_interface import \
@@ -328,7 +330,7 @@
             self.final_redshift = 1.0 / self.parameters["TimeMax"] - 1.0
             self.cosmological_simulation = 1
             for a, v in cosmo_attr.items():
-                if not v in self.parameters:
+                if v not in self.parameters:
                     raise MissingParameter(self.parameter_filename, v)
                 setattr(self, a, self.parameters[v])
         else:
@@ -426,7 +428,7 @@
                 self.final_time = self.quan(self.parameters["TimeMax"], "code_time")
             else:
                 self.final_time = None
-            if not "TimeMax" in self.parameters:
+            if "TimeMax" not in self.parameters:
                 raise NoStoppingCondition(self.parameter_filename)
 
     def _find_outputs(self):
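
The "v not in self.parameters" form is the spelling flake8 expects
(pycodestyle E713); "not v in d" compiles to the same membership test
but reads as if it negated the value. A small sketch:

    params = {"TimeMax": 1.0}

    if "TimeBegin" not in params:  # preferred over: not "TimeBegin" in params
        params["TimeBegin"] = 0.0

    assert params["TimeBegin"] == 0.0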

diff -r 10c8b79be546851598d9ebe64f42f0dabac35fb7 -r 063b9d2dd69013c61f9f69ad5eac994ffcec5b31 yt/frontends/gadget/tests/test_outputs.py
--- a/yt/frontends/gadget/tests/test_outputs.py
+++ b/yt/frontends/gadget/tests/test_outputs.py
@@ -14,9 +14,9 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from yt.testing import *
+from yt.testing import \
+    requires_file
 from yt.utilities.answer_testing.framework import \
-    requires_ds, \
     data_dir_load
 from yt.frontends.gadget.api import GadgetHDF5Dataset
 


https://bitbucket.org/yt_analysis/yt/commits/fbebdeac3bc2/
Changeset:   fbebdeac3bc2
Branch:      yt
User:        ngoldbaum
Date:        2015-09-18 22:00:34+00:00
Summary:     Linting gadget_fof
Affected #:  4 files

diff -r 063b9d2dd69013c61f9f69ad5eac994ffcec5b31 -r fbebdeac3bc2460af6d9020f5861dfdbe4807376 yt/frontends/gadget_fof/data_structures.py
--- a/yt/frontends/gadget_fof/data_structures.py
+++ b/yt/frontends/gadget_fof/data_structures.py
@@ -18,19 +18,14 @@
 import h5py
 import numpy as np
 import stat
-import weakref
-import struct
 import glob
-import time
 import os
 
-from .fields import \
+from yt.frontends.gadget_fof.fields import \
     GadgetFOFFieldInfo
 
 from yt.utilities.cosmology import \
     Cosmology
-from yt.utilities.definitions import \
-    mpc_conversion, sec_conversion
 from yt.utilities.exceptions import \
     YTException
 from yt.utilities.logger import ytLogger as \
@@ -42,10 +37,7 @@
     ParticleFile
 from yt.frontends.gadget.data_structures import \
     _fix_unit_ordering
-import yt.utilities.fortran_utils as fpu
-from yt.units.yt_array import \
-    YTArray, \
-    YTQuantity
+
 
 class GadgetFOFParticleIndex(ParticleIndex):
     def __init__(self, ds, dataset_type):

diff -r 063b9d2dd69013c61f9f69ad5eac994ffcec5b31 -r fbebdeac3bc2460af6d9020f5861dfdbe4807376 yt/frontends/gadget_fof/fields.py
--- a/yt/frontends/gadget_fof/fields.py
+++ b/yt/frontends/gadget_fof/fields.py
@@ -14,11 +14,8 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from yt.funcs import mylog
 from yt.fields.field_info_container import \
     FieldInfoContainer
-from yt.units.yt_array import \
-    YTArray
 
 m_units = "code_mass"
 p_units = "code_length"

diff -r 063b9d2dd69013c61f9f69ad5eac994ffcec5b31 -r fbebdeac3bc2460af6d9020f5861dfdbe4807376 yt/frontends/gadget_fof/io.py
--- a/yt/frontends/gadget_fof/io.py
+++ b/yt/frontends/gadget_fof/io.py
@@ -17,7 +17,7 @@
 import h5py
 import numpy as np
 
-from yt.utilities.exceptions import *
+from yt.utilities.exceptions import YTDomainOverflow
 from yt.funcs import mylog
 
 from yt.utilities.io_handler import \

diff -r 063b9d2dd69013c61f9f69ad5eac994ffcec5b31 -r fbebdeac3bc2460af6d9020f5861dfdbe4807376 yt/frontends/gadget_fof/setup.py
--- a/yt/frontends/gadget_fof/setup.py
+++ b/yt/frontends/gadget_fof/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 
 
 def configuration(parent_package='', top_path=None):


https://bitbucket.org/yt_analysis/yt/commits/dbe4dd4e8f30/
Changeset:   dbe4dd4e8f30
Branch:      yt
User:        ngoldbaum
Date:        2015-09-18 22:02:39+00:00
Summary:     Linting GDF
Affected #:  4 files

diff -r fbebdeac3bc2460af6d9020f5861dfdbe4807376 -r dbe4dd4e8f3072c76c07c7b2726aee5ba9961a4c yt/frontends/gdf/data_structures.py
--- a/yt/frontends/gdf/data_structures.py
+++ b/yt/frontends/gdf/data_structures.py
@@ -14,7 +14,6 @@
 #-----------------------------------------------------------------------------
 
 import h5py
-import types
 import numpy as np
 import weakref
 import os

diff -r fbebdeac3bc2460af6d9020f5861dfdbe4807376 -r dbe4dd4e8f3072c76c07c7b2726aee5ba9961a4c yt/frontends/gdf/fields.py
--- a/yt/frontends/gdf/fields.py
+++ b/yt/frontends/gdf/fields.py
@@ -13,9 +13,6 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import numpy as np
-
-from yt.funcs import mylog
 from yt.fields.field_info_container import \
     FieldInfoContainer
 

diff -r fbebdeac3bc2460af6d9020f5861dfdbe4807376 -r dbe4dd4e8f3072c76c07c7b2726aee5ba9961a4c yt/frontends/gdf/io.py
--- a/yt/frontends/gdf/io.py
+++ b/yt/frontends/gdf/io.py
@@ -67,7 +67,7 @@
             rv[field] = np.empty(fsize, dtype="float64")
         ngrids = sum(len(chunk.objs) for chunk in chunks)
         mylog.debug("Reading %s cells of %s fields in %s blocks",
-                    size, [fname for ftype, fname in fields], ngrids)
+                    size, [fn for ft, fn in fields], ngrids)
         ind = 0
         for chunk in chunks:
             fid = None

diff -r fbebdeac3bc2460af6d9020f5861dfdbe4807376 -r dbe4dd4e8f3072c76c07c7b2726aee5ba9961a4c yt/frontends/gdf/setup.py
--- a/yt/frontends/gdf/setup.py
+++ b/yt/frontends/gdf/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 
 
 def configuration(parent_package='', top_path=None):


https://bitbucket.org/yt_analysis/yt/commits/a1929a33c87e/
Changeset:   a1929a33c87e
Branch:      yt
User:        ngoldbaum
Date:        2015-09-18 22:06:11+00:00
Summary:     Linting halo_catalog
Affected #:  4 files

diff -r dbe4dd4e8f3072c76c07c7b2726aee5ba9961a4c -r a1929a33c87e6ce8fcae963156ce8e4b58a6a2c2 yt/frontends/halo_catalog/data_structures.py
--- a/yt/frontends/halo_catalog/data_structures.py
+++ b/yt/frontends/halo_catalog/data_structures.py
@@ -17,25 +17,17 @@
 import h5py
 import numpy as np
 import stat
-import weakref
-import struct
 import glob
-import time
 import os
 
 from .fields import \
     HaloCatalogFieldInfo
 
-from yt.utilities.cosmology import Cosmology
 from yt.geometry.particle_geometry_handler import \
     ParticleIndex
 from yt.data_objects.static_output import \
     Dataset, \
     ParticleFile
-import yt.utilities.fortran_utils as fpu
-from yt.units.yt_array import \
-    YTArray, \
-    YTQuantity
     
 class HaloCatalogHDF5File(ParticleFile):
     def __init__(self, ds, io, filename, file_id):

diff -r dbe4dd4e8f3072c76c07c7b2726aee5ba9961a4c -r a1929a33c87e6ce8fcae963156ce8e4b58a6a2c2 yt/frontends/halo_catalog/fields.py
--- a/yt/frontends/halo_catalog/fields.py
+++ b/yt/frontends/halo_catalog/fields.py
@@ -14,17 +14,8 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import numpy as np
-
-from yt.funcs import mylog
 from yt.fields.field_info_container import \
     FieldInfoContainer
-from yt.units.yt_array import \
-    YTArray
-
-from yt.utilities.physical_constants import \
-    mh, \
-    mass_sun_cgs
 
 m_units = "g"
 p_units = "cm"

diff -r dbe4dd4e8f3072c76c07c7b2726aee5ba9961a4c -r a1929a33c87e6ce8fcae963156ce8e4b58a6a2c2 yt/frontends/halo_catalog/io.py
--- a/yt/frontends/halo_catalog/io.py
+++ b/yt/frontends/halo_catalog/io.py
@@ -17,7 +17,7 @@
 import h5py
 import numpy as np
 
-from yt.utilities.exceptions import *
+from yt.utilities.exceptions import YTDomainOverflow
 from yt.funcs import mylog
 
 from yt.utilities.io_handler import \
@@ -25,7 +25,6 @@
 
 from yt.utilities.lib.geometry_utils import compute_morton
 
-from yt.geometry.oct_container import _ORDER_MAX
 
 class IOHandlerHaloCatalogHDF5(BaseIOHandler):
     _dataset_type = "halocatalog_hdf5"
@@ -44,7 +43,6 @@
             for obj in chunk.objs:
                 data_files.update(obj.data_files)
         for data_file in sorted(data_files):
-            pcount = data_file.header['num_halos']
             with h5py.File(data_file.filename, "r") as f:
                 x = f['particle_position_x'].value.astype("float64")
                 y = f['particle_position_y'].value.astype("float64")
@@ -62,7 +60,6 @@
             for obj in chunk.objs:
                 data_files.update(obj.data_files)
         for data_file in sorted(data_files):
-            pcount = data_file.header['num_halos']
             with h5py.File(data_file.filename, "r") as f:
                 for ptype, field_list in sorted(ptf.items()):
                     x = f['particle_position_x'].value.astype("float64")

diff -r dbe4dd4e8f3072c76c07c7b2726aee5ba9961a4c -r a1929a33c87e6ce8fcae963156ce8e4b58a6a2c2 yt/frontends/halo_catalog/setup.py
--- a/yt/frontends/halo_catalog/setup.py
+++ b/yt/frontends/halo_catalog/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 
 
 def configuration(parent_package='', top_path=None):


https://bitbucket.org/yt_analysis/yt/commits/9f44269fe9a4/
Changeset:   9f44269fe9a4
Branch:      yt
User:        ngoldbaum
Date:        2015-09-18 22:07:35+00:00
Summary:     Linting http_stream
Affected #:  3 files

diff -r a1929a33c87e6ce8fcae963156ce8e4b58a6a2c2 -r 9f44269fe9a472cd6d44145cd37f647bba3a0c83 yt/frontends/http_stream/data_structures.py
--- a/yt/frontends/http_stream/data_structures.py
+++ b/yt/frontends/http_stream/data_structures.py
@@ -17,7 +17,6 @@
 
 import numpy as np
 import time
-import types
 
 from yt.data_objects.static_output import \
     ParticleFile

diff -r a1929a33c87e6ce8fcae963156ce8e4b58a6a2c2 -r 9f44269fe9a472cd6d44145cd37f647bba3a0c83 yt/frontends/http_stream/io.py
--- a/yt/frontends/http_stream/io.py
+++ b/yt/frontends/http_stream/io.py
@@ -17,10 +17,12 @@
 
 import numpy as np
 
+from yt.funcs import \
+    mylog
 from yt.utilities.io_handler import \
     BaseIOHandler
 from yt.utilities.lib.geometry_utils import \
-     compute_morton
+    compute_morton
 
 try:
     import requests

diff -r a1929a33c87e6ce8fcae963156ce8e4b58a6a2c2 -r 9f44269fe9a472cd6d44145cd37f647bba3a0c83 yt/frontends/http_stream/setup.py
--- a/yt/frontends/http_stream/setup.py
+++ b/yt/frontends/http_stream/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 
 
 def configuration(parent_package='', top_path=None):


https://bitbucket.org/yt_analysis/yt/commits/41ab25c45a63/
Changeset:   41ab25c45a63
Branch:      yt
User:        ngoldbaum
Date:        2015-09-18 22:11:58+00:00
Summary:     Linting moab
Affected #:  4 files

diff -r 9f44269fe9a472cd6d44145cd37f647bba3a0c83 -r 41ab25c45a63a962157f26d1c4a0463174d55e28 yt/frontends/moab/data_structures.py
--- a/yt/frontends/moab/data_structures.py
+++ b/yt/frontends/moab/data_structures.py
@@ -17,17 +17,12 @@
 import os
 import numpy as np
 import weakref
-from yt.funcs import mylog
 from yt.data_objects.unstructured_mesh import \
-           SemiStructuredMesh
+    SemiStructuredMesh
 from yt.geometry.unstructured_mesh_handler import \
-           UnstructuredIndex
+    UnstructuredIndex
 from yt.data_objects.static_output import \
-           Dataset
-from yt.utilities.io_handler import \
-    io_registry
-from yt.utilities.definitions import \
-    mpc_conversion, sec_conversion
+    Dataset
 from yt.utilities.file_handler import HDF5FileHandler
 
 from .fields import MoabFieldInfo, PyneFieldInfo
@@ -86,7 +81,7 @@
         self.mass_unit = self.quan(1.0, "g")
 
     def _parse_parameter_file(self):
-        self._handle = f = h5py.File(self.parameter_filename, "r")
+        self._handle = h5py.File(self.parameter_filename, "r")
         coords = self._handle["/tstt/nodes/coordinates"]
         self.domain_left_edge = coords[0]
         self.domain_right_edge = coords[-1]
@@ -167,7 +162,8 @@
         self.mass_unit = self.quan(1.0, "g")
 
     def _parse_parameter_file(self):
-        from itaps import iBase
+        # not sure if this import has side-effects so I'm not deleting it
+        from itaps import iBase  # NOQA
 
         ents = list(self.pyne_mesh.structured_iterate_vertex())
         coords = self.pyne_mesh.mesh.getVtxCoords(ents)
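
The NOQA on the itaps import above matters because pyflakes flags any
import whose name is never used (F401), even when the import is wanted
for its side effects. A sketch with a stdlib module whose import has a
visible side effect:

    import this  # NOQA -- importing "this" prints the Zen of Python

    print("the import above ran only for its side effect")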

diff -r 9f44269fe9a472cd6d44145cd37f647bba3a0c83 -r 41ab25c45a63a962157f26d1c4a0463174d55e28 yt/frontends/moab/io.py
--- a/yt/frontends/moab/io.py
+++ b/yt/frontends/moab/io.py
@@ -37,7 +37,7 @@
             rv[field] = np.empty(size, dtype=fhandle[field_dname(fname)].dtype)
         ngrids = sum(len(chunk.objs) for chunk in chunks)
         mylog.debug("Reading %s cells of %s fields in %s blocks",
-                    size, [fname for ftype, fname in fields], ngrids)
+                    size, [fn for ft, fn in fields], ngrids)
         for field in fields:
             ftype, fname = field
             ds = np.array(fhandle[field_dname(fname)][:], dtype="float64")
@@ -53,10 +53,8 @@
     def _read_fluid_selection(self, chunks, selector, fields, size):
         chunks = list(chunks)
         assert(len(chunks) == 1)
-        tags = {}
         rv = {}
         pyne_mesh = self.ds.pyne_mesh
-        mesh = pyne_mesh.mesh
         for field in fields:
             rv[field] = np.empty(size, dtype="float64")
         ngrids = sum(len(chunk.objs) for chunk in chunks)

diff -r 9f44269fe9a472cd6d44145cd37f647bba3a0c83 -r 41ab25c45a63a962157f26d1c4a0463174d55e28 yt/frontends/moab/setup.py
--- a/yt/frontends/moab/setup.py
+++ b/yt/frontends/moab/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 
 
 def configuration(parent_package='', top_path=None):

diff -r 9f44269fe9a472cd6d44145cd37f647bba3a0c83 -r 41ab25c45a63a962157f26d1c4a0463174d55e28 yt/frontends/moab/tests/test_c5.py
--- a/yt/frontends/moab/tests/test_c5.py
+++ b/yt/frontends/moab/tests/test_c5.py
@@ -14,13 +14,16 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from yt.testing import *
+import numpy as np
+
+from yt.testing import \
+    assert_equal, \
+    assert_almost_equal, \
+    requires_file, \
+    units_override_check
 from yt.utilities.answer_testing.framework import \
     requires_ds, \
-    small_patch_amr, \
-    big_patch_amr, \
     data_dir_load, \
-    PixelizedProjectionValuesTest, \
     FieldValuesTest
 from yt.frontends.moab.api import MoabHex8Dataset
 


https://bitbucket.org/yt_analysis/yt/commits/c528d2fe28ab/
Changeset:   c528d2fe28ab
Branch:      yt
User:        ngoldbaum
Date:        2015-09-18 22:15:06+00:00
Summary:     Linting owls
Affected #:  6 files

diff -r 41ab25c45a63a962157f26d1c4a0463174d55e28 -r c528d2fe28ab095af9078bd2f31ac5f7108e4f36 yt/frontends/owls/data_structures.py
--- a/yt/frontends/owls/data_structures.py
+++ b/yt/frontends/owls/data_structures.py
@@ -16,7 +16,6 @@
 #-----------------------------------------------------------------------------
 
 import h5py
-import types
 
 import yt.units
 from yt.frontends.gadget.data_structures import \

diff -r 41ab25c45a63a962157f26d1c4a0463174d55e28 -r c528d2fe28ab095af9078bd2f31ac5f7108e4f36 yt/frontends/owls/fields.py
--- a/yt/frontends/owls/fields.py
+++ b/yt/frontends/owls/fields.py
@@ -298,9 +298,9 @@
         owls_ion_path = os.path.join( data_dir, "owls_ion_data" )
 
         if not os.path.exists(owls_ion_path):
-            mylog.info(txt % (data_url, data_dir))                    
+            mylog.info(txt % (data_url, data_dir))
             fname = data_dir + "/" + data_file
-            fn = download_file(os.path.join(data_url, data_file), fname)
+            download_file(os.path.join(data_url, data_file), fname)
 
             cmnd = "cd " + data_dir + "; " + "tar xf " + data_file
             os.system(cmnd)

diff -r 41ab25c45a63a962157f26d1c4a0463174d55e28 -r c528d2fe28ab095af9078bd2f31ac5f7108e4f36 yt/frontends/owls/io.py
--- a/yt/frontends/owls/io.py
+++ b/yt/frontends/owls/io.py
@@ -32,7 +32,7 @@
 def _get_h5_handle(fn):
     try:
         f = h5py.File(fn, "r")
-    except IOError as e:
+    except IOError:
         print("ERROR OPENING %s" % (fn))
         if os.path.exists(fn):
             print("FILENAME EXISTS")

diff -r 41ab25c45a63a962157f26d1c4a0463174d55e28 -r c528d2fe28ab095af9078bd2f31ac5f7108e4f36 yt/frontends/owls/owls_ion_tables.py
--- a/yt/frontends/owls/owls_ion_tables.py
+++ b/yt/frontends/owls/owls_ion_tables.py
@@ -16,7 +16,6 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import sys
 import h5py
 import numpy as np
 
@@ -31,7 +30,7 @@
     data = None
     with h5py.File( fname, 'r' ) as h5f:
         ds = h5f[path]
-        if dtype == None:
+        if dtype is None:
             dtype = ds.dtype
         data = np.zeros( ds.shape, dtype=dtype )
         data = ds.value
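
Comparing to None with "is" rather than "==" (pycodestyle E711) matters
for exactly the objects in play here: NumPy overloads == to broadcast
element-wise, so an equality test against None can yield an array rather
than a bool. A sketch mirroring the dtype default above:

    import numpy as np

    dtype = None
    if dtype is None:  # identity test: always a plain bool
        dtype = np.float64

    data = np.zeros(4, dtype=dtype)
    # (data == None) would broadcast to an array of False, not a bool
    assert data.dtype == np.float64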

diff -r 41ab25c45a63a962157f26d1c4a0463174d55e28 -r c528d2fe28ab095af9078bd2f31ac5f7108e4f36 yt/frontends/owls/setup.py
--- a/yt/frontends/owls/setup.py
+++ b/yt/frontends/owls/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 
 
 def configuration(parent_package='', top_path=None):

diff -r 41ab25c45a63a962157f26d1c4a0463174d55e28 -r c528d2fe28ab095af9078bd2f31ac5f7108e4f36 yt/frontends/owls/tests/test_outputs.py
--- a/yt/frontends/owls/tests/test_outputs.py
+++ b/yt/frontends/owls/tests/test_outputs.py
@@ -14,11 +14,11 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from yt.testing import *
+from yt.testing import \
+    assert_equal, \
+    requires_file
 from yt.utilities.answer_testing.framework import \
     requires_ds, \
-    small_patch_amr, \
-    big_patch_amr, \
     data_dir_load, \
     PixelizedProjectionValuesTest, \
     FieldValuesTest, \


https://bitbucket.org/yt_analysis/yt/commits/95d76e7e8262/
Changeset:   95d76e7e8262
Branch:      yt
User:        ngoldbaum
Date:        2015-09-18 22:17:53+00:00
Summary:     Linting owls_subfind
Affected #:  4 files

diff -r c528d2fe28ab095af9078bd2f31ac5f7108e4f36 -r 95d76e7e8262c41eb51776aee7db8f9cbbb9c30c yt/frontends/owls_subfind/data_structures.py
--- a/yt/frontends/owls_subfind/data_structures.py
+++ b/yt/frontends/owls_subfind/data_structures.py
@@ -18,23 +18,18 @@
 import h5py
 import numpy as np
 import stat
-import weakref
-import struct
 import glob
-import time
 import os
 
 from .fields import \
     OWLSSubfindFieldInfo
 
-from yt.utilities.cosmology import \
-    Cosmology
 from yt.utilities.definitions import \
-    mpc_conversion, sec_conversion
+    sec_conversion
 from yt.utilities.exceptions import \
     YTException
 from yt.utilities.logger import ytLogger as \
-     mylog
+    mylog
 from yt.geometry.particle_geometry_handler import \
     ParticleIndex
 from yt.data_objects.static_output import \
@@ -42,10 +37,6 @@
     ParticleFile
 from yt.frontends.gadget.data_structures import \
     _fix_unit_ordering
-import yt.utilities.fortran_utils as fpu
-from yt.units.yt_array import \
-    YTArray, \
-    YTQuantity
 
 class OWLSSubfindParticleIndex(ParticleIndex):
     def __init__(self, ds, dataset_type):

diff -r c528d2fe28ab095af9078bd2f31ac5f7108e4f36 -r 95d76e7e8262c41eb51776aee7db8f9cbbb9c30c yt/frontends/owls_subfind/fields.py
--- a/yt/frontends/owls_subfind/fields.py
+++ b/yt/frontends/owls_subfind/fields.py
@@ -14,11 +14,8 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from yt.funcs import mylog
 from yt.fields.field_info_container import \
     FieldInfoContainer
-from yt.units.yt_array import \
-    YTArray
 
 m_units = "code_mass"
 mdot_units = "code_mass / code_time"

diff -r c528d2fe28ab095af9078bd2f31ac5f7108e4f36 -r 95d76e7e8262c41eb51776aee7db8f9cbbb9c30c yt/frontends/owls_subfind/io.py
--- a/yt/frontends/owls_subfind/io.py
+++ b/yt/frontends/owls_subfind/io.py
@@ -17,7 +17,7 @@
 import h5py
 import numpy as np
 
-from yt.utilities.exceptions import *
+from yt.utilities.exceptions import YTDomainOverflow
 from yt.funcs import mylog
 
 from yt.utilities.io_handler import \
@@ -25,8 +25,6 @@
 
 from yt.utilities.lib.geometry_utils import compute_morton
 
-from yt.geometry.oct_container import _ORDER_MAX
-
 class IOHandlerOWLSSubfindHDF5(BaseIOHandler):
     _dataset_type = "subfind_hdf5"
 

diff -r c528d2fe28ab095af9078bd2f31ac5f7108e4f36 -r 95d76e7e8262c41eb51776aee7db8f9cbbb9c30c yt/frontends/owls_subfind/setup.py
--- a/yt/frontends/owls_subfind/setup.py
+++ b/yt/frontends/owls_subfind/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 
 
 def configuration(parent_package='', top_path=None):


https://bitbucket.org/yt_analysis/yt/commits/e1a2bb45f868/
Changeset:   e1a2bb45f868
Branch:      yt
User:        ngoldbaum
Date:        2015-09-18 22:22:41+00:00
Summary:     Linting ramses
Affected #:  5 files

diff -r 95d76e7e8262c41eb51776aee7db8f9cbbb9c30c -r e1a2bb45f86892d1f86f50f37cda337f8e8c926f yt/frontends/ramses/data_structures.py
--- a/yt/frontends/ramses/data_structures.py
+++ b/yt/frontends/ramses/data_structures.py
@@ -21,29 +21,24 @@
 import weakref
 from io import BytesIO
 
-from yt.funcs import *
+from yt.funcs import \
+    mylog
 from yt.geometry.oct_geometry_handler import \
     OctreeIndex
 from yt.geometry.geometry_handler import \
-    Index, YTDataChunk
+    YTDataChunk
 from yt.data_objects.static_output import \
     Dataset
 from yt.data_objects.octree_subset import \
     OctreeSubset
 
 from .definitions import ramses_header, field_aliases
-from yt.utilities.lib.misc_utilities import \
-    get_box_grids_level
-from yt.utilities.io_handler import \
-    io_registry
 from yt.utilities.physical_constants import mp, kb
 from .fields import \
     RAMSESFieldInfo, _X
 import yt.utilities.fortran_utils as fpu
 from yt.geometry.oct_container import \
     RAMSESOctreeContainer
-from yt.fields.particle_fields import \
-    standard_particle_fields
 from yt.arraytypes import blankRecordArray
 
 class RAMSESDomainFile(object):
@@ -193,8 +188,8 @@
             self.ngridbound = fpu.read_vector(f, 'i').astype("int64")
         else:
             self.ngridbound = np.zeros(hvals['nlevelmax'], dtype='int64')
-        free_mem = fpu.read_attrs(f, (('free_mem', 5, 'i'), ) )
-        ordering = fpu.read_vector(f, 'c')
+        free_mem = fpu.read_attrs(f, (('free_mem', 5, 'i'), ) )  # NOQA
+        ordering = fpu.read_vector(f, 'c')  # NOQA
         fpu.skip(f, 4)
         # Now we're at the tree itself
         # Now we iterate over each level and each CPU.
@@ -241,7 +236,7 @@
                 #ng is the number of octs on this level on this domain
                 ng = _ng(cpu, level)
                 if ng == 0: continue
-                ind = fpu.read_vector(f, "I").astype("int64")
+                ind = fpu.read_vector(f, "I").astype("int64")  # NOQA
                 fpu.skip(f, 2)
                 pos = np.empty((ng, 3), dtype='float64')
                 pos[:,0] = fpu.read_vector(f, "d") - nx
@@ -507,12 +502,11 @@
             print("z = %0.8f" % (self.dataset.current_redshift))
         except:
             pass
-        print("t = %0.8e = %0.8e s = %0.8e years" % \
-            (self.ds.current_time.in_units("code_time"),
-             self.ds.current_time.in_units("s"),
-             self.ds.current_time.in_units("yr")))
+        print("t = %0.8e = %0.8e s = %0.8e years" % (
+            self.ds.current_time.in_units("code_time"),
+            self.ds.current_time.in_units("s"),
+            self.ds.current_time.in_units("yr")))
         print("\nSmallest Cell:")
-        u=[]
         for item in ("Mpc", "pc", "AU", "cm"):
             print("\tWidth: %0.3e %s" % (dx.in_units(item), item))
 

diff -r 95d76e7e8262c41eb51776aee7db8f9cbbb9c30c -r e1a2bb45f86892d1f86f50f37cda337f8e8c926f yt/frontends/ramses/fields.py
--- a/yt/frontends/ramses/fields.py
+++ b/yt/frontends/ramses/fields.py
@@ -19,16 +19,12 @@
 from yt.utilities.physical_constants import \
     boltzmann_constant_cgs, \
     mass_hydrogen_cgs, \
-    mass_sun_cgs, \
     mh
 from yt.utilities.linear_interpolators import \
     BilinearFieldInterpolator
 import yt.utilities.fortran_utils as fpu
-from yt.funcs import mylog
 from yt.fields.field_info_container import \
     FieldInfoContainer
-from yt.units.yt_array import \
-    YTArray
 
 b_units = "code_magnetic"
 ra_units = "code_length / code_time**2"

diff -r 95d76e7e8262c41eb51776aee7db8f9cbbb9c30c -r e1a2bb45f86892d1f86f50f37cda337f8e8c926f yt/frontends/ramses/io.py
--- a/yt/frontends/ramses/io.py
+++ b/yt/frontends/ramses/io.py
@@ -35,7 +35,6 @@
         # Each domain subset will contain a hydro_offset array, which gives
         # pointers to level-by-level hydro information
         tr = defaultdict(list)
-        cp = 0
         for chunk in chunks:
             for subset in chunk.objs:
                 # Now we read the entire thing

diff -r 95d76e7e8262c41eb51776aee7db8f9cbbb9c30c -r e1a2bb45f86892d1f86f50f37cda337f8e8c926f yt/frontends/ramses/setup.py
--- a/yt/frontends/ramses/setup.py
+++ b/yt/frontends/ramses/setup.py
@@ -1,9 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
-import glob
 
 
 def configuration(parent_package='', top_path=None):

diff -r 95d76e7e8262c41eb51776aee7db8f9cbbb9c30c -r e1a2bb45f86892d1f86f50f37cda337f8e8c926f yt/frontends/ramses/tests/test_outputs.py
--- a/yt/frontends/ramses/tests/test_outputs.py
+++ b/yt/frontends/ramses/tests/test_outputs.py
@@ -14,7 +14,10 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from yt.testing import *
+from yt.testing import \
+    assert_equal, \
+    requires_file, \
+    units_override_check
 from yt.utilities.answer_testing.framework import \
     requires_ds, \
     data_dir_load, \


https://bitbucket.org/yt_analysis/yt/commits/7e21d0054ef0/
Changeset:   7e21d0054ef0
Branch:      yt
User:        ngoldbaum
Date:        2015-09-18 22:24:50+00:00
Summary:     Linting rockstar
Affected #:  4 files

diff -r e1a2bb45f86892d1f86f50f37cda337f8e8c926f -r 7e21d0054ef08fd14feb2898c124ea202565f67c yt/frontends/rockstar/data_structures.py
--- a/yt/frontends/rockstar/data_structures.py
+++ b/yt/frontends/rockstar/data_structures.py
@@ -16,10 +16,7 @@
 
 import numpy as np
 import stat
-import weakref
-import struct
 import glob
-import time
 import os
 
 from .fields import \
@@ -32,9 +29,6 @@
     Dataset, \
     ParticleFile
 import yt.utilities.fortran_utils as fpu
-from yt.units.yt_array import \
-    YTArray, \
-    YTQuantity
 
 from .definitions import \
     header_dt

diff -r e1a2bb45f86892d1f86f50f37cda337f8e8c926f -r 7e21d0054ef08fd14feb2898c124ea202565f67c yt/frontends/rockstar/fields.py
--- a/yt/frontends/rockstar/fields.py
+++ b/yt/frontends/rockstar/fields.py
@@ -14,17 +14,8 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import numpy as np
-
-from yt.funcs import mylog
 from yt.fields.field_info_container import \
     FieldInfoContainer
-from yt.units.yt_array import \
-    YTArray
-
-from yt.utilities.physical_constants import \
-    mh, \
-    mass_sun_cgs
 
 m_units = "Msun / h"                # Msun / h
 p_units = "Mpccm / h"               # Mpc / h comoving

diff -r e1a2bb45f86892d1f86f50f37cda337f8e8c926f -r 7e21d0054ef08fd14feb2898c124ea202565f67c yt/frontends/rockstar/io.py
--- a/yt/frontends/rockstar/io.py
+++ b/yt/frontends/rockstar/io.py
@@ -14,20 +14,20 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import h5py
 import numpy as np
+import os
 
-from yt.utilities.exceptions import *
 from yt.funcs import mylog
 
+from yt.utilities.exceptions import \
+    YTDomainOverflow
+
 from yt.utilities.io_handler import \
     BaseIOHandler
 
-import yt.utilities.fortran_utils as fpu
 from .definitions import halo_dts
 from yt.utilities.lib.geometry_utils import compute_morton
 
-from yt.geometry.oct_container import _ORDER_MAX
 from operator import attrgetter
 
 class IOHandlerRockstarBinary(BaseIOHandler):

diff -r e1a2bb45f86892d1f86f50f37cda337f8e8c926f -r 7e21d0054ef08fd14feb2898c124ea202565f67c yt/frontends/rockstar/setup.py
--- a/yt/frontends/rockstar/setup.py
+++ b/yt/frontends/rockstar/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 
 
 def configuration(parent_package='', top_path=None):


https://bitbucket.org/yt_analysis/yt/commits/9d6c7c9dff02/
Changeset:   9d6c7c9dff02
Branch:      yt
User:        ngoldbaum
Date:        2015-09-18 22:27:48+00:00
Summary:     Linting SDF
Affected #:  5 files

diff -r 7e21d0054ef08fd14feb2898c124ea202565f67c -r 9d6c7c9dff02ccdf7f893f0b757fea84918eee57 yt/frontends/sdf/data_structures.py
--- a/yt/frontends/sdf/data_structures.py
+++ b/yt/frontends/sdf/data_structures.py
@@ -15,15 +15,10 @@
 #-----------------------------------------------------------------------------
 
 
-import h5py
 import numpy as np
 import stat
-import weakref
-import struct
-import glob
 import time
 import os
-import types
 import sys
 import contextlib
 
@@ -32,14 +27,8 @@
     ParticleIndex
 from yt.data_objects.static_output import \
     Dataset, ParticleFile
-from yt.utilities.physical_ratios import \
-    cm_per_kpc, \
-    mass_sun_grams, \
-    sec_per_Gyr
 from .fields import \
     SDFFieldInfo
-from .io import \
-    IOHandlerSDF
 from yt.utilities.sdf import \
     SDFRead,\
     SDFIndex,\

diff -r 7e21d0054ef08fd14feb2898c124ea202565f67c -r 9d6c7c9dff02ccdf7f893f0b757fea84918eee57 yt/frontends/sdf/fields.py
--- a/yt/frontends/sdf/fields.py
+++ b/yt/frontends/sdf/fields.py
@@ -14,23 +14,9 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import os
-import numpy as np
-
-from yt.funcs import *
-
 from yt.fields.field_info_container import \
     FieldInfoContainer
 
-from yt.config import ytcfg
-from yt.utilities.physical_constants import mh
-from yt.fields.species_fields import \
-    add_species_field_by_fraction, \
-    add_species_field_by_density, \
-    setup_species_fields
-
-from yt.fields.particle_fields import \
-    add_volume_weighted_smoothed_field
 
 class SDFFieldInfo(FieldInfoContainer):
     known_other_fields = ()

diff -r 7e21d0054ef08fd14feb2898c124ea202565f67c -r 9d6c7c9dff02ccdf7f893f0b757fea84918eee57 yt/frontends/sdf/io.py
--- a/yt/frontends/sdf/io.py
+++ b/yt/frontends/sdf/io.py
@@ -15,17 +15,14 @@
 #-----------------------------------------------------------------------------
 
 import numpy as np
-from yt.funcs import *
-from yt.utilities.exceptions import *
-from yt.units.yt_array import YTArray
 
 from yt.utilities.io_handler import \
     BaseIOHandler
 
-from yt.utilities.fortran_utils import read_record
+from yt.funcs import mylog
+from yt.utilities.exceptions import YTDomainOverflow
 from yt.utilities.lib.geometry_utils import compute_morton
 
-from yt.geometry.oct_container import _ORDER_MAX
 CHUNKSIZE = 32**3
 
 class IOHandlerSDF(BaseIOHandler):
@@ -48,7 +45,6 @@
                 data_files.update(obj.data_files)
         assert(len(data_files) == 1)
         for data_file in sorted(data_files):
-            pcount = self._handle['x'].size
             yield "dark_matter", (
                 self._handle['x'], self._handle['y'], self._handle['z'])
 
@@ -62,7 +58,6 @@
                 data_files.update(obj.data_files)
         assert(len(data_files) == 1)
         for data_file in sorted(data_files):
-            pcount = self._handle['x'].size
             for ptype, field_list in sorted(ptf.items()):
                 x = self._handle['x']
                 y = self._handle['y']

diff -r 7e21d0054ef08fd14feb2898c124ea202565f67c -r 9d6c7c9dff02ccdf7f893f0b757fea84918eee57 yt/frontends/sdf/setup.py
--- a/yt/frontends/sdf/setup.py
+++ b/yt/frontends/sdf/setup.py
@@ -1,9 +1,5 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
-import glob
+
 
 def configuration(parent_package='', top_path=None):
     from numpy.distutils.misc_util import Configuration

diff -r 7e21d0054ef08fd14feb2898c124ea202565f67c -r 9d6c7c9dff02ccdf7f893f0b757fea84918eee57 yt/frontends/sdf/tests/test_outputs.py
--- a/yt/frontends/sdf/tests/test_outputs.py
+++ b/yt/frontends/sdf/tests/test_outputs.py
@@ -44,4 +44,4 @@
     yield assert_equal, str(ds), "ds14_scivis_0128_e4_dt04_1.0000"
     ad = ds.all_data()
     assert np.unique(ad['particle_position_x']).size > 1
-    p = ProjectionPlot(ds, "z", _fields)
+    ProjectionPlot(ds, "z", _fields)


https://bitbucket.org/yt_analysis/yt/commits/7fb188cd5816/
Changeset:   7fb188cd5816
Branch:      yt
User:        ngoldbaum
Date:        2015-09-18 22:28:13+00:00
Summary:     Linting SPH
Affected #:  1 file

diff -r 9d6c7c9dff02ccdf7f893f0b757fea84918eee57 -r 7fb188cd5816e377f5f68833a7d1bcdb97847ea7 yt/frontends/sph/setup.py
--- a/yt/frontends/sph/setup.py
+++ b/yt/frontends/sph/setup.py
@@ -1,9 +1,5 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
-import glob
+
 
 def configuration(parent_package='', top_path=None):
     from numpy.distutils.misc_util import Configuration


https://bitbucket.org/yt_analysis/yt/commits/6244b58cb2b9/
Changeset:   6244b58cb2b9
Branch:      yt
User:        ngoldbaum
Date:        2015-09-18 22:42:37+00:00
Summary:     Linting stream
Affected #:  8 files

diff -r 7fb188cd5816e377f5f68833a7d1bcdb97847ea7 -r 6244b58cb2b99d6698484d80d85eb617175d6905 yt/frontends/stream/data_structures.py
--- a/yt/frontends/stream/data_structures.py
+++ b/yt/frontends/stream/data_structures.py
@@ -13,6 +13,8 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
+import os
+import time
 import weakref
 import numpy as np
 import uuid
@@ -20,13 +22,12 @@
 
 from numbers import Number as numeric_type
 
+from yt.funcs import \
+    iterable, \
+    ensure_list
 from yt.utilities.io_handler import io_registry
-from yt.funcs import *
-from yt.config import ytcfg
 from yt.data_objects.data_containers import \
-    YTFieldData, \
-    YTDataContainer, \
-    YTSelectionContainer
+    YTFieldData
 from yt.data_objects.particle_unions import \
     ParticleUnion
 from yt.data_objects.grid_patch import \
@@ -43,19 +44,13 @@
     OctreeIndex
 from yt.geometry.particle_geometry_handler import \
     ParticleIndex
-from yt.fields.particle_fields import \
-    particle_vector_functions, \
-    particle_deposition_functions, \
-    standard_particle_fields
 from yt.geometry.oct_container import \
     OctreeContainer
 from yt.geometry.unstructured_mesh_handler import \
-           UnstructuredIndex
+    UnstructuredIndex
 from yt.data_objects.static_output import \
     Dataset
 from yt.utilities.logger import ytLogger as mylog
-from yt.fields.field_info_container import \
-    FieldInfoContainer, NullFunc
 from yt.utilities.lib.misc_utilities import \
     get_box_grids_level
 from yt.geometry.grid_container import \
@@ -63,14 +58,14 @@
     MatchPointsToGrids
 from yt.utilities.decompose import \
     decompose_array, get_psize
-from yt.units.yt_array import YTQuantity, YTArray, uconcatenate
-from yt.utilities.definitions import \
-    mpc_conversion, sec_conversion
+from yt.units.yt_array import \
+    YTQuantity, \
+    uconcatenate
 from yt.utilities.flagging_methods import \
     FlaggingGrid
 from yt.data_objects.unstructured_mesh import \
-           SemiStructuredMesh, \
-           UnstructuredMesh
+    SemiStructuredMesh, \
+    UnstructuredMesh
 from yt.extern.six import string_types, iteritems
 from .fields import \
     StreamFieldInfo
@@ -96,7 +91,6 @@
     def _guess_properties_from_parent(self):
         rf = self.ds.refine_by
         my_ind = self.id - self._id_offset
-        le = self.LeftEdge
         self.dds = self.Parent.dds/rf
         ParentLeftIndex = np.rint((self.LeftEdge-self.Parent.LeftEdge)/self.Parent.dds)
         self.start_index = rf*(ParentLeftIndex + self.Parent.get_global_startindex()).astype('int64')
@@ -1072,7 +1066,7 @@
 
     """
 
-    domain_dimensions = np.ones(3, "int32") * (1<<over_refine_factor)
+    domain_dimensions = np.ones(3, "int32") * (1 << over_refine_factor)
     nprocs = 1
     if bbox is None:
         bbox = np.array([[0.0, 1.0], [0.0, 1.0], [0.0, 1.0]], 'float64')

diff -r 7fb188cd5816e377f5f68833a7d1bcdb97847ea7 -r 6244b58cb2b99d6698484d80d85eb617175d6905 yt/frontends/stream/fields.py
--- a/yt/frontends/stream/fields.py
+++ b/yt/frontends/stream/fields.py
@@ -15,10 +15,7 @@
 
 from yt.fields.field_info_container import \
     FieldInfoContainer
-import yt.fields.api
-from yt.fields.particle_fields import \
-    particle_deposition_functions, \
-    particle_vector_functions
+
 
 class StreamFieldInfo(FieldInfoContainer):
     known_other_fields = (

diff -r 7fb188cd5816e377f5f68833a7d1bcdb97847ea7 -r 6244b58cb2b99d6698484d80d85eb617175d6905 yt/frontends/stream/io.py
--- a/yt/frontends/stream/io.py
+++ b/yt/frontends/stream/io.py
@@ -13,17 +13,13 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from collections import defaultdict
-
-import os
 import numpy as np
 
 from yt.utilities.io_handler import \
-    BaseIOHandler, _axis_ids
+    BaseIOHandler
 from yt.utilities.logger import ytLogger as mylog
-from yt.units.yt_array import YTArray
 from yt.utilities.lib.geometry_utils import compute_morton
-from yt.utilities.exceptions import *
+from yt.utilities.exceptions import YTDomainOverflow
 
 class IOHandlerStream(BaseIOHandler):
 
@@ -221,7 +217,7 @@
             rv[field] = np.empty(size, dtype="float64")
         ngrids = sum(len(chunk.objs) for chunk in chunks)
         mylog.debug("Reading %s cells of %s fields in %s blocks",
-                    size, [fname for ftype, fname in fields], ngrids)
+                    size, [fn for ft, fn in fields], ngrids)
         for field in fields:
             ind = 0
             ftype, fname = field

diff -r 7fb188cd5816e377f5f68833a7d1bcdb97847ea7 -r 6244b58cb2b99d6698484d80d85eb617175d6905 yt/frontends/stream/setup.py
--- a/yt/frontends/stream/setup.py
+++ b/yt/frontends/stream/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 
 
 def configuration(parent_package='', top_path=None):

diff -r 7fb188cd5816e377f5f68833a7d1bcdb97847ea7 -r 6244b58cb2b99d6698484d80d85eb617175d6905 yt/frontends/stream/tests/test_stream_amrgrids.py
--- a/yt/frontends/stream/tests/test_stream_amrgrids.py
+++ b/yt/frontends/stream/tests/test_stream_amrgrids.py
@@ -1,9 +1,10 @@
-from yt.testing import *
 import numpy as np
 from yt.utilities.exceptions import YTIntDomainOverflow
 
 from yt import load_amr_grids, ProjectionPlot
 
+from yt.testing import assert_raises
+
 def test_qt_overflow():
     grid_data = []
 
@@ -47,4 +48,4 @@
 
     domain_dimensions = np.array([8, 8, 8])
 
-    spf = load_amr_grids(grid_data, domain_dimensions, refine_by=ref_by)
+    load_amr_grids(grid_data, domain_dimensions, refine_by=ref_by)

diff -r 7fb188cd5816e377f5f68833a7d1bcdb97847ea7 -r 6244b58cb2b99d6698484d80d85eb617175d6905 yt/frontends/stream/tests/test_stream_hexahedral.py
--- a/yt/frontends/stream/tests/test_stream_hexahedral.py
+++ b/yt/frontends/stream/tests/test_stream_hexahedral.py
@@ -1,14 +1,11 @@
 import numpy as np
-from yt.mods import *
-from yt.testing import *
 from yt.frontends.stream.api import \
-    load_hexahedral_mesh, load_uniform_grid
+    load_hexahedral_mesh
 from yt.frontends.stream.data_structures import \
     hexahedral_connectivity
-
-def setup() :
-    pass
-
+from yt.testing import \
+    assert_almost_equal, \
+    assert_equal
 # Field information
 
 def test_stream_hexahedral() :

diff -r 7fb188cd5816e377f5f68833a7d1bcdb97847ea7 -r 6244b58cb2b99d6698484d80d85eb617175d6905 yt/frontends/stream/tests/test_stream_particles.py
--- a/yt/frontends/stream/tests/test_stream_particles.py
+++ b/yt/frontends/stream/tests/test_stream_particles.py
@@ -1,13 +1,11 @@
 import numpy as np
-from yt.mods import *
-from yt.testing import *
+
+from yt.testing import \
+    assert_equal
 from yt.frontends.stream.api import load_uniform_grid, refine_amr, load_amr_grids
 import yt.utilities.initial_conditions as ic
 import yt.utilities.flagging_methods as fm
 
-def setup() :
-    pass
-
 # Field information
 
 def test_stream_particles() :

diff -r 7fb188cd5816e377f5f68833a7d1bcdb97847ea7 -r 6244b58cb2b99d6698484d80d85eb617175d6905 yt/frontends/stream/tests/test_update_data.py
--- a/yt/frontends/stream/tests/test_update_data.py
+++ b/yt/frontends/stream/tests/test_update_data.py
@@ -1,4 +1,4 @@
-from yt.testing import *
+from yt.testing import fake_random_ds
 from yt.data_objects.profiles import BinnedProfile1D
 from numpy.random import uniform
 


https://bitbucket.org/yt_analysis/yt/commits/52311968ee92/
Changeset:   52311968ee92
Branch:      yt
User:        ngoldbaum
Date:        2015-09-18 22:48:13+00:00
Summary:     Linting tipsy
Affected #:  5 files

diff -r 6244b58cb2b99d6698484d80d85eb617175d6905 -r 52311968ee92fc6cf52b597f6b6c4c160ca2b011 yt/frontends/tipsy/data_structures.py
--- a/yt/frontends/tipsy/data_structures.py
+++ b/yt/frontends/tipsy/data_structures.py
@@ -84,7 +84,6 @@
             print("SOMETHING HAS GONE WRONG.  NBODIES != SUM PARTICLES.")
             print("%s != (%s == %s + %s + %s)" % (
                 self.parameters['nbodies'],
-                tot,
                 self.parameters['nsph'],
                 self.parameters['ndark'],
                 self.parameters['nstar']))

diff -r 6244b58cb2b99d6698484d80d85eb617175d6905 -r 52311968ee92fc6cf52b597f6b6c4c160ca2b011 yt/frontends/tipsy/fields.py
--- a/yt/frontends/tipsy/fields.py
+++ b/yt/frontends/tipsy/fields.py
@@ -16,8 +16,7 @@
 #-----------------------------------------------------------------------------
 
 from yt.frontends.sph.fields import SPHFieldInfo
-from yt.fields.particle_fields import add_volume_weighted_smoothed_field, add_nearest_neighbor_field
-from yt.utilities.physical_constants import mp, kb
+from yt.fields.particle_fields import add_nearest_neighbor_field
 
 class TipsyFieldInfo(SPHFieldInfo):
     aux_particle_fields = {

diff -r 6244b58cb2b99d6698484d80d85eb617175d6905 -r 52311968ee92fc6cf52b597f6b6c4c160ca2b011 yt/frontends/tipsy/io.py
--- a/yt/frontends/tipsy/io.py
+++ b/yt/frontends/tipsy/io.py
@@ -18,16 +18,15 @@
 import glob
 import numpy as np
 import os
+import struct
 
-from yt.geometry.oct_container import \
-    _ORDER_MAX
 from yt.utilities.io_handler import \
     BaseIOHandler
 from yt.utilities.lib.geometry_utils import \
     compute_morton
 from yt.utilities.logger import ytLogger as \
     mylog
-    
+
 CHUNKSIZE = 10000000
 
 class IOHandlerTipsyBinary(BaseIOHandler):
@@ -213,12 +212,11 @@
                 # We'll just add the individual types separately
                 count = data_file.total_particles[ptype]
                 if count == 0: continue
-                start, stop = ind, ind + count
+                stop = ind + count
                 while ind < stop:
                     c = min(CHUNKSIZE, stop - ind)
                     pp = np.fromfile(f, dtype = self._pdtypes[ptype],
                                      count = c)
-                    eps = np.finfo(pp["Coordinates"]["x"].dtype).eps
                     np.minimum(mi, [pp["Coordinates"]["x"].min(),
                                     pp["Coordinates"]["y"].min(),
                                     pp["Coordinates"]["z"].min()], mi)
@@ -242,7 +240,6 @@
                           dtype="uint64")
         ind = 0
         DLE, DRE = ds.domain_left_edge, ds.domain_right_edge
-        dx = (DRE - DLE) / (2**_ORDER_MAX)
         self.domain_left_edge = DLE.in_units("code_length").ndarray_view()
         self.domain_right_edge = DRE.in_units("code_length").ndarray_view()
         with open(data_file.filename, "rb") as f:
@@ -251,7 +248,7 @@
                 # We'll just add the individual types separately
                 count = data_file.total_particles[ptype]
                 if count == 0: continue
-                start, stop = ind, ind + count
+                stop = ind + count
                 while ind < stop:
                     c = min(CHUNKSIZE, stop - ind)
                     pp = np.fromfile(f, dtype = self._pdtypes[ptype],
@@ -266,7 +263,6 @@
                         mas[axi] = ma
                     pos = np.empty((pp.size, 3), dtype="float64")
                     for i, ax in enumerate("xyz"):
-                        eps = np.finfo(pp["Coordinates"][ax].dtype).eps
                         pos[:,i] = pp["Coordinates"][ax]
                     regions.add_data_file(pos, data_file.file_id,
                                           data_file.ds.filter_bbox)
@@ -305,7 +301,6 @@
         # We can just look at the particle counts.
         self._header_offset = data_file.ds._header_offset
         self._pdtypes = {}
-        pds = {}
         field_list = []
         tp = data_file.total_particles
         aux_filenames = glob.glob(data_file.filename+'.*') # Find out which auxiliaries we have

diff -r 6244b58cb2b99d6698484d80d85eb617175d6905 -r 52311968ee92fc6cf52b597f6b6c4c160ca2b011 yt/frontends/tipsy/setup.py
--- a/yt/frontends/tipsy/setup.py
+++ b/yt/frontends/tipsy/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 
 
 def configuration(parent_package='', top_path=None):

diff -r 6244b58cb2b99d6698484d80d85eb617175d6905 -r 52311968ee92fc6cf52b597f6b6c4c160ca2b011 yt/frontends/tipsy/tests/test_outputs.py
--- a/yt/frontends/tipsy/tests/test_outputs.py
+++ b/yt/frontends/tipsy/tests/test_outputs.py
@@ -14,11 +14,11 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from yt.testing import *
+from yt.testing import \
+    assert_equal, \
+    requires_file
 from yt.utilities.answer_testing.framework import \
     requires_ds, \
-    small_patch_amr, \
-    big_patch_amr, \
     data_dir_load, \
     PixelizedProjectionValuesTest, \
     FieldValuesTest, \


https://bitbucket.org/yt_analysis/yt/commits/85173286740b/
Changeset:   85173286740b
Branch:      yt
User:        ngoldbaum
Date:        2015-09-18 23:02:45+00:00
Summary:     Linting enzo
Affected #:  7 files

diff -r 52311968ee92fc6cf52b597f6b6c4c160ca2b011 -r 85173286740ba7b17687150bb6ddfa570ce0f7b3 yt/frontends/enzo/answer_testing_support.py
--- a/yt/frontends/enzo/answer_testing_support.py
+++ b/yt/frontends/enzo/answer_testing_support.py
@@ -13,20 +13,22 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from yt.testing import *
+import numpy as np
+import os
+
+from functools import wraps
+
 from yt.config import ytcfg
-from yt.mods import *
-
+from yt.convenience import load
+from yt.testing import assert_allclose
 from yt.utilities.answer_testing.framework import \
-     AnswerTestingTest, \
-     can_run_ds, \
-     FieldValuesTest, \
-     GridHierarchyTest, \
-     GridValuesTest, \
-     ProjectionValuesTest, \
-     ParentageRelationshipsTest, \
-     temp_cwd, \
-     AssertWrapper
+    AnswerTestingTest, \
+    can_run_ds, \
+    FieldValuesTest, \
+    GridValuesTest, \
+    ProjectionValuesTest, \
+    temp_cwd, \
+    AssertWrapper
 
 def requires_outputlog(path = ".", prefix = ""):
     def ffalse(func):

diff -r 52311968ee92fc6cf52b597f6b6c4c160ca2b011 -r 85173286740ba7b17687150bb6ddfa570ce0f7b3 yt/frontends/enzo/data_structures.py
--- a/yt/frontends/enzo/data_structures.py
+++ b/yt/frontends/enzo/data_structures.py
@@ -19,13 +19,16 @@
 import os
 import stat
 import string
+import time
 import re
 
-from threading import Thread
-
+from collection import defaultdict
 from yt.extern.six.moves import zip as izip
 
-from yt.funcs import *
+from yt.funcs import \
+    ensure_list, \
+    ensure_tuple, \
+    get_pbar
 from yt.config import ytcfg
 from yt.data_objects.grid_patch import \
     AMRGridPatch
@@ -36,20 +39,15 @@
 from yt.data_objects.static_output import \
     Dataset
 from yt.fields.field_info_container import \
-    FieldInfoContainer, NullFunc
-from yt.utilities.definitions import \
-    mpc_conversion, sec_conversion
+    NullFunc
 from yt.utilities.physical_constants import \
     rho_crit_g_cm3_h2, cm_per_mpc
-from yt.utilities.io_handler import io_registry
 from yt.utilities.logger import ytLogger as mylog
 from yt.utilities.pyparselibconfig import libconfig
 
 from .fields import \
     EnzoFieldInfo
 
-from yt.utilities.parallel_tools.parallel_analysis_interface import \
-    parallel_blocking_call
 
 class EnzoGrid(AMRGridPatch):
     """
@@ -77,7 +75,6 @@
         """
         rf = self.ds.refine_by
         my_ind = self.id - self._id_offset
-        le = self.LeftEdge
         self.dds = self.Parent.dds/rf
         ParentLeftIndex = np.rint((self.LeftEdge-self.Parent.LeftEdge)/self.Parent.dds)
         self.start_index = rf*(ParentLeftIndex + self.Parent.get_global_startindex()).astype('int64')
@@ -148,12 +145,9 @@
         # We will attempt this by creating a datacube that is exactly bigger
         # than the grid by nZones*dx in each direction
         nl = self.get_global_startindex() - n_zones
-        nr = nl + self.ActiveDimensions + 2*n_zones
         new_left_edge = nl * self.dds + self.ds.domain_left_edge
-        new_right_edge = nr * self.dds + self.ds.domain_left_edge
         # Something different needs to be done for the root grid, though
         level = self.Level
-        args = (level, new_left_edge, new_right_edge)
         kwargs = {'dims': self.ActiveDimensions + 2*n_zones,
                   'num_ghost_zones':n_zones,
                   'use_pbar':False}
@@ -197,7 +191,7 @@
     def __init__(self, ds, dataset_type):
 
         self.dataset_type = dataset_type
-        if ds.file_style != None:
+        if ds.file_style is not None:
             self._bn = ds.file_style
         else:
             self._bn = "%s.cpu%%04i"
@@ -268,14 +262,12 @@
             for line in f:
                 if line.startswith(token):
                     return line.split()[2:]
-        t1 = time.time()
         pattern = r"Pointer: Grid\[(\d*)\]->NextGrid(Next|This)Level = (\d*)\s+$"
         patt = re.compile(pattern)
         f = open(self.index_filename, "rt")
         self.grids = [self.grid(1, self)]
         self.grids[0].Level = 0
         si, ei, LE, RE, fn, npart = [], [], [], [], [], []
-        all = [si, ei, LE, RE, fn]
         pbar = get_pbar("Parsing Hierarchy ", self.num_grids)
         version = self.dataset.parameters.get("VersionNumber", None)
         params = self.dataset.parameters
@@ -326,7 +318,6 @@
         temp_grids[:] = self.grids
         self.grids = temp_grids
         self.filenames = fn
-        t2 = time.time()
 
     def _initialize_grid_arrays(self):
         super(EnzoHierarchy, self)._initialize_grid_arrays()
@@ -403,7 +394,7 @@
         fields = []
         for ptype in self.dataset["AppendActiveParticleType"]:
             select_grids = self.grid_active_particle_count[ptype].flat
-            if np.any(select_grids) == False:
+            if np.any(select_grids) is False:
                 current_ptypes = self.dataset.particle_types
                 new_ptypes = [p for p in current_ptypes if p != ptype]
                 self.dataset.particle_types = new_ptypes
@@ -1027,7 +1018,8 @@
                 self.hubble_constant = self.cosmological_simulation = 0.0
 
     def _obtain_enzo(self):
-        import enzo; return enzo
+        import enzo
+        return enzo
 
     @classmethod
     def _is_valid(cls, *args, **kwargs):

diff -r 52311968ee92fc6cf52b597f6b6c4c160ca2b011 -r 85173286740ba7b17687150bb6ddfa570ce0f7b3 yt/frontends/enzo/fields.py
--- a/yt/frontends/enzo/fields.py
+++ b/yt/frontends/enzo/fields.py
@@ -13,16 +13,11 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import numpy as np
-
-from yt.funcs import mylog
 from yt.fields.field_info_container import \
     FieldInfoContainer
-from yt.units.yt_array import \
-    YTArray
 from yt.utilities.physical_constants import \
-    mh, me, mp, \
-    mass_sun_cgs
+    me, \
+    mp
 
 b_units = "code_magnetic"
 ra_units = "code_length / code_time**2"

diff -r 52311968ee92fc6cf52b597f6b6c4c160ca2b011 -r 85173286740ba7b17687150bb6ddfa570ce0f7b3 yt/frontends/enzo/io.py
--- a/yt/frontends/enzo/io.py
+++ b/yt/frontends/enzo/io.py
@@ -13,19 +13,18 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import os
 import random
 from contextlib import contextmanager
 
 from yt.utilities.io_handler import \
-    BaseIOHandler, _axis_ids
+    BaseIOHandler
 from yt.utilities.logger import ytLogger as mylog
-from yt.geometry.selection_routines import mask_fill, AlwaysSelector
+from yt.geometry.selection_routines import AlwaysSelector
 from yt.extern.six import u, b, iteritems
 import h5py
 
 import numpy as np
-from yt.funcs import *
+
 
 _convert_mass = ("particle_mass","mass")
 
@@ -305,7 +304,7 @@
     _dataset_type = "enzo_packed_3d_gz"
 
     def __init__(self, *args, **kwargs):
-        super(IOHandlerPackgedHDF5GhostZones, self).__init__(*args, **kwargs)
+        super(IOHandlerPackedHDF5GhostZones, self).__init__(*args, **kwargs)
         NGZ = self.ds.parameters.get("NumberOfGhostZones", 3)
         self._base = (slice(NGZ, -NGZ),
                       slice(NGZ, -NGZ),
@@ -357,7 +356,8 @@
                 raise RuntimeError
             g = chunks[0].objs[0]
             for ftype, fname in fields:
-                rv[(ftype, fname)] = self.grids_in_memory[grid.id][fname].swapaxes(0,2)
+                rv[(ftype, fname)] = \
+                    self.grids_in_memory[g.id][fname].swapaxes(0, 2)
             return rv
         if size is None:
             size = sum((g.count(selector) for chunk in chunks

diff -r 52311968ee92fc6cf52b597f6b6c4c160ca2b011 -r 85173286740ba7b17687150bb6ddfa570ce0f7b3 yt/frontends/enzo/setup.py
--- a/yt/frontends/enzo/setup.py
+++ b/yt/frontends/enzo/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 
 
 def configuration(parent_package='', top_path=None):

diff -r 52311968ee92fc6cf52b597f6b6c4c160ca2b011 -r 85173286740ba7b17687150bb6ddfa570ce0f7b3 yt/frontends/enzo/simulation_handling.py
--- a/yt/frontends/enzo/simulation_handling.py
+++ b/yt/frontends/enzo/simulation_handling.py
@@ -17,6 +17,8 @@
 import glob
 import os
 
+from math import ceil
+
 from yt.convenience import \
     load, \
     only_on_root
@@ -26,13 +28,14 @@
 from yt.units.unit_registry import \
     UnitRegistry
 from yt.units.yt_array import \
-    YTArray, YTQuantity
+    YTArray
 from yt.utilities.cosmology import \
     Cosmology
 from yt.utilities.exceptions import \
     InvalidSimulationTimeSeries, \
     MissingParameter, \
-    NoStoppingCondition
+    NoStoppingCondition, \
+    YTOutputNotIdentified
 from yt.utilities.logger import ytLogger as \
     mylog
 from yt.utilities.parallel_tools.parallel_analysis_interface import \
@@ -385,7 +388,7 @@
                           'final_redshift': 'CosmologyFinalRedshift'}
             self.cosmological_simulation = 1
             for a, v in cosmo_attr.items():
-                if not v in self.parameters:
+                if v not in self.parameters:
                     raise MissingParameter(self.parameter_filename, v)
                 setattr(self, a, self.parameters[v])
         else:
@@ -412,7 +415,7 @@
 
         self.all_time_outputs = []
         if self.final_time is None or \
-            not 'dtDataDump' in self.parameters or \
+            'dtDataDump' not in self.parameters or \
             self.parameters['dtDataDump'] <= 0.0: return []
 
         index = 0
@@ -441,7 +444,7 @@
         mylog.warn('Calculating cycle outputs.  Dataset times will be unavailable.')
 
         if self.stop_cycle is None or \
-            not 'CycleSkipDataDump' in self.parameters or \
+            'CycleSkipDataDump' not in self.parameters or \
             self.parameters['CycleSkipDataDump'] <= 0.0: return []
 
         self.all_time_outputs = []
@@ -623,7 +626,6 @@
         mylog.info("Writing redshift output list to %s.", filename)
         f = open(filename, 'w')
         for q, output in enumerate(outputs):
-            z_string = "%%s[%%d] = %%.%df" % decimals
             f.write(("CosmologyOutputRedshift[%d] = %."
                      + str(decimals) + "f\n") %
                     ((q + start_index), output['redshift']))

diff -r 52311968ee92fc6cf52b597f6b6c4c160ca2b011 -r 85173286740ba7b17687150bb6ddfa570ce0f7b3 yt/frontends/enzo/tests/test_outputs.py
--- a/yt/frontends/enzo/tests/test_outputs.py
+++ b/yt/frontends/enzo/tests/test_outputs.py
@@ -13,7 +13,14 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from yt.testing import *
+import numpy as np
+
+from yt.testing import \
+    assert_almost_equal, \
+    assert_equal, \
+    requires_file, \
+    units_override_check, \
+    assert_array_equal
 from yt.utilities.answer_testing.framework import \
     requires_ds, \
     small_patch_amr, \
@@ -41,12 +48,12 @@
     dd = ds.all_data()
     dens_enzo = dd["Density"].copy()
     for f in sorted(ds.field_list):
-        if not f[1].endswith("_Density") or \
-               f[1].startswith("Dark_Matter_")  or \
-               f[1].startswith("Electron_") or \
-               f[1].startswith("SFR_") or \
-               f[1].startswith("Forming_Stellar_") or \
-               f[1].startswith("Star_Particle_"):
+        if not (f[1].endswith("_Density") or 
+                f[1].startswith("Dark_Matter_") or
+                f[1].startswith("Electron_") or
+                f[1].startswith("SFR_") or 
+                f[1].startswith("Forming_Stellar_") or
+                f[1].startswith("Star_Particle_")):
             continue
         dens_enzo -= dd[f]
     delta_enzo = np.abs(dens_enzo / dd["Density"])


https://bitbucket.org/yt_analysis/yt/commits/23b8401e7e04/
Changeset:   23b8401e7e04
Branch:      yt
User:        ngoldbaum
Date:        2015-09-18 23:07:10+00:00
Summary:     Merging with mainline
Affected #:  11 files

diff -r 85173286740ba7b17687150bb6ddfa570ce0f7b3 -r 23b8401e7e047f1caa00c774a1d3e2e1c8f98162 doc/source/examining/loading_data.rst
--- a/doc/source/examining/loading_data.rst
+++ b/doc/source/examining/loading_data.rst
@@ -1257,8 +1257,8 @@
 
 .. _specifying-cosmology-tipsy:
 
-Specifying Tipsy Cosmological Parameters
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Specifying Tipsy Cosmological Parameters and Setting Default Units
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 Cosmological parameters can be specified to Tipsy to enable computation of
 default units.  The parameters recognized are of this form:
@@ -1270,5 +1270,27 @@
                            'omega_matter': 0.272,
                            'hubble_constant': 0.702}
 
-These will be used set the units, if they are specified.
+If you wish to set the default units directly, you can do so by using the
+``unit_base`` keyword in the load statement.
 
+ .. code-block:: python
+
+    import yt
+    ds = yt.load(filename, unit_base={'length': (1.0, 'Mpc')})
+
+
+Loading Cosmological Simulations
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+If you are not using a parameter file (i.e. non-Gasoline users), then you must
+use the ``cosmology_parameters`` keyword when loading your data set to indicate
+to yt that it is a cosmological data set. If you do not wish to set any
+non-default cosmological parameters, you may pass an empty dictionary.
+
+ .. code-block:: python
+
+    import yt
+    ds = yt.load(filename, cosmology_parameters={})
+
+
+

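For reference, the two keywords documented above compose in a single load
call. A minimal sketch, assuming a placeholder snapshot path; note that
``unit_base`` maps unit names to (value, unit) tuples and that an empty
``cosmology_parameters`` dictionary is enough to mark a data set as
cosmological:

    import yt

    # Placeholder dataset path; unit_base values are (value, unit) tuples.
    ds = yt.load("agora_1e11.00400/agora_1e11.00400",
                 unit_base={'length': (1.0, 'Mpc')},
                 cosmology_parameters={'current_redshift': 0.0,
                                       'omega_lambda': 0.728,
                                       'omega_matter': 0.272,
                                       'hubble_constant': 0.702})
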
diff -r 85173286740ba7b17687150bb6ddfa570ce0f7b3 -r 23b8401e7e047f1caa00c774a1d3e2e1c8f98162 yt/fields/particle_fields.py
--- a/yt/fields/particle_fields.py
+++ b/yt/fields/particle_fields.py
@@ -788,13 +788,19 @@
         kwargs = {}
         if nneighbors:
             kwargs['nneighbors'] = nneighbors
+        # This is for applying cutoffs, similar to in the SPLASH paper.
+        smooth_cutoff = data["index","cell_volume"]**(1./3)
+        smooth_cutoff.convert_to_units("code_length")
         # volume_weighted smooth operations return lists of length 1.
         rv = data.smooth(pos, [mass, hsml, dens, quan],
                          method="volume_weighted",
                          create_octree=True,
+                         index_fields=[smooth_cutoff],
                          kernel_name=kernel_name)[0]
         rv[np.isnan(rv)] = 0.0
         # Now some quick unit conversions.
+        # This should be used when seeking a non-normalized value:
+        rv /= hsml.uq**3 / hsml.uq.in_cgs().uq**3
         rv = data.apply_units(rv, field_units)
         return rv
     registry.add_field(field_name, function = _vol_weight,

diff -r 85173286740ba7b17687150bb6ddfa570ce0f7b3 -r 23b8401e7e047f1caa00c774a1d3e2e1c8f98162 yt/frontends/gadget/tests/test_outputs.py
--- a/yt/frontends/gadget/tests/test_outputs.py
+++ b/yt/frontends/gadget/tests/test_outputs.py
@@ -1,5 +1,5 @@
 """
-Gadget frontend tests using the IsothermalCollapse dataset
+Gadget frontend tests
 
 
 
@@ -14,15 +14,49 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from yt.testing import \
-    requires_file
+from yt.testing import requires_file
 from yt.utilities.answer_testing.framework import \
-    data_dir_load
-from yt.frontends.gadget.api import GadgetHDF5Dataset
+    data_dir_load, \
+    requires_ds, \
+    sph_answer
+from yt.frontends.gadget.api import GadgetHDF5Dataset, GadgetDataset
 
-isothermal = "IsothermalCollapse/snap_505.hdf5"
-@requires_file(isothermal)
+isothermal_h5 = "IsothermalCollapse/snap_505.hdf5"
+isothermal_bin = "IsothermalCollapse/snap_505"
+gdg = "GadgetDiskGalaxy/snapshot_0200.hdf5"
+
+iso_fields = (
+    ("gas", "density"),
+    ("gas", "temperature"),
+    ('gas', 'velocity_magnitude'),
+    ("deposit", "all_density"),
+    ("deposit", "all_count"),
+    ("deposit", "all_cic"),
+    ("deposit", "PartType0_density"),
+)
+iso_kwargs = dict(bounding_box=[[-3, 3], [-3, 3], [-3, 3]])
+
+gdg_fields = iso_fields + (("deposit", "PartType4_density"), )
+gdg_kwargs = dict(bounding_box=[[-1e5, 1e5], [-1e5, 1e5], [-1e5, 1e5]])
+
+
+@requires_file(isothermal_h5)
+@requires_file(isothermal_bin)
 def test_GadgetDataset():
-    kwargs = dict(bounding_box=[[-3,3], [-3,3], [-3,3]])
-    assert isinstance(data_dir_load(isothermal, kwargs=kwargs),
+    assert isinstance(data_dir_load(isothermal_h5, kwargs=iso_kwargs),
                       GadgetHDF5Dataset)
+    assert isinstance(data_dir_load(isothermal_bin, kwargs=iso_kwargs),
+                      GadgetDataset)
+
+
+@requires_ds(isothermal_h5)
+def test_iso_collapse():
+    for test in sph_answer(isothermal_h5, 'snap_505', 2**17,
+                           iso_fields, ds_kwargs=iso_kwargs):
+        yield test
+
+@requires_ds(gdg, big_data=True)
+def test_gadget_disk_galaxy():
+    for test in sph_answer(gdg, 'snap_505', 11907080, gdg_fields,
+                           ds_kwargs=gdg_kwargs):
+        yield test

diff -r 85173286740ba7b17687150bb6ddfa570ce0f7b3 -r 23b8401e7e047f1caa00c774a1d3e2e1c8f98162 yt/frontends/owls/tests/test_outputs.py
--- a/yt/frontends/owls/tests/test_outputs.py
+++ b/yt/frontends/owls/tests/test_outputs.py
@@ -15,44 +15,32 @@
 #-----------------------------------------------------------------------------
 
 from yt.testing import \
-    assert_equal, \
     requires_file
 from yt.utilities.answer_testing.framework import \
     requires_ds, \
     data_dir_load, \
-    PixelizedProjectionValuesTest, \
-    FieldValuesTest, \
-    create_obj
+    sph_answer
 from yt.frontends.owls.api import OWLSDataset
 
-_fields = (("deposit", "all_density"), ("deposit", "all_count"),
-           ("deposit", "PartType0_density"),
-           ("deposit", "PartType4_density"))
+os33 = "snapshot_033/snap_033.0.hdf5"
 
-os33 = "snapshot_033/snap_033.0.hdf5"
+_fields = (
+    ("gas", "density"),
+    ("gas", "temperature"),
+    ('gas', 'He_p0_number_density'),
+    ('gas', 'N_p1_number_density'),
+    ('gas', 'velocity_magnitude'),
+    ("deposit", "all_density"),
+    ("deposit", "all_count"),
+    ("deposit", "all_cic"),
+    ("deposit", "PartType0_density"),
+    ("deposit", "PartType4_density"))
+
+
 @requires_ds(os33, big_data=True)
 def test_snapshot_033():
-    ds = data_dir_load(os33)
-    yield assert_equal, str(ds), "snap_033"
-    dso = [ None, ("sphere", ("c", (0.1, 'unitary')))]
-    dd = ds.all_data()
-    yield assert_equal, dd["particle_position"].shape[0], 2*(128*128*128)
-    yield assert_equal, dd["particle_position"].shape[1], 3
-    tot = sum(dd[ptype,"particle_position"].shape[0]
-              for ptype in ds.particle_types if ptype != "all")
-    yield assert_equal, tot, (2*128*128*128)
-    for dobj_name in dso:
-        for field in _fields:
-            for axis in [0, 1, 2]:
-                for weight_field in [None, "density"]:
-                    yield PixelizedProjectionValuesTest(
-                        os33, axis, field, weight_field,
-                        dobj_name)
-            yield FieldValuesTest(os33, field, dobj_name)
-        dobj = create_obj(ds, dobj_name)
-        s1 = dobj["ones"].sum()
-        s2 = sum(mask.sum() for block, mask in dobj.blocks)
-        yield assert_equal, s1, s2
+    for test in sph_answer(os33, 'snap_033', 2*128**3, _fields):
+        yield test
 
 
 @requires_file(os33)

diff -r 85173286740ba7b17687150bb6ddfa570ce0f7b3 -r 23b8401e7e047f1caa00c774a1d3e2e1c8f98162 yt/frontends/tipsy/data_structures.py
--- a/yt/frontends/tipsy/data_structures.py
+++ b/yt/frontends/tipsy/data_structures.py
@@ -32,6 +32,7 @@
 from yt.utilities.physical_constants import \
     G, \
     cm_per_kpc
+from yt import YTQuantity
 
 from .fields import \
     TipsyFieldInfo
@@ -166,9 +167,9 @@
         self.domain_dimensions = np.ones(3, "int32") * nz
         periodic = self.parameters.get('bPeriodic', True)
         period = self.parameters.get('dPeriod', None)
-        comoving = self.parameters.get('bComove', False)
         self.periodicity = (periodic, periodic, periodic)
-        if comoving and period is None:
+        self.comoving = self.parameters.get('bComove', False)
+        if self.comoving and period is None:
             period = 1.0
         if self.bounding_box is None:
             if periodic and period is not None:
@@ -185,7 +186,9 @@
             self.domain_left_edge = bbox[:,0]
             self.domain_right_edge = bbox[:,1]
 
-        if comoving:
+        # If the cosmology parameters dictionary was set when the data
+        # was loaded, we can assume this is a cosmological data set
+        if self.comoving or self._cosmology_parameters is not None:
             cosm = self._cosmology_parameters or {}
             self.scale_factor = hvals["time"]#In comoving simulations, time stores the scale factor a
             self.cosmological_simulation = 1
@@ -223,8 +226,15 @@
             self.length_unit = self.quan(lu, 'kpc')*self.scale_factor
             self.mass_unit = self.quan(mu, 'Msun')
             density_unit = self.mass_unit/ (self.length_unit/self.scale_factor)**3
-            # Gasoline's hubble constant, dHubble0, is stored units of proper code time.
-            self.hubble_constant *= np.sqrt(G.in_units('kpc**3*Msun**-1*s**-2')*density_unit).value/(3.2407793e-18)  
+
+            # If self.comoving is set, we know this is a gasoline data set,
+            # and we do the conversion on the hubble constant.
+            if self.comoving:
+                # Gasoline's hubble constant, dHubble0, is stored in
+                # units of proper code time.
+                self.hubble_constant *= np.sqrt(G.in_units(
+                    'kpc**3*Msun**-1*s**-2') * density_unit).value / (
+                    3.2407793e-18)
             cosmo = Cosmology(self.hubble_constant,
                               self.omega_matter, self.omega_lambda)
             self.current_time = cosmo.hubble_time(self.current_redshift)
@@ -236,6 +246,24 @@
             density_unit = self.mass_unit / self.length_unit**3
         self.time_unit = 1.0 / np.sqrt(G * density_unit)
 
+        # If unit base is defined by the user, override all relevant units
+        if self._unit_base is not None:
+            length = self._unit_base.get('length', self.length_unit)
+            length = self.quan(*length) if isinstance(length, tuple) else self.quan(length)
+            self.length_unit = length
+
+            mass = self._unit_base.get('mass', self.mass_unit)
+            mass = self.quan(*mass) if isinstance(mass, tuple) else self.quan(mass)
+            self.mass_unit = mass
+
+            density_unit = self.mass_unit / self.length_unit**3
+            self.time_unit = 1.0 / np.sqrt(G * density_unit)
+
+            time = self._unit_base.get('time', self.time_unit)
+            time = self.quan(*time) if isinstance(time, tuple) else self.quan(time)
+            self.time_unit = time
+
+
     @staticmethod
     def _validate_header(filename):
         '''

diff -r 85173286740ba7b17687150bb6ddfa570ce0f7b3 -r 23b8401e7e047f1caa00c774a1d3e2e1c8f98162 yt/frontends/tipsy/fields.py
--- a/yt/frontends/tipsy/fields.py
+++ b/yt/frontends/tipsy/fields.py
@@ -37,7 +37,8 @@
         'FeMassFrac':("FeMassFrac", ("dimensionless", ["Fe_fraction"], None)),
         'c':("c", ("code_velocity", [""], None)),
         'acc':("acc", ("code_velocity / code_time", [""], None)),
-        'accg':("accg", ("code_velocity / code_time", [""], None))}
+        'accg':("accg", ("code_velocity / code_time", [""], None)),
+        'smoothlength':('smoothlength', ("code_length", ["smoothing_length"], None))}
 
     def __init__(self, ds, field_list, slice_info = None):
         for field in field_list:
@@ -59,15 +60,19 @@
 
     def setup_gas_particle_fields(self, ptype):
 
-        def _smoothing_length(field, data):
-            # For now, we hardcode num_neighbors.  We should make this configurable
-            # in the future.
-            num_neighbors = 64
-            fn, = add_nearest_neighbor_field(ptype, "particle_position", self, num_neighbors)
-            return data[ptype, 'nearest_neighbor_distance_%d' % num_neighbors]
+        num_neighbors = 65
+        fn, = add_nearest_neighbor_field(ptype, "particle_position", self, num_neighbors)
+        def _func():
+            def _smoothing_length(field, data):
+                # For now, we hardcode num_neighbors.  We should make this configurable
+                # in the future.
+                rv = data[ptype, 'nearest_neighbor_distance_%d' % num_neighbors]
+                #np.maximum(rv, 0.5*data[ptype, "Epsilon"], rv)
+                return rv
+            return _smoothing_length
 
         self.add_field(
             (ptype, "smoothing_length"),
-            function=_smoothing_length,
+            function=_func(),
             particle_type=True,
             units="code_length")

diff -r 85173286740ba7b17687150bb6ddfa570ce0f7b3 -r 23b8401e7e047f1caa00c774a1d3e2e1c8f98162 yt/frontends/tipsy/tests/test_outputs.py
--- a/yt/frontends/tipsy/tests/test_outputs.py
+++ b/yt/frontends/tipsy/tests/test_outputs.py
@@ -20,9 +20,10 @@
 from yt.utilities.answer_testing.framework import \
     requires_ds, \
     data_dir_load, \
-    PixelizedProjectionValuesTest, \
+    sph_answer, \
+    create_obj, \
     FieldValuesTest, \
-    create_obj
+    PixelizedProjectionValuesTest
 from yt.frontends.tipsy.api import TipsyDataset
 
 _fields = (("deposit", "all_density"),
@@ -62,9 +63,9 @@
         s2 = sum(mask.sum() for block, mask in dobj.blocks)
         yield assert_equal, s1, s2
 
-gasoline = "agora_1e11.00400/agora_1e11.00400"
-@requires_ds(gasoline, big_data = True, file_check = True)
-def test_gasoline():
+gasoline_dmonly = "agora_1e11.00400/agora_1e11.00400"
+@requires_ds(gasoline_dmonly, big_data = True, file_check = True)
+def test_gasoline_dmonly():
     cosmology_parameters = dict(current_redshift = 0.0,
                                 omega_lambda = 0.728,
                                 omega_matter = 0.272,
@@ -72,7 +73,7 @@
     kwargs = dict(cosmology_parameters = cosmology_parameters,
                   unit_base = {'length': (1.0/60.0, "Mpccm/h")},
                   n_ref = 64)
-    ds = data_dir_load(gasoline, TipsyDataset, (), kwargs)
+    ds = data_dir_load(gasoline_dmonly, TipsyDataset, (), kwargs)
     yield assert_equal, str(ds), "agora_1e11.00400"
     dso = [ None, ("sphere", ("c", (0.3, 'unitary')))]
     dd = ds.all_data()
@@ -93,7 +94,22 @@
         s2 = sum(mask.sum() for block, mask in dobj.blocks)
         yield assert_equal, s1, s2
 
+tg_fields = (
+    ('gas', 'density'),
+    ('gas', 'temperature'),
+    ('gas', 'velocity_magnitude'),
+    ('gas', 'Fe_fraction'),
+    ('Stars', 'Metals'),
+)
 
+tipsy_gal = 'TipsyGalaxy/galaxy.00300'
+@requires_ds(tipsy_gal)
+def test_tipsy_galaxy():
+    for test in sph_answer(tipsy_gal, 'galaxy.00300', 315372, tg_fields):
+        yield test
+        
+@requires_file(gasoline_dmonly)
 @requires_file(pkdgrav)
 def test_TipsyDataset():
     assert isinstance(data_dir_load(pkdgrav), TipsyDataset)
+    assert isinstance(data_dir_load(gasoline_dmonly), TipsyDataset)

diff -r 85173286740ba7b17687150bb6ddfa570ce0f7b3 -r 23b8401e7e047f1caa00c774a1d3e2e1c8f98162 yt/geometry/particle_deposit.pxd
--- a/yt/geometry/particle_deposit.pxd
+++ b/yt/geometry/particle_deposit.pxd
@@ -40,13 +40,14 @@
 
 cdef inline np.float64_t sph_kernel_cubic(np.float64_t x) nogil:
     cdef np.float64_t kernel
+    cdef np.float64_t C = 2.5464790894703255
     if x <= 0.5:
         kernel = 1.-6.*x*x*(1.-x)
     elif x>0.5 and x<=1.0:
         kernel = 2.*(1.-x)*(1.-x)*(1.-x)
     else:
         kernel = 0.
-    return kernel
+    return kernel * C
 
 ########################################################
 # Alternative SPH kernels for use with the Grid method #

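The constant C added to sph_kernel_cubic above is the standard 3D
normalization of the cubic spline kernel, sigma_3 = 8/pi, which makes the
kernel integrate to unity over its support. A one-line check:

    # 8/pi is the 3D cubic-spline normalization hard-coded above.
    import math
    assert abs(8.0 / math.pi - 2.5464790894703255) < 1e-15
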
diff -r 85173286740ba7b17687150bb6ddfa570ce0f7b3 -r 23b8401e7e047f1caa00c774a1d3e2e1c8f98162 yt/geometry/particle_smooth.pxd
--- a/yt/geometry/particle_smooth.pxd
+++ b/yt/geometry/particle_smooth.pxd
@@ -55,10 +55,12 @@
                                np.int64_t *pinds, np.int64_t *pcounts,
                                np.int64_t offset, np.float64_t **index_fields,
                                OctreeContainer octree, np.int64_t domain_id,
-                               int *nsize)
+                               int *nsize, np.float64_t *oct_left_edges,
+                               np.float64_t *oct_dds)
     cdef int neighbor_search(self, np.float64_t pos[3], OctreeContainer octree,
                              np.int64_t **nind, int *nsize, 
-                             np.int64_t nneighbors, np.int64_t domain_id, Oct **oct = ?)
+                             np.int64_t nneighbors, np.int64_t domain_id, 
+                             Oct **oct = ?, int extra_layer = ?)
     cdef void neighbor_process_particle(self, np.float64_t cpos[3],
                                np.float64_t *ppos,
                                np.float64_t **fields, 
@@ -78,7 +80,9 @@
                             np.int64_t *pcounts,
                             np.int64_t *pinds,
                             np.float64_t *ppos,
-                            np.float64_t cpos[3])
+                            np.float64_t cpos[3],
+                            np.float64_t* oct_left_edges,
+                            np.float64_t* oct_dds)
     cdef void process(self, np.int64_t offset, int i, int j, int k,
                       int dim[3], np.float64_t cpos[3], np.float64_t **fields,
                       np.float64_t **index_fields)

diff -r 85173286740ba7b17687150bb6ddfa570ce0f7b3 -r 23b8401e7e047f1caa00c774a1d3e2e1c8f98162 yt/geometry/particle_smooth.pyx
--- a/yt/geometry/particle_smooth.pyx
+++ b/yt/geometry/particle_smooth.pyx
@@ -154,6 +154,8 @@
         cdef np.ndarray[np.float64_t, ndim=1] tarr
         cdef np.ndarray[np.float64_t, ndim=4] iarr
         cdef np.ndarray[np.float64_t, ndim=2] cart_positions
+        cdef np.ndarray[np.float64_t, ndim=2] oct_left_edges, oct_dds
+        cdef OctInfo oinfo
         if geometry == "cartesian":
             self.pos_setup = cart_coord_setup
             cart_positions = positions
@@ -177,6 +179,8 @@
         numpart = positions.shape[0]
         # pcount is the number of particles per oct.
         pcount = np.zeros_like(pdom_ind)
+        oct_left_edges = np.zeros((pdom_ind.shape[0], 3), dtype='float64')
+        oct_dds = np.zeros_like(oct_left_edges)
         # doff is the offset to a given oct in the sorted particles.
         doff = np.zeros_like(pdom_ind) - 1
         doff_m = np.zeros((mdom_ind.shape[0], 2), dtype="int64")
@@ -202,10 +206,11 @@
         for i in range(3):
             self.DW[i] = (mesh_octree.DRE[i] - mesh_octree.DLE[i])
             self.periodicity[i] = periodicity[i]
+        cdef np.float64_t factor = (1 << (particle_octree.oref))
         for i in range(positions.shape[0]):
             for j in range(3):
                 pos[j] = positions[i, j]
-            oct = particle_octree.get(pos)
+            oct = particle_octree.get(pos, &oinfo)
             if oct == NULL or (domain_id > 0 and oct.domain != domain_id):
                 continue
             # Note that this has to be our local index, not our in-file index.
@@ -214,8 +219,11 @@
             offset = oct.domain_ind - moff_p
             pcount[offset] += 1
             pdoms[i] = offset # We store the *actual* offset.
-            oct = mesh_octree.get(pos)
-            offset = oct.domain_ind - moff_m
+            # store oct positions and dds to avoid searching for neighbors
+            # in octs that we know are too far away
+            for j in range(3):
+                oct_left_edges[offset, j] = oinfo.left_edge[j]
+                oct_dds[offset, j] = oinfo.dds[j] * factor
         # Now we have oct assignments.  Let's sort them.
         # Note that what we will be providing to our processing functions will
         # actually be indirectly-sorted fields.  This preserves memory at the
@@ -254,10 +262,11 @@
             visited[oct.domain_ind - moff_m] = 1
             if offset < 0: continue
             nproc += 1
-            self.neighbor_process(dims, moi.left_edge, moi.dds,
-                         cart_pos, field_pointers, doffs, &nind,
-                         pinds, pcounts, offset, index_field_pointers,
-                         particle_octree, domain_id, &nsize)
+            self.neighbor_process(
+                dims, moi.left_edge, moi.dds, cart_pos, field_pointers, doffs,
+                &nind, pinds, pcounts, offset, index_field_pointers,
+                particle_octree, domain_id, &nsize, &oct_left_edges[0, 0],
+                &oct_dds[0, 0])
         #print "VISITED", visited.sum(), visited.size,
         #print 100.0*float(visited.sum())/visited.size
         if nind != NULL:
@@ -394,11 +403,12 @@
     cdef int neighbor_search(self, np.float64_t pos[3], OctreeContainer octree,
                              np.int64_t **nind, int *nsize,
                              np.int64_t nneighbors, np.int64_t domain_id,
-                             Oct **oct = NULL):
+                             Oct **oct = NULL, int extra_layer = 0):
         cdef OctInfo oi
         cdef Oct *ooct
-        cdef Oct **neighbors
-        cdef int j
+        cdef Oct **neighbors, **first_layer
+        cdef int j, total_neighbors = 0, initial_layer = 0
+        cdef int layer_ind = 0
         cdef np.int64_t moff = octree.get_domain_offset(domain_id)
         ooct = octree.get(pos, &oi)
         if oct != NULL and ooct == oct[0]:
@@ -407,24 +417,53 @@
         if nind[0] == NULL:
             nsize[0] = 27
             nind[0] = <np.int64_t *> malloc(sizeof(np.int64_t)*nsize[0])
-        neighbors = octree.neighbors(&oi, &nneighbors, ooct, self.periodicity)
-        # Now we have all our neighbors.  And, we should be set for what
-        # else we need to do.
-        if nneighbors > nsize[0]:
-            nind[0] = <np.int64_t *> realloc(
-                nind[0], sizeof(np.int64_t)*nneighbors)
-            nsize[0] = nneighbors
+        # This is our "seed" set of neighbors.  If we are asked to, we will
+        # create a master list of neighbors that is much bigger and includes
+        # everything.
+        layer_ind = 0
+        first_layer = NULL
+        while 1:
+            neighbors = octree.neighbors(&oi, &nneighbors, ooct, self.periodicity)
+            # Now we have all our neighbors.  And, we should be set for what
+            # else we need to do.
+            if total_neighbors + nneighbors > nsize[0]:
+                nind[0] = <np.int64_t *> realloc(
+                    nind[0], sizeof(np.int64_t)*(nneighbors + total_neighbors))
+                nsize[0] = nneighbors + total_neighbors
+            for j in range(nneighbors):
+                # Particle octree neighbor indices
+                nind[0][j + total_neighbors] = neighbors[j].domain_ind - moff
+            total_neighbors += nneighbors
+            if extra_layer == 0:
+                # Not adding on any additional layers here.
+                free(neighbors)
+                neighbors = NULL
+                break
+            if initial_layer == 0:
+                initial_layer = nneighbors
+                first_layer = neighbors
+            else:
+                # Allocated internally; we free this in the loops if we aren't
+                # tracking it
+                free(neighbors)
+                neighbors = NULL
+            ooct = first_layer[layer_ind]
+            layer_ind += 1
+            if layer_ind == initial_layer:
+                neighbors
+                break
+            
 
-        for j in range(nneighbors):
+        for j in range(total_neighbors):
             # Particle octree neighbor indices
-            nind[0][j] = neighbors[j].domain_ind - moff
+            if nind[0][j] == -1: continue
             for n in range(j):
                 if nind[0][j] == nind[0][n]:
                     nind[0][j] = -1
-                break
         # This is allocated by the neighbors function, so we deallocate it.
-        free(neighbors)
-        return nneighbors
+        if first_layer != NULL:
+            free(first_layer)
+        return total_neighbors
 
     @cython.cdivision(True)
     @cython.boundscheck(False)
@@ -492,16 +531,51 @@
                             np.int64_t *pcounts,
                             np.int64_t *pinds,
                             np.float64_t *ppos,
-                            np.float64_t cpos[3]
+                            np.float64_t cpos[3],
+                            np.float64_t *oct_left_edges,
+                            np.float64_t *oct_dds,
                             ):
         # We are now given the number of neighbors, the indices into the
         # domains for them, and the number of particles for each.
-        cdef int ni, i, j
+        cdef int ni, i, j, k
         cdef np.int64_t offset, pn, pc
-        cdef np.float64_t pos[3]
+        cdef np.float64_t pos[3], cp, r2_trunc, r2, ex[2], DR[2], dist
         self.neighbor_reset()
         for ni in range(nneighbors):
             if nind[ni] == -1: continue
+            # terminate early if all 8 corners of oct are farther away than
+            # most distant currently known neighbor
+            if oct_left_edges != NULL and self.curn == self.maxn:
+                r2_trunc = self.neighbors[self.curn - 1].r2
+                # iterate over each dimension in the outer loop so we can
+                # consolidate temporary storage
+                # What this next bit does is figure out which component is the
+                # closest, of each possible permutation.
+                # k here is the dimension
+                r2 = 0.0
+                for k in range(3):
+                    # We start at left edge, then do halfway, then right edge.
+                    ex[0] = oct_left_edges[3*nind[ni] + k]
+                    ex[1] = ex[0] + oct_dds[3*nind[ni] + k]
+                    # There are three possibilities; we are between, left-of,
+                    # or right-of the extrema.  Thanks to
+                    # http://stackoverflow.com/questions/5254838/calculating-distance-between-a-point-and-a-rectangular-box-nearest-point
+                    # for some help.  This has been modified to account for
+                    # periodicity.
+                    dist = 0.0
+                    DR[0] = (ex[0] - cpos[k])
+                    DR[1] = (cpos[k] - ex[1])
+                    for j in range(2):
+                        if not self.periodicity[k]:
+                            pass
+                        elif (DR[j] > self.DW[k]/2.0):
+                            DR[j] -= self.DW[k]
+                        elif (DR[j] < -self.DW[k]/2.0):
+                            DR[j] += self.DW[k]
+                        dist = fmax(dist, DR[j])
+                    r2 += dist*dist
+                if r2 > r2_trunc:
+                    continue
             offset = doffs[nind[ni]]
             pc = pcounts[nind[ni]]
             for i in range(pc):
@@ -518,7 +592,8 @@
                                np.int64_t offset,
                                np.float64_t **index_fields,
                                OctreeContainer octree, np.int64_t domain_id,
-                               int *nsize):
+                               int *nsize, np.float64_t *oct_left_edges,
+                               np.float64_t *oct_dds):
         # Note that we assume that fields[0] == smoothing length in the native
         # units supplied.  We can now iterate over every cell in the block and
         # every particle to find the nearest.  We will use a priority heap.
@@ -534,9 +609,9 @@
                 for k in range(dim[2]):
                     self.pos_setup(cpos, opos)
                     nneighbors = self.neighbor_search(opos, octree,
-                                    nind, nsize, nneighbors, domain_id, &oct)
+                                    nind, nsize, nneighbors, domain_id, &oct, 0)
                     self.neighbor_find(nneighbors, nind[0], doffs, pcounts,
-                        pinds, ppos, opos)
+                                       pinds, ppos, opos, oct_left_edges, oct_dds)
                     # Now we have all our neighbors in our neighbor list.
                     if self.curn <-1*self.maxn:
                         ntot = nntot = 0
@@ -572,11 +647,18 @@
         cdef np.float64_t opos[3]
         self.pos_setup(cpos, opos)
         nneighbors = self.neighbor_search(opos, octree,
-                        nind, nsize, nneighbors, domain_id, &oct)
-        self.neighbor_find(nneighbors, nind[0], doffs, pcounts, pinds, ppos, opos)
+                        nind, nsize, nneighbors, domain_id, &oct, 0)
+        self.neighbor_find(nneighbors, nind[0], doffs, pcounts, pinds, ppos,
+                           opos, NULL, NULL)
         self.process(offset, i, j, k, dim, opos, fields, index_fields)
 
 cdef class VolumeWeightedSmooth(ParticleSmoothOperation):
+    # This smoothing function evaluates the field, *without* normalization, at
+    # every point in the *mesh*.  Applying a normalization results in
+    # non-conservation of mass when smoothing density; to avoid this, we do not
+    # apply this normalization factor.  The SPLASH paper
+    # (http://arxiv.org/abs/0709.0832v1) discusses this in detail; what we are
+    # applying here is equation 6, with variable smoothing lengths (eq 13).
     cdef np.float64_t **fp
     cdef public object vals
     def initialize(self):
@@ -588,18 +670,17 @@
             raise RuntimeError
         cdef np.ndarray tarr
         self.fp = <np.float64_t **> malloc(
-            sizeof(np.float64_t *) * (self.nfields - 2))
+            sizeof(np.float64_t *) * (self.nfields - 3))
         self.vals = []
-        for i in range(self.nfields - 2):
+        # We usually only allocate one field; if we are doing multiple field,
+        # single-pass smoothing, then we might have more.
+        for i in range(self.nfields - 3):
             tarr = np.zeros(self.nvals, dtype="float64", order="F")
             self.vals.append(tarr)
             self.fp[i] = <np.float64_t *> tarr.data
 
     def finalize(self):
         free(self.fp)
-        vv = self.vals.pop(-1)
-        for v in self.vals:
-            v /= vv
         return self.vals
 
     @cython.cdivision(True)
@@ -612,11 +693,13 @@
         # We also have a list of neighboring particles with particle numbers.
         cdef int n, fi
         cdef np.float64_t weight, r2, val, hsml, dens, mass, coeff, max_r
+        cdef np.float64_t max_hsml, ihsml, ihsml3, kern
         coeff = 0.0
         cdef np.int64_t pn
         # We get back our mass
         # rho_i = sum(j = 1 .. n) m_j * W_ij
         max_r = sqrt(self.neighbors[self.curn-1].r2)
+        max_hsml = index_fields[0][gind(i,j,k,dim) + offset]
         for n in range(self.curn):
             # No normalization for the moment.
             # fields[0] is the smoothing length.
@@ -625,18 +708,22 @@
             # Smoothing kernel weight function
             mass = fields[0][pn]
             hsml = fields[1][pn]
+            dens = fields[2][pn]
             if hsml < 0:
                 hsml = max_r
             if hsml == 0: continue
+            ihsml = 1.0/hsml
+            hsml = fmax(max_hsml/2.0, hsml)
+            ihsml3 = ihsml*ihsml*ihsml
             # Usually this density has been computed
-            dens = fields[2][pn]
             if dens == 0.0: continue
-            weight = mass * self.sph_kernel(sqrt(r2) / hsml) / dens
+            weight = (mass / dens) * ihsml3
+            kern = self.sph_kernel(sqrt(r2) * ihsml)
+            weight *= kern
             # Mass of the particle times the value
             for fi in range(self.nfields - 3):
                 val = fields[fi + 3][pn]
                 self.fp[fi][gind(i,j,k,dim) + offset] += val * weight
-            self.fp[self.nfields - 3][gind(i,j,k,dim) + offset] += weight
         return
 
 volume_weighted_smooth = VolumeWeightedSmooth

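The early-termination block added to neighbor_find above computes a lower
bound on the (optionally periodic) distance from the cell position to each
candidate oct's bounding box, and skips the oct when even that bound exceeds
r2_trunc, the squared distance to the most distant neighbor currently on the
heap. A pure-Python sketch of the same point-to-box test (illustrative names,
not the Cython API):

    def min_dist2_to_box(cpos, left_edge, dds, DW, periodicity):
        # Squared minimum distance from point cpos to the axis-aligned box
        # [left_edge, left_edge + dds]; axes flagged periodic wrap by DW.
        r2 = 0.0
        for k in range(3):
            lo = left_edge[k]
            hi = lo + dds[k]
            DR = [lo - cpos[k], cpos[k] - hi]  # positive only outside the box
            dist = 0.0
            for j in range(2):
                if periodicity[k]:
                    if DR[j] > DW[k] / 2.0:
                        DR[j] -= DW[k]
                    elif DR[j] < -DW[k] / 2.0:
                        DR[j] += DW[k]
                dist = max(dist, DR[j])
            r2 += dist * dist
        return r2
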
diff -r 85173286740ba7b17687150bb6ddfa570ce0f7b3 -r 23b8401e7e047f1caa00c774a1d3e2e1c8f98162 yt/utilities/answer_testing/framework.py
--- a/yt/utilities/answer_testing/framework.py
+++ b/yt/utilities/answer_testing/framework.py
@@ -388,12 +388,13 @@
 
     def run(self):
         obj = create_obj(self.ds, self.obj_type)
+        field = obj._determine_fields(self.field)[0]
         if self.particle_type:
-            weight_field = "particle_ones"
+            weight_field = (field[0], "particle_ones")
         else:
-            weight_field = "ones"
+            weight_field = ("index", "ones")
         avg = obj.quantities.weighted_average_quantity(
-            self.field, weight=weight_field)
+            field, weight=weight_field)
         mi, ma = obj.quantities.extrema(self.field)
         return np.array([avg, mi, ma])
 
@@ -817,6 +818,40 @@
                         ds_fn, axis, field, weight_field,
                         dobj_name)
 
+
+def sph_answer(ds_fn, ds_str_repr, ds_nparticles, fields, ds_kwargs=None):
+    if not can_run_ds(ds_fn):
+        return
+    if ds_kwargs is None:
+        ds_kwargs = {}
+    ds = data_dir_load(ds_fn, kwargs=ds_kwargs)
+    yield assert_equal, str(ds), ds_str_repr
+    dso = [None, ("sphere", ("c", (0.1, 'unitary')))]
+    dd = ds.all_data()
+    yield assert_equal, dd["particle_position"].shape, (ds_nparticles, 3)
+    tot = sum(dd[ptype, "particle_position"].shape[0]
+              for ptype in ds.particle_types if ptype != "all")
+    yield assert_equal, tot, ds_nparticles
+    for dobj_name in dso:
+        dobj = create_obj(ds, dobj_name)
+        s1 = dobj["ones"].sum()
+        s2 = sum(mask.sum() for block, mask in dobj.blocks)
+        yield assert_equal, s1, s2
+        for field in fields:
+            if field[0] in ds.particle_types:
+                particle_type = True
+            else:
+                particle_type = False
+            for axis in [0, 1, 2]:
+                for weight_field in [None, ('gas', 'density')]:
+                    if particle_type is False:
+                        yield PixelizedProjectionValuesTest(
+                            ds_fn, axis, field, weight_field,
+                            dobj_name)
+            yield FieldValuesTest(ds_fn, field, dobj_name,
+                                  particle_type=particle_type)
+    return
+
 def create_obj(ds, obj_type):
     # obj_type should be tuple of
     #  ( obj_name, ( args ) )

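The new sph_answer helper is consumed as a nose test generator; the gadget,
owls, and tipsy test diffs above all follow the same pattern. Schematically,
with placeholder values for the dataset path, string repr, particle count,
and field list:

    # Pattern sketch only; every literal here stands in for a frontend's values.
    @requires_ds("my_snapshot.hdf5")
    def test_my_snapshot():
        for test in sph_answer("my_snapshot.hdf5", 'my_snapshot', 2**17, _fields):
            yield test
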

https://bitbucket.org/yt_analysis/yt/commits/b931e5f93eff/
Changeset:   b931e5f93eff
Branch:      yt
User:        ngoldbaum
Date:        2015-09-19 03:45:21+00:00
Summary:     Fixing two typos
Affected #:  2 files

diff -r 23b8401e7e047f1caa00c774a1d3e2e1c8f98162 -r b931e5f93eff4fa4fd3fe5b406e3b83c46c49a76 yt/frontends/enzo/data_structures.py
--- a/yt/frontends/enzo/data_structures.py
+++ b/yt/frontends/enzo/data_structures.py
@@ -22,7 +22,7 @@
 import time
 import re
 
-from collection import defaultdict
+from collections import defaultdict
 from yt.extern.six.moves import zip as izip
 
 from yt.funcs import \

diff -r 23b8401e7e047f1caa00c774a1d3e2e1c8f98162 -r b931e5f93eff4fa4fd3fe5b406e3b83c46c49a76 yt/frontends/gadget_fof/data_structures.py
--- a/yt/frontends/gadget_fof/data_structures.py
+++ b/yt/frontends/gadget_fof/data_structures.py
@@ -21,7 +21,7 @@
 import glob
 import os
 
-from yt.frontends.gadget_fif.fields import \
+from yt.frontends.gadget_fof.fields import \
     GadgetFOFFieldInfo
 
 from yt.utilities.cosmology import \


https://bitbucket.org/yt_analysis/yt/commits/741858d31f01/
Changeset:   741858d31f01
Branch:      yt
User:        ngoldbaum
Date:        2015-09-19 17:13:11+00:00
Summary:     Use hopefully less confusing construction to filter enzo fields
Affected #:  1 file

diff -r b931e5f93eff4fa4fd3fe5b406e3b83c46c49a76 -r 741858d31f01d2b5f49e701fd81310c58ace571a yt/frontends/enzo/tests/test_outputs.py
--- a/yt/frontends/enzo/tests/test_outputs.py
+++ b/yt/frontends/enzo/tests/test_outputs.py
@@ -48,12 +48,12 @@
     dd = ds.all_data()
     dens_enzo = dd["Density"].copy()
     for f in sorted(ds.field_list):
-        if not (f[1].endswith("_Density") or 
-                f[1].startswith("Dark_Matter_") or
-                f[1].startswith("Electron_") or
-                f[1].startswith("SFR_") or 
-                f[1].startswith("Forming_Stellar_") or
-                f[1].startswith("Star_Particle_")):
+        ff = f[1]
+        if not ff.endswith("_Density"):
+            continue
+        start_strings = ["Electron_", "SFR_", "Forming_Stellar_",
+                         "Dark_Matter", "Star_Particle_"]
+        if any([ff.startswith(ss) for ss in start_strings]):
             continue
         dens_enzo -= dd[f]
     delta_enzo = np.abs(dens_enzo / dd["Density"])


https://bitbucket.org/yt_analysis/yt/commits/eb3ce6ec7a33/
Changeset:   eb3ce6ec7a33
Branch:      yt
User:        ngoldbaum
Date:        2015-09-19 17:39:46+00:00
Summary:     Use assert_almost_equal in a possibly finicky floating point comparison test
Affected #:  1 file

diff -r 741858d31f01d2b5f49e701fd81310c58ace571a -r eb3ce6ec7a338e5ba063aadabe02ba23c0b0e478 yt/utilities/answer_testing/framework.py
--- a/yt/utilities/answer_testing/framework.py
+++ b/yt/utilities/answer_testing/framework.py
@@ -646,10 +646,10 @@
         return result
 
     def compare(self, new_result, old_result):
-        err_msg = ("Analytic halo mass functions not equation for " +
+        err_msg = ("Analytic halo mass functions not equal for " +
                    "fitting function %d.") % self.fitting_function
-        assert_equal(new_result, old_result,
-                     err_msg=err_msg, verbose=True)
+        assert_almost_equal(new_result, old_result,
+                            err_msg=err_msg, verbose=True)
 
 def compare_image_lists(new_result, old_result, decimals):
     fns = ['old.png', 'new.png']


https://bitbucket.org/yt_analysis/yt/commits/5ba8977b8ada/
Changeset:   5ba8977b8ada
Branch:      yt
User:        ngoldbaum
Date:        2015-09-22 22:28:56+00:00
Summary:     Merging with mainline
Affected #:  11 files

diff -r eb3ce6ec7a338e5ba063aadabe02ba23c0b0e478 -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 MANIFEST.in
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,10 +1,10 @@
-include distribute_setup.py README* CREDITS COPYING.txt CITATION requirements.txt optional-requirements.txt
+include README* CREDITS COPYING.txt CITATION requirements.txt optional-requirements.txt
 include yt/visualization/mapserver/html/map_index.html
 include yt/visualization/mapserver/html/leaflet/*.css
 include yt/visualization/mapserver/html/leaflet/*.js
 include yt/visualization/mapserver/html/leaflet/images/*.png
 recursive-include yt *.py *.pyx *.pxd *.h README* *.txt LICENSE* *.cu
-recursive-include doc *.rst *.txt *.py *.ipynb *.png *.jpg *.css *.inc *.html
+recursive-include doc *.rst *.txt *.py *.ipynb *.png *.jpg *.css *.html
 recursive-include doc *.h *.c *.sh *.svgz *.pdf *.svg *.pyx
 include doc/README doc/activate doc/activate.csh doc/cheatsheet.tex
 include doc/extensions/README doc/Makefile
@@ -12,5 +12,3 @@
 prune doc/build
 recursive-include yt/analysis_modules/halo_finding/rockstar *.py *.pyx
 prune yt/frontends/_skeleton
-prune tests
-exclude clean.sh .hgchurn

diff -r eb3ce6ec7a338e5ba063aadabe02ba23c0b0e478 -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 yt/data_objects/grid_patch.py
--- a/yt/data_objects/grid_patch.py
+++ b/yt/data_objects/grid_patch.py
@@ -136,6 +136,8 @@
         # that dx=dy=dz, at least here.  We probably do elsewhere.
         id = self.id - self._id_offset
         if self.Parent is not None:
+            if not hasattr(self.Parent, 'dds'):
+                self.Parent._setup_dx()
             self.dds = self.Parent.dds.ndarray_view() / self.ds.refine_by
         else:
             LE, RE = self.index.grid_left_edge[id,:], \

diff -r eb3ce6ec7a338e5ba063aadabe02ba23c0b0e478 -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 yt/frontends/artio/data_structures.py
--- a/yt/frontends/artio/data_structures.py
+++ b/yt/frontends/artio/data_structures.py
@@ -178,7 +178,8 @@
         """
         Returns (in code units) the smallest cell size in the simulation.
         """
-        return 1.0/(2**self.max_level)
+        return (self.dataset.domain_width /
+                (self.dataset.domain_dimensions * 2**(self.max_level))).min()
 
     def convert(self, unit):
         return self.dataset.conversion_factors[unit]

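The corrected smallest-cell-size method above now folds in the root-grid
dimensions: the smallest cell size is the per-axis minimum of
domain_width / (domain_dimensions * 2**max_level), not 1/2**max_level. A
quick check with hypothetical numbers (unit-width domain, 128**3 root grid,
5 levels of refinement):

    import numpy as np

    domain_width = np.array([1.0, 1.0, 1.0])
    domain_dimensions = np.array([128, 128, 128])
    max_level = 5
    dx_min = (domain_width / (domain_dimensions * 2**max_level)).min()
    assert dx_min == 1.0 / 4096   # 128 root cells * 2**5 refinement = 4096
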
diff -r eb3ce6ec7a338e5ba063aadabe02ba23c0b0e478 -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 yt/frontends/chombo/fields.py
--- a/yt/frontends/chombo/fields.py
+++ b/yt/frontends/chombo/fields.py
@@ -185,27 +185,10 @@
             for alias in aliases:
                 self.alias((ptype, alias), (ptype, f), units = output_units)
 
-        # We'll either have particle_position or particle_position_[xyz]
-        if (ptype, "particle_position") in self.field_list or \
-           (ptype, "particle_position") in self.field_aliases:
-            particle_scalar_functions(ptype,
-                   "particle_position", "particle_velocity",
-                   self)
-        else:
-            # We need to check to make sure that there's a "known field" that
-            # overlaps with one of the vector fields.  For instance, if we are
-            # in the Stream frontend, and we have a set of scalar position
-            # fields, they will overlap with -- and be overridden by -- the
-            # "known" vector field that the frontend creates.  So the easiest
-            # thing to do is to simply remove the on-disk field (which doesn't
-            # exist) and replace it with a derived field.
-            if (ptype, "particle_position") in self and \
-                 self[ptype, "particle_position"]._function == NullFunc:
-                self.pop((ptype, "particle_position"))
-            particle_vector_functions(ptype,
-                    ["particle_position_%s" % ax for ax in 'xyz'],
-                    ["particle_velocity_%s" % ax for ax in 'xyz'],
-                    self)
+        ppos_fields = ["particle_position_%s" % ax for ax in 'xyz']
+        pvel_fields = ["particle_velocity_%s" % ax for ax in 'xyz']
+        particle_vector_functions(ptype, ppos_fields, pvel_fields, self)
+
         particle_deposition_functions(ptype, "particle_position",
             "particle_mass", self)
         standard_particle_fields(self, ptype)
@@ -219,7 +202,7 @@
             self.add_output_field(field, 
                                   units = self.ds.field_units.get(field, ""),
                                   particle_type = True)
-        self.setup_smoothed_fields(ptype, 
+        self.setup_smoothed_fields(ptype,
                                    num_neighbors=num_neighbors,
                                    ftype=ftype)
 

diff -r eb3ce6ec7a338e5ba063aadabe02ba23c0b0e478 -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 yt/frontends/stream/tests/test_outputs.py
--- a/yt/frontends/stream/tests/test_outputs.py
+++ b/yt/frontends/stream/tests/test_outputs.py
@@ -19,7 +19,7 @@
 import unittest
 
 from yt.testing import assert_raises
-from yt.utilities.answer_testing.framework import data_dir_load
+from yt.convenience import load
 from yt.utilities.exceptions import YTOutputNotIdentified
 
 class TestEmptyLoad(unittest.TestCase):
@@ -40,6 +40,6 @@
         shutil.rmtree(self.tmpdir)
 
     def test_load_empty_file(self):
-        assert_raises(YTOutputNotIdentified, data_dir_load, "not_a_file")
-        assert_raises(YTOutputNotIdentified, data_dir_load, "empty_file")
-        assert_raises(YTOutputNotIdentified, data_dir_load, "empty_directory")
+        assert_raises(YTOutputNotIdentified, load, "not_a_file")
+        assert_raises(YTOutputNotIdentified, load, "empty_file")
+        assert_raises(YTOutputNotIdentified, load, "empty_directory")

diff -r eb3ce6ec7a338e5ba063aadabe02ba23c0b0e478 -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 yt/units/setup.py
--- a/yt/units/setup.py
+++ b/yt/units/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 
 
 def configuration(parent_package='', top_path=None):

diff -r eb3ce6ec7a338e5ba063aadabe02ba23c0b0e478 -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 yt/units/tests/test_units.py
--- a/yt/units/tests/test_units.py
+++ b/yt/units/tests/test_units.py
@@ -225,7 +225,7 @@
 
     """
     try:
-        u1 = Unit(Symbol("jigawatts"))
+        Unit(Symbol("jigawatts"))
     except UnitParseError:
         yield assert_true, True
     else:
@@ -237,7 +237,7 @@
 
     """
     try:
-        u1 = Unit([1])  # something other than Expr and str
+        Unit([1])  # something other than Expr and str
     except UnitParseError:
         yield assert_true, True
     else:
@@ -249,7 +249,7 @@
 
     """
     try:
-        u1 = Unit("a", base_value=1, dimensions="(mass)")
+        Unit("a", base_value=1, dimensions="(mass)")
     except UnitParseError:
         yield assert_true, True
     else:
@@ -264,7 +264,7 @@
     a = Symbol("a")
 
     try:
-        u1 = Unit("a", base_value=1, dimensions=a)
+        Unit("a", base_value=1, dimensions=a)
     except UnitParseError:
         pass
     else:
@@ -277,7 +277,7 @@
 
     """
     try:
-        u1 = Unit("a", base_value="a", dimensions=(mass/time))
+        Unit("a", base_value="a", dimensions=(mass/time))
     except UnitParseError:
         yield assert_true, True
     else:
@@ -454,7 +454,7 @@
     assert_raises(InvalidUnitOperation, operator.mul, u1, u2)
     assert_raises(InvalidUnitOperation, operator.truediv, u1, u2)
 
-def test_comoving_labels():
+def test_comoving_and_code_unit_labels():
     ds = fake_random_ds(64, nprocs=1)
 
     # create a fake comoving unit
@@ -464,3 +464,10 @@
     test_unit = Unit('Mpccm', registry=ds.unit_registry)
     assert_almost_equal(test_unit.base_value, cm_per_mpc/3)
     assert_equal(test_unit.latex_repr, r'\rm{Mpc}/(1+z)')
+
+    test_unit = Unit('code_mass', registry=ds.unit_registry)
+    assert_equal(test_unit.latex_repr, '\\rm{code\\ mass}')
+
+    test_unit = Unit('code_mass/code_length**3', registry=ds.unit_registry)
+    assert_equal(test_unit.latex_repr,
+                 '\\frac{\\rm{code\\ mass}}{\\rm{code\\ length}^{3}}')

diff -r eb3ce6ec7a338e5ba063aadabe02ba23c0b0e478 -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 yt/units/unit_object.py
--- a/yt/units/unit_object.py
+++ b/yt/units/unit_object.py
@@ -18,7 +18,7 @@
     Pow, Symbol, Integer, \
     Float, Basic, Rational, sqrt
 from sympy.core.numbers import One
-from sympy import sympify, latex, symbols
+from sympy import sympify, latex
 from sympy.parsing.sympy_parser import \
     parse_expr, auto_number, rationalize
 from keyword import iskeyword
@@ -32,7 +32,6 @@
 from yt.utilities.exceptions import YTUnitsNotReducible
 
 import copy
-import string
 import token
 
 class UnitParseError(Exception):

diff -r eb3ce6ec7a338e5ba063aadabe02ba23c0b0e478 -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 yt/units/unit_registry.py
--- a/yt/units/unit_registry.py
+++ b/yt/units/unit_registry.py
@@ -63,7 +63,7 @@
 
         if tex_repr is None:
             # make educated guess that will look nice in most cases
-            tex_repr = r"\rm{" + symbol + "}"
+            tex_repr = r"\rm{" + symbol.replace('_', '\ ') + "}"
 
         # Add to lut
         self.lut.update({symbol: (base_value, dimensions, offset, tex_repr)})

diff -r eb3ce6ec7a338e5ba063aadabe02ba23c0b0e478 -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 yt/units/yt_array.py
--- a/yt/units/yt_array.py
+++ b/yt/units/yt_array.py
@@ -40,7 +40,6 @@
 from yt.utilities.on_demand_imports import _astropy
 from sympy import Rational
 from yt.units.unit_lookup_table import \
-    unit_prefixes, prefixable_units, \
     default_unit_symbol_lut
 from yt.units.equivalencies import equivalence_registry
 from yt.utilities.logger import ytLogger as mylog
@@ -630,12 +629,12 @@
         # Converting from AstroPy Quantity
         u = arr.unit
         ap_units = []
-        for base, power in zip(u.bases, u.powers):
+        for base, exponent in zip(u.bases, u.powers):
             unit_str = base.to_string()
             # we have to do this because AstroPy is silly and defines
             # hour as "h"
             if unit_str == "h": unit_str = "hr"
-            ap_units.append("%s**(%s)" % (unit_str, Rational(power)))
+            ap_units.append("%s**(%s)" % (unit_str, Rational(exponent)))
         ap_units = "*".join(ap_units)
         if isinstance(arr.value, np.ndarray):
             return YTArray(arr.value, ap_units, registry=unit_registry)
@@ -675,9 +674,9 @@
         >>> c = yt.YTArray.from_pint(b)
         """
         p_units = []
-        for base, power in arr.units.items():
+        for base, exponent in arr.units.items():
             bs = convert_pint_units(base)
-            p_units.append("%s**(%s)" % (bs, Rational(power)))
+            p_units.append("%s**(%s)" % (bs, Rational(exponent)))
         p_units = "*".join(p_units)
         if isinstance(arr.magnitude, np.ndarray):
             return YTArray(arr.magnitude, p_units, registry=unit_registry)

diff -r eb3ce6ec7a338e5ba063aadabe02ba23c0b0e478 -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 yt/visualization/plot_container.py
--- a/yt/visualization/plot_container.py
+++ b/yt/visualization/plot_container.py
@@ -52,6 +52,7 @@
             args[0].plots[field].figure = None
             args[0].plots[field].axes = None
             args[0].plots[field].cax = None
+        args[0]._setup_plots()
         return rv
     return newfunc
 


https://bitbucket.org/yt_analysis/yt/commits/237abb87f026/
Changeset:   237abb87f026
Branch:      yt
User:        ngoldbaum
Date:        2015-10-07 15:43:48+00:00
Summary:     Merging with mainline
Affected #:  73 files

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 MANIFEST.in
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -3,6 +3,7 @@
 include yt/visualization/mapserver/html/leaflet/*.css
 include yt/visualization/mapserver/html/leaflet/*.js
 include yt/visualization/mapserver/html/leaflet/images/*.png
+exclude scripts/pr_backport.py
 recursive-include yt *.py *.pyx *.pxd *.h README* *.txt LICENSE* *.cu
 recursive-include doc *.rst *.txt *.py *.ipynb *.png *.jpg *.css *.html
 recursive-include doc *.h *.c *.sh *.svgz *.pdf *.svg *.pyx

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 doc/source/developing/index.rst
--- a/doc/source/developing/index.rst
+++ b/doc/source/developing/index.rst
@@ -21,6 +21,7 @@
    building_the_docs
    testing
    debugdrive
+   releasing
    creating_datatypes
    creating_derived_fields
    creating_derived_quantities

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 doc/source/developing/releasing.rst
--- /dev/null
+++ b/doc/source/developing/releasing.rst
@@ -0,0 +1,208 @@
+How to Do a Release
+-------------------
+
+Periodically, the yt development community issues new releases. Since yt follows
+`semantic versioning <http://semver.org/>`_, the type of release can be read off
+from the version number used. Version numbers should follow the scheme
+``MAJOR.MINOR.PATCH``. There are three kinds of possible releases:
+
+* Bugfix releases
+
+  These releases are regularly scheduled and should ideally happen approximately
+  once a month. These releases should contain only fixes for bugs discovered in
+  earlier releases and should not contain new features or API changes. Bugfix
+  releases should increment the ``PATCH`` version number. Bugfix releases should
+  *not* be generated by merging from the ``yt`` branch; instead, bugfix pull
+  requests should be manually backported using the PR backport script, described
+  below. Version ``3.2.2`` is a bugfix release.
+
+* Minor releases
+
+  These releases happen when new features are deemed ready to be merged into the
+  ``stable`` branch and should not happen on a regular schedule. Minor releases
+  can also include fixes for bugs if the fix is determined to be too invasive
+  for a bugfix release. Minor releases should *not* include
+  backwards-incompatible changes and should not change APIs.  If an API change
+  is deemed to be necessary, the old API should continue to function but might
+  trigger deprecation warnings. Minor releases should happen by merging the
+  ``yt`` branch into the ``stable`` branch. Minor releases should increment the
+  ``MINOR`` version number and reset the ``PATCH`` version number to zero.
+  Version ``3.3.0`` is a minor release.
+
+* Major releases
+
+  These releases happen when the development community decides to make major
+  backwards-incompatible changes. In principle a major version release could
+  include arbitrary changes to the library. Major version releases should only
+  happen after extensive discussion and vetting among the developer and user
+  community. Like minor releases, a major release should happen by merging the
+  ``yt`` branch into the ``stable`` branch. Major releases should increment the
+  ``MAJOR`` version number and reset the ``MINOR`` and ``PATCH`` version numbers
+  to zero. If it ever happens, version ``4.0.0`` will be a major release.
+
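+As an illustrative sketch (not part of the release tooling), the release type
+implied by a version bump can be read off by comparing version tuples under
+the scheme above:
+
+.. code-block:: python
+
+   # A minimal sketch, assuming the MAJOR.MINOR.PATCH scheme described above.
+   def release_type(old, new):
+       old_v = [int(p) for p in old.split('.')]
+       new_v = [int(p) for p in new.split('.')]
+       if new_v[0] > old_v[0]:
+           return 'major'
+       if new_v[1] > old_v[1]:
+           return 'minor'
+       return 'bugfix'
+
+   assert release_type('3.2.1', '3.2.2') == 'bugfix'
+   assert release_type('3.2.2', '3.3.0') == 'minor'
+   assert release_type('3.3.0', '4.0.0') == 'major'
+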
+The job of doing a release differs depending on the kind of release. Below, we
+describe the necessary steps for each kind of release in detail.
+
+Doing a Bugfix Release
+~~~~~~~~~~~~~~~~~~~~~~
+
+As described above, bugfix releases are regularly scheduled updates for minor
+releases to ensure fixes for bugs make their way out to users in a timely
+manner. Since bugfix releases should not include new features, we do not issue
+bugfix releases by simply merging from the development ``yt`` branch into the
+``stable`` branch.  Instead, we make use of the ``pr_backport.py`` script to
+manually cherry-pick bugfixes from the ``yt`` branch onto the ``stable``
+branch.
+
+The backport script issues interactive prompts to backport individual pull
+requests to the ``stable`` branch in a temporary clone of the main yt mercurial
+repository on bitbucket. The script is written this way to avoid editing
+history in a clone of the repository that a developer uses for day-to-day work
+and to avoid mixing work-in-progress changes with changes that have made their
+way to the "canonical" yt repository on bitbucket.
+
+Rather than automatically manipulating the temporary repository by scripting
+mercurial commands using ``python-hglib``, the script must be "operated" by a
+human who is ready to think carefully about what the script is telling them
+to do. Most operations will merely require copy/pasting a suggested mercurial
+command. However, some changes will require manual backporting.
+
+To run the backport script, first open two terminal windows. The first window
+will be used to run the backport script. The second terminal will be used to
+manipulate a temporary clone of the yt mercurial repository. In the first
+window, navigate to the ``scripts`` directory at the root of the yt repository
+and run the backport script,
+
+.. code-block:: bash
+
+   $ cd $YT_HG/scripts
+   $ python pr_backport.py
+
+You will then need to wait for about a minute (depending on the speed of your
+internet connection and bitbucket's servers) while the script makes a clone of
+the main yt repository and then gathers information about pull requests that
+have been merged since the last tagged release. Once this step finishes, you
+will be prompted to navigate to the temporary folder in a new separate terminal
+session. Do so, and then hit the enter key in the original terminal session.
+
+For each pull request merged since the last tagged release that was pointed at
+the "main" line of development (i.e. not the ``experimental`` bookmark), the
+script will prompt you with the PR number, title, description, and a suggested
+mercurial command to use to backport it. If the pull request consists of a
+single changeset, you will be prompted to use ``hg graft``. If it contains more
+than one changeset, you will be prompted to use ``hg rebase``. Note that
+``rebase`` is an optional extension for mercurial that is not turned on by
+default. To enable it, add a section like the following in your ``.hgrc`` file:
+
+.. code-block:: none
+
+   [extensions]
+   rebase=
+
+Since ``rebase`` is bundled with core mercurial, you do not need to specify a
+path to the extension; just say ``rebase=`` and mercurial will find the bundled
+version. Note also that mercurial does not automatically update to the tip of
+the rebased head after executing ``hg rebase``, so you will need to manually
+issue ``hg update stable`` to move your working directory to the new head of
+the stable branch. The backport script should prompt you with a suggestion to
+update as well.
+
+If the pull request contains merge commits, you must take care to *not* backport
+commits that merge with the main line of development on the ``yt`` branch. Doing
+so may bring unrelated changes, including new features, into a bugfix
+release. If the pull request you'd like to backport contains merge commits, the
+backport script should warn you to be extra careful.
+
+Once you've finished backporting, the script will let you know that you are done
+and warn you to push your work. The temporary repository you have been working
+with will be deleted as soon as the script exits, so take care to push your work
+on the ``stable`` branch to your fork on bitbucket. Once you've pushed to your
+fork, you will be able to issue a pull request containing the backported fixes
+just like any other yt pull request.
+
+Doing a Minor or Major Release
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This is much simpler than a bugfix release. All that needs to happen is that
+the ``yt`` branch gets merged into the ``stable`` branch and any conflicts are
+resolved, almost certainly in favor of the ``yt`` branch. This can be done
+either with a merge tool such as ``vimdiff`` or ``kdiff3``, or by telling
+mercurial to write merge markers. If you prefer merge markers, the following
+configuration options should be turned on in your ``hgrc`` to get more detail
+during the merge:
+
+.. code-block:: none
+
+   [ui]
+   merge = internal:merge3
+   mergemarkers = detailed
+
+The first option tells mercurial to write merge markers that show the state of
+the conflicted region of the code on both sides of the merge as well as the
+"base" most recent common ancestor changeset. The second option tells mercurial
+to add extra information about the code near the merge markers.
+
+
+Incrementing Version Numbers and Tagging a Release
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Before creating the tag for the release, you must increment the version numbers
+that are hard-coded in a few files in the yt source so that version metadata
+for the code is generated correctly. This includes things like ``yt.__version__``
+and the version that gets read by the Python Package Index (PyPI) infrastructure.
+
+The paths relative to the root of the repository for the three files that need
+to be edited are:
+
+* ``doc/source/conf.py``
+
+  The ``version`` and ``release`` variables need to be updated.
+
+* ``setup.py``
+
+  The ``VERSION`` variable needs to be updated.
+
+* ``yt/__init__.py``
+
+  The ``__version__`` variable must be updated.
+
+Once these files have been updated, commit these updates. This is the commit we
+will tag for the release.
+
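+A quick consistency check before committing can catch a missed file. The
+following is a minimal sketch, not part of the yt tooling, and the regular
+expression is an assumption about how the variables are quoted:
+
+.. code-block:: python
+
+   import re
+
+   def read_version(path, variable):
+       """Pull a quoted version string out of a source file."""
+       pattern = r'%s\s*=\s*[\'"]([^\'"]+)[\'"]' % variable
+       with open(path) as f:
+           return re.search(pattern, f.read()).group(1)
+
+   versions = {
+       'setup.py': read_version('setup.py', 'VERSION'),
+       'yt/__init__.py': read_version('yt/__init__.py', '__version__'),
+       'doc/source/conf.py': read_version('doc/source/conf.py', 'release'),
+   }
+   # All three hard-coded version strings should agree.
+   assert len(set(versions.values())) == 1, versions
+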
+To actually create the tag, issue the following command:
+
+.. code-block:: bash
+
+   hg tag <tag-name>
+
+Where ``<tag-name>`` follows the project's naming scheme for tags
+(e.g. ``yt-3.2.1``). Commit the tag, and you should be ready to upload the
+release to PyPI.
+
+If you are doing a minor or major version number release, you will also need to
+update back to the development branch and update the development version numbers
+in the same files.
+
+
+Uploading to PyPI
+~~~~~~~~~~~~~~~~~
+
+To actually upload the release to the Python Package Index, you just need to
+issue the following command:
+
+.. code-block:: bash
+
+   python setup.py sdist upload -r https://pypi.python.org/pypi
+
+You will be prompted for your PyPI credentials and then the package should
+upload. Note that for this to complete successfully, you will need an account on
+PyPI and that account will need to be registered as an "owner" of the yt
+package. Right now there are three owners: Matt Turk, Britton Smith, and Nathan
+Goldbaum.
+
+After the release is uploaded to PyPI, for all but bugfix releases you should
+send out an announcement e-mail to the yt mailing lists as well as other
+possibly interested mailing lists. In addition, you should contact John ZuHone
+about uploading binary wheels to PyPI for Windows and OS X users and contact
+Nathan Goldbaum about getting the Anaconda packages updated.

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 scripts/pr_backport.py
--- /dev/null
+++ b/scripts/pr_backport.py
@@ -0,0 +1,311 @@
+import hglib
+import requests
+import shutil
+import tempfile
+
+from datetime import datetime
+from distutils.version import LooseVersion
+from time import strptime, mktime
+
+MERGED_PR_ENDPOINT = ("http://bitbucket.org/api/2.0/repositories/yt_analysis/"
+                      "yt/pullrequests/?state=MERGED")
+
+YT_REPO = "https://bitbucket.org/yt_analysis/yt"
+
+
+def clone_new_repo(source=None):
+    """Clones a new copy of yt_analysis/yt and returns a path to it"""
+    path = tempfile.mkdtemp()
+    dest_repo_path = path+'/yt-backport'
+    if source is None:
+        source = YT_REPO
+    hglib.clone(source=source, dest=dest_repo_path)
+    with hglib.open(dest_repo_path) as client:
+        # Changesets that are on the yt branch but aren't topological ancestors
+        # of whichever changeset the experimental bookmark is pointing at
+        client.update('heads(branch(yt) - ::bookmark(experimental))')
+    return dest_repo_path
+
+
+def get_first_commit_after_last_major_release(repo_path):
+    """Returns the SHA1 hash of the first commit to the yt branch that wasn't
+    included in the last tagged release.
+    """
+    with hglib.open(repo_path) as client:
+        tags = client.log("reverse(tag())")
+        tags = sorted([LooseVersion(t[2]) for t in tags])
+        for t in tags[::-1]:
+            if t.version[0:2] != ['yt', '-']:
+                continue
+            if len(t.version) == 4 or t.version[4] == 0:
+                last_major_tag = t
+                break
+        last_before_release = client.log(
+            "last(ancestors(%s) and branch(yt))" % str(last_major_tag))
+        first_after_release = client.log(
+            "first(descendants(%s) and branch(yt) and not %s)"
+            % (last_before_release[0][1], last_before_release[0][1]))
+    return str(first_after_release[0][1][:12])
+
+
+def get_branch_tip(repo_path, branch, exclude=None):
+    """Returns the SHA1 hash of the most recent commit on the given branch"""
+    revset = "head() and branch(%s)" % branch
+    if exclude is not None:
+        revset += "and not %s" % exclude
+    with hglib.open(repo_path) as client:
+        change = client.log(revset)[0][1][:12]
+    return change
+
+
+def get_lineage_between_release_and_tip(repo_path, first, last):
+    """Returns the lineage of changesets that were at one point the public tip"""
+    with hglib.open(repo_path) as client:
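+        # Revset: changesets in first::last whose first parent is also in
+        # that range, plus last itself, i.e. the chain of commits that were
+        # at some point the branch tip, skipping merged-in side branches.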
+        lineage = client.log("'%s'::'%s' and p1('%s'::'%s') + '%s'"
+                             % (first, last, first, last, last))
+        return lineage
+
+
+def get_pull_requests_since_last_release(repo_path):
+    """Returns a list of pull requests made since the last tagged release"""
+    r = requests.get(MERGED_PR_ENDPOINT)
+    done = False
+    merged_prs = []
+    with hglib.open(repo_path) as client:
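+        # hglib log entries are (rev, node, tags, branch, author,
+        # description, date) tuples; index 6, the date, is compared against
+        # PR merge dates below to stop paging through the bitbucket API.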
+        last_tag = client.log("reverse(tag())")[0]
+    while not done:
+        if r.status_code != 200:
+            raise RuntimeError
+        data = r.json()
+        prs = data['values']
+        for pr in prs:
+            activity = requests.get(pr['links']['activity']['href']).json()
+            merge_date = None
+            for action in activity['values']:
+                if 'update' in action and action['update']['state'] == 'MERGED':
+                    merge_date = action['update']['date']
+                    merge_date = merge_date.split('.')[0]
+                    timestamp = mktime(strptime(merge_date, "%Y-%m-%dT%H:%M:%S"))
+                    merge_date = datetime.fromtimestamp(timestamp)
+                    break
+            if merge_date is None:
+                break
+            if merge_date < last_tag[6]:
+                done = True
+                break
+            merged_prs.append(pr)
+        if 'next' not in data:
+            break
+        r = requests.get(data['next'])
+    return merged_prs
+
+
+def cache_commit_data(prs):
+    """Avoid repeated calls to bitbucket API to get the list of commits per PR"""
+    commit_data = {}
+    for pr in prs:
+        data = requests.get(pr['links']['commits']['href']).json()
+        if data.keys() == [u'error']:
+            # this happens when commits have been stripped, e.g.
+            # https://bitbucket.org/yt_analysis/yt/pull-requests/1641
+            continue
+        done = False
+        commits = []
+        while not done:
+            commits.extend(data['values'])
+            if 'next' not in data:
+                done = True
+            else:
+                data = requests.get(data['next']).json()
+        commit_data[pr['id']] = commits
+    return commit_data
+
+
+def find_commit_in_prs(needle, commit_data, prs):
+    """Finds the commit `needle` PR in the commit_data dictionary
+
+    If found, returns the pr the needle commit is in. If the commit was not
+    part of the PRs in the dictionary, returns None.
+    """
+    for pr_id in commit_data:
+        commits = commit_data[pr_id]
+        for commit in commits:
+            if commit['hash'] == needle[1]:
+                pr = [pr for pr in prs if pr['id'] == pr_id][0]
+                return pr
+    return None
+
+
+def find_merge_commit_in_prs(needle, prs):
+    """Find the merge commit `needle` in the list of `prs`
+
+    If found, returns the pr the merge commit comes from. If not found, return
+    None
+    """
+    for pr in prs[::-1]:
+        if pr['merge_commit'] is not None:
+            if pr['merge_commit']['hash'] == needle[1][:12]:
+                return pr
+    return None
+
+
+def create_commits_to_prs_mapping(lineage, prs):
+    """create a mapping from commits to the pull requests that the commit is
+    part of
+    """
+    commits_to_prs = {}
+    # make a copy of this list to avoid side effects from calling this function
+    my_prs = list(prs)
+    commit_data = cache_commit_data(my_prs)
+    for commit in lineage:
+        cset_hash = commit[1]
+        message = commit[5]
+        if message.startswith('Merged in') and '(pull request #' in message:
+            pr = find_merge_commit_in_prs(commit, my_prs)
+            if pr is None:
+                continue
+            commits_to_prs[cset_hash] = pr
+            # Since we know this PR won't have another commit associated with it,
+            # remove from global list to reduce number of network accesses
+            my_prs.remove(commits_to_prs[cset_hash])
+        else:
+            pr = find_commit_in_prs(commit, commit_data, my_prs)
+            commits_to_prs[cset_hash] = pr
+    return commits_to_prs
+
+
+def invert_commits_to_prs_mapping(commits_to_prs):
+    """invert the mapping from individual commits to pull requests"""
+    inv_map = {}
+    for k, v in commits_to_prs.iteritems():
+        # can't save v itself in inv_map since it's an unhashable dictionary
+        if v is not None:
+            created_date = v['created_on'].split('.')[0]
+            timestamp = mktime(strptime(created_date, "%Y-%m-%dT%H:%M:%S"))
+            created_date = datetime.fromtimestamp(timestamp)
+            pr_desc = (v['id'], v['title'], created_date,
+                       v['links']['html']['href'], v['description'])
+        else:
+            pr_desc = None
+        inv_map[pr_desc] = inv_map.get(pr_desc, [])
+        inv_map[pr_desc].append(k)
+    return inv_map
+
+
+def get_last_descendant(repo_path, commit):
+    """get the most recent descendant of a commit"""
+    with hglib.open(repo_path) as client:
+        com = client.log('last(%s::)' % commit)
+    return com[0][1][:12]
+
+def screen_already_backported(repo_path, inv_map):
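+    """Remove pull requests that were already backported to stable.
+
+    A PR is screened out if a commit on the stable branch since the most
+    recent major tag has a message starting with the "Backporting PR #<id>"
+    text generated by this script.
+    """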
+    with hglib.open(repo_path) as client:
+        tags = client.log("reverse(tag())")
+        major_tags = [t for t in tags if t[2].endswith('.0')]
+        most_recent_major_tag_name = major_tags[0][2]
+        lineage = client.log(
+            "descendants(%s) and branch(stable)" % most_recent_major_tag_name)
+        prs_to_screen = []
+        for pr in inv_map:
+            for commit in lineage:
+                if commit[5].startswith('Backporting PR #%s' % pr[0]):
+                    prs_to_screen.append(pr)
+        for pr in prs_to_screen:
+            del inv_map[pr]
+        return inv_map
+
+def commit_already_on_stable(repo_path, commit):
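+    """Check whether a commit's changes are already on the stable branch.
+
+    The commit message is used as a proxy: a stable-branch commit with an
+    identical message is assumed to be a graft of this commit.
+    """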
+    with hglib.open(repo_path) as client:
+        commit_info = client.log(commit)[0]
+        most_recent_tag_name = client.log("reverse(tag())")[0][2]
+        lineage = client.log(
+            "descendants(%s) and branch(stable)" % most_recent_tag_name)
+        # if there is a stable commit with the same commit message,
+        # it's been grafted
+        if any([commit_info[5] == c[5] for c in lineage]):
+            return True
+        return False
+
+def backport_pr_commits(repo_path, inv_map, last_stable, prs):
+    """backports pull requests to the stable branch.
+
+    Accepts a dictionary mapping pull requests to a list of commits that
+    are in the pull request.
+    """
+    pr_list = inv_map.keys()
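+    # pr_desc tuples are (id, title, created_date, link, description); sort
+    # by creation date so PRs are backported in chronological order.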
+    pr_list = sorted(pr_list, key=lambda x: x[2])
+    for pr_desc in pr_list:
+        merge_warn = False
+        merge_commits = []
+        pr = [pr for pr in prs if pr['id'] == pr_desc[0]][0]
+        data = requests.get(pr['links']['commits']['href']).json()
+        commits = data['values']
+        while 'next' in data:
+            data = requests.get(data['next']).json()
+            commits.extend(data['values'])
+        commits = [com['hash'][:12] for com in commits]
+        with hglib.open(repo_path) as client:
+            for com in commits:
+                if client.log('merge() and %s' % com) != []:
+                    merge_warn = True
+                    merge_commits.append(com)
+        if len(commits) > 1:
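+            # Join the PR's commits into one revset so the suggested
+            # "hg rebase --keep --collapse" backports them as a single
+            # changeset.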
+            revset = " | ".join(commits)
+            revset = '"%s"' % revset
+            message = "Backporting PR #%s %s" % \
+                (pr['id'], pr['links']['html']['href'])
+            dest = get_last_descendant(repo_path, last_stable)
+            message = \
+                "hg rebase -r %s --keep --collapse -m \"%s\" -d %s\n" % \
+                (revset, message, dest)
+            message += "hg update stable\n\n"
+            if merge_warn is True:
+                if len(merge_commits) > 1:
+                    merge_commits = ", ".join(merge_commits)
+                else:
+                    merge_commits = merge_commits[0]
+                message += \
+                    "WARNING, PULL REQUEST CONTAINS MERGE COMMITS, CONSIDER\n" \
+                    "BACKPORTING BY HAND TO AVOID BACKPORTING UNWANTED CHANGES\n"
+                message += \
+                    "Merge commits are %s\n\n" % merge_commits
+        else:
+            if commit_already_on_stable(repo_path, commits[0]) is True:
+                continue
+            message = "hg graft %s\n" % commits[0]
+        print "PR #%s\nTitle: %s\nCreated on: %s\nLink: %s\n%s" % pr_desc
+        print "To backport, issue the following command(s):\n"
+        print message
+        raw_input('Press any key to continue')
+
+
+if __name__ == "__main__":
+    print ""
+    print "Gathering PR information, this may take a minute."
+    print "Don't worry, yt loves you."
+    print ""
+    repo_path = clone_new_repo()
+    try:
+        last_major_release = get_first_commit_after_last_major_release(repo_path)
+        last_dev = get_branch_tip(repo_path, 'yt', 'experimental')
+        last_stable = get_branch_tip(repo_path, 'stable')
+        lineage = get_lineage_between_release_and_tip(
+            repo_path, last_major_release, last_dev)
+        prs = get_pull_requests_since_last_release(repo_path)
+        commits_to_prs = create_commits_to_prs_mapping(lineage, prs)
+        inv_map = invert_commits_to_prs_mapping(commits_to_prs)
+        # for now, ignore commits that aren't part of a pull request since
+        # the last bugfix release. These are mostly commits in pull requests
+        # from before the last bugfix release but might include commits that
+        # were pushed directly to the repo.
+        del inv_map[None]
+
+        inv_map = screen_already_backported(repo_path, inv_map)
+        print "In another terminal window, navigate to the following path:"
+        print "%s" % repo_path
+        raw_input("Press any key to continue")
+        backport_pr_commits(repo_path, inv_map, last_stable, prs)
+        raw_input(
+            "Now you need to push your backported changes. The temporary\n"
+            "repository currently being used will be deleted as soon as you\n"
+            "press any key.")
+    finally:
+        shutil.rmtree(repo_path)

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 scripts/yt_lodgeit.py
--- a/scripts/yt_lodgeit.py
+++ /dev/null
@@ -1,320 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-    LodgeIt!
-    ~~~~~~~~
-
-    A script that pastes stuff into the yt-project pastebin on
-    paste.yt-project.org.
-
-    Modified (very, very slightly) from the original script by the authors
-    below.
-
-    .lodgeitrc / _lodgeitrc
-    -----------------------
-
-    Under UNIX create a file called ``~/.lodgeitrc``, under Windows
-    create a file ``%APPDATA%/_lodgeitrc`` to override defaults::
-
-        language=default_language
-        clipboard=true/false
-        open_browser=true/false
-        encoding=fallback_charset
-
-    :authors: 2007-2008 Georg Brandl <georg at python.org>,
-              2006 Armin Ronacher <armin.ronacher at active-4.com>,
-              2006 Matt Good <matt at matt-good.net>,
-              2005 Raphael Slinckx <raphael at slinckx.net>
-"""
-import os
-import sys
-from optparse import OptionParser
-
-
-SCRIPT_NAME = os.path.basename(sys.argv[0])
-VERSION = '0.3'
-SERVICE_URL = 'http://paste.yt-project.org/'
-SETTING_KEYS = ['author', 'title', 'language', 'private', 'clipboard',
-                'open_browser']
-
-# global server proxy
-_xmlrpc_service = None
-
-
-def fail(msg, code):
-    """Bail out with an error message."""
-    print >> sys.stderr, 'ERROR: %s' % msg
-    sys.exit(code)
-
-
-def load_default_settings():
-    """Load the defaults from the lodgeitrc file."""
-    settings = {
-        'language':     None,
-        'clipboard':    True,
-        'open_browser': False,
-        'encoding':     'iso-8859-15'
-    }
-    rcfile = None
-    if os.name == 'posix':
-        rcfile = os.path.expanduser('~/.lodgeitrc')
-    elif os.name == 'nt' and 'APPDATA' in os.environ:
-        rcfile = os.path.expandvars(r'$APPDATA\_lodgeitrc')
-    if rcfile:
-        try:
-            f = open(rcfile)
-            for line in f:
-                if line.strip()[:1] in '#;':
-                    continue
-                p = line.split('=', 1)
-                if len(p) == 2:
-                    key = p[0].strip().lower()
-                    if key in settings:
-                        if key in ('clipboard', 'open_browser'):
-                            settings[key] = p[1].strip().lower() in \
-                                            ('true', '1', 'on', 'yes')
-                        else:
-                            settings[key] = p[1].strip()
-            f.close()
-        except IOError:
-            pass
-    settings['tags'] = []
-    settings['title'] = None
-    return settings
-
-
-def make_utf8(text, encoding):
-    """Convert a text to UTF-8, brute-force."""
-    try:
-        u = unicode(text, 'utf-8')
-        uenc = 'utf-8'
-    except UnicodeError:
-        try:
-            u = unicode(text, encoding)
-            uenc = 'utf-8'
-        except UnicodeError:
-            u = unicode(text, 'iso-8859-15', 'ignore')
-            uenc = 'iso-8859-15'
-    try:
-        import chardet
-    except ImportError:
-        return u.encode('utf-8')
-    d = chardet.detect(text)
-    if d['encoding'] == uenc:
-        return u.encode('utf-8')
-    return unicode(text, d['encoding'], 'ignore').encode('utf-8')
-
-
-def get_xmlrpc_service():
-    """Create the XMLRPC server proxy and cache it."""
-    global _xmlrpc_service
-    import xmlrpclib
-    if _xmlrpc_service is None:
-        try:
-            _xmlrpc_service = xmlrpclib.ServerProxy(SERVICE_URL + 'xmlrpc/',
-                                                    allow_none=True)
-        except Exception, err:
-            fail('Could not connect to Pastebin: %s' % err, -1)
-    return _xmlrpc_service
-
-
-def copy_url(url):
-    """Copy the url into the clipboard."""
-    # try windows first
-    try:
-        import win32clipboard
-    except ImportError:
-        # then give pbcopy a try.  do that before gtk because
-        # gtk might be installed on os x but nobody is interested
-        # in the X11 clipboard there.
-        from subprocess import Popen, PIPE
-        try:
-            client = Popen(['pbcopy'], stdin=PIPE)
-        except OSError:
-            try:
-                import pygtk
-                pygtk.require('2.0')
-                import gtk
-                import gobject
-            except ImportError:
-                return
-            gtk.clipboard_get(gtk.gdk.SELECTION_CLIPBOARD).set_text(url)
-            gobject.idle_add(gtk.main_quit)
-            gtk.main()
-        else:
-            client.stdin.write(url)
-            client.stdin.close()
-            client.wait()
-    else:
-        win32clipboard.OpenClipboard()
-        win32clipboard.EmptyClipboard()
-        win32clipboard.SetClipboardText(url)
-        win32clipboard.CloseClipboard()
-
-
-def open_webbrowser(url):
-    """Open a new browser window."""
-    import webbrowser
-    webbrowser.open(url)
-
-
-def language_exists(language):
-    """Check if a language alias exists."""
-    xmlrpc = get_xmlrpc_service()
-    langs = xmlrpc.pastes.getLanguages()
-    return language in langs
-
-
-def get_mimetype(data, filename):
-    """Try to get MIME type from data."""
-    try:
-        import gnomevfs
-    except ImportError:
-        from mimetypes import guess_type
-        if filename:
-            return guess_type(filename)[0]
-    else:
-        if filename:
-            return gnomevfs.get_mime_type(os.path.abspath(filename))
-        return gnomevfs.get_mime_type_for_data(data)
-
-
-def print_languages():
-    """Print a list of all supported languages, with description."""
-    xmlrpc = get_xmlrpc_service()
-    languages = xmlrpc.pastes.getLanguages().items()
-    languages.sort(lambda a, b: cmp(a[1].lower(), b[1].lower()))
-    print 'Supported Languages:'
-    for alias, name in languages:
-        print '    %-30s%s' % (alias, name)
-
-
-def download_paste(uid):
-    """Download a paste given by ID."""
-    xmlrpc = get_xmlrpc_service()
-    paste = xmlrpc.pastes.getPaste(uid)
-    if not paste:
-        fail('Paste "%s" does not exist.' % uid, 5)
-    print paste['code'].encode('utf-8')
-
-
-def create_paste(code, language, filename, mimetype, private):
-    """Create a new paste."""
-    xmlrpc = get_xmlrpc_service()
-    rv = xmlrpc.pastes.newPaste(language, code, None, filename, mimetype,
-                                private)
-    if not rv:
-        fail('Could not create paste. Something went wrong '
-             'on the server side.', 4)
-    return rv
-
-
-def compile_paste(filenames, langopt):
-    """Create a single paste out of zero, one or multiple files."""
-    def read_file(f):
-        try:
-            return f.read()
-        finally:
-            f.close()
-    mime = ''
-    lang = langopt or ''
-    if not filenames:
-        data = read_file(sys.stdin)
-        if not langopt:
-            mime = get_mimetype(data, '') or ''
-        fname = ""
-    elif len(filenames) == 1:
-        fname = filenames[0]
-        data = read_file(open(filenames[0], 'rb'))
-        if not langopt:
-            mime = get_mimetype(data, filenames[0]) or ''
-    else:
-        result = []
-        for fname in filenames:
-            data = read_file(open(fname, 'rb'))
-            if langopt:
-                result.append('### %s [%s]\n\n' % (fname, langopt))
-            else:
-                result.append('### %s\n\n' % fname)
-            result.append(data)
-            result.append('\n\n')
-        data = ''.join(result)
-        lang = 'multi'
-    return data, lang, fname, mime
-
-
-def main():
-    """Main script entry point."""
-
-    usage = ('Usage: %%prog [options] [FILE ...]\n\n'
-             'Read the files and paste their contents to %s.\n'
-             'If no file is given, read from standard input.\n'
-             'If multiple files are given, they are put into a single paste.'
-             % SERVICE_URL)
-    parser = OptionParser(usage=usage)
-
-    settings = load_default_settings()
-
-    parser.add_option('-v', '--version', action='store_true',
-                      help='Print script version')
-    parser.add_option('-L', '--languages', action='store_true', default=False,
-                      help='Retrieve a list of supported languages')
-    parser.add_option('-l', '--language', default=settings['language'],
-                      help='Used syntax highlighter for the file')
-    parser.add_option('-e', '--encoding', default=settings['encoding'],
-                      help='Specify the encoding of a file (default is '
-                           'utf-8 or guessing if available)')
-    parser.add_option('-b', '--open-browser', dest='open_browser',
-                      action='store_true',
-                      default=settings['open_browser'],
-                      help='Open the paste in a web browser')
-    parser.add_option('-p', '--private', action='store_true', default=False,
-                      help='Paste as private')
-    parser.add_option('--no-clipboard', dest='clipboard',
-                      action='store_false',
-                      default=settings['clipboard'],
-                      help="Don't copy the url into the clipboard")
-    parser.add_option('--download', metavar='UID',
-                      help='Download a given paste')
-
-    opts, args = parser.parse_args()
-
-    # special modes of operation:
-    # - paste script version
-    if opts.version:
-        print '%s: version %s' % (SCRIPT_NAME, VERSION)
-        sys.exit()
-    # - print list of languages
-    elif opts.languages:
-        print_languages()
-        sys.exit()
-    # - download Paste
-    elif opts.download:
-        download_paste(opts.download)
-        sys.exit()
-
-    # check language if given
-    if opts.language and not language_exists(opts.language):
-        fail('Language %s is not supported.' % opts.language, 3)
-
-    # load file(s)
-    try:
-        data, language, filename, mimetype = compile_paste(args, opts.language)
-    except Exception, err:
-        fail('Error while reading the file(s): %s' % err, 2)
-    if not data:
-        fail('Aborted, no content to paste.', 4)
-
-    # create paste
-    code = make_utf8(data, opts.encoding)
-    pid = create_paste(code, language, filename, mimetype, opts.private)
-    url = '%sshow/%s/' % (SERVICE_URL, pid)
-    print url
-    if opts.open_browser:
-        open_webbrowser(url)
-    if opts.clipboard:
-        copy_url(url)
-
-
-if __name__ == '__main__':
-    sys.exit(main())

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 setup.py
--- a/setup.py
+++ b/setup.py
@@ -164,7 +164,7 @@
     config.make_config_py()
     # config.make_svn_version_py()
     config.add_subpackage('yt', 'yt')
-    config.add_scripts("scripts/*")
+    config.add_scripts("scripts/iyt")
 
     return config
 

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/analysis_modules/absorption_spectrum/absorption_spectrum.py
--- a/yt/analysis_modules/absorption_spectrum/absorption_spectrum.py
+++ b/yt/analysis_modules/absorption_spectrum/absorption_spectrum.py
@@ -15,7 +15,7 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 import numpy as np
 
 from .absorption_line import tau_profile
@@ -159,7 +159,9 @@
         field_data = {}
         if use_peculiar_velocity:
             input_fields.append('velocity_los')
+            input_fields.append('redshift_eff')
             field_units["velocity_los"] = "cm/s"
+            field_units["redshift_eff"] = ""
         for feature in self.line_list + self.continuum_list:
             if not feature['field_name'] in input_fields:
                 input_fields.append(feature['field_name'])
@@ -204,11 +206,11 @@
 
         for continuum in self.continuum_list:
             column_density = field_data[continuum['field_name']] * field_data['dl']
-            delta_lambda = continuum['wavelength'] * field_data['redshift']
+            # redshift_eff field combines cosmological and velocity redshifts
             if use_peculiar_velocity:
-                # include factor of (1 + z) because our velocity is in proper frame.
-                delta_lambda += continuum['wavelength'] * (1 + field_data['redshift']) * \
-                    field_data['velocity_los'] / speed_of_light_cgs
+                delta_lambda = continuum['wavelength'] * field_data['redshift_eff']
+            else:
+                delta_lambda = continuum['wavelength'] * field_data['redshift']
             this_wavelength = delta_lambda + continuum['wavelength']
             right_index = np.digitize(this_wavelength, self.lambda_bins).clip(0, self.n_lambda)
             left_index = np.digitize((this_wavelength *
@@ -242,11 +244,11 @@
 
         for line in parallel_objects(self.line_list, njobs=njobs):
             column_density = field_data[line['field_name']] * field_data['dl']
-            delta_lambda = line['wavelength'] * field_data['redshift']
+            # redshift_eff field combines cosmological and velocity redshifts
             if use_peculiar_velocity:
-                # include factor of (1 + z) because our velocity is in proper frame.
-                delta_lambda += line['wavelength'] * (1 + field_data['redshift']) * \
-                    field_data['velocity_los'] / speed_of_light_cgs
+                delta_lambda = line['wavelength'] * field_data['redshift_eff']
+            else:
+                delta_lambda = line['wavelength'] * field_data['redshift']
             thermal_b =  np.sqrt((2 * boltzmann_constant_cgs *
                                   field_data['temperature']) /
                                   line['atomic_mass'])

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/analysis_modules/absorption_spectrum/absorption_spectrum_fit.py
--- a/yt/analysis_modules/absorption_spectrum/absorption_spectrum_fit.py
+++ b/yt/analysis_modules/absorption_spectrum/absorption_spectrum_fit.py
@@ -1,4 +1,4 @@
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 import numpy as np
 
 from yt.analysis_modules.absorption_spectrum.absorption_line import \

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/analysis_modules/cosmological_observation/light_cone/light_cone.py
--- a/yt/analysis_modules/cosmological_observation/light_cone/light_cone.py
+++ b/yt/analysis_modules/cosmological_observation/light_cone/light_cone.py
@@ -13,7 +13,7 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 import numpy as np
 import os
 

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/analysis_modules/cosmological_observation/light_ray/light_ray.py
--- a/yt/analysis_modules/cosmological_observation/light_ray/light_ray.py
+++ b/yt/analysis_modules/cosmological_observation/light_ray/light_ray.py
@@ -13,7 +13,7 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 import numpy as np
 
 from yt.analysis_modules.cosmological_observation.cosmology_splice import \
@@ -29,6 +29,7 @@
 from yt.utilities.parallel_tools.parallel_analysis_interface import \
     parallel_objects, \
     parallel_root_only
+from yt.utilities.physical_constants import speed_of_light_cgs
 
 class LightRay(CosmologySplice):
     """
@@ -365,7 +366,7 @@
         all_fields.extend(['dl', 'dredshift', 'redshift'])
         if get_los_velocity:
             all_fields.extend(['velocity_x', 'velocity_y',
-                               'velocity_z', 'velocity_los'])
+                               'velocity_z', 'velocity_los', 'redshift_eff'])
             data_fields.extend(['velocity_x', 'velocity_y', 'velocity_z'])
 
         all_ray_storage = {}
@@ -457,6 +458,28 @@
             sub_data['redshift'] = my_segment['redshift'] - \
               sub_data['dredshift'].cumsum() + sub_data['dredshift']
 
+            # When velocity_los is present, add effective redshift 
+            # (redshift_eff) field by combining cosmological redshift and 
+            # doppler redshift.
+            
+            # first convert los velocities to the comoving frame (i.e. multiply by (1+z)),
+            # then calculate doppler redshift:
+            # 1 + redshift_dopp = sqrt((1+v/c) / (1-v/c))
+
+            # then to add cosmological redshift and doppler redshift, follow
+            # eqn 3.75 in Peacock's Cosmological Physics:
+            # 1 + z_obs = (1 + z_cosmo) * (1 + z_doppler)
+            # Alternatively, see eqn 5.49 in Peebles for a similar result.
+            if get_los_velocity:
+
+                velocity_los_cm = (1 + sub_data['redshift']) * \
+                                  sub_data['velocity_los']
+                redshift_dopp = ((1 + velocity_los_cm / speed_of_light_cgs) /
+                                (1 - velocity_los_cm / speed_of_light_cgs))**(0.5) - 1
+                sub_data['redshift_eff'] = ((1 + redshift_dopp) * \
+                                           (1 + sub_data['redshift'])) - 1
+                del velocity_los_cm, redshift_dopp
+
             # Remove empty lixels.
             sub_dl_nonzero = sub_data['dl'].nonzero()
             for field in all_fields:

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/analysis_modules/halo_analysis/enzofof_merger_tree.py
--- a/yt/analysis_modules/halo_analysis/enzofof_merger_tree.py
+++ b/yt/analysis_modules/halo_analysis/enzofof_merger_tree.py
@@ -34,7 +34,7 @@
 
 
 import numpy as np
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 import glob
 import os
 

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/analysis_modules/halo_analysis/halo_callbacks.py
--- a/yt/analysis_modules/halo_analysis/halo_callbacks.py
+++ b/yt/analysis_modules/halo_analysis/halo_callbacks.py
@@ -13,7 +13,7 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 import numpy as np
 import os
 

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/analysis_modules/halo_analysis/halo_catalog.py
--- a/yt/analysis_modules/halo_analysis/halo_catalog.py
+++ b/yt/analysis_modules/halo_analysis/halo_catalog.py
@@ -13,7 +13,7 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 import numpy as np
 import os
 

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/analysis_modules/halo_finding/halo_objects.py
--- a/yt/analysis_modules/halo_finding/halo_objects.py
+++ b/yt/analysis_modules/halo_finding/halo_objects.py
@@ -14,7 +14,7 @@
 #-----------------------------------------------------------------------------
 
 import gc
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 import math
 import numpy as np
 import glob

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/analysis_modules/particle_trajectories/particle_trajectories.py
--- a/yt/analysis_modules/particle_trajectories/particle_trajectories.py
+++ b/yt/analysis_modules/particle_trajectories/particle_trajectories.py
@@ -22,7 +22,7 @@
 from collections import OrderedDict
 
 import numpy as np
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 
 class ParticleTrajectories(object):
     r"""A collection of particle trajectories in time over a series of

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/analysis_modules/photon_simulator/photon_models.py
--- a/yt/analysis_modules/photon_simulator/photon_models.py
+++ b/yt/analysis_modules/photon_simulator/photon_models.py
@@ -199,15 +199,24 @@
                 ei = start_e
                 for cn, Z in zip(number_of_photons[ibegin:iend], metalZ[ibegin:iend]):
                     if cn == 0: continue
+                    # The rather verbose form of the next few statements is a
+                    # result of code optimization and shouldn't be changed
+                    # without checking for performance degradation. See
+                    # https://bitbucket.org/yt_analysis/yt/pull-requests/1766
+                    # for details.
                     if self.method == "invert_cdf":
-                        cumspec = cumspec_c + Z*cumspec_m
-                        cumspec /= cumspec[-1]
+                        cumspec = cumspec_c
+                        cumspec += Z * cumspec_m
+                        norm_factor = 1.0 / cumspec[-1]
+                        cumspec *= norm_factor
                         randvec = np.random.uniform(size=cn)
                         randvec.sort()
                         cell_e = np.interp(randvec, cumspec, ebins)
                     elif self.method == "accept_reject":
-                        tot_spec = cspec.d+Z*mspec.d
-                        tot_spec /= tot_spec.sum()
+                        tot_spec = cspec.d
+                        tot_spec += Z * mspec.d
+                        norm_factor = 1.0 / tot_spec.sum()
+                        tot_spec *= norm_factor
                         eidxs = np.random.choice(nchan, size=cn, p=tot_spec)
                         cell_e = emid[eidxs]
                     energies[ei:ei+cn] = cell_e

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/analysis_modules/photon_simulator/photon_simulator.py
--- a/yt/analysis_modules/photon_simulator/photon_simulator.py
+++ b/yt/analysis_modules/photon_simulator/photon_simulator.py
@@ -35,7 +35,7 @@
     communication_system, parallel_root_only, get_mpi_type, \
     parallel_capable
 from yt.units.yt_array import YTQuantity, YTArray, uconcatenate
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 from yt.utilities.on_demand_imports import _astropy
 import warnings
 

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/analysis_modules/photon_simulator/spectral_models.py
--- a/yt/analysis_modules/photon_simulator/spectral_models.py
+++ b/yt/analysis_modules/photon_simulator/spectral_models.py
@@ -11,7 +11,7 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 import numpy as np
 import os
 

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/analysis_modules/spectral_integrator/spectral_frequency_integrator.py
--- a/yt/analysis_modules/spectral_integrator/spectral_frequency_integrator.py
+++ b/yt/analysis_modules/spectral_integrator/spectral_frequency_integrator.py
@@ -14,7 +14,7 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 import numpy as np
 import os
 

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/analysis_modules/star_analysis/sfr_spectrum.py
--- a/yt/analysis_modules/star_analysis/sfr_spectrum.py
+++ b/yt/analysis_modules/star_analysis/sfr_spectrum.py
@@ -16,7 +16,7 @@
 
 import os
 import numpy as np
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 import math
 
 from yt.config import ytcfg

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/analysis_modules/two_point_functions/two_point_functions.py
--- a/yt/analysis_modules/two_point_functions/two_point_functions.py
+++ b/yt/analysis_modules/two_point_functions/two_point_functions.py
@@ -13,7 +13,7 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 import numpy as np
 
 from yt.funcs import mylog

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/data_objects/profiles.py
--- a/yt/data_objects/profiles.py
+++ b/yt/data_objects/profiles.py
@@ -13,7 +13,7 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 import numpy as np
 
 from yt.funcs import *

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/data_objects/tests/test_compose.py
--- a/yt/data_objects/tests/test_compose.py
+++ b/yt/data_objects/tests/test_compose.py
@@ -20,7 +20,6 @@
     yi = y / min_dx
     zi = z / min_dx
     index = xi + delta[0] * (yi + delta[1] * zi)
-    index = index.astype('int64')
     return index
 
 def test_compose_no_overlap():

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/fields/field_aliases.py
--- a/yt/fields/field_aliases.py
+++ b/yt/fields/field_aliases.py
@@ -83,7 +83,11 @@
     ("CuttingPlaneBy",                   "cutting_plane_by"),
     ("MeanMolecularWeight",              "mean_molecular_weight"),
     ("particle_density",                 "particle_density"),
+    ("ThermalEnergy",                    "thermal_energy"),
+    ("TotalEnergy",                      "total_energy"),
     ("MagneticEnergy",                   "magnetic_energy"),
+    ("GasEnergy",                        "thermal_energy"),
+    ("Gas_Energy",                       "thermal_energy"),
     ("BMagnitude",                       "b_magnitude"),
     ("PlasmaBeta",                       "plasma_beta"),
     ("MagneticPressure",                 "magnetic_pressure"),

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/fields/particle_fields.py
--- a/yt/fields/particle_fields.py
+++ b/yt/fields/particle_fields.py
@@ -806,6 +806,7 @@
     registry.add_field(field_name, function = _vol_weight,
                        validators = [ValidateSpatial(0)],
                        units = field_units)
+    registry.find_dependencies((field_name,))
     return [field_name]
 
 def add_nearest_neighbor_field(ptype, coord_name, registry, nneighbors = 64):

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/frontends/art/data_structures.py
--- a/yt/frontends/art/data_structures.py
+++ b/yt/frontends/art/data_structures.py
@@ -190,7 +190,7 @@
         particle header, star files, etc.
         """
         base_prefix, base_suffix = filename_pattern['amr']
-        aexpstr = 'a'+file_amr.rsplit('a',1)[1].replace(base_suffix,'')
+        numericstr = file_amr.rsplit('_',1)[1].replace(base_suffix,'')
         possibles = glob.glob(os.path.dirname(os.path.abspath(file_amr))+"/*")
         for filetype, (prefix, suffix) in filename_pattern.items():
             # if this attribute is already set skip it
@@ -198,7 +198,10 @@
                 continue
             match = None
             for possible in possibles:
-                if possible.endswith(aexpstr+suffix):
+                if possible.endswith(numericstr+suffix):
+                    if os.path.basename(possible).startswith(prefix):
+                        match = possible
+                elif possible.endswith(suffix):
                     if os.path.basename(possible).startswith(prefix):
                         match = possible
             if match is not None:

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/frontends/chombo/data_structures.py
--- a/yt/frontends/chombo/data_structures.py
+++ b/yt/frontends/chombo/data_structures.py
@@ -13,7 +13,7 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 import re
 import os
 import weakref

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/frontends/chombo/io.py
--- a/yt/frontends/chombo/io.py
+++ b/yt/frontends/chombo/io.py
@@ -88,7 +88,7 @@
         field_dict = {}
         for key, val in self._handle.attrs.items():
             if key.startswith('particle_'):
-                comp_number = int(re.match('particle_component_(\d)', key).groups()[0])
+                comp_number = int(re.match('particle_component_(\d+)', key).groups()[0])
                 field_dict[val.decode("ascii")] = comp_number
         self._particle_field_index = field_dict
         return self._particle_field_index

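The one-character regex fix above matters once a file carries ten or more particle
components: a lone \d consumes exactly one digit, so 'particle_component_12' used to parse
as component 1. In miniature:

    import re

    # Old pattern: a single \d captures only the first digit.
    assert re.match(r'particle_component_(\d)',
                    'particle_component_12').groups()[0] == '1'

    # Fixed pattern: \d+ captures the whole number.
    assert re.match(r'particle_component_(\d+)',
                    'particle_component_12').groups()[0] == '12'
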
diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/frontends/eagle/data_structures.py
--- a/yt/frontends/eagle/data_structures.py
+++ b/yt/frontends/eagle/data_structures.py
@@ -15,7 +15,7 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 import numpy as np
 
 from yt.frontends.gadget.data_structures import \

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/frontends/enzo/answer_testing_support.py
--- a/yt/frontends/enzo/answer_testing_support.py
+++ b/yt/frontends/enzo/answer_testing_support.py
@@ -80,16 +80,17 @@
 
     def __call__(self):
         # Read in the ds
-        ds = load(self.data_file)  
-        exact = self.get_analytical_solution() 
+        ds = load(self.data_file)
+        ds.setup_deprecated_fields()
+        exact = self.get_analytical_solution()
 
         ad = ds.all_data()
         position = ad['x']
         for k in self.fields:
-            field = ad[k]
+            field = ad[k].d
             for xmin, xmax in zip(self.left_edges, self.right_edges):
                 mask = (position >= xmin)*(position <= xmax)
-                exact_field = np.interp(position[mask], exact['pos'], exact[k]) 
+                exact_field = np.interp(position[mask], exact['pos'], exact[k])
                 myname = "ShockTubeTest_%s" % k
                 # yield test vs analytical solution 
                 yield AssertWrapper(myname, assert_allclose, field[mask], 

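The trailing .d added above is YTArray's accessor for the underlying plain ndarray, so the
comparison runs on bare floats rather than unit-carrying arrays. Roughly:

    import numpy as np
    from yt.units.yt_array import YTArray

    a = YTArray([1.0, 2.0, 3.0], "g/cm**3")
    raw = a.d                        # plain ndarray view, units dropped
    assert isinstance(raw, np.ndarray)
    assert not isinstance(raw, YTArray)
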
diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/frontends/enzo/data_structures.py
--- a/yt/frontends/enzo/data_structures.py
+++ b/yt/frontends/enzo/data_structures.py
@@ -13,7 +13,7 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 import weakref
 import numpy as np
 import os
@@ -627,7 +627,7 @@
 
     def _fill_arrays(self, ei, si, LE, RE, npart, nap):
         self.grid_dimensions[:,:1] = ei
-        self.grid_dimensions[:,:1] -= np.array(si, self.float_type)
+        self.grid_dimensions[:,:1] -= np.array(si, dtype='i4')
         self.grid_dimensions += 1
         self.grid_left_edge[:,:1] = LE
         self.grid_right_edge[:,:1] = RE
@@ -642,7 +642,7 @@
 
     def _fill_arrays(self, ei, si, LE, RE, npart, nap):
         self.grid_dimensions[:,:2] = ei
-        self.grid_dimensions[:,:2] -= np.array(si, self.float_type)
+        self.grid_dimensions[:,:2] -= np.array(si, dtype='i4')
         self.grid_dimensions += 1
         self.grid_left_edge[:,:2] = LE
         self.grid_right_edge[:,:2] = RE

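The dtype change in the two _fill_arrays methods above fixes an unsafe cast:
grid_dimensions is an integer array, and recent NumPy releases refuse an in-place
subtraction that would have to downcast float64 values into it. A small illustration (the
shapes here are made up):

    import numpy as np

    grid_dimensions = np.zeros((4, 3), dtype='int32')
    si = [1, 2]   # hypothetical per-axis start indices

    # In-place subtraction of float64 values from an int32 array is an
    # unsafe cast; modern NumPy raises rather than silently truncating.
    try:
        grid_dimensions[:, :2] -= np.array(si, dtype='float64')
    except TypeError:
        pass

    # Matching the integer dtype keeps the operation exact and legal.
    grid_dimensions[:, :2] -= np.array(si, dtype='i4')
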
diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/frontends/enzo/io.py
--- a/yt/frontends/enzo/io.py
+++ b/yt/frontends/enzo/io.py
@@ -21,7 +21,7 @@
 from yt.utilities.logger import ytLogger as mylog
 from yt.geometry.selection_routines import AlwaysSelector
 from yt.extern.six import u, b, iteritems
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 
 import numpy as np
 

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/frontends/gadget/data_structures.py
--- a/yt/frontends/gadget/data_structures.py
+++ b/yt/frontends/gadget/data_structures.py
@@ -15,7 +15,7 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 import numpy as np
 import stat
 import struct

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/frontends/gadget/tests/test_outputs.py
--- a/yt/frontends/gadget/tests/test_outputs.py
+++ b/yt/frontends/gadget/tests/test_outputs.py
@@ -14,6 +14,8 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
+from collections import OrderedDict
+
 from yt.testing import requires_file
 from yt.utilities.answer_testing.framework import \
     data_dir_load, \
@@ -23,20 +25,25 @@
 
 isothermal_h5 = "IsothermalCollapse/snap_505.hdf5"
 isothermal_bin = "IsothermalCollapse/snap_505"
-gdg = "GadgetDiskGalaxy/snapshot_0200.hdf5"
+gdg = "GadgetDiskGalaxy/snapshot_200.hdf5"
 
-iso_fields = (
-    ("gas", "density"),
-    ("gas", "temperature"),
-    ('gas', 'velocity_magnitude'),
-    ("deposit", "all_density"),
-    ("deposit", "all_count"),
-    ("deposit", "all_cic"),
-    ("deposit", "PartType0_density"),
+# This maps from field names to weight field names to use for projections
+iso_fields = OrderedDict(
+    [
+        (("gas", "density"), None),
+        (("gas", "temperature"), None),
+        (("gas", "temperature"), ('gas', 'density')),
+        (('gas', 'velocity_magnitude'), None),
+        (("deposit", "all_density"), None),
+        (("deposit", "all_count"), None),
+        (("deposit", "all_cic"), None),
+        (("deposit", "PartType0_density"), None),
+    ]
 )
 iso_kwargs = dict(bounding_box=[[-3, 3], [-3, 3], [-3, 3]])
 
-gdg_fields = iso_fields + (("deposit", "PartType4_density"), )
+gdg_fields = iso_fields.copy()
+gdg_fields["deposit", "PartType4_density"] = None
 gdg_kwargs = dict(bounding_box=[[-1e5, 1e5], [-1e5, 1e5], [-1e5, 1e5]])
 
 
@@ -53,10 +60,12 @@
 def test_iso_collapse():
     for test in sph_answer(isothermal_h5, 'snap_505', 2**17,
                            iso_fields, ds_kwargs=iso_kwargs):
+        test_iso_collapse.__name__ = test.description
         yield test
 
 @requires_ds(gdg, big_data=True)
 def test_gadget_disk_galaxy():
-    for test in sph_answer(gdg, 'snap_505', 11907080, gdg_fields,
+    for test in sph_answer(gdg, 'snapshot_200', 11907080, gdg_fields,
                            ds_kwargs=gdg_kwargs):
+        test_gadget_disk_galaxy.__name__ = test.description
         yield test

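The switch from a field tuple to an OrderedDict above lets every test field carry its own
projection weight: keys are (field_type, field_name) tuples, values name the weight field,
and None means an unweighted projection. One quirk of the encoding is that dict keys are
unique, so listing ('gas', 'temperature') twice keeps only the last weight given for it.
Usage in outline:

    from collections import OrderedDict

    fields = OrderedDict([
        (("gas", "density"), None),                    # unweighted
        (("gas", "temperature"), ("gas", "density")),  # density-weighted
    ])

    for field, weight in fields.items():
        print("project %s weighted by %s" % (field, weight))
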
diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/frontends/gadget_fof/data_structures.py
--- a/yt/frontends/gadget_fof/data_structures.py
+++ b/yt/frontends/gadget_fof/data_structures.py
@@ -15,7 +15,7 @@
 #-----------------------------------------------------------------------------
 
 from collections import defaultdict
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 import numpy as np
 import stat
 import glob

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/frontends/gadget_fof/io.py
--- a/yt/frontends/gadget_fof/io.py
+++ b/yt/frontends/gadget_fof/io.py
@@ -14,7 +14,7 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 import numpy as np
 
 from yt.utilities.exceptions import YTDomainOverflow

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/frontends/gdf/data_structures.py
--- a/yt/frontends/gdf/data_structures.py
+++ b/yt/frontends/gdf/data_structures.py
@@ -13,7 +13,7 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 import numpy as np
 import weakref
 import os

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/frontends/gdf/io.py
--- a/yt/frontends/gdf/io.py
+++ b/yt/frontends/gdf/io.py
@@ -14,7 +14,7 @@
 #-----------------------------------------------------------------------------
 
 import numpy as np
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 from yt.funcs import \
     mylog
 from yt.utilities.io_handler import \

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/frontends/halo_catalog/data_structures.py
--- a/yt/frontends/halo_catalog/data_structures.py
+++ b/yt/frontends/halo_catalog/data_structures.py
@@ -14,7 +14,7 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 import numpy as np
 import stat
 import glob

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/frontends/halo_catalog/io.py
--- a/yt/frontends/halo_catalog/io.py
+++ b/yt/frontends/halo_catalog/io.py
@@ -14,7 +14,7 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 import numpy as np
 
 from yt.utilities.exceptions import YTDomainOverflow

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/frontends/moab/data_structures.py
--- a/yt/frontends/moab/data_structures.py
+++ b/yt/frontends/moab/data_structures.py
@@ -13,7 +13,7 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 import os
 import numpy as np
 import weakref

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/frontends/owls/data_structures.py
--- a/yt/frontends/owls/data_structures.py
+++ b/yt/frontends/owls/data_structures.py
@@ -15,7 +15,7 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 
 import yt.units
 from yt.frontends.gadget.data_structures import \

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/frontends/owls/io.py
--- a/yt/frontends/owls/io.py
+++ b/yt/frontends/owls/io.py
@@ -15,7 +15,7 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 import numpy as np
 import os
 

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/frontends/owls/owls_ion_tables.py
--- a/yt/frontends/owls/owls_ion_tables.py
+++ b/yt/frontends/owls/owls_ion_tables.py
@@ -16,7 +16,7 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 import numpy as np
 
 

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/frontends/owls/tests/test_outputs.py
--- a/yt/frontends/owls/tests/test_outputs.py
+++ b/yt/frontends/owls/tests/test_outputs.py
@@ -14,6 +14,8 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
+from collections import OrderedDict
+
 from yt.testing import \
     requires_file
 from yt.utilities.answer_testing.framework import \
@@ -24,22 +26,27 @@
 
 os33 = "snapshot_033/snap_033.0.hdf5"
 
-_fields = (
-    ("gas", "density"),
-    ("gas", "temperature"),
-    ('gas', 'He_p0_number_density'),
-    ('gas', 'N_p1_number_density'),
-    ('gas', 'velocity_magnitude'),
-    ("deposit", "all_density"),
-    ("deposit", "all_count"),
-    ("deposit", "all_cic"),
-    ("deposit", "PartType0_density"),
-    ("deposit", "PartType4_density"))
+# This maps from field names to weight field names to use for projections
+_fields = OrderedDict(
+    [
+        (("gas", "density"), None),
+        (("gas", "temperature"), None),
+        (("gas", "temperature"), ("gas", "density")),
+        (('gas', 'He_p0_number_density'), None),
+        (('gas', 'velocity_magnitude'), None),
+        (("deposit", "all_density"), None),
+        (("deposit", "all_count"), None),
+        (("deposit", "all_cic"), None),
+        (("deposit", "PartType0_density"), None),
+        (("deposit", "PartType4_density"), None),
+    ]
+)
 
 
 @requires_ds(os33, big_data=True)
 def test_snapshot_033():
     for test in sph_answer(os33, 'snap_033', 2*128**3, _fields):
+        test_snapshot_033.__name__ = test.description
         yield test
 
 

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/frontends/owls_subfind/data_structures.py
--- a/yt/frontends/owls_subfind/data_structures.py
+++ b/yt/frontends/owls_subfind/data_structures.py
@@ -15,7 +15,7 @@
 #-----------------------------------------------------------------------------
 
 from collections import defaultdict
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 import numpy as np
 import stat
 import glob

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/frontends/owls_subfind/io.py
--- a/yt/frontends/owls_subfind/io.py
+++ b/yt/frontends/owls_subfind/io.py
@@ -14,7 +14,7 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import h5py
+from yt.utilities.on_demand_imports import _h5py as h5py
 import numpy as np
 
 from yt.utilities.exceptions import YTDomainOverflow

diff -r 5ba8977b8ada96722823b21d5e36dfdd9216c3f2 -r 237abb87f026f7fdb448e0a78cc16251d5527004 yt/frontends/tipsy/data_structures.py
--- a/yt/frontends/tipsy/data_structures.py
+++ b/yt/frontends/tipsy/data_structures.py
@@ -72,7 +72,7 @@
                  bounding_box=None,
                  units_override=None):
         # Because Tipsy outputs don't have a fixed domain boundary, one can
-        # specify a bounding box which effectively gives a domain_left_edge 
+        # specify a bounding box which effectively gives a domain_left_edge
         # and domain_right_edge
         self.bounding_box = bounding_box
         self.filter_bbox = (bounding_box is not None)
@@ -179,7 +179,7 @@
             else:
                 self.domain_left_edge = None
                 self.domain_right_edge = None
-        else: 
+        else:
             bbox = np.array(self.bounding_box, dtype="float64")
             if bbox.shape == (2, 3):
                 bbox = bbox.transpose()
@@ -269,7 +269,7 @@
         '''
         This method automatically detects whether the tipsy file is big/little endian
         and is not corrupt/invalid.  It returns a tuple of (Valid, endianswap) where
-        Valid is a boolean that is true if the file is a tipsy file, and endianswap is 
+        Valid is a boolean that is true if the file is a tipsy file, and endianswap is
         the endianness character '>' or '<'.
         '''
         try:

This diff is so big that we needed to truncate the remainder.

https://bitbucket.org/yt_analysis/yt/commits/0f0bad273502/
Changeset:   0f0bad273502
Branch:      yt
User:        ngoldbaum
Date:        2015-10-07 15:49:55+00:00
Summary:     Removing an out-of-date comment, a use of sec_conversion, and fixing two lint issues
Affected #:  4 files

diff -r 237abb87f026f7fdb448e0a78cc16251d5527004 -r 0f0bad27350299273b5d465c5f7bfd1758a3eaf3 yt/frontends/flash/data_structures.py
--- a/yt/frontends/flash/data_structures.py
+++ b/yt/frontends/flash/data_structures.py
@@ -151,8 +151,6 @@
             gre[i][:ND] = np.rint(gre[i][:ND]/dx[0][:ND])*dx[0][:ND]
 
     def _populate_grid_objects(self):
-        # We only handle 3D data, so offset is 7 (nfaces+1)
-
         ii = np.argsort(self.grid_levels.flat)
         gid = self._handle["/gid"][:]
         first_ind = -(self.dataset.refine_by**self.dataset.dimensionality)

diff -r 237abb87f026f7fdb448e0a78cc16251d5527004 -r 0f0bad27350299273b5d465c5f7bfd1758a3eaf3 yt/frontends/owls_subfind/data_structures.py
--- a/yt/frontends/owls_subfind/data_structures.py
+++ b/yt/frontends/owls_subfind/data_structures.py
@@ -24,8 +24,6 @@
 from .fields import \
     OWLSSubfindFieldInfo
 
-from yt.utilities.definitions import \
-    sec_conversion
 from yt.utilities.exceptions import \
     YTException
 from yt.utilities.logger import ytLogger as \
@@ -126,7 +124,7 @@
             int(os.stat(self.parameter_filename)[stat.ST_CTIME])
 
         # Set standard values
-        self.current_time = self.quan(hvals["Time_GYR"] * sec_conversion["Gyr"], "s")
+        self.current_time = self.quan(hvals["Time_GYR"], "Gyr")
         self.domain_left_edge = np.zeros(3, "float64")
         self.domain_right_edge = np.ones(3, "float64") * hvals["BoxSize"]
         nz = 1 << self.over_refine_factor

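Tagging the quantity as "Gyr" above hands the Gyr-to-seconds arithmetic to yt's unit
system instead of a hand-maintained sec_conversion table. Standalone (ds.quan is the
dataset-bound spelling of YTQuantity):

    from yt.units.yt_array import YTQuantity

    t = YTQuantity(13.8, "Gyr")
    print(t.in_units("s"))    # ~4.35e+17 s, with no hand-kept constant
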
diff -r 237abb87f026f7fdb448e0a78cc16251d5527004 -r 0f0bad27350299273b5d465c5f7bfd1758a3eaf3 yt/frontends/tipsy/data_structures.py
--- a/yt/frontends/tipsy/data_structures.py
+++ b/yt/frontends/tipsy/data_structures.py
@@ -32,7 +32,6 @@
 from yt.utilities.physical_constants import \
     G, \
     cm_per_kpc
-from yt import YTQuantity
 
 from .fields import \
     TipsyFieldInfo

diff -r 237abb87f026f7fdb448e0a78cc16251d5527004 -r 0f0bad27350299273b5d465c5f7bfd1758a3eaf3 yt/frontends/tipsy/io.py
--- a/yt/frontends/tipsy/io.py
+++ b/yt/frontends/tipsy/io.py
@@ -19,7 +19,6 @@
 import numpy as np
 from numpy.lib.recfunctions import append_fields
 import os
-import struct
 
 from yt.utilities.io_handler import \
     BaseIOHandler


https://bitbucket.org/yt_analysis/yt/commits/f8d010e72ce0/
Changeset:   f8d010e72ce0
Branch:      yt
User:        ngoldbaum
Date:        2015-10-07 20:08:06+00:00
Summary:     Removing two unused classes from the API docs.
Affected #:  1 file

diff -r 0f0bad27350299273b5d465c5f7bfd1758a3eaf3 -r f8d010e72ce0c949b48b19e8d424dadcb665ab94 doc/source/reference/api/api.rst
--- a/doc/source/reference/api/api.rst
+++ b/doc/source/reference/api/api.rst
@@ -211,8 +211,6 @@
    ~yt.frontends.boxlib.data_structures.OrionDataset
    ~yt.frontends.boxlib.fields.BoxlibFieldInfo
    ~yt.frontends.boxlib.io.IOHandlerBoxlib
-   ~yt.frontends.boxlib.io.IOHandlerCastro
-   ~yt.frontends.boxlib.io.IOHandlerNyx
    ~yt.frontends.boxlib.io.IOHandlerOrion
 
 Chombo


https://bitbucket.org/yt_analysis/yt/commits/03e316bc6fdd/
Changeset:   03e316bc6fdd
Branch:      yt
User:        brittonsmith
Date:        2015-10-12 18:29:43+00:00
Summary:     Merged in ngoldbaum/yt (pull request #1762)

Linting yt.frontends
Affected #:  112 files

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d doc/source/reference/api/api.rst
--- a/doc/source/reference/api/api.rst
+++ b/doc/source/reference/api/api.rst
@@ -211,8 +211,6 @@
    ~yt.frontends.boxlib.data_structures.OrionDataset
    ~yt.frontends.boxlib.fields.BoxlibFieldInfo
    ~yt.frontends.boxlib.io.IOHandlerBoxlib
-   ~yt.frontends.boxlib.io.IOHandlerCastro
-   ~yt.frontends.boxlib.io.IOHandlerNyx
    ~yt.frontends.boxlib.io.IOHandlerOrion
 
 Chombo

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/_skeleton/data_structures.py
--- a/yt/frontends/_skeleton/data_structures.py
+++ b/yt/frontends/_skeleton/data_structures.py
@@ -13,6 +13,8 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
+import os
+
 from yt.data_objects.grid_patch import \
     AMRGridPatch
 from yt.geometry.grid_geometry_handler import \

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/_skeleton/fields.py
--- a/yt/frontends/_skeleton/fields.py
+++ b/yt/frontends/_skeleton/fields.py
@@ -13,8 +13,6 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import numpy as np
-from yt.funcs import mylog
 from yt.fields.field_info_container import \
     FieldInfoContainer
 

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/_skeleton/setup.py
--- a/yt/frontends/_skeleton/setup.py
+++ b/yt/frontends/_skeleton/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 
 
 def configuration(parent_package='', top_path=None):

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/art/data_structures.py
--- a/yt/frontends/art/data_structures.py
+++ b/yt/frontends/art/data_structures.py
@@ -9,48 +9,47 @@
 #
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
+import glob
 import numpy as np
 import os
 import stat
+import struct
 import weakref
-from yt.extern.six.moves import cStringIO
-import difflib
-import glob
 
-from yt.funcs import *
 from yt.geometry.oct_geometry_handler import \
     OctreeIndex
 from yt.geometry.geometry_handler import \
-    Index, YTDataChunk
+    YTDataChunk
 from yt.data_objects.static_output import \
     Dataset, ParticleFile
 from yt.data_objects.octree_subset import \
     OctreeSubset
+from yt.funcs import \
+    mylog
 from yt.geometry.oct_container import \
     ARTOctreeContainer
-from .fields import ARTFieldInfo
-from yt.utilities.io_handler import \
-    io_registry
-from yt.utilities.lib.misc_utilities import \
-    get_box_grids_level
+from yt.frontends.art.definitions import \
+    fluid_fields, \
+    particle_fields, \
+    filename_pattern, \
+    particle_header_struct, \
+    amr_header_struct, \
+    dmparticle_header_struct, \
+    constants, \
+    seek_extras
+from yt.frontends.art.fields import ARTFieldInfo
 from yt.data_objects.particle_unions import \
     ParticleUnion
 from yt.geometry.particle_geometry_handler import \
     ParticleIndex
-from yt.utilities.lib.geometry_utils import compute_morton
 
-from yt.frontends.art.definitions import *
 import yt.utilities.fortran_utils as fpu
-from .io import _read_art_level_info
-from .io import _read_child_level
-from .io import _read_root_level
-from .io import b2t
-from .io import a2b
-
-from yt.utilities.io_handler import \
-    io_registry
-from yt.fields.field_info_container import \
-    FieldInfoContainer, NullFunc
+from yt.frontends.art.io import \
+    _read_art_level_info, \
+    _read_child_level, \
+    _read_root_level, \
+    b2t, \
+    a2b
 
 
 class ARTIndex(OctreeIndex):
@@ -181,8 +180,6 @@
         self.max_level = limit_level
         self.force_max_level = force_max_level
         self.spread_age = spread_age
-        self.domain_left_edge = np.zeros(3, dtype='float')
-        self.domain_right_edge = np.zeros(3, dtype='float')+1.0
         Dataset.__init__(self, filename, dataset_type,
                          units_override=units_override)
         self.storage_filename = storage_filename
@@ -231,7 +228,6 @@
         aexpn = self.parameters["aexpn"]
 
         # all other units
-        wmu = self.parameters["wmu"]
         Om0 = self.parameters['Om0']
         ng = self.parameters['ng']
         boxh = self.parameters['boxh']
@@ -255,6 +251,8 @@
         """
         Get the various simulation parameters & constants.
         """
+        self.domain_left_edge = np.zeros(3, dtype='float')
+        self.domain_right_edge = np.zeros(3, dtype='float')+1.0
         self.dimensionality = 3
         self.refine_by = 2
         self.periodicity = (True, True, True)
@@ -268,7 +266,7 @@
         with open(self._file_amr, 'rb') as f:
             amr_header_vals = fpu.read_attrs(f, amr_header_struct, '>')
             for to_skip in ['tl', 'dtl', 'tlold', 'dtlold', 'iSO']:
-                skipped = fpu.skip(f, endian='>')
+                fpu.skip(f, endian='>')
             (self.ncell) = fpu.read_vector(f, 'i', '>')[0]
             # Try to figure out the root grid dimensions
             est = int(np.rint(self.ncell**(1.0/3.0)))
@@ -383,7 +381,7 @@
             return False
         with open(f, 'rb') as fh:
             try:
-                amr_header_vals = fpu.read_attrs(fh, amr_header_struct, '>')
+                fpu.read_attrs(fh, amr_header_struct, '>')
                 return True
             except:
                 return False
@@ -425,8 +423,6 @@
         self.parameter_filename = filename
         self.skip_stars = skip_stars
         self.spread_age = spread_age
-        self.domain_left_edge = np.zeros(3, dtype='float')
-        self.domain_right_edge = np.zeros(3, dtype='float')+1.0
         Dataset.__init__(self, filename, dataset_type)
         self.storage_filename = storage_filename
 
@@ -470,7 +466,6 @@
         aexpn = self.parameters["aexpn"]
 
         # all other units
-        wmu = self.parameters["wmu"]
         Om0 = self.parameters['Om0']
         ng = self.parameters['ng']
         boxh = self.parameters['boxh']
@@ -494,6 +489,8 @@
         """
         Get the various simulation parameters & constants.
         """
+        self.domain_left_edge = np.zeros(3, dtype='float')
+        self.domain_right_edge = np.zeros(3, dtype='float')+1.0
         self.dimensionality = 3
         self.refine_by = 2
         self.periodicity = (True, True, True)
@@ -633,32 +630,32 @@
             try:
                 seek = 4
                 fh.seek(seek)
-                headerstr = np.fromfile(fh, count=1, dtype=(str,45))
-                aexpn = np.fromfile(fh, count=1, dtype='>f4')
-                aexp0 = np.fromfile(fh, count=1, dtype='>f4')
-                amplt = np.fromfile(fh, count=1, dtype='>f4')
-                astep = np.fromfile(fh, count=1, dtype='>f4')
-                istep = np.fromfile(fh, count=1, dtype='>i4')
-                partw = np.fromfile(fh, count=1, dtype='>f4')
-                tintg = np.fromfile(fh, count=1, dtype='>f4')
-                ekin = np.fromfile(fh, count=1, dtype='>f4')
-                ekin1 = np.fromfile(fh, count=1, dtype='>f4')
-                ekin2 = np.fromfile(fh, count=1, dtype='>f4')
-                au0 = np.fromfile(fh, count=1, dtype='>f4')
-                aeu0 = np.fromfile(fh, count=1, dtype='>f4')
-                nrowc = np.fromfile(fh, count=1, dtype='>i4')
-                ngridc = np.fromfile(fh, count=1, dtype='>i4')
-                nspecs = np.fromfile(fh, count=1, dtype='>i4')
-                nseed = np.fromfile(fh, count=1, dtype='>i4')
-                Om0 = np.fromfile(fh, count=1, dtype='>f4')
-                Oml0 = np.fromfile(fh, count=1, dtype='>f4')
-                hubble = np.fromfile(fh, count=1, dtype='>f4')
-                Wp5 = np.fromfile(fh, count=1, dtype='>f4')
-                Ocurv = np.fromfile(fh, count=1, dtype='>f4')
-                wspecies = np.fromfile(fh, count=10, dtype='>f4')
-                lspecies = np.fromfile(fh, count=10, dtype='>i4')
-                extras = np.fromfile(fh, count=79, dtype='>f4')
-                boxsize = np.fromfile(fh, count=1, dtype='>f4')
+                headerstr = np.fromfile(fh, count=1, dtype=(str,45))  # NOQA
+                aexpn = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                aexp0 = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                amplt = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                astep = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                istep = np.fromfile(fh, count=1, dtype='>i4')  # NOQA
+                partw = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                tintg = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                ekin = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                ekin1 = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                ekin2 = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                au0 = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                aeu0 = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                nrowc = np.fromfile(fh, count=1, dtype='>i4')  # NOQA
+                ngridc = np.fromfile(fh, count=1, dtype='>i4')  # NOQA
+                nspecs = np.fromfile(fh, count=1, dtype='>i4')  # NOQA
+                nseed = np.fromfile(fh, count=1, dtype='>i4')  # NOQA
+                Om0 = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                Oml0 = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                hubble = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                Wp5 = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                Ocurv = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
+                wspecies = np.fromfile(fh, count=10, dtype='>f4')  # NOQA
+                lspecies = np.fromfile(fh, count=10, dtype='>i4')  # NOQA
+                extras = np.fromfile(fh, count=79, dtype='>f4')  # NOQA
+                boxsize = np.fromfile(fh, count=1, dtype='>f4')  # NOQA
                 return True
             except:
                 return False
@@ -702,7 +699,7 @@
         oct_handler.fill_level(0, levels, cell_inds, file_inds, tr, source)
         del source
         # Now we continue with the additional levels.
-        for level in range(1, self.ds.max_level + 1):
+        for level in range(1, self.ds.index.max_level + 1):
             no = self.domain.level_count[level]
             noct_range = [0, no]
             source = _read_child_level(
@@ -789,9 +786,7 @@
             Level[Lev], iNOLL[Lev], iHOLL[Lev] = fpu.read_vector(f, 'i', '>')
             # print 'Level %i : '%Lev, iNOLL
             # print 'offset after level record:',f.tell()
-            iOct = iHOLL[Lev] - 1
             nLevel = iNOLL[Lev]
-            nLevCells = nLevel * nchild
             ntot = ntot + nLevel
 
             # Skip all the oct hierarchy data
@@ -834,11 +829,9 @@
 
     def _read_amr_root(self, oct_handler):
         self.level_offsets
-        f = open(self.ds._file_amr, "rb")
         # add the root *cell* not *oct* mesh
         root_octs_side = self.ds.domain_dimensions[0]/2
         NX = np.ones(3)*root_octs_side
-        octs_side = NX*2 # Level == 0
         LE = np.array([0.0, 0.0, 0.0], dtype='float64')
         RE = np.array([1.0, 1.0, 1.0], dtype='float64')
         root_dx = (RE - LE) / NX
@@ -849,7 +842,7 @@
                            LL[1]:RL[1]:NX[1]*1j,
                            LL[2]:RL[2]:NX[2]*1j]
         root_fc = np.vstack([p.ravel() for p in root_fc]).T
-        nocts_check = oct_handler.add(self.domain_id, 0, root_fc)
+        oct_handler.add(self.domain_id, 0, root_fc)
         assert(oct_handler.nocts == root_fc.shape[0])
         mylog.debug("Added %07i octs on level %02i, cumulative is %07i",
                     root_octs_side**3, 0, oct_handler.nocts)

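The wall of "# NOQA" markers above exists because each np.fromfile call must run to
advance the file pointer, yet most of the bound names are never read again, which flake8
reports as F841. The pattern in miniature (read_header is a hypothetical stand-in):

    import io

    def read_header(fh):
        # Each read advances the stream; the bound names are never used
        # again, which flake8 reports as F841 unless the line carries the
        # "# NOQA" marker.
        aexpn = fh.read(4)  # NOQA
        aexp0 = fh.read(4)  # NOQA
        return fh.tell()

    assert read_header(io.BytesIO(b'\x00' * 16)) == 8
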
diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/art/fields.py
--- a/yt/frontends/art/fields.py
+++ b/yt/frontends/art/fields.py
@@ -13,13 +13,8 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import numpy as np
-
 from yt.fields.field_info_container import \
     FieldInfoContainer
-from yt.units.yt_array import \
-    YTArray
-from yt.frontends.art.definitions import *
 
 b_units = "code_magnetic"
 ra_units = "code_length / code_time**2"
@@ -68,7 +63,7 @@
             tr *= data.ds.parameters['wmu'] * data.ds.parameters['Om0']
             tr *= (data.ds.parameters['gamma'] - 1.)
             tr /= data.ds.parameters['aexpn']**2
-            return  tr * data['art', 'GasEnergy'] / data['art', 'Density']
+            return tr * data['art', 'GasEnergy'] / data['art', 'Density']
         self.add_field(('gas', 'temperature'),
                        function=_temperature, 
                        units='K')

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/art/io.py
--- a/yt/frontends/art/io.py
+++ b/yt/frontends/art/io.py
@@ -15,24 +15,30 @@
 
 
 import numpy as np
-import struct
 import os
 import os.path
 import sys
+
+from collections import defaultdict
+
 if sys.version_info >= (3,0,0):
     long = int
-    
-from yt.funcs import *
+
+from yt.frontends.art.definitions import \
+    particle_star_fields, \
+    particle_fields, \
+    star_struct, \
+    hydro_struct
 from yt.utilities.io_handler import \
     BaseIOHandler
-from yt.utilities.fortran_utils import *
 from yt.utilities.logger import ytLogger as mylog
-from yt.frontends.art.definitions import *
-from yt.utilities.physical_constants import sec_per_year
 from yt.utilities.lib.geometry_utils import compute_morton
-from yt.geometry.oct_container import _ORDER_MAX
-from yt.units.yt_array import YTQuantity
-
+from yt.utilities.fortran_utils import \
+    read_vector, \
+    skip
+from yt.units.yt_array import \
+    YTQuantity, \
+    YTArray
 
 class IOHandlerART(BaseIOHandler):
     _dataset_type = "art"
@@ -80,7 +86,6 @@
         key = (selector, ftype)
         if key in self.masks.keys() and self.caching:
             return self.masks[key]
-        ds = self.ds
         pstr = 'particle_position_%s'
         x,y,z = [self._get_field((ftype, pstr % ax)) for ax in 'xyz']
         mask = selector.select_points(x, y, z, 0.0)
@@ -120,7 +125,7 @@
         tr = {}
         ftype, fname = field
         ptmax = self.ws[-1]
-        pbool, idxa, idxb = _determine_field_size(self.ds, ftype, 
+        pbool, idxa, idxb = _determine_field_size(self.ds, ftype,
                                                   self.ls, ptmax)
         npa = idxb - idxa
         sizes = np.diff(np.concatenate(([0], self.ls)))
@@ -178,7 +183,7 @@
             # dark_matter -- stars are regular matter.
             tr[field] /= self.ds.domain_dimensions.prod()
         if tr == {}:
-            tr = dict((f, np.array([])) for f in fields)
+            tr = dict((f, np.array([])) for f in [field])
         if self.caching:
             self.cache[field] = tr[field]
             return self.cache[field]
@@ -195,7 +200,6 @@
         count = data_file.ds.parameters['lspecies'][-1]
         DLE = data_file.ds.domain_left_edge
         DRE = data_file.ds.domain_right_edge
-        dx = (DRE - DLE) / 2**_ORDER_MAX
         with open(data_file.filename, "rb") as f:
             # The first total_particles * 3 values are positions
             pp = np.fromfile(f, dtype = '>f4', count = totcount*3)
@@ -209,7 +213,6 @@
 
     def _identify_fields(self, domain):
         field_list = []
-        tp = domain.total_particles
         self.particle_field_list = [f for f in particle_fields]
         for ptype in self.ds.particle_types_raw:
             for pfield in self.particle_field_list:
@@ -225,7 +228,7 @@
         tr = {}
         ftype, fname = field
         ptmax = self.ws[-1]
-        pbool, idxa, idxb = _determine_field_size(self.ds, ftype, 
+        pbool, idxa, idxb = _determine_field_size(self.ds, ftype,
                                                   self.ls, ptmax)
         npa = idxb - idxa
         sizes = np.diff(np.concatenate(([0], self.ls)))
@@ -258,17 +261,6 @@
                     data[a: a + size] = i
                     a += size
             tr[field] = data
-        if fname == "particle_creation_time":
-            self.tb, self.ages, data = interpolate_ages(
-                tr[field][-nstars:],
-                self.file_stars,
-                self.tb,
-                self.ages,
-                self.ds.current_time)
-            temp = tr.get(field, np.zeros(npa, 'f8'))
-            temp[-nstars:] = data
-            tr[field] = temp
-            del data
         # We check again, after it's been filled
         if fname.startswith("particle_mass"):
             # We now divide by NGrid in order to make this match up.  Note that
@@ -356,7 +348,6 @@
     # ioct always represents the index of the next variable
     # not the current, so shift forward one index
     # the last index isn't used
-    ioctso = iocts.copy()
     iocts[1:] = iocts[:-1]  # shift
     iocts = iocts[:nLevel]  # chop off the last, unused, index
     iocts[0] = iOct  # starting value
@@ -400,11 +391,11 @@
     # Posy   = d_x * (iOctPs(2,iO) + sign ( id , idelta(j,2) ))
     # Posz   = d_x * (iOctPs(3,iO) + sign ( id , idelta(j,3) ))
     # idelta = [[-1,  1, -1,  1, -1,  1, -1,  1],
-              #[-1, -1,  1,  1, -1, -1,  1,  1],
-              #[-1, -1, -1, -1,  1,  1,  1,  1]]
+    #           [-1, -1,  1,  1, -1, -1,  1,  1],
+    #           [-1, -1, -1, -1,  1,  1,  1,  1]]
     # idelta = np.array(idelta)
     # if ncell0 is None:
-        # ncell0 = coarse_grid**3
+    #     ncell0 = coarse_grid**3
     # nchild = 8
     # ndim = 3
     # nshift = nchild -1
@@ -424,15 +415,13 @@
     f.seek(pos)
     return unitary_center, fl, iocts, nLevel, root_level
 
-def get_ranges(skip, count, field, words=6, real_size=4, np_per_page=4096**2, 
+def get_ranges(skip, count, field, words=6, real_size=4, np_per_page=4096**2,
                   num_pages=1):
     #translate every particle index into a file position ranges
     ranges = []
     arr_size = np_per_page * real_size
-    page_size = words * np_per_page * real_size
     idxa, idxb = 0, 0
     posa, posb = 0, 0
-    left = count
     for page in range(num_pages):
         idxb += np_per_page
         for i, fname in enumerate(['x', 'y', 'z', 'vx', 'vy', 'vz']):
@@ -462,7 +451,7 @@
     num_pages = os.path.getsize(file)/(real_size*words*np_per_page)
     fh = open(file, 'r')
     skip, count = idxa, idxb - idxa
-    kwargs = dict(words=words, real_size=real_size, 
+    kwargs = dict(words=words, real_size=real_size,
                   np_per_page=np_per_page, num_pages=num_pages)
     arrs = []
     for field in fields:
@@ -495,7 +484,6 @@
 
 def _read_child_mask_level(f, level_child_offsets, level, nLevel, nhydro_vars):
     f.seek(level_child_offsets[level])
-    nvals = nLevel * (nhydro_vars + 6)  # 2 vars, 2 pads
     ioctch = np.zeros(nLevel, dtype='uint8')
     idc = np.zeros(nLevel, dtype='int32')
 
@@ -639,8 +627,6 @@
         return a2t(b2a(tb))
     if len(tb) < n:
         n = len(tb)
-    age_min = a2t(b2a(tb.max(), **kwargs), **kwargs)
-    age_max = a2t(b2a(tb.min(), **kwargs), **kwargs)
     tbs = -1.*np.logspace(np.log10(-tb.min()),
                           np.log10(-tb.max()), n)
     ages = []

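The guarded "long = int" alias near the top of art/io.py is the usual minimal shim for
code that still spells out long: Python 3 folded long into int, and the guard leaves
Python 2 untouched. Isolated:

    import sys

    # Python 3 unified int and long; the alias lets code that still says
    # "long" run under either interpreter.
    if sys.version_info >= (3, 0, 0):
        long = int

    assert long(2) ** 40 == 1099511627776
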
diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/art/setup.py
--- a/yt/frontends/art/setup.py
+++ b/yt/frontends/art/setup.py
@@ -1,6 +1,5 @@
 #!/usr/bin/env python
-import setuptools
-import os, sys, os.path
+
 
 def configuration(parent_package='',top_path=None):
     from numpy.distutils.misc_util import Configuration

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/art/tests/test_outputs.py
--- a/yt/frontends/art/tests/test_outputs.py
+++ b/yt/frontends/art/tests/test_outputs.py
@@ -17,49 +17,60 @@
 from yt.testing import \
     requires_file, \
     assert_equal, \
-    units_override_check
+    units_override_check, \
+    assert_almost_equal
+from yt.units.yt_array import \
+    YTQuantity
 from yt.utilities.answer_testing.framework import \
     requires_ds, \
-    big_patch_amr, \
+    FieldValuesTest, \
     PixelizedProjectionValuesTest, \
     data_dir_load
 from yt.frontends.art.api import ARTDataset
 
-_fields = ("density", "temperature", "particle_mass", ("all", "particle_position_x"))
+_fields = (
+    ("gas", "density"),
+    ("gas", "temperature"),
+    ("all", "particle_mass"),
+    ("all", "particle_position_x")
+)
 
 d9p = "D9p_500/10MpcBox_HartGal_csf_a0.500.d"
 
 @requires_ds(d9p, big_data=True)
 def test_d9p():
     ds = data_dir_load(d9p)
+    ds.index
     yield assert_equal, str(ds), "10MpcBox_HartGal_csf_a0.500.d"
-    for test in big_patch_amr(d9p, _fields):
-        test_d9p.__name__ = test.description
-        yield test
     dso = [None, ("sphere", ("max", (0.1, 'unitary')))]
     for field in _fields:
         for axis in [0, 1, 2]:
             for dobj_name in dso:
                 for weight_field in [None, "density"]:
-                    yield PixelizedProjectionValuesTest(
-                        d9p, axis, field, weight_field,
-                        dobj_name)
-
+                    if field[0] not in ds.particle_types:
+                        yield PixelizedProjectionValuesTest(
+                            d9p, axis, field, weight_field,
+                            dobj_name)
+            yield FieldValuesTest(d9p, field, dobj_name)
 
     ad = ds.all_data()
     # 'Ana' variable values output from the ART Fortran 'ANA' analysis code
     AnaNStars = 6255
-    yield assert_equal, ad[('stars','particle_type')].size, AnaNStars
+    yield assert_equal, ad[('stars', 'particle_type')].size, AnaNStars
     yield assert_equal, ad[('specie4', 'particle_type')].size, AnaNStars
-    AnaNDM = 2833405
-    yield assert_equal, ad[('darkmatter','particle_type')].size, AnaNDM
-    yield assert_equal, ad[('specie0', 'particle_type')].size + \
-        ad[('specie1', 'particle_type')].size + \
-        ad[('specie2', 'particle_type')].size + \
-        ad[('specie3', 'particle_type')].size, AnaNDM
 
-    AnaBoxSize = yt.units.yt_array.YTQuantity(7.1442196564,'Mpc')
-    AnaVolume = yt.units.yt_array.YTQuantity(364.640074656,'Mpc**3')
+    # The *real* answer is 2833405, but yt misses one particle since it lives
+    # on a domain boundary. See issue 814. When that is fixed, this test
+    # will need to be updated
+    AnaNDM = 2833404
+    yield assert_equal, ad[('darkmatter', 'particle_type')].size, AnaNDM
+    yield assert_equal, (ad[('specie0', 'particle_type')].size +
+                         ad[('specie1', 'particle_type')].size +
+                         ad[('specie2', 'particle_type')].size +
+                         ad[('specie3', 'particle_type')].size), AnaNDM
+
+    AnaBoxSize = YTQuantity(7.1442196564, 'Mpc')
+    AnaVolume = YTQuantity(364.640074656, 'Mpc**3')
     Volume = 1
     for i in ds.domain_width.in_units('Mpc'):
         yield assert_almost_equal, i, AnaBoxSize
@@ -67,26 +78,29 @@
     yield assert_almost_equal, Volume, AnaVolume
 
     AnaNCells = 4087490
-    yield assert_equal, len(ad[('index','cell_volume')]), AnaNCells
+    yield assert_equal, len(ad[('index', 'cell_volume')]), AnaNCells
 
-    AnaTotDMMass = yt.units.yt_array.YTQuantity(1.01191786811e+14,'Msun')
-    yield assert_almost_equal, ad[('darkmatter','particle_mass')].sum()\
-        .in_units('Msun'), AnaTotDMMass
+    AnaTotDMMass = YTQuantity(1.01191786808255e+14, 'Msun')
+    yield (assert_almost_equal,
+           ad[('darkmatter', 'particle_mass')].sum().in_units('Msun'),
+           AnaTotDMMass)
 
-    AnaTotStarMass = yt.units.yt_array.YTQuantity(1776251.,'Msun')
-    yield assert_almost_equal, ad[('stars','particle_mass')].sum()\
-        .in_units('Msun'), AnaTotStarMass
+    AnaTotStarMass = YTQuantity(1776701.3990607238, 'Msun')
+    yield (assert_almost_equal,
+           ad[('stars', 'particle_mass')].sum().in_units('Msun'),
+           AnaTotStarMass)
 
-    AnaTotStarMassInitial = yt.units.yt_array.YTQuantity(2422854.,'Msun')
-    yield assert_almost_equal, ad[('stars','particle_mass_initial')].sum()\
-        .in_units('Msun'), AnaTotStarMass
+    AnaTotStarMassInitial = YTQuantity(2423468.2801332865, 'Msun')
+    yield (assert_almost_equal,
+           ad[('stars', 'particle_mass_initial')].sum().in_units('Msun'),
+           AnaTotStarMassInitial)
 
-    AnaTotGasMass = yt.units.yt_array.YTQuantity(1.781994e+13,'Msun')
-    yield assert_almost_equal, ad[('gas','cell_mass')].sum()\
-        .in_units('Msun'), AnaTotGasMass
+    AnaTotGasMass = YTQuantity(1.7826982029216785e+13, 'Msun')
+    yield (assert_almost_equal, ad[('gas', 'cell_mass')].sum().in_units('Msun'),
+           AnaTotGasMass)
 
-    AnaTotTemp = yt.units.yt_array.YTQuantity(1.5019e11, 'K') #just leaves
-    yield assert_equal, ad[('gas','temperature')].sum(), AnaTotTemp
+    AnaTotTemp = YTQuantity(150219844793.39072, 'K')  # just leaves
+    yield assert_equal, ad[('gas', 'temperature')].sum(), AnaTotTemp
 
 
 @requires_file(d9p)
@@ -97,4 +111,3 @@
 def test_units_override():
     for test in units_override_check(d9p):
         yield test
-

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/artio/data_structures.py
--- a/yt/frontends/artio/data_structures.py
+++ b/yt/frontends/artio/data_structures.py
@@ -13,26 +13,27 @@
 #
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
+
 import numpy as np
+import os
 import stat
 import weakref
-from yt.extern.six.moves import cStringIO
 
-from .definitions import ARTIOconstants
-from ._artio_caller import \
-    artio_is_valid, artio_fileset, ARTIOOctreeContainer, \
-    ARTIORootMeshContainer, ARTIOSFCRangeHandler
-from . import _artio_caller
-from yt.utilities.definitions import \
-    mpc_conversion, sec_conversion
-from .fields import \
+from collections import defaultdict
+
+from yt.frontends.artio._artio_caller import \
+    artio_is_valid, \
+    artio_fileset, \
+    ARTIOSFCRangeHandler
+from yt.frontends.artio import _artio_caller
+from yt.frontends.artio.fields import \
     ARTIOFieldInfo
-from yt.fields.particle_fields import \
-    standard_particle_fields
 
-from yt.funcs import *
+from yt.funcs import \
+    mylog
 from yt.geometry.geometry_handler import \
-    Index, YTDataChunk
+    Index, \
+    YTDataChunk
 import yt.geometry.particle_deposit as particle_deposit
 from yt.data_objects.static_output import \
     Dataset
@@ -40,9 +41,8 @@
     OctreeSubset
 from yt.data_objects.data_containers import \
     YTFieldData
-
-from yt.fields.field_info_container import \
-    FieldInfoContainer, NullFunc
+from yt.utilities.exceptions import \
+    YTParticleDepositionNotImplemented
 
 class ARTIOOctreeSubset(OctreeSubset):
     _domain_offset = 0
@@ -346,7 +346,6 @@
         # hard-coded -- not provided by headers
         self.dimensionality = 3
         self.refine_by = 2
-        print(self.parameters)
         self.parameters["HydroMethod"] = 'artio'
         self.parameters["Time"] = 1.  # default unit is 1...
 

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/artio/fields.py
--- a/yt/frontends/artio/fields.py
+++ b/yt/frontends/artio/fields.py
@@ -14,9 +14,6 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import numpy as np
-
-from yt.funcs import mylog
 from yt.fields.field_info_container import \
     FieldInfoContainer
 from yt.fields.field_detector import \
@@ -25,8 +22,6 @@
     YTArray
 
 from yt.utilities.physical_constants import \
-    mh, \
-    mass_sun_cgs, \
     boltzmann_constant_cgs, \
     amu_cgs
 

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/artio/setup.py
--- a/yt/frontends/artio/setup.py
+++ b/yt/frontends/artio/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 import glob
 
 

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/artio/tests/test_outputs.py
--- a/yt/frontends/artio/tests/test_outputs.py
+++ b/yt/frontends/artio/tests/test_outputs.py
@@ -14,7 +14,10 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from yt.testing import *
+from yt.testing import \
+    assert_equal, \
+    requires_file, \
+    units_override_check
 from yt.utilities.answer_testing.framework import \
     requires_ds, \
     data_dir_load, \

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/athena/data_structures.py
--- a/yt/frontends/athena/data_structures.py
+++ b/yt/frontends/athena/data_structures.py
@@ -13,25 +13,27 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from yt.utilities.on_demand_imports import _h5py as h5py
 import numpy as np
+import os
 import weakref
-import glob #ST 9/12
-from yt.funcs import *
+import glob
+
+from yt.funcs import \
+    mylog, \
+    ensure_tuple
 from yt.data_objects.grid_patch import \
-           AMRGridPatch
+    AMRGridPatch
 from yt.geometry.grid_geometry_handler import \
     GridIndex
 from yt.data_objects.static_output import \
-           Dataset
+    Dataset
 from yt.utilities.lib.misc_utilities import \
     get_box_grids_level
 from yt.geometry.geometry_handler import \
     YTDataChunk
-from yt.extern.six import PY2, PY3
+from yt.extern.six import PY2
 
 from .fields import AthenaFieldInfo
-from yt.units.yt_array import YTQuantity
 from yt.utilities.decompose import \
     decompose_array, get_psize
 
@@ -75,9 +77,9 @@
 
 class AthenaGrid(AMRGridPatch):
     _id_offset = 0
+
     def __init__(self, id, index, level, start, dimensions,
                  file_offset, read_dims):
-        df = index.dataset.filename[4:-4]
         gname = index.grid_filenames[id]
         AMRGridPatch.__init__(self, id, filename = gname,
                               index = index)
@@ -224,7 +226,6 @@
         grid = {}
         grid['read_field'] = None
         grid['read_type'] = None
-        table_read=False
         line = f.readline()
         while grid['read_field'] is None:
             parse_line(line, grid)
@@ -270,7 +271,6 @@
             gridread = {}
             gridread['read_field'] = None
             gridread['read_type'] = None
-            table_read=False
             line = f.readline()
             while gridread['read_field'] is None:
                 parse_line(line, gridread)
@@ -421,8 +421,6 @@
                                 self.grid_levels[i] + 1,
                                 self.grid_left_edge, self.grid_right_edge,
                                 self.grid_levels, mask)
-                #ids = np.where(mask.astype("bool")) # where is a tuple
-                #mask[ids] = True
             grid.Children = [g for g in self.grids[mask.astype("bool")] if g.Level == grid.Level + 1]
         mylog.debug("Second pass; identifying parents")
         for i, grid in enumerate(self.grids): # Second pass
@@ -436,7 +434,6 @@
         return [g for g in self.grids[mask] if g.Level == grid.Level + 1]
 
     def _chunk_io(self, dobj, cache = True, local_only = False):
-        gfiles = defaultdict(list)
         gobjs = getattr(dobj._current_chunk, "objs", dobj._chunk_info)
         for subset in gobjs:
             yield YTDataChunk(dobj, "io", [subset],

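Beyond the import cleanup, the Athena hunks delete locals that are assigned but never read (df, table_read, gfiles); pyflakes reports these as F841, "local variable assigned but never used". A toy reduction of the pattern, not the yt code:

    def parse_grid_header(line):
        # before: table_read = False   # F841: assigned, never used
        key, _, value = line.partition('=')
        return key.strip(), value.strip()

    print(parse_grid_header('read_field = density'))
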
diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/athena/fields.py
--- a/yt/frontends/athena/fields.py
+++ b/yt/frontends/athena/fields.py
@@ -13,12 +13,10 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import numpy as np
 from yt.fields.field_info_container import \
     FieldInfoContainer
 from yt.utilities.physical_constants import \
-    kboltz,mh
-from yt.units.yt_array import YTArray
+    kboltz, mh
 
 b_units = "code_magnetic"
 pres_units = "code_mass/(code_length*code_time**2)"

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/athena/io.py
--- a/yt/frontends/athena/io.py
+++ b/yt/frontends/athena/io.py
@@ -12,10 +12,11 @@
 #
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
+
 from yt.utilities.io_handler import \
-           BaseIOHandler
+    BaseIOHandler
 import numpy as np
-from yt.funcs import mylog, defaultdict
+from yt.funcs import mylog
 from .data_structures import chk23
 
 float_size = {"float":np.dtype(">f4").itemsize,

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/athena/setup.py
--- a/yt/frontends/athena/setup.py
+++ b/yt/frontends/athena/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 
 
 def configuration(parent_package='', top_path=None):

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/athena/tests/test_outputs.py
--- a/yt/frontends/athena/tests/test_outputs.py
+++ b/yt/frontends/athena/tests/test_outputs.py
@@ -13,11 +13,13 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from yt.testing import *
+from yt.testing import \
+    assert_equal, \
+    requires_file, \
+    assert_allclose_units
 from yt.utilities.answer_testing.framework import \
     requires_ds, \
     small_patch_amr, \
-    big_patch_amr, \
     data_dir_load
 from yt.frontends.athena.api import AthenaDataset
 from yt.config import ytcfg

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/boxlib/data_structures.py
--- a/yt/frontends/boxlib/data_structures.py
+++ b/yt/frontends/boxlib/data_structures.py
@@ -13,6 +13,7 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
+import inspect
 import os
 import re
 
@@ -20,7 +21,9 @@
 
 import numpy as np
 
-from yt.funcs import *
+from yt.funcs import \
+    mylog, \
+    ensure_tuple
 from yt.data_objects.grid_patch import AMRGridPatch
 from yt.extern.six.moves import zip as izip
 from yt.geometry.grid_geometry_handler import GridIndex
@@ -188,7 +191,7 @@
             vals = next(header_file).split()
             lev, ngrids = int(vals[0]), int(vals[1])
             assert(lev == level)
-            nsteps = int(next(header_file))
+            nsteps = int(next(header_file))  # NOQA
             for gi in range(ngrids):
                 xlo, xhi = [float(v) for v in next(header_file).split()]
                 if self.dimensionality > 1:
@@ -211,7 +214,7 @@
             next(level_header_file)
             next(level_header_file)
             # Now we get the number of components
-            ncomp_this_file = int(next(level_header_file))
+            ncomp_this_file = int(next(level_header_file))  # NOQA
             # Skip the next line, which contains the number of ghost zones
             next(level_header_file)
             # To decipher this next line, we expect something like:
@@ -889,15 +892,16 @@
                   ['particle_velocity_%s' % ax for ax in 'xyz']:
             self.field_list.append(("io", fn))
         header = open(os.path.join(self.ds.output_dir, "DM", "Header"))
-        version = header.readline()
-        ndim = header.readline()
-        nfields = header.readline()
-        ntotalpart = int(header.readline())
-        dummy = header.readline() # nextid
-        maxlevel = int(header.readline()) # max level
+        version = header.readline()  # NOQA
+        ndim = header.readline()  # NOQA
+        nfields = header.readline()  # NOQA
+        ntotalpart = int(header.readline())  # NOQA
+        nextid = header.readline()  # NOQA
+        maxlevel = int(header.readline())  # NOQA
 
         # Skip over how many grids on each level; this is degenerate
-        for i in range(maxlevel + 1): dummy = header.readline()
+        for i in range(maxlevel + 1):
+            header.readline()
 
         grid_info = np.fromiter((int(i) for line in header.readlines()
                                  for i in line.split()),
@@ -972,8 +976,9 @@
 
 def _guess_pcast(vals):
     # Now we guess some things about the parameter and its type
-    v = vals.split()[0] # Just in case there are multiple; we'll go
-                        # back afterward to using vals.
+    # Just in case there are multiple; we'll go
+    # back afterward to using vals.
+    v = vals.split()[0]
     try:
         float(v.upper().replace("D", "E"))
     except:
@@ -986,6 +991,7 @@
             pcast = float
         else:
             pcast = int
-    vals = [pcast(v) for v in vals.split()]
-    if len(vals) == 1: vals = vals[0]
+    vals = [pcast(value) for value in vals.split()]
+    if len(vals) == 1:
+        vals = vals[0]
     return vals

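The "# NOQA" markers in the BoxLib header parser acknowledge a real constraint: each readline() must happen to advance the file position even when the value is not needed, so deleting the assignment is not an option; the marker tells flake8 to skip that line. A runnable sketch with a made-up three-line header, not the real BoxLib layout:

    import io

    def read_header(handle):
        version = handle.readline()        # NOQA -- must consume the line
        ndim = int(handle.readline())
        ntotalpart = int(handle.readline())
        return ndim, ntotalpart

    print(read_header(io.StringIO('1.0\n3\n4096\n')))   # -> (3, 4096)
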
diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/boxlib/definitions.py
--- a/yt/frontends/boxlib/definitions.py
+++ b/yt/frontends/boxlib/definitions.py
@@ -12,7 +12,7 @@
 #
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
-from yt.funcs import *
+
 
 # TODO: get rid of enzo parameters we do not need
 parameterDict = {"CosmologyCurrentRedshift": float,

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/boxlib/fields.py
--- a/yt/frontends/boxlib/fields.py
+++ b/yt/frontends/boxlib/fields.py
@@ -184,9 +184,10 @@
                 if field[3] in string.ascii_letters:
                     element, weight = field[2:4], field[4:-1]
                 else:
-                    element, weight = field[2:3], field[3:-1]
+                    element, weight = field[2:3], field[3:-1]  # NOQA
 
-                # Here we can, later, add number density.
+                # Here we can, later, add number density
+                # right now element and weight inferred above are unused
 
 
 class MaestroFieldInfo(FieldInfoContainer):
@@ -280,10 +281,12 @@
                     if field[3] in string.ascii_letters:
                         element, weight = field[2:4], field[4:-1]
                     else:
-                        element, weight = field[2:3], field[3:-1]
+                        element, weight = field[2:3], field[3:-1]  # NOQA
                     weight = int(weight)
 
-                # Here we can, later, add number density.
+                # Here we can, later, add number density using 'element' and
+                # 'weight' inferred above
+
             elif field.startswith("omegadot("):
                 nice_name, tex_label = _nice_species_name(field)
                 display_name = r'\dot{\omega}\left[%s\right]' % tex_label

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/boxlib/io.py
--- a/yt/frontends/boxlib/io.py
+++ b/yt/frontends/boxlib/io.py
@@ -15,10 +15,8 @@
 
 import os
 import numpy as np
-from yt.utilities.lib.fortran_reader import \
-    read_castro_particles
 from yt.utilities.io_handler import \
-           BaseIOHandler
+    BaseIOHandler
 from yt.funcs import mylog, defaultdict
 from yt.frontends.chombo.io import parse_orion_sinks
 
@@ -156,37 +154,3 @@
                     line = lines[num]
                     particles.append(read(line, field))
             return np.array(particles)
-
-
-class IOHandlerCastro(IOHandlerBoxlib):
-    _dataset_type = "castro_native"
-
-    def _read_particle_field(self, grid, field):
-        offset = grid._particle_offset
-        filen = os.path.expanduser(grid.particle_filename)
-        off = grid._particle_offset
-        tr = np.zeros(grid.NumberOfParticles, dtype='float64')
-        read_castro_particles(filen, off,
-            castro_particle_field_names.index(field),
-            len(castro_particle_field_names),
-            tr)
-        return tr
-
-nyx_particle_field_names = ['particle_position_%s' % ax for ax in 'xyz'] + \
-                           ['particle_mass'] +  \
-                           ['particle_velocity_%s' % ax for ax in 'xyz']
-
-class IOHandlerNyx(IOHandlerBoxlib):
-    _dataset_type = "nyx_native"
-
-    def _read_particle_coords(self, chunks, ptf):
-        offset = grid._particle_offset
-        filen = os.path.expanduser(grid.particle_filename)
-        off = grid._particle_offset
-        tr = np.zeros(grid.NumberOfParticles, dtype='float64')
-        read_castro_particles(filen, off,
-                            nyx_particle_field_names.index(field),
-                            len(nyx_particle_field_names), tr)
-
-    def _read_particle_fields(self, chunks, ptf, fields):
-        pass

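The deleted Castro and Nyx handlers were dead on arrival: _read_particle_coords referenced grid and field, names that exist nowhere in its scope, so any call would have raised NameError. pyflakes reports such references as F821, which is how cleanups like this tend to be found. A toy reproduction:

    def read_particle_coords():
        return grid._particle_offset    # F821: undefined name 'grid'

    try:
        read_particle_coords()
    except NameError as err:
        print('unreachable in practice:', err)
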
diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/boxlib/setup.py
--- a/yt/frontends/boxlib/setup.py
+++ b/yt/frontends/boxlib/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 
 
 def configuration(parent_package='', top_path=None):

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/boxlib/tests/test_orion.py
--- a/yt/frontends/boxlib/tests/test_orion.py
+++ b/yt/frontends/boxlib/tests/test_orion.py
@@ -13,11 +13,13 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from yt.testing import *
+from yt.testing import \
+    assert_equal, \
+    requires_file, \
+    units_override_check
 from yt.utilities.answer_testing.framework import \
     requires_ds, \
     small_patch_amr, \
-    big_patch_amr, \
     data_dir_load
 from yt.frontends.boxlib.api import OrionDataset
 

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/chombo/data_structures.py
--- a/yt/frontends/chombo/data_structures.py
+++ b/yt/frontends/chombo/data_structures.py
@@ -19,10 +19,11 @@
 import weakref
 import numpy as np
 
+from six import string_types
 from stat import \
     ST_CTIME
 
-from yt.funcs import *
+from yt.funcs import mylog
 from yt.data_objects.grid_patch import \
     AMRGridPatch
 from yt.extern import six
@@ -30,8 +31,6 @@
     GridIndex
 from yt.data_objects.static_output import \
     Dataset
-from yt.utilities.definitions import \
-    mpc_conversion, sec_conversion
 from yt.utilities.file_handler import \
     HDF5FileHandler
 from yt.utilities.parallel_tools.parallel_analysis_interface import \
@@ -108,9 +107,10 @@
         self.directory = ds.fullpath
         self._handle = ds._handle
 
-        tr = self._handle['Chombo_global'].attrs.get("testReal", "float32")
+        self._levels = [
+            key for key in self._handle.keys() if key.startswith('level')
+        ]
 
-        self._levels = [key for key in self._handle.keys() if key.startswith('level')]
         GridIndex.__init__(self, ds, dataset_type)
 
         self._read_particles()
@@ -650,7 +650,7 @@
         pluto_ini_file_exists = False
         orion2_ini_file_exists = False
 
-        if type(args[0]) == type(""):
+        if isinstance(args[0], string_types):
             dir_name = os.path.dirname(os.path.abspath(args[0]))
             pluto_ini_filename = os.path.join(dir_name, "pluto.ini")
             orion2_ini_filename = os.path.join(dir_name, "orion2.ini")

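The Chombo _is_valid fix swaps an exact type comparison for isinstance: type(args[0]) == type("") is false for str subclasses and, on Python 2, for unicode paths, while six.string_types covers str and unicode on either interpreter. A sketch assuming six is importable, as this hunk itself does:

    from six import string_types

    def looks_like_path(arg):
        # was: type(arg) == type("")
        return isinstance(arg, string_types)

    print(looks_like_path('pluto.ini'))   # True
    print(looks_like_path(42))            # False
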
diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/chombo/io.py
--- a/yt/frontends/chombo/io.py
+++ b/yt/frontends/chombo/io.py
@@ -182,9 +182,9 @@
         offsets = np.append(np.array([0]), offsets)
         offsets = np.array(offsets, dtype=np.int64)
 
-        # convert between the global grid id and the id on this level            
+        # convert between the global grid id and the id on this level
         grid_levels = np.array([g.Level for g in self.ds.index.grids])
-        grid_ids = np.array([g.id    for g in self.ds.index.grids])
+        grid_ids = np.array([g.id for g in self.ds.index.grids])
         grid_level_offset = grid_ids[np.where(grid_levels == grid.Level)[0][0]]
         lo = grid.id - grid_level_offset
         hi = lo + 1

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/chombo/setup.py
--- a/yt/frontends/chombo/setup.py
+++ b/yt/frontends/chombo/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 
 
 def configuration(parent_package='', top_path=None):

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/eagle/data_structures.py
--- a/yt/frontends/eagle/data_structures.py
+++ b/yt/frontends/eagle/data_structures.py
@@ -17,7 +17,6 @@
 
 from yt.utilities.on_demand_imports import _h5py as h5py
 import numpy as np
-import types
 
 from yt.frontends.gadget.data_structures import \
     GadgetHDF5Dataset

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/eagle/fields.py
--- a/yt/frontends/eagle/fields.py
+++ b/yt/frontends/eagle/fields.py
@@ -15,19 +15,12 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import os
-import numpy as np
-
-from yt.funcs import *
-
-from yt.fields.field_info_container import \
-    FieldInfoContainer
 from yt.frontends.owls.fields import \
     OWLSFieldInfo
-import yt.frontends.owls.owls_ion_tables as oit
 from yt.units.yt_array import YTQuantity
+from yt.utilities.periodic_table import periodic_table
 
-from .definitions import \
+from yt.frontends.eagle.definitions import \
     eaglenetwork_ion_lookup
 
 class EagleNetworkFieldInfo(OWLSFieldInfo):

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/eagle/setup.py
--- a/yt/frontends/eagle/setup.py
+++ b/yt/frontends/eagle/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 
 
 def configuration(parent_package='', top_path=None):

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/eagle/tests/test_outputs.py
--- a/yt/frontends/eagle/tests/test_outputs.py
+++ b/yt/frontends/eagle/tests/test_outputs.py
@@ -17,7 +17,6 @@
 from yt.testing import \
     requires_file
 from yt.utilities.answer_testing.framework import \
-    requires_ds, \
     data_dir_load
 from yt.frontends.eagle.api import EagleDataset
 

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/enzo/answer_testing_support.py
--- a/yt/frontends/enzo/answer_testing_support.py
+++ b/yt/frontends/enzo/answer_testing_support.py
@@ -13,20 +13,22 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from yt.testing import *
+import numpy as np
+import os
+
+from functools import wraps
+
 from yt.config import ytcfg
-from yt.mods import *
-
+from yt.convenience import load
+from yt.testing import assert_allclose
 from yt.utilities.answer_testing.framework import \
-     AnswerTestingTest, \
-     can_run_ds, \
-     FieldValuesTest, \
-     GridHierarchyTest, \
-     GridValuesTest, \
-     ProjectionValuesTest, \
-     ParentageRelationshipsTest, \
-     temp_cwd, \
-     AssertWrapper
+    AnswerTestingTest, \
+    can_run_ds, \
+    FieldValuesTest, \
+    GridValuesTest, \
+    ProjectionValuesTest, \
+    temp_cwd, \
+    AssertWrapper
 
 def requires_outputlog(path = ".", prefix = ""):
     def ffalse(func):

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/enzo/data_structures.py
--- a/yt/frontends/enzo/data_structures.py
+++ b/yt/frontends/enzo/data_structures.py
@@ -19,13 +19,16 @@
 import os
 import stat
 import string
+import time
 import re
 
-from threading import Thread
-
+from collections import defaultdict
 from yt.extern.six.moves import zip as izip
 
-from yt.funcs import *
+from yt.funcs import \
+    ensure_list, \
+    ensure_tuple, \
+    get_pbar
 from yt.config import ytcfg
 from yt.data_objects.grid_patch import \
     AMRGridPatch
@@ -36,20 +39,15 @@
 from yt.data_objects.static_output import \
     Dataset
 from yt.fields.field_info_container import \
-    FieldInfoContainer, NullFunc
-from yt.utilities.definitions import \
-    mpc_conversion, sec_conversion
+    NullFunc
 from yt.utilities.physical_constants import \
     rho_crit_g_cm3_h2, cm_per_mpc
-from yt.utilities.io_handler import io_registry
 from yt.utilities.logger import ytLogger as mylog
 from yt.utilities.pyparselibconfig import libconfig
 
 from .fields import \
     EnzoFieldInfo
 
-from yt.utilities.parallel_tools.parallel_analysis_interface import \
-    parallel_blocking_call
 
 class EnzoGrid(AMRGridPatch):
     """
@@ -77,7 +75,6 @@
         """
         rf = self.ds.refine_by
         my_ind = self.id - self._id_offset
-        le = self.LeftEdge
         self.dds = self.Parent.dds/rf
         ParentLeftIndex = np.rint((self.LeftEdge-self.Parent.LeftEdge)/self.Parent.dds)
         self.start_index = rf*(ParentLeftIndex + self.Parent.get_global_startindex()).astype('int64')
@@ -148,12 +145,9 @@
         # We will attempt this by creating a datacube that is exactly bigger
         # than the grid by nZones*dx in each direction
         nl = self.get_global_startindex() - n_zones
-        nr = nl + self.ActiveDimensions + 2*n_zones
         new_left_edge = nl * self.dds + self.ds.domain_left_edge
-        new_right_edge = nr * self.dds + self.ds.domain_left_edge
         # Something different needs to be done for the root grid, though
         level = self.Level
-        args = (level, new_left_edge, new_right_edge)
         kwargs = {'dims': self.ActiveDimensions + 2*n_zones,
                   'num_ghost_zones':n_zones,
                   'use_pbar':False}
@@ -197,7 +191,7 @@
     def __init__(self, ds, dataset_type):
 
         self.dataset_type = dataset_type
-        if ds.file_style != None:
+        if ds.file_style is not None:
             self._bn = ds.file_style
         else:
             self._bn = "%s.cpu%%04i"
@@ -268,14 +262,12 @@
             for line in f:
                 if line.startswith(token):
                     return line.split()[2:]
-        t1 = time.time()
         pattern = r"Pointer: Grid\[(\d*)\]->NextGrid(Next|This)Level = (\d*)\s+$"
         patt = re.compile(pattern)
         f = open(self.index_filename, "rt")
         self.grids = [self.grid(1, self)]
         self.grids[0].Level = 0
         si, ei, LE, RE, fn, npart = [], [], [], [], [], []
-        all = [si, ei, LE, RE, fn]
         pbar = get_pbar("Parsing Hierarchy ", self.num_grids)
         version = self.dataset.parameters.get("VersionNumber", None)
         params = self.dataset.parameters
@@ -326,7 +318,6 @@
         temp_grids[:] = self.grids
         self.grids = temp_grids
         self.filenames = fn
-        t2 = time.time()
 
     def _initialize_grid_arrays(self):
         super(EnzoHierarchy, self)._initialize_grid_arrays()
@@ -403,7 +394,7 @@
         fields = []
         for ptype in self.dataset["AppendActiveParticleType"]:
             select_grids = self.grid_active_particle_count[ptype].flat
-            if np.any(select_grids) == False:
+            if np.any(select_grids) is False:
                 current_ptypes = self.dataset.particle_types
                 new_ptypes = [p for p in current_ptypes if p != ptype]
                 self.dataset.particle_types = new_ptypes
@@ -1027,7 +1018,8 @@
                 self.hubble_constant = self.cosmological_simulation = 0.0
 
     def _obtain_enzo(self):
-        import enzo; return enzo
+        import enzo
+        return enzo
 
     @classmethod
     def _is_valid(cls, *args, **kwargs):

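One hunk in this file deserves a caveat: rewriting np.any(select_grids) == False as "... is False" satisfies the style checker (pycodestyle E712 discourages == False and suggests "is False" or "not"), but np.any returns a numpy.bool_, which is never identical to the built-in False singleton, so the rewritten branch can no longer fire. The robust spelling is "not np.any(...)"; a small demonstration:

    import numpy as np

    select_grids = np.zeros(4, dtype=bool)

    print(np.any(select_grids) == False)   # True: value comparison works
    print(np.any(select_grids) is False)   # False: numpy.bool_ is not
                                           # the built-in False object
    print(not np.any(select_grids))        # True: the idiomatic test
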
diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/enzo/fields.py
--- a/yt/frontends/enzo/fields.py
+++ b/yt/frontends/enzo/fields.py
@@ -13,16 +13,11 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import numpy as np
-
-from yt.funcs import mylog
 from yt.fields.field_info_container import \
     FieldInfoContainer
-from yt.units.yt_array import \
-    YTArray
 from yt.utilities.physical_constants import \
-    mh, me, mp, \
-    mass_sun_cgs
+    me, \
+    mp
 
 b_units = "code_magnetic"
 ra_units = "code_length / code_time**2"

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/enzo/io.py
--- a/yt/frontends/enzo/io.py
+++ b/yt/frontends/enzo/io.py
@@ -13,19 +13,18 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import os
 import random
 from contextlib import contextmanager
 
 from yt.utilities.io_handler import \
-    BaseIOHandler, _axis_ids
+    BaseIOHandler
 from yt.utilities.logger import ytLogger as mylog
-from yt.geometry.selection_routines import mask_fill, AlwaysSelector
+from yt.geometry.selection_routines import AlwaysSelector
 from yt.extern.six import u, b, iteritems
 from yt.utilities.on_demand_imports import _h5py as h5py
 
 import numpy as np
-from yt.funcs import *
+
 
 _convert_mass = ("particle_mass","mass")
 
@@ -305,7 +304,7 @@
     _dataset_type = "enzo_packed_3d_gz"
 
     def __init__(self, *args, **kwargs):
-        super(IOHandlerPackgedHDF5GhostZones, self).__init__(*args, **kwargs)
+        super(IOHandlerPackedHDF5GhostZones, self).__init__(*args, **kwargs)
         NGZ = self.ds.parameters.get("NumberOfGhostZones", 3)
         self._base = (slice(NGZ, -NGZ),
                       slice(NGZ, -NGZ),
@@ -357,7 +356,8 @@
                 raise RuntimeError
             g = chunks[0].objs[0]
             for ftype, fname in fields:
-                rv[(ftype, fname)] = self.grids_in_memory[grid.id][fname].swapaxes(0,2)
+                rv[(ftype, fname)] = \
+                    self.grids_in_memory[g.id][fname].swapaxes(0, 2)
             return rv
         if size is None:
             size = sum((g.count(selector) for chunk in chunks

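The enzo/io.py hunk also fixes a misspelled class name inside a super() call; because Python only resolves that name when __init__ actually runs, such a typo survives import and every test that never instantiates the class. A toy reproduction:

    class Base(object):
        def __init__(self):
            self.ready = True

    class PackedHandler(Base):
        def __init__(self):
            # a typo here (e.g. super(PackgedHandler, self)) would
            # raise NameError at call time, not at import time
            super(PackedHandler, self).__init__()

    print(PackedHandler().ready)   # True
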
diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/enzo/setup.py
--- a/yt/frontends/enzo/setup.py
+++ b/yt/frontends/enzo/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 
 
 def configuration(parent_package='', top_path=None):

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/enzo/simulation_handling.py
--- a/yt/frontends/enzo/simulation_handling.py
+++ b/yt/frontends/enzo/simulation_handling.py
@@ -17,6 +17,8 @@
 import glob
 import os
 
+from math import ceil
+
 from yt.convenience import \
     load, \
     only_on_root
@@ -26,13 +28,14 @@
 from yt.units.unit_registry import \
     UnitRegistry
 from yt.units.yt_array import \
-    YTArray, YTQuantity
+    YTArray
 from yt.utilities.cosmology import \
     Cosmology
 from yt.utilities.exceptions import \
     InvalidSimulationTimeSeries, \
     MissingParameter, \
-    NoStoppingCondition
+    NoStoppingCondition, \
+    YTOutputNotIdentified
 from yt.utilities.logger import ytLogger as \
     mylog
 from yt.utilities.parallel_tools.parallel_analysis_interface import \
@@ -385,7 +388,7 @@
                           'final_redshift': 'CosmologyFinalRedshift'}
             self.cosmological_simulation = 1
             for a, v in cosmo_attr.items():
-                if not v in self.parameters:
+                if v not in self.parameters:
                     raise MissingParameter(self.parameter_filename, v)
                 setattr(self, a, self.parameters[v])
         else:
@@ -412,7 +415,7 @@
 
         self.all_time_outputs = []
         if self.final_time is None or \
-            not 'dtDataDump' in self.parameters or \
+            'dtDataDump' not in self.parameters or \
             self.parameters['dtDataDump'] <= 0.0: return []
 
         index = 0
@@ -441,7 +444,7 @@
         mylog.warn('Calculating cycle outputs.  Dataset times will be unavailable.')
 
         if self.stop_cycle is None or \
-            not 'CycleSkipDataDump' in self.parameters or \
+            'CycleSkipDataDump' not in self.parameters or \
             self.parameters['CycleSkipDataDump'] <= 0.0: return []
 
         self.all_time_outputs = []
@@ -623,7 +626,6 @@
         mylog.info("Writing redshift output list to %s.", filename)
         f = open(filename, 'w')
         for q, output in enumerate(outputs):
-            z_string = "%%s[%%d] = %%.%df" % decimals
             f.write(("CosmologyOutputRedshift[%d] = %."
                      + str(decimals) + "f\n") %
                     ((q + start_index), output['redshift']))

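The "not v in self.parameters" rewrites are pycodestyle E713: "not x in y" already parses as "not (x in y)", so "x not in y" is equivalent and states the intent directly. Reduced to a runnable sketch:

    parameters = {'CosmologyInitialRedshift': 99.0}

    for required in ('CosmologyInitialRedshift',
                     'CosmologyFinalRedshift'):
        if required not in parameters:   # was: if not required in ...
            print('missing parameter:', required)
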
diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/enzo/tests/test_outputs.py
--- a/yt/frontends/enzo/tests/test_outputs.py
+++ b/yt/frontends/enzo/tests/test_outputs.py
@@ -13,7 +13,14 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from yt.testing import *
+import numpy as np
+
+from yt.testing import \
+    assert_almost_equal, \
+    assert_equal, \
+    requires_file, \
+    units_override_check, \
+    assert_array_equal
 from yt.utilities.answer_testing.framework import \
     requires_ds, \
     small_patch_amr, \
@@ -41,12 +48,12 @@
     dd = ds.all_data()
     dens_enzo = dd["Density"].copy()
     for f in sorted(ds.field_list):
-        if not f[1].endswith("_Density") or \
-               f[1].startswith("Dark_Matter_")  or \
-               f[1].startswith("Electron_") or \
-               f[1].startswith("SFR_") or \
-               f[1].startswith("Forming_Stellar_") or \
-               f[1].startswith("Star_Particle_"):
+        ff = f[1]
+        if not ff.endswith("_Density"):
+            continue
+        start_strings = ["Electron_", "SFR_", "Forming_Stellar_",
+                         "Dark_Matter", "Star_Particle_"]
+        if any([ff.startswith(ss) for ss in start_strings]):
             continue
         dens_enzo -= dd[f]
     delta_enzo = np.abs(dens_enzo / dd["Density"])

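The rewritten species filter in the Enzo tests replaces a six-clause boolean with an early continue plus any(). As a side note, str.startswith also accepts a tuple of prefixes, which would remove the list comprehension entirely; a reduced version of the same filter:

    fields = ['Density', 'Electron_Density', 'Metal_Density',
              'Dark_Matter_Density', 'Star_Particle_Density']
    skip = ('Electron_', 'SFR_', 'Forming_Stellar_',
            'Dark_Matter', 'Star_Particle_')

    species = [f for f in fields
               if f.endswith('_Density') and not f.startswith(skip)]
    print(species)   # ['Metal_Density']
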
diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/fits/data_structures.py
--- a/yt/frontends/fits/data_structures.py
+++ b/yt/frontends/fits/data_structures.py
@@ -11,17 +11,22 @@
 #-----------------------------------------------------------------------------
 
 import stat
-import types
 import numpy as np
 import numpy.core.defchararray as np_char
+import os
+import re
+import time
+import uuid
 import weakref
 import warnings
-import re
-import uuid
 
-from yt.extern.six import iteritems
+
+from collections import defaultdict
+
 from yt.config import ytcfg
-from yt.funcs import *
+from yt.funcs import \
+    mylog, \
+    ensure_list
 from yt.data_objects.grid_patch import \
     AMRGridPatch
 from yt.geometry.grid_geometry_handler import \
@@ -42,7 +47,6 @@
     prefixable_units, \
     unit_prefixes
 from yt.units import dimensions
-from yt.units.yt_array import YTQuantity
 from yt.utilities.on_demand_imports import _astropy, NotAModule
 
 
@@ -137,10 +141,10 @@
                 mylog.info("Adding field %s to the list of fields." % (fname))
                 self.field_list.append(("io",fname))
                 if k in ["x","y"]:
-                    unit = "code_length"
+                    field_unit = "code_length"
                 else:
-                    unit = v
-                self.dataset.field_units[("io",fname)] = unit
+                    field_unit = v
+                self.dataset.field_units[("io",fname)] = field_unit
             return
         self._axis_map = {}
         self._file_map = {}
@@ -149,7 +153,9 @@
         dup_field_index = {}
         # Since FITS header keywords are case-insensitive, we only pick a subset of
         # prefixes, ones that we expect to end up in headers.
-        known_units = dict([(unit.lower(),unit) for unit in self.ds.unit_registry.lut])
+        known_units = dict(
+            [(unit.lower(), unit) for unit in self.ds.unit_registry.lut]
+        )
         for unit in list(known_units.values()):
             if unit in prefixable_units:
                 for p in ["n","u","m","c","k"]:
@@ -211,8 +217,7 @@
         self.num_grids = self.ds.parameters["nprocs"]
 
     def _parse_index(self):
-        f = self._handle # shortcut
-        ds = self.dataset # shortcut
+        ds = self.dataset
 
         # If nprocs > 1, decompose the domain into virtual grids
         if self.num_grids > 1:

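The FITS hunk renames a loop-local "unit" to "field_unit", presumably to keep it distinct from the "unit" the same method later binds when walking the unit registry (the "for unit in list(known_units.values())" loop below); reusing one name for two roles inside a method is a classic source of quiet breakage. A reduced sketch of the renamed role:

    field_units = {}
    for fname, v in [('x', 'cm'), ('density', 'g')]:
        # was "unit = ..." -- renamed so it cannot collide with the
        # registry's "unit" used elsewhere in the method
        field_unit = 'code_length' if fname in ('x', 'y') else v
        field_units[('io', fname)] = field_unit
    print(field_units)
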
diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/fits/fields.py
--- a/yt/frontends/fits/fields.py
+++ b/yt/frontends/fits/fields.py
@@ -10,8 +10,6 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import numpy as np
-from yt.utilities.exceptions import *
 from yt.fields.field_info_container import \
     FieldInfoContainer
 

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/fits/misc.py
--- a/yt/frontends/fits/misc.py
+++ b/yt/frontends/fits/misc.py
@@ -14,8 +14,8 @@
 import base64
 from yt.extern.six import PY3
 from yt.fields.derived_field import ValidateSpatial
+from yt.funcs import mylog
 from yt.utilities.on_demand_imports import _astropy
-from yt.funcs import mylog, get_image_suffix
 from yt.visualization._mpl_imports import FigureCanvasAgg
 from yt.units.yt_array import YTQuantity, YTArray
 from yt.utilities.fits_image import FITSImageData
@@ -23,7 +23,7 @@
     from io import BytesIO as IO
 else:
     from yt.extern.six.moves import StringIO as IO
-    
+
 import os
 
 def _make_counts(emin, emax):

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/fits/setup.py
--- a/yt/frontends/fits/setup.py
+++ b/yt/frontends/fits/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 
 
 def configuration(parent_package='', top_path=None):

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/fits/tests/test_outputs.py
--- a/yt/frontends/fits/tests/test_outputs.py
+++ b/yt/frontends/fits/tests/test_outputs.py
@@ -13,7 +13,10 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from yt.testing import *
+from yt.testing import \
+    assert_equal, \
+    requires_file, \
+    units_override_check
 from yt.utilities.answer_testing.framework import \
     requires_ds, \
     small_patch_amr, \

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/flash/data_structures.py
--- a/yt/frontends/flash/data_structures.py
+++ b/yt/frontends/flash/data_structures.py
@@ -13,27 +13,20 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from yt.utilities.on_demand_imports import _h5py as h5py
+import os
 import stat
 import numpy as np
 import weakref
 
-from yt.config import ytcfg
-from yt.funcs import *
+from yt.funcs import mylog
 from yt.data_objects.grid_patch import \
     AMRGridPatch
 from yt.geometry.grid_geometry_handler import \
     GridIndex
-from yt.geometry.geometry_handler import \
-    YTDataChunk
 from yt.data_objects.static_output import \
     Dataset
-from yt.utilities.definitions import \
-    mpc_conversion, sec_conversion
 from yt.utilities.file_handler import \
     HDF5FileHandler
-from yt.utilities.io_handler import \
-    io_registry
 from yt.utilities.physical_constants import cm_per_mpc
 from .fields import FLASHFieldInfo
 
@@ -71,7 +64,6 @@
         pass
 
     def _detect_output_fields(self):
-        ncomp = self._handle["/unknown names"].shape[0]
         self.field_list = [("flash", s.decode("ascii","ignore"))
                            for s in self._handle["/unknown names"][:].flat]
         if ("/particle names" in self._particle_handle):
@@ -159,9 +151,6 @@
             gre[i][:ND] = np.rint(gre[i][:ND]/dx[0][:ND])*dx[0][:ND]
 
     def _populate_grid_objects(self):
-        # We only handle 3D data, so offset is 7 (nfaces+1)
-        
-        offset = 7
         ii = np.argsort(self.grid_levels.flat)
         gid = self._handle["/gid"][:]
         first_ind = -(self.dataset.refine_by**self.dataset.dimensionality)

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/flash/fields.py
--- a/yt/frontends/flash/fields.py
+++ b/yt/frontends/flash/fields.py
@@ -13,13 +13,10 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import numpy as np
 from yt.fields.field_info_container import \
     FieldInfoContainer
 from yt.utilities.physical_constants import \
-    kboltz, mh, Na
-from yt.units.yt_array import \
-    YTArray
+    Na
 
 # Common fields in FLASH: (Thanks to John ZuHone for this list)
 #

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/flash/io.py
--- a/yt/frontends/flash/io.py
+++ b/yt/frontends/flash/io.py
@@ -14,8 +14,6 @@
 #-----------------------------------------------------------------------------
 
 import numpy as np
-from yt.utilities.on_demand_imports import _h5py as h5py
-from yt.utilities.math_utils import prec_accum
 from itertools import groupby
 
 from yt.utilities.io_handler import \
@@ -110,7 +108,6 @@
         rv = {}
         for field in fields:
             ftype, fname = field
-            dt = f["/%s" % fname].dtype
             # Always use *native* 64-bit float.
             rv[field] = np.empty(size, dtype="=f8")
         ng = sum(len(c.objs) for c in chunks)
@@ -149,7 +146,6 @@
         for field in fluid_fields:
             ftype, fname = field
             ds = f["/%s" % fname]
-            ind = 0
             for gs in grid_sequences(chunk.objs):
                 start = gs[0].id - gs[0]._id_offset
                 end = gs[-1].id - gs[-1]._id_offset + 1

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/flash/setup.py
--- a/yt/frontends/flash/setup.py
+++ b/yt/frontends/flash/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 
 
 def configuration(parent_package='', top_path=None):

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/flash/tests/test_outputs.py
--- a/yt/frontends/flash/tests/test_outputs.py
+++ b/yt/frontends/flash/tests/test_outputs.py
@@ -13,11 +13,13 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from yt.testing import *
+from yt.testing import \
+    assert_equal, \
+    requires_file, \
+    units_override_check
 from yt.utilities.answer_testing.framework import \
     requires_ds, \
     small_patch_amr, \
-    big_patch_amr, \
     data_dir_load
 from yt.frontends.flash.api import FLASHDataset
 

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/gadget/data_structures.py
--- a/yt/frontends/gadget/data_structures.py
+++ b/yt/frontends/gadget/data_structures.py
@@ -20,7 +20,6 @@
 import stat
 import struct
 import os
-import types
 
 from yt.data_objects.static_output import \
     ParticleFile
@@ -30,8 +29,6 @@
     ParticleIndex
 from yt.utilities.cosmology import \
     Cosmology
-from yt.utilities.definitions import \
-    sec_conversion
 from yt.utilities.fortran_utils import read_record
 from yt.utilities.logger import ytLogger as mylog
 

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/gadget/setup.py
--- a/yt/frontends/gadget/setup.py
+++ b/yt/frontends/gadget/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 
 
 def configuration(parent_package='', top_path=None):

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/gadget/simulation_handling.py
--- a/yt/frontends/gadget/simulation_handling.py
+++ b/yt/frontends/gadget/simulation_handling.py
@@ -26,13 +26,15 @@
 from yt.units.unit_registry import \
     UnitRegistry
 from yt.units.yt_array import \
-    YTArray, YTQuantity
+    YTArray
 from yt.utilities.cosmology import \
     Cosmology
 from yt.utilities.exceptions import \
     InvalidSimulationTimeSeries, \
     MissingParameter, \
     NoStoppingCondition
+from yt.utilities.exceptions import \
+    YTOutputNotIdentified
 from yt.utilities.logger import ytLogger as \
     mylog
 from yt.utilities.parallel_tools.parallel_analysis_interface import \
@@ -328,7 +330,7 @@
             self.final_redshift = 1.0 / self.parameters["TimeMax"] - 1.0
             self.cosmological_simulation = 1
             for a, v in cosmo_attr.items():
-                if not v in self.parameters:
+                if v not in self.parameters:
                     raise MissingParameter(self.parameter_filename, v)
                 setattr(self, a, self.parameters[v])
         else:
@@ -426,7 +428,7 @@
                 self.final_time = self.quan(self.parameters["TimeMax"], "code_time")
             else:
                 self.final_time = None
-            if not "TimeMax" in self.parameters:
+            if "TimeMax" not in self.parameters:
                 raise NoStoppingCondition(self.parameter_filename)
 
     def _find_outputs(self):

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/gadget_fof/data_structures.py
--- a/yt/frontends/gadget_fof/data_structures.py
+++ b/yt/frontends/gadget_fof/data_structures.py
@@ -18,19 +18,14 @@
 from yt.utilities.on_demand_imports import _h5py as h5py
 import numpy as np
 import stat
-import weakref
-import struct
 import glob
-import time
 import os
 
-from .fields import \
+from yt.frontends.gadget_fof.fields import \
     GadgetFOFFieldInfo
 
 from yt.utilities.cosmology import \
     Cosmology
-from yt.utilities.definitions import \
-    mpc_conversion, sec_conversion
 from yt.utilities.exceptions import \
     YTException
 from yt.utilities.logger import ytLogger as \
@@ -42,10 +37,7 @@
     ParticleFile
 from yt.frontends.gadget.data_structures import \
     _fix_unit_ordering
-import yt.utilities.fortran_utils as fpu
-from yt.units.yt_array import \
-    YTArray, \
-    YTQuantity
+
 
 class GadgetFOFParticleIndex(ParticleIndex):
     def __init__(self, ds, dataset_type):

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/gadget_fof/fields.py
--- a/yt/frontends/gadget_fof/fields.py
+++ b/yt/frontends/gadget_fof/fields.py
@@ -14,11 +14,8 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from yt.funcs import mylog
 from yt.fields.field_info_container import \
     FieldInfoContainer
-from yt.units.yt_array import \
-    YTArray
 
 m_units = "code_mass"
 p_units = "code_length"

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/gadget_fof/io.py
--- a/yt/frontends/gadget_fof/io.py
+++ b/yt/frontends/gadget_fof/io.py
@@ -17,7 +17,7 @@
 from yt.utilities.on_demand_imports import _h5py as h5py
 import numpy as np
 
-from yt.utilities.exceptions import *
+from yt.utilities.exceptions import YTDomainOverflow
 from yt.funcs import mylog
 
 from yt.utilities.io_handler import \

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/gadget_fof/setup.py
--- a/yt/frontends/gadget_fof/setup.py
+++ b/yt/frontends/gadget_fof/setup.py
@@ -1,8 +1,4 @@
 #!/usr/bin/env python
-import setuptools
-import os
-import sys
-import os.path
 
 
 def configuration(parent_package='', top_path=None):

diff -r b4941cba3982e6eddba1aba16e8d458f9de6980c -r 03e316bc6fddecfb4705d0f73754ce2553e45a3d yt/frontends/gdf/data_structures.py
--- a/yt/frontends/gdf/data_structures.py
+++ b/yt/frontends/gdf/data_structures.py
@@ -14,7 +14,6 @@
 #-----------------------------------------------------------------------------
 
 from yt.utilities.on_demand_imports import _h5py as h5py
-import types
 import numpy as np
 import weakref
 import os

This diff is so big that we needed to truncate the remainder.

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.


