[yt-svn] commit/yt: 7 new changesets

commits-noreply at bitbucket.org commits-noreply at bitbucket.org
Sun Jul 24 08:28:24 PDT 2016


7 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/67225c90d6b8/
Changeset:   67225c90d6b8
Branch:      yt
User:        chummels
Date:        2016-07-24 02:38:09+00:00
Summary:     Adding domain_dims and domain_boundaries to Gadget Simulation class.
Affected #:  1 file

diff -r 2d79c347e8e4a123811b6fac31ff7529d6bf7c91 -r 67225c90d6b8a46a5d14e42f611ff8a0df6c632c yt/frontends/gadget/simulation_handling.py
--- a/yt/frontends/gadget/simulation_handling.py
+++ b/yt/frontends/gadget/simulation_handling.py
@@ -97,7 +97,11 @@
                     dimensions.length, "\\rm{%s}/(1+z)" % my_unit)
             self.length_unit = self.quan(self.unit_base["UnitLength_in_cm"],
                                          "cmcm / h", registry=self.unit_registry)
-            self.box_size *= self.length_unit.in_units("Mpccm / h")
+            self.mass_unit = self.quan(self.unit_base["UnitMass_in_g"],
+                                         "g / h", registry=self.unit_registry)
+            self.box_size *= self.length_unit
+            self.domain_left_edge *= self.length_unit
+            self.domain_right_edge *= self.length_unit
         else:
             # Read time from file for non-cosmological sim
             self.time_unit = self.quan(
@@ -322,6 +326,9 @@
 
             self.parameters[param] = vals
 
+        # Domain dimensions for Gadget datasets are always 2x2x2 for octree
+        self.domain_dimensions = np.array([2,2,2])
+
         if self.parameters["ComovingIntegrationOn"]:
             cosmo_attr = {"box_size": "BoxSize",
                           "omega_lambda": "OmegaLambda",
@@ -334,6 +341,8 @@
                 if v not in self.parameters:
                     raise MissingParameter(self.parameter_filename, v)
                 setattr(self, a, self.parameters[v])
+            self.domain_left_edge = np.array([0., 0., 0.])
+            self.domain_right_edge = np.array([1., 1., 1.]) * self.parameters['BoxSize']
         else:
             self.cosmological_simulation = 0
             self.omega_lambda = self.omega_matter = \


https://bitbucket.org/yt_analysis/yt/commits/12d61fc0cb38/
Changeset:   12d61fc0cb38
Branch:      yt
User:        chummels
Date:        2016-07-24 04:21:34+00:00
Summary:     Assuring that gadget simulation class can find its datasets.
Affected #:  1 file

diff -r 67225c90d6b8a46a5d14e42f611ff8a0df6c632c -r 12d61fc0cb38969f380fe4adb4a5b433a0843bb5 yt/frontends/gadget/simulation_handling.py
--- a/yt/frontends/gadget/simulation_handling.py
+++ b/yt/frontends/gadget/simulation_handling.py
@@ -348,17 +348,29 @@
             self.omega_lambda = self.omega_matter = \
                 self.hubble_constant = 0.0
 
+    def _find_data_dir(self):
+        """
+        Find proper location for datasets.  First look where parameter file
+        points, but if this doesn't exist then default to the current 
+        directory.
+        """
+        if self.parameters["OutputDir"].startswith("/"):
+            data_dir = self.parameters["OutputDir"]
+        else:
+            data_dir = os.path.join(self.directory,
+                                    self.parameters["OutputDir"])
+        if not os.path.exists(data_dir):
+            mylog.info("OutputDir not found at %s, instead using %s." % 
+                       (data_dir, self.directory))
+            data_dir = self.directory
+        self.data_dir = data_dir
+
     def _snapshot_format(self, index=None):
         """
         The snapshot filename for a given index.  Modify this for different 
         naming conventions.
         """
 
-        if self.parameters["OutputDir"].startswith("/"):
-            data_dir = self.parameters["OutputDir"]
-        else:
-            data_dir = os.path.join(self.directory,
-                                    self.parameters["OutputDir"])
         if self.parameters["NumFilesPerSnapshot"] > 1:
             suffix = ".0"
         else:
@@ -371,13 +383,16 @@
             count = "%03d" % index
         filename = "%s_%s%s" % (self.parameters["SnapshotFileBase"],
                                 count, suffix)
-        return os.path.join(data_dir, filename)
+        return os.path.join(self.data_dir, filename)
                 
     def _get_all_outputs(self, find_outputs=False):
         """
         Get all potential datasets and combine into a time-sorted list.
         """
 
+        # Find the data directory where the outputs are
+        self._find_data_dir()
+
         # Create the set of outputs from which further selection will be done.
         if find_outputs:
             self._find_outputs()
@@ -446,7 +461,6 @@
         Search for directories matching the data dump keywords.
        If found, get dataset times by opening the ds.
         """
-
         potential_outputs = glob.glob(self._snapshot_format())
         self.all_outputs = self._check_for_outputs(potential_outputs)
         self.all_outputs.sort(key=lambda obj: obj["time"])


https://bitbucket.org/yt_analysis/yt/commits/0d3655c6addc/
Changeset:   0d3655c6addc
Branch:      yt
User:        chummels
Date:        2016-07-24 04:29:55+00:00
Summary:     Allowing Gadget Simulations to be loaded from anywhere on the file system like other datasets.
Affected #:  1 file

diff -r 12d61fc0cb38969f380fe4adb4a5b433a0843bb5 -r 0d3655c6addc11681f2f01ef72d5c6c58ce81ac5 yt/frontends/gadget/simulation_handling.py
--- a/yt/frontends/gadget/simulation_handling.py
+++ b/yt/frontends/gadget/simulation_handling.py
@@ -399,7 +399,9 @@
         else:
             if self.parameters["OutputListOn"]:
                 a_values = [float(a) for a in 
-                            open(self.parameters["OutputListFilename"], "r").readlines()]
+                            open(os.path.join(self.data_dir, 
+                                 self.parameters["OutputListFilename"]), 
+                            "r").readlines()]
             else:
                 a_values = [float(self.parameters["TimeOfFirstSnapshot"])]
                 time_max = float(self.parameters["TimeMax"])


https://bitbucket.org/yt_analysis/yt/commits/bb8d5eaf4b1a/
Changeset:   bb8d5eaf4b1a
Branch:      yt
User:        chummels
Date:        2016-07-24 07:27:31+00:00
Summary:     Adding tests to assure we can make SPH light rays and absorption spectra
Affected #:  1 file

diff -r 0d3655c6addc11681f2f01ef72d5c6c58ce81ac5 -r bb8d5eaf4b1a52c022198e8cb5e72dd7499d9ac9 yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py
--- a/yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py
+++ b/yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py
@@ -26,17 +26,21 @@
 import shutil
 from yt.utilities.on_demand_imports import \
     _h5py as h5
+from yt.convenience import load
 
 
 COSMO_PLUS = "enzo_cosmology_plus/AMRCosmology.enzo"
 COSMO_PLUS_SINGLE = "enzo_cosmology_plus/RD0009/RD0009"
+GIZMO_PLUS = "gizmo_cosmology_plus/N128L16.param"
+GIZMO_PLUS_SINGLE = "gizmo_cosmology_plus/snap_N128L16_151.hdf5"
 
 
 @requires_file(COSMO_PLUS)
 @requires_answer_testing()
 def test_absorption_spectrum_cosmo():
     """
-    This test generates an absorption spectrum from a cosmological light ray
+    This test generates an absorption spectrum from a compound light ray on a
+    grid dataset
     """
     # Set up in a temp dir
     tmpdir = tempfile.mkdtemp()
@@ -92,7 +96,8 @@
 @requires_answer_testing()
 def test_absorption_spectrum_non_cosmo():
     """
-    This test generates an absorption spectrum from a non-cosmological light ray
+    This test generates an absorption spectrum from a simple light ray on a
+    grid dataset
     """
 
     # Set up in a temp dir
@@ -244,3 +249,114 @@
     a = 1.7e-4
     x = np.linspace(5.0, -3.6, 60)
     yield assert_allclose_units, voigt_old(a, x), voigt_scipy(a, x), 1e-8
+
+@requires_file(GIZMO_PLUS)
+@requires_answer_testing()
+def test_absorption_spectrum_cosmo_sph():
+    """
+    This test generates an absorption spectrum from a compound light ray on a
+    particle dataset
+    """
+    # Set up in a temp dir
+    tmpdir = tempfile.mkdtemp()
+    curdir = os.getcwd()
+    os.chdir(tmpdir)
+
+    lr = LightRay(GIZMO_PLUS, 'Gadget', 0.0, 0.01)
+
+    lr.make_light_ray(seed=1234567,
+                      fields=[('gas', 'temperature'), 
+                              ('gas', 'H_number_density')],
+                      data_filename='lightray.h5')
+
+    sp = AbsorptionSpectrum(900.0, 1800.0, 10000)
+
+    my_label = 'HI Lya'
+    field = ('gas', 'H_number_density')
+    wavelength = 1215.6700  # Angstroms
+    f_value = 4.164E-01
+    gamma = 6.265e+08
+    mass = 1.00794
+
+    sp.add_line(my_label, field, wavelength, f_value,
+                gamma, mass, label_threshold=1.e10)
+
+    my_label = 'HI Lya'
+    field = ('gas', 'H_number_density')
+    wavelength = 912.323660  # Angstroms
+    normalization = 1.6e17
+    index = 3.0
+
+    sp.add_continuum(my_label, field, wavelength, normalization, index)
+
+    wavelength, flux = sp.make_spectrum('lightray.h5',
+                                        output_file='spectrum.h5',
+                                        line_list_file='lines.txt',
+                                        use_peculiar_velocity=True)
+
+    # load just-generated hdf5 file of spectral data (for consistency)
+    data = h5.File('spectrum.h5', 'r')
+
+    for key in data.keys():
+        func = lambda x=key: data[x][:]
+        func.__name__ = "{}_cosmo_sph".format(key)
+        test = GenericArrayTest(None, func)
+        test_absorption_spectrum_cosmo_sph.__name__ = test.description
+        yield test
+
+    # clean up
+    os.chdir(curdir)
+    shutil.rmtree(tmpdir)
+
+@requires_file(GIZMO_PLUS_SINGLE)
+@requires_answer_testing()
+def test_absorption_spectrum_non_cosmo_sph():
+    """
+    This test generates an absorption spectrum from a simple light ray on a
+    particle dataset
+    """
+
+    # Set up in a temp dir
+    tmpdir = tempfile.mkdtemp()
+    curdir = os.getcwd()
+    os.chdir(tmpdir)
+
+    ds = load(GIZMO_PLUS_SINGLE)
+    lr = LightRay(ds)
+    ray_start = ds.domain_left_edge
+    ray_end = ds.domain_right_edge
+    lr.make_light_ray(start_position=ray_start, end_position=ray_end,
+                      fields=[('gas', 'temperature'), 
+                              ('gas', 'H_number_density')],
+                      data_filename='lightray.h5')
+
+    sp = AbsorptionSpectrum(1200.0, 1300.0, 10001)
+
+    my_label = 'HI Lya'
+    field = ('gas', 'H_number_density')
+    wavelength = 1215.6700  # Angstroms
+    f_value = 4.164E-01
+    gamma = 6.265e+08
+    mass = 1.00794
+
+    sp.add_line(my_label, field, wavelength, f_value,
+                gamma, mass, label_threshold=1.e10)
+
+    wavelength, flux = sp.make_spectrum('lightray.h5',
+                                        output_file='spectrum.h5',
+                                        line_list_file='lines.txt',
+                                        use_peculiar_velocity=True)
+
+    # load just-generated hdf5 file of spectral data (for consistency)
+    data = h5.File('spectrum.h5', 'r')
+    
+    for key in data.keys():
+        func = lambda x=key: data[x][:]
+        func.__name__ = "{}_non_cosmo_sph".format(key)
+        test = GenericArrayTest(None, func)
+        test_absorption_spectrum_non_cosmo_sph.__name__ = test.description
+        yield test
+
+    # clean up
+    os.chdir(curdir)
+    shutil.rmtree(tmpdir)


https://bitbucket.org/yt_analysis/yt/commits/5e4aa308dd87/
Changeset:   5e4aa308dd87
Branch:      yt
User:        chummels
Date:        2016-07-24 07:32:31+00:00
Summary:     Adding answer tests for sph absorption spectrum to tests.yaml
Affected #:  1 file

diff -r bb8d5eaf4b1a52c022198e8cb5e72dd7499d9ac9 -r 5e4aa308dd879ce7e92ce95697ece4e7a464c94d tests/tests.yaml
--- a/tests/tests.yaml
+++ b/tests/tests.yaml
@@ -67,9 +67,11 @@
   local_ytdata_000:
     - yt/frontends/ytdata
 
-  local_absorption_spectrum_000:
+  local_absorption_spectrum_001:
     - yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py:test_absorption_spectrum_non_cosmo
     - yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py:test_absorption_spectrum_cosmo
+    - yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py:test_absorption_spectrum_non_cosmo_sph
+    - yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py:test_absorption_spectrum_cosmo_sph
 
 other_tests:
   unittests:


https://bitbucket.org/yt_analysis/yt/commits/65d7055d3f4b/
Changeset:   65d7055d3f4b
Branch:      yt
User:        chummels
Date:        2016-07-24 14:30:29+00:00
Summary:     Changing x *= y to x = x * y for gadget simulation domain arrays to retain units.
Affected #:  1 file

diff -r 5e4aa308dd879ce7e92ce95697ece4e7a464c94d -r 65d7055d3f4b16e1c43462eea93fe45ac4a9156a yt/frontends/gadget/simulation_handling.py
--- a/yt/frontends/gadget/simulation_handling.py
+++ b/yt/frontends/gadget/simulation_handling.py
@@ -99,9 +99,9 @@
                                          "cmcm / h", registry=self.unit_registry)
             self.mass_unit = self.quan(self.unit_base["UnitMass_in_g"],
                                          "g / h", registry=self.unit_registry)
-            self.box_size *= self.length_unit
-            self.domain_left_edge *= self.length_unit
-            self.domain_right_edge *= self.length_unit
+            self.box_size = self.box_size * self.length_unit
+            self.domain_left_edge = self.domain_left_edge * self.length_unit
+            self.domain_right_edge = self.domain_right_edge * self.length_unit
         else:
             # Read time from file for non-cosmological sim
             self.time_unit = self.quan(


https://bitbucket.org/yt_analysis/yt/commits/585aa2c18066/
Changeset:   585aa2c18066
Branch:      yt
User:        ngoldbaum
Date:        2016-07-24 15:27:52+00:00
Summary:     Merged in chummels/yt (pull request #2303)

[bugfix] Re-enabling compound LightRays with gadget frontend. Closes #1251
Affected #:  3 files

diff -r 119f0e32709181f5fc5606a85abdd208c3a5e14e -r 585aa2c180666410619882922d795ea09cc05dc3 tests/tests.yaml
--- a/tests/tests.yaml
+++ b/tests/tests.yaml
@@ -67,9 +67,11 @@
   local_ytdata_000:
     - yt/frontends/ytdata
 
-  local_absorption_spectrum_000:
+  local_absorption_spectrum_001:
     - yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py:test_absorption_spectrum_non_cosmo
     - yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py:test_absorption_spectrum_cosmo
+    - yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py:test_absorption_spectrum_non_cosmo_sph
+    - yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py:test_absorption_spectrum_cosmo_sph
 
 other_tests:
   unittests:

diff -r 119f0e32709181f5fc5606a85abdd208c3a5e14e -r 585aa2c180666410619882922d795ea09cc05dc3 yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py
--- a/yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py
+++ b/yt/analysis_modules/absorption_spectrum/tests/test_absorption_spectrum.py
@@ -26,17 +26,21 @@
 import shutil
 from yt.utilities.on_demand_imports import \
     _h5py as h5
+from yt.convenience import load
 
 
 COSMO_PLUS = "enzo_cosmology_plus/AMRCosmology.enzo"
 COSMO_PLUS_SINGLE = "enzo_cosmology_plus/RD0009/RD0009"
+GIZMO_PLUS = "gizmo_cosmology_plus/N128L16.param"
+GIZMO_PLUS_SINGLE = "gizmo_cosmology_plus/snap_N128L16_151.hdf5"
 
 
 @requires_file(COSMO_PLUS)
 @requires_answer_testing()
 def test_absorption_spectrum_cosmo():
     """
-    This test generates an absorption spectrum from a cosmological light ray
+    This test generates an absorption spectrum from a compound light ray on a
+    grid dataset
     """
     # Set up in a temp dir
     tmpdir = tempfile.mkdtemp()
@@ -92,7 +96,8 @@
 @requires_answer_testing()
 def test_absorption_spectrum_non_cosmo():
     """
-    This test generates an absorption spectrum from a non-cosmological light ray
+    This test generates an absorption spectrum from a simple light ray on a
+    grid dataset
     """
 
     # Set up in a temp dir
@@ -244,3 +249,114 @@
     a = 1.7e-4
     x = np.linspace(5.0, -3.6, 60)
     yield assert_allclose_units, voigt_old(a, x), voigt_scipy(a, x), 1e-8
+
+@requires_file(GIZMO_PLUS)
+@requires_answer_testing()
+def test_absorption_spectrum_cosmo_sph():
+    """
+    This test generates an absorption spectrum from a compound light ray on a
+    particle dataset
+    """
+    # Set up in a temp dir
+    tmpdir = tempfile.mkdtemp()
+    curdir = os.getcwd()
+    os.chdir(tmpdir)
+
+    lr = LightRay(GIZMO_PLUS, 'Gadget', 0.0, 0.01)
+
+    lr.make_light_ray(seed=1234567,
+                      fields=[('gas', 'temperature'), 
+                              ('gas', 'H_number_density')],
+                      data_filename='lightray.h5')
+
+    sp = AbsorptionSpectrum(900.0, 1800.0, 10000)
+
+    my_label = 'HI Lya'
+    field = ('gas', 'H_number_density')
+    wavelength = 1215.6700  # Angstroms
+    f_value = 4.164E-01
+    gamma = 6.265e+08
+    mass = 1.00794
+
+    sp.add_line(my_label, field, wavelength, f_value,
+                gamma, mass, label_threshold=1.e10)
+
+    my_label = 'HI Lya'
+    field = ('gas', 'H_number_density')
+    wavelength = 912.323660  # Angstroms
+    normalization = 1.6e17
+    index = 3.0
+
+    sp.add_continuum(my_label, field, wavelength, normalization, index)
+
+    wavelength, flux = sp.make_spectrum('lightray.h5',
+                                        output_file='spectrum.h5',
+                                        line_list_file='lines.txt',
+                                        use_peculiar_velocity=True)
+
+    # load just-generated hdf5 file of spectral data (for consistency)
+    data = h5.File('spectrum.h5', 'r')
+
+    for key in data.keys():
+        func = lambda x=key: data[x][:]
+        func.__name__ = "{}_cosmo_sph".format(key)
+        test = GenericArrayTest(None, func)
+        test_absorption_spectrum_cosmo_sph.__name__ = test.description
+        yield test
+
+    # clean up
+    os.chdir(curdir)
+    shutil.rmtree(tmpdir)
+
+@requires_file(GIZMO_PLUS_SINGLE)
+@requires_answer_testing()
+def test_absorption_spectrum_non_cosmo_sph():
+    """
+    This test generates an absorption spectrum from a simple light ray on a
+    particle dataset
+    """
+
+    # Set up in a temp dir
+    tmpdir = tempfile.mkdtemp()
+    curdir = os.getcwd()
+    os.chdir(tmpdir)
+
+    ds = load(GIZMO_PLUS_SINGLE)
+    lr = LightRay(ds)
+    ray_start = ds.domain_left_edge
+    ray_end = ds.domain_right_edge
+    lr.make_light_ray(start_position=ray_start, end_position=ray_end,
+                      fields=[('gas', 'temperature'), 
+                              ('gas', 'H_number_density')],
+                      data_filename='lightray.h5')
+
+    sp = AbsorptionSpectrum(1200.0, 1300.0, 10001)
+
+    my_label = 'HI Lya'
+    field = ('gas', 'H_number_density')
+    wavelength = 1215.6700  # Angstroms
+    f_value = 4.164E-01
+    gamma = 6.265e+08
+    mass = 1.00794
+
+    sp.add_line(my_label, field, wavelength, f_value,
+                gamma, mass, label_threshold=1.e10)
+
+    wavelength, flux = sp.make_spectrum('lightray.h5',
+                                        output_file='spectrum.h5',
+                                        line_list_file='lines.txt',
+                                        use_peculiar_velocity=True)
+
+    # load just-generated hdf5 file of spectral data (for consistency)
+    data = h5.File('spectrum.h5', 'r')
+    
+    for key in data.keys():
+        func = lambda x=key: data[x][:]
+        func.__name__ = "{}_non_cosmo_sph".format(key)
+        test = GenericArrayTest(None, func)
+        test_absorption_spectrum_non_cosmo_sph.__name__ = test.description
+        yield test
+
+    # clean up
+    os.chdir(curdir)
+    shutil.rmtree(tmpdir)

diff -r 119f0e32709181f5fc5606a85abdd208c3a5e14e -r 585aa2c180666410619882922d795ea09cc05dc3 yt/frontends/gadget/simulation_handling.py
--- a/yt/frontends/gadget/simulation_handling.py
+++ b/yt/frontends/gadget/simulation_handling.py
@@ -97,7 +97,11 @@
                     dimensions.length, "\\rm{%s}/(1+z)" % my_unit)
             self.length_unit = self.quan(self.unit_base["UnitLength_in_cm"],
                                          "cmcm / h", registry=self.unit_registry)
-            self.box_size *= self.length_unit.in_units("Mpccm / h")
+            self.mass_unit = self.quan(self.unit_base["UnitMass_in_g"],
+                                         "g / h", registry=self.unit_registry)
+            self.box_size = self.box_size * self.length_unit
+            self.domain_left_edge = self.domain_left_edge * self.length_unit
+            self.domain_right_edge = self.domain_right_edge * self.length_unit
         else:
             # Read time from file for non-cosmological sim
             self.time_unit = self.quan(
@@ -322,6 +326,9 @@
 
             self.parameters[param] = vals
 
+        # Domain dimensions for Gadget datasets are always 2x2x2 for octree
+        self.domain_dimensions = np.array([2,2,2])
+
         if self.parameters["ComovingIntegrationOn"]:
             cosmo_attr = {"box_size": "BoxSize",
                           "omega_lambda": "OmegaLambda",
@@ -334,22 +341,36 @@
                 if v not in self.parameters:
                     raise MissingParameter(self.parameter_filename, v)
                 setattr(self, a, self.parameters[v])
+            self.domain_left_edge = np.array([0., 0., 0.])
+            self.domain_right_edge = np.array([1., 1., 1.]) * self.parameters['BoxSize']
         else:
             self.cosmological_simulation = 0
             self.omega_lambda = self.omega_matter = \
                 self.hubble_constant = 0.0
 
+    def _find_data_dir(self):
+        """
+        Find proper location for datasets.  First look where parameter file
+        points, but if this doesn't exist then default to the current 
+        directory.
+        """
+        if self.parameters["OutputDir"].startswith("/"):
+            data_dir = self.parameters["OutputDir"]
+        else:
+            data_dir = os.path.join(self.directory,
+                                    self.parameters["OutputDir"])
+        if not os.path.exists(data_dir):
+            mylog.info("OutputDir not found at %s, instead using %s." % 
+                       (data_dir, self.directory))
+            data_dir = self.directory
+        self.data_dir = data_dir
+
     def _snapshot_format(self, index=None):
         """
         The snapshot filename for a given index.  Modify this for different 
         naming conventions.
         """
 
-        if self.parameters["OutputDir"].startswith("/"):
-            data_dir = self.parameters["OutputDir"]
-        else:
-            data_dir = os.path.join(self.directory,
-                                    self.parameters["OutputDir"])
         if self.parameters["NumFilesPerSnapshot"] > 1:
             suffix = ".0"
         else:
@@ -362,20 +383,25 @@
             count = "%03d" % index
         filename = "%s_%s%s" % (self.parameters["SnapshotFileBase"],
                                 count, suffix)
-        return os.path.join(data_dir, filename)
+        return os.path.join(self.data_dir, filename)
                 
     def _get_all_outputs(self, find_outputs=False):
         """
         Get all potential datasets and combine into a time-sorted list.
         """
 
+        # Find the data directory where the outputs are
+        self._find_data_dir()
+
         # Create the set of outputs from which further selection will be done.
         if find_outputs:
             self._find_outputs()
         else:
             if self.parameters["OutputListOn"]:
                 a_values = [float(a) for a in 
-                            open(self.parameters["OutputListFilename"], "r").readlines()]
+                            open(os.path.join(self.data_dir, 
+                                 self.parameters["OutputListFilename"]), 
+                            "r").readlines()]
             else:
                 a_values = [float(self.parameters["TimeOfFirstSnapshot"])]
                 time_max = float(self.parameters["TimeMax"])
@@ -437,7 +463,6 @@
         Search for directories matching the data dump keywords.
         If found, get dataset times by opening the ds.
         """
-
         potential_outputs = glob.glob(self._snapshot_format())
         self.all_outputs = self._check_for_outputs(potential_outputs)
         self.all_outputs.sort(key=lambda obj: obj["time"])

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.



More information about the yt-svn mailing list