[yt-svn] commit/yt: 2 new changesets

commits-noreply at bitbucket.org
Wed Nov 29 07:37:41 PST 2017


2 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/7b16c9e7e03e/
Changeset:   7b16c9e7e03e
User:        ngoldbaum
Date:        2017-11-13 23:27:42+00:00
Summary:     we no longer need to pass number_of_particles when loading data
Affected #:  2 files

diff -r 21be624bbb559f9f6ff0c47251e53397d8804f1b -r 7b16c9e7e03e17cf974905107357e93bfce8db4c yt/analysis_modules/level_sets/tests/test_clump_finding.py
--- a/yt/analysis_modules/level_sets/tests/test_clump_finding.py
+++ b/yt/analysis_modules/level_sets/tests/test_clump_finding.py
@@ -54,8 +54,7 @@
             "particle_mass": np.ones(n_p),
             "particle_position_x": px,
             "particle_position_y": px,
-            "particle_position_z": px,
-            "number_of_particles": n_p}
+            "particle_position_z": px}
 
     ds = load_uniform_grid(data, dims)
 

diff -r 21be624bbb559f9f6ff0c47251e53397d8804f1b -r 7b16c9e7e03e17cf974905107357e93bfce8db4c yt/testing.py
--- a/yt/testing.py
+++ b/yt/testing.py
@@ -199,7 +199,6 @@
     for field, offset, u in zip(fields, offsets, units):
         v = (prng.random_sample(ndims) - offset) * peak_value
         if field[0] == "all":
-            data['number_of_particles'] = v.size
             v = v.ravel()
         data[field] = (v, u)
     if particles:
@@ -217,7 +216,6 @@
             for f in ('particle_velocity_%s' % ax for ax in 'xyz'):
                 data['io', f] = (prng.random_sample(size=particles) - 0.5, 'cm/s')
             data['io', 'particle_mass'] = (prng.random_sample(particles), 'g')
-        data['number_of_particles'] = particles
     ug = load_uniform_grid(data, ndims, length_unit=length_unit, nprocs=nprocs,
                            unit_system=unit_system, bbox=bbox)
     return ug
@@ -259,7 +257,6 @@
             for f in ('particle_velocity_%s' % ax for ax in 'xyz'):
                 gdata['io', f] = (prng.random_sample(particles) - 0.5, 'cm/s')
             gdata['io', 'particle_mass'] = (prng.random_sample(particles), 'g')
-            gdata['number_of_particles'] = particles
         data.append(gdata)
     bbox = np.array([LE, RE]).T
     return load_amr_grids(data, [32, 32, 32], geometry=geometry, bbox=bbox)
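
For illustration only (not part of the changeset): a minimal sketch of loading in-memory particle data with load_uniform_grid after this change, with no "number_of_particles" entry in the data dict. The array values, dimensions, and length_unit below are assumptions, not taken from the diff.

import numpy as np
import yt

n_p = 1000
dims = (16, 16, 16)
prng = np.random.RandomState(0)

data = {
    "density": np.ones(dims),                        # grid field
    "particle_mass": np.ones(n_p),                   # particle fields passed by name
    "particle_position_x": prng.random_sample(n_p),
    "particle_position_y": prng.random_sample(n_p),
    "particle_position_z": prng.random_sample(n_p),
    # "number_of_particles": n_p,  <- no longer required after this change
}

ds = yt.load_uniform_grid(data, dims, length_unit="cm")
ad = ds.all_data()
print(ad["all", "particle_mass"].size)  # 1000; particle count inferred from array sizes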


https://bitbucket.org/yt_analysis/yt/commits/0e28d5ea5938/
Changeset:   0e28d5ea5938
User:        ngoldbaum
Date:        2017-11-29 15:37:28+00:00
Summary:     Merge pull request #1620 from ngoldbaum/np-data-dict

No longer need to pass number_of_particles when loading data
Affected #:  2 files

diff -r ffdd469ac2df675887bbcca6372a5f7ecd0d1554 -r 0e28d5ea5938979f536edf8f55dbc078f3406102 yt/analysis_modules/level_sets/tests/test_clump_finding.py
--- a/yt/analysis_modules/level_sets/tests/test_clump_finding.py
+++ b/yt/analysis_modules/level_sets/tests/test_clump_finding.py
@@ -54,8 +54,7 @@
             "particle_mass": np.ones(n_p),
             "particle_position_x": px,
             "particle_position_y": px,
-            "particle_position_z": px,
-            "number_of_particles": n_p}
+            "particle_position_z": px}
 
     ds = load_uniform_grid(data, dims)
 

diff -r ffdd469ac2df675887bbcca6372a5f7ecd0d1554 -r 0e28d5ea5938979f536edf8f55dbc078f3406102 yt/testing.py
--- a/yt/testing.py
+++ b/yt/testing.py
@@ -199,7 +199,6 @@
     for field, offset, u in zip(fields, offsets, units):
         v = (prng.random_sample(ndims) - offset) * peak_value
         if field[0] == "all":
-            data['number_of_particles'] = v.size
             v = v.ravel()
         data[field] = (v, u)
     if particles:
@@ -217,7 +216,6 @@
             for f in ('particle_velocity_%s' % ax for ax in 'xyz'):
                 data['io', f] = (prng.random_sample(size=particles) - 0.5, 'cm/s')
             data['io', 'particle_mass'] = (prng.random_sample(particles), 'g')
-        data['number_of_particles'] = particles
     ug = load_uniform_grid(data, ndims, length_unit=length_unit, nprocs=nprocs,
                            unit_system=unit_system, bbox=bbox)
     return ug
@@ -259,7 +257,6 @@
             for f in ('particle_velocity_%s' % ax for ax in 'xyz'):
                 gdata['io', f] = (prng.random_sample(particles) - 0.5, 'cm/s')
             gdata['io', 'particle_mass'] = (prng.random_sample(particles), 'g')
-            gdata['number_of_particles'] = particles
         data.append(gdata)
     bbox = np.array([LE, RE]).T
     return load_amr_grids(data, [32, 32, 32], geometry=geometry, bbox=bbox)
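
Likewise for illustration (an assumed sketch, not part of the changeset): per-grid particle fields with load_amr_grids, again without a "number_of_particles" entry. The grid layout, particle count, and units are made up; the tuple-keyed ('io', ...) particle fields follow the style used in yt/testing.py above.

import numpy as np
import yt

prng = np.random.RandomState(0)
n_p = 64  # particles per grid (illustrative)

grid_data = [
    dict(left_edge=[0.0, 0.0, 0.0], right_edge=[1.0, 1.0, 1.0],
         level=0, dimensions=[16, 16, 16]),
    dict(left_edge=[0.25, 0.25, 0.25], right_edge=[0.75, 0.75, 0.75],
         level=1, dimensions=[16, 16, 16]),
]

for gdata in grid_data:
    gdata["density"] = np.ones(gdata["dimensions"])
    gle = np.array(gdata["left_edge"])
    gre = np.array(gdata["right_edge"])
    for i, ax in enumerate("xyz"):
        # particle positions must fall inside this grid's edges
        pos = gle[i] + (gre[i] - gle[i]) * prng.random_sample(n_p)
        gdata["io", "particle_position_%s" % ax] = (pos, "code_length")
    gdata["io", "particle_mass"] = (prng.random_sample(n_p), "g")
    # gdata["number_of_particles"] = n_p  <- no longer required after this change

ds = yt.load_amr_grids(grid_data, [16, 16, 16])
print(ds.all_data()["io", "particle_mass"].size)  # 128; counts inferred per grid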

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.


