[yt-svn] commit/yt: 4 new changesets
commits-noreply at bitbucket.org
commits-noreply at bitbucket.org
Fri Jan 19 09:25:51 PST 2018
4 new commits in yt:
https://bitbucket.org/yt_analysis/yt/commits/dde2dfe7d6ed/
Changeset: dde2dfe7d6ed
User: ngoldbaum
Date: 2017-12-12 21:44:51+00:00
Summary: update pr backport script for git/github
Affected #: 1 file
diff -r 7731f952da66a137239a2a6a312997c404c2d58c -r dde2dfe7d6ed7abc0ba98cb86483863106bae7db scripts/pr_backport.py
--- a/scripts/pr_backport.py
+++ b/scripts/pr_backport.py
@@ -1,18 +1,41 @@
-from __future__ import print_function
-import hglib
+import dateutil.parser
+import git
import requests
import shutil
import tempfile
-from datetime import datetime
-from distutils.version import LooseVersion
-from time import strptime, mktime
from yt.extern.six.moves import input
-MERGED_PR_ENDPOINT = ("http://bitbucket.org/api/2.0/repositories/yt_analysis/"
- "yt/pullrequests/?state=MERGED")
+API_URL = 'https://api.github.com/graphql'
+
+YT_REPO = "https://github.com/yt-project/yt"
-YT_REPO = "https://bitbucket.org/yt_analysis/yt"
+PR_QUERY = """
+{
+ repository(owner: "yt-project", name: "yt") {
+ pullRequests(first:100 states:MERGED %s) {
+ edges {
+ node {
+ number
+ title
+ mergedAt
+ author {
+ login
+ }
+ body
+ url
+ }
+ }
+ pageInfo {
+ endCursor
+ hasNextPage
+ hasPreviousPage
+ startCursor
+ }
+ }
+ }
+}
+"""
def clone_new_repo(source=None):
@@ -21,304 +44,65 @@
dest_repo_path = path+'/yt-backport'
if source is None:
source = YT_REPO
- hglib.clone(source=source, dest=dest_repo_path)
- with hglib.open(dest_repo_path) as client:
- # Changesets that are on the yt branch but aren't topological ancestors
- # of whichever changeset the experimental bookmark is pointing at
- bookmarks, _ = client.bookmarks()
- bookmark_names = [b[0] for b in bookmarks]
- if 'experimental' in bookmark_names:
- client.update('heads(branch(yt) - ::bookmark(experimental))')
- else:
- client.update('heads(branch(yt))')
+ git.Repo.clone_from(source, dest_repo_path)
return dest_repo_path
-def get_first_commit_after_last_major_release(repo_path):
- """Returns the SHA1 hash of the first commit to the yt branch that wasn't
- included in the last tagged release.
- """
- with hglib.open(repo_path) as client:
- tags = client.log("reverse(tag())")
- tags = [t[2].decode('utf8') for t in tags]
- tags = sorted([t for t in tags if t[:2] == 'yt'])
- for t in tags[::-1]:
- ver = LooseVersion(t)
- if len(ver.version) == 4 or ver.version[4] == 0:
- last_major_tag = t
- break
- last_before_release = client.log(
- "last(ancestors(%s) and branch(yt))" % str(last_major_tag))
- rev = last_before_release[0][1].decode()
- first_after_release = client.log(
- "first(descendants(%s) and branch(yt) and not %s)" % (rev, rev))
- return first_after_release[0][1][:12].decode('utf8')
-
-
-def get_branch_tip(repo_path, branch, exclude=None):
- """Returns the SHA1 hash of the most recent commit on the given branch"""
- revset = "head() and branch(%s)" % branch
- with hglib.open(repo_path) as client:
- if exclude is not None:
- try:
- client.log(exclude)
- revset += "and not %s" % exclude
- except hglib.error.CommandError:
- pass
- change = client.log(revset)[0][1][:12].decode('utf8')
- return change
-
-
-def get_lineage_between_release_and_tip(repo_path, first, last):
- """Returns the lineage of changesets that were at one point the public tip"""
- with hglib.open(repo_path) as client:
- lineage = client.log("'%s'::'%s' and p1('%s'::'%s') + '%s'"
- % (first, last, first, last, last))
- return lineage
-
-
-def get_pull_requests_since_last_release(repo_path):
- """Returns a list of pull requests made since the last tagged release"""
- r = requests.get(MERGED_PR_ENDPOINT)
- done = False
- merged_prs = []
- with hglib.open(repo_path) as client:
- last_tag = client.log("reverse(tag())")[0]
- while not done:
- if r.status_code != 200:
- raise RuntimeError
- data = r.json()
- prs = data['values']
- for pr in prs:
- activity = requests.get(pr['links']['activity']['href']).json()
- merge_date = None
- for action in activity['values']:
- if 'update' in action and action['update']['state'] == 'MERGED':
- merge_date = action['update']['date']
- merge_date = merge_date.split('.')[0]
- timestamp = mktime(strptime(merge_date, "%Y-%m-%dT%H:%M:%S"))
- merge_date = datetime.fromtimestamp(timestamp)
- break
- if merge_date is None:
- break
- if merge_date < last_tag[6]:
- done = True
- break
- merged_prs.append(pr)
- r = requests.get(data['next'])
- return merged_prs
-
-
-def cache_commit_data(prs):
- """Avoid repeated calls to bitbucket API to get the list of commits per PR"""
- commit_data = {}
- for pr in prs:
- data = requests.get(pr['links']['commits']['href']).json()
- if data.keys() == [u'error']:
- # this happens when commits have been stripped, e.g.
- # https://bitbucket.org/yt_analysis/yt/pull-requests/1641
- continue
- done = False
- commits = []
- while not done:
- commits.extend(data['values'])
- if 'next' not in data:
- done = True
- else:
- data = requests.get(data['next']).json()
- commit_data[pr['id']] = commits
- return commit_data
-
-
-def find_commit_in_prs(needle, commit_data, prs):
- """Finds the commit `needle` PR in the commit_data dictionary
-
- If found, returns the pr the needle commit is in. If the commit was not
- part of the PRs in the dictionary, returns None.
- """
- for pr_id in commit_data:
- commits = commit_data[pr_id]
- for commit in commits:
- if commit['hash'] == needle[1]:
- pr = [pr for pr in prs if pr['id'] == pr_id][0]
- return pr
- return None
-
-
-def find_merge_commit_in_prs(needle, prs):
- """Find the merge commit `needle` in the list of `prs`
-
- If found, returns the pr the merge commit comes from. If not found, return
- None
- """
- for pr in prs[::-1]:
- if pr['merge_commit'] is not None:
- if pr['merge_commit']['hash'] == needle[1][:12].decode('utf8'):
- return pr
- return None
+def get_date_of_last_tag(repo_path):
+ repo = git.Repo(repo_path)
+ tags = sorted(repo.tags, key=lambda t: t.commit.committed_date)
+ return tags[-1].commit.committed_date
-def create_commits_to_prs_mapping(lineage, prs):
- """create a mapping from commits to the pull requests that the commit is
- part of
- """
- commits_to_prs = {}
- # make a copy of this list to avoid side effects from calling this function
- my_prs = list(prs)
- commit_data = cache_commit_data(my_prs)
- for commit in lineage:
- cset_hash = commit[1].decode('utf8')
- message = commit[5].decode('utf8')
- if message.startswith('Merged in') and '(pull request #' in message:
- pr = find_merge_commit_in_prs(commit, my_prs)
- if pr is None:
- continue
- commits_to_prs[cset_hash] = pr
- # Since we know this PR won't have another commit associated with it,
- # remove from global list to reduce number of network accesses
- my_prs.remove(commits_to_prs[cset_hash])
- else:
- pr = find_commit_in_prs(commit, commit_data, my_prs)
- commits_to_prs[cset_hash] = pr
- return commits_to_prs
-
-
-def invert_commits_to_prs_mapping(commits_to_prs):
- """invert the mapping from individual commits to pull requests"""
- inv_map = {}
- for k, v in commits_to_prs.items():
- # can't save v itself in inv_map since it's an unhashable dictionary
- if v is not None:
- created_date = v['created_on'].split('.')[0]
- timestamp = mktime(strptime(created_date, "%Y-%m-%dT%H:%M:%S"))
- created_date = datetime.fromtimestamp(timestamp)
- pr_desc = (v['id'], v['title'], created_date,
- v['links']['html']['href'], v['description'])
- else:
- pr_desc = None
- inv_map[pr_desc] = inv_map.get(pr_desc, [])
- inv_map[pr_desc].append(k)
- return inv_map
+def get_prs_since_last_release(date, key):
+ headers = {"Authorization": 'token %s' % key}
+ resp = requests.post(url=API_URL, json={"query": PR_QUERY % ""}, headers=headers)
+ ret = []
+ while True:
+ jsr = resp.json()
+ cursor = jsr['data']['repository']['pullRequests']['pageInfo']['endCursor']
+ if cursor is None:
+ break
+ prs = jsr['data']['repository']['pullRequests']['edges']
+ for pr in prs:
+ pr_date = dateutil.parser.parse(pr['node']['mergedAt']).timestamp()
+ if pr_date > date:
+ ret.append(pr['node'])
+ resp = requests.post(
+ url=API_URL, json={"query": PR_QUERY % ('after:"%s"' % cursor)},
+ headers=headers)
+ return ret
-def get_last_descendant(repo_path, commit):
- """get the most recent descendant of a commit"""
- with hglib.open(repo_path) as client:
- com = client.log('last(%s::)' % commit)
- return com[0][1][:12]
-
-def screen_already_backported(repo_path, inv_map):
- with hglib.open(repo_path) as client:
- tags = client.log("reverse(tag())")
- tags = [t[2].decode('utf8') for t in tags]
- tags = [LooseVersion(t) for t in tags if t.startswith('yt')]
- major_tags = [
- t for t in tags if len(t.version) == 4 or t.version[-1] == 0]
- most_recent_major_tag_name = major_tags[0].vstring
- lineage = client.log(
- "descendants(%s) and branch(stable)" % most_recent_major_tag_name)
- prs_to_screen = []
- for pr in inv_map:
- for commit in lineage:
- desc = commit[5].decode('utf8')
- if desc.startswith('Backporting PR #%s' % pr[0]):
- prs_to_screen.append(pr)
- for pr in prs_to_screen:
- del inv_map[pr]
- return inv_map
-
-def commit_already_on_stable(repo_path, commit):
- with hglib.open(repo_path) as client:
- commit_info = client.log(commit)[0]
- most_recent_tag_name = client.log("reverse(tag())")[0][2]
- most_recent_tag_name = most_recent_tag_name.decode('utf8')
- lineage = client.log(
- "descendants(%s) and branch(stable)" % most_recent_tag_name)
- # if there is a stable commit with the same commit message,
- # it's been grafted
- if any([commit_info[5] == c[5] for c in lineage]):
- return True
- return False
-
-def backport_pr_commits(repo_path, inv_map, last_stable, prs):
- """backports pull requests to the stable branch.
-
- Accepts a dictionary mapping pull requests to a list of commits that
- are in the pull request.
- """
- pr_list = inv_map.keys()
- pr_list = sorted(pr_list, key=lambda x: x[2])
- for pr_desc in pr_list:
- merge_warn = False
- merge_commits = []
- pr = [pr for pr in prs if pr['id'] == pr_desc[0]][0]
- data = requests.get(pr['links']['commits']['href']).json()
- commits = data['values']
- while 'next' in data:
- data = requests.get(data['next']).json()
- commits.extend(data['values'])
- commits = [com['hash'][:12] for com in commits]
- with hglib.open(repo_path) as client:
- for com in commits:
- if client.log('merge() and %s' % com) != []:
- merge_warn = True
- merge_commits.append(com)
- if len(commits) > 1:
- revset = " | ".join(commits)
- revset = '"%s"' % revset
- message = "Backporting PR #%s %s" % \
- (pr['id'], pr['links']['html']['href'])
- dest = get_last_descendant(repo_path, last_stable).decode('utf8')
- message = \
- "hg rebase -r %s --keep --collapse -m \"%s\" -d %s\n" % \
- (revset, message, dest)
- message += "hg update stable\n\n"
- if merge_warn is True:
- if len(merge_commits) > 1:
- merge_commits = ", ".join(merge_commits)
- else:
- merge_commits = merge_commits[0]
- message += \
- "WARNING, PULL REQUEST CONTAINS MERGE COMMITS, CONSIDER\n" \
- "BACKPORTING BY HAND TO AVOID BACKPORTING UNWANTED CHANGES\n"
- message += \
- "Merge commits are %s\n\n" % merge_commits
- else:
- if commit_already_on_stable(repo_path, commits[0]) is True:
- continue
- message = "hg graft %s\n" % commits[0]
- print("PR #%s\nTitle: %s\nCreated on: %s\nLink: %s\n%s" % pr_desc)
- print("To backport, issue the following command(s):\n")
- print(message)
- input('Press any key to continue')
+def backport_prs(repo_path, prs):
+ for pr in prs:
+ print('')
+ print('PR %s' % pr['number'])
+ print(pr['title'])
+ print(pr['author']['login'])
+ print(pr['body'])
+ print(pr['url'])
+ print("%s.diff" % pr['url'])
+ input("Press any key to continue")
if __name__ == "__main__":
+ #key = input("Please enter your github OAuth API key\n"
+ # "See the github help for instructions on how to "
+ # "generate a personal access token.\n>>> ")
+ key = '6efd3310bf66e941e32ad8ea18ebc581e1ccda1a'
print("")
print("Gathering PR information, this may take a minute.")
print("Don't worry, yt loves you.")
print("")
repo_path = clone_new_repo()
try:
- last_major_release = get_first_commit_after_last_major_release(repo_path)
- last_dev = get_branch_tip(repo_path, 'yt', 'experimental')
- last_stable = get_branch_tip(repo_path, 'stable')
- lineage = get_lineage_between_release_and_tip(
- repo_path, last_major_release, last_dev)
- prs = get_pull_requests_since_last_release(repo_path)
- commits_to_prs = create_commits_to_prs_mapping(lineage, prs)
- inv_map = invert_commits_to_prs_mapping(commits_to_prs)
- # for now, ignore commits that aren't part of a pull request since
- # the last bugfix release. These are mostly commits in pull requests
- # from before the last bugfix release but might include commits that
- # were pushed directly to the repo.
- del inv_map[None]
-
- inv_map = screen_already_backported(repo_path, inv_map)
+ date = get_date_of_last_tag(repo_path)
+ prs = get_prs_since_last_release(date, key)
print("In another terminal window, navigate to the following path:")
print("%s" % repo_path)
input("Press any key to continue")
- backport_pr_commits(repo_path, inv_map, last_stable, prs)
+ backport_prs(repo_path, prs)
input(
"Now you need to push your backported changes. The temporary\n"
"repository currently being used will be deleted as soon as you\n"
https://bitbucket.org/yt_analysis/yt/commits/6cfd69ddeafb/
Changeset: 6cfd69ddeafb
User: ngoldbaum
Date: 2017-12-12 21:49:09+00:00
Summary: update docs on backporting
Affected #: 1 file
diff -r dde2dfe7d6ed7abc0ba98cb86483863106bae7db -r 6cfd69ddeafb6ff4cbb7cd30668ea91ac7f445ce doc/source/developing/releasing.rst
--- a/doc/source/developing/releasing.rst
+++ b/doc/source/developing/releasing.rst
@@ -52,10 +52,14 @@
the ``stable`` branch. Instead, we manually cherry-pick bugfixes from the
``master`` branch onto the ``stable`` branch.
-If the pull request contains merge commits, you must take care to *not* backport
-commits that merge with the main line of development on the ``master`` branch. Doing
-so may bring unrelated changes, including new features, into a bugfix
-release.
+You may find the ``pr_backport.py`` script located in the ``scripts`` folder at
+the root of the repository to be helpful. This script uses the github API to
+find the list of pull requests made since the last release and prompts the user
+to backport each pull request individually. Note that the backport process is
+fully manual. The easiest way to do it is to download the diff for the pull
+request (the URL for the diff is printed out by the backport script) and then
+use ``git apply`` to apply the patch for the pull request to a local copy of yt
+with the ``stable`` branch checked out.
Once you've finished backporting push your work to Github. Once you've pushed to
your fork, you will be able to issue a pull request containing the backported
https://bitbucket.org/yt_analysis/yt/commits/3f1a62145475/
Changeset: 3f1a62145475
User: ngoldbaum
Date: 2017-12-12 21:54:07+00:00
Summary: remove my key
Affected #: 1 file
diff -r 6cfd69ddeafb6ff4cbb7cd30668ea91ac7f445ce -r 3f1a621454756a1b6d680afabe3303cd38097079 scripts/pr_backport.py
--- a/scripts/pr_backport.py
+++ b/scripts/pr_backport.py
@@ -87,10 +87,9 @@
if __name__ == "__main__":
- #key = input("Please enter your github OAuth API key\n"
- # "See the github help for instructions on how to "
- # "generate a personal access token.\n>>> ")
- key = '6efd3310bf66e941e32ad8ea18ebc581e1ccda1a'
+ key = input("Please enter your github OAuth API key\n"
+ "See the github help for instructions on how to "
+ "generate a personal access token.\n>>> ")
print("")
print("Gathering PR information, this may take a minute.")
print("Don't worry, yt loves you.")
https://bitbucket.org/yt_analysis/yt/commits/f93e4c5f5748/
Changeset: f93e4c5f5748
User: MatthewTurk
Date: 2018-01-19 17:25:35+00:00
Summary: Merge pull request #1648 from ngoldbaum/pr_backport
update pr backport script for git/github
Affected #: 2 files
diff -r 3445446d0f2a9f93f040a619a4b4eab4f7c3c070 -r f93e4c5f5748c150c78c1afee5ac163ad0ec3145 doc/source/developing/releasing.rst
--- a/doc/source/developing/releasing.rst
+++ b/doc/source/developing/releasing.rst
@@ -52,10 +52,14 @@
the ``stable`` branch. Instead, we manually cherry-pick bugfixes from the
``master`` branch onto the ``stable`` branch.
-If the pull request contains merge commits, you must take care to *not* backport
-commits that merge with the main line of development on the ``master`` branch. Doing
-so may bring unrelated changes, including new features, into a bugfix
-release.
+You may find the ``pr_backport.py`` script located in the ``scripts`` folder at
+the root of the repository to be helpful. This script uses the github API to
+find the list of pull requests made since the last release and prompts the user
+to backport each pull request individually. Note that the backport process is
+fully manual. The easiest way to do it is to download the diff for the pull
+request (the URL for the diff is printed out by the backport script) and then
+use ``git apply`` to apply the patch for the pull request to a local copy of yt
+with the ``stable`` branch checked out.
Once you've finished backporting push your work to Github. Once you've pushed to
your fork, you will be able to issue a pull request containing the backported
diff -r 3445446d0f2a9f93f040a619a4b4eab4f7c3c070 -r f93e4c5f5748c150c78c1afee5ac163ad0ec3145 scripts/pr_backport.py
--- a/scripts/pr_backport.py
+++ b/scripts/pr_backport.py
@@ -1,18 +1,41 @@
-from __future__ import print_function
-import hglib
+import dateutil.parser
+import git
import requests
import shutil
import tempfile
-from datetime import datetime
-from distutils.version import LooseVersion
-from time import strptime, mktime
from yt.extern.six.moves import input
-MERGED_PR_ENDPOINT = ("http://bitbucket.org/api/2.0/repositories/yt_analysis/"
- "yt/pullrequests/?state=MERGED")
+API_URL = 'https://api.github.com/graphql'
+
+YT_REPO = "https://github.com/yt-project/yt"
-YT_REPO = "https://bitbucket.org/yt_analysis/yt"
+PR_QUERY = """
+{
+ repository(owner: "yt-project", name: "yt") {
+ pullRequests(first:100 states:MERGED %s) {
+ edges {
+ node {
+ number
+ title
+ mergedAt
+ author {
+ login
+ }
+ body
+ url
+ }
+ }
+ pageInfo {
+ endCursor
+ hasNextPage
+ hasPreviousPage
+ startCursor
+ }
+ }
+ }
+}
+"""
def clone_new_repo(source=None):
@@ -21,304 +44,64 @@
dest_repo_path = path+'/yt-backport'
if source is None:
source = YT_REPO
- hglib.clone(source=source, dest=dest_repo_path)
- with hglib.open(dest_repo_path) as client:
- # Changesets that are on the yt branch but aren't topological ancestors
- # of whichever changeset the experimental bookmark is pointing at
- bookmarks, _ = client.bookmarks()
- bookmark_names = [b[0] for b in bookmarks]
- if 'experimental' in bookmark_names:
- client.update('heads(branch(yt) - ::bookmark(experimental))')
- else:
- client.update('heads(branch(yt))')
+ git.Repo.clone_from(source, dest_repo_path)
return dest_repo_path
-def get_first_commit_after_last_major_release(repo_path):
- """Returns the SHA1 hash of the first commit to the yt branch that wasn't
- included in the last tagged release.
- """
- with hglib.open(repo_path) as client:
- tags = client.log("reverse(tag())")
- tags = [t[2].decode('utf8') for t in tags]
- tags = sorted([t for t in tags if t[:2] == 'yt'])
- for t in tags[::-1]:
- ver = LooseVersion(t)
- if len(ver.version) == 4 or ver.version[4] == 0:
- last_major_tag = t
- break
- last_before_release = client.log(
- "last(ancestors(%s) and branch(yt))" % str(last_major_tag))
- rev = last_before_release[0][1].decode()
- first_after_release = client.log(
- "first(descendants(%s) and branch(yt) and not %s)" % (rev, rev))
- return first_after_release[0][1][:12].decode('utf8')
-
-
-def get_branch_tip(repo_path, branch, exclude=None):
- """Returns the SHA1 hash of the most recent commit on the given branch"""
- revset = "head() and branch(%s)" % branch
- with hglib.open(repo_path) as client:
- if exclude is not None:
- try:
- client.log(exclude)
- revset += "and not %s" % exclude
- except hglib.error.CommandError:
- pass
- change = client.log(revset)[0][1][:12].decode('utf8')
- return change
-
-
-def get_lineage_between_release_and_tip(repo_path, first, last):
- """Returns the lineage of changesets that were at one point the public tip"""
- with hglib.open(repo_path) as client:
- lineage = client.log("'%s'::'%s' and p1('%s'::'%s') + '%s'"
- % (first, last, first, last, last))
- return lineage
-
-
-def get_pull_requests_since_last_release(repo_path):
- """Returns a list of pull requests made since the last tagged release"""
- r = requests.get(MERGED_PR_ENDPOINT)
- done = False
- merged_prs = []
- with hglib.open(repo_path) as client:
- last_tag = client.log("reverse(tag())")[0]
- while not done:
- if r.status_code != 200:
- raise RuntimeError
- data = r.json()
- prs = data['values']
- for pr in prs:
- activity = requests.get(pr['links']['activity']['href']).json()
- merge_date = None
- for action in activity['values']:
- if 'update' in action and action['update']['state'] == 'MERGED':
- merge_date = action['update']['date']
- merge_date = merge_date.split('.')[0]
- timestamp = mktime(strptime(merge_date, "%Y-%m-%dT%H:%M:%S"))
- merge_date = datetime.fromtimestamp(timestamp)
- break
- if merge_date is None:
- break
- if merge_date < last_tag[6]:
- done = True
- break
- merged_prs.append(pr)
- r = requests.get(data['next'])
- return merged_prs
-
-
-def cache_commit_data(prs):
- """Avoid repeated calls to bitbucket API to get the list of commits per PR"""
- commit_data = {}
- for pr in prs:
- data = requests.get(pr['links']['commits']['href']).json()
- if data.keys() == [u'error']:
- # this happens when commits have been stripped, e.g.
- # https://bitbucket.org/yt_analysis/yt/pull-requests/1641
- continue
- done = False
- commits = []
- while not done:
- commits.extend(data['values'])
- if 'next' not in data:
- done = True
- else:
- data = requests.get(data['next']).json()
- commit_data[pr['id']] = commits
- return commit_data
-
-
-def find_commit_in_prs(needle, commit_data, prs):
- """Finds the commit `needle` PR in the commit_data dictionary
-
- If found, returns the pr the needle commit is in. If the commit was not
- part of the PRs in the dictionary, returns None.
- """
- for pr_id in commit_data:
- commits = commit_data[pr_id]
- for commit in commits:
- if commit['hash'] == needle[1]:
- pr = [pr for pr in prs if pr['id'] == pr_id][0]
- return pr
- return None
-
-
-def find_merge_commit_in_prs(needle, prs):
- """Find the merge commit `needle` in the list of `prs`
-
- If found, returns the pr the merge commit comes from. If not found, return
- None
- """
- for pr in prs[::-1]:
- if pr['merge_commit'] is not None:
- if pr['merge_commit']['hash'] == needle[1][:12].decode('utf8'):
- return pr
- return None
+def get_date_of_last_tag(repo_path):
+ repo = git.Repo(repo_path)
+ tags = sorted(repo.tags, key=lambda t: t.commit.committed_date)
+ return tags[-1].commit.committed_date
-def create_commits_to_prs_mapping(lineage, prs):
- """create a mapping from commits to the pull requests that the commit is
- part of
- """
- commits_to_prs = {}
- # make a copy of this list to avoid side effects from calling this function
- my_prs = list(prs)
- commit_data = cache_commit_data(my_prs)
- for commit in lineage:
- cset_hash = commit[1].decode('utf8')
- message = commit[5].decode('utf8')
- if message.startswith('Merged in') and '(pull request #' in message:
- pr = find_merge_commit_in_prs(commit, my_prs)
- if pr is None:
- continue
- commits_to_prs[cset_hash] = pr
- # Since we know this PR won't have another commit associated with it,
- # remove from global list to reduce number of network accesses
- my_prs.remove(commits_to_prs[cset_hash])
- else:
- pr = find_commit_in_prs(commit, commit_data, my_prs)
- commits_to_prs[cset_hash] = pr
- return commits_to_prs
-
-
-def invert_commits_to_prs_mapping(commits_to_prs):
- """invert the mapping from individual commits to pull requests"""
- inv_map = {}
- for k, v in commits_to_prs.items():
- # can't save v itself in inv_map since it's an unhashable dictionary
- if v is not None:
- created_date = v['created_on'].split('.')[0]
- timestamp = mktime(strptime(created_date, "%Y-%m-%dT%H:%M:%S"))
- created_date = datetime.fromtimestamp(timestamp)
- pr_desc = (v['id'], v['title'], created_date,
- v['links']['html']['href'], v['description'])
- else:
- pr_desc = None
- inv_map[pr_desc] = inv_map.get(pr_desc, [])
- inv_map[pr_desc].append(k)
- return inv_map
+def get_prs_since_last_release(date, key):
+ headers = {"Authorization": 'token %s' % key}
+ resp = requests.post(url=API_URL, json={"query": PR_QUERY % ""}, headers=headers)
+ ret = []
+ while True:
+ jsr = resp.json()
+ cursor = jsr['data']['repository']['pullRequests']['pageInfo']['endCursor']
+ if cursor is None:
+ break
+ prs = jsr['data']['repository']['pullRequests']['edges']
+ for pr in prs:
+ pr_date = dateutil.parser.parse(pr['node']['mergedAt']).timestamp()
+ if pr_date > date:
+ ret.append(pr['node'])
+ resp = requests.post(
+ url=API_URL, json={"query": PR_QUERY % ('after:"%s"' % cursor)},
+ headers=headers)
+ return ret
-def get_last_descendant(repo_path, commit):
- """get the most recent descendant of a commit"""
- with hglib.open(repo_path) as client:
- com = client.log('last(%s::)' % commit)
- return com[0][1][:12]
-
-def screen_already_backported(repo_path, inv_map):
- with hglib.open(repo_path) as client:
- tags = client.log("reverse(tag())")
- tags = [t[2].decode('utf8') for t in tags]
- tags = [LooseVersion(t) for t in tags if t.startswith('yt')]
- major_tags = [
- t for t in tags if len(t.version) == 4 or t.version[-1] == 0]
- most_recent_major_tag_name = major_tags[0].vstring
- lineage = client.log(
- "descendants(%s) and branch(stable)" % most_recent_major_tag_name)
- prs_to_screen = []
- for pr in inv_map:
- for commit in lineage:
- desc = commit[5].decode('utf8')
- if desc.startswith('Backporting PR #%s' % pr[0]):
- prs_to_screen.append(pr)
- for pr in prs_to_screen:
- del inv_map[pr]
- return inv_map
-
-def commit_already_on_stable(repo_path, commit):
- with hglib.open(repo_path) as client:
- commit_info = client.log(commit)[0]
- most_recent_tag_name = client.log("reverse(tag())")[0][2]
- most_recent_tag_name = most_recent_tag_name.decode('utf8')
- lineage = client.log(
- "descendants(%s) and branch(stable)" % most_recent_tag_name)
- # if there is a stable commit with the same commit message,
- # it's been grafted
- if any([commit_info[5] == c[5] for c in lineage]):
- return True
- return False
-
-def backport_pr_commits(repo_path, inv_map, last_stable, prs):
- """backports pull requests to the stable branch.
-
- Accepts a dictionary mapping pull requests to a list of commits that
- are in the pull request.
- """
- pr_list = inv_map.keys()
- pr_list = sorted(pr_list, key=lambda x: x[2])
- for pr_desc in pr_list:
- merge_warn = False
- merge_commits = []
- pr = [pr for pr in prs if pr['id'] == pr_desc[0]][0]
- data = requests.get(pr['links']['commits']['href']).json()
- commits = data['values']
- while 'next' in data:
- data = requests.get(data['next']).json()
- commits.extend(data['values'])
- commits = [com['hash'][:12] for com in commits]
- with hglib.open(repo_path) as client:
- for com in commits:
- if client.log('merge() and %s' % com) != []:
- merge_warn = True
- merge_commits.append(com)
- if len(commits) > 1:
- revset = " | ".join(commits)
- revset = '"%s"' % revset
- message = "Backporting PR #%s %s" % \
- (pr['id'], pr['links']['html']['href'])
- dest = get_last_descendant(repo_path, last_stable).decode('utf8')
- message = \
- "hg rebase -r %s --keep --collapse -m \"%s\" -d %s\n" % \
- (revset, message, dest)
- message += "hg update stable\n\n"
- if merge_warn is True:
- if len(merge_commits) > 1:
- merge_commits = ", ".join(merge_commits)
- else:
- merge_commits = merge_commits[0]
- message += \
- "WARNING, PULL REQUEST CONTAINS MERGE COMMITS, CONSIDER\n" \
- "BACKPORTING BY HAND TO AVOID BACKPORTING UNWANTED CHANGES\n"
- message += \
- "Merge commits are %s\n\n" % merge_commits
- else:
- if commit_already_on_stable(repo_path, commits[0]) is True:
- continue
- message = "hg graft %s\n" % commits[0]
- print("PR #%s\nTitle: %s\nCreated on: %s\nLink: %s\n%s" % pr_desc)
- print("To backport, issue the following command(s):\n")
- print(message)
- input('Press any key to continue')
+def backport_prs(repo_path, prs):
+ for pr in prs:
+ print('')
+ print('PR %s' % pr['number'])
+ print(pr['title'])
+ print(pr['author']['login'])
+ print(pr['body'])
+ print(pr['url'])
+ print("%s.diff" % pr['url'])
+ input("Press any key to continue")
if __name__ == "__main__":
+ key = input("Please enter your github OAuth API key\n"
+ "See the github help for instructions on how to "
+ "generate a personal access token.\n>>> ")
print("")
print("Gathering PR information, this may take a minute.")
print("Don't worry, yt loves you.")
print("")
repo_path = clone_new_repo()
try:
- last_major_release = get_first_commit_after_last_major_release(repo_path)
- last_dev = get_branch_tip(repo_path, 'yt', 'experimental')
- last_stable = get_branch_tip(repo_path, 'stable')
- lineage = get_lineage_between_release_and_tip(
- repo_path, last_major_release, last_dev)
- prs = get_pull_requests_since_last_release(repo_path)
- commits_to_prs = create_commits_to_prs_mapping(lineage, prs)
- inv_map = invert_commits_to_prs_mapping(commits_to_prs)
- # for now, ignore commits that aren't part of a pull request since
- # the last bugfix release. These are mostly commits in pull requests
- # from before the last bugfix release but might include commits that
- # were pushed directly to the repo.
- del inv_map[None]
-
- inv_map = screen_already_backported(repo_path, inv_map)
+ date = get_date_of_last_tag(repo_path)
+ prs = get_prs_since_last_release(date, key)
print("In another terminal window, navigate to the following path:")
print("%s" % repo_path)
input("Press any key to continue")
- backport_pr_commits(repo_path, inv_map, last_stable, prs)
+ backport_prs(repo_path, prs)
input(
"Now you need to push your backported changes. The temporary\n"
"repository currently being used will be deleted as soon as you\n"
Repository URL: https://bitbucket.org/yt_analysis/yt/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
More information about the yt-svn
mailing list