mirror of https://github.com/zcash/developers.git

commit ec032ce14a
Merge pull request #67 from zcash/change-deployment-pipeline

    Add a Zashi pipeline renderer
Deployment workflow (GitHub Actions):
@@ -148,6 +148,12 @@ jobs:
           DAG_VIEW: transparent-deprecation
           TERMINATE_AT: "zcash/zcash#4203"
 
+      - name: Render Zashi pipeline
+        run: python3 ./zashi-pipeline.py
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          ZENHUB_TOKEN: ${{ secrets.ZENHUB_TOKEN }}
+
       - name: Copy the index page
         run: cp ./index.html ./public
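For local testing, the new step can be reproduced outside CI; a minimal sketch, assuming you supply your own tokens (the placeholder values below are not real):

import os
import subprocess

# Mirror the workflow step above; both token values are assumed placeholders.
os.environ['GITHUB_TOKEN'] = '<github personal access token>'
os.environ['ZENHUB_TOKEN'] = '<zenhub graphql api token>'
subprocess.run(['python3', './zashi-pipeline.py'], check=True)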
New file helpers/github.py (path inferred from `from helpers import github, zenhub` in the scripts below):
@@ -0,0 +1,308 @@
from sgqlc.endpoint.http import HTTPEndpoint
from sgqlc.operation import Operation

from github_schema import github_schema as schema

# To get the id of a repo, see <https://stackoverflow.com/a/47223479/393146>.

HALO2_REPOS = {
    290019239: ('zcash', 'halo2'),
    344239327: ('zcash', 'pasta_curves'),
}

CORE_REPOS = {
    26987049: ('zcash', 'zcash'),
    47279130: ('zcash', 'zips'),
    48303644: ('zcash', 'incrementalmerkletree'),
    85334928: ('zcash', 'librustzcash'),
    133857578: ('zcash-hackworks', 'zcash-test-vectors'),
    111058300: ('zcash', 'sapling-crypto'),
    **HALO2_REPOS,
    305835578: ('zcash', 'orchard'),
}

TFL_REPOS = {
    642135348: ('Electric-Coin-Company', 'tfl-book'),
    725179873: ('Electric-Coin-Company', 'zebra-tfl'),
    695805989: ('zcash', 'simtfl'),
}

ANDROID_REPOS = {
    390808594: ('Electric-Coin-Company', 'zashi-android'),
    151763639: ('Electric-Coin-Company', 'zcash-android-wallet-sdk'),
    719178328: ('Electric-Coin-Company', 'zashi'),
}

IOS_REPOS = {
    387551125: ('Electric-Coin-Company', 'zashi-ios'),
    185480114: ('Electric-Coin-Company', 'zcash-swift-wallet-sdk'),
    270825987: ('Electric-Coin-Company', 'MnemonicSwift'),
    439137887: ('Electric-Coin-Company', 'zcash-light-client-ffi'),
    719178328: ('Electric-Coin-Company', 'zashi'),
}

WALLET_REPOS = {
    85334928: ('zcash', 'librustzcash'),
    159714694: ('zcash', 'lightwalletd'),
    **ANDROID_REPOS,
    **IOS_REPOS,
}

ECC_REPOS = {
    **CORE_REPOS,
    **TFL_REPOS,
    **WALLET_REPOS,
    65419597: ('Electric-Coin-Company', 'infrastructure'),
}

ZF_REPOS = {
    205255683: ('ZcashFoundation', 'zebra'),
    225479018: ('ZcashFoundation', 'redjubjub'),
    235651437: ('ZcashFoundation', 'ed25519-zebra'),
    279422254: ('ZcashFoundation', 'zcash_script'),
}

ZF_FROST_REPOS = {
    437862440: ('ZcashFoundation', 'frost'),
}

ZCASHD_DEPRECATION_REPOS = {
    26987049: ('zcash', 'zcash'),
    47279130: ('zcash', 'zips'),
    85334928: ('zcash', 'librustzcash'),
    863610221: ('zcash', 'wallet'),
    159714694: ('zcash', 'lightwalletd'),
}

POOL_DEPRECATION_REPOS = {
    **CORE_REPOS,
    **WALLET_REPOS,
}

REPO_SETS = {
    'core': CORE_REPOS,
    'halo2': HALO2_REPOS,
    'tfl': TFL_REPOS,
    'wallet': WALLET_REPOS,
    'wallet-ios': IOS_REPOS,
    'wallet-android': ANDROID_REPOS,
    'ecc': ECC_REPOS,
    'zf': ZF_REPOS,
    'zf-frost': ZF_FROST_REPOS,
    'zf-devops': {**ZF_REPOS, **ZF_FROST_REPOS},
    'zcashd-deprecation': ZCASHD_DEPRECATION_REPOS,
    'sprout-deprecation': POOL_DEPRECATION_REPOS,
    'transparent-deprecation': POOL_DEPRECATION_REPOS,
}


def api(token):
    return HTTPEndpoint(
        'https://api.github.com/graphql',
        {'Authorization': 'bearer %s' % token},
    )


class GitHubIssue:
    def __init__(self, repo_id, issue_number, data, REPOS):
        self.repo_id = repo_id
        self.issue_number = issue_number
        self.milestone = None
        self._REPOS = REPOS

        if data is not None:
            labels = [label['name'] for label in data['labels']['nodes']]
            self.title = data['title']
            self.labels = labels
            self.is_release = 'C-release' in labels
            self.is_target = 'C-target' in labels
            self.is_pr = 'merged' in data
            self.is_committed = 'S-committed' in labels
            self.is_in_progress = 'S-in-progress' in labels
            self.waiting_on_review = 'S-waiting-on-review' in labels
            self.url = data['url']
            self.state = 'closed' if data['state'] in ['CLOSED', 'MERGED'] else 'open'
            if 'milestone' in data and data['milestone']:
                self.milestone = data['milestone']['title']
        else:
            # If we can't fetch issue data, assume we don't care.
            self.title = ''
            self.labels = []
            self.url = None
            self.is_release = False
            self.is_target = False
            self.is_pr = False
            self.is_committed = False
            self.is_in_progress = False
            self.waiting_on_review = False
            self.state = 'closed'

    def __repr__(self):
        if self.repo_id in self._REPOS:
            repo = self._REPOS[self.repo_id]
            # Shorten the representation of long repo names.
            if repo[0] == 'Electric-Coin-Company':
                repo = ('ECC', repo[1])
            repo = '/'.join(repo)
            return '%s#%d' % (repo, self.issue_number)
        else:
            return 'Unknown'

    def __eq__(self, other):
        return (self.repo_id, self.issue_number) == (other.repo_id, other.issue_number)

    def __hash__(self):
        return hash((self.repo_id, self.issue_number))

    def any_cat(self, categories):
        release_cat = self.is_release if 'releases' in categories else False
        targets_cat = self.is_target if 'targets' in categories else False
        return release_cat or targets_cat


def fetch_issues(op, issues, REPOS):
    repos = set([repo for (repo, _) in issues])
    repos = {repo: [issue for (r, issue) in issues if r == repo] for repo in repos}

    for (repo, issues) in repos.items():
        conn = op.repository(
            owner=REPOS[repo][0],
            name=REPOS[repo][1],
            __alias__='repo%d' % repo,
        )

        for issue in issues:
            res = conn.issue_or_pull_request(number=issue, __alias__='issue%d' % issue)
            for typ in [schema.Issue, schema.PullRequest]:
                node = res.__as__(typ)
                node.labels(first=50).nodes().name()
                node.state()
                node.milestone().title()
                node.title()
                node.url()
                if typ == schema.PullRequest:
                    node.merged()


def download_issues(endpoint, nodes, REPOS):
    issues = [(repo, issue) for (repo, issue) in nodes if repo in REPOS]

    ret = {}

    # Ensure that any graph nodes from ZenHub that are not in the repos we care about
    # have default entries, to simplify subsequent graph manipulation code.
    for (repo, issue) in [(repo, issue) for (repo, issue) in nodes if repo not in REPOS]:
        ret[(repo, issue)] = GitHubIssue(repo, issue, None, REPOS)

    def chunks(lst, n):
        for i in range(0, len(lst), n):
            yield lst[i:i + n]

    for issues in chunks(issues, 50):
        op = Operation(schema.Query)
        fetch_issues(op, issues, REPOS)

        d = endpoint(op)
        data = (op + d)

        for (repo, issue) in issues:
            repo_data = data['repo%d' % repo]
            issue_key = 'issue%d' % issue
            # If GITHUB_TOKEN doesn't have permission to read from a particular private
            # repository in REPOS, GitHub returns an empty repo_data section.
            issue_data = repo_data[issue_key] if issue_key in repo_data else None
            ret[(repo, issue)] = GitHubIssue(repo, issue, issue_data, REPOS)

    return ret


def fetch_issues_with_labels(op, labels, repos):
    for (repo_id, (repo, issue_cursor, pr_cursor)) in repos:
        conn = op.repository(
            owner=repo[0],
            name=repo[1],
            __alias__='repo%d' % repo_id,
        )

        if issue_cursor != -1:
            issues = conn.issues(
                labels=labels,
                first=50,
                after=issue_cursor,
            )
            issues.nodes.number()
            issues.nodes.labels(first=50).nodes().name()
            issues.nodes.state()
            issues.nodes.milestone().title()
            issues.nodes.title()
            issues.nodes.url()
            issues.page_info.has_next_page()
            issues.page_info.end_cursor()

        if pr_cursor != -1:
            prs = conn.pull_requests(
                labels=labels,
                first=50,
                after=pr_cursor,
            )
            prs.nodes.number()
            prs.nodes.labels(first=50).nodes().name()
            prs.nodes.state()
            prs.nodes.milestone().title()
            prs.nodes.title()
            prs.nodes.url()
            prs.nodes.merged()
            prs.page_info.has_next_page()
            prs.page_info.end_cursor()


def download_issues_with_labels(endpoint, labels, REPOS):
    ret = {}
    repos = {repo_id: (repo, None, None) for (repo_id, repo) in REPOS.items()}

    while True:
        op = Operation(schema.Query)
        fetch_issues_with_labels(op, labels, repos.items())

        d = endpoint(op)
        data = (op + d)

        done = []
        for (repo_id, (repo, _, _)) in repos.items():
            repo_data = data['repo%d' % repo_id]

            if hasattr(repo_data, 'issues'):
                for issue in repo_data.issues.nodes:
                    ret[(repo_id, issue.number)] = GitHubIssue(repo_id, issue.number, issue, REPOS)
                if repo_data.issues.page_info.has_next_page:
                    issue_cursor = repo_data.issues.page_info.end_cursor
                else:
                    issue_cursor = -1
            else:
                issue_cursor = -1

            if hasattr(repo_data, 'pull_requests'):
                for pr in repo_data.pull_requests.nodes:
                    ret[(repo_id, pr.number)] = GitHubIssue(repo_id, pr.number, pr, REPOS)
                if repo_data.pull_requests.page_info.has_next_page:
                    pr_cursor = repo_data.pull_requests.page_info.end_cursor
                else:
                    pr_cursor = -1
            else:
                pr_cursor = -1

            if issue_cursor == -1 and pr_cursor == -1:
                done.append(repo_id)
            else:
                repos[repo_id] = (repo, issue_cursor, pr_cursor)

        for repo_id in done:
            del repos[repo_id]

        if len(repos) > 0:
            print('.', end='', flush=True)
        else:
            print()
            break

    return ret
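A usage sketch for the helpers above (the token is an assumed placeholder; the labels and repo set are the real values used by zashi-pipeline.py below):

from helpers import github

gapi = github.api('<github token>')  # assumed placeholder, not a real token
tracked = github.download_issues_with_labels(
    gapi, ['C-tracked-bug', 'C-tracked-feature'], github.WALLET_REPOS)
for (repo_id, number), issue in tracked.items():
    # GitHubIssue.__repr__ shortens Electric-Coin-Company to ECC.
    print(repr(issue), issue.state, issue.title)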
New file helpers/zenhub.py (path likewise inferred from the imports):
@@ -0,0 +1,155 @@
import networkx as nx
from sgqlc.endpoint.http import HTTPEndpoint
from sgqlc.operation import Operation

from helpers.github import CORE_REPOS, WALLET_REPOS, ZF_REPOS, ZF_FROST_REPOS
from zenhub_schema import zenhub_schema

WORKSPACE_SETS = {
    # ecc-core
    '5dc1fd615862290001229f21': CORE_REPOS.keys(),
    # ecc-wallet
    '5db8aa0244512d0001e0968e': WALLET_REPOS.keys(),
    # zf
    '5fb24d9264a3e8000e666a9e': ZF_REPOS.keys(),
    # zf-frost
    '607d75e0169bd50011d5410f': ZF_FROST_REPOS.keys(),
}


def api(token):
    return HTTPEndpoint(
        'https://api.zenhub.com/public/graphql',
        {'Authorization': 'Bearer %s' % token},
    )


def fetch_workspace_graph(op, workspace_id, repos, cursor):
    dependencies = op.workspace(id=workspace_id).issue_dependencies(
        # TODO: This causes a 500 Internal Server Error. We need the ZenHub repo IDs
        # here, not the GitHub repo IDs (which the previous REST API used).
        # repository_ids=repos,
        first=100,
        after=cursor,
    )
    dependencies.nodes.id()
    dependencies.nodes.blocked_issue.number()
    dependencies.nodes.blocked_issue.repository.gh_id()
    dependencies.nodes.blocking_issue.number()
    dependencies.nodes.blocking_issue.repository.gh_id()
    dependencies.page_info.has_next_page()
    dependencies.page_info.end_cursor()


def get_dependency_graph(endpoint, workspace_id, repos):
    edges = []
    cursor = None

    while True:
        op = Operation(zenhub_schema.Query)
        fetch_workspace_graph(op, workspace_id, repos, cursor)

        d = endpoint(op)
        data = (op + d)

        if hasattr(data.workspace, 'issue_dependencies'):
            dependencies = data.workspace.issue_dependencies
            edges += [
                (
                    (node.blocking_issue.repository.gh_id, node.blocking_issue.number),
                    (node.blocked_issue.repository.gh_id, node.blocked_issue.number),
                )
                for node in dependencies.nodes
            ]

            if dependencies.page_info.has_next_page:
                cursor = dependencies.page_info.end_cursor
                print('.', end='', flush=True)
            else:
                print()
                break
        else:
            print()
            break

    return nx.DiGraph(edges)


def fetch_epics(op, workspace_id, repos, cursor):
    epics = op.workspace(id=workspace_id).epics(
        # TODO: This causes a 500 Internal Server Error. We need the ZenHub repo IDs
        # here, not the GitHub repo IDs (which the previous REST API used).
        # repository_ids=repos,
        first=100,
        after=cursor,
    )
    epics.nodes.id()
    epics.nodes.issue.number()
    epics.nodes.issue.repository.gh_id()
    epics.page_info.has_next_page()
    epics.page_info.end_cursor()


def get_epics(endpoint, workspace_id, repos):
    epics = []
    cursor = None

    while True:
        op = Operation(zenhub_schema.Query)
        fetch_epics(op, workspace_id, repos, cursor)

        d = endpoint(op)
        data = (op + d)

        epics_page = data.workspace.epics
        epics += [
            (node.id, (node.issue.repository.gh_id, node.issue.number))
            for node in epics_page.nodes
        ]

        if epics_page.page_info.has_next_page:
            cursor = epics_page.page_info.end_cursor
            print('.', end='', flush=True)
        else:
            print()
            break

    return epics


def fetch_epic_issues(op, workspace_id, epic_id, cursor):
    epic = op.workspace(id=workspace_id).epics(ids=[epic_id])
    child_issues = epic.nodes.child_issues(
        first=100,
        after=cursor,
    )
    child_issues.nodes.number()
    child_issues.nodes.repository.gh_id()
    child_issues.page_info.has_next_page()
    child_issues.page_info.end_cursor()


def get_epic_issues(endpoint, workspace_id, epic_id):
    epic_issues = []
    cursor = None

    while True:
        op = Operation(zenhub_schema.Query)
        fetch_epic_issues(op, workspace_id, epic_id, cursor)

        d = endpoint(op)
        data = (op + d)

        epic = data.workspace.epics.nodes[0]
        epic_issues += [
            (node.repository.gh_id, node.number) for node in epic.child_issues.nodes
        ]

        if epic.child_issues.page_info.has_next_page:
            cursor = epic.child_issues.page_info.end_cursor
            print('.', end='', flush=True)
        else:
            print()
            break

    return epic_issues
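And a matching sketch for the ZenHub side, composing the per-workspace dependency graphs the same way the renderers below do (token again an assumed placeholder):

import networkx as nx

from helpers import zenhub

zapi = zenhub.api('<zenhub token>')  # assumed placeholder
dg = nx.compose_all([
    zenhub.get_dependency_graph(zapi, workspace_id, repos)
    for (workspace_id, repos) in zenhub.WORKSPACE_SETS.items()
])
print('%d issues, %d blocking edges' % (dg.number_of_nodes(), dg.number_of_edges()))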
index.html (the page the workflow copies into public/):
@@ -17,5 +17,6 @@
     <p><a href="zcash-zcashd-deprecation-dag"><code>zcashd</code> deprecation DAG</a></p>
     <p><a href="zcash-sprout-deprecation-dag">Sprout pool deprecation DAG</a></p>
     <p><a href="zcash-transparent-deprecation-dag">Transparent pool deprecation DAG</a></p>
+    <p><a href="zashi-pipeline">Zashi pipeline</a></p>
   </body>
 </html>
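The link target matches the renderer's output: zashi-pipeline.py below writes public/zashi-pipeline.html, so the deployed site serves it next to this index page.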
New file zashi-pipeline.py (the script invoked by the workflow step above):
@@ -0,0 +1,250 @@
#!/usr/bin/env python3

import networkx as nx
from str2bool import str2bool as strtobool

import itertools
import os
import re
from textwrap import wrap
from urllib.parse import urlparse

from helpers import github, zenhub

GITHUB_TOKEN = os.environ.get('GITHUB_TOKEN')
ZENHUB_TOKEN = os.environ.get('ZENHUB_TOKEN')

# IDs of repos we look for releases in.
RUST = 85334928
ANDROID_SDK = 151763639
SWIFT_SDK = 185480114
ZASHI_ANDROID = 390808594
ZASHI_IOS = 387551125

REPOS = {
    **github.CORE_REPOS,
    **github.WALLET_REPOS,
}

RELEASE_MATRIX = {
    RUST: [ANDROID_SDK, SWIFT_SDK],
    ANDROID_SDK: [ZASHI_ANDROID],
    SWIFT_SDK: [ZASHI_IOS],
    ZASHI_ANDROID: [],
    ZASHI_IOS: [],
}


class TrackedIssue:
    def __init__(self, issue):
        self.issue = issue


def build_release_matrix_from(dg, issue, repo_id):
    acc = []
    for child in dg.neighbors(issue):
        if child.repo_id == repo_id and 'C-release' in child.labels:
            # Fetch the rows that each child's downstreams need rendered.
            child_deps = [
                build_release_matrix_from(dg, child, dep_repo)
                for dep_repo in RELEASE_MATRIX.get(repo_id)
            ]

            # Merge the rows from each downstream repo together.
            child_releases = [
                {k: v for d in prod for k, v in d.items()}
                for prod in itertools.product(*child_deps)
            ]

            if len(child_releases) > 0:
                for rec in child_releases:
                    rec[repo_id] = child
            else:
                child_releases = [{repo_id: child}]

            acc.extend(child_releases)
        else:
            acc.extend(build_release_matrix_from(dg, child, repo_id))

    return acc


def main():
    gapi = github.api(GITHUB_TOKEN)
    zapi = zenhub.api(ZENHUB_TOKEN)

    print('Fetching tracked issues')
    tracked_issues = github.download_issues_with_labels(
        gapi, ['C-tracked-bug', 'C-tracked-feature'], REPOS)

    # The repos we care about are now:
    # - Any repo containing a tracked issue.
    # - The wallet repos where releases occur.
    tracked_repos = set([repo_id for (repo_id, _) in tracked_issues])
    repos = {
        **github.WALLET_REPOS,
    }
    for repo_id in tracked_repos:
        repos[repo_id] = REPOS[repo_id]
    # Restrict each workspace to the repos selected above.
    workspaces = {
        workspace_id: [repo_id for repo_id in ws_repos if repo_id in repos]
        for (workspace_id, ws_repos) in zenhub.WORKSPACE_SETS.items()
    }

    # Build the full dependency graph from ZenHub's per-workspace API.
    print('Fetching graph')
    dg = nx.compose_all([
        zenhub.get_dependency_graph(zapi, workspace_id, workspace_repos)
        for (workspace_id, workspace_repos) in workspaces.items()
        if len(workspace_repos) > 0
    ])

    print('Rendering deployment pipeline')

    # Ensure that the tracked issues all exist in the graph. This is a no-op for
    # issues that are already present.
    start_at = set([issue for issue in tracked_issues])
    for i in start_at:
        dg.add_node(i)

    # Replace the graph with the subgraph that only includes the tracked
    # issues and their descendants.
    descendants = [nx.descendants(dg, n) for n in start_at]
    dg = nx.subgraph(dg, start_at.union(*descendants))

    # Fetch the issues within the graph.
    mapping = github.download_issues(gapi, dg.nodes, repos)

    # Relabel the graph
    dg = nx.relabel_nodes(dg, mapping)

    # Filter out unknown issues
    unknown = [n for n in dg if n.repo_id not in repos]
    if len(unknown) > 0:
        dg.remove_nodes_from(unknown)

    # Apply property annotations
    for (source, sink) in dg.edges:
        attrs = dg.edges[source, sink]
        attrs['is_open'] = 0 if source.state == 'closed' else 1

    # Render the HTML version!
    html_header = '''<!DOCTYPE html>
<html>
  <head>
    <title>Zashi Pipeline</title>

    <style>
      body {
        color: #1f2328;
        font-family: -apple-system,BlinkMacSystemFont,"Segoe UI","Noto Sans",Helvetica,Arial,sans-serif,"Apple Color Emoji","Segoe UI Emoji";
        font-size: 14px;
        line-height: 1.5;
        word-wrap: break-word;
      }
      a {
        color: #0969da;
      }
      table {
        border-collapse: collapse;
        width: 100%;
        width: max-content;
        max-width: 100%;
        overflow: auto;
      }
      table tr {
        border-top: 1px solid #d1d9e0b3;
      }
      table th, table td {
        border: 1px solid #d1d9e0b3;
        padding: 6px 13px;
        text-align: center;
      }

      @media (prefers-color-scheme: dark) {
        body {
          background-color: #121212;
          color: #f0f6fc;
        }
        a {
          color: #4493f8;
        }
        table tr {
          border-top: 1px solid #3d444db3;
        }
        table th, table td {
          border: 1px solid #3d444db3;
        }
      }
    </style>
  </head>
  <body>
    <h1>Zashi Pipeline</h1>
    <p>🐞 = bug, 💡 = feature, ✅ = implemented / released, 🛑 = unfinished, 📥 = unassigned / DAG needs updating</p>
    <table>
      <thead>
        <tr>
          <th>Type</th>
          <th>Issue</th>
          <th>Rust crate</th>
          <th>Android SDK</th>
          <th>Swift SDK</th>
          <th>Zashi Android</th>
          <th>Zashi iOS</th>
        </tr>
      </thead>
      <tbody>
'''
    html_footer = '''
      </tbody>
    </table>
  </body>
</html>
'''
    with open('public/zashi-pipeline.html', 'w') as f:
        f.write(html_header)

        for issue in tracked_issues.values():
            rows = build_release_matrix_from(dg, issue, RUST)
            for i, row in enumerate(rows):
                f.write('<tr>')

                if i == 0:
                    rowspan = ''
                    if len(rows) > 1:
                        rowspan = ' rowspan="{}"'.format(len(rows))

                    f.write('<td{}>{}</td>'.format(
                        rowspan,
                        '🐞' if 'C-tracked-bug' in issue.labels else '💡',
                    ))
                    f.write('<td{}>{} <a href="{}">{}</a></td>'.format(
                        rowspan,
                        '✅' if issue.state == 'closed' else '🛑',
                        issue.url,
                        issue.title,
                    ))

                for repo_id in [RUST, ANDROID_SDK, SWIFT_SDK, ZASHI_ANDROID, ZASHI_IOS]:
                    child = row.get(repo_id)
                    if child is None:
                        # Release not found in this repo
                        f.write('<td>📥</td>')
                    else:
                        # Extract version number from title
                        if repo_id == RUST:
                            version = re.search(r'zcash_[^ ]+ \d+(\.\d+)+', child.title).group()
                        else:
                            version = re.search(r'\d+(\.\d+)+', child.title).group()

                        f.write('<td>{} <a href="{}">{}</a></td>'.format(
                            '✅' if child.state == 'closed' else '🛑',
                            child.url,
                            version,
                        ))
                f.write('</tr>')

        f.write(html_footer)


if __name__ == '__main__':
    if GITHUB_TOKEN and ZENHUB_TOKEN:
        main()
    else:
        print('Please set the GITHUB_TOKEN and ZENHUB_TOKEN environment variables.')
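The merging step inside build_release_matrix_from is the densest part of the script: itertools.product takes one candidate row from each downstream repo, and the dict comprehension flattens each combination into a single table row. A small worked example with hypothetical version strings (the keys are the real Android/Swift SDK repo IDs from above):

import itertools

android_rows = [{151763639: 'sdk-2.0.1'}]
swift_rows = [{185480114: 'sdk-2.0.2'}, {185480114: 'sdk-2.0.3'}]

merged = [
    {k: v for d in prod for k, v in d.items()}
    for prod in itertools.product(android_rows, swift_rows)
]
# Two rows: the single Android release paired with each Swift release.
# [{151763639: 'sdk-2.0.1', 185480114: 'sdk-2.0.2'},
#  {151763639: 'sdk-2.0.1', 185480114: 'sdk-2.0.3'}]
print(merged)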
The existing DAG renderer (zcash-issue-dag.py) now pulls its configuration and helpers from the new package:
@@ -7,129 +7,22 @@
 import networkx as nx
 
 from str2bool import str2bool as strtobool
-import mimetypes
 import os
 from textwrap import wrap
 from urllib.parse import urlparse
 
-from sgqlc.endpoint.http import HTTPEndpoint
-from sgqlc.operation import Operation
-from github_schema import github_schema as schema
-from zenhub_schema import zenhub_schema
+from helpers import github, zenhub
 
 GITHUB_TOKEN = os.environ.get('GITHUB_TOKEN')
 ZENHUB_TOKEN = os.environ.get('ZENHUB_TOKEN')
 
 DAG_VIEW = os.environ.get('DAG_VIEW', 'core')
 
-# To get the id of a repo, see <https://stackoverflow.com/a/47223479/393146>.
-[~100 deleted lines: the HALO2_REPOS through REPO_SETS tables and WORKSPACE_SETS,
- identical to the definitions now in helpers/github.py and helpers/zenhub.py above]
-REPOS = REPO_SETS[DAG_VIEW]
+REPOS = github.REPO_SETS[DAG_VIEW]
 
 WORKSPACES = {
     workspace_id: [repo_id for repo_id in repos if repo_id in REPOS]
-    for (workspace_id, repos) in WORKSPACE_SETS.items()
+    for (workspace_id, repos) in zenhub.WORKSPACE_SETS.items()
 }
 
 SUPPORTED_CATEGORIES = set(['releases', 'targets'])
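The per-view configuration that used to live in this file is now a single table lookup; a sketch of what the workflow's DAG_VIEW: transparent-deprecation env selects (assumed to run from the repo root):

import os

from helpers import github

DAG_VIEW = os.environ.get('DAG_VIEW', 'core')  # 'transparent-deprecation' in the job above
for repo_id, (owner, name) in github.REPO_SETS[DAG_VIEW].items():
    print('%d -> %s/%s' % (repo_id, owner, name))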
@@ -160,248 +53,14 @@ SHOW_MILESTONES = strtobool(os.environ.get('SHOW_MILESTONES', 'false'))
 SHOW_EPICS = strtobool(os.environ.get('SHOW_EPICS', 'false'))
 
 
-[~230 deleted lines: the GitHubIssue class, fetch_issues, download_issues,
- fetch_workspace_graph, get_dependency_graph, fetch_epics, get_epics,
- fetch_epic_issues, and get_epic_issues, moved with small additions into
- helpers/github.py and helpers/zenhub.py above; the helper versions
- additionally take REPOS as a parameter and record issue labels]
 def main():
-    gapi = HTTPEndpoint(
-        'https://api.github.com/graphql',
-        {'Authorization': 'bearer %s' % GITHUB_TOKEN},
-    )
-    zapi = HTTPEndpoint(
-        'https://api.zenhub.com/public/graphql',
-        {'Authorization': 'Bearer %s' % ZENHUB_TOKEN},
-    )
+    gapi = github.api(GITHUB_TOKEN)
+    zapi = zenhub.api(ZENHUB_TOKEN)
 
     # Build the full dependency graph from ZenHub's per-workspace API.
     print('Fetching graph')
     dg = nx.compose_all([
-        get_dependency_graph(zapi, workspace_id, repos)
+        zenhub.get_dependency_graph(zapi, workspace_id, repos)
         for (workspace_id, repos) in WORKSPACES.items()
         if len(repos) > 0
     ])
@@ -412,10 +71,10 @@ def main():
     epics_issues = []
     for (workspace_id, repos) in WORKSPACES.items():
         if len(repos) > 0:
-            epics_issues += get_epics(zapi, workspace_id, repos)
+            epics_issues += zenhub.get_epics(zapi, workspace_id, repos)
     epics_issues = set(epics_issues)
 
-    epics_mapping = download_issues(gapi, [gh_ref for (_, gh_ref) in epics_issues])
+    epics_mapping = github.download_issues(gapi, [gh_ref for (_, gh_ref) in epics_issues], REPOS)
     epics_mapping = {k: v for (k, v) in epics_mapping.items() if v.state != 'closed'}
     issues_by_epic = {}
     for (i, ((repo_id, epic_id), epic)) in enumerate(epics_mapping.items()):
@@ -428,7 +87,7 @@ def main():
             id for (id, gh_ref) in epics_issues
             if gh_ref == (repo_id, epic_id)
         ][0]
-        issues = set(get_epic_issues(zapi, workspace_id, epic_id))
+        issues = set(zenhub.get_epic_issues(zapi, workspace_id, epic_id))
         issues_by_epic[epic] = issues
         for i in issues:
             # zapi.dependencies only returns nodes that have some connection,
@@ -448,7 +107,7 @@ def main():
     dg = nx.subgraph(dg, terminate_at.union(*ancestors))
 
     # Fetch the issues within the graph.
-    mapping = download_issues(gapi, dg.nodes)
+    mapping = github.download_issues(gapi, dg.nodes, REPOS)
 
     # Relabel the graph
     dg = nx.relabel_nodes(dg, mapping)
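Threading REPOS through github.download_issues is what lets both renderers share one helper while using different repo sets; a sketch (placeholder token; (26987049, 4203) is zcash/zcash#4203, the workflow's TERMINATE_AT):

from helpers import github

gapi = github.api('<github token>')  # assumed placeholder
mapping = github.download_issues(
    gapi, [(26987049, 4203)], github.REPO_SETS['zcashd-deprecation'])
print(repr(mapping[(26987049, 4203)]))  # e.g. zcash/zcash#4203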