#!/usr/bin/env python
# PYTHON_ARGCOMPLETE_OK
# (c) 2016 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

"""CLI tool for downloading results from Shippable CI runs."""

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
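
# Example invocations (sketches; the account/project/run values below are
# hypothetical):
#
#   download.py ansible/ansible/12345 --all
#   download.py https://app.shippable.com/github/ansible/ansible/runs/12345 --test-results -v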

# noinspection PyCompatibility
import argparse
import json
import os
import re
import sys

import requests

try:
    import argcomplete
except ImportError:
    argcomplete = None


def main():
    """Main program body."""
    args = parse_args()
    download_run(args)


def parse_args():
    """Parse and return args."""
    api_key = get_api_key()

    parser = argparse.ArgumentParser(description='Download results from a Shippable run.')

    parser.add_argument('run_id',
                        metavar='RUN',
                        help='shippable run id, run url or run name formatted as: account/project/run_number')

    parser.add_argument('-v', '--verbose',
                        dest='verbose',
                        action='store_true',
                        help='show what is being downloaded')

    parser.add_argument('-t', '--test',
                        dest='test',
                        action='store_true',
                        help='show what would be downloaded without downloading')

    parser.add_argument('--key',
                        dest='api_key',
                        default=api_key,
                        required=api_key is None,
                        help='api key for accessing Shippable')

    parser.add_argument('--console-logs',
                        action='store_true',
                        help='download console logs')

    parser.add_argument('--test-results',
                        action='store_true',
                        help='download test results')

    parser.add_argument('--coverage-results',
                        action='store_true',
                        help='download code coverage results')

    parser.add_argument('--job-metadata',
                        action='store_true',
                        help='download job metadata')

    parser.add_argument('--run-metadata',
                        action='store_true',
                        help='download run metadata')

    parser.add_argument('--all',
                        action='store_true',
                        help='download everything')

    parser.add_argument('--job-number',
                        metavar='N',
                        action='append',
                        type=int,
                        help='limit downloads to the given job number')

    if argcomplete:
        argcomplete.autocomplete(parser)

    args = parser.parse_args()
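
    # Legacy run URLs of the form https://app.shippable.com/runs/<run_id>
    # are accepted by stripping the prefix, leaving the bare run id.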
    old_runs_prefix = 'https://app.shippable.com/runs/'

    if args.run_id.startswith(old_runs_prefix):
        args.run_id = args.run_id[len(old_runs_prefix):]

    if args.all:
        args.console_logs = True
        args.test_results = True
        args.coverage_results = True
        args.job_metadata = True
        args.run_metadata = True

    selections = (
        args.console_logs,
        args.test_results,
        args.coverage_results,
        args.job_metadata,
        args.run_metadata,
    )

    if not any(selections):
        parser.error('At least one download option is required.')

    return args


def download_run(args):
    """Download a Shippable run."""
    headers = dict(
        Authorization='apiToken %s' % args.api_key,
    )
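
    # The RUN argument may be an app.shippable.com run URL, an
    # account/project/run_number triple, or a bare hex run id; the URL and
    # triple forms are matched below and resolved to a run id via the API.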
    match = re.search(
        r'^https://app.shippable.com/github/(?P<account>[^/]+)/(?P<project>[^/]+)/runs/(?P<run_number>[0-9]+)(?:/summary|(/(?P<job_number>[0-9]+)))?$',
        args.run_id)

    if not match:
        match = re.search(r'^(?P<account>[^/]+)/(?P<project>[^/]+)/(?P<run_number>[0-9]+)$', args.run_id)

    if match:
        account = match.group('account')
        project = match.group('project')
        run_number = int(match.group('run_number'))
        job_number = int(match.group('job_number')) if match.group('job_number') else None

        if job_number:
            if args.job_number:
                sys.exit('ERROR: job number found in url and specified with --job-number')

            args.job_number = [job_number]

        url = 'https://api.shippable.com/projects'
        response = requests.get(url, dict(projectFullNames='%s/%s' % (account, project)), headers=headers)

        if response.status_code != 200:
            raise Exception(response.content)

        project_id = response.json()[0]['id']

        url = 'https://api.shippable.com/runs?projectIds=%s&runNumbers=%s' % (project_id, run_number)

        response = requests.get(url, headers=headers)

        if response.status_code != 200:
            raise Exception(response.content)

        run = [run for run in response.json() if run['runNumber'] == run_number][0]

        args.run_id = run['id']
    elif re.search(r'^[a-f0-9]+$', args.run_id):
        url = 'https://api.shippable.com/runs/%s' % args.run_id

        response = requests.get(url, headers=headers)

        if response.status_code != 200:
            raise Exception(response.content)

        run = response.json()

        account = run['subscriptionOrgName']
        project = run['projectName']
        run_number = run['runNumber']
    else:
        sys.exit('ERROR: invalid run: %s' % args.run_id)

    output_dir = '%s/%s/%s' % (account, project, run_number)

    if not args.test:
        if not os.path.exists(output_dir):
            os.makedirs(output_dir)

    if args.run_metadata:
        path = os.path.join(output_dir, 'run.json')
        contents = json.dumps(run, sort_keys=True, indent=4)

        if args.verbose or args.test:
            print(path)

        if not args.test:
            with open(path, 'w') as metadata_fd:
                metadata_fd.write(contents)

    download_run_recursive(args, headers, output_dir, run, True)


def download_run_recursive(args, headers, output_dir, run, is_given=False):
    # Notes:
    # - The /runs response tells us if we need to eventually go up another
    #   layer or not (i.e. whether this run is a re-run attempt).
    # - Given a run id, /jobs will tell us all the jobs in that run, and
    #   whether or not we can pull results from them.
    #
    # When we initially run (i.e., in download_run), we'll have a /runs output
    # which we can use to get a /jobs output. Using the /jobs output, we filter
    # on the jobs we need to fetch (usually only successful ones, unless we
    # are processing the initial/given run rather than one of its parent runs)
    # and download them accordingly.
    #
    # Lastly, we check if the run we are currently processing has another
    # parent (reRunBatchId). If it does, we pull that /runs result and
    # recurse using it to start the process over again.
    response = requests.get('https://api.shippable.com/jobs?runIds=%s' % run['id'], headers=headers)

    if response.status_code != 200:
        raise Exception(response.content)

    jobs = sorted(response.json(), key=lambda job: int(job['jobNumber']))
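
    # Only jobs whose console output has been archived can be downloaded.
    # statusCode 30 appears to be Shippable's "succeeded" status (an
    # assumption based on the filtering described in the notes above):
    # parent re-run layers only contribute their successful jobs.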
    if is_given:
        needed_jobs = [j for j in jobs if j['isConsoleArchived']]
    else:
        needed_jobs = [j for j in jobs if j['isConsoleArchived'] and j['statusCode'] == 30]

    if not args.test:
        if not os.path.exists(output_dir):
            os.makedirs(output_dir)

    download_jobs(args, needed_jobs, headers, output_dir)

    rerun_batch_id = run.get('reRunBatchId')

    if rerun_batch_id:
        print('Downloading previous run: %s' % rerun_batch_id)
        response = requests.get('https://api.shippable.com/runs/%s' % rerun_batch_id, headers=headers)

        if response.status_code != 200:
            raise Exception(response.content)

        run = response.json()
        download_run_recursive(args, headers, output_dir, run)


def download_jobs(args, jobs, headers, output_dir):
    """Download Shippable jobs."""
    for j in jobs:
        job_id = j['id']
        job_number = j['jobNumber']

        if args.job_number and job_number not in args.job_number:
            continue

        if args.job_metadata:
            path = os.path.join(output_dir, '%s/job.json' % job_number)
            contents = json.dumps(j, sort_keys=True, indent=4).encode('utf-8')

            if args.verbose or args.test:
                print(path)

            if not args.test:
                directory = os.path.dirname(path)

                if not os.path.exists(directory):
                    os.makedirs(directory)

                with open(path, 'wb') as metadata_fd:
                    metadata_fd.write(contents)

        if args.console_logs:
            path = os.path.join(output_dir, '%s/console.log' % job_number)
            url = 'https://api.shippable.com/jobs/%s/consoles?download=true' % job_id
            download(args, headers, path, url, is_json=False)

        if args.test_results:
            path = os.path.join(output_dir, '%s/test.json' % job_number)
            url = 'https://api.shippable.com/jobs/%s/jobTestReports' % job_id
            download(args, headers, path, url)
            extract_contents(args, path, os.path.join(output_dir, '%s/test' % job_number))

        if args.coverage_results:
            path = os.path.join(output_dir, '%s/coverage.json' % job_number)
            url = 'https://api.shippable.com/jobs/%s/jobCoverageReports' % job_id
            download(args, headers, path, url)
            extract_contents(args, path, os.path.join(output_dir, '%s/coverage' % job_number))


def extract_contents(args, path, output_dir):
    """
    :type args: any
    :type path: str
    :type output_dir: str
    """
    if not args.test:
        if not os.path.exists(path):
            return

        with open(path, 'r') as json_fd:
            items = json.load(json_fd)

        for item in items:
            contents = item['contents'].encode('utf-8')
            path = output_dir + '/' + re.sub('^/*', '', item['path'])

            directory = os.path.dirname(path)

            if not os.path.exists(directory):
                os.makedirs(directory)

            if args.verbose:
                print(path)

            if path.endswith('.json'):
                contents = json.dumps(json.loads(contents), sort_keys=True, indent=4).encode('utf-8')

            if not os.path.exists(path):
                with open(path, 'wb') as output_fd:
                    output_fd.write(contents)


def download(args, headers, path, url, is_json=True):
    """
    :type args: any
    :type headers: dict[str, str]
    :type path: str
    :type url: str
    :type is_json: bool
    """
    if args.verbose or args.test:
        print(path)

    if os.path.exists(path):
        return

    if not args.test:
        response = requests.get(url, headers=headers)

        if response.status_code != 200:
            path += '.error'

        if is_json:
            # response.encoding can be None when the server does not declare
            # one; fall back to utf-8 so encode() cannot fail on None.
            content = json.dumps(response.json(), sort_keys=True, indent=4).encode(response.encoding or 'utf-8')
        else:
            content = response.content

        directory = os.path.dirname(path)

        if not os.path.exists(directory):
            os.makedirs(directory)

        with open(path, 'wb') as content_fd:
            content_fd.write(content)


def get_api_key():
    """
    :rtype: str
    """
    key = os.environ.get('SHIPPABLE_KEY', None)

    if key:
        return key

    path = os.path.join(os.environ['HOME'], '.shippable.key')

    try:
        with open(path, 'r') as key_fd:
            return key_fd.read().strip()
    except IOError:
        return None


if __name__ == '__main__':
    main()