forked from MirrorHub/synapse
parent a9dab970b8
commit 97d7e4c7b7
6 changed files with 247 additions and 76 deletions

33  .buildkite/format_tap.py  Normal file
@ -0,0 +1,33 @@
import sys
from tap.parser import Parser
from tap.line import Result, Unknown, Diagnostic

out = ["### TAP Output for " + sys.argv[2]]

p = Parser()

in_error = False

for line in p.parse_file(sys.argv[1]):
    if isinstance(line, Result):
        if in_error:
            out.append("")
            out.append("</pre></code></details>")
            out.append("")
            out.append("----")
            out.append("")
        in_error = False

        if not line.ok and not line.todo:
            in_error = True

            out.append("FAILURE Test #%d: ``%s``" % (line.number, line.description))
            out.append("")
            out.append("<details><summary>Show log</summary><code><pre>")

    elif isinstance(line, Diagnostic) and in_error:
        out.append(line.text)

if out:
    for line in out[:-3]:
        print(line)
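Note: format_tap.py reads a TAP results file (first argument) and a step label (second argument) and prints a Markdown summary of the failed tests; synapse_sytest.sh below pipes that output into a Buildkite annotation. A minimal sketch of running it by hand, assuming tap.py is installed and using an illustrative results path and label (neither value comes from this commit):

    # Render the failed-test summary from a TAP file (path and label are examples only)
    pip install tap.py
    python .buildkite/format_tap.py /tmp/results.tap "SyTest - local run"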
.buildkite/merge_base_branch.sh
@ -1,22 +1,21 @@
 #!/usr/bin/env bash
 
-set -e
+set -ex
 
-# CircleCI doesn't give CIRCLE_PR_NUMBER in the environment for non-forked PRs. Wonderful.
-# In this case, we just need to do some ~shell magic~ to strip it out of the PULL_REQUEST URL.
-echo 'export CIRCLE_PR_NUMBER="${CIRCLE_PR_NUMBER:-${CIRCLE_PULL_REQUEST##*/}}"' >> $BASH_ENV
-source $BASH_ENV
+if [[ "$BUILDKITE_BRANCH" =~ ^(develop|master|dinsic|shhs|release-.*)$ ]]; then
+    echo "Not merging forward, as this is a release branch"
+    exit 0
+fi
 
-if [[ -z "${CIRCLE_PR_NUMBER}" ]]
-then
-    echo "Can't figure out what the PR number is! Assuming merge target is develop."
+if [[ -z $BUILDKITE_PULL_REQUEST_BASE_BRANCH ]]; then
+    echo "Not a pull request, or hasn't had a PR opened yet..."
 
     # It probably hasn't had a PR opened yet. Since all PRs land on develop, we
     # can probably assume it's based on it and will be merged into it.
     GITBASE="develop"
 else
     # Get the reference, using the GitHub API
-    GITBASE=`wget -O- https://api.github.com/repos/matrix-org/synapse/pulls/${CIRCLE_PR_NUMBER} | jq -r '.base.ref'`
+    GITBASE=$BUILDKITE_PULL_REQUEST_BASE_BRANCH
 fi
 
 # Show what we are before
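Note: the new guard at the top of merge_base_branch.sh skips the merge entirely on develop, master, dinsic, shhs and release-* branches. A quick sketch of how that regex behaves in a plain bash shell, with an example branch name that is not taken from the diff:

    # release-* branches match the guard, so the script would exit before merging
    BUILDKITE_BRANCH="release-v1.0.0"
    if [[ "$BUILDKITE_BRANCH" =~ ^(develop|master|dinsic|shhs|release-.*)$ ]]; then
        echo "Not merging forward, as this is a release branch"
    fi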
.buildkite/pipeline.yml
@ -2,6 +2,7 @@ env:
   CODECOV_TOKEN: "2dd7eb9b-0eda-45fe-a47c-9b5ac040045f"
 
 steps:
 
   - command:
       - "python -m pip install tox"
       - "tox -e pep8"
@ -46,6 +47,7 @@ steps:
 
   - wait
 
 
   - command:
       - "python -m pip install tox"
       - "tox -e py35-old,codecov"
@ -181,3 +183,61 @@ steps:
           limit: 2
         - exit_status: 2
           limit: 2
+
+  - label: "SyTest - :python: 3.5 / SQLite / Monolith"
+    agents:
+      queue: "medium"
+    command:
+      - "bash .buildkite/merge_base_branch.sh"
+      - "bash .buildkite/synapse_sytest.sh"
+    plugins:
+      - docker#v3.0.1:
+          image: "matrixdotorg/sytest-synapse:py35"
+          propagate-environment: true
+    retry:
+      automatic:
+        - exit_status: -1
+          limit: 2
+        - exit_status: 2
+          limit: 2
+
+  - label: "SyTest - :python: 3.5 / :postgres: 9.6 / Monolith"
+    agents:
+      queue: "medium"
+    env:
+      POSTGRES: "1"
+    command:
+      - "bash .buildkite/merge_base_branch.sh"
+      - "bash .buildkite/synapse_sytest.sh"
+    plugins:
+      - docker#v3.0.1:
+          image: "matrixdotorg/sytest-synapse:py35"
+          propagate-environment: true
+    retry:
+      automatic:
+        - exit_status: -1
+          limit: 2
+        - exit_status: 2
+          limit: 2
+
+  - label: "SyTest - :python: 3.5 / :postgres: 9.6 / Workers"
+    agents:
+      queue: "medium"
+    env:
+      POSTGRES: "1"
+      WORKERS: "1"
+    command:
+      - "bash .buildkite/merge_base_branch.sh"
+      - "bash .buildkite/synapse_sytest.sh"
+    plugins:
+      - docker#v3.0.1:
+          image: "matrixdotorg/sytest-synapse:py35"
+          propagate-environment: true
+    soft_fail: true
+    retry:
+      automatic:
+        - exit_status: -1
+          limit: 2
+        - exit_status: 2
+          limit: 2
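Note: the three SyTest steps differ only in their env blocks (POSTGRES, WORKERS) and in the soft_fail flag on the workers run; the docker#v3.0.1 plugin runs each step's commands inside matrixdotorg/sytest-synapse:py35 with the checkout mounted and, via propagate-environment, the Buildkite environment passed through. A very rough shell approximation of what the workers step ends up executing; the exact flags are the plugin's concern, so treat this as an illustration rather than the plugin's real behaviour:

    # Rough approximation of the ":postgres: 9.6 / Workers" step (flags are assumptions)
    docker run --rm \
        --env BUILDKITE --env BUILDKITE_BRANCH --env BUILDKITE_LABEL \
        --env POSTGRES=1 --env WORKERS=1 \
        --volume "$PWD:/workdir" --workdir /workdir \
        matrixdotorg/sytest-synapse:py35 \
        bash -c "bash .buildkite/merge_base_branch.sh && bash .buildkite/synapse_sytest.sh"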
145  .buildkite/synapse_sytest.sh  Normal file
@ -0,0 +1,145 @@
#!/bin/bash
#
# Fetch sytest, and then run the tests for synapse. The entrypoint for the
# sytest-synapse docker images.

set -ex

if [ -n "$BUILDKITE" ]
then
    SYNAPSE_DIR=`pwd`
else
    SYNAPSE_DIR="/src"
fi

# Attempt to find a sytest to use.
# If /sytest exists, it means that a SyTest checkout has been mounted into the Docker image.
if [ -d "/sytest" ]; then
    # If the user has mounted in a SyTest checkout, use that.
    echo "Using local sytests..."

    # create ourselves a working directory and dos2unix some scripts therein
    mkdir -p /work/jenkins
    for i in install-deps.pl run-tests.pl tap-to-junit-xml.pl jenkins/prep_sytest_for_postgres.sh; do
        dos2unix -n "/sytest/$i" "/work/$i"
    done
    ln -sf /sytest/tests /work
    ln -sf /sytest/keys /work
    SYTEST_LIB="/sytest/lib"
else
    if [ -n "$BUILDKITE_BRANCH" ]
    then
        branch_name=$BUILDKITE_BRANCH
    else
        # Otherwise, try and find out what the branch that the Synapse checkout is using. Fall back to develop if it's not a branch.
        branch_name="$(git --git-dir=/src/.git symbolic-ref HEAD 2>/dev/null)" || branch_name="develop"
    fi

    # Try and fetch the branch
    echo "Trying to get same-named sytest branch..."
    wget -q https://github.com/matrix-org/sytest/archive/$branch_name.tar.gz -O sytest.tar.gz || {
        # Probably a 404, fall back to develop
        echo "Using develop instead..."
        wget -q https://github.com/matrix-org/sytest/archive/develop.tar.gz -O sytest.tar.gz
    }

    mkdir -p /work
    tar -C /work --strip-components=1 -xf sytest.tar.gz
    SYTEST_LIB="/work/lib"
fi

cd /work

# PostgreSQL setup
if [ -n "$POSTGRES" ]
then
    export PGUSER=postgres
    export POSTGRES_DB_1=pg1
    export POSTGRES_DB_2=pg2

    # Start the database
    su -c 'eatmydata /usr/lib/postgresql/9.6/bin/pg_ctl -w -D /var/lib/postgresql/data start' postgres

    # Use the Jenkins script to write out the configuration for a PostgreSQL using Synapse
    jenkins/prep_sytest_for_postgres.sh

    # Make the test databases for the two Synapse servers that will be spun up
    su -c 'psql -c "CREATE DATABASE pg1;"' postgres
    su -c 'psql -c "CREATE DATABASE pg2;"' postgres

fi

if [ -n "$OFFLINE" ]; then
    # if we're in offline mode, just put synapse into the virtualenv, and
    # hope that the deps are up-to-date.
    #
    # (`pip install -e` likes to reinstall setuptools even if it's already installed,
    # so we just run setup.py explicitly.)
    #
    (cd $SYNAPSE_DIR && /venv/bin/python setup.py -q develop)
else
    # We've already created the virtualenv, but lets double check we have all
    # deps.
    /venv/bin/pip install -q --upgrade --no-cache-dir -e $SYNAPSE_DIR
    /venv/bin/pip install -q --upgrade --no-cache-dir \
        lxml psycopg2 coverage codecov tap.py

    # Make sure all Perl deps are installed -- this is done in the docker build
    # so will only install packages added since the last Docker build
    ./install-deps.pl
fi


# Run the tests
>&2 echo "+++ Running tests"

RUN_TESTS=(
    perl -I "$SYTEST_LIB" ./run-tests.pl --python=/venv/bin/python --synapse-directory=$SYNAPSE_DIR --coverage -O tap --all
)

TEST_STATUS=0

if [ -n "$WORKERS" ]; then
    RUN_TESTS+=(-I Synapse::ViaHaproxy --dendron-binary=/pydron.py)
else
    RUN_TESTS+=(-I Synapse)
fi

"${RUN_TESTS[@]}" "$@" > results.tap || TEST_STATUS=$?

if [ $TEST_STATUS -ne 0 ]; then
    >&2 echo -e "run-tests \e[31mFAILED\e[0m: exit code $TEST_STATUS"
else
    >&2 echo -e "run-tests \e[32mPASSED\e[0m"
fi

>&2 echo "--- Copying assets"

# Copy out the logs
mkdir -p /logs
cp results.tap /logs/results.tap
rsync --ignore-missing-args --min-size=1B -av server-0 server-1 /logs --include "*/" --include="*.log.*" --include="*.log" --exclude="*"

# Upload coverage to codecov and upload files, if running on Buildkite
if [ -n "$BUILDKITE" ]
then
    /venv/bin/coverage combine || true
    /venv/bin/coverage xml || true
    /venv/bin/codecov -X gcov -f coverage.xml

    wget -O buildkite.tar.gz https://github.com/buildkite/agent/releases/download/v3.13.0/buildkite-agent-linux-amd64-3.13.0.tar.gz
    tar xvf buildkite.tar.gz
    chmod +x ./buildkite-agent

    # Upload the files
    ./buildkite-agent artifact upload "/logs/**/*.log*"
    ./buildkite-agent artifact upload "/logs/results.tap"

    if [ $TEST_STATUS -ne 0 ]; then
        # Annotate, if failure
        /venv/bin/python $SYNAPSE_DIR/.buildkite/format_tap.py /logs/results.tap "$BUILDKITE_LABEL" | ./buildkite-agent annotate --style="error" --context="$BUILDKITE_LABEL"
    fi
fi


exit $TEST_STATUS
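Note: because the script falls back to SYNAPSE_DIR=/src when BUILDKITE is unset and prefers a SyTest checkout mounted at /sytest, it can also be used for local debugging. A sketch of such a run, assuming the sytest-synapse image invokes this script as its entrypoint (as its header comment describes) and using example host paths:

    # Run SyTest against local Synapse and SyTest checkouts (host paths are examples)
    docker run --rm \
        -e POSTGRES=1 \
        -v /path/to/synapse:/src \
        -v /path/to/sytest:/sytest \
        -v /path/to/logs:/logs \
        matrixdotorg/sytest-synapse:py35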
.circleci/config.yml
@ -17,77 +17,10 @@ jobs:
       - run: docker push matrixdotorg/synapse:latest
       - run: docker push matrixdotorg/synapse:latest-py3
-
-  sytestpy3:
-    docker:
-      - image: matrixdotorg/sytest-synapsepy3
-    working_directory: /src
-    steps:
-      - checkout
-      - run: /synapse_sytest.sh
-      - store_artifacts:
-          path: /logs
-          destination: logs
-      - store_test_results:
-          path: /logs
-  sytestpy3postgres:
-    docker:
-      - image: matrixdotorg/sytest-synapsepy3
-    working_directory: /src
-    steps:
-      - checkout
-      - run: POSTGRES=1 /synapse_sytest.sh
-      - store_artifacts:
-          path: /logs
-          destination: logs
-      - store_test_results:
-          path: /logs
-  sytestpy3merged:
-    docker:
-      - image: matrixdotorg/sytest-synapsepy3
-    working_directory: /src
-    steps:
-      - checkout
-      - run: bash .circleci/merge_base_branch.sh
-      - run: /synapse_sytest.sh
-      - store_artifacts:
-          path: /logs
-          destination: logs
-      - store_test_results:
-          path: /logs
-  sytestpy3postgresmerged:
-    docker:
-      - image: matrixdotorg/sytest-synapsepy3
-    working_directory: /src
-    steps:
-      - checkout
-      - run: bash .circleci/merge_base_branch.sh
-      - run: POSTGRES=1 /synapse_sytest.sh
-      - store_artifacts:
-          path: /logs
-          destination: logs
-      - store_test_results:
-          path: /logs
 
 workflows:
   version: 2
   build:
     jobs:
-      - sytestpy3:
-          filters:
-            branches:
-              only: /develop|master|release-.*/
-      - sytestpy3postgres:
-          filters:
-            branches:
-              only: /develop|master|release-.*/
-      - sytestpy3merged:
-          filters:
-            branches:
-              ignore: /develop|master|release-.*/
-      - sytestpy3postgresmerged:
-          filters:
-            branches:
-              ignore: /develop|master|release-.*/
       - dockerhubuploadrelease:
           filters:
             tags:
1  changelog.d/5459.misc  Normal file
@ -0,0 +1 @@
SyTest has been moved to Buildkite.