2016-02-03 06:26:24 -05:00
|
|
|
#!/usr/bin/env python
|
2017-01-17 08:55:33 -05:00
|
|
|
#
|
|
|
|
# auto-deploy script for https://riot.im/develop
|
|
|
|
#
|
|
|
|
# Listens for HTTP hits. When it gets one, downloads the artifact from jenkins
|
|
|
|
# and deploys it as the new version.
|
|
|
|
#
|
|
|
|
# Requires the following python packages:
|
|
|
|
#
|
|
|
|
# - requests
|
|
|
|
# - flask
|
|
|
|
#
|
2016-02-03 06:26:24 -05:00
|
|
|
from __future__ import print_function
|
2016-02-03 07:00:17 -05:00
|
|
|
import json, requests, tarfile, argparse, os, errno
|
2017-01-17 08:55:33 -05:00
|
|
|
import time
|
2016-02-03 06:26:24 -05:00
|
|
|
from urlparse import urljoin
|
|
|
|
from flask import Flask, jsonify, request, abort
|
2017-01-17 08:55:33 -05:00
|
|
|
|
2016-02-03 06:26:24 -05:00
|
|
|
app = Flask(__name__)

# Deployment configuration, parsed from the command line in __main__
# before the app starts serving.
arg_jenkins_url = None      # base URL of the Jenkins server (trailing "/" ensured in __main__)
arg_extract_path = None     # directory that build tarballs are extracted into
arg_bundles_path = None     # optional common directory to collect 'bundles' contents in
arg_should_clean = None     # if True, delete the downloaded .tar.gz after extraction
arg_symlink = None          # path of the "latest" symlink updated on each deploy
arg_config_location = None  # optional config.json to symlink into each extracted tarball
|
|
|
|
|
|
|
|
class DeployException(Exception):
    """Raised when a deployment step refuses to proceed; the message is
    reported back to the HTTP caller as a 400 error."""
    pass
|
2016-02-03 06:26:24 -05:00
|
|
|
|
|
|
|
def download_file(url):
    """Download *url* into the current directory, streaming it to disk.

    The local file is named after the final path segment of the URL.

    Args:
        url (str): the URL to fetch

    Returns:
        str: the name of the file that was written

    Raises:
        requests.HTTPError: if the server responds with a 4xx/5xx status
            (previously an error page would silently be saved as the file).
    """
    local_filename = url.split('/')[-1]
    r = requests.get(url, stream=True)
    try:
        # Fail loudly rather than writing an HTML error page into what the
        # caller expects to be a tarball.
        r.raise_for_status()
        with open(local_filename, 'wb') as f:
            for chunk in r.iter_content(chunk_size=1024):
                if chunk:  # filter out keep-alive new chunks
                    f.write(chunk)
    finally:
        # streamed responses hold the connection open until released
        r.close()
    return local_filename
|
|
|
|
|
|
|
|
def untar_to(tarball, dest):
    """Unpack every member of *tarball* into the directory *dest*."""
    # NOTE(review): extractall() performs no member-path sanitisation, so a
    # malicious tarball containing '..' entries could write outside dest --
    # confirm the Jenkins artifacts are trusted.
    archive = tarfile.open(tarball)
    try:
        archive.extractall(dest)
    finally:
        archive.close()
|
|
|
|
|
2016-02-03 07:00:17 -05:00
|
|
|
def create_symlink(source, linkname):
    """Point *linkname* at *source*, atomically replacing an existing link.

    os.symlink() refuses to overwrite, so on EEXIST we create the link under
    a temporary name and rename() it into place; rename is atomic on POSIX,
    so readers never observe a missing link.

    Args:
        source (str): the path the symlink should point at
        linkname (str): where to create the symlink

    Raises:
        OSError: for any failure other than the link already existing.
    """
    try:
        os.symlink(source, linkname)
    except OSError as e:  # 'except E, e' is python2-only syntax; 'as' works on both
        if e.errno != errno.EEXIST:
            raise  # bare raise preserves the original traceback
        tmpname = linkname + ".tmp"
        try:
            # clear out a stale .tmp left behind by a previous crashed run,
            # which would otherwise make the symlink below fail with EEXIST
            os.remove(tmpname)
        except OSError as cleanup_err:
            if cleanup_err.errno != errno.ENOENT:
                raise
        # atomic modification
        os.symlink(source, tmpname)
        os.rename(tmpname, linkname)
|
|
|
|
|
2016-02-03 06:26:24 -05:00
|
|
|
@app.route("/", methods=["POST"])
def on_receive_jenkins_poke():
    """HTTP entry point: validate a Jenkins webhook POST and deploy.

    The incoming JSON is expected to look like:

        {
            "name": "VectorWebDevelop",
            "build": {
                "number": 8
            }
        }

    Aborts with a 400 if the payload is missing or malformed; otherwise
    hands off to fetch_jenkins_build and returns its response.
    """
    incoming_json = request.get_json()
    if not incoming_json:
        abort(400, "No JSON provided!")
        return  # unreachable (abort raises); kept for clarity
    print("Incoming JSON: %s" % (incoming_json,))

    job_name = incoming_json.get("name")
    if not isinstance(job_name, basestring):
        abort(400, "Bad job name: %s" % (job_name,))
        return

    build_num = incoming_json.get("build", {}).get("number", 0)
    # Check the type *before* comparing: ordering '<=' first meant a
    # non-int build number (e.g. a string) would raise TypeError under
    # python 3 and surface as a 500 instead of a clean 400.
    if not isinstance(build_num, int) or build_num <= 0:
        abort(400, "Missing or bad build number")
        return

    return fetch_jenkins_build(job_name, build_num)
|
|
|
|
|
|
|
|
def fetch_jenkins_build(job_name, build_num):
    """Check a Jenkins build and deploy its artifact if it succeeded.

    Fetches the build description from the Jenkins JSON API, verifies the
    build passed and produced exactly one .tar.gz artifact, then downloads
    and deploys it via deploy_tarball.

    Args:
        job_name (str): the Jenkins job name, e.g. "VectorWebDevelop"
        build_num (int): the build number to deploy

    Returns:
        flask.Response: an empty JSON object on success. Aborts with a
        404 when the build is unsuitable, or a 400 when deployment fails.
    """
    artifact_url = urljoin(
        arg_jenkins_url, "job/%s/%s/api/json" % (job_name, build_num)
    )
    artifact_response = requests.get(artifact_url).json()

    # The response is the full Jenkins build description; the fields we
    # rely on look like:
    # {
    #     "artifacts": [
    #         {
    #             "fileName": "vector-043f6991a4ed-....tar.gz",
    #             "relativePath": "vector-043f6991a4ed-....tar.gz"
    #         }
    #     ],
    #     "result": "SUCCESS",
    #     ...
    # }
    if artifact_response.get("result") != "SUCCESS":
        abort(404, "Not deploying. Build was not marked as SUCCESS.")
        return  # unreachable (abort raises); kept for clarity

    if len(artifact_response.get("artifacts", [])) != 1:
        abort(404, "Not deploying. Build has an unexpected number of artifacts.")
        return

    tar_gz_path = artifact_response["artifacts"][0]["relativePath"]
    if not tar_gz_path.endswith(".tar.gz"):
        abort(404, "Not deploying. Artifact is not a .tar.gz file")
        return

    tar_gz_url = urljoin(
        arg_jenkins_url, "job/%s/%s/artifact/%s" % (job_name, build_num, tar_gz_path)
    )

    # we extract into a directory based on the build number. This avoids the
    # problem of multiple builds building the same git version and thus having
    # the same tarball name. That would lead to two potential problems:
    #   (a) sometimes jenkins serves corrupted artifacts; we would replace
    #       a good deploy with a bad one
    #   (b) we'll be overwriting the live deployment, which means people might
    #       see half-written files.
    build_dir = os.path.join(arg_extract_path, "%s-#%s" % (job_name, build_num))
    try:
        deploy_tarball(tar_gz_url, build_dir)
    except DeployException as e:
        # str(e) rather than e.message: BaseException.message was removed in
        # python 3 and deprecated since python 2.6.
        abort(400, str(e))

    return jsonify({})
|
|
|
|
|
|
|
|
def deploy_tarball(tar_gz_url, build_dir):
    """Download a tarball from jenkins and deploy it as the new version.

    Args:
        tar_gz_url (str): URL of the .tar.gz artifact to fetch
        build_dir (str): fresh directory to extract this build into

    Raises:
        DeployException: if build_dir already exists, i.e. this exact
            build has been deployed before.
    """
    print("Deploying %s to %s" % (tar_gz_url, build_dir))

    if os.path.exists(build_dir):
        raise DeployException(
            "Not deploying. We have previously deployed this build."
        )
    os.mkdir(build_dir)

    # we rely on the fact that flask only serves one request at a time to
    # ensure that we do not overwrite a tarball from a concurrent request.
    filename = download_file(tar_gz_url)
    print("Downloaded file: %s" % filename)

    try:
        untar_to(filename, build_dir)
        print("Extracted to: %s" % build_dir)
    finally:
        if arg_should_clean:
            os.remove(filename)

    # Strip only the trailing '.tar.gz': str.replace() would also mangle a
    # filename containing '.tar.gz' in the middle of the name.
    # (Assumes the tarball unpacks to a top-level directory named after the
    # file minus that suffix -- the symlinks below point into it.)
    if filename.endswith(".tar.gz"):
        name_str = filename[:-len(".tar.gz")]
    else:
        name_str = filename
    extracted_dir = os.path.join(build_dir, name_str)

    if arg_config_location:
        create_symlink(source=arg_config_location, linkname=os.path.join(extracted_dir, 'config.json'))

    if arg_bundles_path:
        extracted_bundles = os.path.join(extracted_dir, 'bundles')
        move_bundles(source=extracted_bundles, dest=arg_bundles_path)

        # replace the (hopefully now empty) extracted_bundles dir with a
        # symlink to the common dir.
        relpath = os.path.relpath(arg_bundles_path, extracted_dir)
        os.rmdir(extracted_bundles)
        print("Symlink %s -> %s" % (extracted_bundles, relpath))
        os.symlink(relpath, extracted_bundles)

    create_symlink(source=extracted_dir, linkname=arg_symlink)
|
|
|
|
|
2017-01-17 08:55:33 -05:00
|
|
|
def move_bundles(source, dest):
    """Move the contents of the 'bundles' directory to a common dir.

    We check that we will not be overwriting anything before we proceed,
    so the operation is all-or-nothing: either every entry moves or a
    DeployException is raised before anything is touched.

    Args:
        source (str): path to 'bundles' within the extracted tarball
        dest (str): target common directory (created if missing)

    Raises:
        DeployException: if any entry already exists in *dest*.
    """

    if not os.path.isdir(dest):
        os.mkdir(dest)

    # build a map from source to destination, checking for non-existence as we go.
    renames = {}
    for f in os.listdir(source):
        dst = os.path.join(dest, f)
        if os.path.exists(dst):
            raise DeployException(
                "Not deploying. The bundle includes '%s' which we have previously deployed."
                % f
            )
        renames[os.path.join(source, f)] = dst

    # .items() rather than the python2-only .iteritems(), so the script
    # also runs under python 3.
    for (src, dst) in renames.items():
        print("Move %s -> %s" % (src, dst))
        os.rename(src, dst)
|
|
|
|
|
2016-02-03 06:26:24 -05:00
|
|
|
|
|
|
|
if __name__ == "__main__":
    # Command-line entry point: parse arguments, populate the module-level
    # arg_* globals, then either do a one-shot test deploy (--test) or
    # start the HTTP listener.
    parser = argparse.ArgumentParser("Runs a Vector redeployment server.")
    parser.add_argument(
        "-j", "--jenkins", dest="jenkins", default="https://matrix.org/jenkins/", help=(
            "The base URL of the Jenkins web server. This will be hit to get the\
            built artifacts (the .gz file) for redeploying."
        )
    )
    parser.add_argument(
        "-p", "--port", dest="port", default=4000, type=int, help=(
            "The port to listen on for requests from Jenkins."
        )
    )
    parser.add_argument(
        "-e", "--extract", dest="extract", default="./extracted", help=(
            "The location to extract .tar.gz files to."
        )
    )
    parser.add_argument(
        "-b", "--bundles-dir", dest="bundles_dir", help=(
            "A directory to move the contents of the 'bundles' directory to. A \
            symlink to the bundles directory will also be written inside the \
            extracted tarball. Example: './bundles'."
        )
    )
    parser.add_argument(
        "-c", "--clean", dest="clean", action="store_true", default=False, help=(
            "Remove .tar.gz files after they have been downloaded and extracted."
        )
    )
    parser.add_argument(
        "-s", "--symlink", dest="symlink", default="./latest", help=(
            "Write a symlink to this location pointing to the extracted tarball. \
            New builds will keep overwriting this symlink. The symlink will point \
            to the /vector directory INSIDE the tarball."
        )
    )
    parser.add_argument(
        "--config", dest="config", help=(
            "Write a symlink to config.json in the extracted tarball. \
            To this location."
        )
    )
    parser.add_argument(
        "--test", dest="tarball_uri", help=(
            "Don't start an HTTP listener. Instead download a build from Jenkins \
            immediately."
        ),
    )

    args = parser.parse_args()
    # Normalise the Jenkins URL to end with "/": urljoin() drops the last
    # path segment of a base URL that lacks a trailing slash.
    if args.jenkins.endswith("/"): # important for urljoin
        arg_jenkins_url = args.jenkins
    else:
        arg_jenkins_url = args.jenkins + "/"
    arg_extract_path = args.extract
    arg_bundles_path = args.bundles_dir
    arg_should_clean = args.clean
    arg_symlink = args.symlink
    arg_config_location = args.config

    if not os.path.isdir(arg_extract_path):
        os.mkdir(arg_extract_path)

    if args.tarball_uri is not None:
        # one-shot test mode; %i truncates the float timestamp to an int,
        # giving a unique-enough per-run directory name
        build_dir = os.path.join(arg_extract_path, "test-%i" % (time.time()))
        deploy_tarball(args.tarball_uri, build_dir)
    else:
        print(
            "Listening on port %s. Extracting to %s%s. Symlinking to %s. Jenkins URL: %s. Config location: %s" %
            (args.port, arg_extract_path,
            " (clean after)" if arg_should_clean else "", arg_symlink, arg_jenkins_url, arg_config_location)
        )
        # NOTE(review): debug=True enables the Werkzeug interactive debugger
        # (which allows arbitrary code execution) while binding to 0.0.0.0 --
        # confirm this service is never reachable from untrusted networks.
        app.run(host="0.0.0.0", port=args.port, debug=True)
|