lila/bin/deploy

#!/usr/bin/python3
"""Deploy lila server and assets from GitHub workflow runs"""
import argparse
import sys
import os
import os.path
import pickle
import shlex
import subprocess
import time
import textwrap
import contextlib

try:
    import requests
except ImportError:
    print("Need requests:")
    print("* Arch: pacman -S python-requests")
    print("* Debian: apt install python3-requests")
    print("* Pip: pip install requests")
    print()
    raise

try:
    import git
except ImportError:
    print("Need GitPython:")
    print("* Arch: pacman -S python-gitpython")
    print("* Debian: apt install python3-git")
    print("* Pip: pip install GitPython")
    print()
    raise

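# Paths whose git tree hashes must match between the local commit and a
# workflow run's head commit for that run's artifact to be considered
# deployable (see hash_files() and find_commits() below).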
ASSETS_FILES = [
    ".github/workflows/assets.yml",
    "public",
    "ui",
    "package.json",
    "yarn.lock",
]

SERVER_FILES = [
    ".github/workflows/server.yml",
    "app",
    "conf",
    "modules",
    "project",
    "translation",
    "build.sbt",
    "lila",
    "conf/application.conf.default",
    ".sbtopts.default",
]

ASSETS_BUILD_URL = "https://api.github.com/repos/ornicar/lila/actions/workflows/assets.yml/runs"
SERVER_BUILD_URL = "https://api.github.com/repos/ornicar/lila/actions/workflows/server.yml/runs"

ARTIFACT_DIR = "/home/lichess-artifacts"
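
# Shell command that POSTs a lila CLI command to the running site; the
# @.lila-cli file is assumed to hold the extra request header(s), typically
# authentication.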
def curl_cli(command, *, url="https://lichess.org/cli"):
    return f"curl --fail -X POST --data {shlex.quote(command)} {shlex.quote(url)} -H @.lila-cli"

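# A deployment profile bundles everything needed to ship one artifact type to
# one host: the tracked files, the workflow and artifact to pull, the symlinks
# to flip in the deploy directory, and the post-deploy command.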
def asset_profile(ssh, *,
                  deploy_dir="/home/lichess-deploy",
                  post=curl_cli("change asset version"),
                  stage=False):
    return {
        "ssh": ssh,
        "deploy_dir": deploy_dir,
        "files": ASSETS_FILES,
        "workflow_url": ASSETS_BUILD_URL,
        "artifact_name": "lila-assets",
        "symlinks": ["public"],
        "post": post,
        "stage": stage,
    }

def server_profile(ssh, *,
                   deploy_dir="/home/lichess-deploy",
                   post="systemctl restart lichess",
                   stage=False):
    return {
        "ssh": ssh,
        "deploy_dir": deploy_dir,
        "files": SERVER_FILES,
        "workflow_url": SERVER_BUILD_URL,
        "artifact_name": "lila-server",
        "symlinks": ["lib", "bin"],
        "post": post,
        "stage": stage,
    }

PROFILES = {
    "khiaw-assets": asset_profile("root@khiaw.lichess.ovh", post=curl_cli("change asset version", url="https://lichess.dev/cli"), stage=True),
    "khiaw-server": server_profile("root@khiaw.lichess.ovh", post="systemctl restart lichess-stage", stage=True),
    "ocean-server": server_profile("root@ocean.lichess.ovh", deploy_dir="/home/lichess"),
    "ocean-assets": asset_profile("root@ocean.lichess.ovh", deploy_dir="/home/lichess"),
}

class DeployError(Exception):
    pass

class ConfigError(Exception):
    pass

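# Commit matching: hash_files() fingerprints the tracked paths of a commit,
# and find_commits() walks ancestors yielding every sha with an identical
# fingerprint, so a workflow run for any of them can supply the artifact.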
def hash_files(tree, files):
    return tuple(tree[path].hexsha for path in files)

def find_commits(commit, files, wanted_hash):
    try:
        if hash_files(commit.tree, files) != wanted_hash:
            return
    except KeyError:
        return
    yield commit.hexsha
    for parent in commit.parents:
        yield from find_commits(parent, files, wanted_hash)

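# Local cache of GitHub workflow runs, pickled next to the git metadata so
# repeated deploys only need to fetch runs that are not yet recorded.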
@contextlib.contextmanager
def workflow_run_db(repo):
    with open(os.path.join(repo.common_dir, "workflow_runs.pickle"), "ab+") as f:
        try:
            f.seek(0)
            db = pickle.load(f)
        except EOFError:
            print("Created workflow run database.")
            db = {}

        yield db

        f.seek(0)
        f.truncate()
        pickle.dump(db, f)
        print("Saved workflow run database.")

def update_workflow_run_db(db, session, workflow_url, *, silent=False):
    if not silent:
        print("Updating workflow runs ...")
    url = workflow_url
    new = 0
    synced = False
    while not synced:
        if not silent:
            print(f"- {url}")
        res = session.get(url)
        if res.status_code != 200:
            print(f"Unexpected response: {res.status_code} {res.text}")
            break
        for run in res.json()["workflow_runs"]:
            if run["id"] in db and db[run["id"]]["status"] == "completed":
                synced = True
            else:
                new += 1
            run["_workflow_url"] = workflow_url
            db[run["id"]] = run
        if "next" not in res.links:
            break
        url = res.links["next"]["url"]
    if not silent:
        print(f"Added/updated {new} workflow run(s).")
    return new

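# Select a successful run built from one of the wanted commits, refreshing the
# cache and backing off (up to 30s) while matching builds are still pending;
# pull-request builds are only accepted for staging profiles.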
def find_workflow_run(repo, session, workflow_url, wanted_commits, *, stage):
    with workflow_run_db(repo) as db:
        print("Searching workflow runs ...")
        backoff = 1
        fresh = False
        while True:
            found = None
            pending = False
            for run in db.values():
                if run["head_commit"]["id"] not in wanted_commits or run["_workflow_url"] != workflow_url:
                    continue
                if run["event"] == "pull_request" and not stage:
                    # Not accepted in production, because pull request builds
                    # do not have access to the secret store. Hence no ab.
                    print(f"- {run['html_url']} PULL REQUEST (no ab)")
                elif run["status"] != "completed":
                    print(f"- {run['html_url']} PENDING (waiting {backoff}s)")
                    pending = True
                elif run["conclusion"] != "success":
                    print(f"- {run['html_url']} FAILED.")
                else:
                    print(f"- {run['html_url']} succeeded.")
                    if found is None:
                        found = run

            if found:
                print(f"Selected {found['html_url']}.")
                return found

            if not fresh:
                fresh = True
                if update_workflow_run_db(db, session, workflow_url):
                    continue

            if pending:
                time.sleep(backoff)
                backoff = min(backoff * 2, 30)
                update_workflow_run_db(db, session, workflow_url, silent=True)
                continue

            raise DeployError("Did not find successful matching workflow run.")

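# Resolve the download URL of the named artifact attached to a workflow run.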
def artifact_url(session, run, name):
    for artifact in session.get(run["artifacts_url"]).json()["artifacts"]:
        if artifact["name"] == name:
            if artifact["expired"]:
                print("Artifact expired.")
            return artifact["archive_download_url"]
    raise DeployError(f"Did not find artifact {name}.")

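# Run the deploy script on the target host inside a tmux session over mosh;
# presumably so a dropped connection does not orphan a half-finished deploy.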
def tmux(ssh, script, *, dry_run=False):
    command = f"/bin/sh -e -c {shlex.quote(';'.join(script))};/bin/bash"
    outer_command = f"/bin/sh -c {shlex.quote(command)}"
    shell_command = ["mosh", ssh, "--", "tmux", "new-session", "-A", "-s", "ci-deploy", outer_command]
    if dry_run:
        print(shlex.join(shell_command))
        return 0
    else:
        return subprocess.call(shell_command, stdout=sys.stdout, stdin=sys.stdin)

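# Shell commands executed on the target host: download the artifact zip,
# unpack it, flip the symlinks into the deploy directory, then run the
# post-deploy command (behind a confirmation prompt on non-stage profiles).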
def deploy_script(profile, session, run, url):
    auth_header = f"Authorization: {session.headers['Authorization']}"
    ua_header = f"User-Agent: {session.headers['User-Agent']}"
    deploy_dir = profile["deploy_dir"]
    artifact_unzipped = f"{ARTIFACT_DIR}/{profile['artifact_name']}-{run['id']:d}"
    artifact_zip = f"{artifact_unzipped}.zip"
    deploy_prompt = f"read -n 1 -p {shlex.quote('PRESS ENTER TO RUN: ' + profile['post'])}"

    return [
        "echo \\# Downloading ...",
        f"mkdir -p {ARTIFACT_DIR}",
        f"mkdir -p {deploy_dir}/application.home_IS_UNDEFINED/logs",
        f"[ -f {artifact_zip} ] || wget --header={shlex.quote(auth_header)} --header={shlex.quote(ua_header)} --no-clobber -O {artifact_zip} {shlex.quote(url)}",
        "echo",
        "echo \\# Unpacking ...",
        f"unzip -q -o {artifact_zip} -d {artifact_unzipped}",
        f"mkdir -p {artifact_unzipped}/d",
        f"tar -xf {artifact_unzipped}/*.tar.xz -C {artifact_unzipped}/d",
        f"cat {artifact_unzipped}/d/commit.txt",
        f"chown -R lichess:lichess {ARTIFACT_DIR}",
        "echo",
        "echo \\# Installing ...",
    ] + [
        f"echo \"{artifact_unzipped}/d/{symlink} -> {deploy_dir}/{symlink}\";ln -f --no-target-directory -s {artifact_unzipped}/d/{symlink} {deploy_dir}/{symlink}"
        for symlink in profile["symlinks"]
    ] + [
        f"chown -R lichess:lichess {deploy_dir}",
        f"chmod -f +x {deploy_dir}/bin/lila || true",
        f"echo \"SSH: {profile['ssh']}\"",
        f"echo {shlex.quote('Running: ' + profile['post'])}" if profile["stage"] else f"/bin/bash -c {shlex.quote(deploy_prompt)}",
        profile["post"],
        "echo",
        "echo \\# Done.",
    ]

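# End-to-end deploy: find equivalent commits, locate a suitable workflow run,
# resolve its artifact, and execute the deploy script on the target host.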
def deploy(profile, repo, commit, github_api_token, dry_run):
    print("# Preparing deploy ...")
    session = requests.Session()
    session.headers["Authorization"] = f"token {github_api_token}"
    session.headers["User-Agent"] = "ornicar/lila"

    try:
        wanted_hash = hash_files(commit.tree, profile["files"])
    except KeyError:
        raise DeployError("Commit is missing a required file.")
    wanted_commits = set(find_commits(commit, profile["files"], wanted_hash))
    print(f"Found {len(wanted_commits)} matching commits.")

    run = find_workflow_run(repo, session, profile["workflow_url"], wanted_commits, stage=profile["stage"])
    url = artifact_url(session, run, profile["artifact_name"])

    print(f"Deploying {url} to {profile['ssh']} ...")
    return tmux(profile["ssh"], deploy_script(profile, session, run, url), dry_run=dry_run)

def main():
    # Parse command line arguments.
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("profile", choices=PROFILES.keys())
    parser.add_argument("--dry-run", action="store_true")
    group = parser.add_mutually_exclusive_group()
    group.add_argument("--commit", "-c")

    # With optional tab completion.
    try:
        import argcomplete
    except ImportError:
        pass
    else:
        argcomplete.autocomplete(parser)
    args = parser.parse_args()

    # Read GITHUB_API_TOKEN.
    try:
        github_api_token = os.environ["GITHUB_API_TOKEN"]
    except KeyError:
        raise ConfigError(textwrap.dedent("""\
            Need environment variable GITHUB_API_TOKEN.
            * Create token on https://github.com/settings/tokens/new
            * Required scope: public_repo"""))

    # Repository and wanted commit.
    repo = git.Repo(search_parent_directories=True)
    if args.commit is None:
        if repo.is_dirty():
            raise ConfigError("Repo is dirty. Run with --commit HEAD to ignore.")
        commit = repo.head.commit
    else:
        try:
            commit = repo.commit(args.commit)
        except git.exc.BadName as err:
            raise ConfigError(err)

    return deploy(PROFILES[args.profile], repo, commit, github_api_token, args.dry_run)

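# Exit status: 128 for configuration errors, 1 for deploy errors, otherwise
# the return code of the remote deploy session.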
if __name__ == "__main__":
    try:
        sys.exit(main())
    except ConfigError as err:
        print(err)
        sys.exit(128)
    except DeployError as err:
        print(err)
        sys.exit(1)