Fix CI sync script

pull/962/head
Willem Melching 2020-01-23 13:05:54 -08:00
parent e2c784740a
commit 09283f4d6a
1 changed file with 52 additions and 29 deletions

View File

@ -1,49 +1,72 @@
#!/usr/bin/env python3
import os
import subprocess
import tempfile
import shutil

from common.basedir import BASEDIR
from azure.storage.blob import BlockBlobService
from selfdrive.test.test_car_models import routes as test_car_models_routes, non_public_routes
from selfdrive.test.process_replay.test_processes import segments as replay_segments
from xx.chffr.lib import azureutil
from xx.chffr.lib.storage import upload_dir_serial, download_dir_tpe, key_prefix_exists
from xx.chffr.lib.storage import _DATA_ACCOUNT_PRODUCTION, _DATA_ACCOUNT_CI, _DATA_BUCKET_PRODUCTION, _DATA_BUCKET_CI

# SAS token for the CI container: use $TOKEN when provided (e.g. in CI),
# otherwise mint a short-lived (1 hour) token via the Azure CLI.
sas_token = os.getenv("TOKEN", None)
if sas_token is None:
  sas_token = subprocess.check_output("az storage container generate-sas --account-name commadataci --name openpilotci --https-only --permissions lrw --expiry $(date -u '+%Y-%m-%dT%H:%M:%SZ' -d '+1 hour') --auth-mode login --as-user --output tsv", shell=True).decode().strip("\n")

service = BlockBlobService(account_name=_DATA_ACCOUNT_CI, sas_token=sas_token)
# (account, bucket) pairs to try as copy sources, in priority order.
SOURCES = [
  (_DATA_ACCOUNT_PRODUCTION, _DATA_BUCKET_PRODUCTION),
  (_DATA_ACCOUNT_PRODUCTION, "preserve"),
]

# Destination token for the CI container, and one source token per entry in
# SOURCES (zipped together in sync_to_ci_public).
DEST_KEY = azureutil.get_user_token(_DATA_ACCOUNT_CI, "openpilotci")
SOURCE_KEYS = [azureutil.get_user_token(account, bucket) for account, bucket in SOURCES]
SERVICE = BlockBlobService(_DATA_ACCOUNT_CI, sas_token=DEST_KEY)
def sync_to_ci_public(route):
  """Copy one route's data into the public openpilotci container.

  Tries each (account, bucket) in SOURCES in order via azcopy. Returns True
  when the route is already present or one copy succeeds, False when every
  source fails.
  """
  print(f"Uploading {route}")
  key_prefix = route.replace('|', '/')

  # Skip routes that are already in the destination container.
  if next(azureutil.list_all_blobs(SERVICE, "openpilotci", prefix=key_prefix), None) is not None:
    print("Already synced")
    return True

  for (source_account, source_bucket), source_key in zip(SOURCES, SOURCE_KEYS):
    print(f"Trying {source_account}/{source_bucket}")
    cmd = [
      f"{BASEDIR}/external/bin/azcopy",
      "copy",
      "https://{}.blob.core.windows.net/{}/{}?{}".format(source_account, source_bucket, key_prefix, source_key),
      "https://{}.blob.core.windows.net/{}?{}".format(_DATA_ACCOUNT_CI, "openpilotci", DEST_KEY),
      "--recursive=true",
      "--overwrite=false",
    ]

    try:
      # azcopy is chatty; only its exit status matters here.
      result = subprocess.call(cmd, stdout=subprocess.DEVNULL)
      if result == 0:
        print("Success")
        return True
    except subprocess.CalledProcessError:
      print("Failed")

  return False
if __name__ == "__main__":
  failed_routes = []

  # Sync the process-replay segments (segment names look like "route--N").
  for segment in replay_segments:
    route_name, _ = segment.rsplit('--', 1)
    if not sync_to_ci_public(route_name):
      failed_routes.append(route_name)

  # Sync the test_car_models routes, skipping routes that must stay private.
  for route in test_car_models_routes:
    if route not in non_public_routes:
      if not sync_to_ci_public(route):
        failed_routes.append(route)

  # Report failures at the end instead of aborting mid-run.
  if len(failed_routes):
    print("failed routes:")
    print(failed_routes)