improve logging around uploading (#23750)

* improve logging around uploading

* use int

* use raw

* duplicate
pull/23751/head
Willem Melching 2022-02-11 12:58:41 +01:00 committed by GitHub
parent ab9fb0541d
commit 7765bc2166
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 22 additions and 14 deletions

View File

@@ -189,16 +189,29 @@ def upload_handler(end_event: threading.Event) -> None:
cur_upload_items[tid] = cur_upload_items[tid]._replace(progress=cur / sz if sz else 1)
network_type = sm['deviceState'].networkType.raw
fn = cur_upload_items[tid].path
try:
sz = os.path.getsize(fn)
except OSError:
sz = -1
cloudlog.event("athena.upload_handler.upload_start", fn=fn, sz=sz, network_type=network_type)
response = _do_upload(cur_upload_items[tid], cb)
if response.status_code not in (200, 201, 403, 412):
cloudlog.warning(f"athena.upload_handler.retry {response.status_code} {cur_upload_items[tid]}")
cloudlog.event("athena.upload_handler.retry", status_code=response.status_code, fn=fn, sz=sz, network_type=network_type)
retry_upload(tid, end_event)
else:
cloudlog.event("athena.upload_handler.success", fn=fn, sz=sz, network_type=network_type)
UploadQueueCache.cache(upload_queue)
except (requests.exceptions.Timeout, requests.exceptions.ConnectionError, requests.exceptions.SSLError) as e:
cloudlog.warning(f"athena.upload_handler.retry {e} {cur_upload_items[tid]}")
except (requests.exceptions.Timeout, requests.exceptions.ConnectionError, requests.exceptions.SSLError):
cloudlog.event("athena.upload_handler.timeout", fn=fn, sz=sz, network_type=network_type)
retry_upload(tid, end_event)
except AbortTransferException:
cloudlog.warning(f"athena.upload_handler.abort {cur_upload_items[tid]}")
cloudlog.event("athena.upload_handler.abort", fn=fn, sz=sz, network_type=network_type)
retry_upload(tid, end_event, False)
except queue.Empty:

View File

@@ -165,16 +165,14 @@ class Uploader():
return self.last_resp
def upload(self, key, fn):
def upload(self, key, fn, network_type):
try:
sz = os.path.getsize(fn)
except OSError:
cloudlog.exception("upload: getsize failed")
return False
cloudlog.event("upload", key=key, fn=fn, sz=sz)
cloudlog.debug("checking %r with size %r", key, sz)
cloudlog.event("upload_start", key=key, fn=fn, sz=sz, network_type=network_type)
if sz == 0:
try:
@@ -185,10 +183,8 @@ class Uploader():
success = True
else:
start_time = time.monotonic()
cloudlog.debug("uploading %r", fn)
stat = self.normal_upload(key, fn)
if stat is not None and stat.status_code in (200, 201, 403, 412):
cloudlog.event("upload_success" if stat.status_code != 412 else "upload_ignored", key=key, fn=fn, sz=sz, debug=True)
try:
# tag file as uploaded
setxattr(fn, UPLOAD_ATTR_NAME, UPLOAD_ATTR_VALUE)
@@ -199,9 +195,10 @@ class Uploader():
self.last_time = time.monotonic() - start_time
self.last_speed = (sz / 1e6) / self.last_time
success = True
cloudlog.event("upload_success" if stat.status_code != 412 else "upload_ignored", key=key, fn=fn, sz=sz, network_type=network_type)
else:
cloudlog.event("upload_failed", stat=stat, exc=self.last_exc, key=key, fn=fn, sz=sz, debug=True)
success = False
cloudlog.event("upload_failed", stat=stat, exc=self.last_exc, key=key, fn=fn, sz=sz, network_type=network_type)
return success
@@ -248,8 +245,7 @@ def uploader_fn(exit_event):
key, fn = d
cloudlog.debug("upload %r over %s", d, network_type)
success = uploader.upload(key, fn)
success = uploader.upload(key, fn, sm['deviceState'].networkType.raw)
if success:
backoff = 0.1
elif allow_sleep:
@@ -258,7 +254,6 @@ def uploader_fn(exit_event):
backoff = min(backoff*2, 120)
pm.send("uploaderState", uploader.get_msg())
cloudlog.info("upload done, success=%r", success)
def main():
uploader_fn(threading.Event())