2017-09-27 11:04:18 -06:00
|
|
|
import json
|
2017-11-24 15:38:25 -07:00
|
|
|
import os
|
2017-09-27 11:04:18 -06:00
|
|
|
import urllib2
|
2019-09-23 05:08:11 -06:00
|
|
|
from datetime import timedelta
|
2017-09-27 11:04:18 -06:00
|
|
|
|
|
|
|
from django.conf import settings
|
2017-12-19 15:39:48 -07:00
|
|
|
from django.contrib.sites.models import Site
|
2018-03-07 16:01:38 -07:00
|
|
|
from django.core.cache import cache
|
2019-03-05 11:51:13 -07:00
|
|
|
from django.core.mail import send_mail
|
2019-07-24 07:45:55 -06:00
|
|
|
from django.db.models import Prefetch
|
2017-09-27 11:05:12 -06:00
|
|
|
from django.utils.timezone import now
|
2019-09-23 05:08:11 -06:00
|
|
|
from internetarchive import upload
|
|
|
|
from requests.exceptions import HTTPError, ReadTimeout
|
|
|
|
from satellite_tle import fetch_tle_from_celestrak
|
2017-09-27 11:04:18 -06:00
|
|
|
|
2019-09-23 05:08:11 -06:00
|
|
|
from network.base.models import DemodData, LatestTle, Observation, Satellite, \
|
|
|
|
Station, Tle, Transmitter
|
2019-02-06 19:08:28 -07:00
|
|
|
from network.base.utils import demod_to_db
|
2019-09-23 05:08:11 -06:00
|
|
|
from network.celery import app
|
2017-09-27 11:04:18 -06:00
|
|
|
|
|
|
|
|
2018-03-02 06:55:28 -07:00
|
|
|
@app.task(ignore_result=True)
def update_all_tle():
    """Task to update all satellite TLEs.

    Satellites flagged as manually tracked (``manual_tle``) are looked up by
    their followed NORAD id instead of their catalog id; satellites that are
    manually tracked but have no follow id are skipped entirely.
    """
    latest_tle_queryset = LatestTle.objects.all()
    satellites = Satellite.objects.exclude(
        # Excludes only rows matching BOTH conditions: manually tracked
        # satellites without a follow id cannot be updated.
        manual_tle=True,
        norad_follow_id__isnull=True
    ).prefetch_related(
        # Attach the stored latest TLE (if any) as `obj.tle` so the loop
        # below avoids one extra query per satellite.
        Prefetch('tles', queryset=latest_tle_queryset, to_attr='tle')
    )

    # Single-argument print() form works under both Python 2 and 3.
    print("==Fetching TLEs==")

    for obj in satellites:
        norad_id = obj.norad_follow_id if obj.manual_tle else obj.norad_cat_id

        try:
            # Fetch latest satellite TLE
            tle = fetch_tle_from_celestrak(norad_id)
        except LookupError:
            print('{} - {}: TLE not found [error]'.format(obj.name, norad_id))
            continue

        if obj.tle and obj.tle[0].tle1 == tle[1]:
            # Stored TLE is already the latest available for this satellite
            print('{} - {}: TLE already exists [defer]'.format(obj.name, norad_id))
            continue

        Tle.objects.create(tle0=tle[0], tle1=tle[1], tle2=tle[2], satellite=obj)
        print('{} - {}: new TLE found [updated]'.format(obj.name, norad_id))
@app.task(ignore_result=True)
def fetch_data():
    """Task to fetch satellite and transmitter data from the SatNOGS DB API.

    Existing satellites are updated in place, missing satellites and
    transmitters are created. Raises ``Exception`` when the API endpoint
    cannot be reached.
    """
    apiurl = settings.DB_API_ENDPOINT
    # Fetching is disabled when no API endpoint is configured.
    if not apiurl:
        return
    satellites_url = "{0}satellites".format(apiurl)
    transmitters_url = "{0}transmitters".format(apiurl)

    try:
        satellites = urllib2.urlopen(satellites_url).read()
        transmitters = urllib2.urlopen(transmitters_url).read()
    except urllib2.URLError:
        raise Exception('API is unreachable')

    # Fetch Satellites: update existing entries, create missing ones.
    for sat in json.loads(satellites):
        norad_cat_id = sat['norad_cat_id']
        # Drop the 'decayed' key: it is not a field of the local model.
        sat.pop('decayed', None)
        try:
            existing_satellite = Satellite.objects.get(norad_cat_id=norad_cat_id)
            existing_satellite.__dict__.update(sat)
            existing_satellite.save()
        except Satellite.DoesNotExist:
            Satellite.objects.create(**sat)

    # Fetch Transmitters: only the uuid is stored locally, so
    # get_or_create replaces the original try/DoesNotExist/create dance.
    for transmitter in json.loads(transmitters):
        Transmitter.objects.get_or_create(uuid=transmitter['uuid'])
@app.task(ignore_result=True)
def archive_audio(obs_id):
    """Upload the audio payload of one observation to archive.org.

    On a successful upload (HTTP 200) the observation is marked as archived
    and the local payload file is deleted. Network errors abort silently,
    leaving the payload on disk.
    """
    obs = Observation.objects.get(id=obs_id)
    # Non-production deployments get the environment name appended to the
    # identifier so they never collide with production items.
    suffix = '' if settings.ENVIRONMENT == 'production' \
        else '-{0}'.format(settings.ENVIRONMENT)
    identifier = 'satnogs{0}-observation-{1}'.format(suffix, obs.id)

    ogg = obs.payload.path
    filename = obs.payload.name.split('/')[-1]
    site = Site.objects.get_current()
    description = ('<p>Audio file from SatNOGS{0} <a href="{1}/observations/{2}">'
                   'Observation {3}</a>.</p>').format(suffix, site.domain,
                                                      obs.id, obs.id)
    md = {
        'collection': settings.ARCHIVE_COLLECTION,
        'title': identifier,
        'mediatype': 'audio',
        'licenseurl': 'http://creativecommons.org/licenses/by-sa/4.0/',
        'description': description,
    }
    try:
        res = upload(identifier, files=[ogg], metadata=md,
                     access_key=settings.S3_ACCESS_KEY,
                     secret_key=settings.S3_SECRET_KEY)
    except (ReadTimeout, HTTPError):
        # Give up on network errors; the payload stays on disk.
        return
    if res[0].status_code == 200:
        obs.archived = True
        obs.archive_url = '{0}{1}/{2}'.format(settings.ARCHIVE_URL, identifier, filename)
        obs.archive_identifier = identifier
        obs.save()
        # Remove the local copy only after the archive copy is confirmed.
        obs.payload.delete()
@app.task(ignore_result=True)
def clean_observations():
    """Task to clean up old observations that lack actual data.

    On stage, old non-good observations are deleted outright; remaining
    observations with a payload still on disk are queued for archiving.
    """
    threshold = now() - timedelta(days=int(settings.OBSERVATION_OLD_RANGE))
    observations = Observation.objects.filter(end__lt=threshold, archived=False) \
                                      .exclude(payload='')
    for obs in observations:
        if settings.ENVIRONMENT == 'stage':
            if not obs.is_good:
                obs.delete()
                # BUGFIX: was `return`, which aborted the whole task after
                # deleting the first bad observation; `continue` processes
                # the remaining observations as intended.
                continue
        if os.path.isfile(obs.payload.path):
            archive_audio.delay(obs.id)
@app.task
def sync_to_db():
    """Task to send demod data to db / SiDS.

    Forwards non-image demod frames from observations that ended within the
    last day, for transmitters flagged for syncing, skipping frames already
    copied to DB.
    """
    period_start = now() - timedelta(days=1)
    # Only transmitters explicitly flagged for syncing are considered.
    transmitters = Transmitter.objects.filter(sync_to_db=True).values_list('uuid', flat=True)
    frames = DemodData.objects.filter(observation__end__gte=period_start,
                                      copied_to_db=False,
                                      observation__transmitter_uuid__in=transmitters)
    for frame in frames:
        try:
            # Images are never forwarded. The queryset above already
            # restricts to copied_to_db=False, so no per-frame re-check
            # of that flag is needed.
            if not frame.is_image():
                if os.path.isfile(frame.payload_demod.path):
                    demod_to_db(frame.id)
        except Exception:
            # Deliberate best-effort sync: skip frames that fail for any
            # reason and keep processing the rest.
            continue
@app.task(ignore_result=True)
def station_status_update():
    """Task to update Station status."""
    for station in Station.objects.all():
        # Derive the numeric status from the station flags:
        # offline -> 0, testing -> 1, otherwise (online) -> 2.
        if station.is_offline:
            new_status = 0
        elif station.testing:
            new_status = 1
        else:
            new_status = 2
        station.status = new_status
        station.save()
@app.task(ignore_result=True)
def notify_for_stations_without_results():
    """Task to send email for stations with observations without results.

    A station is reported when its latest ``OBS_NO_RESULTS_MIN_COUNT``
    observations (older than the ignore window) all lack audio or waterfall
    and at least one of them ended since the previous check period.
    """
    email_to = settings.EMAIL_FOR_STATIONS_ISSUES
    # Guard clause: notifications are disabled when no address is set.
    if not email_to:
        return

    problematic_stations = []
    obs_limit = settings.OBS_NO_RESULTS_MIN_COUNT
    # Observations newer than time_limit are too recent to be expected
    # to have results yet.
    time_limit = now() - timedelta(seconds=settings.OBS_NO_RESULTS_IGNORE_TIME)
    last_check = time_limit - timedelta(seconds=settings.OBS_NO_RESULTS_CHECK_PERIOD)
    for station in Station.objects.filter(status=2):
        last_obs = Observation.objects.filter(ground_station=station,
                                              end__lt=time_limit).order_by("-end")[:obs_limit]
        obs_without_results = 0
        obs_after_last_check = False
        for observation in last_obs:
            if not (observation.has_audio and observation.has_waterfall):
                obs_without_results += 1
                if observation.end >= last_check:
                    obs_after_last_check = True
        if obs_without_results == obs_limit and obs_after_last_check:
            problematic_stations.append(str(station.id))

    if problematic_stations:
        # Notify user; the leading space matches the original message format.
        subject = '[satnogs] Station with observations without results'
        body = ' ' + ' '.join(problematic_stations)
        # NOTE(review): email_to may itself already be a list in settings;
        # wrapping it matches existing behavior -- verify the settings type.
        send_mail(subject, body, settings.DEFAULT_FROM_EMAIL,
                  [email_to], False)
@app.task(ignore_result=True)
def stations_cache_rates():
    """Cache each station's observation success rate for two hours."""
    for station in Station.objects.all():
        # Only non-testing, vetted observations count towards the rate.
        observations = station.observations.exclude(testing=True).exclude(vetted_status="unknown")
        # Materialize the ids of observations vetted good or bad, then count
        # the matching rows.
        vetted_ids = [o.id for o in observations if o.is_good or o.is_bad]
        success = observations.filter(id__in=vetted_ids).count()
        if observations:
            rate = int(100 * (float(success) / float(observations.count())))
            cache.set('station-{0}-rate'.format(station.id), rate, 60 * 60 * 2)