2019-07-12 20:36:18 -06:00
|
|
|
"""Miscellaneous functions for SatNOGS DB"""
|
2018-08-18 18:43:02 -06:00
|
|
|
import binascii
|
2019-04-27 04:56:53 -06:00
|
|
|
import logging
|
2020-07-28 02:06:29 -06:00
|
|
|
import math
|
2018-08-18 18:43:02 -06:00
|
|
|
from datetime import datetime, timedelta
|
2020-07-28 02:06:29 -06:00
|
|
|
from decimal import Decimal
|
2019-04-27 04:56:53 -06:00
|
|
|
|
2018-08-18 18:43:02 -06:00
|
|
|
from django.conf import settings
|
2019-04-27 04:56:53 -06:00
|
|
|
from django.core.cache import cache
|
2020-09-12 15:10:06 -06:00
|
|
|
from django.db.models import Count, Max, Q
|
2018-08-18 18:43:02 -06:00
|
|
|
from django.utils.timezone import make_aware
|
|
|
|
from influxdb import InfluxDBClient
|
2019-05-11 05:11:53 -06:00
|
|
|
from satnogsdecoders import __version__ as decoders_version
|
2018-11-07 09:04:06 -07:00
|
|
|
from satnogsdecoders import decoder
|
2019-04-27 04:56:53 -06:00
|
|
|
|
|
|
|
from db.base.models import DemodData, Mode, Satellite, Telemetry, Transmitter
|
2017-11-13 03:27:11 -07:00
|
|
|
|
2019-07-11 17:58:30 -06:00
|
|
|
LOGGER = logging.getLogger('db')
|
2018-12-20 06:43:48 -07:00
|
|
|
|
2017-11-13 03:27:11 -07:00
|
|
|
|
|
|
|
def calculate_statistics():
    """Calculates statistics about the data we have in DB

    :returns: a dictionary of statistics
    """
    # satellite statistics
    total_satellites = Satellite.objects.all().count()

    # data statistics
    total_data = DemodData.objects.all().count()

    # transmitter statistics
    transmitters, total_transmitters, alive_percentage = calculate_transmitters_stats()

    # mode statistics
    mode_counts, mode_labels = calculate_mode_stats(transmitters)

    # band statistics
    band_labels, band_counts = calculate_band_stats(transmitters)

    return {
        'total_satellites': total_satellites,
        'total_data': total_data,
        'transmitters': total_transmitters,
        'transmitters_alive': alive_percentage,
        'mode_label': mode_labels,
        'mode_data': mode_counts,
        'band_label': band_labels,
        'band_data': band_counts
    }
|
|
|
|
|
|
|
|
|
|
|
|
def alive_percentage_display(alive, total):
    """Format the share of alive transmitters as a display string.

    :param alive: number of transmitters with status 'active'
    :param total: total number of transmitters
    :returns: e.g. ``'75.0%'``; ``'0%'`` when either count is zero
    """
    if alive > 0 and total > 0:
        # The guard above makes division by zero impossible, so no
        # ZeroDivisionError handling is needed here.
        return '{0}%'.format(round((float(alive) / float(total)) * 100, 2))
    return '0%'


def calculate_transmitters_stats():
    """Helper function to provide transmitters and statistics about
    transmitters in db (such as total and percentage of alive)

    :returns: tuple of (transmitters queryset, total count, alive percentage string)
    """
    transmitters = Transmitter.objects.all()
    total_transmitters = transmitters.count()
    alive_transmitters = transmitters.filter(status='active').count()
    alive_transmitters_percentage = alive_percentage_display(
        alive_transmitters, total_transmitters
    )
    return transmitters, total_transmitters, alive_transmitters_percentage
|
|
|
|
|
2019-07-27 13:44:32 -06:00
|
|
|
|
2019-07-27 10:05:31 -06:00
|
|
|
def calculate_mode_stats(transmitters):
    """Helper function to provide data and labels for modes associated with
    transmitters provided

    :returns: tuple of (counts, labels), both sorted by descending count
    """
    labels = []
    counts = []
    for mode in Mode.objects.all():
        usage = transmitters.filter(downlink_mode=mode).count()
        usage += transmitters.filter(uplink_mode=mode).count()
        labels.append(mode.name)
        counts.append(usage)

    # Fresh databases carry no modes; fall back to placeholders so the
    # zip/sort below never receives an empty sequence (needed for testing).
    if not labels:
        labels = ['FM']
    if not counts:
        counts = ['FM']

    paired = sorted(zip(counts, labels), reverse=True)
    counts_sorted, labels_sorted = list(zip(*paired))

    return counts_sorted, labels_sorted
|
2019-07-27 10:05:31 -06:00
|
|
|
|
|
|
|
|
|
|
|
def calculate_band_stats(transmitters):
    """Helper function to provide data and labels for bands associated with
    transmitters provided

    :returns: tuple of (labels, counts), both sorted by descending count
    """
    # (lower_limit, upper_limit, label) in Hz; bands follow the usual
    # HF/VHF/UHF and IEEE letter designations
    band_ranges = [
        (0, 30000000, 'HF'),                     # <30 MHz
        (30000000, 300000000, 'VHF'),            # 30 MHz ~ 300 MHz
        (300000000, 1000000000, 'UHF'),          # 300 MHz ~ 1 GHz
        (1000000000, 2000000000, 'L'),           # 1 GHz ~ 2 GHz
        (2000000000, 4000000000, 'S'),           # 2 GHz ~ 4 GHz
        (4000000000, 8000000000, 'C'),           # 4 GHz ~ 8 GHz
        (8000000000, 12000000000, 'X'),          # 8 GHz ~ 12 GHz
        (12000000000, 18000000000, 'Ku'),        # 12 GHz ~ 18 GHz
        (18000000000, 27000000000, 'K'),         # 18 GHz ~ 27 GHz
        (27000000000, 40000000000, 'Ka'),        # 27 GHz ~ 40 GHz
    ]

    labels = []
    counts = []
    for lower, upper, label in band_ranges:
        # Band membership is decided by the downlink frequency only.
        matching = transmitters.filter(
            downlink_low__gte=lower, downlink_low__lt=upper
        ).count()
        labels.append(label)
        counts.append(matching)

    paired = sorted(zip(counts, labels), reverse=True)
    counts_sorted, labels_sorted = list(zip(*paired))

    return labels_sorted, counts_sorted
|
2018-08-18 18:43:02 -06:00
|
|
|
|
|
|
|
|
2019-05-11 05:11:53 -06:00
|
|
|
def create_point(fields, satellite, telemetry, demoddata, version):
    """Create a decoded data point in JSON format that is influxdb compatible

    :returns: a JSON formatted time series data point (one-element list)
    """
    tags = {
        'satellite': satellite.name,
        'decoder': telemetry.decoder,
        'station': demoddata.station,
        'observer': demoddata.observer,
        'source': demoddata.app_source,
        'version': version
    }
    measurement = {
        'time': demoddata.timestamp.strftime('%Y-%m-%dT%H:%M:%SZ'),
        'measurement': satellite.norad_cat_id,
        'tags': tags,
        'fields': fields
    }
    return [measurement]
|
2018-08-25 09:58:57 -06:00
|
|
|
|
|
|
|
|
|
|
|
def write_influx(json_obj):
    """Take a json object and send to influxdb."""
    influx_client = InfluxDBClient(
        host=settings.INFLUX_HOST,
        port=settings.INFLUX_PORT,
        username=settings.INFLUX_USER,
        password=settings.INFLUX_PASS,
        database=settings.INFLUX_DB,
        ssl=settings.INFLUX_SSL,
        verify_ssl=settings.INFLUX_VERIFY_SSL
    )
    influx_client.write_points(json_obj)
|
2018-08-18 18:43:02 -06:00
|
|
|
|
|
|
|
|
|
|
|
def decode_data(norad, period=None):
    """Decode data for a satellite, with an option to limit the scope.

    :param norad: the NORAD ID of the satellite to decode data for
    :param period: if period exists, only attempt to decode the last 4 hours,
                   otherwise attempt to decode everything
    """
    sat = Satellite.objects.get(norad_cat_id=norad)
    # Nothing to do for satellites without a registered telemetry decoder.
    if not sat.telemetry_decoder_count:
        return

    if period:
        # Any truthy `period` means "recent only": the window is fixed at the
        # last 4 hours regardless of the value passed.
        time_period = datetime.utcnow() - timedelta(hours=4)
        time_period = make_aware(time_period)
        data = DemodData.objects.filter(satellite__norad_cat_id=norad, timestamp__gte=time_period)
    else:
        data = DemodData.objects.filter(satellite=sat)
    telemetry_decoders = Telemetry.objects.filter(satellite=sat)

    # iterate over DemodData objects
    for obj in data:
        # iterate over Telemetry decoders; the first decoder that succeeds
        # wins — the inner loop is exited with `break` on success.
        for tlmdecoder in telemetry_decoders:
            try:
                # Decoder classes are looked up by capitalized name in the
                # satnogsdecoders package; skip names with no matching class.
                decoder_class = getattr(decoder, tlmdecoder.decoder.capitalize())
            except AttributeError:
                continue
            try:
                with open(obj.payload_frame.path) as frame_file:
                    # we get data frames in hex but kaitai requires binary
                    hexdata = frame_file.read()
                    bindata = binascii.unhexlify(hexdata)

                    # if we are set to use InfluxDB, send the decoded data
                    # there, otherwise we store it in the local DB.
                    if settings.USE_INFLUX:
                        try:
                            frame = decoder_class.from_bytes(bindata)
                            json_obj = create_point(
                                decoder.get_fields(frame), sat, tlmdecoder, obj, decoders_version
                            )
                            write_influx(json_obj)
                            obj.payload_decoded = 'influxdb'
                            obj.is_decoded = True
                            obj.save()
                            break
                        # Broad catch is deliberate: generated kaitai decoders
                        # can raise nearly anything on malformed frames; mark
                        # the frame undecoded and try the next decoder.
                        except Exception:  # pylint: disable=W0703
                            obj.is_decoded = False
                            obj.save()
                            continue
                    else:  # store in the local db instead of influx
                        try:
                            frame = decoder_class.from_bytes(bindata)
                        except Exception:  # pylint: disable=W0703
                            obj.payload_decoded = ''
                            obj.is_decoded = False
                            obj.save()
                            continue
                        else:
                            json_obj = create_point(
                                decoder.get_fields(frame), sat, tlmdecoder, obj, decoders_version
                            )
                            obj.payload_decoded = json_obj
                            obj.is_decoded = True
                            obj.save()
                            break
            # Unreadable frame file or non-hex payload: log it and move on to
            # the next decoder for this frame.
            except (IOError, binascii.Error) as error:
                LOGGER.error(error, exc_info=True)
                continue
|
2019-01-06 10:34:10 -07:00
|
|
|
|
|
|
|
|
|
|
|
# Caches stats about satellites and data
|
|
|
|
def cache_statistics():
    """Populate a django cache with statistics from data in DB

    .. seealso:: calculate_statistics
    """
    # All cache entries share a two-hour expiry.
    two_hours = 60 * 60 * 2

    cache.set('stats_transmitters', calculate_statistics(), two_hours)

    cache.set('satellites_ids', Satellite.objects.values('id'), two_hours)

    # Per-satellite frame counts, decoded counts and latest payload timestamp,
    # cached one entry per satellite keyed by its primary key.
    per_satellite = (
        Satellite.objects.values('name', 'norad_cat_id', 'id')
        .annotate(
            count=Count('telemetry_data'),
            decoded=Count('telemetry_data', filter=Q(telemetry_data__is_decoded=True)),
            latest_payload=Max('telemetry_data__timestamp')
        )
        .order_by('-count')
    )
    for entry in per_satellite:
        cache.set(entry['id'], entry, two_hours)

    observer_stats = (
        DemodData.objects.values('observer')
        .annotate(count=Count('observer'), latest_payload=Max('timestamp'))
        .order_by('-count')
    )
    cache.set('stats_observers', observer_stats, two_hours)
|
2020-07-28 02:06:29 -06:00
|
|
|
|
|
|
|
|
|
|
|
def remove_exponent(converted_number):
    """Remove exponent and trailing zeros from a Decimal.

    Integral values are quantized to plain integer form (e.g. ``5.000`` ->
    ``5``); everything else is normalized (e.g. ``5.500`` -> ``5.5``).
    """
    if converted_number == converted_number.to_integral():
        return converted_number.quantize(Decimal(1))
    return converted_number.normalize()
|
|
|
|
|
|
|
|
|
|
|
|
def millify(number, precision=0):
    """Humanize number.

    :param number: value to format, e.g. ``2500000``
    :param precision: decimal places kept before trailing zeros are trimmed
    :returns: string with a metric-style suffix, e.g. ``'2.5M'``
    """
    suffixes = ['', 'k', 'M', 'B', 'T', 'P', 'E', 'Z', 'Y']
    value = float(number)
    # Pick the thousands-power bucket, clamped to the available suffixes.
    if value == 0:
        order = 0
    else:
        order = int(math.floor(math.log10(abs(value)) / 3))
    order = max(0, min(len(suffixes) - 1, order))
    scaled = '{:.{precision}f}'.format(value / 10**(3 * order), precision=precision)
    trimmed = remove_exponent(Decimal(scaled))
    return '{0}{dx}'.format(trimmed, dx=suffixes[order])
|
2020-08-03 19:49:57 -06:00
|
|
|
|
|
|
|
|
|
|
|
def read_influx(norad):
    """Queries influxdb for the last 30d of data points (counted) in 1d resolution.

    :param norad: the NORAD ID of the satellite to query influxdb for
    :returns: a raw json of the measurement, timestamps, and point counts,
              or '' when norad is not an integer
    """
    influx_client = InfluxDBClient(
        settings.INFLUX_HOST,
        settings.INFLUX_PORT,
        settings.INFLUX_USER,
        settings.INFLUX_PASS,
        settings.INFLUX_DB,
        ssl=settings.INFLUX_SSL,
        verify_ssl=settings.INFLUX_VERIFY_SSL
    )

    # Guard against injection: the norad value is interpolated into the
    # query string, so only plain integers are accepted.
    if not isinstance(norad, int):
        return ''

    # epoch:s sets the return timestamps to unixtime for easier conversion
    query = (
        'SELECT count(*) FROM "' + str(norad) +
        '" WHERE time > now() - 30d GROUP BY time(1d) fill(null)'
    )
    results = influx_client.query(query, params={'epoch': 's'})
    return results.raw
|