wut-ml-load wut-ml-save

master
ml server 2020-01-04 01:06:03 -07:00
parent f67ebdb9d4
commit 2707edb725
5 changed files with 129 additions and 6 deletions

View File

@ -27,9 +27,12 @@ do
echo -n "$OBSID "
echo -n "Vet: $VET "
# Get Machine Learning Result
WUT_VET=`./wut $OBSID | cut -f 2 -d " "`
echo -n "Wut: $WUT_VET "
if [ $VET = $WUT_VET ] ; then
WUT_VETS=`./wut $OBSID`
WUT_VET=`echo $WUT_VETS | cut -f 2 -d " "`
WUT_RATE=`echo $WUT_VETS | cut -f 1 -d " "`
echo -n "$WUT_VET, "
echo -n "$WUT_RATE, "
if [ "$VET" = "$WUT_VET" ] ; then
let CORRECT=$CORRECT+1
else
let INCORRECT=$INCORRECT+1

2
wut-ml
View File

@ -54,7 +54,7 @@ model.compile(loss='binary_crossentropy',
optimizer='rmsprop',
metrics=['accuracy'])
model.fit(x=train_it, validation_data=val_it, epochs=1, verbose=2, workers=8, use_multiprocessing=True)
model.fit(x=train_it, validation_data=val_it, epochs=16, verbose=2, workers=16, use_multiprocessing=True)
prediction = model.predict(x=test_it, batch_size=None, verbose=0, steps=None, use_multiprocessing=True)
print(prediction)

60
wut-ml-load 100755
View File

@ -0,0 +1,60 @@
#!/usr/bin/python3
# wut-ml-load
#
# Vet a SatNOGS image using the machine learning model previously
# trained and saved to wut.h5 by wut-ml-save.
# It will vet the image located at test/unvetted/waterfall.png.
#
# Note, there is an issue to fix where it will vet everything
# under the data/test directory, so fix that. For now, just delete
# everything else. :)
#
# Usage:
# wut-ml-load
# Example:
# wut-ml-load
import os
import numpy as np
import tensorflow.python.keras
from tensorflow.python.keras import Sequential
from tensorflow.python.keras.layers import Activation, Dropout, Flatten, Dense
from tensorflow.python.keras.preprocessing.image import ImageDataGenerator
from tensorflow.python.keras.layers import Convolution2D, MaxPooling2D, ZeroPadding2D
from tensorflow.python.keras import optimizers
from tensorflow.python.keras.preprocessing import image
from tensorflow.python.keras.models import load_model
from tensorflow.python.keras.preprocessing.image import load_img
from tensorflow.python.keras.preprocessing.image import img_to_array

# Load the already-trained model (architecture + weights + compile state).
# NOTE: do NOT rebuild a Sequential model here — doing so would discard
# the trained weights and run predictions on a random, untrained network
# (that was the bug in the original version of this script).
model = load_model('wut.h5')

# Build an iterator over the images to vet. The original script never
# defined test_it, so model.predict() raised a NameError.
datagen = ImageDataGenerator()
test_it = datagen.flow_from_directory('data/test/', class_mode='binary')

# Run the model over the test images. The final layer is a single
# sigmoid unit, so each prediction is a probability in [0, 1].
prediction = model.predict(x=test_it, batch_size=None, verbose=0, steps=None, use_multiprocessing=True, workers=16)
print(prediction)
# Class index 1 corresponds to 'bad' under flow_from_directory's
# alphabetical class ordering — presumably bad/ sorts before good/;
# TODO(review): confirm against the data/ directory layout.
if prediction[0][0] == 1:
    rating = 'bad'
else:
    rating = 'good'
print('Observation: %s' % (rating))

60
wut-ml-save 100755
View File

@ -0,0 +1,60 @@
#!/usr/bin/python3
# wut-ml-save
#
# Train a binary image classifier on SatNOGS waterfall images and save
# the resulting model (architecture + weights) to wut.h5 for later use.
#
# Note, there is an issue to fix where it will vet everything
# under the data/test directory, so fix that. For now, just delete
# everything else. :)
#
# Usage:
# wut-ml-save
# Example:
# wut-ml-save
import os
import numpy as np
import tensorflow.python.keras
from tensorflow.python.keras import Sequential
from tensorflow.python.keras.layers import Activation, Dropout, Flatten, Dense
from tensorflow.python.keras.preprocessing.image import ImageDataGenerator
from tensorflow.python.keras.layers import Convolution2D, MaxPooling2D, ZeroPadding2D
from tensorflow.python.keras import optimizers
from tensorflow.python.keras.preprocessing import image
from tensorflow.python.keras.models import load_model
from tensorflow.python.keras.preprocessing.image import load_img
from tensorflow.python.keras.preprocessing.image import img_to_array

# Directory iterators for the train / validation / test splits.
generator = ImageDataGenerator()
train_it = generator.flow_from_directory('data/train/', class_mode='binary')
val_it = generator.flow_from_directory('data/val/', class_mode='binary')
test_it = generator.flow_from_directory('data/test/', class_mode='binary')

# Peek at one training batch as a sanity check on shapes and pixel range.
sample_x, sample_y = train_it.next()
print('Batch shape=%s, min=%.3f, max=%.3f' % (sample_x.shape, sample_x.min(), sample_x.max()))

# Input image dimensions (RGB, 256x256).
img_width = 256
img_height = 256

# Small convnet: three conv/ReLU/pool stages, then a dense head with
# dropout and a single sigmoid output for the binary good/bad decision.
model = Sequential()
model.add(Convolution2D(32, 3, 3, input_shape=(img_width, img_height, 3)))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
for n_filters in (32, 64):
    model.add(Convolution2D(n_filters, 3, 3))
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Flatten())
model.add(Dense(64))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(1))
model.add(Activation('sigmoid'))

model.compile(loss='binary_crossentropy',
              optimizer='rmsprop',
              metrics=['accuracy'])

# Train, then persist everything needed for inference to wut.h5.
model.fit(x=train_it, validation_data=val_it, epochs=1, verbose=2, workers=16, use_multiprocessing=True)
model.save('wut.h5')

View File

@ -36,7 +36,7 @@ while [ $OBSID -gt $OBSIDMIN ]
--http2 --ipv4 \
--silent \
--output $OBSID.json \
"$APIURL/observations/?id=$OBSID&ground_station=&satellite__norad_cat_id=&transmitter_uuid=&transmitter_mode=&transmitter_type=&vetted_status=&vetted_user=&start=&end=" && sleep `echo $((0 + RANDOM % 1))`
"$APIURL/observations/?id=$OBSID&ground_station=&satellite__norad_cat_id=&transmitter_uuid=&transmitter_mode=&transmitter_type=&vetted_status=&vetted_user=&start=&end=" && sleep `echo $((0 + RANDOM % 11))`
WATERURL=`cat $OBSID.json | jq --compact-output '.[0] | {waterfall}' | cut -f 2- -d : | sed -e 's/}//g' -e 's/http:/https:/g' -e 's/"//g'`
WATERFILE=`basename $WATERURL`
[ ! -f $WATERFILE ] && \
@ -46,7 +46,7 @@ while [ $OBSID -gt $OBSIDMIN ]
--continue-at - \
--remote-time \
--output $WATERFILE \
$WATERURL && sleep `echo $((0 + RANDOM % 2))`
$WATERURL && sleep `echo $((0 + RANDOM % 11))`
cd ..
let OBSID=$OBSID-1
done