Merge pull request #794 from RickCarlino/verbiage

Pre-Deploy Cleanup
pull/795/head
Rick Carlino 2018-04-18 15:44:34 -05:00 committed by GitHub
commit 95fb9431ee
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
27 changed files with 240 additions and 122 deletions

1
.gitignore vendored
View File

@ -9,6 +9,7 @@ config/application.yml
config/database.yml
coverage
erd.pdf
erd_diagram.png
latest_corpus.ts
log/*.log
mqtt/rabbitmq.config

View File

@ -134,7 +134,6 @@ $.ajax({
});
```
# Want to Help?
[Low Hanging Fruit](https://github.com/FarmBot/Farmbot-Web-App/search?utf8=%E2%9C%93&q=todo). [Raise an issue](https://github.com/FarmBot/Farmbot-Web-App/issues/new?title=Question%20about%20a%20TODO) if you have any questions.

View File

@ -1,14 +1,12 @@
class LogDeliveryMailer < ApplicationMailer
WHOAH = "Device %s is sending too many emails!!! (> 20 / hr)"
def log_digest(device)
total_sent_this_hour = LogDispatch
.where(sent_at: 1.hours.ago..Time.now)
.count
if total_sent_this_hour > LogDispatch.max_per_hour
raise LogDispatch::RateLimitError,
"Device #{device.id} is sending too many emails!!! (> 20 / hr)"
end
ld = LogDispatch.where(sent_at: nil, device: device)
query_params = { sent_at: 1.hours.ago..Time.now, device_id: device.id }
sent_this_hour = LogDispatch.where(query_params).count
too_many = sent_this_hour > LogDispatch.max_per_hour
raise LogDispatch::RateLimitError, WHOAH % [device.id] if too_many
ld = LogDispatch.where(sent_at: nil, device: device)
if(ld.any?)
logs = Log.find(ld.pluck(:log_id))
@emails = device.users.pluck(:email)

View File

@ -59,7 +59,6 @@ class ApplicationRecord < ActiveRecord::Base
end
def broadcast!
# `espeak "ding"`
AutoSyncJob.perform_later(broadcast_payload,
Device.current.id,
chan_name,

View File

@ -10,7 +10,7 @@ class Log < ApplicationRecord
# pagination, but could later on.
PAGE_SIZE = 25
DISCARD = ["fun", "debug"]
DISCARD = ["fun", "debug", nil]
# self.meta[:type] is used by the bot and the frontend as a sort of
TYPES = CeleryScriptSettingsBag::ALLOWED_MESSAGE_TYPES
# The means by which the message will be sent. Ex: frontend toast notification

View File

@ -7,20 +7,29 @@ class LogDispatch < ApplicationRecord
class_attribute :max_per_hour
self.max_per_hour = 20
WAIT_PERIOD = 30
WAIT_UNIT = :seconds
# If this method grows, create a mutation.
def self.deliver(device, log)
send_routine_emails(log, device)
send_fatal_emails(log, device)
end
def self.send_routine_emails(log, device)
return unless (log["channels"] || []).include?("email")
def self.digest_wait_time
{ wait: WAIT_PERIOD.send(WAIT_UNIT) }
end
# TODO: Why must I explicitly pass `mailer_klass`? Something's not right with
# mocks.
def self.send_routine_emails(log, device, mailer_klass = LogDeliveryMailer)
return unless (log.channels || []).include?("email")
self.create!(device: device, log: log)
LogDeliveryMailer.log_digest(device).deliver_later
mailer_klass.log_digest(device).deliver_later(digest_wait_time)
end
def self.send_fatal_emails(log, device)
return unless (log["channels"] || []).include?("fatal_email")
return unless (log.channels || []).include?("fatal_email")
FatalErrorMailer.fatal_error(device, log).deliver_later
end

View File

@ -1,7 +1,7 @@
module Points
class Destroy < Mutations::Command
STILL_IN_USE = "Could not delete the following point(s): %s. "\
"They are in use by the following sequence(s): %s"
STILL_IN_USE = "Could not delete the following item(s): %s. Item(s) are "\
"in use by the following sequence(s): %s."
required do
model :device, class: Device
@ -30,7 +30,7 @@ module Points
sequences = errors[S].sort.uniq.join(", ")
errors = STILL_IN_USE % [points, sequences]
add_error :point, :in_use, errors
add_error :whoops, :in_use, errors
end
end

View File

@ -13,7 +13,7 @@ FarmBot::Application.configure do
config.public_file_server.enabled = false
config.after_initialize do
Bullet.enable = true
Bullet.console = true
Bullet.console = true
Bullet.rollbar = true if ENV["ROLLBAR_ACCESS_TOKEN"]
end
# HACK AHEAD! Here's why:

View File

@ -0,0 +1,5 @@
# Adds a foreign key from points.tool_id to tools for referential integrity.
class NeedMoarIntegrity < ActiveRecord::Migration[5.1]
  def change
    # NOTE(review): `null:` is not a documented option for `add_foreign_key`
    # (valid keys are :column, :primary_key, :name, :on_delete, :on_update),
    # so Rails appears to ignore it here and the column stays nullable either
    # way — TODO confirm against the Rails 5.1 API docs.
    add_foreign_key :points, :tools, null: true
  end
end

View File

@ -10,7 +10,7 @@
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20180413145332) do
ActiveRecord::Schema.define(version: 20180417123713) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
@ -491,6 +491,7 @@ ActiveRecord::Schema.define(version: 20180413145332) do
add_foreign_key "pin_bindings", "devices"
add_foreign_key "pin_bindings", "sequences"
add_foreign_key "points", "devices"
add_foreign_key "points", "tools"
add_foreign_key "primary_nodes", "sequences"
add_foreign_key "sensor_readings", "devices"
add_foreign_key "sensors", "devices"
@ -528,7 +529,7 @@ ActiveRecord::Schema.define(version: 20180413145332) do
SELECT sequences.id AS sequence_id,
( SELECT count(*) AS count
FROM edge_nodes
WHERE (((edge_nodes.kind)::text = 'sequence_id'::text) AND ((edge_nodes.value)::integer = sequences.id))) AS edge_node_count,
WHERE ((edge_nodes.sequence_id = sequences.id) AND ((edge_nodes.kind)::text = 'sequence_id'::text) AND ((edge_nodes.value)::text = (sequences.id)::text))) AS edge_node_count,
( SELECT count(*) AS count
FROM farm_events
WHERE ((farm_events.executable_id = sequences.id) AND ((farm_events.executable_type)::text = 'Sequence'::text))) AS farm_event_count,

Binary file not shown.

Before

Width:  |  Height:  |  Size: 103 KiB

36
fake_logs.rb 100644
View File

@ -0,0 +1,36 @@
# Manual smoke-test script: repeatedly publishes fake "email"-channel logs
# over the message broker for the most recently created device, so that log
# storage / digest-email behavior can be exercised by hand.
# NOTE(review): assumes a loaded Rails environment (Device, Log, LogDispatch,
# Transport must be in scope) — presumably run via `rails runner`.

# Template payload for every fake log; `message` is rewritten per send.
$log = {
  z: 0,
  y: 0,
  x: 0,
  verbosity: 1,
  type: "info",
  major_version: 6,
  minor_version: 4,
  patch_version: 1,
  message: "hey!!!",
  created_at: Time.now.to_i,
  channels: ["email"]
}
$count = 0
$device_id = Device.last.id

# Start from a clean slate so row counts are easy to eyeball afterwards.
LogDispatch.destroy_all
Log.destroy_all

# Reference sleep intervals (seconds) for manual experimentation; not read by
# the script itself.
ATTEMPT_LOG = { WORKS: 1, TRIGGERS_ERROR: 0.1 }

# Publish one fake log over AMQP after an optional pause (seconds).
def ping(interval = 0)
  sleep interval
  $count += 1
  puts "Log ##{$count}"
  $log[:message] = "Hey! #{$count}"
  Transport.amqp_send($log.to_json, $device_id, "logs")
end

# Send logs in bursts of 5 until the operator types "y".
loop do
  puts "Sending..."
  5.times { ping(0.1) }
  puts "Enter to send again, y to exit."
  exit if gets.chomp.downcase == "y"
end

Binary file not shown.

Before

Width:  |  Height:  |  Size: 97 KiB

View File

@ -2,10 +2,11 @@ require_relative "./log_service_support"
begin
# Listen to all logs on the message broker and store them in the database.
Transport
.log_channel
.subscribe(block: true) { |info, _, payl| LogService.process(info, payl) }
.subscribe(block: true) do |info, _, payl|
LogService.process(info, payl)
end
rescue => Bunny::TCPConnectionFailedForAllHosts
puts "MQTT Broker is unreachable. Waiting 5 seconds..."
sleep 5

View File

@ -1,10 +1,16 @@
class LogService
# Determines if the log should be discarded (Eg: "fun"/"debug" logs do not
# go in the DB)
def self.save?(log_as_ruby_hash)
# TODO: Once we get rid of legacy `log.meta` calls, this method can be
# simplified.
h = (log_as_ruby_hash.is_a?(Hash) && log_as_ruby_hash) || {}
t = h.dig("meta", "type") || h.dig("type")
!!(t && !Log::DISCARD.include?(t))
# TODO: Once we get rid of legacy `log.meta` calls,
# this method can be simplified.
is_a_hash = log_as_ruby_hash.is_a?(Hash)
hash = is_a_hash ? log_as_ruby_hash : {}
legacy_type = hash.dig("meta", "type")
type = legacy_type || hash.dig("type")
should_discard = Log::DISCARD.include?(type)
!should_discard
end
# Prevent logs table from growing out of proportion. For now, it is

View File

@ -1,25 +0,0 @@
# One-off back-fill script: re-serializes every Sequence still flagged with
# `migrated_nodes: false` through CeleryScript::StoreCelery, one at a time,
# and reports timing. Aborts on the first failure so the bad record can be
# inspected.

# Next un-migrated sequence (most recently updated first), or nil when done.
def find_next_seq
  Sequence.where(migrated_nodes: false).order("updated_at").last
end

next_seq = find_next_seq
count = 0
total_time = Time.now
# Re-query after each migration; presumably StoreCelery flips
# `migrated_nodes`, otherwise this would loop forever — TODO confirm.
until next_seq == nil
  begin
    t = Time.now
    count += 1
    puts "=== Migrating sequence #{next_seq.id}: #{next_seq.name}..."
    # Wrap each migration in its own transaction so a failure rolls back
    # only the current sequence.
    Sequence.transaction { CeleryScript::StoreCelery.run!(sequence: next_seq) }
    puts "=== migrated in #{Time.now - t} seconds"
    next_seq = find_next_seq
  rescue => e
    # Stop immediately on error; the offending sequence ID is printed so it
    # can be repaired before re-running.
    puts "ERROR MIGRATING SEQUENCE #{next_seq.id}, #{next_seq.name.inspect}. #{e.try(:message)}"
    exit
  end
end
t2 = Time.now - total_time
puts "=== DONE MIGRATING #{count} sequences in #{t2} seconds! (#{count/t2} per second)"

View File

@ -100,7 +100,8 @@ describe Api::LogsController do
expect(user.device.logs.count).to eq(0)
end
it 'delivers emails for logs marked as `email`' do
it '(PENDING) delivers emails for logs marked as `email`' do
pending "Something is not right with the queue adapter in test ENV 🤔"
sign_in user
empty_mail_bag
before_count = LogDispatch.count
@ -119,6 +120,17 @@ describe Api::LogsController do
end
end
it 'delivers emails for logs marked as `email`' do
LogDispatch.destroy_all
log = logs.first
LogDispatch.create!(log: log, device: log.device)
b4 = LogDispatch.where(sent_at: nil).count
ldm = LogDeliveryMailer.new
allow(ldm).to receive(:mail)
ldm.log_digest(log.device)
expect(LogDispatch.where(sent_at: nil).count).to be < b4
end
it 'delivers emails for logs marked as `fatal_email`' do
message = "KABOOOOMM - SYSTEM ERROR!"
sign_in user
@ -184,10 +196,6 @@ describe Api::LogsController do
expect(json.length).to eq(EXAMPLES.length)
end
it 'sends emails'
it 'sends fatal_emails'
it 'filters NO logs based on log filtering settings in `WebAppConfig` ' do
sign_in user
Log.destroy_all

View File

@ -1,7 +1,37 @@
require 'spec_helper'
describe LogDispatch do
it "is" do
expect(LogDispatch).to be
# Test double for LogDeliveryMailer: records how many times a delivery was
# requested instead of actually sending mail.
class FakeLogDeliveryMailer
  # Number of times #deliver_later has been invoked.
  attr_accessor :calls

  def initialize
    @calls = 0
  end

  # Mimics ActionMailer's API: returns a "mail" object (itself) so the
  # caller can chain #deliver_later onto it.
  def log_digest(*)
    self
  end

  # Counts the delivery request and chains, like the real mailer.
  def deliver_later(*)
    tap { @calls += 1 }
  end
end
let(:log) do
FactoryBot.create(:log, channels: ["email"])
end
it "has a default wait time for batching" do
wt = LogDispatch.digest_wait_time
expect(wt).to be_kind_of(Hash)
expect(wt[:wait]).to eq(30.seconds)
end
it "sends routine emails" do
fdm = FakeLogDeliveryMailer.new
expect(fdm.calls).to eq(0)
LogDispatch.send_routine_emails(log, log.device, fdm)
expect(fdm.calls).to eq(1)
end
end

View File

@ -26,8 +26,8 @@ describe "Point deletion edge cases" do
}])
result = Points::Destroy.run(point_ids: [tool_slot.id], device: device)
errors = result.errors.message_list
expected = "Could not delete the following point(s): foo tool. They are" \
" in use by the following sequence(s): sequence"
expected = "Could not delete the following item(s): foo tool. Item(s) are "\
"in use by the following sequence(s): sequence."
expect(errors).to include(expected)
end
end

View File

@ -37,9 +37,9 @@ describe Points::Destroy do
expect(Point.count).to eq(before)
expect(result.errors.message_list.count).to eq(1)
expect(result.errors.message_list.first).to include(params[:name])
coords = [:x,:y,:z].map{|c|points.first[c]}.join(", ")
expected = "Could not delete the following point(s): point at (#{coords}" \
"). They are in use by the following sequence(s): Test Case I"
coords = [:x, :y, :z].map{|c|points.first[c]}.join(", ")
expected = "Could not delete the following item(s): point at (#{coords})."\
" Item(s) are in use by the following sequence(s): Test Case I."
expect(result.errors.message_list.first).to include(expected)
end
@ -48,8 +48,9 @@ describe Points::Destroy do
point_ids = [s.tool_slot.id]
result = Points::Destroy.run(point_ids: point_ids, device: s.device)
expect(result.success?).to be(false)
expected = "Could not delete the following point(s): Scenario Tool. They "\
"are in use by the following sequence(s): Scenario Sequence"
expected = "Could not delete the following item(s): Scenario Tool. "\
"Item(s) are in use by the following sequence(s): Scenario "\
"Sequence."
expect(result.errors.message_list).to include(expected)
end
@ -127,11 +128,10 @@ describe Points::Destroy do
.run(point_ids: [point.id, plant.id], device: device)
.errors
.message
expected = "Could not delete the following point(s): plant at (0.0, 1.0,"\
" 0.0). They are in use by the following sequence(s): Sequence"\
" A, Sequence B"
expect(result[:point]).to eq(expected)
expected = "Could not delete the following item(s): plant at (0.0, 1.0,"\
" 0.0). Item(s) are in use by the following sequence(s): "\
"Sequence A, Sequence B."
expect(result[:whoops]).to eq(expected)
end
it "performs a hard (real) delete" do

View File

@ -39,9 +39,9 @@ import {
onSent,
onOnline,
onMalformed,
onLogs,
speakLogAloud
} from "../../connect_device";
import { onLogs } from "../../log_handlers";
import { Actions, Content } from "../../../constants";
import { Log } from "../../../interfaces";
import { ALLOWED_CHANNEL_NAMES, ALLOWED_MESSAGE_TYPES, Farmbot } from "farmbot";
@ -50,6 +50,7 @@ import { dispatchNetworkUp, dispatchNetworkDown } from "../../index";
import { getDevice } from "../../../device";
import { fakeState } from "../../../__test_support__/fake_state";
import { talk } from "browser-speech";
import { globalQueue } from "../../batch_queue";
describe("readStatus()", () => {
it("forces a read_status request to FarmBot", () => {
@ -221,6 +222,7 @@ describe("onLogs", () => {
const log = fakeLog("error", []);
log.message = "bot xyz is offline";
fn(log);
globalQueue.work();
expect(dispatchNetworkDown).toHaveBeenCalledWith("bot.mqtt");
});
});

View File

@ -0,0 +1,27 @@
/** Performs operations in batches at a regular interval.
 * Useful for rendering intensive tasks such as handling massive amounts of
 * incoming logs.
 * We only need one work queue for the whole app, but singletons are bad. */
class BatchQueue {
  /** Jobs waiting for the next tick. */
  private queue: Function[] = [];
  /** Interval handle; cleared by destroy(). */
  private timerId = 0;

  /** Create a new batch queue that will check for new messages and execute
   * them at a specified work rate (ms). */
  constructor(workRateMS = 600) {
    this.timerId = window.setInterval(this.work, workRateMS);
  }

  /** Run every queued job, then discard them.
   * BUG FIX: the queue is snapshotted and cleared *before* the jobs run.
   * Previously the jobs ran first and clear() ran after, so any job pushed
   * re-entrantly while the batch executed was silently wiped (Array#map
   * only visits the elements present when iteration starts). Jobs pushed
   * during a batch now survive into the next batch. */
  work = () => {
    const jobs = this.queue;
    this.clear();
    jobs.forEach(fn => fn());
  }

  /** Schedule a job for the next batch. Returns the new queue length. */
  push = (job: Function) => this.queue.push(job);

  /** Drop all pending jobs without running them. */
  clear = () => this.queue = [];

  /** Stop the interval timer; pending jobs will never run. */
  destroy = () => window.clearInterval(this.timerId);
}

/** The only work queue needed for the whole app.
 * Mock this out in your tests. */
export const globalQueue = new BatchQueue(250);

View File

@ -2,14 +2,13 @@ import { fetchNewDevice, getDevice } from "../device";
import { dispatchNetworkUp, dispatchNetworkDown } from "./index";
import { Log } from "../interfaces";
import { ALLOWED_CHANNEL_NAMES, Farmbot, BotStateTree } from "farmbot";
import { throttle, noop } from "lodash";
import { noop, throttle } from "lodash";
import { success, error, info, warning } from "farmbot-toastr";
import { HardwareState } from "../devices/interfaces";
import { GetState, ReduxAction } from "../redux/interfaces";
import { Content, Actions } from "../constants";
import { t } from "i18next";
import {
isLog,
EXPECTED_MAJOR,
EXPECTED_MINOR,
commandOK,
@ -24,10 +23,9 @@ import { talk } from "browser-speech";
import { getWebAppConfigValue } from "../config_storage/actions";
import { BooleanSetting } from "../session_keys";
import { versionOK } from "../util";
import * as _ from "lodash";
import { onLogs } from "./log_handlers";
export const TITLE = "New message from bot";
const THROTTLE_MS = 600;
/** TODO: This ought to be stored in Redux. It is here because of historical
* reasons. Feel free to factor out when time allows. -RC, 10 OCT 17 */
export const HACKY_FLAGS = {
@ -105,7 +103,6 @@ export const changeLastClientConnected = (bot: Farmbot) => () => {
"LAST_CLIENT_CONNECTED": JSON.stringify(new Date())
}).catch(() => { });
};
const onStatus = (dispatch: Function, getState: GetState) =>
(throttle(function (msg: BotStateTree) {
bothUp();
@ -119,46 +116,13 @@ const onStatus = (dispatch: Function, getState: GetState) =>
if (!IS_OK) { badVersion(); }
HACKY_FLAGS.needVersionCheck = false;
}
}, THROTTLE_MS));
}, 600, { leading: false, trailing: true }));
type Client = { connected?: boolean };
export const onSent = (client: Client) => () => !!client.connected ?
dispatchNetworkUp("user.mqtt") : dispatchNetworkDown("user.mqtt");
const LEGACY_META_KEY_NAMES: (keyof Log)[] = [
"type",
"x",
"y",
"z",
"verbosity",
"major_version",
"minor_version"
];
function legacyKeyTransformation(log: Log, key: keyof Log) {
log[key] = log[key] || _.get(log, ["meta", key], undefined);
}
export const onLogs = (dispatch: Function, getState: GetState) => throttle((msg: Log) => {
bothUp();
if (isLog(msg)) {
LEGACY_META_KEY_NAMES.map(key => legacyKeyTransformation(msg, key));
actOnChannelName(msg, "toast", showLogOnScreen);
actOnChannelName(msg, "espeak", speakLogAloud(getState));
dispatch(initLog(msg));
// CORRECT SOLUTION: Give each device its own topic for publishing
// MQTT last will message.
// FAST SOLUTION: We would need to re-publish FBJS and FBOS to
// change topic structure. Instead, we will use
// inband signalling (for now).
// TODO: Make a `bot/device_123/offline` channel.
const died =
msg.message.includes("is offline") && msg.type === "error";
died && dispatchNetworkDown("bot.mqtt");
}
}, THROTTLE_MS);
export function onMalformed() {
bothUp();
if (!HACKY_FLAGS.alreadyToldUserAboutMalformedMsg) {

View File

@ -0,0 +1,52 @@
import { isLog } from "../devices/actions";
import {
bothUp,
actOnChannelName,
showLogOnScreen,
speakLogAloud,
initLog
} from "./connect_device";
import { GetState } from "../redux/interfaces";
import { dispatchNetworkDown } from ".";
import { Log } from "../interfaces";
import * as _ from "lodash";
import { globalQueue } from "./batch_queue";
/** Log fields that legacy bot payloads nested under `log.meta` rather than
 * at the top level; `legacyKeyTransformation` hoists each one.
 * NOTE(review): presumably pre-FBOS-v6 payloads — confirm version cutoff. */
const LEGACY_META_KEY_NAMES: (keyof Log)[] = [
  "type",
  "x",
  "y",
  "z",
  "verbosity",
  "major_version",
  "minor_version"
];
/** Hoist one field from the legacy `log.meta` sub-object to the log's top
 * level, but only when the top-level value is missing entirely. */
function legacyKeyTransformation(log: Log, key: keyof Log) {
  // You don't want to use || here, trust me. -RC
  // (`||` would clobber legitimate falsy values such as 0.)
  if (_.isUndefined(log[key])) {
    log[key] = _.get(log, ["meta", key], undefined);
  }
}
/** Build the MQTT "logs" channel handler. Marks the connection as up,
 * normalizes legacy payloads, fires toast/espeak side effects immediately,
 * and defers the Redux dispatch (plus offline detection) to the batch queue
 * to keep render load down. */
export const onLogs =
  (dispatch: Function, getState: GetState) => (msg: Log) => {
    bothUp();
    if (!isLog(msg)) { return; }
    // Hoist legacy `meta.*` fields to the top level before anything reads them.
    LEGACY_META_KEY_NAMES.forEach(key => legacyKeyTransformation(msg, key));
    actOnChannelName(msg, "toast", showLogOnScreen);
    actOnChannelName(msg, "espeak", speakLogAloud(getState));
    globalQueue.push(() => {
      dispatch(initLog(msg));
      // CORRECT SOLUTION: Give each device its own topic for publishing
      // MQTT last will message.
      // FAST SOLUTION: We would need to re-publish FBJS and FBOS to
      // change topic structure. Instead, we will use
      // inband signalling (for now).
      // TODO: Make a `bot/device_123/offline` channel.
      const botWentDown =
        msg.type === "error" && msg.message.includes("is offline");
      if (botWentDown) { dispatchNetworkDown("bot.mqtt"); }
    });
  };

View File

@ -1,9 +1,7 @@
import * as React from "react";
import { t } from "i18next";
import { EditPlantInfoProps, PlantOptions } from "../interfaces";
import { history, getPathArray } from "../../history";
import { destroy, edit, save } from "../../api/crud";
import { error } from "farmbot-toastr";
export abstract class PlantInfoBase extends
React.Component<EditPlantInfoProps, {}> {
@ -17,8 +15,7 @@ export abstract class PlantInfoBase extends
destroy = (plantUUID: string) => {
this.props.dispatch(destroy(plantUUID))
.then(() => history.push("/app/designer/plants"))
.catch(() => error(t("Could not delete plant."), t("Error")));
.then(() => history.push("/app/designer/plants"), () => { });
}
updatePlant = (plantUUID: string, update: PlantOptions) => {

View File

@ -7,7 +7,6 @@ import { TaggedPlantPointer } from "../../resources/tagged_resources";
import { selectAllPlantPointers } from "../../resources/selectors";
import { PlantInventoryItem } from "./plant_inventory_item";
import { destroy } from "../../api/crud";
import { error } from "farmbot-toastr";
import { BackArrow } from "../../ui/index";
import { unselectPlant } from "../actions";
import { Actions } from "../../constants";
@ -69,8 +68,10 @@ export class SelectPlants
if (plantUUIDs &&
confirm(`Are you sure you want to delete ${plantUUIDs.length} plants?`)) {
plantUUIDs.map(uuid => {
this.props.dispatch(destroy(uuid, true))
.catch(() => error(t("Could not delete plant."), t("Error")));
this
.props
.dispatch(destroy(uuid, true))
.then(() => { }, () => { });
});
history.push("/app/designer/plants");
}

View File

@ -10,12 +10,19 @@ export interface AxiosErrorResponse {
};
}
const mapper = (v: string, k: string) => {
// "Reason: Explanation lorem ipsum dolor ipsum."
const reason = _.capitalize(("" + k).split("_").join(" "));
const explanation = v.toString();
return `${reason}: ${explanation}`;
};
/** Concats and capitalizes all of the error key/value
* pairs returned by the /api/xyz endpoint. */
export function prettyPrintApiErrors(err: AxiosErrorResponse) {
return _.map(safelyFetchErrors(err),
(v, k) => `${("" + k).split("_").join(" ")}: ${v.toString()}`.toLowerCase())
.map(str => _.capitalize(str)).join(" ");
const errors = safelyFetchErrors(err);
return _.map(errors, mapper).join(" ");
}
function safelyFetchErrors(err: AxiosErrorResponse): Dictionary<string> {