Add Timestamps to Log Digest Emails (#951)

First draft of "cascading" auto_sync to keep the `in_use` flag synced with the API.
pull/952/head
Rick Carlino 2018-08-07 16:13:56 -05:00 committed by GitHub
parent ca8b64564b
commit f25241350e
7 changed files with 83 additions and 40 deletions
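
The "cascading" part works like this: EdgeNode, FarmEvent, and RegimenItem all point at a Sequence, so whenever one of them is created, updated, or destroyed, the referenced Sequence gets re-broadcast over auto_sync and clients can recompute its `in_use` flag. A minimal sketch of that callback shape, assuming a standard Rails model: `SequenceDependent` is a made-up stand-in for those three models, and `saved_changes` stands in for the `the_changes` helper used in the diffs below.

# Hypothetical stand-in for EdgeNode / FarmEvent / RegimenItem (not part of the commit).
class SequenceDependent < ApplicationRecord
  belongs_to :sequence

  after_save    :cascade_save     # fires on create and update
  after_destroy :cascade_destroy  # destroy needs its own hook: the save-time
                                  # change set is empty by then (same reasoning
                                  # as the comment in the EdgeNode hunk below)

  def cascade_save
    # saved_changes["sequence_id"] is [old_id, new_id]; broadcast both
    # sequences so clients recalculate each one's in_use flag.
    (saved_changes["sequence_id"] || [])
      .compact
      .uniq
      .map { |id| Sequence.find_by(id: id) }
      .compact
      .each(&:broadcast!)
  end

  def cascade_destroy
    s = Sequence.find_by(id: sequence_id)
    s && s.broadcast!
  end
end

The diffs below additionally run the broadcast through `delay` (a background job) and pass `Transport.current.cascade_id`, presumably so the cascaded broadcasts stay tied to the request that triggered them.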


@@ -6,9 +6,12 @@ class FatalErrorMailer < ApplicationMailer
       .logs
       .where(Log::IS_FATAL_EMAIL)
       .where(sent_at: nil)
+      .order(created_at: :desc)
     return if @logs.empty?
     @message = @logs
-      .pluck(:message)
+      .pluck(:created_at, :message)
+      .map{|(t,m)| [t.in_time_zone(device.timezone || "UTC"), m] }
+      .map{|(x,y)| "[#{x}]: #{y}"}
       .join("\n\n")
     @device_name = device.name || "Farmbot"
     mail(to: @emails, subject: "🚨 New error reported by #{@device_name}!")


@@ -13,8 +13,13 @@ class LogDeliveryMailer < ApplicationMailer
       .where(id: unsent.pluck(:id))
       .where
       .not(Log::IS_FATAL_EMAIL)
+      .order(created_at: :desc)
     @emails = device.users.pluck(:email)
-    @messages = logs.map(&:message)
+    @messages = logs
+      .pluck(:created_at, :message)
+      .map{|(t,m)| [t.in_time_zone(device.timezone || "UTC"), m] }
+      .map{|(x,y)| "[#{x}]: #{y}"}
+      .join("\n\n")
     @device_name = device.name || "Farmbot"
     mail(to: @emails, subject: "🌱 New message from #{@device_name}!")
     unsent.update_all(sent_at: Time.now)
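
Both mailer hunks build the digest body the same way; here is a standalone sketch of that formatting outside Rails (only ActiveSupport is assumed for `in_time_zone`, and the log rows are made up):

require "active_support/all"

# Fake (created_at, message) pairs standing in for the pluck above.
logs = [
  [Time.utc(2018, 8, 7, 21, 13, 56), "Movement complete."],
  [Time.utc(2018, 8, 7, 21, 14, 2),  "Farmbot is idle."],
]

timezone = "America/Chicago" # device.timezone || "UTC" in the mailers

body = logs
  .map { |(t, m)| [t.in_time_zone(timezone), m] }
  .map { |(x, y)| "[#{x}]: #{y}" }
  .join("\n\n")

puts body
# [2018-08-07 16:13:56 -0500]: Movement complete.
#
# [2018-08-07 16:14:02 -0500]: Farmbot is idle.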


@@ -8,18 +8,33 @@ class EdgeNode < ApplicationRecord
   belongs_to :sequence
   serialize :value, JSON
   validates_presence_of :sequence
+  after_save :maybe_cascade_save
+  after_destroy :cascade_destruction
   def broadcast?
     false
   end
-  after_save :maybe_cascade_changes, on: [:create, :update, :destroy]
-  def maybe_cascade_changes
-    is_sid = kind == "sequence_id"
-    (the_changes["value"] || [])
-      .compact
-      .map { |x| Sequence.find_by(id: x) }
-      .compact
-      .map { |x| x.broadcast!(Transport.current.cascade_id) } if is_sid
+  def is_sequence_id?
+    kind == "sequence_id"
+  end
+  def maybe_cascade_save
+    (the_changes["value"] || []) # Grab old ID _AND_ new ID
+      .compact
+      .uniq
+      .reject { |x| x == sequence_id } # 🤯 Skip recursive nodes
+      .map { |x| Sequence.find_by(id: x) }
+      .compact
+      .map { |x| x.delay.broadcast!(Transport.current.cascade_id) } if is_sequence_id?
   end
+  # This can't be bundled into the functionality of `maybe_cascade_save`
+  # because `#the_changes` returns an empty change set ({}) on destroy.
+  def cascade_destruction
+    if is_sequence_id?
+      s = Sequence.find_by(id: self.value)
+      s && s.delay.broadcast!
+    end
+  end
 end


@@ -12,11 +12,33 @@ class FarmEvent < ApplicationRecord
   WITH_YEAR = "%m/%d/%y"
   NO_YEAR = "%m/%d"
-  belongs_to :executable, polymorphic: true
-  validates :executable, presence: true
-  belongs_to :device
-  validates :device_id, presence: true
-  validate :within_20_year_window
+  belongs_to :device
+  belongs_to :executable, polymorphic: true
+  validate :within_20_year_window
+  validates :device_id, presence: true
+  validates :executable, presence: true
+  after_destroy :cascade_destruction
+  after_save :maybe_cascade_save
+  def maybe_cascade_save
+    eid = the_changes["executable_id"]
+    if eid
+      ets = (the_changes["executable_type"] || [])
+      eid.compact.uniq.each_with_index.map do |id, inx|
+        klass = ets[inx] || executable_type
+        Resources::RESOURCES.fetch(klass).find_by(id: id)
+      end
+        .compact
+        .map { |model| model.delay.broadcast! }
+    end
+  end
+  def cascade_destruction
+    if executable
+      executable.delay.broadcast!
+    end
+  end
   def within_20_year_window
     too_early = start_time && start_time < (Time.now - 20.years)
@@ -33,18 +55,4 @@
   def fancy_name
     start_time.strftime(start_time.year == Time.now.year ? NO_YEAR : WITH_YEAR)
   end
-  after_save :maybe_cascade_changes, on: [:create, :update, :destroy]
-  def maybe_cascade_changes
-    # TODO: Possible N+1? #ShipIt
-    # BETTER IDEA: Make a rails Job that takes a "StringyClassName" and resource
-    # ID, and `broadcast` in the background.
-    (the_changes["executable_type"] || [])
-      .zip(the_changes["executable_id"] || [])
-      .select{ |x| x.first && x.last }
-      .map { |(kind, id)| Resources::RESOURCES.fetch(kind).find_by(id: id) }
-      .compact
-      .map { |x| x.broadcast!(Transport.current.cascade_id) }
-  end
 end


@@ -5,19 +5,27 @@
 class RegimenItem < ApplicationRecord
   belongs_to :regimen
   belongs_to :sequence
   validates :sequence, presence: true
-  def broadcast?
-    false
-  end
-  after_save :maybe_cascade_changes, on: [:create, :update, :destroy]
-  def maybe_cascade_changes
+  after_destroy :cascade_destruction
+  after_save :maybe_cascade_save
+  def maybe_cascade_save
     (the_changes["sequence_id"] || [])
       .compact
       .map { |x| Sequence.find_by(id: x) }
       .compact
       .map { |x| x.broadcast!(Transport.current.cascade_id) }
   end
+  def cascade_destruction
+    s = Sequence.find_by(id: sequence_id)
+    if s
+      s.delay.broadcast!
+    end
+  end
+  def broadcast?
+    false
+  end
 end


@@ -15,7 +15,7 @@ describe Api::DiagnosticDumpsController do
     expect(json.pluck(:device_id).uniq).to eq([user.device.id])
   end
-  it 'creates a dump' do
+  it 'creates a dump' do # Blinky test 7-AUG-18 RC
     sign_in user
     DiagnosticDump.destroy_all
     b4 = DiagnosticDump.count


@@ -1,3 +1,5 @@
+require "spec_helper"
 describe Api::RegimensController do
   include Devise::Test::ControllerHelpers
@@ -19,15 +21,17 @@ describe Api::RegimensController do
       expect(Regimen.exists?(regimen.id)).to eq(true)
     end
-    it 'retrieves all regimina' do
+    it 'deletes a regimen' do
       sign_in user
       old_count = Regimen.count
+      device = regimen.device
+      s = FakeSequence.create( device: device)
+      RegimenItem.create!(time_offset: 1, regimen: regimen, sequence: s)
       delete :destroy, params: { id: regimen.id }
       new_count = Regimen.count
       expect(response.status).to eq(200)
       expect(old_count).to be > new_count
-      expect { regimen.reload }
-        .to(raise_error(ActiveRecord::RecordNotFound))
+      expect { regimen.reload }.to(raise_error(ActiveRecord::RecordNotFound))
     end
   end
 end