Merge pull request #933 from FarmBot/staging

v6.6: 25 July Production Deployment
Rick Carlino 2018-07-26 10:11:18 -05:00 committed by GitHub
commit 032bd0274f
421 changed files with 10943 additions and 4311 deletions

.gitignore

@ -2,25 +2,26 @@
.vscode/
*.log
*.pem
*scratchpad*
/tmp
# For self hosted users...
api_docs.md
config/application.yml
config/database.yml
config/secrets.yml
coverage
erd.pdf
erd_diagram.png
erd.pdf
latest_corpus.ts
log/*.log
mqtt/rabbitmq.conf
mqtt/rabbitmq.config
node_modules
node_modules/
package-lock.json
public/direct_upload/temp/*.jpg
public/dist
public/system
public/webpack
public/webpack/*
tmp
public/direct_upload/temp/*.jpg
scratchpad.rb
tmp


@ -1,6 +1,6 @@
language: node_js
node_js:
- 8.9.4
- 8.11.3
cache:
yarn: true
directories:
@ -8,6 +8,7 @@ cache:
- /home/travis/bundle
env:
global:
- ADMIN_PASSWORD=not_a_real_password
- SECRET_TOKEN=e815982094c62436066bafc9151f2d33c4a351a776654cb7487476de260a4592
- OS_UPDATE_SERVER=http://example.com
- FW_UPDATE_SERVER=http://example.com


@ -8,7 +8,7 @@ gem "delayed_job"
gem "devise"
gem "discard"
gem "figaro"
gem "fog-google", git: "https://github.com/fog/fog-google"
gem "fog-google"
gem "font-awesome-rails"
gem "foreman"
gem "jwt"
@ -28,6 +28,8 @@ gem "skylight"
gem "tzinfo" # For validation of user selected timezone names
gem "valid_url"
gem "webpack-rails"
# Still working out the bugs. - RC 5 Jul 18
gem "rabbitmq_http_api_client"
group :development, :test do
gem "thin"


@ -4,16 +4,6 @@ GIT
specs:
smarf_doc (1.0.0)
GIT
remote: https://github.com/fog/fog-google
revision: a2e46a2dafb01e896713ae419e5643ae6c2549a2
specs:
fog-google (1.3.3)
fog-core
fog-json
fog-xml
google-api-client (~> 0.19.1)
GEM
remote: https://rubygems.org/
specs:
@ -69,9 +59,9 @@ GEM
arel (9.0.0)
bcrypt (3.1.12)
builder (3.2.3)
bunny (2.10.0)
bunny (2.11.0)
amq-protocol (~> 2.3.0)
capybara (3.2.1)
capybara (3.3.1)
addressable
mini_mime (>= 0.1.3)
nokogiri (~> 1.8)
@ -110,6 +100,7 @@ GEM
discard (1.0.0)
activerecord (>= 4.2, < 6)
docile (1.3.1)
effin_utf8 (1.0)
erubi (1.7.1)
eventmachine (1.2.7)
excon (0.62.0)
@ -120,8 +111,10 @@ GEM
railties (>= 3.0.0)
faker (1.8.7)
i18n (>= 0.7)
faraday (0.15.2)
faraday (0.13.1)
multipart-post (>= 1.2, < 3)
faraday_middleware (0.12.2)
faraday (>= 0.7.4, < 1.0)
ffi (1.9.25)
figaro (1.1.1)
thor (~> 0.14)
@ -130,20 +123,25 @@ GEM
excon (~> 0.58)
formatador (~> 0.2)
mime-types
fog-json (1.1.0)
fog-core (~> 2.0)
fog-google (1.6.0)
fog-core
fog-json
fog-xml
google-api-client (~> 0.23.0)
fog-json (1.2.0)
fog-core
multi_json (~> 1.10)
fog-xml (0.1.3)
fog-core
nokogiri (>= 1.5.11, < 2.0.0)
font-awesome-rails (4.7.0.4)
railties (>= 3.2, < 6.0)
foreman (0.84.0)
foreman (0.85.0)
thor (~> 0.19.1)
formatador (0.2.5)
globalid (0.4.1)
activesupport (>= 4.2.0)
google-api-client (0.19.8)
google-api-client (0.23.2)
addressable (~> 2.5, >= 2.5.1)
googleauth (>= 0.5, < 0.7.0)
httpclient (>= 2.8.1, < 3.0)
@ -159,6 +157,7 @@ GEM
os (~> 0.9)
signet (~> 0.7)
hashdiff (0.3.7)
hashie (3.5.7)
httpclient (2.8.3)
i18n (1.0.1)
concurrent-ruby (~> 1.0)
@ -198,7 +197,7 @@ GEM
mutations (0.8.2)
activesupport
nio4r (2.3.1)
nokogiri (1.8.2)
nokogiri (1.8.4)
mini_portile2 (~> 2.3.0)
orm_adapter (0.5.0)
os (0.9.6)
@ -208,7 +207,7 @@ GEM
mime-types
mimemagic (~> 0.3.0)
terrapin (~> 0.6.0)
passenger (5.3.1)
passenger (5.3.3)
rack
rake (>= 0.8.1)
pg (1.0.0)
@ -220,9 +219,15 @@ GEM
pry-rails (0.3.6)
pry (>= 0.10.4)
public_suffix (3.0.2)
rabbitmq_http_api_client (1.9.1)
effin_utf8 (~> 1.0.0)
faraday (~> 0.13.0)
faraday_middleware (~> 0.12.0)
hashie (~> 3.5)
multi_json (~> 1.12)
rack (2.0.5)
rack-attack (5.2.0)
rack
rack-attack (5.4.0)
rack (>= 1.0, < 3)
rack-cors (1.0.2)
rack-test (1.0.0)
rack (>= 1.0, < 3)
@ -270,7 +275,7 @@ GEM
responders (2.4.0)
actionpack (>= 4.2.0, < 5.3)
railties (>= 4.2.0, < 5.3)
retriable (3.1.1)
retriable (3.1.2)
rollbar (2.16.2)
multi_json
rspec (3.7.0)
@ -300,7 +305,7 @@ GEM
activerecord (>= 4.0.0)
railties (>= 4.0.0)
secure_headers (6.0.0)
selenium-webdriver (3.12.0)
selenium-webdriver (3.13.0)
childprocess (~> 0.5)
rubyzip (~> 1.2)
signet (0.8.1)
@ -365,7 +370,7 @@ DEPENDENCIES
factory_bot_rails
faker
figaro
fog-google!
fog-google
font-awesome-rails
foreman
hashdiff
@ -379,6 +384,7 @@ DEPENDENCIES
polymorphic_constraints
pry
pry-rails
rabbitmq_http_api_client
rack-attack
rack-cors
rails


@ -1,3 +1,4 @@
web: bundle exec passenger start -p $PORT -e $RAILS_ENV --max-pool-size 3
log_service: bin/rails r lib/log_service.rb
worker: bundle exec rake jobs:work
background_jobs: bundle exec rake jobs:work
log_worker: bin/rails r lib/log_service_runner.rb
resource_worker: bin/rails r lib/resource_service_runner.rb
web: bundle exec passenger start -p $PORT -e $RAILS_ENV --max-pool-size 3


@ -0,0 +1,5 @@
# Run Rails & Webpack concurrently
rails: rails s -e development -p ${API_PORT:-3000} -b 0.0.0.0
log_service: rails r lib/log_service_runner.rb
resource_service: rails r lib/resource_service_runner.rb
worker: rake jobs:work


@ -1,8 +1,9 @@
# Run Rails & Webpack concurrently
rails: rails s -e development -p ${API_PORT:-3000} -b 0.0.0.0
webpack: ./node_modules/.bin/webpack-dev-server --config config/webpack.config.js
worker: rake jobs:work
logger: rails r lib/log_service.rb
rails: rails s -e development -p ${API_PORT:-3000} -b 0.0.0.0
log_service: rails r lib/log_service_runner.rb
resource_service: rails r lib/resource_service_runner.rb
webpack: ./node_modules/.bin/webpack-dev-server --config config/webpack.config.js
worker: rake jobs:work
# UNCOMMENT THIS LINE IF YOU ARE DOING MOBILE TESTING:
# Get started with `npm install weinre -g`


@ -11,6 +11,10 @@ This repository is intended for *software developers* who wish to modify the [Fa
If you are a developer interested in contributing or would like to provision your own server, you are in the right place.
We do not have the resources available to help novice developers learn to set up servers, environments, or configurations, or to perform basic Linux command-line tasks.
If you raise an issue indicating that you haven't followed the setup instructions, looked through past issues, or done a cursory internet search for basic help, expect the issue to be closed; we will point you to the setup instructions. *Again, if you do not have at least intermediate Linux and Ruby experience, please use the hosted version of the web app at my.farm.bot.*
# Q: Where do I report security issues?
We take security seriously and value the input of independent researchers. Please see our [responsible disclosure guidelines](https://farm.bot/responsible-disclosure-of-security-vulnerabilities/).
@ -25,17 +29,11 @@ For a list of example API requests and responses, see our [reference documentati
# Q: How do I set up an instance locally?
## Prerequisites
We provide example setup instructions for Ubuntu 18 [here](https://github.com/FarmBot/Farmbot-Web-App/blob/master/ubuntu_example.sh).
Installation requires an x86 desktop machine running a fresh installation of Ubuntu 18.
Installation was last tested against Ubuntu 18.04 in June of 2018 on an x86 based machine.
We **do not recommend running the server on a Raspberry Pi** due to issues with ARM compilation and memory usage. **Windows is not supported** at this time.
## Setup
A step-by-step setup guide for Ubuntu 18 can be found [here](https://github.com/FarmBot/Farmbot-Web-App/blob/master/ubuntu_example.sh). Installation on distributions other than Ubuntu is possible, but we do not provide installation support.
Installation was last tested against Ubuntu 18.04 in June of 2018. Please [raise an issue](https://github.com/FarmBot/Farmbot-Web-App/issues/new?title=Installation%20Failure) if you hit problems with any of these steps. *We can't fix issues we don't know about.*
Our ability to help individual users with private setup is limited. Using the public server at http://my.farm.bot is the recommended setup for end users. Please see the top of this document for more information.
# Config Settings (important)


@ -1,5 +1,5 @@
<% if (response.success? || note.present?) %>
# <%= response.success? ? "" : "(NOT OK)" %> <%= request.pretty_url %>
<% if (response.successful? || note.present?) %>
# <%= response.successful? ? "" : "(NOT OK)" %> <%= request.pretty_url %>
<% if note.present? %>
**Notes:** <%= note %>
<% end %>


@ -14,8 +14,7 @@ module Api
end
def update
mutate Configs::Update
.run(target: config_object, update_attrs: raw_json)
mutate Configs::Update.run(target: config_object, update_attrs: raw_json)
end
def destroy


@ -101,16 +101,6 @@ private
reset_session
end
def current_device
if @current_device
@current_device
else
@current_device = (current_user.try(:device) || no_device)
Device.current = @current_device # Mutable state eww
@current_device
end
end
def no_device
raise Errors::NoBot
end


@ -0,0 +1,28 @@
module Api
class DiagnosticDumpsController < Api::AbstractController
def index
render json: diagnostic_dumps
end
def create
Rollbar.info("Device #{current_device.id} created a diagnostic")
mutate DiagnosticDumps::Create.run(raw_json, device: current_device)
end
def destroy
diagnostic_dump.destroy!
render json: ""
end
private
def diagnostic_dumps
current_device.diagnostic_dumps
end
def diagnostic_dump
@diagnostic_dump ||= diagnostic_dumps.find(params[:id])
end
end
end


@ -20,7 +20,7 @@ module Api
end
def destroy
image.delay.destroy!
Image.delay.maybe_destroy(image.id) # See notes. This is for edge cases.
render json: ""
end


@ -27,7 +27,9 @@ module Api
end
def your_regimens
Regimen.includes(:farm_events).where(regimen_params)
Regimen
.includes(:farm_events, :regimen_items)
.where(regimen_params)
end
def regimen_params


@ -0,0 +1,112 @@
module Api
# When RabbitMQ gets a connection, it will check in with the API to make sure
# the user is allowed to perform the action.
# Returning "allow" will allow them to perform the requested action.
# Any other response results in denial.
# Results are cached for 10 minutes to prevent too many requests to the API.
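# A sketch of the exchange, assuming RabbitMQ's rabbitmq_auth_backend_http
# plugin is pointed at the routes mounted in config/routes.rb:
#
#   POST /api/rmq/user      (params checked: username, password)
#   POST /api/rmq/vhost     (params checked: username, vhost)
#   POST /api/rmq/resource  (params checked: resource, permission)
#   POST /api/rmq/topic     (params checked: username, routing_key)
#
# A response body of "allow" (optionally followed by tags, e.g.
# "allow management administrator") grants the action; anything else denies.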
class RmqUtilsController < Api::AbstractController
# The only valid format for AMQP / MQTT topics.
# Prevents a whole host of abuse / security issues.
TOPIC_REGEX = \
/(bot\.device_)\d*\.(from_clients|from_device|logs|status|sync|resources_v0|from_api|\#|\*)\.?.*/
MALFORMED_TOPIC = "malformed topic. Must match #{TOPIC_REGEX.inspect}"
ALL = [:user, :vhost, :resource, :topic]
VHOST = ENV.fetch("MQTT_VHOST") { "/" }
RESOURCES = ["queue", "exchange"]
PERMISSIONS = ["configure", "read", "write"]
skip_before_action :check_fbos_version, only: ALL
skip_before_action :authenticate_user!, only: ALL
before_action :scrutinize_topic_string
def user
case username
when "guest" then deny
when "admin" then authenticate_admin
else; device_id_in_username == current_device.id ? allow : deny
end
end
def vhost
if is_admin
allow
else
params["vhost"] == VHOST ? allow : deny
end
end
def resource
if is_admin
allow
else
res, perm = [params["resource"], params["permission"]]
ok = RESOURCES.include?(res) && PERMISSIONS.include?(perm)
ok ? allow : deny
end
end
def topic
if is_admin
allow
else
device_id_in_topic == device_id_in_username ? allow : deny
end
end
private
def is_admin
username == "admin"
end
def authenticate_admin
correct_pw = password == ENV.fetch("ADMIN_PASSWORD")
ok = is_admin && correct_pw
ok ? allow("management", "administrator") : deny
end
def deny
render json: "deny", status: 403
end
def allow(*tags)
render json: (["allow"] + tags).join(" ")
end
def username
@username ||= params["username"]
end
def password
@password ||= params["password"]
end
def routing_key
@routing_key ||= params["routing_key"]
end
def scrutinize_topic_string
return if is_admin
is_ok = routing_key ? !!TOPIC_REGEX.match(routing_key) : true
render json: MALFORMED_TOPIC, status: 422 unless is_ok
end
def device_id_in_topic
(routing_key || "") # "bot.device_9.logs"
.gsub("bot.device_", "") # "9.logs"
.split(".") # ["9", "logs"]
.first # "9"
.to_i || 0 # 9
end
def current_device
@current_device ||= Auth::FromJWT.run!(jwt: password).device
rescue Mutations::ValidationException => e
raise JWT::VerificationError, "RMQ Provided bad token"
end
def device_id_in_username
@device_id ||= username.gsub("device_", "").to_i
end
end
end


@ -24,7 +24,7 @@ module Api
end
def reading
@image ||= readings.find(params[:id])
@reading ||= readings.find(params[:id])
end
end
end


@ -23,7 +23,7 @@ module Api
end
def destroy
mutate Sequences::Delete.run(sequence: sequence, device: current_device)
mutate Sequences::Destroy.run(sequence: sequence, device: current_device)
end
private


@ -44,9 +44,7 @@ module Api
# Every time a token is created, sweep the old TokenIssuances out of the
# database.
def clean_out_old_tokens
TokenIssuance
.where("exp < ?", Time.now.to_i)
.destroy_all
CleanOutOldDbItemsJob.perform_later if TokenIssuance.any_expired?
end
def if_properly_formatted


@ -1,4 +1,18 @@
class ApplicationController < ActionController::Base
# For APIs, you may want to use :null_session instead.
protect_from_forgery with: :null_session
def current_device
if @current_device
@current_device
else
@current_device = (current_user.try(:device) || no_device)
Device.current = @current_device # Mutable state eww
@current_device
end
end
def current_device_id
"device_#{current_device.try(:id) || 0}"
end
end


@ -0,0 +1,7 @@
class CleanOutOldDbItemsJob < ApplicationJob
queue_as :default
def perform(*args)
TokenIssuance.clean_old_tokens
end
end


@ -4,13 +4,14 @@
class CreateAttachmentFromUrlJob < ApplicationJob
queue_as :default
def perform(image:, attachment_url:)
image
.device
.auto_sync_transaction do
def perform(image_id:, attachment_url:)
image = Image.find_by(id: image_id)
if image
image.device.auto_sync_transaction do
image.set_attachment_by_url(attachment_url)
image.save!
end
end
end
def max_attempts


@ -11,6 +11,7 @@ class SendFactoryResetJob < ApplicationJob
def perform(device, transport = Transport)
payl = SendFactoryResetJob.rpc_payload(device)
# TODO: Use `from_api` now. RC 23-JUL-18
transport.current.amqp_send(payl.to_json, device.id, "from_clients")
end
end


@ -19,7 +19,8 @@ module CeleryScript
{ String => "string",
Integer => "integer",
TrueClass => "boolean",
FalseClass => "boolean", }[v] || v
FalseClass => "boolean",
Float => "float"}[v] || v
end
def as_json(optns)


@ -54,7 +54,6 @@ module CeleryScript
def iterate_over_body(heap, canonical_node, parentAddr)
body = (canonical_node[:body] || []).map(&:deep_symbolize_keys)
# !body.none? && heap.put(parentAddr, CSHeap::BODY, parentAddr + 1)
@nesting_level += 1
recurse_into_body(heap, body, parentAddr)
@nesting_level -= 1


@ -5,6 +5,7 @@ class KeyGen
PROD_KEY_FILE = "/keys/production.pem"
KEY_FILE = "jwt.#{Rails.env}.pem"
SAVE_PATH = (Rails.env == "production") ? PROD_KEY_FILE : KEY_FILE
# SAVE_PATH = KEY_FILE
def self.try_file
OpenSSL::PKey::RSA.new(File.read(SAVE_PATH)) if File.file?(SAVE_PATH)


@ -27,7 +27,7 @@ class LogService
def self.deliver(data)
dev, log = [data.device, data.payload]
dev.maybe_unthrottle
LogDispatch.deliver(dev, Logs::Create.run!(log, device: dev))
Log.deliver(dev, Logs::Create.run!(log, device: dev))
end
def self.warn_user(data, violation)


@ -0,0 +1,23 @@
module Resources
DEVICE_REGEX = /device_\d*/
ACTIONS = [
DESTROY = "destroy"
]
RESOURCES = { # Because I don't trust Kernel.const_get
"FarmEvent" => FarmEvent,
"FarmwareInstallations" => FarmwareInstallation,
"Image" => Image,
"Log" => Log,
"Peripheral" => Peripheral,
"PinBinding" => PinBinding,
"PlantTemplate" => PlantTemplate,
"Point" => Point,
"Regimen" => Regimen,
"SavedGarden" => SavedGarden,
"Sensor" => Sensor,
"SensorReading" => SensorReading,
"Sequence" => Sequence,
"Tool" => Tool,
"WebcamFeed" => WebcamFeed,
}
end # Resources


@ -0,0 +1,48 @@
module Resources
class Job < Mutations::Command
NOT_FOUND = "Resource not found"
required do
duck :body, methods: [:[], :[]=]
duck :resource, duck: [:where, :find_by]
integer :resource_id
model :device, class: Device
string :action, in: ACTIONS
string :uuid
end
def validate
# Should never trigger in production.
never unless RESOURCES.values.include?(resource) # Security critical
end
def execute
case action
when DESTROY then do_deletion
else; never
end
end
private
def plural_resource
@plural_resource ||= resource.name.pluralize
end
def do_deletion
model_name = resource.model_name
mutation = Kernel.const_get(model_name.name.pluralize)::Destroy
mutation.run!(model_name.singular => model, device: device)
rescue ActiveRecord::RecordNotFound
add_error :resource, :resource, NOT_FOUND
end
def model
@model ||= device.send(plural_resource.tableize).find(resource_id)
end
# Escape hatch for things that should "never happen".
def never
raise "PANIC: Tried to do batch op on #{resource}"
end
end # Job
end # Resources


@ -0,0 +1,68 @@
module Resources
# Takes a bunch of unsafe, string-y data that came in over AMQP and parses it
# into fully formed parameters.
class PreProcessor < Mutations::Command
def self.from_amqp(delivery_info, body)
# Parse the AMQP routing key into an Array of strings.
# A properly formatted routing_key will look like this after processing:
#
# ["bot", "device_3", "resources_v0", "destroy", "Sequence", "2", "xyz"]
segments = delivery_info.routing_key.split(".")
_, device_name, _, action, resource, resource_id, uuid = segments
run!(device_name: device_name,
action: action,
resource: resource,
resource_id: resource_id,
uuid: uuid,
body: body.empty? ? "{}" : body)
end
required do
string :action, in: ACTIONS # "destroy"
string :device_name, matches: DEVICE_REGEX # "device_3"
string :resource, in: RESOURCES.keys # "Sequence"
end
optional do
integer :resource_id, default: 0 # 2
string :body # "{\"json\":true}"
string :uuid, default: "NONE" # "0dce-1d-41-1d-e95c3b"
end
def validate
maybe_set_device
maybe_set_body
end
def execute
{
action: action,
device: @device,
body: @body,
resource_id: resource_id,
resource: RESOURCES.fetch(resource),
uuid: uuid,
}
end
private
def fail_body
add_error :body, :body, "body must be a JSON object"
end
def maybe_set_body
hash = JSON.parse(body)
fail_body unless hash.is_a?(Hash)
@body = hash
rescue JSON::ParserError
fail_body
end
def maybe_set_device
id = device_name.gsub("device_", "").to_i
@device = Device.find_by(id: id)
add_error :device, :device, "Can't find device ##{id}" unless @device
end
end # PreProcessor
end # Resources


@ -0,0 +1,57 @@
module Resources
class Service
MQTT_CHAN = "from_api"
def self.ok(uuid)
{ kind: "rpc_ok", args: { label: uuid } }.to_json
end
def self.rpc_err(uuid, error)
{
kind: "rpc_error",
args: { label: uuid },
body: (error
.errors
.values
.map { |err| { kind: "explanation", args: { message: err.message }} })
}.to_json
end
def self.step1(delivery_info, body) # Returns params or nil
PreProcessor.from_amqp(delivery_info, body)
rescue Mutations::ValidationException => q
Rollbar.error(q)
raw_chan = delivery_info&.routing_key&.split(".") || []
id = raw_chan[1]&.gsub("device_", "")&.to_i
uuid = (raw_chan.last || "NONE")
Transport.current.amqp_send(rpc_err(uuid, q), id, MQTT_CHAN) if id
nil
end
def self.step2(params)
puts params if Rails.env.production?
Job.run!(params)
uuid = (params[:uuid] || "NONE")
dev = params[:device]
dev.auto_sync_transaction do
Transport.current.amqp_send(ok(uuid), dev.id, MQTT_CHAN)
end
rescue Mutations::ValidationException => q
Rollbar.error(q)
device = params.fetch(:device)
uuid = params.fetch(:uuid)
errors = q.errors.values.map do |err|
{ kind: "explanation", args: { message: err.message }}
end
message = { kind: "rpc_error",
args: { label: uuid },
body: errors }.to_json
Transport.current.amqp_send(message, device.id, MQTT_CHAN)
end
def self.process(delivery_info, body)
params = step1(delivery_info, body)
params && step2(params)
end
end # Service
end # Resources


@ -0,0 +1,29 @@
class ServiceRunner
WAIT_TIME = Rails.env.test? ? 0.01 : 5
OFFLINE_ERROR = Bunny::TCPConnectionFailedForAllHosts
CRASH_MSG = Rails.env.test? ?
"\e[32m.\e[0m" : "Something caused the broker to crash...\n"
def self.go!(channel, worker_klass)
self.new(channel, worker_klass).run!
end
def initialize(channel, worker_klass)
@channel = channel
@worker = worker_klass
end
def run!
@channel.subscribe(block: true) do |info, _, payl|
@worker.process(info, payl.force_encoding("UTF-8"))
end
rescue OFFLINE_ERROR => e
rescue StandardError => e
unless e.is_a?(OFFLINE_ERROR)
Rollbar.error(e)
print CRASH_MSG
end
sleep WAIT_TIME
retry
end
end


@ -53,6 +53,5 @@ class ThrottlePolicy
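# Integer division buckets timestamps: with a @time_unit of 60, for example
# (an illustrative value), every Time within the same minute maps to the
# same period number.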
def calculate_period(time)
(time.to_i / @time_unit)
end
end
end


@ -1,16 +1,18 @@
class FatalErrorMailer < ApplicationMailer
def fatal_error(device, log)
@emails = device.users.pluck(:email)
@logs = device
.logs
.where(Log::IS_FATAL_EMAIL)
.where(sent_at: nil)
return if @logs.empty?
@message = @logs
.pluck(:message)
.join("\n\n")
@device_name = device.name || "Farmbot"
mail(to: @emails, subject: "🚨 New error reported by #{@device_name}!")
@logs.update_all(sent_at: Time.now)
Log.transaction do
@emails = device.users.pluck(:email)
@logs = device
.logs
.where(Log::IS_FATAL_EMAIL)
.where(sent_at: nil)
return if @logs.empty?
@message = @logs
.pluck(:message)
.join("\n\n")
@device_name = device.name || "Farmbot"
mail(to: @emails, subject: "🚨 New error reported by #{@device_name}!")
@logs.update_all(sent_at: Time.now)
end
end
end


@ -2,21 +2,23 @@ class LogDeliveryMailer < ApplicationMailer
WHOAH = "Device %s is sending too many emails!!! (> 20 / hr)"
def log_digest(device)
query_params = { sent_at: 1.hours.ago..Time.now, device_id: device.id }
sent_this_hour = LogDispatch.where(query_params).count
too_many = sent_this_hour > LogDispatch.max_per_hour
raise LogDispatch::RateLimitError, WHOAH % [device.id] if too_many
ld = LogDispatch.where(sent_at: nil, device: device)
if(ld.any?)
logs = Log
.where(id: ld.pluck(:log_id))
.where
.not(Log::IS_FATAL_EMAIL)
@emails = device.users.pluck(:email)
@messages = logs.map(&:message)
@device_name = device.name || "Farmbot"
mail(to: @emails, subject: "🌱 New message from #{@device_name}!")
ld.update_all(sent_at: Time.now)
Log.transaction do
query_params = { sent_at: 1.hours.ago..Time.now, device_id: device.id }
sent_this_hour = Log.where(query_params).count
too_many = sent_this_hour > Log.max_per_hour
raise Log::RateLimitError, WHOAH % [device.id] if too_many
unsent = Log.where(sent_at: nil, device: device)
if(unsent.any?)
logs = Log
.where(id: unsent.pluck(:id))
.where
.not(Log::IS_FATAL_EMAIL)
@emails = device.users.pluck(:email)
@messages = logs.map(&:message)
@device_name = device.name || "Farmbot"
mail(to: @emails, subject: "🌱 New message from #{@device_name}!")
unsent.update_all(sent_at: Time.now)
end
end
end
end


@ -5,11 +5,20 @@
# the rug. Shoving configuration into a module is not a design pattern. Feedback
# welcome for refactoring of this code.
module CeleryScriptSettingsBag
class BoxLed
def self.name
"Raspberry Pi Box LED"
end
def self.exists?(id)
true # Not super important right now. - RC 22 JUL 18
end
end
# List of all celery script nodes that can be used as a variable...
ANY_VARIABLE = [:tool, :coordinate, :point, :identifier]
PLANT_STAGES = %w(planned planted harvested)
ALLOWED_PIN_MODES = [DIGITAL = 0, ANALOG = 1]
ALLOWED_PIN_TYPES = [Peripheral, Sensor].map(&:name)
ALLOWED_RPC_NODES = %w(home emergency_lock emergency_unlock read_status
sync check_updates power_off reboot toggle_pin
config_update calibrate execute move_absolute
@ -18,7 +27,7 @@ module CeleryScriptSettingsBag
install_farmware update_farmware take_photo zero
install_first_party_farmware remove_farmware
find_home register_gpio unregister_gpio
set_servo_angle change_ownership)
set_servo_angle change_ownership dump_info)
ALLOWED_PACKAGES = %w(farmbot_os arduino_firmware)
ALLOWED_CHAGES = %w(add remove update)
RESOURCE_NAME = %w(images plants regimens peripherals
@ -32,6 +41,8 @@ module CeleryScriptSettingsBag
ALLOWED_AXIS = %w(x y z all)
ALLOWED_LHS_TYPES = [String, :named_pin]
ALLOWED_LHS_STRINGS = [*(0..69)].map{|x| "pin#{x}"}.concat(%w(x y z))
ALLOWED_SPEC_ACTION = %w(dump_info emergency_lock emergency_unlock power_off
read_status reboot sync take_photo)
STEPS = %w(_if execute execute_script find_home move_absolute
move_relative read_pin send_message take_photo wait
write_pin )
@ -53,11 +64,16 @@ module CeleryScriptSettingsBag
BAD_AXIS = '"%s" is not a valid axis. Allowed values: %s'
BAD_POINTER_ID = "Bad point ID: %s"
BAD_PIN_ID = "Can't find %s with id of %s"
NO_PIN_ID = "You must select a %s before using it."
NO_PIN_ID = "%s requires a valid pin number"
BAD_POINTER_TYPE = '"%s" is not a type of point. Allowed values: %s'
BAD_PIN_TYPE = '"%s" is not a type of pin. Allowed values: %s'
BAD_SPEED = "Speed must be a percentage between 1-100"
PIN_TYPE_MAP = { "Peripheral" => Peripheral, "Sensor" => Sensor }
PIN_TYPE_MAP = { "Peripheral" => Peripheral,
"Sensor" => Sensor,
"BoxLed3" => BoxLed,
"BoxLed4" => BoxLed }
CANT_ANALOG = "Analog modes are not supported for Box LEDs"
ALLOWED_PIN_TYPES = PIN_TYPE_MAP.keys
KLASS_LOOKUP = Point::POINTER_KINDS.reduce({}) do |acc, val|
(acc[val] = Kernel.const_get(val)) && acc
end
@ -80,9 +96,9 @@ module CeleryScriptSettingsBag
.arg(:url, [String])
.arg(:value, [String, Integer, TrueClass, FalseClass])
.arg(:version, [Integer])
.arg(:x, [Integer])
.arg(:y, [Integer])
.arg(:z, [Integer])
.arg(:x, [Integer, Float])
.arg(:y, [Integer, Float])
.arg(:z, [Integer, Float])
.arg(:pin_id, [Integer])
.arg(:pin_type, [String]) do |node|
within(ALLOWED_PIN_TYPES, node) do |val|
@ -171,22 +187,24 @@ module CeleryScriptSettingsBag
end
end
.node(:named_pin, [:pin_type, :pin_id]) do |node|
args = HashWithIndifferentAccess.new(node.args)
x = args[:pin_type].value
klass = PIN_TYPE_MAP[x]
raise "NEVER" unless klass
id = args[:pin_id].value
node.invalidate!(NO_PIN_ID % [klass]) if (id == 0)
args = HashWithIndifferentAccess.new(node.args)
klass = PIN_TYPE_MAP.fetch(args[:pin_type].value)
id = args[:pin_id].value
node.invalidate!(NO_PIN_ID % [klass.name]) if (id == 0)
bad_node = !klass.exists?(id)
node.invalidate!(BAD_PIN_ID % [klass, id]) if bad_node
node.invalidate!(BAD_PIN_ID % [klass.name, id]) if bad_node
end
.node(:nothing, [])
.node(:tool, [:tool_id])
.node(:coordinate, [:x, :y, :z])
.node(:move_absolute, [:location, :speed, :offset])
.node(:move_relative, [:x, :y, :z, :speed])
.node(:write_pin, [:pin_number, :pin_value, :pin_mode ])
.node(:read_pin, [:pin_number, :label, :pin_mode])
.node(:write_pin, [:pin_number, :pin_value, :pin_mode ]) do |n|
no_rpi_analog(n)
end
.node(:read_pin, [:pin_number, :label, :pin_mode]) do |n|
no_rpi_analog(n)
end
.node(:channel, [:channel_name])
.node(:wait, [:milliseconds])
.node(:send_message, [:message, :message_type], [:channel])
@ -227,6 +245,7 @@ module CeleryScriptSettingsBag
.node(:parameter_declaration, [:label, :data_type], [])
.node(:set_servo_angle, [:pin_number, :pin_value], [])
.node(:change_ownership, [], [:pair])
.node(:dump_info, [], [])
.node(:install_first_party_farmware, [])
ANY_ARG_NAME = Corpus.as_json[:args].pluck("name").map(&:to_s)
@ -238,4 +257,19 @@ module CeleryScriptSettingsBag
val = node&.value
node.invalidate!(yield(val)) if !array.include?(val)
end
def self.no_rpi_analog(node)
args = HashWithIndifferentAccess.new(node.args)
pin_mode = args.fetch(:pin_mode).try(:value) || DIGITAL
pin_number = args.fetch(:pin_number)
is_analog = pin_mode == ANALOG
is_node = pin_number.is_a?(CeleryScript::AstNode)
needs_check = is_analog && is_node
if needs_check
pin_type_args = pin_number.args.with_indifferent_access
pin_type = pin_type_args.fetch(:pin_type).try(:value) || ""
node.invalidate!(CANT_ANALOG) if pin_type.include?("BoxLed")
end
end
end


@ -12,28 +12,29 @@ class Device < ApplicationRecord
"Resuming log storage."
CACHE_KEY = "devices.%s"
has_many :device_configs, dependent: :destroy
has_many :farm_events, dependent: :destroy
has_many :device_configs, dependent: :destroy
has_many :farm_events, dependent: :destroy
has_many :farmware_installations, dependent: :destroy
has_many :images, dependent: :destroy
has_many :logs, dependent: :destroy
has_many :peripherals, dependent: :destroy
has_many :pin_bindings, dependent: :destroy
has_many :plant_templates, dependent: :destroy
has_many :points, dependent: :destroy
has_many :regimens, dependent: :destroy
has_many :saved_gardens, dependent: :destroy
has_many :sensor_readings, dependent: :destroy
has_many :sensors, dependent: :destroy
has_many :sequences, dependent: :destroy
has_many :token_issuances, dependent: :destroy
has_many :tools, dependent: :destroy
has_many :webcam_feeds, dependent: :destroy
has_one :fbos_config, dependent: :destroy
has_many :images, dependent: :destroy
has_many :logs, dependent: :destroy
has_many :peripherals, dependent: :destroy
has_many :pin_bindings, dependent: :destroy
has_many :plant_templates, dependent: :destroy
has_many :points, dependent: :destroy
has_many :regimens, dependent: :destroy
has_many :saved_gardens, dependent: :destroy
has_many :sensor_readings, dependent: :destroy
has_many :sensors, dependent: :destroy
has_many :sequences, dependent: :destroy
has_many :token_issuances, dependent: :destroy
has_many :tools, dependent: :destroy
has_many :webcam_feeds, dependent: :destroy
has_many :diagnostic_dumps, dependent: :destroy
has_one :fbos_config, dependent: :destroy
has_many :in_use_tools
has_many :in_use_points
has_many :users
validates_presence_of :name
validates :timezone,
inclusion: { in: TIMEZONES, message: BAD_TZ, allow_nil: true }
@ -78,11 +79,13 @@ class Device < ApplicationRecord
points.where(pointer_type: "Plant")
end
TIMEOUT = 150.seconds
# Like Device.find, but with 150 seconds of caching to avoid DB calls.
def self.cached_find(id)
Rails
.cache
.fetch(CACHE_KEY % id, expires_in: 150.seconds) { Device.find(id) }
.fetch(CACHE_KEY % id, expires_in: TIMEOUT) { Device.find(id) }
end
def refresh_cache
@ -142,6 +145,10 @@ class Device < ApplicationRecord
tell(message, channels , type).save
end
def regimina
regimens # :(
end
# CONTEXT:
# * We tried to use Rails low level caching, but it hit marshalling issues.
# * We did a hack with Device.new(self.as_json) to get around it.


@ -0,0 +1,3 @@
class DiagnosticDump < ApplicationRecord
belongs_to :device
end


@ -44,4 +44,17 @@ class Image < ApplicationRecord
self.attachment_processed_at = Time.now
self
end
# Scenario:
# User clicks "take photo" and "delete" on Image#123 very quickly.
# Problem:
# Now there's a Delayed::Job pointing to (nonexistent) Image#123,
# causing runtime errors in the work queue.
# Solution:
# Don't retry failed deletions. Users can always click the "delete"
# button again if need be.
def self.maybe_destroy(id)
image = find_by(id: id)
image.destroy! if image
end
end


@ -1,6 +1,7 @@
# A device will emit logs when events occur on the Raspberry Pi. Logs are then
# read by clients. Logs are only created by devices.
class Log < ApplicationRecord
include LogDeliveryStuff
# We use log.type to store the log's type.
# Rails wants to use that name for single table inheritance, which we don't
# need for this table.
@ -20,11 +21,10 @@ class Log < ApplicationRecord
validates :device, presence: true
validates :type, presence: true
serialize :meta
serialize :meta
validates :meta, presence: true
# http://stackoverflow.com/a/5127684/1064917
before_validation :set_defaults
has_one :log_dispatch, dependent: :destroy
def set_defaults
self.channels ||= []


@ -0,0 +1,39 @@
# Prevents spamming a user when a malfunctioning Farmware tries to send
# 200000 emails in 4 seconds.
# Also helps group "fast" messages into a digest.
module LogDeliveryStuff
class RateLimitError < StandardError; end
WAIT_PERIOD = 30
WAIT_UNIT = :seconds
module ClassMethods
attr_accessor :max_per_hour
# If this method grows, create a mutation.
def deliver(device, log)
send_routine_emails(log, device)
send_fatal_emails(log, device)
end
def digest_wait_time
{ wait: WAIT_PERIOD.send(WAIT_UNIT) }
end
# TODO: Why must I explicitly pass `mailer_klass`? Something's not right with
# mocks.
def send_routine_emails(log, device, mailer_klass = LogDeliveryMailer)
return unless (log.channels || []).include?("email")
mailer_klass.log_digest(device).deliver_later(digest_wait_time)
end
def send_fatal_emails(log, device)
return unless (log.channels || []).include?("fatal_email")
FatalErrorMailer.fatal_error(device, log).deliver_later
end
end
def self.included(receiver)
receiver.extend(ClassMethods)
receiver.max_per_hour = 20
end
end


@ -1,42 +0,0 @@
# Prevents spamming a user when a malfunctioning Farmware tries to send
# 200000 emails in 4 seconds.
# Also helps group "fast" messages into a digest.
class LogDispatch < ApplicationRecord
class RateLimitError < StandardError; end
belongs_to :device
belongs_to :log
class_attribute :max_per_hour
self.max_per_hour = 20
WAIT_PERIOD = 30
WAIT_UNIT = :seconds
# If this method grows, create a mutation.
def self.deliver(device, log)
send_routine_emails(log, device)
send_fatal_emails(log, device)
end
def self.digest_wait_time
{ wait: WAIT_PERIOD.send(WAIT_UNIT) }
end
# TODO: Why must I explicitly pass `mailer_klass`? Something's not right with
# mocks.
def self.send_routine_emails(log, device, mailer_klass = LogDeliveryMailer)
return unless (log.channels || []).include?("email")
self.create!(device: device, log: log)
mailer_klass.log_digest(device).deliver_later(digest_wait_time)
end
def self.send_fatal_emails(log, device)
return unless (log.channels || []).include?("fatal_email")
FatalErrorMailer.fatal_error(device, log).deliver_later
end
def broadcast?
false
end
end


@ -1,8 +1,28 @@
class PinBinding < ApplicationRecord
OFF_LIMITS = [ 6, 12, 13, 17, 21, 23, 24, 25, 27 ]
BAD_PIN_NUM = \
"The following pin numbers cannot be used: %s" % OFF_LIMITS.join(", ")
belongs_to :device
belongs_to :sequence
enum special_action: { dump_info: "dump_info",
emergency_lock: "emergency_lock",
emergency_unlock: "emergency_unlock",
power_off: "power_off",
read_status: "read_status",
reboot: "reboot",
sync: "sync",
take_photo: "take_photo" }
validates :pin_num, uniqueness: { scope: :device }
def fancy_name
"pin #{pin_num}"
end
def random_pin_num
[*(0..69)]
.without(*OFF_LIMITS)
.without(*device.pin_bindings.pluck(:pin_num))
.sample
end
end


@ -2,8 +2,43 @@
# expiration date).
class TokenIssuance < ApplicationRecord
belongs_to :device
# Number of seconds Rails will wait for the API.
API_TIMEOUT = Rails.env.test? ? 0.01 : 2.5
def broadcast?
false
end
# PROBLEM:
# A token issuance was destroyed, but people are still on the broker using
# a revoked token.
#
# COMPLICATED SOLUTION:
# Track the JTI of all users and selectively boot only the users that have
# an expired JTI. This requires external caching and session storage.
#
# SIMPLE SOLUTION:
# Kick _everyone_ off the broker. The clients with the revoked token will
# not be able to reconnect.
def maybe_evict_clients
Timeout::timeout(API_TIMEOUT) do
id = "device_#{device_id}"
Transport::Mgmt.try(:close_connections_for_username, id)
end
rescue Faraday::ConnectionFailed
rescue Timeout::Error
Rollbar.error("Failed to evict clients on token revocation")
end
def self.expired
self.where("exp < ?", Time.now.to_i)
end
def self.any_expired?
expired.any?
end
def self.clean_old_tokens
expired.destroy_all
end
end


@ -1,10 +1,15 @@
require "bunny"
# A wrapper around AMQP to stay DRY. Will make life easier if we ever need to
# change protocols
class Transport
LOCAL = "amqp://guest:guest@localhost:5672"
AMQP_URL = ENV['CLOUDAMQP_URL'] || ENV['RABBITMQ_URL'] || LOCAL
OPTS = { read_timeout: 10, heartbeat: 10, log_level: 'info' }
OPTS = { read_timeout: 10, heartbeat: 10, log_level: "info" }
def self.amqp_url
@amqp_url ||= ENV['CLOUDAMQP_URL'] ||
ENV['RABBITMQ_URL'] ||
"amqp://admin:#{ENV.fetch("ADMIN_PASSWORD")}@#{ENV.fetch("MQTT_HOST")}:5672"
end
def self.default_amqp_adapter=(value)
@default_amqp_adapter = value
@ -25,14 +30,22 @@ class Transport
end
def connection
@connection ||= Transport.default_amqp_adapter.new(AMQP_URL, OPTS).start
@connection ||= Transport
.default_amqp_adapter.new(Transport.amqp_url, OPTS).start
end
def log_channel
@log_channel ||= self.connection
.create_channel
.queue("", exclusive: true)
.bind("amq.topic", routing_key: "bot.*.logs")
@log_channel ||= self.connection
.create_channel
.queue("api_log_workers")
.bind("amq.topic", routing_key: "bot.*.logs")
end
def resource_channel
@resource_channel ||= self.connection
.create_channel
.queue("resource_workers")
.bind("amq.topic", routing_key: "bot.*.resources_v0.#")
end
def amqp_topic
@ -43,7 +56,10 @@ class Transport
end
def amqp_send(message, id, channel)
amqp_topic.publish(message, routing_key: "bot.device_#{id}.#{channel}")
raise "BAD `id`" unless id.is_a?(String) || id.is_a?(Integer)
routing_key = "bot.device_#{id}.#{channel}"
puts message if Rails.env.production?
amqp_topic.publish(message, routing_key: routing_key)
end
# We need to hoist the Rack X-Farmbot-Rpc-Id to a global state so that it can
@ -55,4 +71,46 @@ class Transport
def set_current_request_id(uuid)
RequestStore.store[:current_request_id] = uuid
end
end
module Mgmt
require "rabbitmq/http/client"
def self.username
@username ||= URI(Transport.amqp_url).user || "admin"
end
def self.password
@password ||= URI(Transport.amqp_url).password
end
def self.api_url
uri = URI(Transport.amqp_url)
uri.scheme = ENV["FORCE_SSL"] ? "https" : "http"
uri.user = nil
uri.port = 15672
uri.to_s
end
def self.client
@client ||= RabbitMQ::HTTP::Client.new(ENV["RABBIT_MGMT_URL"] || api_url,
username: self.username,
password: self.password)
end
def self.connections
client.list_connections
end
def self.find_connection_by_name(name)
connections
.select { |x| x.fetch("user").include?(name) }
.pluck("name")
.compact
.uniq
end
def self.close_connections_for_username(name)
find_connection_by_name(name).map { |connec| client.close_connection(connec) }
end
end # Mgmt
end # Transport


@ -38,4 +38,15 @@ class User < ApplicationRecord
def verified?
SKIP_EMAIL_VALIDATION ? true : !!confirmed_at
end
def self.admin_user
@admin_user ||= self.find_or_create_by(email: "admin@admin.com") do |u|
u.name = "Administrator"
u.password = ENV.fetch("ADMIN_PASSWORD")
u.password_confirmation = ENV.fetch("ADMIN_PASSWORD")
u.confirmed_at = Time.now
u.agreed_to_terms_at = Time.now
u.device_id = Devices::Create.run!(user: u).id
end
end
end


@ -0,0 +1,48 @@
# I heard you like mutations. So we made a mutation class that creates mutations
# so you can mutate while you mutate.
# This class will create a "base case" `::Destroy` mutation. Very useful when
# you don't have special logic in your ::Destroy class and just need a base case
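# For example, `Destroy = CreateDestroyer.run!(resource: Tool)` builds a
# command that is later invoked as `Destroy.run!(device: device, tool: tool)`.
# This is the pattern used by the `Destroy` modules in app/mutations below.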
class CreateDestroyer < Mutations::Command
BAD_OWNERSHIP = "You do not own that %s"
required { duck :resource }
def execute
klass = Class.new(Mutations::Command)
klass.instance_variable_set("@resource", resource)
klass.class_eval do |x|
def self.resource
@resource
end
def self.resource_name
resource.model_name.singular
end
def resource_name
self.class.resource_name.to_sym
end
required do
model :device, class: Device
model x.resource_name, class: x.resource
end
def validate
not_yours unless self.send(resource_name).device == device
end
def execute
self.send(resource_name).destroy! && ""
end
def not_yours
add_error resource_name, resource_name, BAD_OWNERSHIP % resource_name
end
end
return klass
end
end


@ -1,7 +1,7 @@
module Devices
class Create < Mutations::Command
required do
model :user, class: User
model :user, class: User, new_records: true
end
optional do


@ -0,0 +1,25 @@
module DiagnosticDumps
class Create < Mutations::Command
required do
model :device, class: Device
string :fbos_version
string :fbos_commit
string :firmware_commit
string :network_interface
string :fbos_dmesg_dump
string :firmware_state
end
def execute
DiagnosticDump
.create!(device: device,
ticket_identifier: rand(36**5).to_s(36),
fbos_version: fbos_version,
fbos_commit: fbos_commit,
firmware_commit: firmware_commit,
network_interface: network_interface,
fbos_dmesg_dump: fbos_dmesg_dump,
firmware_state: firmware_state,)
end
end
end


@ -0,0 +1,3 @@
module DiagnosticDumps
Destroy = CreateDestroyer.run!(resource: DiagnosticDump)
end


@ -0,0 +1,3 @@
module FarmEvents
Destroy = CreateDestroyer.run!(resource: FarmEvent)
end


@ -0,0 +1,3 @@
module FarmwareInstallations
Destroy = CreateDestroyer.run!(resource: FarmwareInstallation)
end


@ -2,15 +2,15 @@ module Images
class Create < Mutations::Command
required do
string :attachment_url
model :device, class: Device
model :device, class: Device
end
optional do
hash :meta do
optional do
integer :x
integer :y
integer :z
float :x
float :y
float :z
string :name
end
end
@ -18,7 +18,7 @@ module Images
def execute
i = Image.create!(inputs.except(:attachment_url))
CreateAttachmentFromUrlJob.perform_later(image: i,
CreateAttachmentFromUrlJob.perform_later(image_id: i.id,
attachment_url: attachment_url)
i
end


@ -0,0 +1,3 @@
module Images
Destroy = CreateDestroyer.run!(resource: Image)
end


@ -33,19 +33,20 @@ module Logs
#
# TODO: delete the `meta` field once FBOS < v6.4.0 reach EOL.
string :type, in: Log::TYPES
integer :x
integer :y
integer :z
float :x
float :y
float :z
integer :verbosity
integer :major_version
integer :minor_version
integer :created_at
hash :meta do # This can be transitioned out soon.
string :type, in: Log::TYPES
optional do
integer :x
integer :y
integer :z
float :x
float :y
float :z
integer :verbosity
integer :major_version
integer :minor_version
@ -68,6 +69,7 @@ module Logs
@log.major_version = transitional_field(:major_version)
@log.minor_version = transitional_field(:minor_version)
@log.type = transitional_field(:type, "info")
@log.created_at = DateTime.strptime(created_at.to_s,'%s') if created_at
@log.validate!
end
@ -79,7 +81,7 @@ module Logs
private
def maybe_deliver
LogDispatch.delay.deliver(device, @log)
Log.delay.deliver(device, @log)
end
def has_bad_words


@ -0,0 +1,3 @@
module Logs
Destroy = CreateDestroyer.run!(resource: Log)
end


@ -4,12 +4,19 @@ module PinBindings
required do
model :device, class: Device
integer :sequence_id
integer :pin_num
end
optional do
integer :sequence_id
string :special_action, in: PinBinding.special_actions.values
end
def validate
validate_pin_num
validate_sequence_id
exactly_one_choice
not_both_actions
end
def execute


@ -1,8 +1,30 @@
module PinBindings
module Helpers
BAD_SEQ_ID = "Sequence ID is not valid"
MUTUAL_EXCLUSION = "Pin Bindings require exactly one sequence or special " \
"action. Please pick one."
def validate_pin_num
if pin_num && PinBinding::OFF_LIMITS.include?(pin_num)
add_error :pin_num, :pin_num, PinBinding::BAD_PIN_NUM
end
end
def false_xor_sequence_id_special_actn
add_error :sequence_id, :sequence_id, MUTUAL_EXCLUSION
end
def exactly_one_choice
false_xor_sequence_id_special_actn if !(sequence_id || special_action)
end
def not_both_actions
false_xor_sequence_id_special_actn if sequence_id && special_action
end
def validate_sequence_id
unless device.sequences.exists?(sequence_id)
add_error :sequence_id, :sequence_id, "Sequence ID is not valid"
if sequence_id && !device.sequences.exists?(sequence_id)
add_error :sequence_id, :sequence_id, BAD_SEQ_ID
end
end
end


@ -8,11 +8,14 @@ module PinBindings
end
optional do
string :special_action, in: PinBinding.special_actions.values
integer :sequence_id
integer :pin_num
end
def validate
validate_pin_num
not_both_actions
validate_sequence_id if sequence_id
end


@ -0,0 +1,3 @@
module PlantTemplates
Destroy = CreateDestroyer.run!(resource: PlantTemplate)
end


@ -7,15 +7,19 @@ module Points
required do
model :device, class: Device
array :point_ids, class: Integer
end
optional { boolean :hard_delete, default: false }
optional do
boolean :hard_delete, default: false
array :point_ids, class: Integer
model :point, class: Point
end
P = :point
S = :sequence
def validate
maybe_wrap_ids
# Collect names of sequences that still use this point.
problems = (tool_seq + point_seq)
.group_by(&:sequence_name)
@ -88,5 +92,10 @@ module Points
.where(tool_id: every_tool_id_as_json, device_id: device.id)
.to_a
end
def maybe_wrap_ids
raise "NO" unless (point || point_ids)
inputs[:point_ids] = [point.id] if point
end
end
end


@ -20,3 +20,4 @@ module Regimens
end
end
end
Regimina ||= Regimens # Lol, inflection errors


@ -17,3 +17,5 @@ module Regimens
end
end
end
Regimina ||= Regimens # Lol, inflection errors


@ -32,3 +32,4 @@ module Regimens
end
end
end
Regimina ||= Regimens # Lol, inflection errors


@ -0,0 +1,3 @@
module SavedGardens
Destroy = CreateDestroyer.run!(resource: SavedGarden)
end


@ -9,6 +9,12 @@ module SensorReadings
integer :pin
end
optional do
integer :mode,
in: CeleryScriptSettingsBag::ALLOWED_PIN_MODES,
default: CeleryScriptSettingsBag::DIGITAL
end
def execute
SensorReading.create!(inputs)
end


@ -0,0 +1,3 @@
module SensorReadings
Destroy = CreateDestroyer.run!(resource: SensorReading)
end


@ -1,5 +1,5 @@
module Sequences
class Delete < Mutations::Command
class Destroy < Mutations::Command
IN_USE = "Sequence is still in use by"
THE_FOLLOWING = " the following %{resource}: %{items}"
AND = " and"


@ -5,9 +5,9 @@ module ToolSlots
required do
model :device, class: Device
string :name, default: "Untitled Slot"
integer :x
integer :y
integer :z
float :x
float :y
float :z
end
optional do


@ -19,7 +19,7 @@ private
def confirm_password
invalid = !user.valid_password?(password)
add_error :password, :*, BAD_PASSWORD if invalid
add_error :password, :*, BAD_PASSWORD if invalid
end
end
end


@ -49,6 +49,7 @@ private
.where
.not(jti: (RequestStore[:jwt] || {})[:jti])
.destroy_all
CleanOutOldDbItemsJob.perform_later
end
# Send a `factory_reset` RPC over AMQP/MQTT to all connected devices.


@ -0,0 +1,4 @@
module WebcamFeeds
Destroy = CreateDestroyer.run!(resource: WebcamFeed,
singular_name: "webcam_feed")
end


@ -0,0 +1,5 @@
class DiagnosticDumpSerializer < ActiveModel::Serializer
attributes :id, :device_id, :ticket_identifier, :fbos_commit, :fbos_version,
:firmware_commit, :firmware_state, :network_interface,
:fbos_dmesg_dump, :created_at, :updated_at
end


@ -0,0 +1,13 @@
class PinBindingSerializer < ActiveModel::Serializer
attributes :id, :created_at, :updated_at, :device_id, :sequence_id,
:special_action, :pin_num, :binding_type
def binding_type
object.special_action ? "special" : "standard"
end
# `sequence_id` and `special_action` are mutually exclusive.
def sequence_id
object.special_action ? nil : object.sequence_id
end
end


@ -1,3 +1,3 @@
class SensorReadingSerializer < ActiveModel::Serializer
attributes :id, :pin, :value, :x, :y, :z
attributes :id, :created_at, :mode, :pin, :value, :x, :y, :z
end


@ -2,7 +2,7 @@
var hasInclude = !!Array.prototype.includes
var user;
try {
user = (JSON.parse(localStorage.session || "{}").user);
user = {user_id: (JSON.parse(localStorage.session || "{}").user || {}).id || 0};
} catch(e) {
};
<% if ENV["ROLLBAR_ACCESS_TOKEN"] && ENV["ROLLBAR_CLIENT_TOKEN"] %>


@ -43,7 +43,7 @@ DEPS = `yarn outdated`
.map{|y| y.split }
.map{|y| "#{y[0]}@#{y[3]}"}
.sort
.reject { |x| x.include?("router") }
# puts "Making sure that type checks pass WITHOUT any upgrades"
tc_ok = type_check

batch_updates.md (new file)

@ -0,0 +1,77 @@
# Support Table
Not all resources support the experimental resource API.
|Resource | Delete | Update / Insert|
|-----------------------|---------|----------------|
| FarmEvent | :heart: | :broken_heart: |
| FarmwareInstallation | :heart: | :broken_heart: |
| Image | :heart: | :broken_heart: |
| Log | :heart: | :broken_heart: |
| Peripheral | :heart: | :broken_heart: |
| PinBinding | :heart: | :broken_heart: |
| PlantTemplate | :heart: | :broken_heart: |
| Point | :heart: | :broken_heart: |
| Regimen | :heart: | :broken_heart: |
| SavedGarden | :heart: | :broken_heart: |
| Sensor | :heart: | :broken_heart: |
| SensorReading | :heart: | :broken_heart: |
| Sequence | :heart: | :broken_heart: |
| Tool | :heart: | :broken_heart: |
| WebcamFeed | :heart: | :broken_heart: |
# Step 1: Send the Update
Send an MQTT message in the format of:
```
bot/device_<id>/resources_v0/<action>/<resource type>/<resource_id or 0>/<Transaction UUID>
```
Example 1-1:
```
bot/device_3/resources_v0/destroy/Sequence/2/123-456
```
NOTES:
* `<Transaction UUID>` can be any user-defined string. Ensure that the string is unique. We recommend using UUIDs.
* `<resource_id>` is the `.id` property of the resource you are deleting.
* `<action>` Only `destroy` is supported as of July 2018.
* `<resource type>` See the "Resource" column of the table above. **Case sensitive**.
**For deletion messages**, the body of the message is unimportant and is discarded by the server.
# Step 2(A): Handle Failure
If your message is malformed or the server was unable to complete the request, you will receive an error message on the following MQTT channel:
```
bot/device_<id>/from_api
```
The message will take the same format as RPC errors:
```
{
"kind": "rpc_error",
"args": { "label": "THE UUID YOU GAVE THE SERVER" },
"body": [
{
"kind": "explanation",
"args": { "message": "Human readable explanation message" }
}
]
}
```
# Step 2(B): Handle Success
If successful, an `rpc_ok` CeleryScript node will be streamed to the following MQTT channel:
```
bot/device_<id>/from_api
```
**This is not a JSON resource.** It is merely an indication that the server has accepted the request and processed it. The resource itself will be streamed over the `auto_sync`* channel.
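For illustration, below is a minimal sketch of the full round trip using the `mqtt` npm package (TypeScript). The broker URL, device id, token, and sequence id are placeholder assumptions, not values shipped with this repository; the topic strings follow Example 1-1 and Step 2.
```
import * as mqtt from "mqtt";

// Placeholder values -- substitute your own device id, token, and target.
const DEVICE_ID = 3;
const TOKEN = "<your API JWT>";
const UUID = "123-456"; // Any unique, user-defined string (see Step 1 notes).

// The websocket URL is an assumption; it corresponds to the MQTT_WS setting.
const client = mqtt.connect("ws://my.farm.bot/ws", {
  username: `device_${DEVICE_ID}`,
  password: TOKEN,
});

client.on("connect", () => {
  // Step 2: subscribe for the rpc_ok / rpc_error reply first.
  client.subscribe(`bot/device_${DEVICE_ID}/from_api`);
  // Step 1: delete Sequence #2. The body is discarded for deletions.
  client.publish(
    `bot/device_${DEVICE_ID}/resources_v0/destroy/Sequence/2/${UUID}`, "");
});

client.on("message", (_topic, payload) => {
  const reply = JSON.parse(payload.toString());
  if (reply.args && reply.args.label === UUID) {
    console.log(reply.kind === "rpc_ok" ? "Deleted." : JSON.stringify(reply));
    client.end();
  }
});
```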


@ -1,2 +0,0 @@
ignore:
- "app/models/transport.rb" # Too many stubs to bother testing.


@ -15,10 +15,6 @@
# SERVER WON'T WORK IF YOU FORGET TO DELETE THIS EXAMPLE TEXT BELOW.
# ADD A REAL RSA_KEY OR DELETE THIS LINE!!
RSA_KEY: "Change this! Keys look like `-----BEGIN RSA .........`"
# If you use Let's Encrypt for SSL,
# you must set this when renewing SSL.
# Otherwise, not required and CAN BE REMOVED.
ACME_SECRET: "-----"
# If your server is on a domain (eg: my-own-farmbot.com), put it here.
# DONT USE `localhost`.
# DONT USE `127.0.0.1`.
@ -59,8 +55,6 @@ HEROKU_SLUG_COMMIT: "This is set by Heroku, used by Frontend to show current ver
# Use a REAL IP ADDRESS if you are controlling real bots.
# 0.0.0.0 is only OK for software testing. Change this!
MQTT_HOST: "98.76.54.32"
# Delete this line if you are not an employee of FarmBot, Inc.
NPM_ADDON: "Used by FarmBot, Inc. to load proprietary extras, like Rollbar."
# Same as above. Can be deleted unless you are a Rollbar.IO customer.
ROLLBAR_ACCESS_TOKEN: "____"
ROLLBAR_CLIENT_TOKEN: "____"
@ -79,10 +73,6 @@ NO_EMAILS: "TRUE"
# If you are not using the standard MQTT broker (eg: you use a 3rd party
# MQTT vendor), you will need to change this line.
MQTT_WS: "ws://DELETE_OR_CHANGE_THIS_LINE/ws"
# ENV var used by FarmBot employees when building different versions of the JWT
# auth backend plugin.
# Can be deleted safely.
API_PUBLIC_KEY_PATH: "http://changeme.io/api/public_key"
# If you are using a shared RabbitMQ server and need to use a VHost other than
# "/", change this ENV var.
MQTT_VHOST: "/"
@ -97,4 +87,12 @@ EXTRA_DOMAINS: staging.farm.bot,whatever.farm.bot
RUN_CAPYBARA: "true"
# Set this to "production" in most cases.
# If you need help debugging issues, please delete this line.
RAILS_ENV: "production"
RAILS_ENV: "production"
# Every server has a superuser.
# Set this to something SECURE.
ADMIN_PASSWORD: ""
# Some hosts (e.g., FarmBot, Inc.) run the RabbitMQ management API on a
# non-standard host.
# Include the protocol! (http vs. https)
# DELETE THIS LINE if you are a self-hosted user.
RABBIT_MGMT_URL: "http://delete_this_line.com"


@ -13,6 +13,7 @@ module FarmBot
# Bullet.enable = true
# Bullet.console = true
# end
config.active_record.schema_format = :sql
config.active_job.queue_adapter = :delayed_job
config.action_dispatch.perform_deep_munge = false
I18n.enforce_available_locales = false


@ -1,7 +1,6 @@
default: &default
adapter: postgresql
encoding: unicode
database: farmbot_development
pool: 5
development:
@ -15,4 +14,3 @@ test:
production:
database: farmbot_prod
<<: *default


@ -11,7 +11,7 @@ if ENV["ROLLBAR_ACCESS_TOKEN"]
Rollbar.configure do |config|
config.access_token = ENV["ROLLBAR_ACCESS_TOKEN"]
config.enabled = Rails.env.production? ? true : false
config.person_method = "current_device"
config.person_method = "current_device_id"
config.environment = (ENV["API_HOST"] || $API_URL || ENV["ROLLBAR_ENV"] || Rails.env)
end
end


@ -1,7 +1,13 @@
FarmBot::Application.routes.draw do
namespace :api, defaults: {format: :json}, constraints: { format: "json" } do
post "/rmq/user" => "rmq_utils#user", as: "rmq_user"
post "/rmq/vhost" => "rmq_utils#vhost", as: "rmq_vhost"
post "/rmq/resource" => "rmq_utils#resource", as: "rmq_resource"
post "/rmq/topic" => "rmq_utils#topic", as: "rmq_topic"
# Standard API Resources:
{
diagnostic_dumps: [:create, :destroy, :index],
farm_events: [:create, :destroy, :index, :update],
farmware_installations: [:create, :destroy, :index],
images: [:create, :destroy, :index, :show],


@ -1,51 +0,0 @@
var StatsPlugin = require('stats-webpack-plugin');
module.exports = function () {
return {
entry: {
"app_bundle": "./webpack/entry.tsx",
"front_page": "./webpack/front_page/index.tsx",
"password_reset": "./webpack/password_reset/index.tsx",
"tos_update": "./webpack/tos_update/index.tsx"
},
// Was "eval", but that did not go well with our CSP
devtool: "cheap-module-source-map",
module: {
rules: [
{
test: [/\.scss$/, /\.css$/],
use: ["style-loader", "css-loader", "sass-loader"]
},
{
test: /\.tsx?$/,
use: "ts-loader"
},
{
test: [/\.woff$/, /\.woff2$/, /\.ttf$/],
use: "url-loader"
},
{
test: [/\.eot$/, /\.svg(\?v=\d+\.\d+\.\d+)?$/],
use: "file-loader"
}
]
},
// Allows imports without file extensions.
resolve: {
extensions: [".js", ".ts", ".tsx", ".css", ".scss", ".json"]
},
plugins: [
new StatsPlugin('manifest.json', {
// We only need assetsByChunkName
chunkModules: false,
source: false,
chunks: false,
modules: false,
assets: true
})
],
node: {
fs: "empty"
}
};
}


@ -1,27 +1,68 @@
var path = require("path");
var genConfig = require("./webpack.base");
var conf = genConfig();
var StatsPlugin = require('stats-webpack-plugin');
var host = process.env["API_HOST"] || "localhost"
var devServerPort = 3808;
const host = process.env["API_HOST"] || "localhost"
conf.mode = "development";
conf.output = {
// must match config.webpack.output_dir
path: path.join(__dirname, '..', 'public', 'webpack'),
publicPath: `//${host}:${devServerPort}/webpack/`,
filename: '[name].js'
};
conf.devServer = {
port: devServerPort,
disableHostCheck: true,
watchOptions: {
aggregateTimeout: 300,
poll: 1000
module.exports = {
mode: "none",
output: {
// must match config.webpack.output_dir
path: path.join(__dirname, '..', 'public', 'webpack'),
publicPath: `//${host}:${devServerPort}/webpack/`,
filename: '[name].js'
},
host: "0.0.0.0",
headers: { 'Access-Control-Allow-Origin': '*' }
entry: {
"app_bundle": "./webpack/entry.tsx",
"front_page": "./webpack/front_page/index.tsx",
"password_reset": "./webpack/password_reset/index.tsx",
"tos_update": "./webpack/tos_update/index.tsx"
},
devtool: "eval",
module: {
rules: [
{
test: [/\.scss$/, /\.css$/],
use: ["style-loader", "css-loader", "sass-loader"]
},
{
test: /\.tsx?$/,
use: "ts-loader"
},
{
test: [/\.woff$/, /\.woff2$/, /\.ttf$/],
use: "url-loader"
},
{
test: [/\.eot$/, /\.svg(\?v=\d+\.\d+\.\d+)?$/],
use: "file-loader"
}
]
},
// Allows imports without file extensions.
resolve: {
extensions: [".js", ".ts", ".tsx", ".css", ".scss", ".json"]
},
plugins: [
new StatsPlugin('manifest.json', {
// We only need assetsByChunkName
chunkModules: false,
source: false,
chunks: false,
modules: false,
assets: true
})
],
node: {
fs: "empty"
},
devServer: {
port: devServerPort,
disableHostCheck: true,
watchOptions: {
aggregateTimeout: 300,
poll: 1000
},
host: "0.0.0.0",
headers: { 'Access-Control-Allow-Origin': '*' }
}
};
module.exports = conf;


@ -1,48 +1,90 @@
'use strict';
global.WEBPACK_ENV = "production";
var path = require("path");
var UglifyJsPlugin = require("webpack-uglify-js-plugin");
var OptimizeCssAssetsPlugin = require("optimize-css-assets-webpack-plugin");
var webpack = require("webpack");
var StatsPlugin = require('stats-webpack-plugin');
var publicPath = '/webpack/';

var conf = {
  mode: "none",
  devtool: "source-map",
  entry: {
    "app_bundle": "./webpack/entry.tsx",
    "front_page": "./webpack/front_page/index.tsx",
    "password_reset": "./webpack/password_reset/index.tsx",
    "tos_update": "./webpack/tos_update/index.tsx"
  },
  output: {
    path: path.join(__dirname, '..', 'public', 'webpack'),
    publicPath,
    filename: '[name]-[chunkhash].js',
    chunkFilename: '[id].[name].[chunkhash].js'
  },
  module: {
    rules: [
      {
        test: [/\.scss$/, /\.css$/],
        use: ["style-loader", "css-loader", "sass-loader"]
      },
      {
        test: /\.tsx?$/,
        use: "ts-loader"
      },
      {
        test: [/\.woff$/, /\.woff2$/, /\.ttf$/],
        use: "url-loader"
      },
      {
        test: [/\.eot$/, /\.svg(\?v=\d+\.\d+\.\d+)?$/],
        use: "file-loader"
      }
    ]
  },
  // Allows imports without file extensions.
  resolve: {
    extensions: [".js", ".ts", ".tsx", ".css", ".scss", ".json"]
  },
  plugins: [
    new StatsPlugin('manifest.json', {
      // We only need assetsByChunkName
      chunkModules: false,
      source: false,
      chunks: false,
      modules: false,
      assets: true
    }),
    new OptimizeCssAssetsPlugin({
      assetNameRegExp: /\.css$/g,
      cssProcessor: require("cssnano"),
      cssProcessorOptions: { discardComments: { removeAll: true } },
      canPrint: true
    }),
    new UglifyJsPlugin({
      cacheFolder: path.resolve(__dirname, "../public/dist/cached_uglify/"),
      debug: true,
      minimize: true,
      sourceMap: true,
      screw_ie8: true,
      output: { comments: false },
      compressor: { warnings: false }
    }),
    new webpack.DefinePlugin({
      'process.env': {
        'NODE_ENV': JSON.stringify("production")
      }
    })
  ],
  node: {
    fs: "empty"
  }
};

var accessToken = process.env.ROLLBAR_ACCESS_TOKEN
if (accessToken) {
  var RollbarSourceMapPlugin = require('rollbar-sourcemap-webpack-plugin')
  var version = process.env.BUILT_AT || process.env.HEROKU_SLUG_COMMIT || "????"
  var plugin = new RollbarSourceMapPlugin({accessToken, version, publicPath})
  conf.plugins.push(plugin)
}
module.exports = conf;

View File

@ -0,0 +1,14 @@
class GetRidOfLogDispatches < ActiveRecord::Migration[5.2]
def change
drop_table :log_dispatches do |t|
t.bigint :device_id
t.bigint :log_id
t.datetime :sent_at
t.datetime :created_at, null: false
t.datetime :updated_at, null: false
end
# If we don't do this, a storm of emails will hit every user.
# Relates to the deprecation of the `LogDispatch` table. - RC, 9 JUN 18
Log.where("created_at < ?", 1.hour.ago).update_all(sent_at: 2.hours.ago)
end
end
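The sent_at backfill in the last line matters because, presumably, whatever replaced LogDispatch treats a nil sent_at as "never delivered"; without the backfill, every historical row would qualify at once. A hypothetical sketch of the sweep being defused (LogMailer and the query are assumptions, not shown in this diff):

# Hypothetical: with sent_at pre-populated, old logs no longer match this.
Log.where(sent_at: nil).find_each { |log| LogMailer.log_digest(log).deliver_later }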

View File

@ -0,0 +1,16 @@
class CreateDiagnosticDumps < ActiveRecord::Migration[5.2]
def change
create_table :diagnostic_dumps do |t|
t.references :device, foreign_key: true, null: false
t.string :ticket_identifier, null: false, unique: true
t.string :fbos_commit, null: false
t.string :fbos_version, null: false
t.string :firmware_commit, null: false
t.string :firmware_state, null: false
t.string :network_interface, null: false
t.text :fbos_dmesg_dump, null: false
t.timestamps
end
end
end

View File

@ -0,0 +1,19 @@
class AddSpecialActionToPinBinding < ActiveRecord::Migration[5.2]
def up
execute <<-SQL
CREATE TYPE special_action AS
ENUM ('dump_info', 'emergency_lock', 'emergency_unlock', 'power_off',
'read_status', 'reboot', 'sync', 'take_photo');
SQL
add_column :pin_bindings, :special_action, :special_action, index: true
end
def down
remove_column :pin_bindings, :special_action
execute <<-SQL
DROP TYPE special_action;
SQL
end
end
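For illustration, the new column can then be set from Ruby like any other attribute, as long as the value matches one of the ENUM labels created above (the device and pin number below are invented):

# Hypothetical usage: bind physical pin 16 to the e-stop action
# instead of pointing it at a sequence_id.
PinBinding.create!(device: device, pin_num: 16, special_action: "emergency_lock")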

View File

@ -0,0 +1,11 @@
class ChangeLogColumnsToFloats < ActiveRecord::Migration[5.2]
ALL = [ :x, :y, :z ]
def up
ALL.map { |ax| change_column :logs, ax, :float }
end
def down
ALL.map { |ax| change_column :logs, ax, :integer }
end
end

View File

@ -0,0 +1,9 @@
class AddMoreStuffToDataDumps < ActiveRecord::Migration[5.2]
def change
add_column :diagnostic_dumps, :sync_status, :string, limit: 12
add_column :diagnostic_dumps, :wifi_level, :string, limit: 12
add_column :diagnostic_dumps, :soc_temp, :string, limit: 12
add_column :diagnostic_dumps, :firmware_hardware, :string, limit: 12
add_column :diagnostic_dumps, :firmware_version, :string, limit: 12
end
end

View File

@ -0,0 +1,13 @@
class FirmwareConfigFloatConversion < ActiveRecord::Migration[5.2]
def up
change_column :firmware_configs, :movement_step_per_mm_x, :float
change_column :firmware_configs, :movement_step_per_mm_y, :float
change_column :firmware_configs, :movement_step_per_mm_z, :float
end
def down
change_column :firmware_configs, :movement_step_per_mm_x, :integer
change_column :firmware_configs, :movement_step_per_mm_y, :integer
change_column :firmware_configs, :movement_step_per_mm_z, :integer
end
end
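Presumably the motivation mirrors the logs x/y/z change above: steps-per-mm is a calibration value that is not always a whole number, and an integer column silently truncates it. A hypothetical before/after:

# Before this migration, 5.5 steps/mm was stored as 5; afterwards the
# fractional calibration survives the round trip.
FirmwareConfig.first.update!(movement_step_per_mm_x: 5.5)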

View File

@ -1,549 +0,0 @@
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2018_06_06_131907) do
# These are extensions that must be enabled in order to support this database
enable_extension "hstore"
enable_extension "plpgsql"
create_table "delayed_jobs", id: :serial, force: :cascade do |t|
t.integer "priority", default: 0, null: false
t.integer "attempts", default: 0, null: false
t.text "handler", null: false
t.text "last_error"
t.datetime "run_at"
t.datetime "locked_at"
t.datetime "failed_at"
t.string "locked_by"
t.string "queue"
t.datetime "created_at"
t.datetime "updated_at"
t.index ["priority", "run_at"], name: "delayed_jobs_priority"
end
create_table "device_configs", force: :cascade do |t|
t.bigint "device_id"
t.string "key", limit: 100
t.string "value", limit: 300
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["device_id"], name: "index_device_configs_on_device_id"
end
create_table "devices", id: :serial, force: :cascade do |t|
t.string "name", default: "Farmbot"
t.integer "max_log_count", default: 100
t.integer "max_images_count", default: 100
t.string "timezone", limit: 280
t.datetime "last_saw_api"
t.datetime "last_saw_mq"
t.string "fbos_version", limit: 15
t.datetime "throttled_until"
t.datetime "throttled_at"
t.index ["timezone"], name: "index_devices_on_timezone"
end
create_table "edge_nodes", force: :cascade do |t|
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.bigint "sequence_id", null: false
t.bigint "primary_node_id", null: false
t.string "kind", limit: 50
t.string "value", limit: 300
t.index ["kind", "value"], name: "index_edge_nodes_on_kind_and_value"
t.index ["primary_node_id"], name: "index_edge_nodes_on_primary_node_id"
t.index ["sequence_id"], name: "index_edge_nodes_on_sequence_id"
end
create_table "farm_events", id: :serial, force: :cascade do |t|
t.integer "device_id"
t.datetime "start_time"
t.datetime "end_time"
t.integer "repeat"
t.string "time_unit"
t.string "executable_type", limit: 280
t.integer "executable_id"
t.index ["device_id"], name: "index_farm_events_on_device_id"
t.index ["end_time"], name: "index_farm_events_on_end_time"
t.index ["executable_type", "executable_id"], name: "index_farm_events_on_executable_type_and_executable_id"
end
create_table "farmware_installations", force: :cascade do |t|
t.bigint "device_id"
t.string "url"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["device_id"], name: "index_farmware_installations_on_device_id"
end
create_table "fbos_configs", force: :cascade do |t|
t.bigint "device_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.boolean "auto_sync", default: false
t.boolean "beta_opt_in", default: false
t.boolean "disable_factory_reset", default: false
t.boolean "firmware_input_log", default: false
t.boolean "firmware_output_log", default: false
t.boolean "sequence_body_log", default: false
t.boolean "sequence_complete_log", default: false
t.boolean "sequence_init_log", default: false
t.integer "network_not_found_timer"
t.string "firmware_hardware", default: "arduino"
t.boolean "api_migrated", default: false
t.boolean "os_auto_update", default: true
t.boolean "arduino_debug_messages", default: false
t.index ["device_id"], name: "index_fbos_configs_on_device_id"
end
create_table "firmware_configs", force: :cascade do |t|
t.bigint "device_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.integer "encoder_enabled_x", default: 0
t.integer "encoder_enabled_y", default: 0
t.integer "encoder_enabled_z", default: 0
t.integer "encoder_invert_x", default: 0
t.integer "encoder_invert_y", default: 0
t.integer "encoder_invert_z", default: 0
t.integer "encoder_missed_steps_decay_x", default: 5
t.integer "encoder_missed_steps_decay_y", default: 5
t.integer "encoder_missed_steps_decay_z", default: 5
t.integer "encoder_missed_steps_max_x", default: 5
t.integer "encoder_missed_steps_max_y", default: 5
t.integer "encoder_missed_steps_max_z", default: 5
t.integer "encoder_scaling_x", default: 5556
t.integer "encoder_scaling_y", default: 5556
t.integer "encoder_scaling_z", default: 5556
t.integer "encoder_type_x", default: 0
t.integer "encoder_type_y", default: 0
t.integer "encoder_type_z", default: 0
t.integer "encoder_use_for_pos_x", default: 0
t.integer "encoder_use_for_pos_y", default: 0
t.integer "encoder_use_for_pos_z", default: 0
t.integer "movement_axis_nr_steps_x", default: 0
t.integer "movement_axis_nr_steps_y", default: 0
t.integer "movement_axis_nr_steps_z", default: 0
t.integer "movement_enable_endpoints_x", default: 0
t.integer "movement_enable_endpoints_y", default: 0
t.integer "movement_enable_endpoints_z", default: 0
t.integer "movement_home_at_boot_x", default: 0
t.integer "movement_home_at_boot_y", default: 0
t.integer "movement_home_at_boot_z", default: 0
t.integer "movement_home_spd_x", default: 50
t.integer "movement_home_spd_y", default: 50
t.integer "movement_home_spd_z", default: 50
t.integer "movement_home_up_x", default: 0
t.integer "movement_home_up_y", default: 0
t.integer "movement_home_up_z", default: 1
t.integer "movement_invert_endpoints_x", default: 0
t.integer "movement_invert_endpoints_y", default: 0
t.integer "movement_invert_endpoints_z", default: 0
t.integer "movement_invert_motor_x", default: 0
t.integer "movement_invert_motor_y", default: 0
t.integer "movement_invert_motor_z", default: 0
t.integer "movement_keep_active_x", default: 0
t.integer "movement_keep_active_y", default: 0
t.integer "movement_keep_active_z", default: 1
t.integer "movement_max_spd_x", default: 400
t.integer "movement_max_spd_y", default: 400
t.integer "movement_max_spd_z", default: 400
t.integer "movement_min_spd_x", default: 50
t.integer "movement_min_spd_y", default: 50
t.integer "movement_min_spd_z", default: 50
t.integer "movement_secondary_motor_invert_x", default: 1
t.integer "movement_secondary_motor_x", default: 1
t.integer "movement_step_per_mm_x", default: 5
t.integer "movement_step_per_mm_y", default: 5
t.integer "movement_step_per_mm_z", default: 25
t.integer "movement_steps_acc_dec_x", default: 300
t.integer "movement_steps_acc_dec_y", default: 300
t.integer "movement_steps_acc_dec_z", default: 300
t.integer "movement_stop_at_home_x", default: 0
t.integer "movement_stop_at_home_y", default: 0
t.integer "movement_stop_at_home_z", default: 0
t.integer "movement_stop_at_max_x", default: 0
t.integer "movement_stop_at_max_y", default: 0
t.integer "movement_stop_at_max_z", default: 0
t.integer "movement_timeout_x", default: 120
t.integer "movement_timeout_y", default: 120
t.integer "movement_timeout_z", default: 120
t.integer "param_config_ok", default: 0
t.integer "param_e_stop_on_mov_err", default: 0
t.integer "param_mov_nr_retry", default: 3
t.integer "param_test", default: 0
t.integer "param_use_eeprom", default: 1
t.integer "param_version", default: 1
t.integer "pin_guard_1_active_state", default: 1
t.integer "pin_guard_1_pin_nr", default: 0
t.integer "pin_guard_1_time_out", default: 60
t.integer "pin_guard_2_active_state", default: 1
t.integer "pin_guard_2_pin_nr", default: 0
t.integer "pin_guard_2_time_out", default: 60
t.integer "pin_guard_3_active_state", default: 1
t.integer "pin_guard_3_pin_nr", default: 0
t.integer "pin_guard_3_time_out", default: 60
t.integer "pin_guard_4_active_state", default: 1
t.integer "pin_guard_4_pin_nr", default: 0
t.integer "pin_guard_4_time_out", default: 60
t.integer "pin_guard_5_active_state", default: 1
t.integer "pin_guard_5_pin_nr", default: 0
t.integer "pin_guard_5_time_out", default: 60
t.boolean "api_migrated", default: false
t.integer "movement_invert_2_endpoints_x", default: 0
t.integer "movement_invert_2_endpoints_y", default: 0
t.integer "movement_invert_2_endpoints_z", default: 0
t.index ["device_id"], name: "index_firmware_configs_on_device_id"
end
create_table "global_configs", force: :cascade do |t|
t.string "key"
t.text "value"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["key"], name: "index_global_configs_on_key"
end
create_table "images", id: :serial, force: :cascade do |t|
t.integer "device_id"
t.text "meta"
t.datetime "attachment_processed_at"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.string "attachment_file_name"
t.string "attachment_content_type"
t.integer "attachment_file_size"
t.datetime "attachment_updated_at"
t.index ["device_id"], name: "index_images_on_device_id"
end
create_table "log_dispatches", force: :cascade do |t|
t.bigint "device_id"
t.bigint "log_id"
t.datetime "sent_at"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["device_id"], name: "index_log_dispatches_on_device_id"
t.index ["log_id"], name: "index_log_dispatches_on_log_id"
t.index ["sent_at"], name: "index_log_dispatches_on_sent_at"
end
create_table "logs", id: :serial, force: :cascade do |t|
t.text "message"
t.text "meta"
t.string "channels", limit: 280
t.integer "device_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.string "type", limit: 10, default: "info"
t.integer "major_version"
t.integer "minor_version"
t.integer "verbosity", default: 1
t.integer "x"
t.integer "y"
t.integer "z"
t.datetime "sent_at"
t.index ["created_at"], name: "index_logs_on_created_at"
t.index ["device_id"], name: "index_logs_on_device_id"
t.index ["type"], name: "index_logs_on_type"
t.index ["verbosity"], name: "index_logs_on_verbosity"
end
create_table "peripherals", id: :serial, force: :cascade do |t|
t.integer "device_id"
t.integer "pin"
t.string "label", limit: 280
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.integer "mode", default: 0
t.index ["device_id"], name: "index_peripherals_on_device_id"
t.index ["mode"], name: "index_peripherals_on_mode"
end
create_table "pin_bindings", force: :cascade do |t|
t.bigint "device_id"
t.integer "pin_num"
t.bigint "sequence_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["device_id"], name: "index_pin_bindings_on_device_id"
t.index ["sequence_id"], name: "index_pin_bindings_on_sequence_id"
end
create_table "plant_templates", force: :cascade do |t|
t.bigint "saved_garden_id", null: false
t.bigint "device_id", null: false
t.float "radius", default: 25.0, null: false
t.float "x", null: false
t.float "y", null: false
t.float "z", default: 0.0, null: false
t.string "name", default: "untitled", null: false
t.string "openfarm_slug", limit: 280, default: "null", null: false
t.index ["device_id"], name: "index_plant_templates_on_device_id"
t.index ["saved_garden_id"], name: "index_plant_templates_on_saved_garden_id"
end
create_table "points", id: :serial, force: :cascade do |t|
t.float "radius", default: 25.0, null: false
t.float "x", null: false
t.float "y", null: false
t.float "z", default: 0.0, null: false
t.integer "device_id", null: false
t.hstore "meta"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.string "name", default: "untitled", null: false
t.string "pointer_type", limit: 280, null: false
t.datetime "planted_at"
t.string "openfarm_slug", limit: 280, default: "50", null: false
t.string "plant_stage", limit: 10, default: "planned"
t.integer "tool_id"
t.integer "pullout_direction", default: 0
t.datetime "migrated_at"
t.datetime "discarded_at"
t.index ["device_id"], name: "index_points_on_device_id"
t.index ["discarded_at"], name: "index_points_on_discarded_at"
t.index ["id", "pointer_type"], name: "index_points_on_id_and_pointer_type"
t.index ["meta"], name: "index_points_on_meta", using: :gin
t.index ["tool_id"], name: "index_points_on_tool_id"
end
create_table "primary_nodes", force: :cascade do |t|
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.bigint "sequence_id", null: false
t.string "kind", limit: 50
t.bigint "child_id"
t.bigint "parent_id"
t.string "parent_arg_name", limit: 50
t.bigint "next_id"
t.bigint "body_id"
t.string "comment", limit: 240
t.index ["body_id"], name: "index_primary_nodes_on_body_id"
t.index ["child_id"], name: "index_primary_nodes_on_child_id"
t.index ["next_id"], name: "index_primary_nodes_on_next_id"
t.index ["parent_id"], name: "index_primary_nodes_on_parent_id"
t.index ["sequence_id"], name: "index_primary_nodes_on_sequence_id"
end
create_table "regimen_items", id: :serial, force: :cascade do |t|
t.bigint "time_offset"
t.integer "regimen_id"
t.integer "sequence_id"
t.index ["regimen_id"], name: "index_regimen_items_on_regimen_id"
t.index ["sequence_id"], name: "index_regimen_items_on_sequence_id"
end
create_table "regimens", id: :serial, force: :cascade do |t|
t.string "color"
t.string "name", limit: 280
t.integer "device_id"
t.index ["device_id"], name: "index_regimens_on_device_id"
end
create_table "saved_gardens", force: :cascade do |t|
t.string "name", null: false
t.bigint "device_id", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["device_id"], name: "index_saved_gardens_on_device_id"
end
create_table "sensor_readings", force: :cascade do |t|
t.bigint "device_id"
t.float "x"
t.float "y"
t.float "z"
t.integer "value"
t.integer "pin"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.integer "mode", default: 0
t.index ["device_id"], name: "index_sensor_readings_on_device_id"
end
create_table "sensors", force: :cascade do |t|
t.bigint "device_id"
t.integer "pin"
t.string "label"
t.integer "mode"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["device_id"], name: "index_sensors_on_device_id"
end
create_table "sequences", id: :serial, force: :cascade do |t|
t.integer "device_id"
t.string "name", null: false
t.string "color"
t.string "kind", limit: 280, default: "sequence"
t.datetime "updated_at"
t.datetime "created_at"
t.boolean "migrated_nodes", default: false
t.index ["created_at"], name: "index_sequences_on_created_at"
t.index ["device_id"], name: "index_sequences_on_device_id"
end
create_table "token_issuances", force: :cascade do |t|
t.bigint "device_id", null: false
t.integer "exp", null: false
t.string "jti", limit: 45, null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["device_id"], name: "index_token_issuances_on_device_id"
t.index ["exp"], name: "index_token_issuances_on_exp"
t.index ["jti", "device_id"], name: "index_token_issuances_on_jti_and_device_id"
end
create_table "tools", id: :serial, force: :cascade do |t|
t.string "name", limit: 280
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.integer "device_id"
t.index ["device_id"], name: "index_tools_on_device_id"
end
create_table "users", id: :serial, force: :cascade do |t|
t.integer "device_id"
t.string "name"
t.string "email", limit: 280, default: "", null: false
t.string "encrypted_password", default: "", null: false
t.integer "sign_in_count", default: 0, null: false
t.datetime "current_sign_in_at"
t.datetime "last_sign_in_at"
t.string "current_sign_in_ip"
t.string "last_sign_in_ip"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "confirmed_at"
t.string "confirmation_token"
t.datetime "agreed_to_terms_at"
t.datetime "confirmation_sent_at"
t.string "unconfirmed_email"
t.index ["agreed_to_terms_at"], name: "index_users_on_agreed_to_terms_at"
t.index ["confirmation_token"], name: "index_users_on_confirmation_token"
t.index ["device_id"], name: "index_users_on_device_id"
t.index ["email"], name: "index_users_on_email", unique: true
end
create_table "web_app_configs", force: :cascade do |t|
t.bigint "device_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.boolean "confirm_step_deletion", default: false
t.boolean "disable_animations", default: false
t.boolean "disable_i18n", default: false
t.boolean "display_trail", default: false
t.boolean "dynamic_map", default: false
t.boolean "encoder_figure", default: false
t.boolean "hide_webcam_widget", default: false
t.boolean "legend_menu_open", default: false
t.boolean "map_xl", default: false
t.boolean "raw_encoders", default: false
t.boolean "scaled_encoders", default: false
t.boolean "show_spread", default: false
t.boolean "show_farmbot", default: true
t.boolean "show_plants", default: true
t.boolean "show_points", default: true
t.boolean "x_axis_inverted", default: false
t.boolean "y_axis_inverted", default: false
t.boolean "z_axis_inverted", default: false
t.integer "bot_origin_quadrant", default: 2
t.integer "zoom_level", default: 1
t.integer "success_log", default: 1
t.integer "busy_log", default: 1
t.integer "warn_log", default: 1
t.integer "error_log", default: 1
t.integer "info_log", default: 1
t.integer "fun_log", default: 1
t.integer "debug_log", default: 1
t.boolean "stub_config", default: false
t.boolean "show_first_party_farmware", default: false
t.boolean "enable_browser_speak", default: false
t.boolean "show_images", default: false
t.string "photo_filter_begin"
t.string "photo_filter_end"
t.boolean "discard_unsaved", default: false
t.boolean "xy_swap", default: false
t.boolean "home_button_homing", default: false
t.index ["device_id"], name: "index_web_app_configs_on_device_id"
end
create_table "webcam_feeds", force: :cascade do |t|
t.bigint "device_id"
t.string "url"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.string "name", limit: 80, default: "Webcam Feed"
t.index ["device_id"], name: "index_webcam_feeds_on_device_id"
end
add_foreign_key "device_configs", "devices"
add_foreign_key "edge_nodes", "sequences"
add_foreign_key "farmware_installations", "devices"
add_foreign_key "log_dispatches", "devices"
add_foreign_key "log_dispatches", "logs"
add_foreign_key "peripherals", "devices"
add_foreign_key "pin_bindings", "devices"
add_foreign_key "pin_bindings", "sequences"
add_foreign_key "points", "devices"
add_foreign_key "points", "tools"
add_foreign_key "primary_nodes", "sequences"
add_foreign_key "sensor_readings", "devices"
add_foreign_key "sensors", "devices"
add_foreign_key "token_issuances", "devices"
create_view "in_use_tools", sql_definition: <<-SQL
SELECT tools.id AS tool_id,
tools.name AS tool_name,
sequences.name AS sequence_name,
sequences.id AS sequence_id,
sequences.device_id
FROM ((edge_nodes
JOIN sequences ON ((edge_nodes.sequence_id = sequences.id)))
JOIN tools ON (((edge_nodes.value)::integer = tools.id)))
WHERE ((edge_nodes.kind)::text = 'tool_id'::text);
SQL
create_view "sequence_usage_reports", sql_definition: <<-SQL
SELECT sequences.id AS sequence_id,
( SELECT count(*) AS count
FROM edge_nodes
WHERE (((edge_nodes.kind)::text = 'sequence_id'::text) AND ((edge_nodes.value)::integer = sequences.id))) AS edge_node_count,
( SELECT count(*) AS count
FROM farm_events
WHERE ((farm_events.executable_id = sequences.id) AND ((farm_events.executable_type)::text = 'Sequence'::text))) AS farm_event_count,
( SELECT count(*) AS count
FROM regimen_items
WHERE (regimen_items.sequence_id = sequences.id)) AS regimen_items_count
FROM sequences;
SQL
create_view "in_use_points", sql_definition: <<-SQL
SELECT points.x,
points.y,
points.z,
sequences.id AS sequence_id,
edge_nodes.id AS edge_node_id,
points.device_id,
(edge_nodes.value)::integer AS point_id,
points.pointer_type,
points.name AS pointer_name,
sequences.name AS sequence_name
FROM ((edge_nodes
JOIN sequences ON ((edge_nodes.sequence_id = sequences.id)))
JOIN points ON (((edge_nodes.value)::integer = points.id)))
WHERE ((edge_nodes.kind)::text = 'pointer_id'::text);
SQL
end

View File

@ -6,7 +6,6 @@ unless Rails.env == "production"
ENV['MQTT_HOST'] = "blooper.io"
ENV['OS_UPDATE_SERVER'] = "http://non_legacy_update_url.com"
Log.destroy_all
TokenIssuance.destroy_all
PinBinding.destroy_all
@ -21,23 +20,17 @@ unless Rails.env == "production"
User.destroy_all
PlantTemplate.destroy_all
SavedGarden.destroy_all
Users::Create.run!(name: "Administrator",
email: "farmbot@farmbot.io",
User.admin_user
Users::Create.run!(name: "Test",
email: "test@test.com",
password: "password123",
password_confirmation: "password123",
agree_to_terms: true)
signed_tos = User.last
signed_tos.agreed_to_terms_at = nil
signed_tos.confirmed_at = Time.now
signed_tos.save(validate: false)
Users::Create.run!(name: "Administrator",
email: "admin@admin.com",
password: "password123",
password_confirmation: "password123",
agree_to_terms: true)
confirmed_at: Time.now,
agreed_to_terms_at: Time.now)
User.all.update_all(confirmed_at: Time.now,
agreed_to_terms_at: Time.now)
u = User.last
u.update_attributes(confirmed_at: Time.now)
u.update_attributes(device: Devices::Create.run!(user: u))
Log.transaction do
FactoryBot.create_list(:log, 35, device: u.device)
end
@ -117,8 +110,6 @@ unless Rails.env == "production"
y: 10,
z: 10)
d = u.device
Sensors::Create
.run!(device: d, pin: 14, label: "Stub sensor", mode: 0)
end

db/structure.sql (new file, 2277 lines, mode 100644)

File diff suppressed because it is too large.
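The new db/structure.sql replaces the schema.rb deleted above, presumably because a Ruby schema dump cannot express the Postgres ENUM created in AddSpecialActionToPinBinding. That switch normally comes with a one-line setting (assumed here; the actual line is outside this diff):

# config/application.rb (assumption - not visible in this diff)
config.active_record.schema_format = :sql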

View File

@ -3,7 +3,8 @@ class CorpusEmitter
class CSArg
TRANSLATIONS = {"integer" => "number",
"string" => "string" }
"string" => "string",
"float" => "number" }
attr_reader :name, :allowed_values
def initialize(name:, allowed_values:)
@ -15,7 +16,11 @@ class CorpusEmitter
end
def values
allowed_values
.map { |v| TRANSLATIONS[v] || v.camelize }
.uniq
.sort
.join(PIPE)
end
def to_ts
@ -83,9 +88,11 @@ class CorpusEmitter
end
end
HASH = JSON.load(open("http://localhost:3000/api/corpuses/3")).deep_symbolize_keys
HASH = JSON.load(open("http://localhost:3000/api/corpus")).deep_symbolize_keys
ARGS = {}
HASH[:args]
.map { |x| CSArg.new(x) }
.each { |x| ARGS[x.name] = x }
NODES = HASH[:nodes].map { |x| CSNode.new(x) }
def const(key, val)

View File

@ -1,15 +0,0 @@
require_relative "./log_service_support"
begin
# Listen to all logs on the message broker and store them in the database.
Transport
.current
.log_channel
.subscribe(block: true) do |info, _, payl|
LogService.process(info, payl)
end
rescue StandardError => e
puts "MQTT Broker is unreachable. Waiting 5 seconds..."
sleep 5
retry
end

View File

@ -0,0 +1,10 @@
require_relative "../app/lib/service_runner_base.rb"
begin
ServiceRunner.go!(Transport.current.log_channel, LogService)
# :nocov:
rescue
sleep 3
retry
end
# :nocov:
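ServiceRunner.go! lives in app/lib/service_runner_base.rb, which this diff does not show; presumably it wraps the same subscribe loop the deleted runner above spelled out inline, plus the shared retry handling. A rough sketch under that assumption:

# Assumed shape of ServiceRunner, inferred from the inline loop it replaces:
class ServiceRunner
  def self.go!(channel, worker)
    channel.subscribe(block: true) do |info, _properties, payload|
      worker.process(info, payload) # e.g. LogService or Resources::Service
    end
  end
end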

View File

@ -0,0 +1,13 @@
require_relative "../app/lib/service_runner_base.rb"
require_relative "../app/lib/resources.rb"
require_relative "../app/lib/resources/preprocessor.rb"
require_relative "../app/lib/resources/job.rb"
require_relative "../app/lib/resources/service.rb"
begin
ServiceRunner.go!(Transport.current.resource_channel, Resources::Service)
# :nocov:
rescue
sleep 3
retry
end
# :nocov:

View File

@ -12,9 +12,15 @@ namespace :api do
sh "PORT=3000 bundle exec foreman start --procfile=Procfile.dev"
end
desc "Run Rails _ONLY_. No Webpack."
task only: :environment do
sh "PORT=3000 bundle exec foreman start --procfile=Procfile.api_only"
end
desc "Pull the latest Farmbot API version"
task(update: :environment) { same_thing }
desc "Pull the latest Farmbot API version"
task(upgrade: :environment) { same_thing }
end
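With these additions, "rake api:only" boots Rails without the Webpack dev server (via Procfile.api_only), and "rake api:update" / "rake api:upgrade" are interchangeable aliases for the same routine (same_thing is presumably a helper defined earlier in this Rakefile, outside the hunk).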

Some files were not shown because too many files have changed in this diff.