fancybox2 0.0.1
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/MIT-LICENSE +22 -0
- data/README.md +190 -0
- data/lib/fancybox2.rb +15 -0
- data/lib/fancybox2/core_ext/array.rb +18 -0
- data/lib/fancybox2/core_ext/hash.rb +170 -0
- data/lib/fancybox2/logger/json_formatter.rb +19 -0
- data/lib/fancybox2/logger/mqtt_log_device.rb +28 -0
- data/lib/fancybox2/logger/multi.rb +127 -0
- data/lib/fancybox2/module/base.rb +426 -0
- data/lib/fancybox2/module/config.rb +7 -0
- data/lib/fancybox2/module/exceptions.rb +27 -0
- data/lib/fancybox2/utils/os.rb +38 -0
- data/lib/fancybox2/version.rb +3 -0
- metadata +83 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
|
|
1
|
+
---
|
2
|
+
SHA256:
|
3
|
+
metadata.gz: 36eebce6aa26f750e0d2974e2f7ea1a2f029174bddb6d3bb6b8a4adb4e01fd4a
|
4
|
+
data.tar.gz: 7f0d67c72d4472d950bbb581a2b2829cea9a0527bdc4377568a3d4293039ccbc
|
5
|
+
SHA512:
|
6
|
+
metadata.gz: 38a9da60393ba4308c46fca05ef6764111a872128f89cfc62bda02a998df36f64a08f41ef86eedc4108011368cd1eeaf682bbdda1f299ce16bfe2d66dfc075eb
|
7
|
+
data.tar.gz: 6600b35da0bf560f6b6f84edaa7f2307b6a49c4ae8e0c831cda9f764e0652b8f25384df236b87f096e19faba44ee86653381decef46c22d00597f65dfc807ea5
|
data/MIT-LICENSE
ADDED
@@ -0,0 +1,22 @@
|
|
1
|
+
The MIT License (MIT)
|
2
|
+
|
3
|
+
Copyright (c) 2020 Fancy Pixel S.r.l. All rights reserved.
|
4
|
+
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a
|
6
|
+
copy of this software and associated documentation files (the "Software"),
|
7
|
+
to deal in the Software without restriction, including
|
8
|
+
without limitation the rights to use, copy, modify, merge, publish,
|
9
|
+
distribute, sublicense, and/or sell copies of the Software, and to
|
10
|
+
permit persons to whom the Software is furnished to do so, subject to
|
11
|
+
the following conditions:
|
12
|
+
|
13
|
+
The above copyright notice and this permission notice shall be included
|
14
|
+
in all copies or substantial portions of the Software.
|
15
|
+
|
16
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
17
|
+
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
18
|
+
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
19
|
+
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
20
|
+
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
21
|
+
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
22
|
+
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
data/README.md
ADDED
@@ -0,0 +1,190 @@
|
|
1
|
+
<p align="center">
|
2
|
+
<img width="480" src="assets/logo.png"/>
|
3
|
+
</p>
|
4
|
+
|
5
|
+
[![Build Status](https://travis-ci.org/space-bunny/ruby_sdk.svg)](https://travis-ci.org/space-bunny/ruby_sdk)
|
6
|
+
[![Gem Version](https://badge.fury.io/rb/spacebunny.svg)](https://badge.fury.io/rb/spacebunny)
|
7
|
+
|
8
|
+
[SpaceBunny](http://spacebunny.io) is the IoT platform that makes it easy for you and your devices to send and
|
9
|
+
exchange messages with a server or even with each other. You can store the data, receive timely event notifications,
|
10
|
+
monitor live streams and remotely control your devices. Easy to use, and ready to scale at any time.
|
11
|
+
|
12
|
+
This is the source code repository for Ruby SDK.
|
13
|
+
Please feel free to contribute!
|
14
|
+
|
15
|
+
## Installation
|
16
|
+
|
17
|
+
Add this line to your application's Gemfile:
|
18
|
+
|
19
|
+
```ruby
|
20
|
+
gem 'spacebunny'
|
21
|
+
```
|
22
|
+
|
23
|
+
And then execute:
|
24
|
+
|
25
|
+
$ bundle
|
26
|
+
|
27
|
+
Or install it yourself as:
|
28
|
+
|
29
|
+
$ gem install spacebunny
|
30
|
+
|
31
|
+
After you have signed up for a [SpaceBunny](http://spacebunny.io)'s account, follow the
|
32
|
+
[Getting Started](http://getting_started_link) guide for a one minute introduction to the platform concepts
|
33
|
+
and a super rapid setup.
|
34
|
+
|
35
|
+
This SDK provides Device and LiveStream clients and currently supports the AMQP protocol.
|
36
|
+
|
37
|
+
## Device - Basic usage
|
38
|
+
|
39
|
+
Pick a device, view its configurations and copy the Device Key. Instantiate a new `Spacebunny::Device` client,
|
40
|
+
providing the Device Key:
|
41
|
+
|
42
|
+
```ruby
|
43
|
+
dev = Spacebunny::Device.new 'device_key'
|
44
|
+
```
|
45
|
+
|
46
|
+
the SDK will auto-configure, contacting [SpaceBunny APIs](http://doc.spacebunny.io/api) endpoint, retrieving the
|
47
|
+
connection configurations and required parameters. Nothing remains but to connect:
|
48
|
+
|
49
|
+
```ruby
|
50
|
+
dev.connect
|
51
|
+
```
|
52
|
+
|
53
|
+
### Publish
|
54
|
+
|
55
|
+
Ok, all set up! Let's publish some message:
|
56
|
+
|
57
|
+
```ruby
|
58
|
+
# We're assuming you have created a 'data' channel and you have enabled it for your device
|
59
|
+
|
60
|
+
# Let's publish, for instance, some JSON. Payload can be everything you want,
|
61
|
+
# SpaceBunny does not impose any constraint on format or content.
|
62
|
+
|
63
|
+
require 'json' # to convert our payload to JSON
|
64
|
+
|
65
|
+
# Publish one message every second for a minute.
|
66
|
+
60.times do
|
67
|
+
# Generate some random data
|
68
|
+
payload = { greetings: 'Hello, World!', temp: rand(20.0..25.0), foo: rand(100..200) }.to_json
|
69
|
+
|
70
|
+
# Publish
|
71
|
+
dev.publish :data, payload
|
72
|
+
|
73
|
+
# Give feedback on what has been published
|
74
|
+
puts "Published: #{payload}"
|
75
|
+
|
76
|
+
# Take a nap...
|
77
|
+
sleep 1
|
78
|
+
end
|
79
|
+
```
|
80
|
+
|
81
|
+
Let's check out that our data is really being sent by going to our web dashboard: navigate to devices, select the
|
82
|
+
device and click on 'LIVE DATA'. Select the 'data' channel from the dropdown and click **Start**.
|
83
|
+
Having published data as JSON allows SpaceBunny's web UI to parse them and visualize a nice
|
84
|
+
realtime graph: On the **Chart** tab write `temp` in the input field and press enter.
|
85
|
+
You'll see the graph of the `temp` parameter being rendered. If you want to plot more parameters,
|
86
|
+
just use a comma as separator e.g: temp, pressure, voltage
|
87
|
+
On the **Messages** tab you'll see raw messages' payloads received on this channel.
|
88
|
+
|
89
|
+
### Inbox
|
90
|
+
|
91
|
+
Waiting for and reading messages from the device's Inbox is trivial:
|
92
|
+
|
93
|
+
```ruby
|
94
|
+
dev.inbox(wait: true, ack: :auto) do |message|
|
95
|
+
puts "Received: #{message.payload}"
|
96
|
+
end
|
97
|
+
```
|
98
|
+
|
99
|
+
`wait` option (default false) causes the script to wait forever on the receive block
|
100
|
+
|
101
|
+
`ack` option can have two values: `:manual` (default) or `:auto`. When `:manual` you are responsible to ack the messages,
|
102
|
+
for instance:
|
103
|
+
|
104
|
+
```ruby
|
105
|
+
dev.inbox(wait: true, ack: :manual) do |message|
|
106
|
+
puts "Received: #{message.payload}"
|
107
|
+
# Manually ack the message
|
108
|
+
message.ack
|
109
|
+
end
|
110
|
+
```
|
111
|
+
This allows you to handle errors or other critical situations before acknowledging the message.
|
112
|
+
|
113
|
+
## Live Stream - Basic usage
|
114
|
+
|
115
|
+
For accessing a Live Stream, a Live Stream Key is required. On SpaceBunny's Web UI, go to the Streams section,
|
116
|
+
click on "Live Stream Keys" and pick or create one.
|
117
|
+
|
118
|
+
```ruby
|
119
|
+
live = Spacebunny::LiveStream.new client: 'live_stream_key_client', secret: 'live_stream_key_secret'
|
120
|
+
```
|
121
|
+
|
122
|
+
Similarly to the Device client, the SDK will auto-configure itself, contacting [SpaceBunny APIs](http://doc.spacebunny.io/api)
|
123
|
+
endpoint, retrieving the connection configurations and required parameters. Nothing remains but to connect:
|
124
|
+
|
125
|
+
```ruby
|
126
|
+
live.connect
|
127
|
+
```
|
128
|
+
|
129
|
+
### Reading live messages
|
130
|
+
|
131
|
+
Each LiveStream has its own cache that will keep always last 100 messages (FIFO, when there are more than 100 messages,
|
132
|
+
the oldest ones get discarded). If you want to consume messages in a parallel way, you should use the cache and connect
|
133
|
+
as many LiveStream clients as you need: this way messages will be equally distributed to clients.
|
134
|
+
|
135
|
+
```ruby
|
136
|
+
live.message_from_cache :some_live_stream, wait: true, ack: :auto do |message|
|
137
|
+
puts "Received from cache: #{message.payload}"
|
138
|
+
end
|
139
|
+
|
140
|
+
# An equivalent method is:
|
141
|
+
# live.message_from :some_live_stream, from_cache: true, wait: true, ack: :auto do |message|
|
142
|
+
# puts "Received from cache: #{message.payload}"
|
143
|
+
# end
|
144
|
+
```
|
145
|
+
|
146
|
+
Conversely, if you want that each client will receive a copy of each message, don't use the cache:
|
147
|
+
|
148
|
+
```ruby
|
149
|
+
live.message_from :some_live_stream, wait: true, ack: :auto do |message|
|
150
|
+
puts "Received a copy of: #{message.payload}"
|
151
|
+
end
|
152
|
+
```
|
153
|
+
|
154
|
+
Every client subscribed to the LiveStream in this way will receive a copy of the message.
|
155
|
+
|
156
|
+
## TLS
|
157
|
+
|
158
|
+
Instantiating a TLS-secured connection is trivial:
|
159
|
+
|
160
|
+
```ruby
|
161
|
+
# For a Device
|
162
|
+
|
163
|
+
dev = Spacebunny::Device.new key, tls: true
|
164
|
+
|
165
|
+
# Similarly, for a Live Stream
|
166
|
+
|
167
|
+
live = Spacebunny::LiveStream.new client, secret, tls: true
|
168
|
+
```
|
169
|
+
|
170
|
+
## More examples and options
|
171
|
+
|
172
|
+
Take a look at the ```examples``` directory for more code samples and further details about available options.
|
173
|
+
|
174
|
+
|
175
|
+
### Contributing
|
176
|
+
|
177
|
+
Bug reports and pull requests are welcome on GitHub at https://github.com/FancyPixel/spacebunny_ruby.
|
178
|
+
This project is intended to be a safe, welcoming space for collaboration, and contributors are expected to adhere
|
179
|
+
to the [Contributor Covenant](contributor-covenant.org) code of conduct.
|
180
|
+
|
181
|
+
### Development
|
182
|
+
|
183
|
+
After checking out the repo, run `bin/setup` to install dependencies. Then, run `bundle exec rspec` to run the tests.
|
184
|
+
You can also run `bin/console` for an interactive prompt that will allow you to experiment.
|
185
|
+
|
186
|
+
To install this gem onto your local machine, run `bundle exec rake install`.
|
187
|
+
|
188
|
+
### License
|
189
|
+
|
190
|
+
The gem is available as open source under the terms of the [MIT License](http://opensource.org/licenses/MIT).
|
data/lib/fancybox2.rb
ADDED
@@ -0,0 +1,15 @@
|
|
1
|
+
# Gem entry point: sets up Zeitwerk autoloading for the Fancybox2 gem.
require 'zeitwerk'
require 'logger'

loader = Zeitwerk::Loader.for_gem
# Core extensions re-open Hash/Array and define no gem constant, so they
# must be excluded from Zeitwerk's constant-path mapping.
core_ext = "#{__dir__}/fancybox2/core_ext/"
loader.ignore core_ext
# Acronym class names Zeitwerk's default inflector would camelize wrongly.
loader.inflector.inflect 'mqtt_log_device' => 'MQTTLogDevice',
                         'json_formatter' => 'JSONFormatter'
loader.setup

# Core extensions are required eagerly: they patch core classes used by the
# rest of the gem (e.g. Array#extract_options, Hash#deep_symbolize_keys).
require "#{core_ext}/hash"
require "#{core_ext}/array"

# Root namespace of the gem; nested constants are autoloaded by Zeitwerk.
module Fancybox2
end
|
@@ -0,0 +1,18 @@
|
|
1
|
+
class Array
  # Pops and returns the trailing options hash, if any.
  #
  # Mirrors ActiveSupport's +extract_options!+: when the last element is a
  # Hash it is removed from the receiver and returned; otherwise an empty
  # Hash is returned and the receiver is left untouched.
  #
  #   def options(*args)
  #     args.extract_options
  #   end
  #
  #   options(1, 2)        # => {}
  #   options(1, 2, a: :b) # => {:a=>:b}
  def extract_options
    last.is_a?(Hash) ? pop : {}
  end
end
|
@@ -0,0 +1,170 @@
|
|
1
|
+
# :nocov:
class Hash
  # Returns a new hash whose keys have been mapped through the given block;
  # values are carried over untouched.
  #
  #   hash = { name: 'Rob', age: '28' }
  #   hash.transform_keys { |key| key.to_s.upcase }
  #   # => {"NAME"=>"Rob", "AGE"=>"28"}
  def transform_keys
    return enum_for(:transform_keys) unless block_given?
    each_key.with_object(self.class.new) do |key, mapped|
      mapped[yield(key)] = self[key]
    end
  end

  # In-place variant of +transform_keys+: converts every key through the
  # block and returns +self+.
  def transform_keys!
    return enum_for(:transform_keys!) unless block_given?
    keys.each { |key| self[yield(key)] = delete(key) }
    self
  end

  # Returns a new hash with every key converted to a String.
  #
  #   { name: 'Rob', age: '28' }.stringify_keys
  #   # => {"name"=>"Rob", "age"=>"28"}
  def stringify_keys
    transform_keys(&:to_s)
  end

  # In-place variant of +stringify_keys+.
  def stringify_keys!
    transform_keys!(&:to_s)
  end

  # Returns a new hash with every key converted to a Symbol, whenever the
  # key supports +to_sym+ (keys that don't are kept as-is).
  #
  #   { 'name' => 'Rob', 'age' => '28' }.symbolize_keys
  #   # => {:name=>"Rob", :age=>"28"}
  def symbolize_keys
    transform_keys { |key| key.to_sym rescue key }
  end

  alias_method :to_options, :symbolize_keys

  # In-place variant of +symbolize_keys+.
  def symbolize_keys!
    transform_keys! { |key| key.to_sym rescue key }
  end

  alias_method :to_options!, :symbolize_keys!

  # Ensures every key of the hash is contained in <tt>*valid_keys</tt>,
  # raising ArgumentError otherwise. Unlike HashWithIndifferentAccess,
  # string and symbol keys do NOT match each other.
  #
  #   { name: 'Rob', years: '28' }.assert_valid_keys(:name, :age) # => raises ArgumentError
  #   { name: 'Rob', age: '28' }.assert_valid_keys(:name, :age)   # => passes
  def assert_valid_keys(*valid_keys)
    valid_keys.flatten!
    each_key do |key|
      next if valid_keys.include?(key)
      raise ArgumentError, "Unknown key: #{key.inspect}. Valid keys are: #{valid_keys.map(&:inspect).join(', ')}"
    end
  end

  # Like +transform_keys+, but also recurses into nested hashes and
  # through arrays.
  #
  #   { person: { name: 'Rob' } }.deep_transform_keys { |k| k.to_s.upcase }
  #   # => {"PERSON"=>{"NAME"=>"Rob"}}
  def deep_transform_keys(&block)
    _deep_transform_keys_in_object(self, &block)
  end

  # In-place variant of +deep_transform_keys+.
  def deep_transform_keys!(&block)
    _deep_transform_keys_in_object!(self, &block)
  end

  # Recursively converts every key (nested hashes/arrays included) to a String.
  def deep_stringify_keys
    deep_transform_keys(&:to_s)
  end

  # In-place variant of +deep_stringify_keys+.
  def deep_stringify_keys!
    deep_transform_keys!(&:to_s)
  end

  # Recursively converts every key (nested hashes/arrays included) to a
  # Symbol when the key supports +to_sym+.
  def deep_symbolize_keys
    deep_transform_keys { |key| key.to_sym rescue key }
  end

  # In-place variant of +deep_symbolize_keys+.
  def deep_symbolize_keys!
    deep_transform_keys! { |key| key.to_sym rescue key }
  end

  private

  # Recursive worker: returns a key-transformed copy of +object+.
  def _deep_transform_keys_in_object(object, &block)
    if object.is_a?(Hash)
      object.each_with_object({}) do |(key, value), copy|
        copy[yield(key)] = _deep_transform_keys_in_object(value, &block)
      end
    elsif object.is_a?(Array)
      object.map { |element| _deep_transform_keys_in_object(element, &block) }
    else
      object
    end
  end

  # Recursive worker: transforms keys of +object+ in place.
  # Iterates over a snapshot of the keys since the hash is mutated mid-loop.
  def _deep_transform_keys_in_object!(object, &block)
    if object.is_a?(Hash)
      object.keys.each do |key|
        value = object.delete(key)
        object[yield(key)] = _deep_transform_keys_in_object!(value, &block)
      end
      object
    elsif object.is_a?(Array)
      object.map! { |element| _deep_transform_keys_in_object!(element, &block) }
    else
      object
    end
  end
end
|
@@ -0,0 +1,19 @@
|
|
1
|
+
require 'logger'
require 'json'

module Fancybox2
  module Logger
    # Formatter that emits one JSON object per log line, suitable for
    # machine consumption (e.g. shipping log lines over MQTT).
    class JSONFormatter < ::Logger::Formatter
      # UTC ISO8601 with millisecond precision.
      TIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%3NZ'.freeze

      # @param severity [String] log level label (e.g. 'INFO')
      # @param time [Time] event time; serialized as UTC
      # @param progname [String, nil] intentionally not serialized
      # @param msg [Object] log message
      # @return [String] JSON document terminated by a newline
      def call(severity, time, progname, msg)
        document = {
          level: severity,
          timestamp: time.utc.strftime(TIME_FORMAT),
          # progname deliberately omitted from the payload
          message: msg,
          pid: Process.pid
        }
        "#{JSON.generate(document)}\n"
      end
    end
  end
end
|
@@ -0,0 +1,28 @@
|
|
1
|
+
module Fancybox2
  module Logger
    # Log device that publishes each log line on an MQTT topic.
    # Hand an instance to ::Logger.new in place of a file/IO object.
    class MQTTLogDevice

      attr_accessor :client, :topic

      # @param topic [String] MQTT topic log lines are published on
      # @param args [Array] trailing options hash; must contain +:client+,
      #   an object responding to #publish (and #connected?, used by #write)
      # @raise [ArgumentError] when the given client can't publish
      def initialize(topic, *args)
        @topic = topic
        opts = args.extract_options.deep_symbolize_keys
        @client = opts[:client]
        unless @client.respond_to?(:publish)
          raise ArgumentError, "provided client does not respond to 'publish'"
        end
      end

      # Publish +message+ on the configured topic.
      # Messages are silently dropped while the client is disconnected.
      def write(message)
        @client.publish(@topic, message) if @client.connected?
      end

      # No-op: the client's lifecycle is owned by the caller.
      # Future: close only if client is internal
      def close(*args)
      end
    end
  end
end
|
@@ -0,0 +1,127 @@
|
|
1
|
+
module Fancybox2
  module Logger

    # Log on multiple loggers at the same time.
    #
    # Usage example:
    #
    #   file_logger = Logger.new(File.open("log/debug.log", "a"))
    #   stdout_logger = Logger.new(STDOUT)
    #   # Logs both to STDOUT and to file with 'info' log level:
    #   multi_logger = Fancybox2::Logger::Multi.new(file_logger, stdout_logger, level: :info)
    class Multi
      attr_accessor :loggers, :level, :escape_data, :progname

      # @param args [Array] one or more ::Logger instances, optionally
      #   followed by an options hash:
      #   - :level       level to force on every logger (symbol/string/int)
      #   - :escape_data when truthy, String#dump messages before formatting
      #   - :progname    progname to force on every logger
      # @raise [ArgumentError] when no logger is given or one is not a ::Logger
      def initialize(*args)
        options = args.extract_options.deep_symbolize_keys
        loggers = args
        if !loggers.is_a?(Array) || loggers.size.zero?
          raise ArgumentError.new("provide at least one logger instance")
        end

        @level = normalize_log_level(options[:level])
        @escape_data = options[:escape_data] || false
        @progname = options[:progname]

        self.loggers = loggers
        # Override loggers' levels only if explicitly required
        self.level = @level if options[:level]
        # Override loggers' formatter only if explicitly required
        self.escape_data = @escape_data if @escape_data
        self.progname = @progname if @progname

        define_methods
      end

      # Forward a log entry to every underlying logger.
      def add(level, *args)
        @loggers.each { |logger| logger.add(level, *args) }
      end
      alias log add

      def add_logger(logger)
        @loggers << logger
      end

      # Close every underlying logger.
      def close
        @loggers.map(&:close)
      end

      def default_log_level
        'info'
      end

      # When truthy, installs a formatter that escapes (String#dump) each
      # message; otherwise restores the default ::Logger::Formatter.
      def escape_data=(value)
        if value
          @loggers.each do |logger|
            escape_data_of logger
          end
        else
          @loggers.each { |logger| logger.formatter = ::Logger::Formatter.new }
        end
      end

      # Set the (normalized, integer) level on self and on every logger.
      def level=(level)
        @level = normalize_log_level(level)
        @loggers.each { |logger| logger.level = @level }
      end

      def loggers=(new_loggers)
        @loggers = []
        new_loggers.each do |logger|
          # Check if provided loggers are real Loggers
          unless logger.is_a? ::Logger
            raise ArgumentError.new("one of the provided loggers is not of class Logger, but of class '#{logger.class}'")
          end
          # Add Logger to the list
          add_logger logger
        end
      end

      def progname=(name)
        loggers.each do |logger|
          logger.progname = name
        end
      end

      private

      # Define #debug/#info/... and #debug?/#info?/... shortcuts that fan
      # out to every underlying logger.
      def define_methods
        ::Logger::Severity.constants.each do |level|
          # FIX: use a splat (*args). The previous single-parameter block
          # (|args|) made zero-arg calls like `multi.warn` raise
          # ArgumentError and broke multi-argument calls.
          define_singleton_method(level.downcase) do |*args|
            @loggers.each { |logger| logger.add(normalize_log_level(level.downcase), *args) }
          end

          define_singleton_method("#{ level.downcase }?".to_sym) do
            @level <= ::Logger::Severity.const_get(level)
          end
        end
      end

      # Wrap a logger's formatter so the message is escaped via String#dump.
      def escape_data_of(logger)
        original_formatter = ::Logger::Formatter.new
        logger.formatter = proc do |severity, datetime, progname, msg|
          original_formatter.call(severity, datetime, progname, msg.dump)
        end
      end

      ##
      # Map a symbol/string/integer log level to the corresponding
      # ::Logger integer constant; unknown values fall back to
      # #default_log_level.
      # @param [String, Symbol, Integer] log_level
      def normalize_log_level(log_level)
        case log_level
        when :unknown, ::Logger::UNKNOWN, 'unknown' then ::Logger::UNKNOWN
        when :debug, ::Logger::DEBUG, 'debug' then ::Logger::DEBUG
        when :info, ::Logger::INFO, 'info' then ::Logger::INFO
        when :warn, ::Logger::WARN, 'warn' then ::Logger::WARN
        when :error, ::Logger::ERROR, 'error' then ::Logger::ERROR
        when :fatal, ::Logger::FATAL, 'fatal' then ::Logger::FATAL
        else
          normalize_log_level(default_log_level)
        end
      end
    end
  end
end
|
@@ -0,0 +1,426 @@
|
|
1
|
+
require 'json'
|
2
|
+
require 'yaml'
|
3
|
+
require 'logger'
|
4
|
+
require 'paho-mqtt'
|
5
|
+
require 'concurrent-ruby'
|
6
|
+
|
7
|
+
module Fancybox2
|
8
|
+
module Module
|
9
|
+
class Base
|
10
|
+
|
11
|
+
attr_reader :logger, :mqtt_client, :fbxfile, :fbxfile_path, :status
|
12
|
+
attr_accessor :configs
|
13
|
+
|
14
|
+
def initialize(fbxfile_path, options = {})
  # Fbxfile path is mandatory: it must be a non-empty String.
  # FIX: the original guard chained the checks with `||`, so it never raised
  # for a valid path and crashed with NoMethodError (nil.empty?) when nil
  # was given instead of raising FbxfileNotProvided.
  unless fbxfile_path.is_a?(String) && !fbxfile_path.empty?
    raise FbxfileNotProvided
  end

  @fbxfile_path = fbxfile_path
  options.deep_symbolize_keys!
  # Tracks whether the MQTT client is built internally (vs injected)
  @internal_mqtt_client = false

  # Fbxfile may be injected via options, otherwise it is loaded from disk
  @fbxfile = check_and_return_fbxfile options.fetch(:fbxfile, load_fbx_file)
  @mqtt_client_params = options[:mqtt_client_params] || {}
  check_or_build_mqtt_client options[:mqtt_client]
  @log_level = options.fetch :log_level, ::Logger::INFO
  @log_progname = options.fetch :log_progname, 'Fancybox2::Module::Base'
  @logger = options.fetch :logger, create_default_logger
  @status = :stopped
  @alive_task = nil
end
|
32
|
+
|
33
|
+
def alive_message_data(&block)
|
34
|
+
if block_given?
|
35
|
+
@alive_message_data = block
|
36
|
+
return
|
37
|
+
end
|
38
|
+
@alive_message_data.call if @alive_message_data
|
39
|
+
end
|
40
|
+
|
41
|
+
def alive_message_data=(callback)
|
42
|
+
@alive_message_data = callback if callback.is_a?(Proc)
|
43
|
+
end
|
44
|
+
|
45
|
+
# Publish +payload+ to +dest+ on the topic derived from +action+.
# Hash and Array payloads are serialized to JSON; anything else is sent
# as-is. When the client is not connected, only an error is logged.
def message_to(dest, action = '', payload = '', retain = false, qos = 2)
  return logger.error('MQTT client not connected to broker') unless mqtt_client.connected?

  topic = topic_for dest: dest, action: action
  payload = payload.to_json if payload.is_a?(Hash) || payload.is_a?(Array)
  logger.debug "#{self.class}#message_to '#{topic}' payload: #{payload}"
  mqtt_client.publish topic, payload, retain, qos
end
|
60
|
+
|
61
|
+
def name
|
62
|
+
fbxfile[:name]
|
63
|
+
end
|
64
|
+
|
65
|
+
def on_action(action, callback = nil, &block)
|
66
|
+
topic = topic_for source: :core, action: action
|
67
|
+
mqtt_client.add_topic_callback topic do |packet|
|
68
|
+
# :nocov:
|
69
|
+
payload = packet.payload
|
70
|
+
# Try to parse payload as JSON. Rescue with original payload in case of error
|
71
|
+
packet.payload = JSON.parse(payload) rescue payload
|
72
|
+
if block_given?
|
73
|
+
block.call packet
|
74
|
+
elsif callback && callback.is_a?(Proc)
|
75
|
+
callback.call packet
|
76
|
+
end
|
77
|
+
# :nocov:
|
78
|
+
end
|
79
|
+
end
|
80
|
+
|
81
|
+
def on_configs(packet = nil, &block)
|
82
|
+
logger.debug 'on_configs'
|
83
|
+
if block_given?
|
84
|
+
@on_configs = block
|
85
|
+
return
|
86
|
+
end
|
87
|
+
@configs = begin
|
88
|
+
# Try to parse
|
89
|
+
JSON.parse packet.payload
|
90
|
+
rescue JSON::ParserError
|
91
|
+
logger.debug 'on_configs: failed parsing packet as JSON, retrying with YAML'
|
92
|
+
begin
|
93
|
+
# Try to parse YAML
|
94
|
+
YAML.load packet.payload
|
95
|
+
rescue StandardError
|
96
|
+
logger.debug 'on_configs: failed parsing packet as YAML. Falling back to raw payload'
|
97
|
+
# Fallback to original content
|
98
|
+
packet.payload
|
99
|
+
end
|
100
|
+
end
|
101
|
+
@on_configs.call(packet) if @on_configs
|
102
|
+
end
|
103
|
+
|
104
|
+
def on_configs=(callback)
|
105
|
+
@on_configs = callback if callback.is_a?(Proc)
|
106
|
+
end
|
107
|
+
|
108
|
+
def on_logger(packet = nil, &block)
|
109
|
+
if block_given?
|
110
|
+
@on_logger = block
|
111
|
+
return
|
112
|
+
end
|
113
|
+
@on_logger.call(packet) if @on_logger
|
114
|
+
logger_configs = packet.payload
|
115
|
+
logger.level = logger_configs['level'] if logger_configs['level']
|
116
|
+
end
|
117
|
+
|
118
|
+
def on_logger=(callback)
|
119
|
+
@on_logger = callback if callback.is_a?(Proc)
|
120
|
+
end
|
121
|
+
|
122
|
+
def on_restart(packet = nil, &block)
|
123
|
+
if block_given?
|
124
|
+
@on_restart = block
|
125
|
+
return
|
126
|
+
end
|
127
|
+
@on_restart.call(packet) if @on_restart
|
128
|
+
# Stop + start
|
129
|
+
on_stop
|
130
|
+
on_start packet
|
131
|
+
end
|
132
|
+
|
133
|
+
def on_restart=(callback)
|
134
|
+
@on_restart = callback if callback.is_a?(Proc)
|
135
|
+
end
|
136
|
+
|
137
|
+
def on_shutdown(do_exit = true, &block)
|
138
|
+
if block_given?
|
139
|
+
@on_shutdown = block
|
140
|
+
return
|
141
|
+
end
|
142
|
+
|
143
|
+
shutdown_ok = true
|
144
|
+
logger.debug "Received 'shutdown' command"
|
145
|
+
# Stop sending alive messages
|
146
|
+
@alive_task.shutdown if @alive_task
|
147
|
+
|
148
|
+
begin
|
149
|
+
# Call user code if any
|
150
|
+
@on_shutdown.call if @on_shutdown
|
151
|
+
rescue StandardError => e
|
152
|
+
logger.error "Error during shutdown: #{e.message}"
|
153
|
+
shutdown_ok = false
|
154
|
+
end
|
155
|
+
|
156
|
+
# Signal core that we've executed shutdown operations.
|
157
|
+
# This message is not mandatory, so keep it simple
|
158
|
+
shutdown_message = shutdown_ok ? 'ok' : 'nok'
|
159
|
+
logger.debug "Sending shutdown message to core with status '#{shutdown_message}'"
|
160
|
+
message_to :core, :shutdown, { status: shutdown_message }
|
161
|
+
sleep 0.05 # Wait some time in order to be sure that the message has been published (message is not mandatory)
|
162
|
+
|
163
|
+
if mqtt_client && mqtt_client.connected?
|
164
|
+
# Gracefully disconnect from broker and exit
|
165
|
+
logger.debug 'Disconnecting from broker'
|
166
|
+
mqtt_client.disconnect
|
167
|
+
end
|
168
|
+
|
169
|
+
if do_exit
|
170
|
+
# Exit from process
|
171
|
+
status_code = shutdown_ok ? 0 : 1
|
172
|
+
logger.debug "Exiting with status code #{status_code}"
|
173
|
+
exit status_code
|
174
|
+
end
|
175
|
+
end
|
176
|
+
|
177
|
+
def on_shutdown=(callback)
|
178
|
+
@on_shutdown = callback if callback.is_a?(Proc)
|
179
|
+
end
|
180
|
+
|
181
|
+
def on_start(packet = nil, &block)
|
182
|
+
if block_given?
|
183
|
+
@on_start = block
|
184
|
+
return
|
185
|
+
end
|
186
|
+
# Call user code
|
187
|
+
@on_start.call(packet) if @on_start
|
188
|
+
|
189
|
+
configs = packet ? packet.payload : {}
|
190
|
+
interval = configs['aliveTimeout'] || 1000
|
191
|
+
# Start code execution from scratch
|
192
|
+
logger.debug "Received 'start'"
|
193
|
+
@status = :running
|
194
|
+
start_sending_alive interval: interval
|
195
|
+
end
|
196
|
+
|
197
|
+
def on_start=(callback)
|
198
|
+
@on_start = callback if callback.is_a?(Proc)
|
199
|
+
end
|
200
|
+
|
201
|
+
def on_stop(&block)
|
202
|
+
if block_given?
|
203
|
+
@on_stop = block
|
204
|
+
return
|
205
|
+
end
|
206
|
+
@on_stop.call if @on_stop
|
207
|
+
@status = :stopped
|
208
|
+
# Stop code execution, but keep broker connection and continue to send alive
|
209
|
+
end
|
210
|
+
|
211
|
+
def on_stop=(callback)
|
212
|
+
@on_stop = callback if callback.is_a?(Proc)
|
213
|
+
end
|
214
|
+
|
215
|
+
def remove_action(action)
|
216
|
+
topic = topic_for source: :core, action: action
|
217
|
+
mqtt_client.remove_topic_callback topic
|
218
|
+
end
|
219
|
+
|
220
|
+
def shutdown(do_exit = true)
|
221
|
+
on_shutdown do_exit
|
222
|
+
end
|
223
|
+
|
224
|
+
def start
|
225
|
+
on_start
|
226
|
+
end
|
227
|
+
|
228
|
+
# Start (or restart) the periodic 'alive' heartbeat towards core.
#
# @param interval [Numeric] period between alive messages, in milliseconds
def start_sending_alive(interval: 5000)
  # TODO: replace the alive interval task with Eventmachine?
  # Interval is expected to be msec, so convert it to secs.
  # FIX: use float division — the original integer division truncated any
  # sub-second interval to 0, which Concurrent::TimerTask rejects
  # (execution_interval must be > 0).
  interval = interval / 1000.0
  # Ensure only one heartbeat task runs at a time
  @alive_task.shutdown if @alive_task
  @alive_task = Concurrent::TimerTask.new(execution_interval: interval, timeout_interval: 2, run_now: true) do
    packet = { status: @status, lastSeen: Time.now.utc }
    # Allow user code to attach extra data to each alive message
    packet[:data] = @alive_message_data.call if @alive_message_data
    message_to :core, :alive, packet
  end
  @alive_task.execute
end
|
242
|
+
|
243
|
+
# True while the module status is :running (set by #on_start).
def running?
  @status == :running
end
|
246
|
+
|
247
|
+
# True while the module status is :stopped (set by #on_stop and at boot).
def stopped?
  @status == :stopped
end
|
250
|
+
|
251
|
+
# Connect to the broker exactly once (idempotent via @setted_up).
# On connection errors the attempt is retried forever when
# +retry_connection+ is true, otherwise the error is swallowed and the
# module is marked as set up anyway.
def setup(retry_connection = true)
  return if @setted_up

  begin
    logger.debug 'Connecting to the broker...'
    mqtt_client.connect
  rescue PahoMqtt::Exception => e
    # :nocov:
    logger.error "Error while connecting to the broker: #{e.message}"
    retry if retry_connection
    # :nocov:
  end

  @setted_up = true
end
|
266
|
+
|
267
|
+
# Build an MQTT topic string for the given endpoints/action using the
# project-wide topic template.
# All parts are stringified (nil action becomes '').
def topic_for(source: self.name, dest: self.name, action: nil, packet_type: :msg)
  parts = [source, packet_type, dest, action].map(&:to_s)
  Config::DEFAULT_TOPIC_FORMAT % parts
end
|
275
|
+
|
276
|
+
## MQTT Client callbacks
|
277
|
+
|
278
|
+
# Invoked by the MQTT client on CONNACK, i.e. once the broker accepted the
# connection. Wires every default action to its handler and, if nothing is
# subscribed yet, subscribes to all messages addressed to this module.
# NOTE(review): `on_action` is defined elsewhere in this class — presumably
# it binds the block to the action's topic; confirm against the full file.
def on_client_connack
  logger.debug 'Connected to the broker'
  # Setup default callbacks
  default_actions.each do |action_name, callback|
    action_name = action_name.to_s

    on_action action_name do |packet|
      # :nocov:
      if callback.is_a? Proc
        callback.call packet
      else
        # Defensive branch: default_actions should only yield Procs.
        logger.warn "No valid callback defined for '#{action_name}'"
      end
      # :nocov:
    end
  end

  if mqtt_client.subscribed_topics.size.zero?
    # Subscribe to all messages directed to me
    # ('+' wildcards match any source and any action), QoS 2.
    logger.debug 'Making broker subscriptions'
    mqtt_client.subscribe [topic_for(source: '+', action: '+'), 2]
  end
end
|
301
|
+
|
302
|
+
# @note Call super if you override this method
# Invoked by the MQTT client on SUBACK. Subscriptions are confirmed, so the
# module can now receive actions: announce readiness to core.
def on_client_suback
  # Client subscribed, we're ready to rock -> Tell core
  logger.debug 'Subscriptions done'
  logger.debug "Sending 'ready' to core"
  message_to :core, :ready
end
|
309
|
+
|
310
|
+
# @note Call super if you override this method
# UNSUBACK hook. No-op by default; override to react to unsubscribe acks.
def on_client_unsuback
end

# @note Call super if you override this method
# PUBACK hook (QoS 1 publish acknowledged). No-op by default.
def on_client_puback(message)
end

# @note Call super if you override this method
# PUBREL hook (QoS 2 handshake, release). No-op by default.
def on_client_pubrel(message)
end

# @note Call super if you override this method
# PUBREC hook (QoS 2 handshake, received). No-op by default.
def on_client_pubrec(message)
end

# @note Call super if you override this method
# PUBCOMP hook (QoS 2 handshake, complete). No-op by default.
def on_client_pubcomp(message)
end

# @note Call super if you override this method
# Raw inbound message hook, called for every received publish. No-op by
# default; per-action routing happens via the topic callbacks instead.
def on_client_message(message)
end
|
333
|
+
|
334
|
+
private
|
335
|
+
|
336
|
+
# Adopt a caller-provided MQTT client or build an internal one.
# Raises Exceptions::NotValidMQTTClient when the given client is not a
# PahoMqtt::Client. @internal_mqtt_client records which path was taken.
def check_or_build_mqtt_client(mqtt_client = nil)
  if mqtt_client
    raise Exceptions::NotValidMQTTClient.new unless mqtt_client.is_a?(PahoMqtt::Client)
    @internal_mqtt_client = false
    @mqtt_client = mqtt_client
  else
    @internal_mqtt_client = true
    @mqtt_client = PahoMqtt::Client.new mqtt_params
  end
end
|
348
|
+
|
349
|
+
# Validate that +hash_attributes+ is a Hash and return it with all keys
# (recursively) symbolized.
# @raise [ArgumentError] when the argument is not a Hash.
def check_and_return_fbxfile(hash_attributes)
  # Fixed grammar in the error message ("an Hash" -> "a Hash").
  raise ArgumentError, 'You must provide a Hash as argument' unless hash_attributes.is_a?(Hash)
  hash_attributes.deep_symbolize_keys
end
|
353
|
+
|
354
|
+
# Build the default multi-destination logger used when no logger is
# injected: every record goes to STDOUT and, serialized as JSON, to the
# broker on this module's 'logs' topic via MQTTLogDevice.
# Level/progname come from @log_level / @log_progname set elsewhere.
# @return [Logger::Multi]
def create_default_logger
  stdout_logger = ::Logger.new STDOUT
  broker_logger = ::Logger.new(Logger::MQTTLogDevice.new(topic_for(dest: :core, action: :logs),
                                                         client: mqtt_client),
                               formatter: Logger::JSONFormatter.new)
  logger = Logger::Multi.new stdout_logger, broker_logger,
                             level: @log_level,
                             progname: @log_progname
  logger
end
|
364
|
+
|
365
|
+
# :nocov:
# Map each core action name to the proc that services it. Procs (not
# lambdas) are used on purpose so packet-less handlers tolerate being
# called with a packet argument.
def default_actions
  handlers = {}
  handlers[:start]    = proc { |packet| on_start packet }
  handlers[:stop]     = proc { on_stop }
  handlers[:restart]  = proc { |packet| on_restart packet }
  handlers[:shutdown] = proc { on_shutdown }
  handlers[:logger]   = proc { |packet| on_logger packet }
  handlers[:configs]  = proc { |packet| on_configs packet }
  handlers
end
# :nocov:
|
377
|
+
|
378
|
+
# Load and memoize the Fbxfile (YAML) from @fbxfile_path with keys
# recursively symbolized.
# @raise [Exceptions::FbxfileNotFound] when the file does not exist.
def load_fbx_file
  # File.exist? — File.exists? was deprecated and removed in Ruby 3.2.
  if File.exist? @fbxfile_path
    @fbxfile = YAML.load(File.read(@fbxfile_path)).deep_symbolize_keys
  else
    raise Exceptions::FbxfileNotFound.new @fbxfile_path
  end
end
|
385
|
+
|
386
|
+
# :nocov:
# Default PahoMqtt::Client options. User-supplied params override these
# (see #mqtt_params); nil user values fall back to the values below.
def mqtt_default_params
  {
    # Broker endpoint and protocol.
    host: 'localhost',
    port: 1883,
    mqtt_version: '3.1.1',
    # Session/connection behaviour: non-blocking client, unlimited
    # reconnect attempts (reconnect_limit: -1) with 1s delay.
    clean_session: true,
    persistent: true,
    blocking: false,
    reconnect_limit: -1,
    reconnect_delay: 1,
    # Credentials/identity: unset by default, expected from user params.
    client_id: nil,
    username: nil,
    password: nil,
    ssl: false,
    # Last-will message: disabled by default.
    will_topic: nil,
    will_payload: nil,
    will_qos: 0,
    will_retain: false,
    # Keep-alive ping every 7s; broker acks must arrive within 5s.
    keep_alive: 7,
    ack_timeout: 5,
    # Route every client event to the corresponding on_client_* hook.
    on_connack: proc { on_client_connack },
    on_suback: proc { on_client_suback },
    on_unsuback: proc { on_client_unsuback },
    on_puback: proc { |msg| on_client_puback msg },
    on_pubrel: proc { |msg| on_client_pubrel msg },
    on_pubrec: proc { |msg| on_client_pubrec msg },
    on_pubcomp: proc { |msg| on_client_pubcomp msg },
    on_message: proc { |msg| on_client_message msg }
  }
end
# :nocov:
|
418
|
+
|
419
|
+
# Effective MQTT client options, memoized: defaults merged with the
# user-provided @mqtt_client_params, where a nil user value keeps the
# default instead of overriding it.
def mqtt_params
  @mqtt_params ||= mqtt_default_params.merge(@mqtt_client_params) do |_key, old_val, new_val|
    new_val.nil? ? old_val : new_val
  end
end
|
424
|
+
end
|
425
|
+
end
|
426
|
+
end
|
@@ -0,0 +1,27 @@
|
|
1
|
+
module Fancybox2
  module Module
    # Error types raised during module setup and lifecycle handling.
    module Exceptions

      # Raised when no Fbxfile path was supplied to the module.
      class FbxfileNotProvided < StandardError
        def initialize(file_path, message = nil)
          super(message || "Fbxfile.example path not provided. Given: #{file_path}")
        end
      end

      # Raised when the Fbxfile is missing at the given path.
      class FbxfileNotFound < StandardError
        def initialize(file_path, message = nil)
          super(message || "Fbxfile.example not found at #{file_path}")
        end
      end

      # Raised when a user-supplied MQTT client is not a PahoMqtt::Client.
      class NotValidMQTTClient < StandardError
        def initialize(message = nil)
          super(message || 'The provided MQTT client is not an instance of PahoMqtt::Client')
        end
      end
    end
  end
end
|
@@ -0,0 +1,38 @@
|
|
1
|
+
module Fancybox2
  module Utils
    # Host operating system detection helpers.
    module Os
      extend self

      # Return a normalized OS identifier derived from RbConfig's host_os
      # (e.g. 'linux', 'darwin', 'windows'); unrecognized values are
      # returned verbatim.
      # Memoized. The original guard checked a misspelled, never-assigned
      # @indentifier variable, so the case statement re-ran on every call;
      # fixed with a proper @identifier ||= memoization.
      # @return [String]
      def identifier
        @identifier ||= begin
          host_os = RbConfig::CONFIG['host_os']
          case host_os
          when /aix(.+)$/
            'aix'
          when /darwin(.+)$/
            'darwin'
          when /linux/
            'linux'
          when /freebsd(.+)$/
            'freebsd'
          when /openbsd(.+)$/
            'openbsd'
          when /netbsd(.*)$/
            'netbsd'
          when /dragonfly(.*)$/
            'dragonflybsd'
          when /solaris2/
            'solaris2'
          when /mswin|mingw32|windows/
            # No Windows platform exists that was not based on the Windows_NT kernel,
            # so 'windows' refers to all platforms built upon the Windows_NT kernel and
            # have access to win32 or win64 subsystems.
            'windows'
          else
            host_os
          end
        end
      end
    end
  end
end
|
metadata
ADDED
@@ -0,0 +1,83 @@
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
2
|
+
name: fancybox2
|
3
|
+
version: !ruby/object:Gem::Version
|
4
|
+
version: 0.0.1
|
5
|
+
platform: ruby
|
6
|
+
authors:
|
7
|
+
- Alessandro Verlato
|
8
|
+
autorequire:
|
9
|
+
bindir: bin
|
10
|
+
cert_chain: []
|
11
|
+
date: 2020-10-14 00:00:00.000000000 Z
|
12
|
+
dependencies:
|
13
|
+
- !ruby/object:Gem::Dependency
|
14
|
+
name: zeitwerk
|
15
|
+
requirement: !ruby/object:Gem::Requirement
|
16
|
+
requirements:
|
17
|
+
- - "~>"
|
18
|
+
- !ruby/object:Gem::Version
|
19
|
+
version: 2.3.0
|
20
|
+
type: :runtime
|
21
|
+
prerelease: false
|
22
|
+
version_requirements: !ruby/object:Gem::Requirement
|
23
|
+
requirements:
|
24
|
+
- - "~>"
|
25
|
+
- !ruby/object:Gem::Version
|
26
|
+
version: 2.3.0
|
27
|
+
- !ruby/object:Gem::Dependency
|
28
|
+
name: concurrent-ruby
|
29
|
+
requirement: !ruby/object:Gem::Requirement
|
30
|
+
requirements:
|
31
|
+
- - "~>"
|
32
|
+
- !ruby/object:Gem::Version
|
33
|
+
version: 1.1.6
|
34
|
+
type: :runtime
|
35
|
+
prerelease: false
|
36
|
+
version_requirements: !ruby/object:Gem::Requirement
|
37
|
+
requirements:
|
38
|
+
- - "~>"
|
39
|
+
- !ruby/object:Gem::Version
|
40
|
+
version: 1.1.6
|
41
|
+
description:
|
42
|
+
email: alessandro@fancypixel.it
|
43
|
+
executables: []
|
44
|
+
extensions: []
|
45
|
+
extra_rdoc_files: []
|
46
|
+
files:
|
47
|
+
- MIT-LICENSE
|
48
|
+
- README.md
|
49
|
+
- lib/fancybox2.rb
|
50
|
+
- lib/fancybox2/core_ext/array.rb
|
51
|
+
- lib/fancybox2/core_ext/hash.rb
|
52
|
+
- lib/fancybox2/logger/json_formatter.rb
|
53
|
+
- lib/fancybox2/logger/mqtt_log_device.rb
|
54
|
+
- lib/fancybox2/logger/multi.rb
|
55
|
+
- lib/fancybox2/module/base.rb
|
56
|
+
- lib/fancybox2/module/config.rb
|
57
|
+
- lib/fancybox2/module/exceptions.rb
|
58
|
+
- lib/fancybox2/utils/os.rb
|
59
|
+
- lib/fancybox2/version.rb
|
60
|
+
homepage: https://github.com/Fancybox2/ruby-sdk
|
61
|
+
licenses:
|
62
|
+
- MIT
|
63
|
+
metadata: {}
|
64
|
+
post_install_message:
|
65
|
+
rdoc_options: []
|
66
|
+
require_paths:
|
67
|
+
- lib
|
68
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
69
|
+
requirements:
|
70
|
+
- - ">="
|
71
|
+
- !ruby/object:Gem::Version
|
72
|
+
version: 2.5.0
|
73
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
74
|
+
requirements:
|
75
|
+
- - ">="
|
76
|
+
- !ruby/object:Gem::Version
|
77
|
+
version: '0'
|
78
|
+
requirements: []
|
79
|
+
rubygems_version: 3.1.4
|
80
|
+
signing_key:
|
81
|
+
specification_version: 4
|
82
|
+
summary: Fancybox 2 Ruby SDK
|
83
|
+
test_files: []
|