ruby_spark 0.0.1
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/.gitignore +17 -0
- data/Gemfile +4 -0
- data/LICENSE.txt +22 -0
- data/README.md +50 -0
- data/Rakefile +1 -0
- data/fixtures/vcr_cassettes/analog_read.yml +41 -0
- data/fixtures/vcr_cassettes/analog_write.yml +41 -0
- data/fixtures/vcr_cassettes/bad_core_id.yml +37 -0
- data/fixtures/vcr_cassettes/bad_token.yml +39 -0
- data/fixtures/vcr_cassettes/digital_read.yml +41 -0
- data/fixtures/vcr_cassettes/digital_write.yml +41 -0
- data/fixtures/vcr_cassettes/spark_timeout.yml +37 -0
- data/lib/ruby_spark.rb +17 -0
- data/lib/ruby_spark/core.rb +72 -0
- data/lib/ruby_spark/version.rb +3 -0
- data/ruby_spark.gemspec +28 -0
- data/spec/ruby_spark/core_spec.rb +94 -0
- data/spec/spec_helper.rb +19 -0
- metadata +162 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
|
|
1
|
+
---
|
2
|
+
SHA1:
|
3
|
+
metadata.gz: d847f6a1e0cb7eb32f60930d18adc9e27bfce08a
|
4
|
+
data.tar.gz: 4a302003c45e3dd2508bdd2fa8317d8be22cb2f0
|
5
|
+
SHA512:
|
6
|
+
metadata.gz: 7d761940739f959a70b2f0e739eca5f9d3ff47f81c411532cee6217ea919eb5f69aa3251599fbfedc4d64e5d7847864ff2e9a17f58b1dd29abea25b095459d6d
|
7
|
+
data.tar.gz: 5d1699af7115a0e26523716a2092bd9f0dbdea4aad7b72dfa8b28ea3b25f13ad41ab7112dbe001925527b6137ea87a9f51362f522b82868256709e9721460bac
|
data/.gitignore
ADDED
data/Gemfile
ADDED
data/LICENSE.txt
ADDED
@@ -0,0 +1,22 @@
|
|
1
|
+
Copyright (c) 2013 Eli Fatsi
|
2
|
+
|
3
|
+
MIT License
|
4
|
+
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining
|
6
|
+
a copy of this software and associated documentation files (the
|
7
|
+
"Software"), to deal in the Software without restriction, including
|
8
|
+
without limitation the rights to use, copy, modify, merge, publish,
|
9
|
+
distribute, sublicense, and/or sell copies of the Software, and to
|
10
|
+
permit persons to whom the Software is furnished to do so, subject to
|
11
|
+
the following conditions:
|
12
|
+
|
13
|
+
The above copyright notice and this permission notice shall be
|
14
|
+
included in all copies or substantial portions of the Software.
|
15
|
+
|
16
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
17
|
+
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
18
|
+
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
19
|
+
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
20
|
+
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
21
|
+
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
22
|
+
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
data/README.md
ADDED
@@ -0,0 +1,50 @@
|
|
1
|
+
# RubySpark
|
2
|
+
|
3
|
+
Ruby Gem to make API requests to the [Spark Cloud API](http://docs.spark.io/#/api)
|
4
|
+
|
5
|
+
## Obtaining a Spark Core Access Token and Core ID
|
6
|
+
|
7
|
+
Assuming at this point you've followed the Spark Core's [Getting Started](http://docs.spark.io/#/start) guides and connected your Core with the Spark Cloud.
|
8
|
+
|
9
|
+
Head over to the [Spark Build IDE](https://www.spark.io/build). In the Settings tab you can get your Access Token, and you can fetch your Device ID from the Cores tab. You'll need these both to authenticate your API calls, and the Device ID to direct them.
|
10
|
+
|
11
|
+
## Installation
|
12
|
+
|
13
|
+
To use this gem, install it with `gem install ruby_spark` or add this line to your Gemfile:
|
14
|
+
|
15
|
+
gem 'ruby_spark'
|
16
|
+
|
17
|
+
and install it with `bundle install`
|
18
|
+
|
19
|
+
## Usage
|
20
|
+
|
21
|
+
Load:
|
22
|
+
|
23
|
+
require 'ruby_spark'
|
24
|
+
|
25
|
+
Configure:
|
26
|
+
|
27
|
+
RubySpark.configuration do |config|
|
28
|
+
config.auth_token = "very_long_spark_api_auth_token"
|
29
|
+
end
|
30
|
+
|
31
|
+
Instantiate.
|
32
|
+
|
33
|
+
core = RubySpark::Core.new("semi_long_core_device_id")
|
34
|
+
|
35
|
+
Fire away:
|
36
|
+
|
37
|
+
core.digital_write(3, "HIGH") #=> true or false
|
38
|
+
core.digital_read(5) #=> "HIGH" or "LOW"
|
39
|
+
|
40
|
+
core.analog_write(3, 420) #=> true or false
|
41
|
+
core.analog_read(5) #=> 0 to 4096
|
42
|
+
|
43
|
+
|
44
|
+
Clearly you'll need to replace "very_long_spark_api_auth_token" and "semi_long_core_device_id" with real values.
|
45
|
+
|
46
|
+
## Contributing
|
47
|
+
|
48
|
+
Happily accepting contributions. To contribute, fork, develop, add some specs, and pull request.
|
49
|
+
|
50
|
+
Note about the specs. All API requests make use of the [VCR](https://github.com/vcr/vcr) gem. To contribute without exposing your Auth Token and Core ID, run the specs with your real authentication, and then find-and-replace your Auth Token and Core ID with fake values in the spec and any VCR cassettes.
|
data/Rakefile
ADDED
@@ -0,0 +1 @@
|
|
1
|
+
require "bundler/gem_tasks"
|
@@ -0,0 +1,41 @@
|
|
1
|
+
---
|
2
|
+
http_interactions:
|
3
|
+
- request:
|
4
|
+
method: post
|
5
|
+
uri: https://api.spark.io/v1/devices/good_core_id/analogread
|
6
|
+
body:
|
7
|
+
encoding: UTF-8
|
8
|
+
string: access_token=good_auth_token&params=A6
|
9
|
+
headers: {}
|
10
|
+
response:
|
11
|
+
status:
|
12
|
+
code: 200
|
13
|
+
message: OK
|
14
|
+
headers:
|
15
|
+
Access-Control-Allow-Origin:
|
16
|
+
- '*'
|
17
|
+
Content-Type:
|
18
|
+
- application/json; charset=utf-8
|
19
|
+
Date:
|
20
|
+
- Wed, 25 Dec 2013 06:13:11 GMT
|
21
|
+
Server:
|
22
|
+
- nginx/1.4.2
|
23
|
+
X-Powered-By:
|
24
|
+
- Express
|
25
|
+
Content-Length:
|
26
|
+
- '127'
|
27
|
+
Connection:
|
28
|
+
- keep-alive
|
29
|
+
body:
|
30
|
+
encoding: UTF-8
|
31
|
+
string: |-
|
32
|
+
{
|
33
|
+
"id": "good_core_id",
|
34
|
+
"name": "First Core",
|
35
|
+
"last_app": null,
|
36
|
+
"connected": true,
|
37
|
+
"return_value": 2399
|
38
|
+
}
|
39
|
+
http_version:
|
40
|
+
recorded_at: Wed, 25 Dec 2013 06:13:11 GMT
|
41
|
+
recorded_with: VCR 2.5.0
|
@@ -0,0 +1,41 @@
|
|
1
|
+
---
|
2
|
+
http_interactions:
|
3
|
+
- request:
|
4
|
+
method: post
|
5
|
+
uri: https://api.spark.io/v1/devices/good_core_id/analogwrite
|
6
|
+
body:
|
7
|
+
encoding: UTF-8
|
8
|
+
string: access_token=good_auth_token&params=A7%2C130
|
9
|
+
headers: {}
|
10
|
+
response:
|
11
|
+
status:
|
12
|
+
code: 200
|
13
|
+
message: OK
|
14
|
+
headers:
|
15
|
+
Access-Control-Allow-Origin:
|
16
|
+
- '*'
|
17
|
+
Content-Type:
|
18
|
+
- application/json; charset=utf-8
|
19
|
+
Date:
|
20
|
+
- Wed, 25 Dec 2013 06:12:18 GMT
|
21
|
+
Server:
|
22
|
+
- nginx/1.4.2
|
23
|
+
X-Powered-By:
|
24
|
+
- Express
|
25
|
+
Content-Length:
|
26
|
+
- '124'
|
27
|
+
Connection:
|
28
|
+
- keep-alive
|
29
|
+
body:
|
30
|
+
encoding: UTF-8
|
31
|
+
string: |-
|
32
|
+
{
|
33
|
+
"id": "good_core_id",
|
34
|
+
"name": "First Core",
|
35
|
+
"last_app": null,
|
36
|
+
"connected": true,
|
37
|
+
"return_value": 1
|
38
|
+
}
|
39
|
+
http_version:
|
40
|
+
recorded_at: Wed, 25 Dec 2013 06:12:18 GMT
|
41
|
+
recorded_with: VCR 2.5.0
|
@@ -0,0 +1,37 @@
|
|
1
|
+
---
|
2
|
+
http_interactions:
|
3
|
+
- request:
|
4
|
+
method: post
|
5
|
+
uri: https://api.spark.io/v1/devices/bad_core_id/digitalwrite
|
6
|
+
body:
|
7
|
+
encoding: UTF-8
|
8
|
+
string: access_token=good_auth_token&params=D7%2CHIGH
|
9
|
+
headers: {}
|
10
|
+
response:
|
11
|
+
status:
|
12
|
+
code: 403
|
13
|
+
message: Forbidden
|
14
|
+
headers:
|
15
|
+
Access-Control-Allow-Origin:
|
16
|
+
- '*'
|
17
|
+
Content-Type:
|
18
|
+
- application/json; charset=utf-8
|
19
|
+
Date:
|
20
|
+
- Wed, 25 Dec 2013 06:18:27 GMT
|
21
|
+
Server:
|
22
|
+
- nginx/1.4.2
|
23
|
+
X-Powered-By:
|
24
|
+
- Express
|
25
|
+
Content-Length:
|
26
|
+
- '34'
|
27
|
+
Connection:
|
28
|
+
- keep-alive
|
29
|
+
body:
|
30
|
+
encoding: UTF-8
|
31
|
+
string: |-
|
32
|
+
{
|
33
|
+
"error": "Permission Denied"
|
34
|
+
}
|
35
|
+
http_version:
|
36
|
+
recorded_at: Wed, 25 Dec 2013 06:18:27 GMT
|
37
|
+
recorded_with: VCR 2.5.0
|
@@ -0,0 +1,39 @@
|
|
1
|
+
---
|
2
|
+
http_interactions:
|
3
|
+
- request:
|
4
|
+
method: post
|
5
|
+
uri: https://api.spark.io/v1/devices/good_core_id/digitalwrite
|
6
|
+
body:
|
7
|
+
encoding: UTF-8
|
8
|
+
string: access_token=bad_token&params=D7%2CHIGH
|
9
|
+
headers: {}
|
10
|
+
response:
|
11
|
+
status:
|
12
|
+
code: 400
|
13
|
+
message: Bad Request
|
14
|
+
headers:
|
15
|
+
Access-Control-Allow-Origin:
|
16
|
+
- '*'
|
17
|
+
Content-Type:
|
18
|
+
- application/json; charset=utf-8
|
19
|
+
Date:
|
20
|
+
- Wed, 25 Dec 2013 06:15:59 GMT
|
21
|
+
Server:
|
22
|
+
- nginx/1.4.2
|
23
|
+
X-Powered-By:
|
24
|
+
- Express
|
25
|
+
Content-Length:
|
26
|
+
- '109'
|
27
|
+
Connection:
|
28
|
+
- keep-alive
|
29
|
+
body:
|
30
|
+
encoding: UTF-8
|
31
|
+
string: |-
|
32
|
+
{
|
33
|
+
"code": 400,
|
34
|
+
"error": "invalid_grant",
|
35
|
+
"error_description": "The access token provided is invalid."
|
36
|
+
}
|
37
|
+
http_version:
|
38
|
+
recorded_at: Wed, 25 Dec 2013 06:15:59 GMT
|
39
|
+
recorded_with: VCR 2.5.0
|
@@ -0,0 +1,41 @@
|
|
1
|
+
---
|
2
|
+
http_interactions:
|
3
|
+
- request:
|
4
|
+
method: post
|
5
|
+
uri: https://api.spark.io/v1/devices/good_core_id/digitalread
|
6
|
+
body:
|
7
|
+
encoding: UTF-8
|
8
|
+
string: access_token=good_auth_token&params=D6
|
9
|
+
headers: {}
|
10
|
+
response:
|
11
|
+
status:
|
12
|
+
code: 200
|
13
|
+
message: OK
|
14
|
+
headers:
|
15
|
+
Access-Control-Allow-Origin:
|
16
|
+
- '*'
|
17
|
+
Content-Type:
|
18
|
+
- application/json; charset=utf-8
|
19
|
+
Date:
|
20
|
+
- Wed, 25 Dec 2013 06:13:03 GMT
|
21
|
+
Server:
|
22
|
+
- nginx/1.4.2
|
23
|
+
X-Powered-By:
|
24
|
+
- Express
|
25
|
+
Content-Length:
|
26
|
+
- '124'
|
27
|
+
Connection:
|
28
|
+
- keep-alive
|
29
|
+
body:
|
30
|
+
encoding: UTF-8
|
31
|
+
string: |-
|
32
|
+
{
|
33
|
+
"id": "good_core_id",
|
34
|
+
"name": "First Core",
|
35
|
+
"last_app": null,
|
36
|
+
"connected": true,
|
37
|
+
"return_value": 1
|
38
|
+
}
|
39
|
+
http_version:
|
40
|
+
recorded_at: Wed, 25 Dec 2013 06:13:03 GMT
|
41
|
+
recorded_with: VCR 2.5.0
|
@@ -0,0 +1,41 @@
|
|
1
|
+
---
|
2
|
+
http_interactions:
|
3
|
+
- request:
|
4
|
+
method: post
|
5
|
+
uri: https://api.spark.io/v1/devices/good_core_id/digitalwrite
|
6
|
+
body:
|
7
|
+
encoding: UTF-8
|
8
|
+
string: access_token=good_auth_token&params=D7%2CHIGH
|
9
|
+
headers: {}
|
10
|
+
response:
|
11
|
+
status:
|
12
|
+
code: 200
|
13
|
+
message: OK
|
14
|
+
headers:
|
15
|
+
Access-Control-Allow-Origin:
|
16
|
+
- '*'
|
17
|
+
Content-Type:
|
18
|
+
- application/json; charset=utf-8
|
19
|
+
Date:
|
20
|
+
- Wed, 25 Dec 2013 06:11:50 GMT
|
21
|
+
Server:
|
22
|
+
- nginx/1.4.2
|
23
|
+
X-Powered-By:
|
24
|
+
- Express
|
25
|
+
Content-Length:
|
26
|
+
- '124'
|
27
|
+
Connection:
|
28
|
+
- keep-alive
|
29
|
+
body:
|
30
|
+
encoding: UTF-8
|
31
|
+
string: |-
|
32
|
+
{
|
33
|
+
"id": "good_core_id",
|
34
|
+
"name": "First Core",
|
35
|
+
"last_app": null,
|
36
|
+
"connected": true,
|
37
|
+
"return_value": 1
|
38
|
+
}
|
39
|
+
http_version:
|
40
|
+
recorded_at: Wed, 25 Dec 2013 06:11:51 GMT
|
41
|
+
recorded_with: VCR 2.5.0
|
@@ -0,0 +1,37 @@
|
|
1
|
+
---
|
2
|
+
http_interactions:
|
3
|
+
- request:
|
4
|
+
method: post
|
5
|
+
uri: https://api.spark.io/v1/devices/good_core_id/digitalwrite
|
6
|
+
body:
|
7
|
+
encoding: UTF-8
|
8
|
+
string: access_token=good_auth_token&params=D7%2CHIGH
|
9
|
+
headers: {}
|
10
|
+
response:
|
11
|
+
status:
|
12
|
+
code: 200
|
13
|
+
message: OK
|
14
|
+
headers:
|
15
|
+
Access-Control-Allow-Origin:
|
16
|
+
- '*'
|
17
|
+
Content-Type:
|
18
|
+
- application/json; charset=utf-8
|
19
|
+
Date:
|
20
|
+
- Wed, 25 Dec 2013 06:19:10 GMT
|
21
|
+
Server:
|
22
|
+
- nginx/1.4.2
|
23
|
+
X-Powered-By:
|
24
|
+
- Express
|
25
|
+
Content-Length:
|
26
|
+
- '27'
|
27
|
+
Connection:
|
28
|
+
- keep-alive
|
29
|
+
body:
|
30
|
+
encoding: UTF-8
|
31
|
+
string: |-
|
32
|
+
{
|
33
|
+
"error": "Timed out."
|
34
|
+
}
|
35
|
+
http_version:
|
36
|
+
recorded_at: Wed, 25 Dec 2013 06:19:10 GMT
|
37
|
+
recorded_with: VCR 2.5.0
|
data/lib/ruby_spark.rb
ADDED
@@ -0,0 +1,17 @@
|
|
1
|
+
require 'httparty'

require 'ruby_spark/version'
require 'ruby_spark/core'

# Top-level namespace for the gem. Holds the globally configured
# Spark Cloud auth token that RubySpark::Core reads on every request.
module RubySpark
  # Raised by RubySpark::Core#initialize when no auth token has been set.
  class AuthTokenNotDefinedError < StandardError; end

  # Module-level accessor pair for the shared auth token
  # (equivalent to `attr_accessor :auth_token` on the singleton class).
  def self.auth_token
    @auth_token
  end

  def self.auth_token=(token)
    @auth_token = token
  end

  # Configuration entry point:
  #
  #   RubySpark.configuration do |config|
  #     config.auth_token = "very_long_spark_api_auth_token"
  #   end
  def self.configuration
    yield self
  end
end
|
@@ -0,0 +1,72 @@
|
|
1
|
+
module RubySpark
  # Wraps a single Spark Core device and exposes the Spark Cloud pin
  # read/write endpoints (digitalread/digitalwrite/analogread/analogwrite)
  # over HTTPS via HTTParty.
  class Core
    # Raised when the Spark Cloud responds with an error payload.
    class ApiError < StandardError; end

    # @param core_id [String] device id from the Spark Build IDE Cores tab
    # @raise [RubySpark::AuthTokenNotDefinedError] if RubySpark.auth_token is unset
    def initialize(core_id)
      raise RubySpark::AuthTokenNotDefinedError if RubySpark.auth_token.nil?

      @core_id = core_id
    end

    # Sets digital pin D<pin> to the given level ("HIGH" or "LOW").
    # @return [Boolean] true when the Core acknowledged the write
    def digital_write(pin, message)
      response = post('digitalwrite', "D#{pin},#{message}")
      handle(response) do
        response["return_value"] == 1
      end
    end

    # Reads digital pin D<pin>.
    # @return [String] "HIGH" or "LOW"
    def digital_read(pin)
      response = post('digitalread', "D#{pin}")
      handle(response) do
        response["return_value"] == 1 ? "HIGH" : "LOW"
      end
    end

    # Writes an analog value (0-255) to pin A<pin>.
    # @return [Boolean] true when the Core acknowledged the write
    def analog_write(pin, value)
      response = post('analogwrite', "A#{pin},#{value}")
      handle(response) do
        response["return_value"] == 1
      end
    end

    # Reads analog pin A<pin>.
    # @return [Integer] raw reading (0 to 4096)
    def analog_read(pin)
      response = post('analogread', "A#{pin}")
      handle(response) do
        response["return_value"]
      end
    end

    private

    # POSTs to the device endpoint and returns HTTParty's parsed JSON Hash.
    def post(action, params)
      url  = base_url + action
      body = access_params.merge(:params => params)

      HTTParty.post(url, :body => body).parsed_response
    end

    # Raises ApiError when the response carries a non-empty error message,
    # otherwise returns the value of the given block.
    # (Was `yield block`, which invoked the block with the Proc itself as an
    # argument — it only worked because every caller's block ignores its args.)
    def handle(response)
      error = error_from(response)
      if error && !error.empty?
        raise ApiError, error
      else
        yield
      end
    end

    # Builds a human-readable error message from the response, or nil when
    # the response has no "error" key. Unlike the original, this does NOT
    # mutate the string inside the caller's response hash.
    def error_from(response)
      error = response["error"]
      return nil if error.nil?

      message     = error.dup
      description = response["error_description"]
      message << ": #{description}" if description
      message << ": You do not have access to that Core" if message == "Permission Denied"
      message
    end

    def base_url
      "https://api.spark.io/v1/devices/#{@core_id}/"
    end

    def access_params
      {:access_token => RubySpark.auth_token}
    end
  end
end
|
data/ruby_spark.gemspec
ADDED
@@ -0,0 +1,28 @@
|
|
1
|
+
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
# Only the version file belongs here: requiring 'ruby_spark/core' (as the
# original did) pulls runtime code into `gem build` for no benefit.
require 'ruby_spark/version'

Gem::Specification.new do |spec|
  spec.name          = "ruby_spark"
  spec.version       = RubySpark::VERSION
  spec.authors       = ["Eli Fatsi"]
  spec.email         = ["eli.fatsi@viget.com"]
  spec.description   = "Ruby Gem to make API calls to the Spark Cloud"
  spec.summary       = "Ruby Gem to make API calls to the Spark Cloud"
  spec.homepage      = "http://github.com/efatsi/ruby_spark"
  spec.license       = "MIT"

  spec.files         = `git ls-files`.split($/)
  spec.test_files    = spec.files.grep(%r{^spec/})

  spec.add_development_dependency "bundler", "~> 1.3"
  spec.add_development_dependency "rake"
  spec.add_development_dependency "rspec"
  spec.add_development_dependency "vcr"
  spec.add_development_dependency "webmock"
  spec.add_development_dependency "pry"

  spec.add_dependency "httparty"
end
|
@@ -0,0 +1,94 @@
|
|
1
|
+
require 'spec_helper'
require 'pry'

describe RubySpark::Core do

  before { RubySpark.auth_token = "good_auth_token" }

  subject { described_class.new("good_core_id") }

  describe "#digital_write" do
    it "succeeds when Auth Token and Core ID are correct" do
      VCR.use_cassette("digital_write") do
        subject.digital_write(7, "HIGH").should == true
      end
    end

    # NOTE: asserting class and message in one raise_error matcher fixes the
    # old begin/rescue pattern, which passed vacuously when nothing was raised.
    it "returns the appropriate error when Auth Token is bad" do
      RubySpark.auth_token = "bad_token"

      VCR.use_cassette("bad_token") do
        expect {
          subject.digital_write(7, "HIGH")
        }.to raise_error(RubySpark::Core::ApiError, "invalid_grant: The access token provided is invalid.")
      end
    end

    it "returns the appropriate error when Core ID is bad" do
      subject = described_class.new("bad_core_id")

      VCR.use_cassette("bad_core_id") do
        expect {
          subject.digital_write(7, "HIGH")
        }.to raise_error(RubySpark::Core::ApiError, "Permission Denied: You do not have access to that Core")
      end
    end

    it "returns the appropriate error when the Spark API times out" do
      VCR.use_cassette("spark_timeout") do
        expect {
          subject.digital_write(7, "HIGH")
        }.to raise_error(RubySpark::Core::ApiError, "Timed out.")
      end
    end
  end

  describe "#digital_read" do
    it "succeeds when Auth Token and Core ID are correct" do
      VCR.use_cassette("digital_read") do
        subject.digital_read(6).should == "HIGH"
      end
    end
  end

  describe "#analog_write" do
    it "succeeds when Auth Token and Core ID are correct" do
      VCR.use_cassette("analog_write") do
        subject.analog_write(7, 130).should == true
      end
    end
  end

  describe "#analog_read" do
    it "succeeds when Auth Token and Core ID are correct" do
      VCR.use_cassette("analog_read") do
        subject.analog_read(6).should == 2399
      end
    end
  end

end
|
data/spec/spec_helper.rb
ADDED
@@ -0,0 +1,19 @@
|
|
1
|
+
require 'rubygems'
require 'bundler/setup'
require 'vcr'

require 'ruby_spark' # and any other gems you need

# Configure VCR so specs replay canned Spark Cloud responses through
# WebMock instead of making live HTTP calls.
VCR.configure do |vcr|
  vcr.cassette_library_dir = 'fixtures/vcr_cassettes'
  vcr.hook_into :webmock
  vcr.default_cassette_options = { :record => :once, :allow_playback_repeats => true }
  vcr.allow_http_connections_when_no_cassette = false
end

RSpec.configure do |config|
  # some (optional) config here
end
|
metadata
ADDED
@@ -0,0 +1,162 @@
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
2
|
+
name: ruby_spark
|
3
|
+
version: !ruby/object:Gem::Version
|
4
|
+
version: 0.0.1
|
5
|
+
platform: ruby
|
6
|
+
authors:
|
7
|
+
- Eli Fatsi
|
8
|
+
autorequire:
|
9
|
+
bindir: bin
|
10
|
+
cert_chain: []
|
11
|
+
date: 2013-12-25 00:00:00.000000000 Z
|
12
|
+
dependencies:
|
13
|
+
- !ruby/object:Gem::Dependency
|
14
|
+
name: bundler
|
15
|
+
requirement: !ruby/object:Gem::Requirement
|
16
|
+
requirements:
|
17
|
+
- - ~>
|
18
|
+
- !ruby/object:Gem::Version
|
19
|
+
version: '1.3'
|
20
|
+
type: :development
|
21
|
+
prerelease: false
|
22
|
+
version_requirements: !ruby/object:Gem::Requirement
|
23
|
+
requirements:
|
24
|
+
- - ~>
|
25
|
+
- !ruby/object:Gem::Version
|
26
|
+
version: '1.3'
|
27
|
+
- !ruby/object:Gem::Dependency
|
28
|
+
name: rake
|
29
|
+
requirement: !ruby/object:Gem::Requirement
|
30
|
+
requirements:
|
31
|
+
- - '>='
|
32
|
+
- !ruby/object:Gem::Version
|
33
|
+
version: '0'
|
34
|
+
type: :development
|
35
|
+
prerelease: false
|
36
|
+
version_requirements: !ruby/object:Gem::Requirement
|
37
|
+
requirements:
|
38
|
+
- - '>='
|
39
|
+
- !ruby/object:Gem::Version
|
40
|
+
version: '0'
|
41
|
+
- !ruby/object:Gem::Dependency
|
42
|
+
name: rspec
|
43
|
+
requirement: !ruby/object:Gem::Requirement
|
44
|
+
requirements:
|
45
|
+
- - '>='
|
46
|
+
- !ruby/object:Gem::Version
|
47
|
+
version: '0'
|
48
|
+
type: :development
|
49
|
+
prerelease: false
|
50
|
+
version_requirements: !ruby/object:Gem::Requirement
|
51
|
+
requirements:
|
52
|
+
- - '>='
|
53
|
+
- !ruby/object:Gem::Version
|
54
|
+
version: '0'
|
55
|
+
- !ruby/object:Gem::Dependency
|
56
|
+
name: vcr
|
57
|
+
requirement: !ruby/object:Gem::Requirement
|
58
|
+
requirements:
|
59
|
+
- - '>='
|
60
|
+
- !ruby/object:Gem::Version
|
61
|
+
version: '0'
|
62
|
+
type: :development
|
63
|
+
prerelease: false
|
64
|
+
version_requirements: !ruby/object:Gem::Requirement
|
65
|
+
requirements:
|
66
|
+
- - '>='
|
67
|
+
- !ruby/object:Gem::Version
|
68
|
+
version: '0'
|
69
|
+
- !ruby/object:Gem::Dependency
|
70
|
+
name: webmock
|
71
|
+
requirement: !ruby/object:Gem::Requirement
|
72
|
+
requirements:
|
73
|
+
- - '>='
|
74
|
+
- !ruby/object:Gem::Version
|
75
|
+
version: '0'
|
76
|
+
type: :development
|
77
|
+
prerelease: false
|
78
|
+
version_requirements: !ruby/object:Gem::Requirement
|
79
|
+
requirements:
|
80
|
+
- - '>='
|
81
|
+
- !ruby/object:Gem::Version
|
82
|
+
version: '0'
|
83
|
+
- !ruby/object:Gem::Dependency
|
84
|
+
name: pry
|
85
|
+
requirement: !ruby/object:Gem::Requirement
|
86
|
+
requirements:
|
87
|
+
- - '>='
|
88
|
+
- !ruby/object:Gem::Version
|
89
|
+
version: '0'
|
90
|
+
type: :development
|
91
|
+
prerelease: false
|
92
|
+
version_requirements: !ruby/object:Gem::Requirement
|
93
|
+
requirements:
|
94
|
+
- - '>='
|
95
|
+
- !ruby/object:Gem::Version
|
96
|
+
version: '0'
|
97
|
+
- !ruby/object:Gem::Dependency
|
98
|
+
name: httparty
|
99
|
+
requirement: !ruby/object:Gem::Requirement
|
100
|
+
requirements:
|
101
|
+
- - '>='
|
102
|
+
- !ruby/object:Gem::Version
|
103
|
+
version: '0'
|
104
|
+
type: :runtime
|
105
|
+
prerelease: false
|
106
|
+
version_requirements: !ruby/object:Gem::Requirement
|
107
|
+
requirements:
|
108
|
+
- - '>='
|
109
|
+
- !ruby/object:Gem::Version
|
110
|
+
version: '0'
|
111
|
+
description: Ruby Gem to make API calls to the Spark Cloud
|
112
|
+
email:
|
113
|
+
- eli.fatsi@viget.com
|
114
|
+
executables: []
|
115
|
+
extensions: []
|
116
|
+
extra_rdoc_files: []
|
117
|
+
files:
|
118
|
+
- .gitignore
|
119
|
+
- Gemfile
|
120
|
+
- LICENSE.txt
|
121
|
+
- README.md
|
122
|
+
- Rakefile
|
123
|
+
- fixtures/vcr_cassettes/analog_read.yml
|
124
|
+
- fixtures/vcr_cassettes/analog_write.yml
|
125
|
+
- fixtures/vcr_cassettes/bad_core_id.yml
|
126
|
+
- fixtures/vcr_cassettes/bad_token.yml
|
127
|
+
- fixtures/vcr_cassettes/digital_read.yml
|
128
|
+
- fixtures/vcr_cassettes/digital_write.yml
|
129
|
+
- fixtures/vcr_cassettes/spark_timeout.yml
|
130
|
+
- lib/ruby_spark.rb
|
131
|
+
- lib/ruby_spark/core.rb
|
132
|
+
- lib/ruby_spark/version.rb
|
133
|
+
- ruby_spark.gemspec
|
134
|
+
- spec/ruby_spark/core_spec.rb
|
135
|
+
- spec/spec_helper.rb
|
136
|
+
homepage: http://github.com/efatsi/ruby_spark
|
137
|
+
licenses:
|
138
|
+
- MIT
|
139
|
+
metadata: {}
|
140
|
+
post_install_message:
|
141
|
+
rdoc_options: []
|
142
|
+
require_paths:
|
143
|
+
- lib
|
144
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
145
|
+
requirements:
|
146
|
+
- - '>='
|
147
|
+
- !ruby/object:Gem::Version
|
148
|
+
version: '0'
|
149
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
150
|
+
requirements:
|
151
|
+
- - '>='
|
152
|
+
- !ruby/object:Gem::Version
|
153
|
+
version: '0'
|
154
|
+
requirements: []
|
155
|
+
rubyforge_project:
|
156
|
+
rubygems_version: 2.0.6
|
157
|
+
signing_key:
|
158
|
+
specification_version: 4
|
159
|
+
summary: Ruby Gem to make API calls to the Spark Cloud
|
160
|
+
test_files:
|
161
|
+
- spec/ruby_spark/core_spec.rb
|
162
|
+
- spec/spec_helper.rb
|