ruby_spark 0.0.1 → 0.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: d847f6a1e0cb7eb32f60930d18adc9e27bfce08a
4
- data.tar.gz: 4a302003c45e3dd2508bdd2fa8317d8be22cb2f0
3
+ metadata.gz: 08cc629326d77db1563de7e9e5042fda0de306f1
4
+ data.tar.gz: 8746072414f6befb38835392722163d5a145205c
5
5
  SHA512:
6
- metadata.gz: 7d761940739f959a70b2f0e739eca5f9d3ff47f81c411532cee6217ea919eb5f69aa3251599fbfedc4d64e5d7847864ff2e9a17f58b1dd29abea25b095459d6d
7
- data.tar.gz: 5d1699af7115a0e26523716a2092bd9f0dbdea4aad7b72dfa8b28ea3b25f13ad41ab7112dbe001925527b6137ea87a9f51362f522b82868256709e9721460bac
6
+ metadata.gz: 61c816b0357594179c4ebac680605e61f25afa850fd53a12a1c7dbcdaa16e31add5d11d3ac8d47376eac3433067eb83f58c3102212fcaaacd9217fe4261d1683
7
+ data.tar.gz: b3641b8658cd5de1dd309225ca0f268031fb8877b885654ee8fb735587a9caa7bae39d34d2dfdaa4bc94fd7d1345d1a81d0394a807f3aa395474884bb0a5de3f
data/README.md CHANGED
@@ -24,14 +24,20 @@ Load:
24
24
 
25
25
  Configure:
26
26
 
27
+ # config/initializers/ruby_spark.rb
28
+
27
29
  RubySpark.configuration do |config|
28
- config.auth_token = "very_long_spark_api_auth_token"
30
+ config.access_token = "very_long_spark_api_access_token"
29
31
  end
30
32
 
31
33
  Instantiate:
32
34
 
33
35
  core = RubySpark::Core.new("semi_long_core_device_id")
34
36
 
37
+ Alternatively, you can pass your Access Token directly into your Core object and skip the configuration.
38
+
39
+ core = RubySpark::Core.new("semi_long_core_device_id", "very_long_spark_api_access_token")
40
+
35
41
  Fire away:
36
42
 
37
43
  core.digital_write(3, "HIGH") #=> true or false
@@ -40,11 +46,10 @@ Fire away:
40
46
  core.analog_write(3, 420) #=> true or false
41
47
  core.analog_read(5) #=> 0 to 4096
42
48
 
43
-
44
- Clearly you'll need to replace "very_long_spark_api_auth_token" and "semi_long_core_device_id" with real values.
49
+ Clearly you'll need to replace "very_long_spark_api_access_token" and "semi_long_core_device_id" with real values.
45
50
 
46
51
  ## Contributing
47
52
 
48
53
  Happily accepting contributions. To contribute, fork, develop, add some specs, and pull request.
49
54
 
50
- Note about the specs. All API requests make use of the [VCR](https://github.com/vcr/vcr) gem. To contribute without exposing your Auth Token and Core ID, run the specs with your real authentication, and then find-and-replace your Auth Token and Core ID with fake values in the spec and any VCR cassettes.
55
+ Note about the specs. All API requests make use of the [VCR](https://github.com/vcr/vcr) gem. To contribute without exposing your Access Token and Core ID, run the specs with your real authentication, and then find-and-replace your Access Token and Core ID with fake values in the spec and any VCR cassettes.
data/lib/ruby_spark.rb CHANGED
@@ -4,10 +4,10 @@ require 'ruby_spark/version'
4
4
  require 'ruby_spark/core'
5
5
 
6
6
  module RubySpark
7
- class AuthTokenNotDefinedError < StandardError; end
7
+ class ConfigurationError < StandardError; end
8
8
 
9
9
  class << self
10
- attr_accessor :auth_token
10
+ attr_accessor :access_token
11
11
 
12
12
  def configuration
13
13
  yield self
@@ -2,10 +2,11 @@ module RubySpark
2
2
  class Core
3
3
  class ApiError < StandardError; end
4
4
 
5
- def initialize(core_id)
6
- raise RubySpark::AuthTokenNotDefinedError if RubySpark.auth_token.nil?
5
+ def initialize(core_id, access_token = RubySpark.access_token)
6
+ raise RubySpark::ConfigurationError.new("Access Token not defined") if access_token.nil?
7
7
 
8
- @core_id = core_id
8
+ @access_token = access_token
9
+ @core_id = core_id
9
10
  end
10
11
 
11
12
  def digital_write(pin, message)
@@ -57,7 +58,7 @@ module RubySpark
57
58
  response["error"].tap do |error|
58
59
  description = response["error_description"]
59
60
  error.concat(": #{description}") if description
60
- error.concat(": You do not have access to that Core") if error == "Permission Denied"
61
+ error.concat(": Invalid Core ID") if error == "Permission Denied"
61
62
  end
62
63
  end
63
64
 
@@ -66,7 +67,7 @@ module RubySpark
66
67
  end
67
68
 
68
69
  def access_params
69
- {:access_token => RubySpark.auth_token}
70
+ {:access_token => @access_token}
70
71
  end
71
72
  end
72
73
  end
@@ -1,3 +1,3 @@
1
1
  module RubySpark
2
- VERSION = "0.0.1"
2
+ VERSION = "0.0.2"
3
3
  end
@@ -5,7 +5,7 @@ http_interactions:
5
5
  uri: https://api.spark.io/v1/devices/good_core_id/analogread
6
6
  body:
7
7
  encoding: UTF-8
8
- string: access_token=good_auth_token&params=A6
8
+ string: access_token=good_access_token&params=A6
9
9
  headers: {}
10
10
  response:
11
11
  status:
@@ -5,7 +5,7 @@ http_interactions:
5
5
  uri: https://api.spark.io/v1/devices/good_core_id/analogwrite
6
6
  body:
7
7
  encoding: UTF-8
8
- string: access_token=good_auth_token&params=A7%2C130
8
+ string: access_token=good_access_token&params=A7%2C130
9
9
  headers: {}
10
10
  response:
11
11
  status:
@@ -5,7 +5,7 @@ http_interactions:
5
5
  uri: https://api.spark.io/v1/devices/bad_core_id/digitalwrite
6
6
  body:
7
7
  encoding: UTF-8
8
- string: access_token=good_auth_token&params=D7%2CHIGH
8
+ string: access_token=good_access_token&params=D7%2CHIGH
9
9
  headers: {}
10
10
  response:
11
11
  status:
@@ -5,7 +5,7 @@ http_interactions:
5
5
  uri: https://api.spark.io/v1/devices/good_core_id/digitalread
6
6
  body:
7
7
  encoding: UTF-8
8
- string: access_token=good_auth_token&params=D6
8
+ string: access_token=good_access_token&params=D6
9
9
  headers: {}
10
10
  response:
11
11
  status:
@@ -5,7 +5,7 @@ http_interactions:
5
5
  uri: https://api.spark.io/v1/devices/good_core_id/digitalwrite
6
6
  body:
7
7
  encoding: UTF-8
8
- string: access_token=good_auth_token&params=D7%2CHIGH
8
+ string: access_token=good_access_token&params=D7%2CHIGH
9
9
  headers: {}
10
10
  response:
11
11
  status:
@@ -0,0 +1,39 @@
1
+ ---
2
+ http_interactions:
3
+ - request:
4
+ method: post
5
+ uri: https://api.spark.io/v1/devices/good_core_id/digitalread
6
+ body:
7
+ encoding: UTF-8
8
+ string: access_token=good_access_token&params=D6
9
+ headers: {}
10
+ response:
11
+ status:
12
+ code: 400
13
+ message: Bad Request
14
+ headers:
15
+ Access-Control-Allow-Origin:
16
+ - '*'
17
+ Content-Type:
18
+ - application/json; charset=utf-8
19
+ Date:
20
+ - Thu, 26 Dec 2013 17:04:34 GMT
21
+ Server:
22
+ - nginx/1.4.2
23
+ X-Powered-By:
24
+ - Express
25
+ Content-Length:
26
+ - '109'
27
+ Connection:
28
+ - keep-alive
29
+ body:
30
+ encoding: UTF-8
31
+ string: |-
32
+ {
33
+ "code": 400,
34
+ "error": "invalid_grant",
35
+ "error_description": "The access token provided is invalid."
36
+ }
37
+ http_version:
38
+ recorded_at: Thu, 26 Dec 2013 17:04:34 GMT
39
+ recorded_with: VCR 2.5.0
@@ -5,7 +5,7 @@ http_interactions:
5
5
  uri: https://api.spark.io/v1/devices/good_core_id/digitalwrite
6
6
  body:
7
7
  encoding: UTF-8
8
- string: access_token=good_auth_token&params=D7%2CHIGH
8
+ string: access_token=good_access_token&params=D7%2CHIGH
9
9
  headers: {}
10
10
  response:
11
11
  status:
@@ -3,92 +3,125 @@ require 'pry'
3
3
 
4
4
  describe RubySpark::Core do
5
5
 
6
- before { RubySpark.auth_token = "good_auth_token" }
6
+ context "with Access Token set in config variable" do
7
+ before { RubySpark.access_token = "good_access_token" }
8
+ subject { described_class.new("good_core_id") }
9
+
10
+ describe "#digital_write" do
11
+ it "succeeds when Access Token and Core ID are correct" do
12
+ VCR.use_cassette("digital_write") do
13
+ subject.digital_write(7, "HIGH").should == true
14
+ end
15
+ end
7
16
 
8
- subject { described_class.new("good_core_id") }
17
+ it "returns the appropriate error when Access Token is bad" do
18
+ RubySpark.access_token = "bad_token"
9
19
 
10
- describe "#digital_write" do
11
- it "succeeds when Auth Token and Core ID are correct" do
12
- VCR.use_cassette("digital_write") do
13
- subject.digital_write(7, "HIGH").should == true
20
+ VCR.use_cassette("bad_token") do
21
+ expect {
22
+ subject.digital_write(7, "HIGH")
23
+ }.to raise_error(RubySpark::Core::ApiError)
24
+ end
25
+
26
+ VCR.use_cassette("bad_token") do
27
+ begin
28
+ subject.digital_write(7, "HIGH")
29
+ rescue => e
30
+ e.message.should == "invalid_grant: The access token provided is invalid."
31
+ end
32
+ end
14
33
  end
15
- end
16
34
 
17
- it "returns the appropriate error when Auth Token is bad" do
18
- RubySpark.auth_token = "bad_token"
35
+ it "returns the appropriate error when Core ID is bad" do
36
+ subject = described_class.new("bad_core_id")
19
37
 
20
- VCR.use_cassette("bad_token") do
21
- expect {
22
- subject.digital_write(7, "HIGH")
23
- }.to raise_error(RubySpark::Core::ApiError)
24
- end
38
+ VCR.use_cassette("bad_core_id") do
39
+ expect {
40
+ subject.digital_write(7, "HIGH")
41
+ }.to raise_error(RubySpark::Core::ApiError)
42
+ end
25
43
 
26
- VCR.use_cassette("bad_token") do
27
- begin
28
- subject.digital_write(7, "HIGH")
29
- rescue => e
30
- e.message.should == "invalid_grant: The access token provided is invalid."
44
+ VCR.use_cassette("bad_core_id") do
45
+ begin
46
+ subject.digital_write(7, "HIGH")
47
+ rescue => e
48
+ e.message.should == "Permission Denied: Invalid Core ID"
49
+ end
31
50
  end
32
51
  end
33
- end
34
52
 
35
- it "returns the appropriate error when Core ID is bad" do
36
- subject = described_class.new("bad_core_id")
53
+ it "returns the appropriate error when the Spark API times out" do
54
+ VCR.use_cassette("spark_timeout") do
55
+ expect {
56
+ subject.digital_write(7, "HIGH")
57
+ }.to raise_error(RubySpark::Core::ApiError)
58
+ end
37
59
 
38
- VCR.use_cassette("bad_core_id") do
39
- expect {
40
- subject.digital_write(7, "HIGH")
41
- }.to raise_error(RubySpark::Core::ApiError)
60
+ VCR.use_cassette("spark_timeout") do
61
+ begin
62
+ subject.digital_write(7, "HIGH")
63
+ rescue => e
64
+ e.message.should == "Timed out."
65
+ end
66
+ end
42
67
  end
68
+ end
43
69
 
44
- VCR.use_cassette("bad_core_id") do
45
- begin
46
- subject.digital_write(7, "HIGH")
47
- rescue => e
48
- e.message.should == "Permission Denied: You do not have access to that Core"
70
+ describe "#digital_read" do
71
+ it "succeeds when Access Token and Core ID are correct" do
72
+ VCR.use_cassette("digital_read") do
73
+ subject.digital_read(6).should == "HIGH"
49
74
  end
50
75
  end
51
76
  end
52
77
 
53
- it "returns the appropriate error when the Spark API times out" do
54
- VCR.use_cassette("spark_timeout") do
55
- expect {
56
- subject.digital_write(7, "HIGH")
57
- }.to raise_error(RubySpark::Core::ApiError)
78
+ describe "#analog_write" do
79
+ it "succeeds when Access Token and Core ID are correct" do
80
+ VCR.use_cassette("analog_write") do
81
+ subject.analog_write(7, 130).should == true
82
+ end
58
83
  end
84
+ end
59
85
 
60
- VCR.use_cassette("spark_timeout") do
61
- begin
62
- subject.digital_write(7, "HIGH")
63
- rescue => e
64
- e.message.should == "Timed out."
86
+ describe "#analog_read" do
87
+ it "succeeds when Access Token and Core ID are correct" do
88
+ VCR.use_cassette("analog_read") do
89
+ subject.analog_read(6).should == 2399
65
90
  end
66
91
  end
67
92
  end
68
93
  end
69
94
 
70
- describe "#digital_read" do
71
- it "succeeds when Auth Token and Core ID are correct" do
72
- VCR.use_cassette("digital_read") do
73
- subject.digital_read(6).should == "HIGH"
95
+ context "with Access Token passed into Core" do
96
+ subject { described_class.new("good_core_id", "good_access_token") }
97
+
98
+ describe "#digital_read" do
99
+ it "succeeds when Access Token and Core ID are correct" do
100
+ VCR.use_cassette("digital_write") do
101
+ subject.digital_write(7, "HIGH").should == true
102
+ end
74
103
  end
75
104
  end
76
105
  end
77
106
 
78
- describe "#analog_write" do
79
- it "succeeds when Auth Token and Core ID are correct" do
80
- VCR.use_cassette("analog_write") do
81
- subject.analog_write(7, 130).should == true
107
+ context "with no Access Token defined" do
108
+ before { RubySpark.access_token = nil }
109
+ subject { described_class.new("good_core_id") }
110
+
111
+ it "returns proper error if Access Token is not defined" do
112
+ VCR.use_cassette("no_token") do
113
+ expect {
114
+ subject.digital_read(6)
115
+ }.to raise_error(RubySpark::ConfigurationError)
82
116
  end
83
- end
84
- end
85
117
 
86
- describe "#analog_read" do
87
- it "succeeds when Auth Token and Core ID are correct" do
88
- VCR.use_cassette("analog_read") do
89
- subject.analog_read(6).should == 2399
118
+ VCR.use_cassette("no_token") do
119
+ begin
120
+ subject.digital_read(6)
121
+ rescue => e
122
+ e.message.should == "Access Token not defined"
123
+ end
90
124
  end
91
125
  end
92
126
  end
93
-
94
127
  end
data/spec/spec_helper.rb CHANGED
@@ -5,7 +5,7 @@ require 'vcr'
5
5
  require 'ruby_spark' # and any other gems you need
6
6
 
7
7
  VCR.configure do |c|
8
- c.cassette_library_dir = 'fixtures/vcr_cassettes'
8
+ c.cassette_library_dir = 'spec/fixtures/vcr_cassettes'
9
9
  c.hook_into :webmock
10
10
  c.default_cassette_options = {
11
11
  :record => :once,
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: ruby_spark
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.0.1
4
+ version: 0.0.2
5
5
  platform: ruby
6
6
  authors:
7
7
  - Eli Fatsi
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2013-12-25 00:00:00.000000000 Z
11
+ date: 2013-12-26 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: bundler
@@ -120,17 +120,18 @@ files:
120
120
  - LICENSE.txt
121
121
  - README.md
122
122
  - Rakefile
123
- - fixtures/vcr_cassettes/analog_read.yml
124
- - fixtures/vcr_cassettes/analog_write.yml
125
- - fixtures/vcr_cassettes/bad_core_id.yml
126
- - fixtures/vcr_cassettes/bad_token.yml
127
- - fixtures/vcr_cassettes/digital_read.yml
128
- - fixtures/vcr_cassettes/digital_write.yml
129
- - fixtures/vcr_cassettes/spark_timeout.yml
130
123
  - lib/ruby_spark.rb
131
124
  - lib/ruby_spark/core.rb
132
125
  - lib/ruby_spark/version.rb
133
126
  - ruby_spark.gemspec
127
+ - spec/fixtures/vcr_cassettes/analog_read.yml
128
+ - spec/fixtures/vcr_cassettes/analog_write.yml
129
+ - spec/fixtures/vcr_cassettes/bad_core_id.yml
130
+ - spec/fixtures/vcr_cassettes/bad_token.yml
131
+ - spec/fixtures/vcr_cassettes/digital_read.yml
132
+ - spec/fixtures/vcr_cassettes/digital_write.yml
133
+ - spec/fixtures/vcr_cassettes/no_token.yml
134
+ - spec/fixtures/vcr_cassettes/spark_timeout.yml
134
135
  - spec/ruby_spark/core_spec.rb
135
136
  - spec/spec_helper.rb
136
137
  homepage: http://github.com/efatsi/ruby_spark
@@ -158,5 +159,13 @@ signing_key:
158
159
  specification_version: 4
159
160
  summary: Ruby Gem to make API calls to the Spark Cloud
160
161
  test_files:
162
+ - spec/fixtures/vcr_cassettes/analog_read.yml
163
+ - spec/fixtures/vcr_cassettes/analog_write.yml
164
+ - spec/fixtures/vcr_cassettes/bad_core_id.yml
165
+ - spec/fixtures/vcr_cassettes/bad_token.yml
166
+ - spec/fixtures/vcr_cassettes/digital_read.yml
167
+ - spec/fixtures/vcr_cassettes/digital_write.yml
168
+ - spec/fixtures/vcr_cassettes/no_token.yml
169
+ - spec/fixtures/vcr_cassettes/spark_timeout.yml
161
170
  - spec/ruby_spark/core_spec.rb
162
171
  - spec/spec_helper.rb