rest-core 2.1.2 → 3.0.0

Files changed (58)
  1. checksums.yaml +4 -4
  2. data/.gitignore +1 -2
  3. data/.travis.yml +3 -5
  4. data/CHANGES.md +65 -5
  5. data/Gemfile +10 -5
  6. data/NOTE.md +1 -1
  7. data/README.md +194 -128
  8. data/Rakefile +8 -34
  9. data/TODO.md +3 -2
  10. data/example/simple.rb +6 -4
  11. data/example/use-cases.rb +39 -122
  12. data/lib/rest-core.rb +14 -5
  13. data/lib/rest-core/builder.rb +12 -2
  14. data/lib/rest-core/client.rb +31 -25
  15. data/lib/rest-core/engine.rb +39 -0
  16. data/lib/rest-core/engine/http-client.rb +41 -0
  17. data/lib/rest-core/engine/net-http-persistent.rb +21 -0
  18. data/lib/rest-core/engine/rest-client.rb +13 -42
  19. data/lib/rest-core/event_source.rb +91 -0
  20. data/lib/rest-core/middleware.rb +17 -11
  21. data/lib/rest-core/middleware/error_detector.rb +1 -6
  22. data/lib/rest-core/middleware/oauth1_header.rb +1 -0
  23. data/lib/rest-core/middleware/oauth2_header.rb +20 -8
  24. data/lib/rest-core/middleware/oauth2_query.rb +1 -0
  25. data/lib/rest-core/middleware/timeout.rb +5 -19
  26. data/lib/rest-core/promise.rb +137 -0
  27. data/lib/rest-core/test.rb +2 -43
  28. data/lib/rest-core/thread_pool.rb +122 -0
  29. data/lib/rest-core/timer.rb +30 -0
  30. data/lib/rest-core/util/hmac.rb +0 -8
  31. data/lib/rest-core/version.rb +1 -1
  32. data/lib/rest-core/wrapper.rb +1 -1
  33. data/rest-core.gemspec +36 -25
  34. data/task/README.md +54 -0
  35. data/task/gemgem.rb +150 -156
  36. data/test/test_builder.rb +2 -2
  37. data/test/test_cache.rb +8 -8
  38. data/test/test_client.rb +16 -6
  39. data/test/test_client_oauth1.rb +1 -1
  40. data/test/test_event_source.rb +77 -0
  41. data/test/test_follow_redirect.rb +1 -1
  42. data/test/test_future.rb +16 -0
  43. data/test/test_oauth2_header.rb +28 -0
  44. data/test/test_promise.rb +89 -0
  45. data/test/test_rest-client.rb +21 -0
  46. data/test/test_thread_pool.rb +10 -0
  47. data/test/test_timeout.rb +13 -8
  48. metadata +61 -37
  49. data/example/multi.rb +0 -44
  50. data/lib/rest-core/engine/auto.rb +0 -25
  51. data/lib/rest-core/engine/em-http-request.rb +0 -90
  52. data/lib/rest-core/engine/future/future.rb +0 -107
  53. data/lib/rest-core/engine/future/future_fiber.rb +0 -32
  54. data/lib/rest-core/engine/future/future_thread.rb +0 -29
  55. data/lib/rest-core/middleware/timeout/timer_em.rb +0 -26
  56. data/lib/rest-core/middleware/timeout/timer_thread.rb +0 -36
  57. data/task/.gitignore +0 -1
  58. data/test/test_em-http-request.rb +0 -186
data/Rakefile CHANGED
@@ -1,41 +1,15 @@
-# encoding: utf-8
 
 begin
   require "#{dir = File.dirname(__FILE__)}/task/gemgem"
 rescue LoadError
-  sh "git submodule update --init"
-  exec Gem.ruby, "-S", "rake", *ARGV
+  sh 'git submodule update --init'
+  exec Gem.ruby, '-S', $PROGRAM_NAME, *ARGV
 end
 
-Gemgem.dir = dir
-($LOAD_PATH << File.expand_path("#{Gemgem.dir}/lib" )).uniq!
-
-desc 'Generate gemspec'
-task 'gem:spec' do
-  Gemgem.spec = Gemgem.create do |s|
-    require 'rest-core/version'
-    s.name     = 'rest-core'
-    s.version  = RestCore::VERSION
-    s.homepage = 'https://github.com/cardinalblue/rest-core'
-
-    %w[rest-client].each{ |g| s.add_runtime_dependency(g) }
-
-    s.authors  = ['Cardinal Blue', 'Lin Jen-Shin (godfat)']
-    s.email    = ['dev (XD) cardinalblue.com']
-
-    s.post_install_message = <<-MARKDOWN
-      # [rest-core] Since 2.1.0, Incompatible changes for POST requests:
-
-      * We no longer support Rails-like POST payload, like translating
-        `{:foo => [1, 2]}` to `'foo[]=1&foo[]=2'`. It would now be translated to
-        `'foo=1&foo=2'`. If you like `'foo[]'` as the key, simply pass it as
-        `{'foo[]' => [1, 2]}`.
-
-      * This also applies to nested hashes like `{:foo => {:bar => 1}}`. If you
-        want that behaviour, just pass `{'foo[bar]' => 1}` which would then be
-        translated to `'foo[bar]=1'`.
-    MARKDOWN
-  end
-
-  Gemgem.write
+Gemgem.init(dir) do |s|
+  require 'rest-core/version'
+  s.name     = 'rest-core'
+  s.version  = RestCore::VERSION
+  s.homepage = 'https://github.com/godfat/rest-core'
+  %w[httpclient mime-types timers].each{ |g| s.add_runtime_dependency(g) }
 end
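The post-install message removed above still describes how 3.x encodes POST payloads: Rails-style expansion of arrays and nested hashes is gone. A minimal sketch of what that means in practice (the endpoint is a placeholder, not taken from the diff):

```ruby
require 'rest-core'

client = RC::Universal.new(:json_response => true)

# Since 2.1.0, array values are encoded by repeating the key:
#   {:foo => [1, 2]}    is sent as   foo=1&foo=2
# Spell out the bracket form yourself if you want it:
#   {'foo[]' => [1, 2]} is sent as   foo[]=1&foo[]=2
# Nested hashes are not expanded either; flatten them by hand:
#   {'foo[bar]' => 1}   is sent as   foo[bar]=1
client.post('http://example.com/api', {'foo[]' => [1, 2]})
```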
data/TODO.md CHANGED
@@ -1,8 +1,6 @@
 # TODO
 
 * middleware revisit (how to initialize?)
-* streaming?
-* connection pool?
 * X-Method-Override
 
 # BUG
@@ -15,3 +13,6 @@
 * headers and payload logs for CommonLogger
 
 # rest-request
+
+* fix DRY by defining `prepare :: env -> env`
+* FAIL and LOG need to be reimplemented as well.
data/example/simple.rb CHANGED
@@ -8,21 +8,23 @@ YourClient = RC::Builder.client do
   use RC::Cache , nil, 3600
 end
 
+YourClient.pool_size = 5
+
 client = YourClient.new(:cache => {})
-p client.get('cardinalblue') # cache miss
+p client.get('godfat') # cache miss
 puts
-p client.get('cardinalblue') # cache hit
+p client.get('godfat') # cache hit
 
 client.cache = false
 
 puts "concurrent requests"
-a = [client.get('cardinalblue'), client.get('godfat')]
+a = [client.get('godfat'), client.get('cardinalblue')]
 puts "It's not blocking... but doing concurrent requests underneath"
 p a.map{ |r| r['name'] } # here we want the values, so it blocks here
 puts "DONE"
 
 puts "callback"
-client.get('cardinalblue'){ |v| p v }
+client.get('godfat'){ |v| p v }
 puts "It's not blocking... but doing concurrent requests underneath"
 client.wait # we block here to wait for the request done
 puts "DONE"
data/example/use-cases.rb CHANGED
@@ -1,11 +1,17 @@
 
-require 'fiber'
-require 'em-http-request'
 require 'rest-core'
 RC.eagerload
 
+RC::Universal.pool_size = 0 # default to no thread pool
+
 def def_use_case name, &block
-  singleton_class.send(:define_method, "#{name}_", &block)
+  singleton_class.send(:define_method, "#{name}_") do
+    begin
+      yield
+    rescue => e
+      q "Encountering: #{e}"
+    end
+  end
   singleton_class.send(:define_method, name) do
     @count ||= 0
     printf "Use case #%02d: %s\n", @count+=1, name
@@ -30,29 +36,37 @@ end
 
 def_use_case 'pure_ruby_single_request' do
   q RC::Universal.new(:json_response => true).
-    get('https://api.github.com/users/godfat')['name']
+    get('https://graph.facebook.com/4')['name']
 end
 
 def_use_case 'pure_ruby_concurrent_requests' do
   client = RC::Universal.new(:json_response => true,
-                             :site => 'https://api.github.com/users/')
-  q [client.get('godfat'), client.get('cardinalblue')].map{ |u| u['name'] }
+                             :site => 'https://graph.facebook.com/')
+  q [client.get('4'), client.get('5')].map{ |u| u['name'] }
 end
 
 def_use_case 'pure_ruby_cache_requests' do
   client = RC::Universal.new(:json_response => true, :cache => {})
-  3.times{ q client.get('https://api.github.com/users/godfat')['name'] }
+  3.times{ q client.get('https://graph.facebook.com/4')['name'] }
 end
 
 def_use_case 'pure_ruby_callback_requests' do
   m = Mutex.new
   RC::Universal.new(:json_response => true ,
-                    :site => 'https://api.github.com/users/' ,
+                    :site => 'https://graph.facebook.com/' ,
                     :log_method => lambda{|str| m.synchronize{puts(str)}}).
-    get('godfat'){ |res|
+    get('4'){ |res|
+      if res.kind_of?(Exception)
+        p "Encountering: #{res}"
+        next
+      end
      q res['name'], m
    }.
-    get('cardinalblue'){ |res|
+    get('5'){ |res|
+      if res.kind_of?(Exception)
+        p "Encountering: #{res}"
+        next
+      end
      q res['name'], m
    }.wait
 end
@@ -60,30 +74,26 @@ end
 def_use_case 'pure_ruby_nested_concurrent_requests' do
   m = Mutex.new
   c = RC::Universal.new(:json_response => true ,
-                        :site => 'https://api.github.com' ,
+                        :site => 'https://graph.facebook.com/' ,
                         :log_method => lambda{|str| m.synchronize{puts(str)}})
 
-  %w[rubytaiwan godfat].each{ |user|
-    c.get("/users/#{user}/repos", :per_page => 100){ |repos|
-      rs = repos.reject{ |r| r['fork'] }
-      rs = [{}] if rs.size == 1 # out of API limit :(
-      most_watched = rs.max_by{ |r| r['watchers'] }['name']
-      most_size    = rs.max_by{ |r| r['size'] }['name']
-
-      watch_contri = c.get("/repos/#{user}/#{most_watched}/contributors")
-      size_contri  = c.get("/repos/#{user}/#{most_size}/contributors")
+  %w[4 5].each{ |user|
+    c.get(user, :fields => 'cover'){ |data|
+      if data.kind_of?(Exception)
+        q "Encountering: #{data}", m
+        next
+      end
 
-      watch_contri = [{}] if watch_contri.size == 1 # out of API limit :(
-      size_contri  = [{}] if size_contri.size == 1 # out of API limit :(
+      cover = data['cover']
+      comments = c.get("#{cover['id']}/comments")
+      likes = c.get("#{cover['id']}/likes")
+      most_liked_comment = comments['data'].max_by{|d|d['like_count']}
 
-      most_watched_most_contri = watch_contri.max_by{ |c| c['contributions'] }
-      most_size_most_contri    = size_contri.max_by{ |c| c['contributions'] }
+      q "Author of most liked comment from #{user}'s cover photo:", m
+      q most_liked_comment['from']['name'], m
 
-      q "Most contributed user for most watched: #{user}/#{most_watched}:", m
-      q most_watched_most_contri['login'], m
-
-      q "Most contributed user for most size   : #{user}/#{most_size}:", m
-      q most_size_most_contri['login'], m
+      y = !!likes['data'].find{|d|d['id'] == most_liked_comment['from']['id']}
+      q "Did the user also like the cover?: #{y}", m
     }
   }
 
@@ -92,72 +102,6 @@ end
 
 # ----------------------------------------------------------------------
 
-def_use_case 'eventmachine_fiber_single_request' do
-  EM.run{ Fiber.new{ pure_ruby_single_request_ ; EM.stop }.resume}
-end
-
-def_use_case 'eventmachine_fiber_concurrent_requests' do
-  EM.run{ Fiber.new{ pure_ruby_concurrent_requests_ ; EM.stop }.resume}
-end
-
-def_use_case 'eventmachine_fiber_cache_requests' do
-  EM.run{ Fiber.new{ pure_ruby_cache_requests_ ; EM.stop }.resume}
-end
-
-def_use_case 'eventmachine_fiber_callback_requests' do
-  EM.run{ Fiber.new{ pure_ruby_callback_requests_ ; EM.stop }.resume}
-end
-
-def_use_case 'eventmachine_fiber_nested_concurrent_requests' do
-  EM.run{ Fiber.new{ pure_ruby_nested_concurrent_requests_; EM.stop }.resume}
-end
-
-# ----------------------------------------------------------------------
-
-def_use_case 'eventmachine_thread_single_request' do
-  EM.run{ Thread.new{ pure_ruby_single_request_ ; EM.stop } }
-end
-
-def_use_case 'eventmachine_thread_concurrent_requests' do
-  EM.run{ Thread.new{ pure_ruby_concurrent_requests_ ; EM.stop } }
-end
-
-def_use_case 'eventmachine_thread_cache_requests' do
-  EM.run{ Thread.new{ pure_ruby_cache_requests_ ; EM.stop } }
-end
-
-def_use_case 'eventmachine_thread_callback_requests' do
-  EM.run{ Thread.new{ pure_ruby_callback_requests_ ; EM.stop } }
-end
-
-def_use_case 'eventmachine_thread_nested_concurrent_requests' do
-  EM.run{ Thread.new{ pure_ruby_nested_concurrent_requests_; EM.stop } }
-end
-
-# ----------------------------------------------------------------------
-
-def_use_case 'eventmachine_rest_client_single_request' do
-  EM.run{ pure_ruby_single_request_ ; EM.stop }
-end
-
-def_use_case 'eventmachine_rest_client_concurrent_requests' do
-  EM.run{ pure_ruby_concurrent_requests_ ; EM.stop }
-end
-
-def_use_case 'eventmachine_rest_client_cache_requests' do
-  EM.run{ pure_ruby_cache_requests_ ; EM.stop }
-end
-
-def_use_case 'eventmachine_rest_client_callback_requests' do
-  EM.run{ pure_ruby_callback_requests_ ; EM.stop }
-end
-
-def_use_case 'eventmachine_rest_client_nested_concurrent_requests' do
-  EM.run{ pure_ruby_nested_concurrent_requests_; EM.stop }
-end
-
-# ----------------------------------------------------------------------
-
 def_use_case 'pure_ruby' do
   pure_ruby_single_request
   pure_ruby_concurrent_requests
@@ -166,33 +110,6 @@ def_use_case 'pure_ruby' do
   pure_ruby_nested_concurrent_requests
 end
 
-def_use_case 'eventmachine_fiber' do
-  eventmachine_fiber_single_request
-  eventmachine_fiber_concurrent_requests
-  eventmachine_fiber_cache_requests
-  eventmachine_fiber_callback_requests
-  eventmachine_fiber_nested_concurrent_requests
-end
-
-def_use_case 'eventmachine_thread' do
-  eventmachine_thread_single_request
-  eventmachine_thread_concurrent_requests
-  eventmachine_thread_cache_requests
-  eventmachine_thread_callback_requests
-  eventmachine_thread_nested_concurrent_requests
-end
-
-def_use_case 'eventmachine_rest_client' do
-  eventmachine_rest_client_single_request
-  eventmachine_rest_client_concurrent_requests
-  eventmachine_rest_client_cache_requests
-  eventmachine_rest_client_callback_requests
-  eventmachine_rest_client_nested_concurrent_requests
-end
-
 # ----------------------------------------------------------------------
 
 pure_ruby
-eventmachine_fiber
-eventmachine_thread
-eventmachine_rest_client
data/lib/rest-core.rb CHANGED
@@ -9,16 +9,17 @@ module RestCore
   RESPONSE_BODY    = 'RESPONSE_BODY'
   RESPONSE_STATUS  = 'RESPONSE_STATUS'
   RESPONSE_HEADERS = 'RESPONSE_HEADERS'
+  RESPONSE_SOCKET  = 'RESPONSE_SOCKET'
 
   DRY  = 'core.dry'
   FAIL = 'core.fail'
   LOG  = 'core.log'
+  CLIENT = 'core.client'
 
   ASYNC = 'async.callback'
   TIMER = 'async.timer'
-  FUTURE = 'async.future'
-
-  RootFiber = Fiber.respond_to?(:current) && Fiber.current
+  PROMISE = 'async.promise'
+  HIJACK  = 'async.hijack'
 
   # core utilities
   autoload :Builder       , 'rest-core/builder'
@@ -27,6 +28,9 @@ module RestCore
   autoload :Event         , 'rest-core/event'
   autoload :Middleware    , 'rest-core/middleware'
   autoload :Wrapper       , 'rest-core/wrapper'
+  autoload :Promise       , 'rest-core/promise'
+  autoload :ThreadPool    , 'rest-core/thread_pool'
+  autoload :EventSource   , 'rest-core/event_source'
 
   # oauth1 utilities
   autoload :ClientOauth1  , 'rest-core/client_oauth1'
@@ -59,10 +63,10 @@ module RestCore
   autoload :Timeout       , 'rest-core/middleware/timeout'
 
   # engines
-  autoload :Auto          , 'rest-core/engine/auto'
   autoload :Dry           , 'rest-core/engine/dry'
+  autoload :HttpClient    , 'rest-core/engine/http-client'
   autoload :RestClient    , 'rest-core/engine/rest-client'
-  autoload :EmHttpRequest , 'rest-core/engine/em-http-request'
+  autoload :NetHttpPersistent, 'rest-core/engine/net-http-persistent'
 
   # clients
   autoload :Simple        , 'rest-core/client/simple'
@@ -83,6 +87,11 @@ module RestCore
       eagerload(c, loaded) if c.respond_to?(:constants) && !loaded[n]
     }
   end
+
+  # identity function
+  def self.id
+    @id ||= lambda{ |a| a }
+  end
 end
 
 RC = RestCore unless Object.const_defined?(:RC)
data/lib/rest-core/builder.rb CHANGED
@@ -7,7 +7,7 @@ class RestCore::Builder
   include Wrapper
 
   def self.default_engine
-    @default_engine ||= RestCore::Auto
+    @default_engine ||= RestCore::HttpClient
   end
 
   def self.client *attrs, &block
@@ -24,12 +24,22 @@
     client = Class.new(struct)
     client.const_set('Struct', struct)
     client.send(:include, Client)
-    class << client; attr_reader :builder; end
+    class << client
+      attr_reader :builder
+      attr_accessor :pool_size, :pool_idle_time
+
+      def thread_pool
+        RestCore::ThreadPool[self]
+      end
+    end
     client.instance_variable_set(:@builder, self)
+    client.instance_variable_set(:@pool_size, 0) # default to no pool
+    client.instance_variable_set(:@pool_idle_time, 60) # default to 60 seconds
     client
   end
 
   def initialize &block
+    @engine = nil
     @middles ||= []
     instance_eval(&block) if block_given?
   end
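The `pool_size` / `pool_idle_time` accessors added above are what `YourClient.pool_size = 5` in example/simple.rb hooks into: each generated client class owns an optional `RestCore::ThreadPool`, and `pool_size` defaults to 0 (no pool). A rough sketch of tuning it, assuming the accessors behave as declared here; the numbers are arbitrary:

```ruby
require 'rest-core'

Client = RC::Builder.client do
  use RC::Cache, nil, 3600            # same middleware as example/simple.rb
end

Client.pool_size      = 10            # at most 10 worker threads (0 = no pool)
Client.pool_idle_time = 30            # assumed: idle workers are kept 30 seconds
p Client.thread_pool                  # the RestCore::ThreadPool keyed by this class

client = Client.new(:cache => {})
p client.get('https://graph.facebook.com/4')  # endpoint borrowed from use-cases.rb
client.wait                                   # drain outstanding promises
```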
data/lib/rest-core/client.rb CHANGED
@@ -1,7 +1,6 @@
 
-require 'rest-core'
-
 require 'weakref'
+require 'rest-core'
 
 module RestCore::Client
   include RestCore
@@ -43,12 +42,13 @@ module RestCore::Client
     mod.send(:include, accessor)
   end
 
-  attr_reader :app, :dry, :futures
+  attr_reader :app, :dry, :promises
   def initialize o={}
     @app ||= self.class.builder.to_app # lighten! would reinitialize anyway
     @dry ||= self.class.builder.to_app(Dry)
-    @futures = [] # don't record any futures in lighten!
-    @mutex = nil # for locking futures, lazily initialized for serialization
+    @promises = [] # don't record any promises in lighten!
+    @mutex = nil # for locking promises, lazily initialized
+                 # for serialization
     o.each{ |key, value| send("#{key}=", value) if respond_to?("#{key}=") }
   end
 
@@ -85,13 +85,13 @@ module RestCore::Client
   end
 
   def wait
-    return self if futures.empty?
-    current_futures = nil
+    return self if promises.empty?
+    current_promises = nil
     mutex.synchronize{
-      current_futures = futures.dup
-      futures.clear
+      current_promises = promises.dup
+      promises.clear
     }
-    current_futures.each{ |f|
+    current_promises.each{ |f|
       begin
         f.wait
       rescue WeakRef::RefError # it's gc'ed after we think it's alive
@@ -111,14 +111,14 @@ module RestCore::Client
     request(
       {REQUEST_METHOD  => :get    ,
        REQUEST_PATH    => path    ,
-       REQUEST_QUERY   => query   }.merge(opts), &cb)
+       REQUEST_QUERY   => query   }.merge(opts), response_key(opts), &cb)
   end
 
   def delete path, query={}, opts={}, &cb
     request(
       {REQUEST_METHOD  => :delete,
        REQUEST_PATH    => path   ,
-       REQUEST_QUERY   => query  }.merge(opts), &cb)
+       REQUEST_QUERY   => query  }.merge(opts), response_key(opts), &cb)
   end
 
   def head path, query={}, opts={}, &cb
@@ -132,7 +132,7 @@ module RestCore::Client
     request(
       {REQUEST_METHOD  => :options,
        REQUEST_PATH    => path    ,
-       REQUEST_QUERY   => query   }.merge(opts), &cb)
+       REQUEST_QUERY   => query   }.merge(opts), RESPONSE_HEADERS, &cb)
   end
 
   def post path, payload={}, query={}, opts={}, &cb
@@ -140,7 +140,7 @@ module RestCore::Client
       {REQUEST_METHOD  => :post   ,
        REQUEST_PATH    => path    ,
        REQUEST_QUERY   => query   ,
-       REQUEST_PAYLOAD => payload}.merge(opts), &cb)
+       REQUEST_PAYLOAD => payload}.merge(opts), response_key(opts), &cb)
   end
 
   def put path, payload={}, query={}, opts={}, &cb
@@ -148,7 +148,7 @@ module RestCore::Client
       {REQUEST_METHOD  => :put    ,
        REQUEST_PATH    => path    ,
        REQUEST_QUERY   => query   ,
-       REQUEST_PAYLOAD => payload}.merge(opts), &cb)
+       REQUEST_PAYLOAD => payload}.merge(opts), response_key(opts), &cb)
   end
 
   def patch path, payload={}, query={}, opts={}, &cb
@@ -156,14 +156,16 @@ module RestCore::Client
       {REQUEST_METHOD  => :patch  ,
        REQUEST_PATH    => path    ,
        REQUEST_QUERY   => query   ,
-       REQUEST_PAYLOAD => payload}.merge(opts), &cb)
+       REQUEST_PAYLOAD => payload}.merge(opts), response_key(opts), &cb)
+  end
+
+  def event_source path, query={}, opts={}
+    EventSource.new(self, path, query, opts)
   end
 
   def request env, key=RESPONSE_BODY, app=app
     if block_given?
-      request_full(env, app){ |response|
-        yield(response[key])
-      }
+      request_full(env, app){ |response| yield(response[key]) }
     else
       request_full(env, app)[key]
     end
@@ -171,14 +173,13 @@ module RestCore::Client
   end
   def request_full env, app=app, &k
     response = app.call(build_env({ASYNC => !!k}.merge(env)),
-                        &(k || Middleware.id))
+                        &(k || RC.id))
 
     # under ASYNC callback, response might not be a response hash
     # in that case (maybe in a user created engine), Client#wait
-    # won't work because we have no way to track the future.
-    if response.kind_of?(Hash) && RestCore.const_defined?(:Future) &&
-       response[FUTURE].kind_of?(Future)
-      mutex.synchronize{ futures << WeakRef.new(response[FUTURE]) }
+    # won't work because we have no way to track the promise.
+    if response.kind_of?(Hash) && response[PROMISE].kind_of?(Promise)
+      mutex.synchronize{ promises << WeakRef.new(response[PROMISE]) }
     end
 
     if block_given?
@@ -200,7 +201,8 @@ module RestCore::Client
       REQUEST_PAYLOAD => {}  ,
       REQUEST_HEADERS => {}  ,
      FAIL            => []  ,
-     LOG             => []  }
+     LOG             => []  ,
+     CLIENT          => self}
   end
   # ------------------------ instance ---------------------
 
@@ -211,6 +213,10 @@ module RestCore::Client
     @mutex ||= Mutex.new
   end
 
+  def response_key opts
+    if opts[HIJACK] then RESPONSE_SOCKET else RESPONSE_BODY end
+  end
+
  def lighten_hash hash
    Hash[hash.map{ |(key, value)|
      case value
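The new `event_source` helper above simply wraps `EventSource.new(self, path, query, opts)`; the EventSource class itself (data/lib/rest-core/event_source.rb, +91 lines) is not shown in this excerpt. A hedged sketch of how it might be consumed, assuming an onopen/onmessage/onerror/start callback API for EventSource and using a placeholder host:

```ruby
# Assumption: EventSource exposes onopen/onmessage/onerror and #start.
# None of these names come from this diff; check event_source.rb itself.
require 'rest-core'

client = RC::Universal.new(:site => 'https://example.com/')

es = client.event_source('stream', :foo => 'bar')   # path and query as in Client#get
es.onopen   { |sock|              puts "connected"              }
es.onmessage{ |event, data, sock| puts "#{event}: #{data}"      }
es.onerror  { |error, sock|       puts "Encountering: #{error}" }
es.start
```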