berkshelf-api 0.2.0 → 1.0.0

This diff shows the content changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: 213f69bfd58c7200d8d779788f6774828087cf1b
4
- data.tar.gz: 8ebeaff3fae7fca72a3e5d1f6a88690c80beb7fd
3
+ metadata.gz: f238c1aec4a689f0461aaea5792d0588501e4d8c
4
+ data.tar.gz: f99f8dac7ac5aa4133ac4bef0420e74d128cd01f
5
5
  SHA512:
6
- metadata.gz: 7a30703cd2c06d4b43c6e0bf80c4846d74120e13fa105ee5d7573c6818aac8d1bc1d8d71ea9c39e85fe7f48a50955989d13c4131fb9b9d945d1d27f6b1474133
7
- data.tar.gz: d3cd8e1029ddd4a73291e50feb24c3d635195a24f0531445250b73eb6a7b45a699dfc00a2f2863c6ba0a49028d95ae65c118dc3e15e0e95300b969a9e4325865
6
+ metadata.gz: 0910bcf676f10ea3f0c7061a685400ee232350b39fe08866ed89558e6d3dc649cb6a6b0288041a59924c010825a23510fada8adc043466a37b56374b74ad1543
7
+ data.tar.gz: de9395b533f039629f2eef14bd583de33839afac67e225844616c5cdc22a6d3038b3852839e2f110b4f19e2e227ec946284d58bfc0f9dfd04792a645b966d1aa
data/CHANGELOG.md ADDED
@@ -0,0 +1,4 @@
1
+ # 1.0.0
2
+
3
+ * Prioritize Cache Builders so their entries do not clobber one another
4
+ * The entire config file is checksummed so that when it changes, the cache is rebuilt
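Both changelog entries map to code further down in this diff: per-endpoint priorities carried by `RemoteCookbook` and used in the priority-aware `CacheManager#diff`, and `Config#endpoints_checksum` feeding the cache invalidation in `DependencyCache#initialize`. A rough, self-contained sketch of the first point, using stand-in classes rather than the library's own:

```ruby
# Stand-ins only -- the real classes are RemoteCookbook and CacheManager#diff below.
Cookbook = Struct.new(:name, :version, :priority) do
  def hash; "#{name}|#{version}".hash; end   # equality ignores priority, as in RemoteCookbook
  def eql?(other); hash == other.hash; end
end

def diff(cached, reported, worker_priority)
  known   = cached.select { |c| c.priority <= worker_priority }
  created = reported - known
  deleted = (known - reported).select { |c| c.priority == worker_priority }
  [created, deleted]
end

cached   = [Cookbook.new("ruby", "1.0.0", 0)]   # already indexed by endpoint 0
reported = [Cookbook.new("ruby", "1.0.0", 1)]   # endpoint 1 reports the same version
created, deleted = diff(cached, reported, 1)
created # => []  (the version is already known from a higher-priority endpoint)
deleted # => []  (a worker may only delete entries it owns)
```

Deletions are restricted to entries at the worker's own priority, so a low-priority endpoint can never evict what a higher-priority one indexed.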
data/README.md CHANGED
@@ -2,6 +2,7 @@
2
2
  [![Gem Version](https://badge.fury.io/rb/berkshelf-api.png)](http://badge.fury.io/rb/berkshelf-api)
3
3
  [![Build Status](https://secure.travis-ci.org/RiotGames/berkshelf-api.png?branch=master)](http://travis-ci.org/RiotGames/berkshelf-api)
4
4
  [![Dependency Status](https://gemnasium.com/RiotGames/berkshelf-api.png?travis)](https://gemnasium.com/RiotGames/berkshelf-api)
5
+ [![Code Climate](https://codeclimate.com/github/RiotGames/berkshelf-api.png)](https://codeclimate.com/github/RiotGames/berkshelf-api)
5
6
 
6
7
  A server which indexes cookbooks from various sources and hosts them over a REST API
7
8
 
@@ -23,35 +24,39 @@ Ruby 1.9.1 and 1.9.2 are not officially supported. If you encounter problems, pl
23
24
 
24
25
  ## Configuring Endpoints
25
26
 
26
- Which endpoints to index can be configured by editing the JSON configuration file (by default at: `~/.berkshelf/api-server/config.json`).
27
+ You may configure the endpoints to index by editing the JSON configuration file (default: `~/.berkshelf/api-server/config.json`).
27
28
 
28
29
  ### Opscode Community Site
29
30
 
30
- {
31
- "endpoints": [
32
- {
33
- type: "opscode",
34
- options: {
35
- url: 'http://cookbooks.opscode.com/api/v1'
36
- }
31
+ ```json
32
+ {
33
+ "endpoints": [
34
+ {
35
+ "type": "opscode",
36
+ "options": {
37
+ "url": "http://cookbooks.opscode.com/api/v1"
37
38
  }
38
- ]
39
- }
39
+ }
40
+ ]
41
+ }
42
+ ```
40
43
 
41
44
  ### Chef Server
42
45
 
43
- {
44
- "endpoints": [
45
- {
46
- "type": "chef_server",
47
- "options": {
48
- "url": "https://api.opscode.com/organizations/vialstudios",
49
- "client_name": "berkshelf",
50
- "client_key": "/etc/berkshelf/api-server/client.pem"
51
- }
46
+ ```json
47
+ {
48
+ "endpoints": [
49
+ {
50
+ "type": "chef_server",
51
+ "options": {
52
+ "url": "https://api.opscode.com/organizations/vialstudios",
53
+ "client_name": "berkshelf",
54
+ "client_key": "/etc/berkshelf/api-server/client.pem"
52
55
  }
53
- ]
54
- }
56
+ }
57
+ ]
58
+ }
59
+ ```
55
60
 
56
61
  ## Contributing
57
62
 
@@ -19,9 +19,9 @@ Gem::Specification.new do |spec|
19
19
  spec.require_paths = ["lib"]
20
20
  spec.required_ruby_version = ">= 1.9.3"
21
21
 
22
- spec.add_dependency 'ridley', '~> 1.3.0'
23
- spec.add_dependency 'celluloid', '~> 0.14.0'
24
- spec.add_dependency 'reel', '= 0.4.0.pre'
22
+ spec.add_dependency 'ridley', '~> 1.6'
23
+ spec.add_dependency 'celluloid', '~> 0.15'
24
+ spec.add_dependency 'reel', '>= 0.4.0'
25
25
  spec.add_dependency 'grape', '~> 0.5.0'
26
26
  spec.add_dependency 'hashie', '>= 2.0.4'
27
27
  spec.add_dependency 'faraday'
@@ -116,7 +116,10 @@ module Berkshelf::API
116
116
  loop do
117
117
  supervisor = run!(options)
118
118
 
119
- sleep 0.1 while supervisor.alive?
119
+ while supervisor.alive?
120
+ sleep 0.1
121
+ instance.terminate if @shutdown
122
+ end
120
123
 
121
124
  break if @shutdown
122
125
 
@@ -153,7 +156,6 @@ module Berkshelf::API
153
156
  # if there is no running instance
154
157
  def shutdown
155
158
  @shutdown = true
156
- instance.terminate
157
159
  end
158
160
  end
159
161
  end
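With this change `#shutdown` only flips the `@shutdown` flag; the monitoring loop notices it between sleeps and is the one that terminates the running instance. A minimal illustration of that flag-polling pattern with plain threads (stand-ins, not the Celluloid actors above):

```ruby
# Illustration only: #shutdown just sets a flag; the monitor loop polls it
# and is the party that actually stops the worker.
shutdown_requested = false
stop_worker        = false

worker = Thread.new { sleep 0.1 until stop_worker }

monitor = Thread.new do
  while worker.alive?
    sleep 0.1
    stop_worker = true if shutdown_requested  # terminate from the monitor loop
  end
end

shutdown_requested = true  # what #shutdown now does: flip the flag and return
monitor.join
```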
@@ -19,7 +19,7 @@ module Berkshelf::API
19
19
  connection.cookbook.all.each do |cookbook, versions|
20
20
  versions.each do |version|
21
21
  cookbook_versions << RemoteCookbook.new(cookbook, version, self.class.worker_type,
22
- @connection.server_url)
22
+ @connection.server_url, priority)
23
23
  end
24
24
  end
25
25
  end
@@ -19,7 +19,7 @@ module Berkshelf::API
19
19
  [ cookbook, connection.future(:versions, cookbook) ]
20
20
  end.each do |cookbook, versions|
21
21
  versions.value.each do |version|
22
- cookbook_versions << RemoteCookbook.new(cookbook, version, self.class.worker_type, @connection.api_uri)
22
+ cookbook_versions << RemoteCookbook.new(cookbook, version, self.class.worker_type, @connection.api_uri, priority)
23
23
  end
24
24
  end
25
25
  end
@@ -16,8 +16,11 @@ module Berkshelf::API
16
16
  include Berkshelf::API::Mixin::Services
17
17
 
18
18
  attr_reader :options
19
+ attr_reader :priority
19
20
 
20
- def initialize(options = {}); end
21
+ def initialize(options = {})
22
+ @priority = options[:priority]
23
+ end
21
24
 
22
25
  # @abstract
23
26
  #
@@ -36,52 +39,6 @@ module Berkshelf::API
36
39
  raise RuntimeError, "must be implemented"
37
40
  end
38
41
 
39
- def build
40
- log.info "#{self} building..."
41
- log.info "#{self} determining if the cache is stale..."
42
- if stale?
43
- log.info "#{self} cache is stale."
44
- update_cache
45
- else
46
- log.info "#{self} cache is up to date."
47
- end
48
-
49
- log.info "clearing diff"
50
- clear_diff
51
- end
52
-
53
- # @return [Array<Array<RemoteCookbook>, Array<RemoteCookbook>>]
54
- def diff
55
- @diff ||= cache_manager.diff(cookbooks)
56
- end
57
-
58
- def update_cache
59
- created_cookbooks, deleted_cookbooks = diff
60
-
61
- log.info "#{self} adding (#{created_cookbooks.length}) items..."
62
- created_cookbooks.collect do |remote|
63
- [ remote, future(:metadata, remote) ]
64
- end.each do |remote, metadata|
65
- cache_manager.add(remote, metadata.value)
66
- end
67
-
68
- log.info "#{self} removing (#{deleted_cookbooks.length}) items..."
69
- deleted_cookbooks.each { |remote| cache_manager.remove(remote.name, remote.version) }
70
-
71
- log.info "#{self} cache updated."
72
- cache_manager.save
73
- end
74
-
75
- def stale?
76
- created_cookbooks, deleted_cookbooks = diff
77
- created_cookbooks.any? || deleted_cookbooks.any?
78
- end
79
-
80
- private
81
-
82
- def clear_diff
83
- @diff = nil
84
- end
85
42
  end
86
43
 
87
44
  class << self
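With the diffing and cache-update logic moved out of `Worker::Base`, a concrete worker now only has to report its cookbooks (tagged with its priority) and resolve metadata. A hypothetical subclass sketch, assuming only the `#cookbooks`/`#metadata` contract shown above; the class name, URL handling, and `"example"` location type are placeholders, and worker-type registration is elided:

```ruby
# Hypothetical worker -- not part of berkshelf-api; it only shows the shape
# the CacheManager expects back from #cookbooks, priority included.
class ExampleWorker < Berkshelf::API::CacheBuilder::Worker::Base
  def initialize(options = {})
    super(options)          # Worker::Base captures options[:priority] into #priority
    @url = options[:url]
  end

  # @return [Array<RemoteCookbook>]
  def cookbooks
    # A real worker would query its endpoint here.
    [ Berkshelf::API::RemoteCookbook.new("example", "1.0.0", "example", @url, priority) ]
  end

  def metadata(remote)
    # A real worker returns parsed cookbook metadata here.
    raise NotImplementedError, "fetch and parse metadata for #{remote.name}"
  end
end
```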
@@ -8,6 +8,7 @@ module Berkshelf::API
8
8
 
9
9
  include Berkshelf::API::GenericServer
10
10
  include Berkshelf::API::Logging
11
+ include Berkshelf::API::Mixin::Services
11
12
 
12
13
  server_name :cache_builder
13
14
  finalizer :finalize_callback
@@ -18,21 +19,14 @@ module Berkshelf::API
18
19
  @worker_supervisor = WorkerSupervisor.new(@worker_registry)
19
20
  @building = false
20
21
 
21
- Application.config.endpoints.each do |endpoint|
22
+ Application.config.endpoints.each_with_index do |endpoint, index|
22
23
  endpoint_options = endpoint.options.to_hash.deep_symbolize_keys
23
- @worker_supervisor.supervise(CacheBuilder::Worker[endpoint.type], endpoint_options)
24
+ @worker_supervisor.supervise(CacheBuilder::Worker[endpoint.type], endpoint_options.merge(priority: index))
24
25
  end
25
26
  end
26
27
 
27
- # Issue a single build command to all workers
28
- #
29
- # @return [Array]
30
28
  def build
31
- workers.collect { |actor| actor.future(:build) }.map do |f|
32
- begin
33
- f.value
34
- rescue; end
35
- end
29
+ cache_manager.process_workers(workers)
36
30
  end
37
31
 
38
32
  # Issue a build command to all workers at the scheduled interval
@@ -44,7 +38,7 @@ module Berkshelf::API
44
38
  loop do
45
39
  @building = true
46
40
  build
47
- sleep BUILD_INTERVAL
41
+ sleep interval
48
42
  end
49
43
  end
50
44
 
@@ -10,12 +10,16 @@ module Berkshelf::API
10
10
  include Berkshelf::API::GenericServer
11
11
  include Berkshelf::API::Logging
12
12
 
13
+ extend Forwardable
14
+ def_delegators :@cache, :warmed?, :set_warmed, :clear
15
+
13
16
  SAVE_INTERVAL = 30.0
14
17
 
15
18
  server_name :cache_manager
16
19
  finalizer :finalize_callback
17
- exclusive :add, :clear, :remove, :save
20
+ exclusive :merge, :add, :remove
18
21
 
22
+ # @return [DependencyCache]
19
23
  attr_reader :cache
20
24
 
21
25
  def initialize
@@ -30,14 +34,77 @@ module Berkshelf::API
30
34
  #
31
35
  # @return [Hash]
32
36
  def add(cookbook, metadata)
33
- @cache.add(cookbook, metadata)
37
+ log.debug "#{self} adding (#{cookbook.name}, #{cookbook.version})"
38
+ cache.add(cookbook, metadata)
34
39
  end
35
40
 
36
- # Clear any items added to the cache
41
+ # Remove the cached item matching the given name and version
37
42
  #
38
- # @return [Hash]
39
- def clear
40
- @cache.clear
43
+ # @param [#to_s] name
44
+ # @param [#to_s] version
45
+ #
46
+ # @return [DependencyCache]
47
+ def remove(name, version)
48
+ log.debug "#{self} removing (#{name}, #{version})"
49
+ cache.remove(name, version)
50
+ end
51
+
52
+ # Loops through a list of workers and merges their cookbook sets into the cache
53
+ #
54
+ # @param [Array<CacheBuilder::Worker::Base>] workers
55
+ # The workers for this cache
56
+ #
57
+ # @return [Boolean]
58
+ def process_workers(workers)
59
+ # If the cache has been warmed already, we want to spawn
60
+ # workers for all the endpoints concurrently. However, if the
61
+ # cache is cold we want to run sequentially, so higher priority
62
+ # endpoints can work before lower priority, avoiding duplicate
63
+ # downloads.
64
+ # We don't want crashing workers to crash the CacheManager.
65
+ # Crashes are logged so just ignore the exceptions
66
+ if warmed?
67
+ Array(workers).flatten.collect do |worker|
68
+ self.future(:process_worker, worker)
69
+ end.each do |f|
70
+ f.value rescue nil
71
+ end
72
+ else
73
+ Array(workers).flatten.each do |worker|
74
+ process_worker(worker) rescue nil
75
+ end
76
+ end
77
+ self.set_warmed
78
+ end
79
+
80
+ # @param [CacheBuilder::Worker::Base] worker
81
+ def process_worker(worker)
82
+ log.info "processing #{worker}"
83
+ remote_cookbooks = worker.cookbooks
84
+ log.info "found #{remote_cookbooks.size} cookbooks from #{worker}"
85
+ created_cookbooks, deleted_cookbooks = diff(remote_cookbooks, worker.priority)
86
+ log.debug "#{created_cookbooks.size} cookbooks to be added to the cache from #{worker}"
87
+ log.debug "#{deleted_cookbooks.size} cookbooks to be removed from the cache from #{worker}"
88
+
89
+ # Process metadata in chunks - Ridley cookbook resource uses a
90
+ # task_class TaskThread, which means each future gets its own
91
+ # thread. If we have many (>2000) cookbooks we can easily
92
+ # exhaust the available threads on the system.
93
+ created_cookbooks_with_metadata = []
94
+ until created_cookbooks.empty?
95
+ work = created_cookbooks.slice!(0,500)
96
+ log.info "processing metadata for #{work.size} cookbooks with #{created_cookbooks.size} remaining on #{worker}"
97
+ work.map! do |remote|
98
+ [ remote, worker.future(:metadata, remote) ]
99
+ end.map! do |remote, metadata|
100
+ [remote, metadata.value]
101
+ end
102
+ created_cookbooks_with_metadata += work
103
+ end
104
+
105
+ log.info "about to merge cookbooks"
106
+ merge(created_cookbooks_with_metadata, deleted_cookbooks)
107
+ log.info "#{self} cache updated."
41
108
  end
42
109
 
43
110
  # Check if the cache knows about the given cookbook version
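The 500-cookbook batching in `#process_worker` above keeps the number of in-flight metadata futures (and therefore threads) bounded. A generic sketch of the same slice-and-resolve pattern, independent of Celluloid and Ridley:

```ruby
# Generic illustration of the batching in #process_worker: work through a large
# list in fixed-size slices so only one slice's worth of threads is alive at a time.
BATCH_SIZE = 500

def fetch_metadata(remote)             # stand-in for worker.metadata(remote)
  "metadata for #{remote}"
end

def process_in_batches(remotes, batch_size = BATCH_SIZE)
  results = []
  until remotes.empty?
    batch   = remotes.slice!(0, batch_size)
    threads = batch.map { |remote| Thread.new { [remote, fetch_metadata(remote)] } }
    results.concat(threads.map(&:value))   # wait for this batch before starting the next
  end
  results
end

pairs = process_in_batches(Array(1..1_200).map { |i| "cookbook-#{i}" })
# => 1,200 [remote, metadata] pairs, with at most 500 threads alive at once
```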
@@ -51,44 +118,53 @@ module Berkshelf::API
51
118
  end
52
119
 
53
120
  def load_save
121
+ log.info "Loading save from #{self.class.cache_file}"
54
122
  @cache = DependencyCache.from_file(self.class.cache_file)
123
+ log.info "Cache contains #{@cache.cookbooks.size} items"
55
124
  end
56
125
 
57
- # Remove the cached item matching the given name and version
58
- #
59
- # @param [#to_s] name
60
- # @param [#to_s] version
61
- #
62
- # @return [DependencyCache]
63
- def remove(name, version)
64
- @cache.remove(name, version)
65
- end
126
+ private
66
127
 
67
- def save
68
- log.info "Saving the cache to: #{self.class.cache_file}"
69
- cache.save(self.class.cache_file)
70
- log.info "Cache saved!"
71
- end
128
+ def merge(created_cookbooks, deleted_cookbooks)
129
+ log.info "#{self} adding (#{created_cookbooks.length}) items..."
130
+ created_cookbooks.each do |remote_with_metadata|
131
+ remote, metadata = remote_with_metadata
132
+ add(remote, metadata)
133
+ end
72
134
 
73
- # @param [Array<RemoteCookbook>] cookbooks
74
- # An array of RemoteCookbooks representing all the cookbooks on the indexed site
75
- #
76
- # @return [Array<Array<RemoteCookbook>, Array<RemoteCookbook>>]
77
- # A tuple of Arrays of RemoteCookbooks
78
- # The first array contains items not in the cache
79
- # The second array contains items in the cache, but not in the cookbooks parameter
80
- def diff(cookbooks)
81
- known_cookbooks = cache.cookbooks
82
- created_cookbooks = cookbooks - known_cookbooks
83
- deleted_cookbooks = known_cookbooks - cookbooks
84
- [ created_cookbooks, deleted_cookbooks ]
85
- end
135
+ log.info "#{self} removing (#{deleted_cookbooks.length}) items..."
136
+ deleted_cookbooks.each { |remote| remove(remote.name, remote.version) }
86
137
 
87
- private
138
+ log.info "#{self} cache updated."
139
+ save
140
+ end
141
+
142
+ def save
143
+ if warmed?
144
+ log.info "Saving the cache to: #{self.class.cache_file}"
145
+ cache.save(self.class.cache_file)
146
+ log.info "Cache saved!"
147
+ end
148
+ end
149
+
150
+ # @param [Array<RemoteCookbook>] cookbooks
151
+ # An array of RemoteCookbooks representing all the cookbooks on the indexed site
152
+ # @param [Integer] worker_priority
153
+ # The priority/ID of the endpoint that is running
154
+ # @return [Array(Array<RemoteCookbook>, Array<RemoteCookbook>)]
155
+ # A tuple of Arrays of RemoteCookbooks
156
+ # The first array contains items not in the cache
157
+ # The second array contains items in the cache, but not in the cookbooks parameter
158
+ def diff(cookbooks, worker_priority)
159
+ known_cookbooks = cache.cookbooks.select { |c| c.priority <= worker_priority }
160
+ created_cookbooks = cookbooks - known_cookbooks
161
+ deleted_cookbooks = (known_cookbooks - cookbooks).select { |c| c.priority == worker_priority }
162
+ [ created_cookbooks, deleted_cookbooks ]
163
+ end
88
164
 
89
165
  def finalize_callback
90
166
  log.info "Cache Manager shutting down..."
91
- self.save
167
+ save
92
168
  end
93
169
  end
94
170
  end
@@ -1,4 +1,5 @@
1
1
  require 'buff/config/json'
2
+ require 'digest/sha1'
2
3
 
3
4
  module Berkshelf::API
4
5
  class Config < Buff::Config::JSON
@@ -24,5 +25,9 @@ module Berkshelf::API
24
25
  }
25
26
  }
26
27
  ]
28
+
29
+ def endpoints_checksum
30
+ Digest::SHA1.hexdigest(endpoints.collect {|x| x.to_hash }.to_s)
31
+ end
27
32
  end
28
33
  end
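`#endpoints_checksum` is what ties the changelog's "config checksum" note to the cache invalidation in `DependencyCache#initialize` further down: the digest is written into the saved cache (`to_json(saving: true)`) and compared on the next load. A standalone sketch of that round trip, using plain hashes as stand-ins for the `Buff::Config` objects:

```ruby
require 'digest/sha1'

# Stand-in for Config#endpoints_checksum: digest the whole endpoint config.
def endpoints_checksum(endpoints)
  Digest::SHA1.hexdigest(endpoints.map(&:to_h).to_s)
end

old_endpoints = [{ type: 'opscode',
                   options: { url: 'http://cookbooks.opscode.com/api/v1' } }]
new_endpoints = old_endpoints +
                [{ type: 'chef_server', options: { url: 'https://chef.example.com' } }]

# Saved alongside the cache when it is written to disk:
saved_checksum = endpoints_checksum(old_endpoints)

# On the next boot, DependencyCache#initialize compares and invalidates:
cache = { 'ruby' => { '1.0.0' => { 'dependencies' => {} } } }
cache.clear if saved_checksum != endpoints_checksum(new_endpoints)
cache # => {} -- the endpoints changed, so the cache is rebuilt from scratch
```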
@@ -6,6 +6,7 @@ module Berkshelf::API
6
6
  # {
7
7
  # "cookbook_name" => {
8
8
  # "x.y.z" => {
9
+ # :endpoint_priority => 1,
9
10
  # :dependencies => { "cookbook_name" => "constraint" },
10
11
  # :platforms => { "platform" => "constraint" }
11
12
  # }
@@ -32,12 +33,19 @@ module Berkshelf::API
32
33
  end
33
34
  end
34
35
 
36
+ include Berkshelf::API::Logging
35
37
  extend Forwardable
36
38
  def_delegators :@cache, :[], :[]=
37
39
 
38
40
  # @param [Hash] contents
39
41
  def initialize(contents = {})
40
- @cache = Hash[contents]
42
+ @warmed = false
43
+ @cache = Hash[contents].with_indifferent_access
44
+ if @cache['endpoints_checksum'] && (@cache['endpoints_checksum'] != Application.config.endpoints_checksum)
45
+ log.warn "Endpoints in config have changed - invalidating cache"
46
+ @cache.clear
47
+ end
48
+ @cache.delete('endpoints_checksum')
41
49
  end
42
50
 
43
51
  # @param [RemoteCookbook] cookbook
@@ -49,6 +57,7 @@ module Berkshelf::API
49
57
  dependencies = metadata.dependencies || Hash.new
50
58
  @cache[cookbook.name.to_s] ||= Hash.new
51
59
  @cache[cookbook.name.to_s][cookbook.version.to_s] = {
60
+ endpoint_priority: cookbook.priority,
52
61
  platforms: platforms,
53
62
  dependencies: dependencies,
54
63
  location_type: cookbook.location_type,
@@ -99,9 +108,13 @@ module Berkshelf::API
99
108
  @cache.to_hash
100
109
  end
101
110
 
111
+ # @param [Hash] options
112
+ #
102
113
  # @return [String]
103
114
  def to_json(options = {})
104
- JSON.generate(to_hash, options)
115
+ output = to_hash
116
+ output['endpoints_checksum'] = Application.config.endpoints_checksum if options[:saving]
117
+ JSON.generate(output, options)
105
118
  end
106
119
 
107
120
  # @return [Array<RemoteCookbook>]
@@ -109,7 +122,7 @@ module Berkshelf::API
109
122
  [].tap do |remote_cookbooks|
110
123
  @cache.each_pair do |name, versions|
111
124
  versions.each do |version, metadata|
112
- remote_cookbooks << RemoteCookbook.new(name, version, metadata[:location_type], metadata[:location_path])
125
+ remote_cookbooks << RemoteCookbook.new(name, version, metadata[:location_type], metadata[:location_path], metadata[:endpoint_priority])
113
126
  end
114
127
  end
115
128
  end
@@ -117,7 +130,15 @@ module Berkshelf::API
117
130
 
118
131
  def save(path)
119
132
  FileUtils.mkdir_p(File.dirname(path))
120
- File.open(path, 'w+') { |f| f.write(self.to_json) }
133
+ File.open(path, 'w+') { |f| f.write(self.to_json(saving: true)) }
134
+ end
135
+
136
+ def warmed?
137
+ @warmed
138
+ end
139
+
140
+ def set_warmed
141
+ @warmed = true
121
142
  end
122
143
  end
123
144
  end
@@ -12,7 +12,12 @@ module Berkshelf::API
12
12
 
13
13
  desc "list all known cookbooks"
14
14
  get 'universe' do
15
- cache_manager.cache
15
+ if cache_manager.warmed?
16
+ cache_manager.cache
17
+ else
18
+ header "Retry-After", 600
19
+ status 503
20
+ end
16
21
  end
17
22
  end
18
23
  end
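Because `/universe` now answers 503 with a `Retry-After` header until the cache is warm, clients should poll rather than treat the first response as final. A minimal client sketch with Net::HTTP, assuming the documented default host and port:

```ruby
require 'net/http'
require 'json'

uri = URI('http://localhost:26200/universe')   # default REST gateway port

loop do
  response = Net::HTTP.get_response(uri)
  case response
  when Net::HTTPSuccess
    universe = JSON.parse(response.body)
    puts "#{universe.size} cookbooks indexed"
    break
  when Net::HTTPServiceUnavailable
    wait = (response['Retry-After'] || 600).to_i
    warn "cache still warming, retrying in #{wait}s"
    sleep wait
  else
    raise "unexpected response: #{response.code}"
  end
end
```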
@@ -1,3 +1,11 @@
1
1
  module Berkshelf::API
2
- class RemoteCookbook < Struct.new(:name, :version, :location_type, :location_path); end
2
+ class RemoteCookbook < Struct.new(:name, :version, :location_type, :location_path, :priority)
3
+ def hash
4
+ "#{name}|#{version}".hash
5
+ end
6
+
7
+ def eql?(other)
8
+ self.hash == other.hash
9
+ end
10
+ end
3
11
  end
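The custom `hash`/`eql?` make two `RemoteCookbook`s with the same name and version compare equal even when their location or priority differs, which is what lets the set arithmetic in `CacheManager#diff` work across endpoints. For example:

```ruby
# Equality ignores location_type, location_path and priority:
a = Berkshelf::API::RemoteCookbook.new('ruby', '1.2.3', 'opscode', nil, 0)
b = Berkshelf::API::RemoteCookbook.new('ruby', '1.2.3', 'chef_server', 'https://example.com', 1)

a.eql?(b)   # => true
[a] - [b]   # => []  (Array#- relies on #hash/#eql?, so b "covers" a)
```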
@@ -13,50 +13,144 @@ module Berkshelf::API
13
13
  workers: 10
14
14
  }.freeze
15
15
 
16
+ INITIAL_BODY = ''
17
+
18
+ CONTENT_TYPE_ORIG = 'Content-Type'.freeze
19
+ CONTENT_LENGTH_ORIG = 'Content-Length'.freeze
20
+ CONTENT_TYPE = 'CONTENT_TYPE'.freeze
21
+ CONTENT_LENGTH = 'CONTENT_LENGTH'.freeze
22
+
23
+ SERVER_SOFTWARE = 'SERVER_SOFTWARE'.freeze
24
+ SERVER_NAME = 'SERVER_NAME'.freeze
25
+ SERVER_PORT = 'SERVER_PORT'.freeze
26
+ SERVER_PROTOCOL = 'SERVER_PROTOCOL'.freeze
27
+ GATEWAY_INTERFACE = "GATEWAY_INTERFACE".freeze
28
+ LOCALHOST = 'localhost'.freeze
29
+ HTTP_VERSION = 'HTTP_VERSION'.freeze
30
+ CGI_1_1 = 'CGI/1.1'.freeze
31
+ REMOTE_ADDR = 'REMOTE_ADDR'.freeze
32
+ CONNECTION = 'HTTP_CONNECTION'.freeze
33
+ SCRIPT_NAME = 'SCRIPT_NAME'.freeze
34
+ PATH_INFO = 'PATH_INFO'.freeze
35
+ REQUEST_METHOD = 'REQUEST_METHOD'.freeze
36
+ QUERY_STRING = 'QUERY_STRING'.freeze
37
+ HTTP_1_0 = 'HTTP/1.0'.freeze
38
+ HTTP_1_1 = 'HTTP/1.1'.freeze
39
+ HTTP_ = 'HTTP_'.freeze
40
+ HOST = 'Host'.freeze
41
+
42
+ RACK_INPUT = 'rack.input'.freeze
43
+ RACK_LOGGER = 'rack.logger'.freeze
44
+ RACK_VERSION = 'rack.version'.freeze
45
+ RACK_ERRORS = 'rack.errors'.freeze
46
+ RACK_MULTITHREAD = 'rack.multithread'.freeze
47
+ RACK_MULTIPROCESS = 'rack.multiprocess'.freeze
48
+ RACK_RUN_ONCE = 'rack.run_once'.freeze
49
+ RACK_URL_SCHEME = 'rack.url_scheme'.freeze
50
+ RACK_WEBSOCKET = 'rack.websocket'.freeze
51
+
52
+ PROTO_RACK_ENV = {
53
+ RACK_VERSION => ::Rack::VERSION,
54
+ RACK_ERRORS => STDERR,
55
+ RACK_MULTITHREAD => true,
56
+ RACK_MULTIPROCESS => false,
57
+ RACK_RUN_ONCE => false,
58
+ RACK_URL_SCHEME => "http".freeze,
59
+ SCRIPT_NAME => ENV[SCRIPT_NAME] || "",
60
+ SERVER_PROTOCOL => HTTP_1_1,
61
+ SERVER_SOFTWARE => "berkshelf-api/#{Berkshelf::API::VERSION}".freeze,
62
+ GATEWAY_INTERFACE => CGI_1_1
63
+ }.freeze
64
+
16
65
  # @return [String]
17
66
  attr_reader :host
67
+
18
68
  # @return [Integer]
19
69
  attr_reader :port
70
+
20
71
  # @return [Integer]
21
72
  attr_reader :workers
22
73
 
23
- def_delegator :handler, :rack_app
74
+ # @return [Berkshelf::API::RackApp]
75
+ attr_reader :app
24
76
 
25
77
  server_name :rest_gateway
26
- finalizer :finalize_callback
27
78
 
28
79
  # @option options [String] :host ('0.0.0.0')
29
80
  # @option options [Integer] :port (26200)
30
81
  # @option options [Boolean] :quiet (false)
31
82
  # @option options [Integer] :workers (10)
32
83
  def initialize(options = {})
33
- options = DEFAULT_OPTIONS.merge(options)
34
- options[:app] = Berkshelf::API::RackApp.new
35
-
36
- @host = options[:host]
37
- @port = options[:port]
38
- @workers = options[:workers]
39
- @handler = ::Rack::Handler::Reel.new(options)
40
- @pool = ::Reel::RackWorker.pool(size: @workers, args: [ @handler ])
84
+ options = DEFAULT_OPTIONS.merge(options)
85
+ @host = options[:host]
86
+ @port = options[:port]
41
87
 
42
88
  log.info "REST Gateway listening on #{@host}:#{@port}"
43
89
  super(@host, @port, &method(:on_connect))
90
+ @app = Berkshelf::API::RackApp.new
44
91
  end
45
92
 
46
- # @param [Reel::Connection] connection
47
93
  def on_connect(connection)
48
- pool.handle(connection.detach)
94
+ while request = connection.request
95
+ case request
96
+ when request.websocket?
97
+ request.respond(:bad_request, "WebSockets not supported")
98
+ else
99
+ route_request(connection, request)
100
+ end
101
+ end
102
+ end
103
+
104
+ def route_request(connection, request)
105
+ status, headers, body_parts = app.call(request_env(request, connection))
106
+ body, is_stream = response_body(body_parts)
107
+
108
+ response_klass = is_stream ? Reel::StreamResponse : Reel::Response
109
+ response = response_klass.new(status, headers, body)
110
+ connection.respond(response)
111
+ end
112
+
113
+ def request_env(request, connection)
114
+ env = env(request)
115
+ env[REMOTE_ADDR] = connection.remote_ip
116
+ env
49
117
  end
50
118
 
51
119
  private
52
120
 
53
- # @return [Reel::RackWorker]
54
- attr_reader :pool
55
- # @return [Rack::Handler::Reel]
56
- attr_reader :handler
121
+ def env(request)
122
+ env = Hash[PROTO_RACK_ENV]
123
+
124
+ env[RACK_INPUT] = StringIO.new(request.body.to_s || INITIAL_BODY)
125
+ env[RACK_INPUT].set_encoding(Encoding::BINARY) if env[RACK_INPUT].respond_to?(:set_encoding)
126
+ env[SERVER_NAME], env[SERVER_PORT] = (request[HOST]||'').split(':', 2)
127
+ env[SERVER_PORT] ||= port.to_s
128
+ env[HTTP_VERSION] = request.version || env[SERVER_PROTOCOL]
129
+ env[REQUEST_METHOD] = request.method
130
+ env[PATH_INFO] = request.path
131
+ env[QUERY_STRING] = request.query_string || ''
132
+
133
+ (_ = request.headers.delete CONTENT_TYPE_ORIG) && (env[CONTENT_TYPE] = _)
134
+ (_ = request.headers.delete CONTENT_LENGTH_ORIG) && (env[CONTENT_LENGTH] = _)
135
+
136
+ request.headers.each_pair do |key, val|
137
+ env[HTTP_ + key.gsub('-', '_').upcase] = val
138
+ end
139
+ env
140
+ end
57
141
 
58
- def finalize_callback
59
- pool.terminate if pool && pool.alive?
142
+ def response_body(body_parts)
143
+ if body_parts.respond_to?(:to_path)
144
+ ::File.new(body_parts.to_path)
145
+ else
146
+ body = ''
147
+ body_parts.each do |c|
148
+ return [c, true] if c.is_a?(Reel::Stream)
149
+ body << c
150
+ end
151
+ body_parts.close if body_parts.respond_to?(:close)
152
+ body
153
+ end
60
154
  end
61
155
  end
62
156
  end
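The gateway now translates each Reel request into a Rack env hash itself and calls the app directly, instead of going through `Rack::Handler::Reel` and a worker pool. A stripped-down sketch of that hand-off with a stand-in lambda app (the real app is `Berkshelf::API::RackApp`):

```ruby
require 'stringio'

# Stand-in Rack app; the gateway wires in Berkshelf::API::RackApp instead.
app = ->(env) { [200, { 'Content-Type' => 'application/json' }, ['{}']] }

# A minimal env of the kind #request_env/#env assemble from a Reel request.
env = {
  'REQUEST_METHOD'  => 'GET',
  'PATH_INFO'       => '/universe',
  'QUERY_STRING'    => '',
  'SERVER_NAME'     => 'localhost',
  'SERVER_PORT'     => '26200',
  'SERVER_PROTOCOL' => 'HTTP/1.1',
  'rack.input'      => StringIO.new(''),
  'rack.errors'     => STDERR,
  'rack.url_scheme' => 'http'
}

status, headers, body = app.call(env)
# The gateway then wraps these three values in a Reel::Response (or a
# Reel::StreamResponse when the body is a Reel::Stream) and writes it back.
```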
@@ -18,7 +18,10 @@ module Berkshelf::API::RSpec
18
18
  def start(options = {})
19
19
  options = options.reverse_merge(port: 26210, log_location: "/dev/null", endpoints: [])
20
20
  Berkshelf::API::Application.config.endpoints = options[:endpoints]
21
- Berkshelf::API::Application.run!(options) unless running?
21
+ unless running?
22
+ Berkshelf::API::Application.run!(options)
23
+ cache_builder.build
24
+ end
22
25
  end
23
26
 
24
27
  def stop
@@ -92,7 +92,7 @@ module Berkshelf::API
92
92
  #
93
93
  # @return [String, nil]
94
94
  def download(name, version, destination = Dir.mktmpdir)
95
- log.info "downloading #{name}(#{version})"
95
+ log.debug "downloading #{name}(#{version})"
96
96
  if uri = download_uri(name, version)
97
97
  archive = stream(uri)
98
98
  Archive.extract(archive.path, destination)
@@ -1,5 +1,5 @@
1
1
  module Berkshelf
2
2
  module API
3
- VERSION = "0.2.0"
3
+ VERSION = "1.0.0"
4
4
  end
5
5
  end
data/lib/berkshelf/api.rb CHANGED
@@ -1,3 +1,4 @@
1
+ require 'berkshelf/api/version'
1
2
  require 'celluloid'
2
3
  require 'hashie'
3
4
  require 'ridley'
@@ -35,25 +35,4 @@ describe Berkshelf::API::CacheBuilder::Worker::ChefServer do
35
35
  end
36
36
  end
37
37
  end
38
-
39
- describe "#build" do
40
- before do
41
- Berkshelf::API::CacheManager.start
42
- chef_cookbook("ruby", "1.0.0")
43
- chef_cookbook("ruby", "2.0.0")
44
- chef_cookbook("elixir", "3.0.0")
45
- chef_cookbook("elixir", "3.0.1")
46
- end
47
-
48
- let(:cache) { Berkshelf::API::CacheManager.instance.cache }
49
-
50
- it "adds each item to the cache" do
51
- subject.build
52
- expect(cache).to have_cookbook("ruby", "1.0.0")
53
- expect(cache).to have_cookbook("ruby", "2.0.0")
54
- expect(cache).to have_cookbook("elixir", "3.0.0")
55
- expect(cache).to have_cookbook("elixir", "3.0.1")
56
- expect(cache.cookbooks).to have(4).items
57
- end
58
- end
59
38
  end
@@ -37,44 +37,4 @@ describe Berkshelf::API::CacheBuilder::Worker::Base do
37
37
  end
38
38
  end
39
39
  end
40
-
41
- let(:cache_manager) { double(:diff => :chicken) }
42
- subject { described_class.new }
43
-
44
- describe "#diff" do
45
- it "should delegate to the cache_manager to calculate the diff" do
46
- subject.should_receive(:cache_manager).and_return(cache_manager)
47
- subject.should_receive(:cookbooks).and_return(:cookbooks)
48
-
49
- expect(subject.diff).to eql(:chicken)
50
- end
51
-
52
- it "should memoize the diff to prevent recalculating" do
53
- subject.should_receive(:cache_manager).exactly(1).times.and_return(cache_manager)
54
- subject.should_receive(:cookbooks).and_return(:cookbooks)
55
-
56
- subject.diff
57
- subject.diff
58
- end
59
- end
60
-
61
- describe "#clear_diff" do
62
- it "should set the diff to nil" do
63
- subject.should_receive(:cache_manager).and_return(cache_manager)
64
- subject.should_receive(:cookbooks).and_return(:cookbooks)
65
-
66
- subject.diff
67
- expect(subject.instance_variable_get(:@diff)).to eql(:chicken)
68
- subject.send(:clear_diff)
69
- expect(subject.instance_variable_get(:@diff)).to eql(nil)
70
- end
71
-
72
- it "memoizes the diff to prevent recalculating" do
73
- subject.should_receive(:cache_manager).exactly(1).times.and_return(cache_manager)
74
- subject.should_receive(:cookbooks).and_return(:cookbooks)
75
-
76
- subject.diff
77
- subject.diff
78
- end
79
- end
80
40
  end
@@ -1,17 +1,21 @@
1
1
  require 'spec_helper'
2
2
 
3
3
  describe Berkshelf::API::CacheBuilder do
4
+
5
+ before { Berkshelf::API::CacheManager.start }
4
6
  let(:instance) { described_class.new }
5
7
 
6
8
  describe "#build" do
7
9
  subject(:build) { instance.build }
8
10
  let(:workers) { [ double('worker') ] }
9
11
  let(:future) { double('future', value: nil) }
12
+ let(:cache_manager) { double('cache_manager') }
10
13
 
11
14
  before { instance.stub(workers: workers) }
12
15
 
13
- it "sends a #build message to each worker" do
14
- workers.each { |worker| worker.should_receive(:future).with(:build).and_return(future) }
16
+ it "asks the cache_manager to process all of its actors" do
17
+ instance.stub(:cache_manager).and_return(cache_manager)
18
+ cache_manager.should_receive(:process_workers).with(instance.workers).and_return(future)
15
19
  build
16
20
  end
17
21
  end
@@ -26,12 +30,51 @@ describe Berkshelf::API::CacheBuilder do
26
30
  end
27
31
  end
28
32
 
29
- it "has one worker started by default" do
30
- expect(workers).to have(1).item
33
+ context "when no workers are explicitly configured" do
34
+ it "has one worker started by default" do
35
+ expect(workers).to have(1).item
36
+ end
37
+
38
+ it "has an opscode worker started by default" do
39
+ expect(workers.first).to be_a(described_class::Worker::Opscode)
40
+ end
31
41
  end
32
42
 
33
- it "has an opscode worker started by default" do
34
- expect(workers.first).to be_a(described_class::Worker::Opscode)
43
+ context "when there are multiple workers" do
44
+ let(:endpoint_array) { [ first_worker, second_worker ] }
45
+ let(:first_worker) { double(options: endpoint_options.dup.merge(priority: 0), type: 'chicken') }
46
+ let(:second_worker) { double(options: endpoint_options.dup.merge(priority: 1), type: 'tuna') }
47
+ let(:endpoint_options) do
48
+ {
49
+ "url" => "www.fake.com",
50
+ "client_name" => "fake",
51
+ "client_key" => "/path/to/fake.key"
52
+ }
53
+ end
54
+ let(:dummy_endpoint_klass) do
55
+ Class.new do
56
+ attr_reader :options
57
+ include Celluloid
58
+
59
+ def initialize(options = {})
60
+ @options = options
61
+ end
62
+ end
63
+ end
64
+
65
+ before do
66
+ Berkshelf::API::Application.config.stub(:endpoints).and_return(endpoint_array)
67
+ Berkshelf::API::CacheBuilder::Worker.stub(:[]).and_return(dummy_endpoint_klass, dummy_endpoint_klass)
68
+ end
69
+
70
+ it "has two workers" do
71
+ expect(workers).to have(2).items
72
+ end
73
+
74
+ it "keeps the ordering" do
75
+ expect(workers.first.options[:priority]).to be(0)
76
+ expect(workers.last.options[:priority]).to be(1)
77
+ end
35
78
  end
36
79
  end
37
80
  end
@@ -67,23 +67,48 @@ describe Berkshelf::API::CacheManager do
67
67
  end
68
68
 
69
69
  describe "#diff" do
70
- let(:cookbook_one) { Berkshelf::API::RemoteCookbook.new("ruby", "1.2.3", "opscode") }
71
- let(:cookbook_two) { Berkshelf::API::RemoteCookbook.new("elixir", "2.0.0", "opscode") }
70
+ let(:cookbook_one) { Berkshelf::API::RemoteCookbook.new("ruby", "1.2.3", "opscode", nil, 1) }
71
+ let(:cookbook_two) { Berkshelf::API::RemoteCookbook.new("elixir", "2.0.0", "opscode", nil, 1) }
72
72
  let(:comparison) { Array.new }
73
73
 
74
74
  before do
75
- subject.add(cookbook_one, double(dependencies: nil, platforms: nil))
76
- subject.add(cookbook_two, double(dependencies: nil, platforms: nil))
75
+ subject.send(:add, cookbook_one, double(dependencies: nil, platforms: nil))
76
+ subject.send(:add, cookbook_two, double(dependencies: nil, platforms: nil))
77
77
 
78
- @created, @deleted = @diff = subject.diff(comparison)
78
+ @created, @deleted = @diff = subject.send(:diff, comparison, 1)
79
79
  end
80
80
 
81
81
  it "returns two items" do
82
82
  expect(@diff).to have(2).items
83
83
  end
84
84
 
85
+ context "when there are more than one worker endpoints" do
86
+ let(:new_cookbook) { Berkshelf::API::RemoteCookbook.new("ruby", "3.0.0", "opscode", nil, 2) }
87
+ let(:comparison) { [ cookbook_one, cookbook_two, new_cookbook ] }
88
+
89
+ before do
90
+ @created, @deleted = @diff = subject.send(:diff, comparison, 2)
91
+ end
92
+
93
+ it "only creates cookbooks that have the same or lower priority" do
94
+ expect(@created).to eql([new_cookbook])
95
+ end
96
+
97
+ context "when the cookbook has been deleted" do
98
+ let(:comparison) { [cookbook_one] }
99
+
100
+ before do
101
+ @created, @deleted = @diff = subject.send(:diff, comparison, 1)
102
+ end
103
+
104
+ it "only deletes cookbooks at the same priority" do
105
+ expect(@deleted).to eql([cookbook_two])
106
+ end
107
+ end
108
+ end
109
+
85
110
  context "when there are created and deleted cookbooks" do
86
- let(:new_cookbook) { Berkshelf::API::RemoteCookbook.new("ruby", "3.0.0", "opscode") }
111
+ let(:new_cookbook) { Berkshelf::API::RemoteCookbook.new("ruby", "3.0.0", "opscode", nil, 1) }
87
112
  let(:comparison) { [ cookbook_one, new_cookbook ] }
88
113
 
89
114
  it "should return created and deleted cookbooks" do
@@ -93,7 +118,7 @@ describe Berkshelf::API::CacheManager do
93
118
  end
94
119
 
95
120
  context "when there are only created cookbooks" do
96
- let(:new_cookbook) { Berkshelf::API::RemoteCookbook.new("ruby", "3.0.0", "opscode") }
121
+ let(:new_cookbook) { Berkshelf::API::RemoteCookbook.new("ruby", "3.0.0", "opscode", nil, 1) }
97
122
  let(:comparison) { [ cookbook_one, cookbook_two, new_cookbook ] }
98
123
 
99
124
  it "should return only created cookbooks" do
@@ -105,6 +130,10 @@ describe Berkshelf::API::CacheManager do
105
130
  context "when there are only deleted cookbooks" do
106
131
  let(:comparison) { [ cookbook_one ] }
107
132
 
133
+ before do
134
+ @created, @deleted = @diff = subject.send(:diff, comparison, 1)
135
+ end
136
+
108
137
  it "should return only deleted cookbooks" do
109
138
  expect(@created).to be_empty
110
139
  expect(@deleted).to eql([cookbook_two])
@@ -67,6 +67,13 @@ describe Berkshelf::API::DependencyCache do
67
67
 
68
68
  subject { described_class.new(contents) }
69
69
 
70
+ context "when a new DependencyCache is created" do
71
+ it "should allow indifferent access to items in the cache" do
72
+ expect(subject[:chicken]).to be_a(Hash)
73
+ expect(subject[:chicken][:'1.0'][:dependencies]).to be_a(Hash)
74
+ end
75
+ end
76
+
70
77
  describe "#cookbooks" do
71
78
  it "should return a list of RemoteCookbooks" do
72
79
  expected_value = [
@@ -8,11 +8,24 @@ describe Berkshelf::API::Endpoint::V1 do
8
8
  let(:app) { described_class.new }
9
9
 
10
10
  describe "GET /universe" do
11
- before { get '/universe' }
12
- subject { last_response }
13
- let(:app_cache) { cache_manager.cache }
11
+ context "the cache has been warmed" do
12
+ before { cache_manager.set_warmed; get '/universe' }
14
13
 
15
- its(:status) { should be(200) }
16
- its(:body) { should eq(app_cache.to_json) }
14
+ subject { last_response }
15
+ let(:app_cache) { cache_manager.cache }
16
+
17
+ its(:status) { should be(200) }
18
+ its(:body) { should eq(app_cache.to_json) }
19
+ end
20
+
21
+ context "the cache is still warming" do
22
+ before { get '/universe' }
23
+
24
+ subject { last_response }
25
+ let(:app_cache) { cache_manager.cache }
26
+
27
+ its(:status) { should be(503) }
28
+ its(:headers) { should have_key("Retry-After") }
29
+ end
17
30
  end
18
31
  end
@@ -9,18 +9,12 @@ describe Berkshelf::API::RESTGateway do
9
9
 
10
10
  its(:host) { should eql(described_class::DEFAULT_OPTIONS[:host]) }
11
11
  its(:port) { should eql(described_class::DEFAULT_OPTIONS[:port]) }
12
- its(:workers) { should eql(described_class::DEFAULT_OPTIONS[:workers]) }
13
- its(:rack_app) { should be_a(Berkshelf::API::RackApp) }
12
+ its(:app) { should be_a(Berkshelf::API::RackApp) }
14
13
 
15
14
  context "given a different port" do
16
15
  before { options[:port] = 26210 }
17
16
  its(:port) { should eql(26210) }
18
17
  end
19
-
20
- context "given a different amount of workers" do
21
- before { options[:workers] = 20 }
22
- its(:workers) { should eql(20) }
23
- end
24
18
  end
25
19
  end
26
20
  end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: berkshelf-api
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.2.0
4
+ version: 1.0.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Jamie Winsor
@@ -9,7 +9,7 @@ authors:
9
9
  autorequire:
10
10
  bindir: bin
11
11
  cert_chain: []
12
- date: 2013-07-17 00:00:00.000000000 Z
12
+ date: 2013-10-15 00:00:00.000000000 Z
13
13
  dependencies:
14
14
  - !ruby/object:Gem::Dependency
15
15
  name: ridley
@@ -17,42 +17,42 @@ dependencies:
17
17
  requirements:
18
18
  - - ~>
19
19
  - !ruby/object:Gem::Version
20
- version: 1.3.0
20
+ version: '1.6'
21
21
  type: :runtime
22
22
  prerelease: false
23
23
  version_requirements: !ruby/object:Gem::Requirement
24
24
  requirements:
25
25
  - - ~>
26
26
  - !ruby/object:Gem::Version
27
- version: 1.3.0
27
+ version: '1.6'
28
28
  - !ruby/object:Gem::Dependency
29
29
  name: celluloid
30
30
  requirement: !ruby/object:Gem::Requirement
31
31
  requirements:
32
32
  - - ~>
33
33
  - !ruby/object:Gem::Version
34
- version: 0.14.0
34
+ version: '0.15'
35
35
  type: :runtime
36
36
  prerelease: false
37
37
  version_requirements: !ruby/object:Gem::Requirement
38
38
  requirements:
39
39
  - - ~>
40
40
  - !ruby/object:Gem::Version
41
- version: 0.14.0
41
+ version: '0.15'
42
42
  - !ruby/object:Gem::Dependency
43
43
  name: reel
44
44
  requirement: !ruby/object:Gem::Requirement
45
45
  requirements:
46
- - - '='
46
+ - - '>='
47
47
  - !ruby/object:Gem::Version
48
- version: 0.4.0.pre
48
+ version: 0.4.0
49
49
  type: :runtime
50
50
  prerelease: false
51
51
  version_requirements: !ruby/object:Gem::Requirement
52
52
  requirements:
53
- - - '='
53
+ - - '>='
54
54
  - !ruby/object:Gem::Version
55
- version: 0.4.0.pre
55
+ version: 0.4.0
56
56
  - !ruby/object:Gem::Dependency
57
57
  name: grape
58
58
  requirement: !ruby/object:Gem::Requirement
@@ -191,6 +191,7 @@ files:
191
191
  - .gitignore
192
192
  - .ruby-version
193
193
  - .travis.yml
194
+ - CHANGELOG.md
194
195
  - CONTRIBUTING.md
195
196
  - Gemfile
196
197
  - Guardfile