pgdice 0.4.3 → 1.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: e580b9c81c5f8881771e2728e23a012703d15ce1196ef3395f32e40bbe8bfa0b
- data.tar.gz: 0ab69e25f7d15d8fef14bfaeba53cccc2cd7e212e80d8fdaa36629c030410a28
+ metadata.gz: aa5557cff6f9f2ef5c8f73d925d6dc21d99df9300d368b05172a699893b97c22
+ data.tar.gz: 563dc93f6915247e5ff7d5b6500d1cfdcbcb38247a871d787d0d76bfad50fffc
  SHA512:
- metadata.gz: cbf8ecffdeb93605197217c6aba201611672d92ed56598b3580807e482c86ca4e740425b17ee25ac4161c7445b25c277484d71b1c58750f2dac55ba90db1abed
- data.tar.gz: 2358edafb9391d37df00982c1ef05cd490c44730ed8c45bc854c4e4eeaba433979c03a5a141a3032a81eb9b2753e47164cd967201abeb5c9c31980c6a2917488
+ metadata.gz: e7dbe79ec3584a45e054827acf2993d6aa02ccdae42335418cd4113c385c36a460d21c6c6cd68cbc2ced9044cfed38ff6e952735791a54a8587c65a905e0aa13
+ data.tar.gz: fe0e0ccb8f503b7bfed31f1baf6bb1b986566f26324b35550f8c39408ae013df25439bfd6b57f268f0883e827211a13dc5332711e296612f90c8f2cd0a1a8efa
data/.codeclimate.yml CHANGED
@@ -2,4 +2,7 @@ version: "2"
  checks:
  argument-count:
  config:
- threshold: 5
+ threshold: 5
+
+ exclude_patterns:
+ - "examples/**/*"
data/.github/ISSUE_TEMPLATE/bug_report.md ADDED
@@ -0,0 +1,27 @@
+ ---
+ name: Bug report
+ about: Create a report to help us improve
+ title: ''
+ labels: needs investigation
+ assignees: ''
+
+ ---
+
+ **Describe the bug**
+ A clear and concise description of what the bug is.
+
+ **To Reproduce**
+ Steps to reproduce the behavior:
+
+ **Expected behavior**
+ A clear and concise description of what you expected to happen.
+
+ **Screenshots**
+ If applicable, add screenshots to help explain your problem.
+
+ **Please include this information**
+ - Postgres Version: [e.g. 10.6]
+ - PgDice Version [e.g. 0.14.3]
+
+ **Additional context**
+ Add any other context about the problem here.
data/.github/ISSUE_TEMPLATE/feature_request.md ADDED
@@ -0,0 +1,20 @@
+ ---
+ name: Feature request
+ about: Suggest an idea for this project
+ title: ''
+ labels: needs investigation
+ assignees: ''
+
+ ---
+
+ **Is your feature request related to a problem? Please describe.**
+ A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+
+ **Describe the solution you'd like**
+ A clear and concise description of what you want to happen.
+
+ **Describe alternatives you've considered**
+ A clear and concise description of any alternative solutions or features you've considered.
+
+ **Additional context**
+ Add any other context or screenshots about the feature request here.
data/.github/workflows/gempush.yml ADDED
@@ -0,0 +1,41 @@
+ name: Ruby Gem
+
+ on:
+ push:
+ branches:
+ - master
+
+ jobs:
+ build:
+ name: Build + Publish
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@master
+ - name: Set up Ruby 2.6
+ uses: actions/setup-ruby@v1
+ with:
+ version: 2.6.x
+
+ # - name: Publish to GPR
+ # run: |
+ # mkdir -p $HOME/.gem
+ # touch $HOME/.gem/credentials
+ # chmod 0600 $HOME/.gem/credentials
+ # printf -- "---\n:github: Bearer ${GEM_HOST_API_KEY}\n" > $HOME/.gem/credentials
+ # gem build *.gemspec
+ # gem push --KEY github --host https://rubygems.pkg.github.com/${OWNER} *.gem
+ # env:
+ # GEM_HOST_API_KEY: ${{secrets.GPR_AUTH_TOKEN}}
+ # OWNER: username
+
+ - name: Publish to RubyGems
+ run: |
+ mkdir -p $HOME/.gem
+ touch $HOME/.gem/credentials
+ chmod 0600 $HOME/.gem/credentials
+ printf -- "---\n:rubygems_api_key: ${GEM_HOST_API_KEY}\n" > $HOME/.gem/credentials
+ gem build *.gemspec
+ gem push *.gem
+ env:
+ GEM_HOST_API_KEY: ${{secrets.RUBYGEMS_AUTH_TOKEN}}
data/.rubocop.yml CHANGED
@@ -8,4 +8,8 @@ Metrics/BlockLength:
  # TODO: Evaluate if this is an acceptable parameter list. It's a constructor after all.
  Metrics/ParameterLists:
  Exclude:
- - lib/pgdice/table.rb
+ - lib/pgdice/table.rb
+
+ AllCops:
+ Exclude:
+ - 'examples/**/*'
data/README.md CHANGED
@@ -13,18 +13,6 @@ PgDice is intended to be used by scheduled background jobs in frameworks like [S
  where logging and clear exception messages are crucial.
 
 
- ## Disclaimer
-
- There are some features in this gem which allow you to drop database tables.
-
- If you choose to use this software without a __tested and working__ backup and restore strategy in place then you
- are a fool and will pay the price for your negligence. THIS SOFTWARE IS PROVIDED "AS IS",
- WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED. By using this software you agree that the creator,
- maintainers and any affiliated parties CANNOT BE HELD LIABLE FOR DATA LOSS OR LOSSES OF ANY KIND.
-
- See the [LICENSE](LICENSE) for more information.
-
-
  # Installation
 
  Add this line to your application's Gemfile:
@@ -279,6 +267,12 @@ PgDice.list_droppable_partitions('comments', past: 60)
  ```
  This example would use `60` instead of the configured value of `90` from the `comments` table we configured above.
 
+ # Examples
+
+ 1. [Here's an example on how to use PgDice in AWS](examples/aws) and the [README](examples/aws/README.md) which will guide
+ you through what is going on.
+
+ 1. [Here's an example on how to write a config.yml for PgDice](examples/config.yml)
 
  # FAQ
 
@@ -292,7 +286,7 @@ This example would use `60` instead of the configured value of `90` from the `co
  password = config[Rails.env]["password"]
 
  "postgres://#{username}:#{password}@#{host}/#{database}"
- end
+ end
  ```
 
  1. I'm seeing off-by-one errors for my `assert_tables` calls?
@@ -305,6 +299,8 @@ end
  1. Non time-range based partitioning. [PgParty](https://github.com/rkrage/pg_party) might be a good option!
 
 
+
+
  # Development
 
  After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake test` to run the tests.
@@ -336,6 +332,16 @@ to be a safe, welcoming space for collaboration, and contributors are expected t
 
  The gem is available as open source under the terms of the [MIT License](https://opensource.org/licenses/MIT).
 
+ # Disclaimer
+
+ There are some features in this gem which allow you to drop database tables.
+
+ If you choose to use this software without a __tested and working__ backup and restore strategy in place then you
+ are a fool and will pay the price for your negligence. THIS SOFTWARE IS PROVIDED "AS IS",
+ WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED. By using this software you agree that the creator,
+ maintainers and any affiliated parties CANNOT BE HELD LIABLE FOR DATA LOSS OR LOSSES OF ANY KIND.
+
+ See the [LICENSE](LICENSE) for more information.
 
  # Code of Conduct
 
data/examples/aws/README.md ADDED
@@ -0,0 +1,28 @@
+ # How can I use PgDice in production?
+
+ This collection of files is how I use PgDice in production. I'll describe the architecture here so you'll have a place
+ to start.
+
+ 1. `tasks/poll_sqs.rake` is run using some sort of process manager like systemd on the ec2 instance. I like to run
+ the poll_sqs stuff on my Sidekiq instances because they are the ones who eventually handle the work anyway.
+
+ 1. `lib/sqs_poller.rb` is used to handle the looping logic for the rake task. It invokes `lib/sqs_listener.rb` for each
+ iteration.
+
+ 1. `lib/sqs_listener.rb` calls AWS SQS to receive messages and then passes each one into the `lib/sqs_listener/sqs_event_router.rb`
+ to be routed to the correct message handler.
+
+ 1. Inside `lib/sqs_listener/sqs_event_router.rb` the message is parsed and passed through a case statement.
+ This could be abstracted better but for now if the message has a field of `event_type` and a value of `"task"` then
+ the router will send it off to the `TaskEventHandler` which in this case is
+ `lib/sqs_listener/typed_event_handler/task_event_handler.rb`
+
+ 1. In the `TaskEventHandler` the task is sent to a handler which responds to the task specified in the message body field `task`.
+
+ 1. The handler for the task (in this case, `DatabaseTasks`) handles the parameters for invoking the Sidekiq worker: `PgDiceWorker`
+
+ 1. Finally, the `PgDiceWorker` is called and handles invoking `PgDice` based on the parameters passed in.
+
+
+ Hopefully that wasn't too confusing. There's a lot of steps in here because the system that uses PgDice handles lots
+ of different types of SQS events and needs to be as resilient as possible.
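
The CloudWatch rules in `cloudformation/scheduled_events.json` (next file below) deliver a JSON body such as `{"event_type":"task","task":"add_new_partitions"}` to the incoming queue. As a minimal sketch, not part of the packaged files, the same kind of task message could be sent by hand for testing, assuming the `SqsQueueUrl` environment variable that `SqsListener` reads:

```ruby
# Hypothetical snippet for manually exercising the pipeline described above.
# The queue URL comes from the same ENV var SqsListener uses; adjust as needed.
require 'aws-sdk-sqs'
require 'json'

sqs = Aws::SQS::Client.new
sqs.send_message(
  queue_url: ENV.fetch('SqsQueueUrl'),
  message_body: JSON.generate(event_type: 'task', task: 'add_new_partitions')
)
```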
data/examples/aws/cloudformation/scheduled_events.json ADDED
@@ -0,0 +1,59 @@
+ {
+ "Description": "Deployment stack",
+ "Parameters": {
+ "PgDiceEnabled": {
+ "Type": "String",
+ "Description": "The ENABLED/DISABLED state of the cloudwatch scheduled events for PgDice."
+ }
+ },
+ "Resources": {
+ "PgDiceDailyAddPartitions": {
+ "DependsOn": "IncomingSQS",
+ "Type": "AWS::Events::Rule",
+ "Properties": {
+ "State":{
+ "Ref": "PgDiceEnabled"
+ },
+ "Description": " PgDice daily add partitions",
+ "Name": "PgDiceDailyAddPartitions",
+ "ScheduleExpression": "rate(1 day)",
+ "Targets": [
+ {
+ "Arn": {
+ "Fn::GetAtt": [
+ "IncomingSQS",
+ "Arn"
+ ]
+ },
+ "Id": "PgDiceDailyAddPartitionsId",
+ "Input": "{\"event_type\":\"task\",\"task\":\"add_new_partitions\"}"
+ }
+ ]
+ }
+ },
+ "PgDiceDailyDropPartitions": {
+ "DependsOn": "IncomingSQS",
+ "Type": "AWS::Events::Rule",
+ "Properties": {
+ "State":{
+ "Ref": "PgDiceEnabled"
+ },
+ "Description": " PgDice daily drop partitions",
+ "Name": "PgDiceDailyDropPartitions",
+ "ScheduleExpression": "rate(1 day)",
+ "Targets": [
+ {
+ "Arn": {
+ "Fn::GetAtt": [
+ "IncomingSQS",
+ "Arn"
+ ]
+ },
+ "Id": "PgDiceDailyDropPartitionsId",
+ "Input": "{\"event_type\":\"task\",\"task\":\"drop_old_partitions\"}"
+ }
+ ]
+ }
+ }
+ }
+ }
data/examples/aws/lib/sqs_listener.rb ADDED
@@ -0,0 +1,47 @@
+ # frozen_string_literal: true
+
+ require 'aws-sdk-sqs'
+
+ # READ_ONLY_SQS can be set to ensure we don't delete good messages
+ class SqsListener
+ DEFAULT_VISIBILITY_TIMEOUT ||= 600
+ attr_reader :logger, :queue_url, :visibility_timeout
+
+ def initialize(opts = {})
+ @logger = opts[:logger] ||= Sidekiq.logger
+ @queue_url = opts[:queue_url] ||= ENV['SqsQueueUrl']
+ @sqs_client = opts[:sqs_client] ||= Aws::SQS::Client.new
+ @sqs_event_router = opts[:sqs_event_router] ||= SqsEventRouter.new(logger: logger)
+ increase_timeout_resolver = opts[:increase_timeout_resolver] ||= -> { ENV['READ_ONLY_SQS'].to_s == 'true' }
+ @visibility_timeout = calculate_visibility_timeout(increase_timeout_resolver.call)
+
+ logger.debug { "Running in environment: #{ENV['RAILS_ENV']} and using sqs queue: #{queue_url}" }
+ end
+
+ # http://docs.aws.amazon.com/sdk-for-ruby/v3/developer-guide/sqs-example-get-messages-with-long-polling.html
+ def call
+ # This uses long polling to retrieve sqs events so we can process them
+ response = @sqs_client.receive_message(queue_url: queue_url,
+ max_number_of_messages: 10,
+ wait_time_seconds: 20,
+ visibility_timeout: visibility_timeout)
+
+ if response.messages&.size&.positive?
+ logger.debug { "The number of messages received from the queue was: #{response.messages&.size}" }
+ end
+
+ # Iterate over all the messages in the response (Response is a Struct which acts like an object with methods)
+ response.messages&.each do |message|
+ @sqs_event_router.handle_message(message)
+ end
+ end
+
+ private
+
+ def calculate_visibility_timeout(increase_timeout)
+ visibility_timeout = increase_timeout ? DEFAULT_VISIBILITY_TIMEOUT * 4 : DEFAULT_VISIBILITY_TIMEOUT
+
+ logger.info { "Visibility timeout set to: #{visibility_timeout} seconds" }
+ visibility_timeout
+ end
+ end
data/examples/aws/lib/sqs_listener/default_event_handler.rb ADDED
@@ -0,0 +1,32 @@
+ # frozen_string_literal: true
+
+ class DefaultEventHandler
+ attr_reader :logger
+
+ def initialize(opts = {})
+ @logger = opts[:logger] ||= Sidekiq.logger
+ @fallthrough_event_handler = opts[:fallthrough_event_handler] ||= FallthroughEventHandler.new(logger: logger)
+ end
+
+ def handle_message(message)
+ # Since 'message' is a JSON formatted string, parse the JSON and then get the values under the 'Records' key
+ # When JSON parses a string it returns a Ruby Hash (just like a Java HashMap)
+ records = JSON.parse(message.body)['Records']
+ if records
+ process_records(records, message)
+ else
+ # If the message body doesn't have any entries under the 'Records' key then we don't know what to do.
+ @fallthrough_event_handler.call(message)
+ end
+ rescue StandardError => e
+ # If any errors are raised processing this message then call the fallthrough because something went wrong.
+ logger.error { "Caught error while handling incoming message. Calling fallthrough_event_handler. Error: #{e}" }
+ @fallthrough_event_handler.call(message)
+ end
+
+ private
+
+ def process_records(records, message)
+ # Process default event
+ end
+ end
data/examples/aws/lib/sqs_listener/exceptions/unknown_task_error.rb ADDED
@@ -0,0 +1,4 @@
+ # frozen_string_literal: true
+
+ class UnknownTaskError < StandardError
+ end
data/examples/aws/lib/sqs_listener/fallthrough_event_handler.rb ADDED
@@ -0,0 +1,18 @@
+ # frozen_string_literal: true
+
+ class FallthroughEventHandler
+ attr_reader :logger
+
+ def initialize(opts = {})
+ @logger = opts[:logger] ||= Sidekiq.logger
+ @sqs_message_deleter = opts[:sqs_message_deleter] ||= SqsMessageDeleter.new(logger: logger)
+ end
+
+ def call(message)
+ logger.warn do
+ "Received sqs message we don't know how to process. Message: #{message}"
+ end
+
+ @sqs_message_deleter.call(message.receipt_handle)
+ end
+ end
data/examples/aws/lib/sqs_listener/sqs_event_router.rb ADDED
@@ -0,0 +1,32 @@
+ # frozen_string_literal: true
+
+ # Responsible for routing incoming SQS events to the correct handler
+ class SqsEventRouter
+ attr_reader :logger
+
+ def initialize(opts = {})
+ @logger = opts[:logger] ||= Sidekiq.logger
+ @task_event_handler = opts[:task_event_handler] ||= TaskEventHandler.new(logger: logger)
+ @default_event_handler = opts[:default_event_handler] ||= DefaultEventHandler.new(logger: logger)
+ @sqs_message_deleter = opts[:sqs_message_deleter] ||= SqsMessageDeleter.new(logger: logger)
+ end
+
+ # Handles incoming sqs event, looking for a field of 'event_type'
+ # See scheduled_events.json for details on how to create task events from cloudwatch
+ def handle_message(message)
+ message_body = JSON.parse(message.body).with_indifferent_access
+ event_type = message_body[:event_type]
+
+ logger.tagged(message.receipt_handle) do
+ logger.debug { "The received message was: #{message}" }
+
+ case event_type
+ when 'task'
+ @task_event_handler.run_task(message_body)
+ @sqs_message_deleter.call(message.receipt_handle)
+ else
+ @default_event_handler.handle_message(message)
+ end
+ end
+ end
+ end
data/examples/aws/lib/sqs_listener/typed_event_handler/task_event_handler.rb ADDED
@@ -0,0 +1,46 @@
+ # frozen_string_literal: true
+
+ class TaskEventHandler
+ attr_reader :logger
+
+ def initialize(opts = {})
+ @logger = opts[:logger] ||= Sidekiq.logger
+ @task_handlers = [opts[:task_handlers] ||= initialize_default_handlers].flatten.compact
+ end
+
+ def run_task(message_body_hash)
+ task = message_body_hash.fetch(:task).to_sym
+ logger.debug { "Running task: #{task}. Searching for task in: #{@task_handlers}" }
+
+ task_handlers = resolve_task_handlers(task)
+
+ if task_handlers.blank?
+ raise UnknownTaskError, "Could not find task: #{task} in any of the available task_handlers: #{@task_handlers}"
+ end
+
+ invoke_task_handler(task_handlers.first, task, message_body_hash.fetch(:parameters, {}))
+ end
+
+ private
+
+ def resolve_task_handlers(task)
+ task_handlers = @task_handlers.select { |task_handler| task_handler.respond_to?(task) }
+
+ task_handlers.each do |task_handler|
+ logger.debug { "Found task handler: #{task_handler.class} that can handle task: #{task}" }
+ end
+ task_handlers
+ end
+
+ def invoke_task_handler(task_handler, task, params)
+ logger.debug { "Invoking handler: #{task_handler.class}##{task} with params: #{params}" }
+ task_handler.public_send(task, params)
+ end
+
+ def initialize_default_handlers
+ [
+ DatabaseTasks.new
+ # Other tasks go here
+ ]
+ end
+ end
data/examples/aws/lib/sqs_listener/typed_event_handler/tasks/database_tasks.rb ADDED
@@ -0,0 +1,37 @@
+ # frozen_string_literal: true
+
+ # Tasks that we can use to maintain partitioned tables over time
+ # You can override the default params hash by passing it in to the method calls.
+ # The default params are defined inside each method.
+
+ # Also, as far as the string keys for hashes go:
+ # https://github.com/mperham/sidekiq/wiki/Best-Practices
+ # Sidekiq job parameters must be JSON serializable. That means Ruby symbols are
+ # lost when they are sent through JSON!
+ class DatabaseTasks
+ def initialize(opts = {})
+ @pgdice = opts[:pgdice] ||= PgDice
+ @task_runner = opts[:task_runner] ||= ->(method, params) { PgdiceWorker.perform_async(method, params) }
+ end
+
+ def add_new_partitions(params = {})
+ all_params = { 'table_names' => table_names, 'only' => 'future', 'validate' => true }.merge(params)
+ @task_runner.call('add_new_partitions', all_params)
+ end
+
+ def drop_old_partitions(params = {})
+ all_params = { 'table_names' => table_names, 'only' => 'past', 'validate' => true }.merge(params)
+ @task_runner.call('drop_old_partitions', all_params)
+ end
+
+ def assert_tables(params = {})
+ all_params = { 'table_names' => table_names, 'validate' => false }.merge(params)
+ @task_runner.call('assert_tables', all_params)
+ end
+
+ private
+
+ def table_names
+ @pgdice.approved_tables.map(&:name)
+ end
+ end
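
As the comments in this class note, the default params hash can be overridden per call. A hedged usage sketch follows; the `comments` table name and the inline `task_runner` are illustrative only, and PgDice is assumed to be configured with approved tables as described in the gem's README.

```ruby
# Illustrative only: swap the Sidekiq task_runner for one that just prints,
# then trigger the tasks with and without overrides.
tasks = DatabaseTasks.new(
  task_runner: ->(method, params) { puts "#{method}: #{params.inspect}" }
)

tasks.add_new_partitions                                  # defaults: 'only' => 'future', 'validate' => true
tasks.drop_old_partitions('table_names' => ['comments'],  # override the table list
                          'validate'    => false)         # and skip the assert_tables validation
```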
data/examples/aws/lib/sqs_message_deleter.rb ADDED
@@ -0,0 +1,32 @@
+ # frozen_string_literal: true
+
+ require 'aws-sdk-sqs'
+
+ class SqsMessageDeleter
+ attr_reader :logger
+
+ def initialize(opts = {})
+ @logger = opts[:logger] ||= Sidekiq.logger
+ @queue_url = opts[:queue_url] ||= ENV['SqsQueueUrl']
+ @sqs_client = opts[:sqs_client] ||= Aws::SQS::Client.new
+ @skip_delete_predicate = opts[:skip_delete_predicate] ||= proc do
+ Rails.env != 'production' || ENV['READ_ONLY_SQS'].to_s == 'true'
+ end
+ end
+
+ def call(sqs_message_receipt_handle)
+ if @skip_delete_predicate.call
+ logger.info { "Not destroying sqs message because environment is not prod or READ_ONLY_SQS was set to 'true'" }
+ return false
+ end
+
+ logger.debug { "Destroying sqs message with handle: #{sqs_message_receipt_handle}" }
+
+ response = @sqs_client.delete_message(queue_url: @queue_url, receipt_handle: sqs_message_receipt_handle)
+ unless response.successful?
+ raise "Attempt to delete SQS message: #{sqs_message_receipt_handle} was not successful. Response: #{response}"
+ end
+
+ true
+ end
+ end
data/examples/aws/lib/sqs_poller.rb ADDED
@@ -0,0 +1,67 @@
+ # frozen_string_literal: true
+
+ require 'aws-sdk-sqs'
+
+ class SqsPoller
+ attr_reader :logger, :queue_url
+
+ MAX_RETRIES ||= 3
+ DEFAULT_WAIT_TIME ||= 5
+
+ def initialize(opts = {})
+ @logger = opts[:logger] ||= ActiveSupport::TaggedLogging.new(Logger.new(ENV['POLL_SQS_LOG_OUTPUT'] || STDOUT))
+ @max_retries = opts[:max_retries] ||= MAX_RETRIES
+ @sleep_seconds = opts[:sleep_seconds] ||= DEFAULT_WAIT_TIME
+ @error_sleep_seconds = opts[:error_sleep_seconds] ||= @sleep_seconds * 2
+ @sqs_listener = opts[:sqs_listener] ||= SqsListener.new(logger: logger)
+ end
+
+ def poll(iterations = Float::INFINITY)
+ logger.info { "Starting loop to #{iterations}, press Ctrl-C to exit" }
+
+ retries = 0
+ i = 0
+
+ while i < iterations
+ begin
+ i += 1
+ execute_loop
+ rescue StandardError => e
+ if retries < MAX_RETRIES
+ retries = handle_retry(retries, e)
+ retry
+ else
+ die(e)
+ end
+ rescue Exception => e
+ die(e)
+ end
+ end
+ end
+
+ private
+
+ def execute_loop
+ @sqs_listener.call
+ sleep @sleep_seconds
+ end
+
+ def handle_retry(retries, error)
+ logger.error do
+ "Polling loop encountered an error. Will retry in #{@error_sleep_seconds} seconds. "\
+ "Error: #{error}. Retries: #{retries}"
+ end
+ retries += 1
+
+ # Handle error with error tracking service
+ # @error_handler.call(error)
+
+ sleep @error_sleep_seconds
+ retries
+ end
+
+ def die(error)
+ logger.fatal { "Polling loop is stopping due to exception: #{error}" }
+ raise error
+ end
+ end
data/examples/aws/tasks/poll_sqs.rake ADDED
@@ -0,0 +1,8 @@
+ # frozen_string_literal: true
+
+ # You can set READ_ONLY_SQS=true if you don't want to delete messages
+
+ desc 'Poll SQS for any new test executions'
+ task poll_sqs: :environment do
+ SqsPoller.new.poll
+ end
data/examples/aws/workers/pg_dice_worker.rb ADDED
@@ -0,0 +1,54 @@
+ # frozen_string_literal: true
+
+ # https://github.com/mperham/sidekiq/wiki/Best-Practices
+ # Sidekiq job parameters must be JSON serializable. That means Ruby symbols are
+ # lost when they are sent through JSON!
+ class PgdiceWorker
+ include Sidekiq::Worker
+ attr_reader :logger
+ sidekiq_options queue: :default, backtrace: true, retry: 5
+
+ def initialize(opts = {})
+ @pgdice = opts[:pgdice] ||= PgDice
+ @logger = opts[:logger] ||= Sidekiq.logger
+ @validator = opts[:validator] ||= lambda do |table_name, params|
+ @pgdice.public_send(:assert_tables, table_name, params)
+ end
+ end
+
+ def perform(method, params)
+ table_names = params.delete('table_names')
+ validate = params.delete('validate').present?
+ # Don't pass in params to PgDice if the hash is empty. PgDice will behave differently when params are passed.
+ pgdice_params = params.keys.size.zero? ? nil : handle_pgdice_params(params)
+
+ logger.debug { "PgdiceWorker called with method: #{method} and table_names: #{table_names}. Validate: #{validate}" }
+
+ [table_names].flatten.compact.each do |table_name|
+ @pgdice.public_send(method, table_name, pgdice_params)
+ @validator.call(table_name, pgdice_params) if validate
+ end
+ end
+
+ private
+
+ def handle_pgdice_params(pgdice_params)
+ convert_pgdice_param_values(pgdice_known_symbol_keys(pgdice_params))
+ end
+
+ def pgdice_known_symbol_keys(params)
+ convertable_keys = ['only']
+ params.keys.each do |key|
+ params[key.to_sym] = params.delete(key) if convertable_keys.include?(key)
+ end
+ params
+ end
+
+ def convert_pgdice_param_values(params)
+ symbolize_values_for_keys = [:only]
+ params.each do |key, value|
+ params[key] = value.to_sym if symbolize_values_for_keys.include?(key)
+ end
+ params
+ end
+ end
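
The worker receives exactly the JSON-serializable arguments that `DatabaseTasks` enqueues. A hedged sketch of enqueueing it directly (the table name is illustrative only):

```ruby
# Illustrative only: enqueue the same kind of job DatabaseTasks#drop_old_partitions produces.
# String keys are deliberate; Sidekiq arguments must survive a round trip through JSON.
PgdiceWorker.perform_async(
  'drop_old_partitions',
  'table_names' => ['comments'],
  'only'        => 'past',
  'validate'    => true
)
```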
@@ -40,8 +40,8 @@ module PgDice
 
  def undo_partitioning(table_name)
  undo_partitioning!(table_name)
- rescue PgDice::PgSliceError => error
- logger.error { "Rescued PgSliceError: #{error}" }
+ rescue PgDice::PgSliceError => e
+ logger.error { "Rescued PgSliceError: #{e}" }
  false
  end
 
@@ -73,8 +73,8 @@ module PgDice
  table_name = params.fetch(:table_name)
 
  run_pgslice("unprep #{table_name}", params[:dry_run])
- rescue PgSliceError => error
- logger.error { "Rescued PgSliceError: #{error}" }
+ rescue PgSliceError => e
+ logger.error { "Rescued PgSliceError: #{e}" }
  false
  end
 
@@ -82,8 +82,8 @@ module PgDice
  table_name = params.fetch(:table_name)
 
  run_pgslice("unswap #{table_name}", params[:dry_run])
- rescue PgSliceError => error
- logger.error { "Rescued PgSliceError: #{error}" }
+ rescue PgSliceError => e
+ logger.error { "Rescued PgSliceError: #{e}" }
  false
  end
 
@@ -13,8 +13,8 @@ module PgDice
 
  def call(query)
  @connection_supplier.call.exec(query)
- rescue PG::Error => error
- logger.error { "Caught error: #{error}. Going to reset connection and try again" }
+ rescue PG::Error => e
+ logger.error { "Caught error: #{e}. Going to reset connection and try again" }
  @connection_supplier.call.reset
  @connection_supplier.call.exec(query)
  end
data/lib/pgdice/version.rb CHANGED
@@ -1,5 +1,5 @@
  # frozen_string_literal: true
 
  module PgDice
- VERSION = '0.4.3'
+ VERSION = '1.0.1'
  end
data/pgdice.gemspec CHANGED
@@ -25,7 +25,7 @@ Gem::Specification.new do |spec|
  spec.require_paths = ['lib']
 
  # Locked because we depend on internal behavior for table commenting
- spec.add_runtime_dependency 'pg', '~> 1.1.0', '>= 1.1.0'
+ spec.add_runtime_dependency 'pg', '~> 1.2.2', '>= 1.1.0'
  spec.add_runtime_dependency 'pgslice', '0.4.5'
 
  spec.add_development_dependency 'bundler', '~> 1.16', '>= 1.16'
@@ -38,6 +38,6 @@ Gem::Specification.new do |spec|
  spec.add_development_dependency 'minitest-ci', '~> 3.4.0', '>= 3.4.0'
  spec.add_development_dependency 'minitest-reporters', '~> 1.3.4', '>= 1.3.4'
  spec.add_development_dependency 'rake', '~> 10.0', '>= 10.0'
- spec.add_development_dependency 'rubocop', '0.59'
+ spec.add_development_dependency 'rubocop', '0.71'
  spec.add_development_dependency 'simplecov', '~> 0.16.1', '>= 0.16.1'
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: pgdice
  version: !ruby/object:Gem::Version
- version: 0.4.3
+ version: 1.0.1
  platform: ruby
  authors:
  - Andrew Newell
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2019-04-23 00:00:00.000000000 Z
+ date: 2020-01-31 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: pg
@@ -19,7 +19,7 @@ dependencies:
  version: 1.1.0
  - - "~>"
  - !ruby/object:Gem::Version
- version: 1.1.0
+ version: 1.2.2
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
@@ -29,7 +29,7 @@ dependencies:
  version: 1.1.0
  - - "~>"
  - !ruby/object:Gem::Version
- version: 1.1.0
+ version: 1.2.2
  - !ruby/object:Gem::Dependency
  name: pgslice
  requirement: !ruby/object:Gem::Requirement
@@ -250,14 +250,14 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: '0.59'
+ version: '0.71'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: '0.59'
+ version: '0.71'
  - !ruby/object:Gem::Dependency
  name: simplecov
  requirement: !ruby/object:Gem::Requirement
@@ -288,6 +288,9 @@ files:
  - ".circleci/config.yml"
  - ".codeclimate.yml"
  - ".coveralls.yml"
+ - ".github/ISSUE_TEMPLATE/bug_report.md"
+ - ".github/ISSUE_TEMPLATE/feature_request.md"
+ - ".github/workflows/gempush.yml"
  - ".gitignore"
  - ".rubocop.yml"
  - ".ruby-gemset"
@@ -303,6 +306,19 @@ files:
  - bin/console
  - bin/guard
  - bin/setup
+ - examples/aws/README.md
+ - examples/aws/cloudformation/scheduled_events.json
+ - examples/aws/lib/sqs_listener.rb
+ - examples/aws/lib/sqs_listener/default_event_handler.rb
+ - examples/aws/lib/sqs_listener/exceptions/unknown_task_error.rb
+ - examples/aws/lib/sqs_listener/fallthrough_event_handler.rb
+ - examples/aws/lib/sqs_listener/sqs_event_router.rb
+ - examples/aws/lib/sqs_listener/typed_event_handler/task_event_handler.rb
+ - examples/aws/lib/sqs_listener/typed_event_handler/tasks/database_tasks.rb
+ - examples/aws/lib/sqs_message_deleter.rb
+ - examples/aws/lib/sqs_poller.rb
+ - examples/aws/tasks/poll_sqs.rake
+ - examples/aws/workers/pg_dice_worker.rb
  - examples/config.yml
  - lib/pgdice.rb
  - lib/pgdice/approved_tables.rb
@@ -352,8 +368,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubyforge_project:
- rubygems_version: 2.7.9
+ rubygems_version: 3.0.3
  signing_key:
  specification_version: 4
  summary: Postgres table partitioning with a Ruby API!