webhookdb 1.3.1 → 1.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
Files changed (164)
  1. checksums.yaml +4 -4
  2. data/admin-dist/assets/{index-6aebf805.js → index-9306dd28.js} +39 -39
  3. data/admin-dist/index.html +1 -1
  4. data/data/messages/templates/errors/generic_backfill.email.liquid +30 -0
  5. data/data/messages/templates/errors/icalendar_fetch.email.liquid +8 -2
  6. data/data/messages/templates/specs/with_fields.email.liquid +6 -0
  7. data/db/migrations/026_undo_integration_backfill_cursor.rb +2 -0
  8. data/db/migrations/032_remove_db_defaults.rb +2 -0
  9. data/db/migrations/043_text_search.rb +2 -0
  10. data/db/migrations/045_system_log.rb +15 -0
  11. data/db/migrations/046_indices.rb +14 -0
  12. data/db/migrations/047_sync_parallelism.rb +9 -0
  13. data/db/migrations/048_sync_stats.rb +9 -0
  14. data/db/migrations/049_error_handlers.rb +18 -0
  15. data/db/migrations/050_logged_webhook_indices.rb +25 -0
  16. data/db/migrations/051_partitioning.rb +9 -0
  17. data/integration/async_spec.rb +0 -2
  18. data/integration/service_integrations_spec.rb +0 -2
  19. data/lib/amigo/durable_job.rb +2 -2
  20. data/lib/amigo/job_in_context.rb +12 -0
  21. data/lib/webhookdb/admin.rb +6 -0
  22. data/lib/webhookdb/admin_api/data_provider.rb +1 -0
  23. data/lib/webhookdb/admin_api/entities.rb +8 -0
  24. data/lib/webhookdb/aggregate_result.rb +1 -1
  25. data/lib/webhookdb/api/entities.rb +6 -2
  26. data/lib/webhookdb/api/error_handlers.rb +104 -0
  27. data/lib/webhookdb/api/helpers.rb +25 -1
  28. data/lib/webhookdb/api/icalproxy.rb +22 -0
  29. data/lib/webhookdb/api/install.rb +2 -1
  30. data/lib/webhookdb/api/organizations.rb +6 -0
  31. data/lib/webhookdb/api/saved_queries.rb +1 -0
  32. data/lib/webhookdb/api/saved_views.rb +1 -0
  33. data/lib/webhookdb/api/service_integrations.rb +2 -1
  34. data/lib/webhookdb/api/sync_targets.rb +1 -1
  35. data/lib/webhookdb/api/system.rb +5 -0
  36. data/lib/webhookdb/api/webhook_subscriptions.rb +1 -0
  37. data/lib/webhookdb/api.rb +4 -1
  38. data/lib/webhookdb/apps.rb +4 -0
  39. data/lib/webhookdb/async/autoscaler.rb +10 -0
  40. data/lib/webhookdb/async/job.rb +4 -0
  41. data/lib/webhookdb/async/scheduled_job.rb +4 -0
  42. data/lib/webhookdb/async.rb +2 -0
  43. data/lib/webhookdb/backfiller.rb +17 -4
  44. data/lib/webhookdb/concurrent.rb +96 -0
  45. data/lib/webhookdb/connection_cache.rb +57 -10
  46. data/lib/webhookdb/console.rb +1 -1
  47. data/lib/webhookdb/customer/reset_code.rb +1 -1
  48. data/lib/webhookdb/customer.rb +5 -4
  49. data/lib/webhookdb/database_document.rb +1 -1
  50. data/lib/webhookdb/db_adapter/default_sql.rb +1 -14
  51. data/lib/webhookdb/db_adapter/partition.rb +14 -0
  52. data/lib/webhookdb/db_adapter/partitioning.rb +8 -0
  53. data/lib/webhookdb/db_adapter/pg.rb +77 -5
  54. data/lib/webhookdb/db_adapter/snowflake.rb +15 -6
  55. data/lib/webhookdb/db_adapter.rb +25 -3
  56. data/lib/webhookdb/dbutil.rb +2 -0
  57. data/lib/webhookdb/errors.rb +34 -0
  58. data/lib/webhookdb/fixtures/logged_webhooks.rb +4 -0
  59. data/lib/webhookdb/fixtures/organization_error_handlers.rb +20 -0
  60. data/lib/webhookdb/http.rb +30 -16
  61. data/lib/webhookdb/icalendar.rb +30 -9
  62. data/lib/webhookdb/jobs/amigo_test_jobs.rb +1 -1
  63. data/lib/webhookdb/jobs/backfill.rb +21 -25
  64. data/lib/webhookdb/jobs/create_mirror_table.rb +3 -4
  65. data/lib/webhookdb/jobs/deprecated_jobs.rb +3 -0
  66. data/lib/webhookdb/jobs/emailer.rb +2 -1
  67. data/lib/webhookdb/jobs/front_signalwire_message_channel_sync_inbound.rb +15 -0
  68. data/lib/webhookdb/jobs/icalendar_delete_stale_cancelled_events.rb +7 -2
  69. data/lib/webhookdb/jobs/icalendar_enqueue_syncs.rb +74 -11
  70. data/lib/webhookdb/jobs/icalendar_enqueue_syncs_for_urls.rb +22 -0
  71. data/lib/webhookdb/jobs/icalendar_sync.rb +21 -9
  72. data/lib/webhookdb/jobs/increase_event_handler.rb +3 -2
  73. data/lib/webhookdb/jobs/{logged_webhook_replay.rb → logged_webhooks_replay.rb} +5 -3
  74. data/lib/webhookdb/jobs/message_dispatched.rb +1 -0
  75. data/lib/webhookdb/jobs/model_event_system_log_tracker.rb +112 -0
  76. data/lib/webhookdb/jobs/monitor_metrics.rb +29 -0
  77. data/lib/webhookdb/jobs/organization_database_migration_notify.rb +32 -0
  78. data/lib/webhookdb/jobs/organization_database_migration_run.rb +4 -6
  79. data/lib/webhookdb/jobs/organization_error_handler_dispatch.rb +26 -0
  80. data/lib/webhookdb/jobs/prepare_database_connections.rb +1 -0
  81. data/lib/webhookdb/jobs/process_webhook.rb +11 -12
  82. data/lib/webhookdb/jobs/renew_watch_channel.rb +10 -10
  83. data/lib/webhookdb/jobs/replication_migration.rb +5 -2
  84. data/lib/webhookdb/jobs/reset_code_create_dispatch.rb +1 -2
  85. data/lib/webhookdb/jobs/scheduled_backfills.rb +2 -2
  86. data/lib/webhookdb/jobs/send_invite.rb +3 -2
  87. data/lib/webhookdb/jobs/send_test_webhook.rb +1 -3
  88. data/lib/webhookdb/jobs/send_webhook.rb +4 -5
  89. data/lib/webhookdb/jobs/stale_row_deleter.rb +31 -0
  90. data/lib/webhookdb/jobs/sync_target_enqueue_scheduled.rb +3 -0
  91. data/lib/webhookdb/jobs/sync_target_run_sync.rb +9 -15
  92. data/lib/webhookdb/jobs/{webhook_subscription_delivery_attempt.rb → webhook_subscription_delivery_event.rb} +5 -8
  93. data/lib/webhookdb/liquid/expose.rb +1 -1
  94. data/lib/webhookdb/liquid/filters.rb +1 -1
  95. data/lib/webhookdb/liquid/partial.rb +2 -2
  96. data/lib/webhookdb/logged_webhook/resilient.rb +3 -3
  97. data/lib/webhookdb/logged_webhook.rb +16 -2
  98. data/lib/webhookdb/message/email_transport.rb +1 -1
  99. data/lib/webhookdb/message/transport.rb +1 -1
  100. data/lib/webhookdb/message.rb +55 -4
  101. data/lib/webhookdb/messages/error_generic_backfill.rb +47 -0
  102. data/lib/webhookdb/messages/error_icalendar_fetch.rb +5 -0
  103. data/lib/webhookdb/messages/error_signalwire_send_sms.rb +2 -0
  104. data/lib/webhookdb/messages/specs.rb +16 -0
  105. data/lib/webhookdb/organization/alerting.rb +56 -6
  106. data/lib/webhookdb/organization/database_migration.rb +2 -2
  107. data/lib/webhookdb/organization/db_builder.rb +5 -4
  108. data/lib/webhookdb/organization/error_handler.rb +141 -0
  109. data/lib/webhookdb/organization.rb +76 -10
  110. data/lib/webhookdb/postgres/model.rb +1 -0
  111. data/lib/webhookdb/postgres/model_utilities.rb +2 -0
  112. data/lib/webhookdb/postgres.rb +3 -4
  113. data/lib/webhookdb/replicator/base.rb +202 -68
  114. data/lib/webhookdb/replicator/base_stale_row_deleter.rb +165 -0
  115. data/lib/webhookdb/replicator/column.rb +2 -0
  116. data/lib/webhookdb/replicator/email_octopus_contact_v1.rb +0 -1
  117. data/lib/webhookdb/replicator/fake.rb +106 -88
  118. data/lib/webhookdb/replicator/front_signalwire_message_channel_app_v1.rb +131 -61
  119. data/lib/webhookdb/replicator/github_repo_v1_mixin.rb +17 -0
  120. data/lib/webhookdb/replicator/icalendar_calendar_v1.rb +197 -32
  121. data/lib/webhookdb/replicator/icalendar_event_v1.rb +20 -44
  122. data/lib/webhookdb/replicator/icalendar_event_v1_partitioned.rb +33 -0
  123. data/lib/webhookdb/replicator/intercom_contact_v1.rb +1 -0
  124. data/lib/webhookdb/replicator/intercom_conversation_v1.rb +1 -0
  125. data/lib/webhookdb/replicator/intercom_v1_mixin.rb +49 -6
  126. data/lib/webhookdb/replicator/partitionable_mixin.rb +116 -0
  127. data/lib/webhookdb/replicator/shopify_v1_mixin.rb +1 -1
  128. data/lib/webhookdb/replicator/signalwire_message_v1.rb +31 -1
  129. data/lib/webhookdb/replicator/sponsy_v1_mixin.rb +1 -1
  130. data/lib/webhookdb/replicator/transistor_episode_stats_v1.rb +0 -1
  131. data/lib/webhookdb/replicator/transistor_episode_v1.rb +11 -5
  132. data/lib/webhookdb/replicator/webhook_request.rb +8 -0
  133. data/lib/webhookdb/replicator.rb +6 -3
  134. data/lib/webhookdb/service/helpers.rb +4 -0
  135. data/lib/webhookdb/service/middleware.rb +6 -2
  136. data/lib/webhookdb/service/view_api.rb +1 -1
  137. data/lib/webhookdb/service.rb +10 -10
  138. data/lib/webhookdb/service_integration.rb +19 -1
  139. data/lib/webhookdb/signalwire.rb +1 -1
  140. data/lib/webhookdb/spec_helpers/async.rb +0 -4
  141. data/lib/webhookdb/spec_helpers/sentry.rb +32 -0
  142. data/lib/webhookdb/spec_helpers/shared_examples_for_replicators.rb +239 -64
  143. data/lib/webhookdb/spec_helpers.rb +1 -0
  144. data/lib/webhookdb/sync_target.rb +202 -34
  145. data/lib/webhookdb/system_log_event.rb +9 -0
  146. data/lib/webhookdb/tasks/admin.rb +1 -1
  147. data/lib/webhookdb/tasks/annotate.rb +1 -1
  148. data/lib/webhookdb/tasks/db.rb +13 -1
  149. data/lib/webhookdb/tasks/docs.rb +1 -1
  150. data/lib/webhookdb/tasks/fixture.rb +1 -1
  151. data/lib/webhookdb/tasks/message.rb +1 -1
  152. data/lib/webhookdb/tasks/regress.rb +1 -1
  153. data/lib/webhookdb/tasks/release.rb +1 -1
  154. data/lib/webhookdb/tasks/sidekiq.rb +1 -1
  155. data/lib/webhookdb/tasks/specs.rb +1 -1
  156. data/lib/webhookdb/version.rb +1 -1
  157. data/lib/webhookdb/webhook_subscription.rb +3 -4
  158. data/lib/webhookdb.rb +34 -8
  159. metadata +114 -64
  160. data/lib/webhookdb/jobs/customer_created_notify_internal.rb +0 -22
  161. data/lib/webhookdb/jobs/organization_database_migration_notify_finished.rb +0 -21
  162. data/lib/webhookdb/jobs/organization_database_migration_notify_started.rb +0 -21
  163. data/lib/webhookdb/jobs/{logged_webhook_resilient_replay.rb → logged_webhooks_resilient_replay.rb} +0 -0
  164. data/lib/webhookdb/jobs/{webhook_resource_notify_integrations.rb → webhookdb_resource_notify_integrations.rb} +0 -0
@@ -0,0 +1,29 @@
+ # frozen_string_literal: true
+
+ require "webhookdb/async/scheduled_job"
+ require "webhookdb/jobs"
+
+ # Log out some metrics every minute.
+ class Webhookdb::Jobs::MonitorMetrics
+   extend Webhookdb::Async::ScheduledJob
+
+   cron "* * * * *" # Every 1 minute
+   splay 0
+
+   def _perform
+     opts = {}
+     max_size = 0
+     max_latency = 0
+     Sidekiq::Queue.all.each do |q|
+       size = q.size
+       latency = q.latency
+       max_size = [max_size, size].max
+       max_latency = [max_latency, latency].max
+       opts["#{q.name}_size"] = size
+       opts["#{q.name}_latency"] = latency
+     end
+     opts[:max_size] = max_size
+     opts[:max_latency] = max_latency
+     self.set_job_tags(action: "metrics_monitor_queue", **opts)
+   end
+ end
@@ -0,0 +1,32 @@
+ # frozen_string_literal: true
+
+ require "webhookdb/async/job"
+ require "webhookdb/messages/org_database_migration_finished"
+ require "webhookdb/messages/org_database_migration_started"
+
+ class Webhookdb::Jobs::OrganizationDatabaseMigrationNotify
+   extend Webhookdb::Async::Job
+
+   on "webhookdb.organization.databasemigration.updated"
+
+   def _perform(event)
+     dbm = self.lookup_model(Webhookdb::Organization::DatabaseMigration, event)
+     self.set_job_tags(database_migration_id: dbm.id, organization: dbm.organization.key)
+     case event.payload[1]
+     when changed(:started_at, from: nil)
+       Webhookdb::Idempotency.once_ever.under_key("org-dbmigration-start-#{dbm.id}") do
+         msg = Webhookdb::Messages::OrgDatabaseMigrationStarted.new(dbm)
+         dbm.organization.admin_customers.each { |c| msg.dispatch_email(c) }
+       end
+       self.set_job_tags(result: "started_message_sent")
+     when changed(:finished_at, from: nil)
+       Webhookdb::Idempotency.once_ever.under_key("org-dbmigration-finish-#{dbm.id}") do
+         msg = Webhookdb::Messages::OrgDatabaseMigrationFinished.new(dbm)
+         dbm.organization.admin_customers.each { |c| msg.dispatch_email(c) }
+       end
+       self.set_job_tags(result: "finished_message_sent")
+     else
+       self.set_job_tags(result: "noop")
+     end
+   end
+ end
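The changed(:started_at, from: nil) matchers above are evaluated against event.payload[1], the hash of changed columns that accompanies the "updated" event. A minimal sketch of a payload that would take the "started" branch; the id, timestamp, and exact payload shape are assumptions for illustration, not taken from this diff:

    # Hypothetical "webhookdb.organization.databasemigration.updated" event payload:
    # [model primary key, {changed column => [old value, new value]}]
    payload = [
      123,                                             # DatabaseMigration id (illustrative)
      {"started_at" => [nil, "2024-05-01T12:00:00Z"]}, # matches changed(:started_at, from: nil)
    ]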
@@ -11,14 +11,12 @@ class Webhookdb::Jobs::OrganizationDatabaseMigrationRun
  
    def _perform(event)
      dbm = self.lookup_model(Webhookdb::Organization::DatabaseMigration, event)
-     self.with_log_tags(
-       organization_id: dbm.organization.id,
-       organization_name: dbm.organization.name,
-       organization_database_migration_id: dbm.id,
-     ) do
+     self.set_job_tags(organization: dbm.organization.key, database_migration_id: dbm.id)
+     begin
        dbm.migrate
+       self.set_job_tags(result: "migration_finished")
      rescue Webhookdb::Organization::DatabaseMigration::MigrationAlreadyFinished
-       self.logger.warn("org_database_migration_already_finished")
+       self.set_job_tags(result: "migration_already_finished")
      end
    end
  end
@@ -0,0 +1,26 @@
+ # frozen_string_literal: true
+
+ require "webhookdb/async/job"
+
+ class Webhookdb::Jobs::OrganizationErrorHandlerDispatch
+   extend Webhookdb::Async::Job
+
+   sidekiq_options queue: "netout"
+
+   def perform(error_handler_id, payload)
+     eh = self.lookup_model(Webhookdb::Organization::ErrorHandler, error_handler_id)
+     self.set_job_tags(error_handler_id: eh.id, **eh.organization.log_tags)
+     begin
+       eh.dispatch(payload)
+       self.set_job_tags(result: "success")
+     rescue StandardError => e
+       # Don't bother logging these errors out
+       self.set_job_tags(result: "error")
+       self.logger.debug("organization_error_handler_post_error", error: e)
+       raise Amigo::Retry::OrDie.new(
+         Webhookdb::Organization::Alerting.error_handler_retries,
+         Webhookdb::Organization::Alerting.error_handler_retry_interval,
+       )
+     end
+   end
+ end
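A note on the raise above: Amigo::Retry::OrDie is the amigo gem's bounded-retry signal; as the name suggests, raising it asks the job middleware to re-enqueue the job up to the given number of times, spaced by the given interval, and then let it die rather than fall back to Sidekiq's default retry curve. Assuming the Alerting settings resolved to, say, 10 retries at 60 seconds (illustrative numbers only), the raise is roughly equivalent to:

    # Retry this dispatch up to 10 more times, 60 seconds apart, then give up on it.
    raise Amigo::Retry::OrDie.new(10, 60)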
@@ -12,6 +12,7 @@ class Webhookdb::Jobs::PrepareDatabaseConnections
  
    def _perform(event)
      org = self.lookup_model(Webhookdb::Organization, event)
+     self.set_job_tags(organization: org.key)
      org.db.transaction do
        # If creating the public host fails, we end up with an orphaned database,
        # but that's not a big deal- we can eventually see it's empty/unlinked and drop it.
@@ -23,18 +23,17 @@ class Webhookdb::Jobs::ProcessWebhook
    end
  
    def _perform(event)
-     self.with_log_tags(@sint.log_tags) do
-       kw = event.payload[1].symbolize_keys
-       svc = Webhookdb::Replicator.create(@sint)
-       # kwargs contains: :headers, :body, :request_path, :request_method
-       req = Webhookdb::Replicator::WebhookRequest.new(
-         body: kw.fetch(:body),
-         headers: kw.fetch(:headers),
-         path: kw.fetch(:request_path),
-         method: kw.fetch(:request_method),
-       )
-       svc.upsert_webhook(req)
-     end
+     self.set_job_tags(@sint.log_tags)
+     kw = event.payload[1].symbolize_keys
+     svc = Webhookdb::Replicator.create(@sint)
+     # kwargs contains: :headers, :body, :request_path, :request_method
+     req = Webhookdb::Replicator::WebhookRequest.new(
+       body: kw.fetch(:body),
+       headers: kw.fetch(:headers),
+       path: kw.fetch(:request_path),
+       method: kw.fetch(:request_method),
+     )
+     svc.upsert_webhook(req)
    end
  
    def semaphore_key
@@ -1,24 +1,24 @@
  # frozen_string_literal: true
  
+ require "amigo/queue_backoff_job"
  require "webhookdb/async/job"
  require "webhookdb/jobs"
  
  # Generic helper to renew watch channels, enqueued by replicator-specific jobs
  # like RenewGoogleWatchChannels.
- # Must be emitted with [service integration id, {row_pk:, expirng_before:}]
+ # Must be emitted with [service_integration_id, {row_pk:, expirng_before:}]
  # Calls #renew_watch_channel(row_pk:, expiring_before:).
  class Webhookdb::Jobs::RenewWatchChannel
    extend Webhookdb::Async::Job
+   include Amigo::QueueBackoffJob
  
-   on "webhookdb.serviceintegration.renewwatchchannel"
+   sidekiq_options queue: "netout"
  
-   def _perform(event)
-     sint = self.lookup_model(Webhookdb::ServiceIntegration, event)
-     self.with_log_tags(sint.log_tags) do
-       opts = event.payload[1]
-       row_pk = opts.fetch("row_pk")
-       expiring_before = Time.parse(opts.fetch("expiring_before"))
-       sint.replicator.renew_watch_channel(row_pk:, expiring_before:)
-     end
+   def perform(service_integration_id, renew_watch_criteria)
+     sint = self.lookup_model(Webhookdb::ServiceIntegration, service_integration_id)
+     self.set_job_tags(sint.log_tags)
+     row_pk = renew_watch_criteria.fetch("row_pk")
+     expiring_before = Time.parse(renew_watch_criteria.fetch("expiring_before"))
+     sint.replicator.renew_watch_channel(row_pk:, expiring_before:)
    end
  end
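Because the job now takes plain Sidekiq arguments rather than subscribing to an event, replicator jobs enqueue it directly. A minimal usage sketch under that assumption; the integration id, row key, and one-week window are illustrative:

    # Enqueue a renewal for one replicated row; arguments mirror #perform above.
    Webhookdb::Jobs::RenewWatchChannel.perform_async(
      sint.id,                                           # service integration id (illustrative)
      {"row_pk" => row_pk, "expiring_before" => (Time.now + (7 * 86_400)).iso8601},
    )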
@@ -2,18 +2,21 @@
  
  # See Organization::enqueue_migrate_all_replication_tables for more info.
  class Webhookdb::Jobs::ReplicationMigration
-   include Sidekiq::Worker
+   extend Webhookdb::Async::Job
  
    def perform(org_id, target_release_created_at)
      (org = Webhookdb::Organization[org_id]) or raise "Organization[#{org_id}] does not exist"
      target_rca = Time.parse(target_release_created_at)
      current_rca = Time.parse(Webhookdb::RELEASE_CREATED_AT)
+     self.set_job_tags(organization_id: org_id, target_release_created_at:)
      if target_rca == current_rca
        self.class.migrate_org(org)
+       self.set_job_tags(result: "ran_replication_migration_job")
      elsif target_rca > current_rca
        self.class.perform_in(1, org_id, target_release_created_at)
+       self.set_job_tags(result: "reenqueued_replication_migration_job")
      else
-       self.logger.warn("stale_replication_migration_job", target_release_created_at:)
+       self.set_job_tags(result: "stale_replication_migration_job")
      end
    end
  
@@ -10,13 +10,12 @@ class Webhookdb::Jobs::ResetCodeCreateDispatch
  
    def _perform(event)
      code = self.lookup_model(Webhookdb::Customer::ResetCode, event)
+     self.set_job_tags(code_id: code.id, customer: code.customer.email, transport: code.transport)
      Webhookdb::Idempotency.once_ever.under_key("reset-code-#{code.customer_id}-#{code.id}") do
        msg = Webhookdb::Messages::Verification.new(code)
        case code.transport
        when "email"
          msg.dispatch_email(code.customer)
-       else
-         raise "Unknown transport for #{code.inspect}"
        end
      end
    end
@@ -51,9 +51,9 @@ module Webhookdb::Jobs
        Webhookdb::Github.activity_cron_expression, 30.seconds, false,
      ),
      Spec.new(
-       # I think we can get rid of this once we're more confident webhooks are working reliably.
+       # This incremental sync is a backstop for any missed webhooks.
        "IntercomScheduledBackfill", "intercom_marketplace_root_v1",
-       "46 4 * * *", 0, false, true,
+       "46 4 * * *", 0, true, true,
      ),
      Spec.new(
        "AtomSingleFeedPoller", "atom_single_feed_v1",
@@ -9,7 +9,8 @@ class Webhookdb::Jobs::SendInvite
    on "webhookdb.organizationmembership.invite"
  
    def _perform(event)
-     membership = self.lookup_model(Webhookdb::OrganizationMembership, event)
-     Webhookdb::Messages::Invite.new(membership).dispatch(membership.customer)
+     m = self.lookup_model(Webhookdb::OrganizationMembership, event)
+     self.set_job_tags(membership_id: m.id, organization: m.organization.key, customer: m.customer.email)
+     Webhookdb::Messages::Invite.new(m).dispatch(m.customer)
    end
  end
@@ -14,9 +14,7 @@ class Webhookdb::Jobs::SendTestWebhook
    # we don't want to retry and randomly send a payload later.
    sidekiq_options retry: false
  
-   def dependent_queues
-     return ["critical"]
-   end
+   def dependent_queues = ["critical"]
  
    def _perform(event)
      webhook_sub = self.lookup_model(Webhookdb::WebhookSubscription, event)
@@ -10,11 +10,10 @@ class Webhookdb::Jobs::SendWebhook
  
    def _perform(event)
      sint = self.lookup_model(Webhookdb::ServiceIntegration, event)
-     self.with_log_tags(sint.log_tags) do
-       sint.all_webhook_subscriptions_dataset.to_notify.each do |sub|
-         payload = {service_name: sint.service_name, table_name: sint.table_name, **event.payload[1]}
-         sub.enqueue_delivery(payload)
-       end
+     self.set_job_tags(sint.log_tags)
+     sint.all_webhook_subscriptions_dataset.to_notify.each do |sub|
+       payload = {service_name: sint.service_name, table_name: sint.table_name, **event.payload[1]}
+       sub.enqueue_delivery(payload)
      end
    end
  end
@@ -0,0 +1,31 @@
+ # frozen_string_literal: true
+
+ require "webhookdb/async/job"
+
+ # Run the +stale_row_deleter+ for each service integration
+ # which match the given where/exclude clauses.
+ # This is generally used to delete old stale rows in the backend
+ # (by passing initial: true) when a new stale row deleter is deployed.
+ class Webhookdb::Jobs::StaleRowDeleter
+   extend Webhookdb::Async::Job
+
+   def perform(opts={})
+     opts = opts.deep_symbolize_keys
+     opts[:where] ||= {}
+     opts[:exclude] ||= {}
+     opts[:initial] ||= false
+     ds = Webhookdb::ServiceIntegration.dataset
+     ds = ds.where(opts[:where]) if opts[:where]
+     ds = ds.exclude(opts[:exclude]) if opts[:exclude]
+     self.set_job_tags(dataset: ds.sql)
+     count = 0
+     ds.each do |sint|
+       self.with_log_tags(sint.log_tags) do
+         d = sint.replicator.stale_row_deleter
+         opts[:initial] ? d.run_initial : d.run
+         count += 1
+       end
+     end
+     self.set_job_tags(run_count: count)
+   end
+ end
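A hedged usage sketch: #perform filters Webhookdb::ServiceIntegration by the given where/exclude hashes, so an initial cleanup scoped to a single service could be enqueued roughly like this (the service name here is only an example):

    # Run the "initial" (unbounded) stale row deletion for icalendar event integrations only.
    Webhookdb::Jobs::StaleRowDeleter.perform_async(
      {
        "where" => {"service_name" => "icalendar_event_v1"}, # passed to ds.where
        "exclude" => {},                                      # nothing excluded
        "initial" => true,                                    # calls run_initial instead of run
      },
    )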
@@ -9,8 +9,11 @@ class Webhookdb::Jobs::SyncTargetEnqueueScheduled
    splay 0
  
    def _perform
+     count = 0
      Webhookdb::SyncTarget.due_for_sync(as_of: Time.now).select(:id, :period_seconds).each do |st|
+       count += 1
        Webhookdb::Jobs::SyncTargetRunSync.perform_in(st.jitter, st.id)
      end
+     self.set_job_tags(enqueued_count: count)
    end
  end
@@ -9,9 +9,7 @@ class Webhookdb::Jobs::SyncTargetRunSync
  
    sidekiq_options queue: "netout"
  
-   def dependent_queues
-     return ["critical"]
-   end
+   def dependent_queues = ["critical"]
  
    def perform(sync_target_id)
      stgt = Webhookdb::SyncTarget[sync_target_id]
@@ -19,22 +17,18 @@ class Webhookdb::Jobs::SyncTargetRunSync
        # A sync target may be enqueued, but destroyed before the sync runs.
        # If so, log a warning. We see this on staging a lot,
        # but it does happen on production too, and should be expected.
-       self.logger.info("missing_sync_target", sync_target_id:)
+       self.set_job_tags(result: "missing_sync_target", sync_target_id:)
        return
      end
-     self.with_log_tags(
-       sync_target_id: stgt.id,
-       sync_target_connection_url: stgt.displaysafe_connection_url,
-       sync_target_service_integration_service: stgt.service_integration.service_name,
-       sync_target_service_integration_table: stgt.service_integration.table_name,
-     ) do
-       stgt.run_sync(now: Time.now)
+     self.set_job_tags(stgt.log_tags)
+     begin
+       started = Time.now
+       stgt.run_sync(now: started)
+       self.set_job_tags(result: "sync_target_synced", synced_at_of: started)
      rescue Webhookdb::SyncTarget::SyncInProgress
-       Webhookdb::Idempotency.every(30.seconds).in_memory.under_key("sync_target_in_progress-#{stgt.id}") do
-         self.logger.info("sync_target_already_in_progress")
-       end
+       self.set_job_tags(result: "sync_target_already_in_progress")
      rescue Webhookdb::SyncTarget::Deleted
-       self.logger.info("sync_target_deleted")
+       self.set_job_tags(result: "sync_target_deleted")
      end
    end
  end
@@ -12,18 +12,15 @@ class Webhookdb::Jobs::WebhookSubscriptionDeliveryEvent
  
    sidekiq_options queue: "netout"
  
-   def dependent_queues
-     return ["critical"]
-   end
+   def dependent_queues = ["critical"]
  
    def perform(delivery_id)
      delivery = Webhookdb::WebhookSubscription::Delivery[delivery_id]
-     Webhookdb::Async::JobLogger.with_log_tags(
+     Webhookdb::Async::JobLogger.set_job_tags(
        webhook_subscription_delivery_id: delivery.id,
        webhook_subscription_id: delivery.webhook_subscription_id,
-       organization_key: delivery.webhook_subscription.fetch_organization,
-     ) do
-       delivery.attempt_delivery
-     end
+       organization: delivery.webhook_subscription.fetch_organization,
+     )
+     delivery.attempt_delivery
    end
  end
@@ -24,4 +24,4 @@ class Webhookdb::Liquid::Expose < Liquid::Block
    end
  end
  
- Liquid::Template.register_tag("expose", Webhookdb::Liquid::Expose)
+ Liquid::Environment.default.register_tag("expose", Webhookdb::Liquid::Expose)
@@ -13,4 +13,4 @@ module Webhookdb::Liquid::Filters
    end
  end
  
- Liquid::Template.register_filter(Webhookdb::Liquid::Filters)
+ Liquid::Environment.default.register_filter(Webhookdb::Liquid::Filters)
@@ -6,7 +6,7 @@ require "liquid"
  class Webhookdb::Liquid::Partial < Liquid::Include
    def initialize(tag_name, name, options)
      name = "'partials/#{Regexp.last_match(1)}'" if name =~ /['"]([a-z0-9_]+)['"]/
-     super(tag_name, name, options)
+     super
    end
  end
- Liquid::Template.register_tag("partial", Webhookdb::Liquid::Partial)
+ Liquid::Environment.default.register_tag("partial", Webhookdb::Liquid::Partial)
@@ -12,10 +12,10 @@ class Webhookdb::LoggedWebhook::Resilient
      str_payload = JSON.dump(kwargs)
      self.database_urls.each do |url|
        next unless self.write_to(url, service_integration_opaque_id, str_payload)
-       self.logger.warn "resilient_insert_handled", error: e, **self._dburl_log_kwargs(url)
+       self.logger.warn "resilient_insert_handled", self._dburl_log_kwargs(url), e
        return true
      end
-     self.logger.error "resilient_insert_unhandled", error: e, logged_webhook_kwargs: kwargs
+     self.logger.error "resilient_insert_unhandled", {logged_webhook_kwargs: kwargs}, e
      raise
    end
  
@@ -37,7 +37,7 @@ class Webhookdb::LoggedWebhook::Resilient
      end
      return true
    rescue StandardError => e
-     self.logger.debug "resilient_insert_failure", error: e, **self._dburl_log_kwargs(dburl)
+     self.logger.debug "resilient_insert_failure", self._dburl_log_kwargs(dburl), e
      return false
    end
  
@@ -88,13 +88,26 @@ class Webhookdb::LoggedWebhook < Webhookdb::Postgres::Model(:logged_webhooks)
      unowned = self.where(organization_id: nil)
      successes = owned.where { response_status < 400 }
      failures = owned.where { response_status >= 400 }
+     # NOTE: This code is tightly coupled with indices created in 050_logged_webhooks_indices.rb
+     # We create a separate index for each operation; the indices (5 in total) cover the full combination of:
+     # - rows without an organization (idx 1)
+     # - rows with an organization
+     #   - rows already truncated
+     #     - rows with status < 400 (idx 2)
+     #     - rows with status >= 400 (idx 3)
+     #   - rows not truncated
+     #     - rows with status < 400 (idx 4)
+     #     - rows with status >= 400 (idx 5)
+     # Note that we only delete already-truncated rows so we can keep our indices smaller;
+     # since deletion ages are always older than truncation ages, this should not be a problem.
+
      # Delete old unowned
      unowned.where { inserted_at < now - DELETE_UNOWNED }.delete
      # Delete successes first so they don't have to be truncated
-     successes.where { inserted_at < now - DELETE_SUCCESSES }.delete
+     successes.where { inserted_at < now - DELETE_SUCCESSES }.exclude(truncated_at: nil).delete
      self.truncate_dataset(successes.where { inserted_at < now - TRUNCATE_SUCCESSES })
      # Delete failures
-     failures.where { inserted_at < now - DELETE_FAILURES }.delete
+     failures.where { inserted_at < now - DELETE_FAILURES }.exclude(truncated_at: nil).delete
      self.truncate_dataset(failures.where { inserted_at < now - TRUNCATE_FAILURES })
    end
  
@@ -145,6 +158,7 @@ class Webhookdb::LoggedWebhook < Webhookdb::Postgres::Model(:logged_webhooks)
    end
  
    def self.truncate_dataset(ds)
+     ds = ds.where(truncated_at: nil)
      return ds.update(request_body: "", request_headers: "{}", truncated_at: Time.now)
    end
  
@@ -12,7 +12,7 @@ class Webhookdb::Message::EmailTransport < Webhookdb::Message::Transport
    register_transport(:email)
  
    configurable(:email) do
-     setting :allowlist, ["*@lithic.tech", "*@webhookdb.com"], convert: ->(s) { s.split }
+     setting :allowlist, ["*@lithic.tech", "*@webhookdb.com"], convert: lambda(&:split)
      setting :from, "WebhookDB <hello@webhookdb.com>"
  
      setting :smtp_host, "localhost"
@@ -5,7 +5,7 @@ require "webhookdb/message"
  class Webhookdb::Message::Transport
    extend Webhookdb::MethodUtilities
  
-   class Error < StandardError; end
+   class Error < Webhookdb::WebhookdbError; end
    class UndeliverableRecipient < Error; end
  
    singleton_attr_reader :transports
@@ -29,8 +29,8 @@ module Webhookdb::Message
  
    configurable(:messages) do
      after_configured do
-       Liquid::Template.error_mode = :strict
-       Liquid::Template.file_system = Liquid::LocalFileSystem.new(DATA_DIR, "%s.liquid")
+       Liquid::Environment.default.error_mode = :strict
+       Liquid::Environment.default.file_system = Liquid::LocalFileSystem.new(DATA_DIR, "%s.liquid")
      end
    end
  
@@ -73,6 +73,7 @@ module Webhookdb::Message
        "environment" => Webhookdb::Message::EnvironmentDrop.new,
        "app_url" => Webhookdb.app_url,
      )
+     drops = self.unify_drops_encoding(drops)
  
      content_tmpl = Liquid::Template.parse(template_file.read)
      # The 'expose' drop smashes data into the register.
@@ -101,13 +102,63 @@
      return Rendering.new(content, lctx.registers)
    end
  
+   # Handle encoding in liquid drop string values that would likely crash message rendering.
+   #
+   # If there is a mixed character encoding of string values in a liquid drop,
+   # such as when handling user-supplied values, force all strings into UTF-8.
+   #
+   # This is needed because the way Ruby does encoding coercion when parsing input
+   # which does not declare an encoding, such as a file or especially an HTTP response.
+   # Ruby will:
+   # - Use ASCII if the values fit into 7 bits
+   # - Use ASCII-8BIT if the values fit into 8 bits (128 to 255)
+   # - Otherwise, use UTF-8.
+   #
+   # The actual rules are more complex, but this is common enough.
+   #
+   # While ASCII encoding can be used as UTF-8, ASCII-8BIT cannot.
+   # So adding `(ascii-8bit string) + (utf-8 string)` will error with an
+   # `Encoding::CompatibilityError`.
+   #
+   # Instead, if we see a series of liquid drop string values
+   # with different encodings, force them all to be UTF-8.
+   # This can result in some unexpected behavior,
+   # but it should be fine, since you'd only see it with unexpected input
+   # (all valid inputs should be UTF-8).
+   #
+   # @param [Hash] drops
+   # @return [Hash]
+   def self.unify_drops_encoding(drops)
+     return drops if drops.empty?
+     seen_enc = nil
+     force_enc = false
+     drops.each_value do |v|
+       next unless v.respond_to?(:encoding)
+       seen_enc ||= v.encoding
+       next if seen_enc == v.encoding
+       force_enc = true
+       break
+     end
+     return drops unless force_enc
+     utf8 = Encoding.find("UTF-8")
+     result = drops.each_with_object({}) do |(k, v), memo|
+       if v.respond_to?(:encoding) && v.encoding != utf8
+         v2 = v.encode(utf8, invalid: :replace, undef: :replace, replace: "?")
+         memo[k] = v2
+       else
+         memo[k] = v
+       end
+     end
+     return result
+   end
+
    def self.send_unsent
      Webhookdb::Message::Delivery.unsent.each(&:send!)
    end
  
-   class InvalidTransportError < StandardError; end
+   class InvalidTransportError < Webhookdb::ProgrammingError; end
  
-   class MissingTemplateError < StandardError; end
+   class MissingTemplateError < Webhookdb::ProgrammingError; end
  
    # Presents a homogeneous interface for a given 'to' value (email vs. customer, for example).
    # .to will always be a plain object, and .customer will be a +Webhookdb::Customer+ if present.
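The unify_drops_encoding docstring is easiest to verify in isolation. A small sketch of the failure mode it guards against and of the same replacement-based conversion it applies; the strings are arbitrary examples:

    # An HTTP body with a high-bit byte and no declared charset often arrives as ASCII-8BIT:
    binary = (+"caf\xE9").force_encoding(Encoding::ASCII_8BIT)
    utf8 = "café"
    # binary + utf8  would raise Encoding::CompatibilityError during template rendering.

    # The conversion unify_drops_encoding performs on mismatched values:
    fixed = binary.encode(Encoding::UTF_8, invalid: :replace, undef: :replace, replace: "?")
    fixed + utf8 # => "caf?café", which renders instead of crashing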
@@ -0,0 +1,47 @@
+ # frozen_string_literal: true
+
+ require "webhookdb/message/template"
+
+ class Webhookdb::Messages::ErrorGenericBackfill < Webhookdb::Message::Template
+   def self.fixtured(_recipient)
+     sint = Webhookdb::Fixtures.service_integration.create
+     return self.new(
+       sint,
+       response_status: 422,
+       request_url: "https://whdbtest.signalwire.com/2010-04-01/Accounts/projid/Messages.json",
+       request_method: "POST",
+       response_body: "Unauthorized",
+     )
+   end
+
+   attr_accessor :service_integration
+
+   def initialize(service_integration, request_url:, request_method:, response_status:, response_body:)
+     @service_integration = service_integration
+     @request_url = request_url
+     @request_method = request_method
+     @response_status = response_status
+     @response_body = response_body
+     super()
+   end
+
+   def signature
+     # Only alert on the backfill once a day
+     return "msg-#{self.full_template_name}-sint:#{@service_integration.id}"
+   end
+
+   def template_folder = "errors"
+   def template_name = "generic_backfill"
+
+   def liquid_drops
+     return super.merge(
+       friendly_name: @service_integration.replicator.descriptor.resource_name_singular,
+       service_name: @service_integration.service_name,
+       opaque_id: @service_integration.opaque_id,
+       request_method: @request_method,
+       request_url: @request_url,
+       response_status: @response_status,
+       response_body: @response_body,
+     )
+   end
+ end
@@ -9,6 +9,8 @@ class Webhookdb::Messages::ErrorIcalendarFetch < Webhookdb::Message::Template
        response_status: 403, request_url: "/foo", request_method: "GET", response_body: "hi",)
    end
  
+   attr_accessor :service_integration
+
    def initialize(service_integration, external_calendar_id, request_url:, request_method:, response_status:,
                   response_body:)
      @service_integration = service_integration
@@ -36,6 +38,9 @@ class Webhookdb::Messages::ErrorIcalendarFetch < Webhookdb::Message::Template
        response_status: @response_status,
        response_body: @response_body,
        external_calendar_id: @external_calendar_id,
+       webhook_endpoint: @service_integration.replicator.webhook_endpoint,
+       org_name: @service_integration.organization.name,
+       org_key: @service_integration.organization.key,
      )
    end
  end