lhm-shopify 3.4.2 → 3.5.0

This diff compares the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: cfb1802db81b2673ea5e52acec84b4e457a8d5debb74f5391fcb282bf1fc8484
- data.tar.gz: 88abd0ae0484efa69777b0284c253788996b2148c0cb61fc035c0ccd7d49627c
+ metadata.gz: 6028b7c6d8d1f2e2d8b786ae3e8f41c4525c7507529085c3427a8dc9cf11bd2e
+ data.tar.gz: de625eda2736c56cfa813ba73f8447f6e798c30080274d91164b895879a7f3a7
  SHA512:
- metadata.gz: 4b98a803678e1ab543997d4e6f9031b02511324293c2d97b3f94938ae6b701e55c122633ab6044920363c56d9746e35f9b131433eb65108437c4ffeb8ca2e5ea
- data.tar.gz: '018c3a179dfdf90f9b5fd4bd3f67636644fc47eda7b0b825aee00330db0f83a59cf6855a5e18105493a89ac0a372ac26fd2c356148282449ca2de84fc56c5179'
+ metadata.gz: 6561aadaa26f6f28f4fd04987a7fca180e9130194cd756cc5a53a654e060b7bd93efaec78a6b067d39e2b31fd6b4f1826a4de9ac90da162af912b1dd101b8732
+ data.tar.gz: b221afa7215cf36f66fdd48ffd449a2d6034c8710c36d9ccdbf909b2078b83fc610fa1c1170214752371e50f41ed6504186975c80e5a8164ef6b25dce631fe89
@@ -17,18 +17,14 @@ jobs:
  with:
  ruby-version: 2.5.1
  bundler-cache: true
- - name: Cache MySQL
- uses: actions/cache@v2
- with:
- path: dbdeployer/binaries
- key: ${{ runner.os }}-dbdeployer-${{ hashFiles('**/dbdeployer/install.sh') }}
  - name: Install Ruby packages
  run: bundle install
  - name: Install Ubuntu packages
- run: sudo apt-get install numactl libaio-dev
- - name: Setup MySQL
- run: ./dbdeployer/install.sh
- - name: Run integration tests
- run: bundle exec rake integration
- - name: Run unit tests
- run: bundle exec rake unit
+ run: sudo apt-get update && sudo apt-get install numactl libaio-dev libmysqlclient-dev
+ - name: Setup MySQL and ProxySQL (docker-compose)
+ run: docker-compose up -d # Might have to change to docker compose up -d (i.e. Compose V2) when the Ubuntu image changes the docker-compose version
+ - name: Wait until DBs are alive
+ run: ./scripts/helpers/wait-for-dbs.sh
+ timeout-minutes: 2
+ - name: Run tests
+ run: bundle exec rake specs
data/.gitignore CHANGED
@@ -6,12 +6,9 @@ pkg/*
  .ruby-version
  .ruby-gemset
  bin/rake
- spec/integration/database.yml
  gemfiles/vendor
  omg.ponies
  *~
  coverage
- bin/dbdeployer
- /dbdeployer/sandboxes
- /dbdeployer/binaries
+ .idea/
  Gemfile.lock
data/README.md CHANGED
@@ -259,11 +259,9 @@ COV=1 bundle exec rake unit && bundle exec rake integration
  open coverage/index.html
  ```

- ### dbdeployer
-
- The integration tests rely on a replicated configuration for MySQL. We're using
- [`dbdeployer`](https://github.com/datacharmer/dbdeployer) to manage the test nodes
- required to set up these configurations.
+ ### Docker Compose
+ The integration tests rely on a MySQL replication configuration that is proxied by an instance of ProxySQL.
+ Every container must be running in order to execute the integration test suite.

  ## License

data/Rakefile CHANGED
@@ -16,6 +16,16 @@ Rake::TestTask.new('integration') do |t|
  t.libs << 'spec'
  t.test_files = FileList['spec/integration/**/*_spec.rb']
  t.verbose = true
+ end
+
+ Rake::TestTask.new('dev') do |t|
+ t.libs << 'lib'
+ t.libs << 'spec'
+ t.test_files = FileList[
+ 'spec/test_helper.rb',
+ # Add file to test individually
+ ]
+ t.verbose = true
  end

  task :specs => [:unit, :integration]
data/dev.yml CHANGED
@@ -7,14 +7,21 @@ up:
  - wget
  - ruby: 2.6.5
  - bundler
+ - docker
  - custom:
- name: Database
- met?: test -f spec/integration/database.yml && test "$(./dbdeployer/sandboxes/rsandbox_5_7_22/status_all | grep on\ | wc -l | xargs echo)" = "2"
- meet: ./dbdeployer/install.sh
- down: ./dbdeployer/sandboxes/rsandbox_5_7_22/stop_all
+ name: Docker Compose
+ met?: docker compose ls | grep -ioE -q "lhm.*running\(3\)"
+ meet: docker compose up -d
+ - custom:
+ name: Waiting for DBs to be operational
+ met?: ./scripts/helpers/wait-for-dbs.sh
+ meet: ":"

  commands:
  unit: bundle exec rake unit
  int: bundle exec rake integration
  test: bundle exec rake unit && bundle exec rake integration
  cov: rm -rf coverage; COV=1 bundle exec rake unit && bundle exec rake integration; open coverage/index.html
+ logs:
+ desc: "See the DB logs (ctrl-c + ctrl-c to exit)"
+ run: docker-compose logs -f
data/docker-compose.yml ADDED
@@ -0,0 +1,46 @@
+ services:
+ # Writer
+ mysql-1:
+ container_name: mysql-1
+ image: percona:5.7
+ command:
+ --server-id=1
+ --log-slave-updates=ON
+ --gtid-mode=ON
+ --enforce-gtid-consistency=ON
+ --read-only=OFF
+ hostname: 'mysql-1'
+ volumes:
+ - ./scripts/mysql/writer:/docker-entrypoint-initdb.d
+ environment:
+ MYSQL_ROOT_PASSWORD: password
+ MYSQL_HOST: mysql-1
+ ports:
+ - "33006:3306"
+ # Reader
+ mysql-2:
+ container_name: mysql-2
+ image: percona:5.7
+ command:
+ --server-id=2
+ --log-slave-updates=ON
+ --gtid-mode=ON
+ --enforce-gtid-consistency=ON
+ --read-only=ON
+ hostname: 'mysql-2'
+ volumes:
+ - ./scripts/mysql/reader:/docker-entrypoint-initdb.d
+ environment:
+ MYSQL_ROOT_PASSWORD: password
+ MYSQL_HOST: mysql-2
+ ports:
+ - "33007:3306"
+ # Proxysql
+ proxysql:
+ container_name: proxysql
+ image: proxysql/proxysql:2.0.11
+ volumes:
+ - ./scripts/proxysql/proxysql.cnf:/etc/proxysql.cnf
+ command: "proxysql -c /etc/proxysql.cnf -f --idle-threads"
+ ports:
+ - "33005:3306"
data/lib/lhm/atomic_switcher.rb CHANGED
@@ -16,17 +16,12 @@ module Lhm

  attr_reader :connection

- def initialize(migration, connection = nil, options = {})
+ def initialize(migration, connection = nil, options={})
  @migration = migration
  @connection = connection
  @origin = migration.origin
  @destination = migration.destination
- @retry_helper = SqlRetry.new(
- @connection,
- {
- log_prefix: "AtomicSwitcher"
- }.merge!(options.fetch(:retriable, {}))
- )
+ @retry_options = options[:retriable] || {}
  end

  def atomic_switch
@@ -36,7 +31,7 @@ module Lhm

  def validate
  unless @connection.data_source_exists?(@origin.name) &&
- @connection.data_source_exists?(@destination.name)
+ @connection.data_source_exists?(@destination.name)
  error "`#{ @origin.name }` and `#{ @destination.name }` must exist"
  end
  end
@@ -44,9 +39,7 @@ module Lhm
  private

  def execute
- @retry_helper.with_retries do |retriable_connection|
- retriable_connection.execute atomic_switch
- end
+ @connection.execute(atomic_switch, @retry_options)
  end
  end
  end
data/lib/lhm/chunk_insert.rb CHANGED
@@ -7,18 +7,11 @@ module Lhm
  @connection = connection
  @lowest = lowest
  @highest = highest
- @retry_helper = SqlRetry.new(
- @connection,
- {
- log_prefix: "Chunker Insert"
- }.merge!(options.fetch(:retriable, {}))
- )
+ @retry_options = options[:retriable] || {}
  end

  def insert_and_return_count_of_rows_created
- @retry_helper.with_retries do |retriable_connection|
- retriable_connection.update sql
- end
+ @connection.update(sql, @retry_options)
  end

  def sql
data/lib/lhm/chunker.rb CHANGED
@@ -28,11 +28,12 @@ module Lhm
  @start = @chunk_finder.start
  @limit = @chunk_finder.limit
  @printer = options[:printer] || Printer::Percentage.new
+ @retry_options = options[:retriable] || {}
  @retry_helper = SqlRetry.new(
  @connection,
  {
  log_prefix: "Chunker"
- }.merge!(options.fetch(:retriable, {}))
+ }.merge!(@retry_options)
  )
  end

@@ -46,7 +47,7 @@ module Lhm
  top = upper_id(@next_to_insert, stride)
  verify_can_run

- affected_rows = ChunkInsert.new(@migration, @connection, bottom, top, @options).insert_and_return_count_of_rows_created
+ affected_rows = ChunkInsert.new(@migration, @connection, bottom, top, @retry_options).insert_and_return_count_of_rows_created
  expected_rows = top - bottom + 1

  # Only log the chunker progress every 5 minutes instead of every iteration
@@ -78,7 +79,7 @@ module Lhm
  private

  def raise_on_non_pk_duplicate_warning
- @connection.query("show warnings").each do |level, code, message|
+ @connection.execute("show warnings", @retry_options).each do |level, code, message|
  unless message.match?(/Duplicate entry .+ for key 'PRIMARY'/)
  m = "Unexpected warning found for inserted row: #{message}"
  Lhm.logger.warn(m)
@@ -93,16 +94,14 @@ module Lhm

  def verify_can_run
  return unless @verifier
- @retry_helper.with_retries do |retriable_connection|
+ @retry_helper.with_retries(@retry_options) do |retriable_connection|
  raise "Verification failed, aborting early" if !@verifier.call(retriable_connection)
  end
  end

  def upper_id(next_id, stride)
  sql = "select id from `#{ @migration.origin_name }` where id >= #{ next_id } order by id limit 1 offset #{ stride - 1}"
- top = @retry_helper.with_retries do |retriable_connection|
- retriable_connection.select_value(sql)
- end
+ top = @connection.select_value(sql, @retry_options)
  [top ? top.to_i : @limit, @limit].min
  end
  end
data/lib/lhm/cleanup/current.rb CHANGED
@@ -4,17 +4,12 @@ require 'lhm/sql_retry'
  module Lhm
  module Cleanup
  class Current
- def initialize(run, origin_table_name, connection, options = {})
+ def initialize(run, origin_table_name, connection, options={})
  @run = run
  @table_name = TableName.new(origin_table_name)
  @connection = connection
  @ddls = []
- @retry_helper = SqlRetry.new(
- @connection,
- {
- log_prefix: "Cleanup::Current"
- }.merge!(options.fetch(:retriable, {}))
- )
+ @retry_config = options[:retriable] || {}
  end

  attr_reader :run, :connection, :ddls
@@ -59,9 +54,7 @@ module Lhm

  def execute_ddls
  ddls.each do |ddl|
- @retry_helper.with_retries do |retriable_connection|
- retriable_connection.execute(ddl)
- end
+ @connection.execute(ddl, @retry_config)
  end
  Lhm.logger.info("Dropped triggers on #{@lhm_triggers_for_origin.join(', ')}")
  Lhm.logger.info("Dropped tables #{@lhm_triggers_for_origin.join(', ')}")
data/lib/lhm/connection.rb ADDED
@@ -0,0 +1,61 @@
+ require 'delegate'
+
+ module Lhm
+ class Connection < SimpleDelegator
+
+ # Lhm::Connection inherits from SimpleDelegator. It will forward any unknown method calls to the ActiveRecord
+ # connection.
+ alias connection __getobj__
+ alias connection= __setobj__
+
+ def initialize(connection:, default_log_prefix: nil, retry_options: {})
+ @default_log_prefix = default_log_prefix
+ @retry_options = retry_options || default_retry_config
+ @sql_retry = Lhm::SqlRetry.new(
+ connection,
+ retry_options,
+ )
+
+ # Creates delegation for the ActiveRecord Connection
+ super(connection)
+ end
+
+ def execute(query, retry_options = {})
+ exec_with_retries(:execute, query, retry_options)
+ end
+
+ def update(query, retry_options = {})
+ exec_with_retries(:update, query, retry_options)
+ end
+
+ def select_value(query, retry_options = {})
+ exec_with_retries(:select_value, query, retry_options)
+ end
+
+ private
+
+ def exec_with_retries(method, sql, retry_options = {})
+ retry_options[:log_prefix] ||= file
+ @sql_retry.with_retries(retry_options) do |conn|
+ conn.public_send(method, sql)
+ end
+ end
+
+ # Returns camelized file name of caller (e.g. chunk_insert.rb -> ChunkInsert)
+ def file
+ # Find calling file and extract name
+ /[\/]*(\w+).rb:\d+:in/.match(relevant_caller)
+ name = $1&.camelize || "Connection"
+ "#{name}"
+ end
+
+ def relevant_caller
+ lhm_stack = caller.select { |x| x.include?("/lhm") }
+ first_candidate_index = lhm_stack.find_index {|line| !line.include?(__FILE__)}
+
+ # Find the file that called the `#execute` (falls back to the current file)
+ return lhm_stack.first unless first_candidate_index
+ lhm_stack.at(first_candidate_index)
+ end
+ end
+ end
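For context, a minimal usage sketch of the new `Lhm::Connection` wrapper (not part of the released diff; the statement and retry values below are illustrative), assuming an ActiveRecord connection is already established:

```ruby
require 'lhm/connection'

# Wrap the raw ActiveRecord connection; any method not overridden here is
# still delegated to the underlying adapter via SimpleDelegator.
connection = Lhm::Connection.new(connection: ActiveRecord::Base.connection)

# execute/update/select_value now route through SqlRetry; per-call options are
# merged over the retriable defaults (tries, base_interval and log_prefix here
# are illustrative values, not defaults from the gem).
connection.execute("SHOW TABLES", { tries: 3, base_interval: 0.5, log_prefix: "Example" })
```

When no `log_prefix` is given, the wrapper derives one from the calling lhm file, which is what the `[AtomicSwitcher]` prefix asserted in the spec further down relies on.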
data/lib/lhm/entangler.rb CHANGED
@@ -4,6 +4,7 @@
  require 'lhm/command'
  require 'lhm/sql_helper'
  require 'lhm/sql_retry'
+ require 'lhm/connection'

  module Lhm
  class Entangler
@@ -19,12 +20,7 @@ module Lhm
  @origin = migration.origin
  @destination = migration.destination
  @connection = connection
- @retry_helper = SqlRetry.new(
- @connection,
- {
- log_prefix: "Entangler"
- }.merge!(options.fetch(:retriable, {}))
- )
+ @retry_options = options[:retriable] || {}
  end

  def entangle
@@ -90,18 +86,14 @@ module Lhm

  def before
  entangle.each do |stmt|
- @retry_helper.with_retries do |retriable_connection|
- retriable_connection.execute(stmt)
- end
+ @connection.execute(stmt, @retry_options)
  end
  Lhm.logger.info("Created triggers on #{@origin.name}")
  end

  def after
  untangle.each do |stmt|
- @retry_helper.with_retries do |retriable_connection|
- retriable_connection.execute(stmt)
- end
+ @connection.execute(stmt, @retry_options)
  end
  Lhm.logger.info("Dropped triggers on #{@origin.name}")
  end
data/lib/lhm/invoker.rb CHANGED
@@ -56,7 +56,7 @@ module Lhm
  Chunker.new(migration, @connection, options).run
  raise "Required triggers do not exist" unless triggers_still_exist?(@connection, entangler)
  if options[:atomic_switch]
- AtomicSwitcher.new(migration, @connection, options).run
+ AtomicSwitcher.new(migration, @connection).run
  else
  LockedSwitcher.new(migration, @connection).run
  end
data/lib/lhm/sql_retry.rb CHANGED
@@ -17,20 +17,26 @@ module Lhm
  class SqlRetry
  def initialize(connection, options = {})
  @connection = connection
- @log_prefix = options.delete(:log_prefix)
- @retry_config = default_retry_config.dup.merge!(options)
+ @global_retry_config = default_retry_config.dup.merge!(options)
  end

- def with_retries
- Retriable.retriable(retry_config) do
+ def with_retries(retry_config = {})
+ cnf = @global_retry_config.dup.merge!(retry_config)
+ @log_prefix = cnf.delete(:log_prefix) || "SQL Retry"
+ Retriable.retriable(cnf) do
  yield(@connection)
  end
  end

- attr_reader :retry_config
+ attr_reader :global_retry_config

  private

+ def log_with_prefix(message, level = :info)
+ message.prepend("[#{@log_prefix}] ") if @log_prefix
+ Lhm.logger.send(level, message)
+ end
+
  # For a full list of configuration options see https://github.com/kamui/retriable
  def default_retry_config
  {
@@ -52,8 +58,7 @@ module Lhm
  max_elapsed_time: Float::INFINITY, # max total time in seconds that code is allowed to keep being retried
  on_retry: Proc.new do |exception, try_number, total_elapsed_time, next_interval|
  log = "#{exception.class}: '#{exception.message}' - #{try_number} tries in #{total_elapsed_time} seconds and #{next_interval} seconds until the next try."
- log.prepend("[#{@log_prefix}] ") if @log_prefix
- Lhm.logger.info(log)
+ log_with_prefix(log, :info)
  end
  }.freeze
  end
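A short sketch of the reworked retry interface (illustrative only, not taken from the diff): `with_retries` now accepts a per-call config that is merged over the options given to the constructor, and the log prefix travels with the call instead of being fixed on the instance.

```ruby
require 'lhm/sql_retry'

# Global defaults for this helper instance (base_interval here is illustrative).
retry_helper = Lhm::SqlRetry.new(ActiveRecord::Base.connection, { base_interval: 0.2 })

# Per-call overrides; the :log_prefix key is stripped before the remaining
# options are handed to Retriable.retriable.
retry_helper.with_retries(tries: 5, log_prefix: "Example") do |connection|
  connection.execute("SHOW FULL PROCESSLIST")
end
```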
data/lib/lhm/version.rb CHANGED
@@ -2,5 +2,5 @@
  # Schmidt

  module Lhm
- VERSION = '3.4.2'
+ VERSION = '3.5.0'
  end
data/lib/lhm.rb CHANGED
@@ -8,6 +8,7 @@ require 'lhm/throttler'
  require 'lhm/version'
  require 'lhm/cleanup/current'
  require 'lhm/sql_retry'
+ require 'lhm/connection'
  require 'lhm/test_support'
  require 'lhm/railtie' if defined?(Rails::Railtie)
  require 'logger'
@@ -82,7 +83,7 @@ module Lhm
  end

  def setup(connection)
- @@connection = connection
+ @@connection = Lhm::Connection.new(connection: connection)
  end

  def connection
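The effect of the `Lhm.setup` change, as a hedged sketch (illustrative, not part of the diff): any connection handed to `Lhm.setup` is now wrapped, so every statement issued by Lhm goes through the retry layer.

```ruby
require 'lhm'

# Previously @@connection held the raw adapter; it is now wrapped on setup.
Lhm.setup(ActiveRecord::Base.connection)
Lhm.connection # => an Lhm::Connection delegating to the ActiveRecord adapter
```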
data/scripts/helpers/wait-for-dbs.sh ADDED
@@ -0,0 +1,21 @@
+ #!/bin/bash
+ # Wait for writer
+ echo "Waiting for MySQL-1: "
+ while ! (mysqladmin ping --host="127.0.0.1" --port=33006 --user=root --password=password --silent 2> /dev/null); do
+ echo -ne "."
+ sleep 1
+ done
+ # Wait for reader
+ echo "Waiting for MySQL-2: "
+ while ! (mysqladmin ping --host="127.0.0.1" --port=33007 --user=root --password=password --silent 2> /dev/null); do
+ echo -ne "."
+ sleep 1
+ done
+ # Wait for proxysql
+ echo "Waiting for ProxySQL:"
+ while ! (mysqladmin ping --host="127.0.0.1" --port=33005 --user=root --password=password --silent 2> /dev/null); do
+ echo -ne "."
+ sleep 1
+ done
+
+ echo "All DBs are ready"
data/scripts/mysql/reader/create_replication.sql ADDED
@@ -0,0 +1,10 @@
+ STOP SLAVE;
+ CHANGE MASTER TO
+ MASTER_HOST='mysql-1',
+ MASTER_AUTO_POSITION=1,
+ MASTER_USER='replication',
+ MASTER_PASSWORD='password',
+ MASTER_CONNECT_RETRY=1,
+ MASTER_RETRY_COUNT=300; -- 5 minutes
+
+ start slave;
data/scripts/mysql/writer/create_test_db.sql ADDED
@@ -0,0 +1 @@
+ CREATE DATABASE test;
data/scripts/mysql/writer/create_users.sql ADDED
@@ -0,0 +1,3 @@
+ # Creates replication user in Writer
+ CREATE USER IF NOT EXISTS 'replication'@'%' IDENTIFIED BY 'password';
+ GRANT REPLICATION SLAVE ON *.* TO 'replication'@'%' IDENTIFIED BY 'password';
data/scripts/proxysql/proxysql.cnf ADDED
@@ -0,0 +1,117 @@
+ #file proxysql.cfg
+
+ datadir="/var/lib/proxysql"
+ restart_on_missing_heartbeats=999999
+ query_parser_token_delimiters=","
+ query_parser_key_value_delimiters=":"
+ unit_of_work_identifiers="consistent_read_id"
+
+ admin_variables=
+ {
+ mysql_ifaces="0.0.0.0:6032"
+ admin_credentials="admin:password;remote-admin:password"
+ }
+
+ mysql_servers =
+ (
+ {
+ address="mysql-1"
+ port=3306
+ hostgroup=0
+ max_connections=200
+ },
+ {
+ address="mysql-2"
+ port=3306
+ hostgroup=1
+ max_connections=200
+ }
+ )
+
+ mysql_variables=
+ {
+ session_idle_ms=1
+ auto_increment_delay_multiplex=0
+
+ threads=8
+ max_connections=100000
+ interfaces="0.0.0.0:3306"
+ server_version="5.7.18-proxysql"
+ connect_timeout_server=10000
+ connect_timeout_server_max=10000
+ connect_retries_on_failure=0
+ default_charset="utf8mb4"
+ free_connections_pct=100
+ connection_warming=true
+ max_allowed_packet=16777216
+ monitor_enabled=false
+ query_retries_on_failure=0
+ shun_on_failures=999999
+ shun_recovery_time_sec=0
+ kill_backend_connection_when_disconnect=false
+ stats_time_backend_query=false
+ stats_time_query_processor=false
+ max_stmts_per_connection=5
+ default_max_latency_ms=999999
+ wait_timeout=1800000
+ eventslog_format=3
+ log_multiplexing_disabled=true
+ log_unhealthy_connections=false
+ }
+
+ # defines all the MySQL users
+ mysql_users:
+ (
+ {
+ username = "root"
+ password = "password"
+ default_hostgroup = 0
+ max_connections=1000
+ active = 1
+ },
+ {
+ username = "writer"
+ password = "password"
+ default_hostgroup = 0
+ max_connections=50000
+ active = 1
+ transaction_persistent=1
+ },
+ {
+ username = "reader"
+ password = "password"
+ default_hostgroup = 1
+ max_connections=50000
+ active = 1
+ transaction_persistent=1
+ }
+ )
+
+ #defines MySQL Query Rules
+ mysql_query_rules:
+ (
+ {
+ rule_id = 1
+ active = 1
+ match_digest = "@@SESSION"
+ multiplex = 2
+ },
+ {
+ rule_id = 2
+ active = 1
+ match_digest = "@@global\.server_id"
+ multiplex = 2
+ },
+ {
+ rule_id = 3
+ active = 1
+ match_digest = "@@global\.hostname"
+ multiplex = 2
+ },
+ {
+ rule_id = 4
+ active = 1
+ match_pattern = "maintenance:lhm"
+ destination_hostgroup = 0
+ }
+ )
data/spec/integration/atomic_switcher_spec.rb CHANGED
@@ -6,6 +6,7 @@ require File.expand_path(File.dirname(__FILE__)) + '/integration_helper'
  require 'lhm/table'
  require 'lhm/migration'
  require 'lhm/atomic_switcher'
+ require 'lhm/connection'

  describe Lhm::AtomicSwitcher do
  include IntegrationHelper
@@ -29,9 +30,11 @@ describe Lhm::AtomicSwitcher do
  end

  it 'should retry and log on lock wait timeouts' do
- connection = mock()
- connection.stubs(:data_source_exists?).returns(true)
- connection.stubs(:execute).raises(ActiveRecord::StatementInvalid, 'Lock wait timeout exceeded; try restarting transaction.').then.returns(true)
+ ar_connection = mock()
+ ar_connection.stubs(:data_source_exists?).returns(true)
+ ar_connection.stubs(:execute).raises(ActiveRecord::StatementInvalid, 'Lock wait timeout exceeded; try restarting transaction.').then.returns(true)
+
+ connection = Lhm::Connection.new(connection: ar_connection)

  switcher = Lhm::AtomicSwitcher.new(@migration, connection, retriable: {base_interval: 0})

@@ -40,13 +43,16 @@ describe Lhm::AtomicSwitcher do
  log_messages = @logs.string.split("\n")
  assert_equal(2, log_messages.length)
  assert log_messages[0].include? "Starting run of class=Lhm::AtomicSwitcher"
+ # On failure of this assertion, check for Lhm::Connection#file
  assert log_messages[1].include? "[AtomicSwitcher] ActiveRecord::StatementInvalid: 'Lock wait timeout exceeded; try restarting transaction.' - 1 tries"
  end

  it 'should give up on lock wait timeouts after a configured number of tries' do
- connection = mock()
- connection.stubs(:data_source_exists?).returns(true)
- connection.stubs(:execute).twice.raises(ActiveRecord::StatementInvalid, 'Lock wait timeout exceeded; try restarting transaction.')
+ ar_connection = mock()
+ ar_connection.stubs(:data_source_exists?).returns(true)
+ ar_connection.stubs(:execute).twice.raises(ActiveRecord::StatementInvalid, 'Lock wait timeout exceeded; try restarting transaction.')
+
+ connection = Lhm::Connection.new(connection: ar_connection)

  switcher = Lhm::AtomicSwitcher.new(@migration, connection, retriable: {tries: 2, base_interval: 0})

@@ -62,8 +68,10 @@ describe Lhm::AtomicSwitcher do
  end

  it "should raise when destination doesn't exist" do
- connection = mock()
- connection.stubs(:data_source_exists?).returns(false)
+ ar_connection = mock()
+ ar_connection.stubs(:data_source_exists?).returns(false)
+
+ connection = Lhm::Connection.new(connection: ar_connection)

  switcher = Lhm::AtomicSwitcher.new(@migration, connection)

data/spec/integration/database.yml ADDED
@@ -0,0 +1,15 @@
+ master:
+ host: mysql-1
+ user: root
+ password: password
+ port: 33006
+ slave:
+ host: mysql-2
+ user: root
+ password: password
+ port: 33007
+ proxysql:
+ host: proxysql
+ user: root
+ password: password
+ port: 33005
data/spec/integration/integration_helper.rb CHANGED
@@ -41,7 +41,6 @@ module IntegrationHelper
  $db_config['master']['port'],
  $db_config['master']['user'],
  $db_config['master']['password'],
- $db_config['master']['socket']
  )
  end

@@ -51,12 +50,11 @@ module IntegrationHelper
  $db_config['slave']['port'],
  $db_config['slave']['user'],
  $db_config['slave']['password'],
- $db_config['slave']['socket']
  )
  end

- def connect!(hostname, port, user, password, socket)
- adapter = ar_conn(hostname, port, user, password, socket)
+ def connect!(hostname, port, user, password)
+ adapter = Lhm::Connection.new(connection: ar_conn(hostname, port, user, password))
  Lhm.setup(adapter)
  unless defined?(@@cleaned_up)
  Lhm.cleanup(true)
@@ -65,14 +63,13 @@ module IntegrationHelper
  @connection = adapter
  end

- def ar_conn(host, port, user, password, socket)
+ def ar_conn(host, port, user, password)
  ActiveRecord::Base.establish_connection(
  :adapter => 'mysql2',
  :host => host,
  :username => user,
  :port => port,
  :password => password,
- :socket => socket,
  :database => $db_name
  )
  ActiveRecord::Base.connection
@@ -77,14 +77,17 @@ class LockWaitTimeoutTestHelper
  attr_reader :main_conn, :lock_duration, :innodb_lock_wait_timeout

  def new_mysql_connection
- Mysql2::Client.new(
+ client = Mysql2::Client.new(
  host: '127.0.0.1',
- database: test_db_name,
  username: db_config['master']['user'],
  password: db_config['master']['password'],
- port: db_config['master']['port'],
- socket: db_config['master']['socket']
+ port: db_config['master']['port']
  )
+
+ # For some reason the database sometimes does not exist
+ client.query("CREATE DATABASE IF NOT EXISTS #{test_db_name}")
+ client.select_db(test_db_name)
+ client
  end

  def test_db_name
data/spec/unit/chunker_spec.rb CHANGED
@@ -7,6 +7,7 @@ require 'lhm/table'
  require 'lhm/migration'
  require 'lhm/chunker'
  require 'lhm/throttler'
+ require 'lhm/connection'

  describe Lhm::Chunker do
  include UnitHelper
@@ -37,11 +38,11 @@ describe Lhm::Chunker do
  5
  end

- @connection.expects(:select_value).with(regexp_matches(/where id >= 1 order by id limit 1 offset 4/)).returns(7)
- @connection.expects(:select_value).with(regexp_matches(/where id >= 8 order by id limit 1 offset 4/)).returns(21)
- @connection.expects(:update).with(regexp_matches(/between 1 and 7/)).returns(2)
- @connection.expects(:update).with(regexp_matches(/between 8 and 10/)).returns(2)
- @connection.expects(:query).twice.with(regexp_matches(/show warnings/)).returns([])
+ @connection.expects(:select_value).with(regexp_matches(/where id >= 1 order by id limit 1 offset 4/),{}).returns(7)
+ @connection.expects(:select_value).with(regexp_matches(/where id >= 8 order by id limit 1 offset 4/),{}).returns(21)
+ @connection.expects(:update).with(regexp_matches(/between 1 and 7/),{}).returns(2)
+ @connection.expects(:update).with(regexp_matches(/between 8 and 10/),{}).returns(2)
+ @connection.expects(:execute).twice.with(regexp_matches(/show warnings/),{}).returns([])

  @chunker.run
  end
@@ -52,17 +53,17 @@ describe Lhm::Chunker do
  2
  end

- @connection.expects(:select_value).with(regexp_matches(/where id >= 1 order by id limit 1 offset 1/)).returns(2)
- @connection.expects(:select_value).with(regexp_matches(/where id >= 3 order by id limit 1 offset 1/)).returns(4)
- @connection.expects(:select_value).with(regexp_matches(/where id >= 5 order by id limit 1 offset 1/)).returns(6)
- @connection.expects(:select_value).with(regexp_matches(/where id >= 7 order by id limit 1 offset 1/)).returns(8)
- @connection.expects(:select_value).with(regexp_matches(/where id >= 9 order by id limit 1 offset 1/)).returns(10)
+ @connection.expects(:select_value).with(regexp_matches(/where id >= 1 order by id limit 1 offset 1/),{}).returns(2)
+ @connection.expects(:select_value).with(regexp_matches(/where id >= 3 order by id limit 1 offset 1/),{}).returns(4)
+ @connection.expects(:select_value).with(regexp_matches(/where id >= 5 order by id limit 1 offset 1/),{}).returns(6)
+ @connection.expects(:select_value).with(regexp_matches(/where id >= 7 order by id limit 1 offset 1/),{}).returns(8)
+ @connection.expects(:select_value).with(regexp_matches(/where id >= 9 order by id limit 1 offset 1/),{}).returns(10)

- @connection.expects(:update).with(regexp_matches(/between 1 and 2/)).returns(2)
- @connection.expects(:update).with(regexp_matches(/between 3 and 4/)).returns(2)
- @connection.expects(:update).with(regexp_matches(/between 5 and 6/)).returns(2)
- @connection.expects(:update).with(regexp_matches(/between 7 and 8/)).returns(2)
- @connection.expects(:update).with(regexp_matches(/between 9 and 10/)).returns(2)
+ @connection.expects(:update).with(regexp_matches(/between 1 and 2/),{}).returns(2)
+ @connection.expects(:update).with(regexp_matches(/between 3 and 4/),{}).returns(2)
+ @connection.expects(:update).with(regexp_matches(/between 5 and 6/),{}).returns(2)
+ @connection.expects(:update).with(regexp_matches(/between 7 and 8/),{}).returns(2)
+ @connection.expects(:update).with(regexp_matches(/between 9 and 10/),{}).returns(2)

  @chunker.run
  end
@@ -79,17 +80,17 @@ describe Lhm::Chunker do
  end
  end

- @connection.expects(:select_value).with(regexp_matches(/where id >= 1 order by id limit 1 offset 1/)).returns(2)
- @connection.expects(:select_value).with(regexp_matches(/where id >= 3 order by id limit 1 offset 2/)).returns(5)
- @connection.expects(:select_value).with(regexp_matches(/where id >= 6 order by id limit 1 offset 2/)).returns(8)
- @connection.expects(:select_value).with(regexp_matches(/where id >= 9 order by id limit 1 offset 2/)).returns(nil)
+ @connection.expects(:select_value).with(regexp_matches(/where id >= 1 order by id limit 1 offset 1/),{}).returns(2)
+ @connection.expects(:select_value).with(regexp_matches(/where id >= 3 order by id limit 1 offset 2/),{}).returns(5)
+ @connection.expects(:select_value).with(regexp_matches(/where id >= 6 order by id limit 1 offset 2/),{}).returns(8)
+ @connection.expects(:select_value).with(regexp_matches(/where id >= 9 order by id limit 1 offset 2/),{}).returns(nil)

- @connection.expects(:update).with(regexp_matches(/between 1 and 2/)).returns(2)
- @connection.expects(:update).with(regexp_matches(/between 3 and 5/)).returns(2)
- @connection.expects(:update).with(regexp_matches(/between 6 and 8/)).returns(2)
- @connection.expects(:update).with(regexp_matches(/between 9 and 10/)).returns(2)
+ @connection.expects(:update).with(regexp_matches(/between 1 and 2/),{}).returns(2)
+ @connection.expects(:update).with(regexp_matches(/between 3 and 5/),{}).returns(2)
+ @connection.expects(:update).with(regexp_matches(/between 6 and 8/),{}).returns(2)
+ @connection.expects(:update).with(regexp_matches(/between 9 and 10/),{}).returns(2)

- @connection.expects(:query).twice.with(regexp_matches(/show warnings/)).returns([])
+ @connection.expects(:execute).twice.with(regexp_matches(/show warnings/),{}).returns([])

  @chunker.run
  end
@@ -99,8 +100,8 @@ describe Lhm::Chunker do
  :start => 1,
  :limit => 1)

- @connection.expects(:select_value).with(regexp_matches(/where id >= 1 order by id limit 1 offset 0/)).returns(nil)
- @connection.expects(:update).with(regexp_matches(/between 1 and 1/)).returns(1)
+ @connection.expects(:select_value).with(regexp_matches(/where id >= 1 order by id limit 1 offset 0/),{}).returns(nil)
+ @connection.expects(:update).with(regexp_matches(/between 1 and 1/),{}).returns(1)

  @chunker.run
  end
@@ -113,17 +114,17 @@ describe Lhm::Chunker do
  2
  end

- @connection.expects(:select_value).with(regexp_matches(/where id >= 2 order by id limit 1 offset 1/)).returns(3)
- @connection.expects(:select_value).with(regexp_matches(/where id >= 4 order by id limit 1 offset 1/)).returns(5)
- @connection.expects(:select_value).with(regexp_matches(/where id >= 6 order by id limit 1 offset 1/)).returns(7)
- @connection.expects(:select_value).with(regexp_matches(/where id >= 8 order by id limit 1 offset 1/)).returns(9)
- @connection.expects(:select_value).with(regexp_matches(/where id >= 10 order by id limit 1 offset 1/)).returns(nil)
+ @connection.expects(:select_value).with(regexp_matches(/where id >= 2 order by id limit 1 offset 1/),{}).returns(3)
+ @connection.expects(:select_value).with(regexp_matches(/where id >= 4 order by id limit 1 offset 1/),{}).returns(5)
+ @connection.expects(:select_value).with(regexp_matches(/where id >= 6 order by id limit 1 offset 1/),{}).returns(7)
+ @connection.expects(:select_value).with(regexp_matches(/where id >= 8 order by id limit 1 offset 1/),{}).returns(9)
+ @connection.expects(:select_value).with(regexp_matches(/where id >= 10 order by id limit 1 offset 1/),{}).returns(nil)

- @connection.expects(:update).with(regexp_matches(/between 2 and 3/)).returns(2)
- @connection.expects(:update).with(regexp_matches(/between 4 and 5/)).returns(2)
- @connection.expects(:update).with(regexp_matches(/between 6 and 7/)).returns(2)
- @connection.expects(:update).with(regexp_matches(/between 8 and 9/)).returns(2)
- @connection.expects(:update).with(regexp_matches(/between 10 and 10/)).returns(1)
+ @connection.expects(:update).with(regexp_matches(/between 2 and 3/),{}).returns(2)
+ @connection.expects(:update).with(regexp_matches(/between 4 and 5/),{}).returns(2)
+ @connection.expects(:update).with(regexp_matches(/between 6 and 7/),{}).returns(2)
+ @connection.expects(:update).with(regexp_matches(/between 8 and 9/),{}).returns(2)
+ @connection.expects(:update).with(regexp_matches(/between 10 and 10/),{}).returns(1)

  @chunker.run
  end
@@ -137,9 +138,9 @@ describe Lhm::Chunker do
  2
  end

- @connection.expects(:select_value).with(regexp_matches(/where id >= 1 order by id limit 1 offset 1/)).returns(2)
- @connection.expects(:update).with(regexp_matches(/where \(foo.created_at > '2013-07-10' or foo.baz = 'quux'\) and `foo`/)).returns(1)
- @connection.expects(:query).with(regexp_matches(/show warnings/)).returns([])
+ @connection.expects(:select_value).with(regexp_matches(/where id >= 1 order by id limit 1 offset 1/),{}).returns(2)
+ @connection.expects(:update).with(regexp_matches(/where \(foo.created_at > '2013-07-10' or foo.baz = 'quux'\) and `foo`/),{}).returns(1)
+ @connection.expects(:execute).with(regexp_matches(/show warnings/),{}).returns([])

  def @migration.conditions
  "where foo.created_at > '2013-07-10' or foo.baz = 'quux'"
@@ -157,9 +158,9 @@ describe Lhm::Chunker do
  2
  end

- @connection.expects(:select_value).with(regexp_matches(/where id >= 1 order by id limit 1 offset 1/)).returns(2)
- @connection.expects(:update).with(regexp_matches(/inner join bar on foo.id = bar.foo_id and/)).returns(1)
- @connection.expects(:query).with(regexp_matches(/show warnings/)).returns([])
+ @connection.expects(:select_value).with(regexp_matches(/where id >= 1 order by id limit 1 offset 1/),{}).returns(2)
+ @connection.expects(:update).with(regexp_matches(/inner join bar on foo.id = bar.foo_id and/),{}).returns(1)
+ @connection.expects(:execute).with(regexp_matches(/show warnings/),{}).returns([])

  def @migration.conditions
  'inner join bar on foo.id = bar.foo_id'
data/spec/unit/connection_spec.rb ADDED
@@ -0,0 +1,68 @@
+ require 'lhm/connection'
+
+ describe Lhm::Connection do
+
+ LOCK_WAIT = ActiveRecord::StatementInvalid.new('Lock wait timeout exceeded; try restarting transaction.')
+
+ before(:each) do
+ @logs = StringIO.new
+ Lhm.logger = Logger.new(@logs)
+ end
+
+ it "Should find use calling file as prefix" do
+ ar_connection = mock()
+ ar_connection.stubs(:execute).raises(LOCK_WAIT).then.returns(true)
+
+ connection = Lhm::Connection.new(connection: ar_connection)
+
+ connection.execute("SHOW TABLES", { base_interval: 0 })
+
+ log_messages = @logs.string.split("\n")
+ assert_equal(1, log_messages.length)
+ assert log_messages.first.include?("[ConnectionSpec]")
+ end
+
+ it "#execute should be retried" do
+ ar_connection = mock()
+ ar_connection.stubs(:execute).raises(LOCK_WAIT)
+ .then.raises(LOCK_WAIT)
+ .then.returns(true)
+
+ connection = Lhm::Connection.new(connection: ar_connection)
+
+ connection.execute("SHOW TABLES", { base_interval: 0, tries: 3 })
+
+ log_messages = @logs.string.split("\n")
+ assert_equal(2, log_messages.length)
+ end
+
+ it "#update should be retried" do
+ ar_connection = mock()
+ ar_connection.stubs(:update).raises(LOCK_WAIT)
+ .then.raises(LOCK_WAIT)
+ .then.returns(1)
+
+ connection = Lhm::Connection.new(connection: ar_connection)
+
+ val = connection.update("SHOW TABLES", { base_interval: 0, tries: 3 })
+
+ log_messages = @logs.string.split("\n")
+ assert_equal val, 1
+ assert_equal(2, log_messages.length)
+ end
+
+ it "#select_value should be retried" do
+ ar_connection = mock()
+ ar_connection.stubs(:select_value).raises(LOCK_WAIT)
+ .then.raises(LOCK_WAIT)
+ .then.returns("dummy")
+
+ connection = Lhm::Connection.new(connection: ar_connection)
+
+ val = connection.select_value("SHOW TABLES", { base_interval: 0, tries: 3 })
+
+ log_messages = @logs.string.split("\n")
+ assert_equal val, "dummy"
+ assert_equal(2, log_messages.length)
+ end
+ end
data/spec/unit/entangler_spec.rb CHANGED
@@ -6,6 +6,7 @@ require File.expand_path(File.dirname(__FILE__)) + '/unit_helper'
  require 'lhm/table'
  require 'lhm/migration'
  require 'lhm/entangler'
+ require 'lhm/connection'

  describe Lhm::Entangler do
  include UnitHelper
@@ -60,33 +61,43 @@ describe Lhm::Entangler do
  end

  it 'should retry trigger creation when it hits a lock wait timeout' do
- connection = mock()
  tries = 1
+ ar_connection = mock()
+ ar_connection.expects(:execute).times(tries).raises(Mysql2::Error, 'Lock wait timeout exceeded; try restarting transaction')
+
+ connection = Lhm::Connection.new(connection: ar_connection)
+
  @entangler = Lhm::Entangler.new(@migration, connection, retriable: {base_interval: 0, tries: tries})
- connection.expects(:execute).times(tries).raises(Mysql2::Error, 'Lock wait timeout exceeded; try restarting transaction')

  assert_raises(Mysql2::Error) { @entangler.before }
  end

  it 'should not retry trigger creation with other mysql errors' do
- connection = mock()
- connection.expects(:execute).once.raises(Mysql2::Error, 'The MySQL server is running with the --read-only option so it cannot execute this statement.')
+ ar_connection = mock()
+ ar_connection.expects(:execute).once.raises(Mysql2::Error, 'The MySQL server is running with the --read-only option so it cannot execute this statement.')
+ connection = Lhm::Connection.new(connection: ar_connection)

  @entangler = Lhm::Entangler.new(@migration, connection, retriable: {base_interval: 0})
  assert_raises(Mysql2::Error) { @entangler.before }
  end

  it 'should successfully finish after retrying' do
- connection = mock()
- connection.stubs(:execute).raises(Mysql2::Error, 'Lock wait timeout exceeded; try restarting transaction').then.returns(true)
+ ar_connection = mock()
+ ar_connection.stubs(:execute).raises(Mysql2::Error, 'Lock wait timeout exceeded; try restarting transaction').then.returns(true)
+
+ connection = Lhm::Connection.new(connection: ar_connection)
+
  @entangler = Lhm::Entangler.new(@migration, connection, retriable: {base_interval: 0})

  assert @entangler.before
  end

  it 'should retry as many times as specified by configuration' do
- connection = mock()
- connection.expects(:execute).times(5).raises(Mysql2::Error, 'Lock wait timeout exceeded; try restarting transaction')
+ ar_connection = mock()
+ ar_connection.expects(:execute).times(5).raises(Mysql2::Error, 'Lock wait timeout exceeded; try restarting transaction')
+
+ connection = Lhm::Connection.new(connection: ar_connection)
+
  @entangler = Lhm::Entangler.new(@migration, connection, retriable: {tries: 5, base_interval: 0})

  assert_raises(Mysql2::Error) { @entangler.before }
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: lhm-shopify
  version: !ruby/object:Gem::Version
- version: 3.4.2
+ version: 3.5.0
  platform: ruby
  authors:
  - SoundCloud
@@ -12,7 +12,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2021-09-28 00:00:00.000000000 Z
+ date: 2021-12-07 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: retriable
@@ -129,10 +129,8 @@ files:
  - LICENSE
  - README.md
  - Rakefile
- - bin/.gitkeep
- - dbdeployer/config.json
- - dbdeployer/install.sh
  - dev.yml
+ - docker-compose.yml
  - gemfiles/ar-2.3_mysql.gemfile
  - gemfiles/ar-3.2_mysql.gemfile
  - gemfiles/ar-3.2_mysql2.gemfile
@@ -149,6 +147,7 @@ files:
  - lib/lhm/chunker.rb
  - lib/lhm/cleanup/current.rb
  - lib/lhm/command.rb
+ - lib/lhm/connection.rb
  - lib/lhm/entangler.rb
  - lib/lhm/intersection.rb
  - lib/lhm/invoker.rb
@@ -168,6 +167,11 @@ files:
  - lib/lhm/throttler/time.rb
  - lib/lhm/timestamp.rb
  - lib/lhm/version.rb
+ - scripts/helpers/wait-for-dbs.sh
+ - scripts/mysql/reader/create_replication.sql
+ - scripts/mysql/writer/create_test_db.sql
+ - scripts/mysql/writer/create_users.sql
+ - scripts/proxysql/proxysql.cnf
  - shipit.rubygems.yml
  - spec/.lhm.example
  - spec/README.md
@@ -188,6 +192,7 @@ files:
  - spec/integration/chunk_insert_spec.rb
  - spec/integration/chunker_spec.rb
  - spec/integration/cleanup_spec.rb
+ - spec/integration/database.yml
  - spec/integration/entangler_spec.rb
  - spec/integration/integration_helper.rb
  - spec/integration/invoker_spec.rb
@@ -202,6 +207,7 @@ files:
  - spec/unit/chunk_finder_spec.rb
  - spec/unit/chunk_insert_spec.rb
  - spec/unit/chunker_spec.rb
+ - spec/unit/connection_spec.rb
  - spec/unit/entangler_spec.rb
  - spec/unit/intersection_spec.rb
  - spec/unit/lhm_spec.rb
data/bin/.gitkeep DELETED
File without changes
data/dbdeployer/config.json DELETED
@@ -1,32 +0,0 @@
- {
- "version": "1.8.0",
- "sandbox-home": "./dbdeployer/sandboxes",
- "sandbox-binary": "./dbdeployer/binaries",
- "use-sandbox-catalog": true,
- "master-slave-base-port": 11000,
- "group-replication-base-port": 12000,
- "group-replication-sp-base-port": 13000,
- "fan-in-replication-base-port": 14000,
- "all-masters-replication-base-port": 15000,
- "multiple-base-port": 16000,
- "group-port-delta": 125,
- "mysqlx-port-delta": 10000,
- "master-name": "master",
- "master-abbr": "m",
- "node-prefix": "node",
- "slave-prefix": "slave",
- "slave-abbr": "s",
- "sandbox-prefix": "msb_",
- "master-slave-prefix": "rsandbox_",
- "group-prefix": "group_msb_",
- "group-sp-prefix": "group_sp_msb_",
- "multiple-prefix": "multi_msb_",
- "fan-in-prefix": "fan_in_msb_",
- "all-masters-prefix": "all_masters_msb_",
- "reserved-ports": [
- 1186,
- 3306,
- 33060
- ],
- "timestamp": "Mon Jul 16 17:36:55 AST 2018"
- }
data/dbdeployer/install.sh DELETED
@@ -1,64 +0,0 @@
- set -e
- mkdir -p ./dbdeployer/sandboxes
- mkdir -p ./dbdeployer/binaries
-
- if [ -z "$(uname | grep Darwin)" ]; then
- OS=linux
- set -x
- else
- OS=osx
- fi
-
- echo "Checking if dbdeployer is installed"
- if ! [ -x "$(command -v ./bin/dbdeployer)" ]; then
- echo "Not installed...starting install"
- VERSION=1.56.0
- origin=https://github.com/datacharmer/dbdeployer/releases/download/v$VERSION
- filename=dbdeployer-$VERSION.$OS
- wget -q $origin/$filename.tar.gz
- tar -xzf $filename.tar.gz
- chmod +x $filename
- sudo mv $filename ./bin/dbdeployer
- rm $filename.tar.gz
- else
- echo "Installation found!"
- fi
-
-
- echo "Checking if mysql 5.7.22 is available for dbdeployer"
- if [ -z "$(./bin/dbdeployer --config ./dbdeployer/config.json --sandbox-binary "./dbdeployer/binaries" available | grep 5.7.22)" ]; then
- echo "Not found..."
-
- if [ "$OS" = "linux" ]; then
- MYSQL_FILE=mysql-5.7.22-linux-glibc2.12-x86_64.tar.gz
- else
- MYSQL_FILE=mysql-5.7.22-macos10.13-x86_64.tar.gz
- fi
-
- if [ ! -f $MYSQL_FILE ]; then
- echo "Downloading $MYSQL_FILE...(this may take a while)"
- wget -q "https://dev.mysql.com/get/Downloads/MySQL-5.7/$MYSQL_FILE"
- fi
-
- echo "Setting up..."
- ./bin/dbdeployer unpack $MYSQL_FILE --verbosity 0 --config ./dbdeployer/config.json --sandbox-binary "./dbdeployer/binaries"
- rm $MYSQL_FILE
- else
- echo "mysql 5.7.22 found!"
- fi
-
- echo "Forcing new replication setup..."
- ./bin/dbdeployer deploy replication 5.7.22 --nodes 2 --force --config ./dbdeployer/config.json --sandbox-binary "./dbdeployer/binaries" --sandbox-home "./dbdeployer/sandboxes"
- ./bin/dbdeployer global status --config ./dbdeployer/config.json --sandbox-binary "./dbdeployer/binaries" --sandbox-home "./dbdeployer/sandboxes"
-
- echo "Setting up database.yml"
- DATABASE_YML=spec/integration/database.yml
- echo "master:" > $DATABASE_YML
- cat ./dbdeployer/sandboxes/rsandbox_5_7_22/master/my.sandbox.cnf | grep -A 4 client | tail -n 4 | sed -e 's/ * = /: /' -e 's/^/ /' >> $DATABASE_YML
-
- echo "slave:" >> $DATABASE_YML
- cat ./dbdeployer/sandboxes/rsandbox_5_7_22/node1/my.sandbox.cnf | grep -A 4 client | tail -n 4 | sed -e 's/ * = /: /' -e 's/^/ /' >> $DATABASE_YML
-
- cat $DATABASE_YML
-
- echo "You are ready to run the integration test suite..."