rom-dynamo 0.14.0 → 0.16.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.github/workflows/specs.yml +31 -0
- data/Gemfile +1 -0
- data/README.md +1 -0
- data/lib/rom/dynamo/dataset.rb +196 -0
- data/lib/rom/dynamo/{repository.rb → gateway.rb} +2 -2
- data/lib/rom/dynamo/relation.rb +9 -170
- data/lib/rom/dynamo/version.rb +1 -1
- data/lib/rom/dynamo.rb +6 -3
- data/rom-dynamo.gemspec +5 -2
- metadata +10 -8
checksums.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
---
|
|
2
2
|
SHA256:
|
|
3
|
-
metadata.gz:
|
|
4
|
-
data.tar.gz:
|
|
3
|
+
metadata.gz: 7bbcf8f8498ca2b84b9542512884cc138dc251fc334dfac08412eeff71f9d1fa
|
|
4
|
+
data.tar.gz: eb0a7649d78caea174c0e6aea7ae5545bffd21fa1c6ae6e71ca0a79140dfc2fb
|
|
5
5
|
SHA512:
|
|
6
|
-
metadata.gz:
|
|
7
|
-
data.tar.gz:
|
|
6
|
+
metadata.gz: 6ca7ae3a668dd4938a1261916e6404f8a618bf8c0e35cab0e856911111bcef397cedb303c371b3aa13cf83d2f876fccbbf4c3a7e8f0d8710767bcb585b1be2a5
|
|
7
|
+
data.tar.gz: 4361cf505fe065176038d2639c63328efef862e52df9631cf6fcf32910261e33ad4380c3aeb995a2bd6af849de8fe71645f7626e1ff9d49786f53e085f89fb1c
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
name: specs
|
|
2
|
+
|
|
3
|
+
on:
|
|
4
|
+
push:
|
|
5
|
+
branches: [ master ]
|
|
6
|
+
pull_request:
|
|
7
|
+
branches: [ master ]
|
|
8
|
+
|
|
9
|
+
jobs:
|
|
10
|
+
test:
|
|
11
|
+
runs-on: ubuntu-latest
|
|
12
|
+
strategy:
|
|
13
|
+
fail-fast: true
|
|
14
|
+
matrix:
|
|
15
|
+
ruby: [2.4, 2.5, 2.6, 2.7, 3.0, head, jruby]
|
|
16
|
+
steps:
|
|
17
|
+
- uses: actions/checkout@v6
|
|
18
|
+
- name: Set up Ruby & run Bundler
|
|
19
|
+
uses: ruby/setup-ruby@v1
|
|
20
|
+
with:
|
|
21
|
+
ruby-version: ${{ matrix.ruby }}
|
|
22
|
+
bundler-cache: true
|
|
23
|
+
- name: Setup DynamoDB Local
|
|
24
|
+
uses: rrainn/dynamodb-action@v2.0.0
|
|
25
|
+
with:
|
|
26
|
+
port: 8000
|
|
27
|
+
- name: Run specs
|
|
28
|
+
run: bundle exec rake spec
|
|
29
|
+
env:
|
|
30
|
+
AWS_ACCESS_KEY_ID: DEADBEEF
|
|
31
|
+
AWS_SECRET_ACCESS_KEY: FORAWSSDK
|
data/Gemfile
CHANGED
data/README.md
CHANGED
|
@@ -4,6 +4,7 @@
|
|
|
4
4
|
# Rom::Dynamo
|
|
5
5
|
|
|
6
6
|
[![Gem Version](https://badge.fury.io/rb/rom-dynamo.svg)][gem]
|
|
7
|
+
[![specs](https://github.com/rykov/rom-dynamo/actions/workflows/specs.yml/badge.svg)](https://github.com/rykov/rom-dynamo/actions/workflows/specs.yml)
|
|
7
8
|
|
|
8
9
|
AWS DynamoDB support for [Ruby Object Mapper](https://github.com/rom-rb/rom).
|
|
9
10
|
|
|
@@ -0,0 +1,196 @@
|
|
|
1
|
+
module Rom
|
|
2
|
+
module Dynamo
|
|
3
|
+
class Dataset
|
|
4
|
+
include Enumerable
|
|
5
|
+
include Dry::Equalizer(:name, :connection)
|
|
6
|
+
extend Dry::Initializer[undefined: false]
|
|
7
|
+
EmptyQuery = { key_conditions: {}.freeze }.freeze
|
|
8
|
+
|
|
9
|
+
option :connection
|
|
10
|
+
option :name, proc(&:to_s)
|
|
11
|
+
option :logger, optional: true
|
|
12
|
+
option :table_keys, optional: true, reader: false
|
|
13
|
+
option :query, default: proc { EmptyQuery }, reader: false
|
|
14
|
+
alias_method :ddb, :connection
|
|
15
|
+
|
|
16
|
+
######### ENUMERATE ###########
|
|
17
|
+
|
|
18
|
+
def each(&block)
|
|
19
|
+
return enum_for(:each) if block.nil?
|
|
20
|
+
each_page { |p| p.items.each(&block) }
|
|
21
|
+
end
|
|
22
|
+
|
|
23
|
+
def each_page(&block)
|
|
24
|
+
return enum_for(:each_page) if block.nil?
|
|
25
|
+
result = start_query(consistent_read: true)
|
|
26
|
+
result.each_page(&block)
|
|
27
|
+
end
|
|
28
|
+
|
|
29
|
+
############# QUERY #############
|
|
30
|
+
|
|
31
|
+
def restrict(query = nil)
|
|
32
|
+
return self if query.nil?
|
|
33
|
+
dup_with_query(self.class, query)
|
|
34
|
+
end
|
|
35
|
+
|
|
36
|
+
def batch_restrict(keys)
|
|
37
|
+
dup_as(BatchGetDataset, keys: keys.map do |k|
|
|
38
|
+
Hash[table_keys.zip(k.is_a?(Array) ? k : [k])]
|
|
39
|
+
end)
|
|
40
|
+
end
|
|
41
|
+
|
|
42
|
+
def index_restrict(index, query)
|
|
43
|
+
dup_with_query(GlobalIndexDataset, query, index_name: index.to_s)
|
|
44
|
+
end
|
|
45
|
+
|
|
46
|
+
############# PAGINATE #############
|
|
47
|
+
|
|
48
|
+
def limit(limit)
|
|
49
|
+
opts = limit.nil? ? {} : { limit: limit.to_i }
|
|
50
|
+
dup_with_query(self.class, nil, opts)
|
|
51
|
+
end
|
|
52
|
+
|
|
53
|
+
def offset(key)
|
|
54
|
+
opts = key.nil? ? {} : { exclusive_start_key: key }
|
|
55
|
+
dup_with_query(self.class, nil, opts)
|
|
56
|
+
end
|
|
57
|
+
|
|
58
|
+
def reversed
|
|
59
|
+
dup_with_query(self.class, nil, scan_index_forward: false)
|
|
60
|
+
end
|
|
61
|
+
|
|
62
|
+
############# WRITE #############
|
|
63
|
+
def insert(hash)
|
|
64
|
+
opts = { table_name: name, item: stringify_keys(hash) }
|
|
65
|
+
connection.put_item(opts).attributes
|
|
66
|
+
end
|
|
67
|
+
|
|
68
|
+
def delete(hash)
|
|
69
|
+
hash = stringify_keys(hash)
|
|
70
|
+
connection.delete_item({
|
|
71
|
+
table_name: name,
|
|
72
|
+
key: hash_to_key(hash),
|
|
73
|
+
expected: to_expected(hash),
|
|
74
|
+
}).attributes
|
|
75
|
+
end
|
|
76
|
+
|
|
77
|
+
def update(keys, hash)
|
|
78
|
+
connection.update_item({
|
|
79
|
+
table_name: name, key: hash_to_key(stringify_keys(keys)),
|
|
80
|
+
attribute_updates: hash.each_with_object({}) do |(k, v), out|
|
|
81
|
+
out[k] = { value: dump_value(v), action: 'PUT' } if !keys[k]
|
|
82
|
+
end
|
|
83
|
+
}).attributes
|
|
84
|
+
end
|
|
85
|
+
|
|
86
|
+
############# HELPERS #############
|
|
87
|
+
private
|
|
88
|
+
def batch_get_each_page(keys, &block)
|
|
89
|
+
!keys.empty? && ddb.batch_get_item({
|
|
90
|
+
request_items: { name => { keys: keys } },
|
|
91
|
+
}).each_page do |page|
|
|
92
|
+
block.call(page[:responses][name])
|
|
93
|
+
end
|
|
94
|
+
end
|
|
95
|
+
|
|
96
|
+
def dup_with_query(klass, key_hash, opts = {})
|
|
97
|
+
opts = @query.merge(opts)
|
|
98
|
+
|
|
99
|
+
if key_hash && !key_hash.empty?
|
|
100
|
+
conditions = @query[:key_conditions]
|
|
101
|
+
opts[:key_conditions] = conditions.merge(Hash[
|
|
102
|
+
key_hash.map do |key, value|
|
|
103
|
+
[key, {
|
|
104
|
+
attribute_value_list: [value],
|
|
105
|
+
comparison_operator: "EQ"
|
|
106
|
+
}]
|
|
107
|
+
end
|
|
108
|
+
]).freeze
|
|
109
|
+
end
|
|
110
|
+
|
|
111
|
+
dup_as(klass, query: opts.freeze)
|
|
112
|
+
end
|
|
113
|
+
|
|
114
|
+
def to_expected(hash)
|
|
115
|
+
hash && Hash[hash.map do |k, v|
|
|
116
|
+
[k, { value: v }]
|
|
117
|
+
end]
|
|
118
|
+
end
|
|
119
|
+
|
|
120
|
+
def hash_to_key(hash)
|
|
121
|
+
table_keys.each_with_object({}) do |k, out|
|
|
122
|
+
out[k] = hash[k] if hash.has_key?(k)
|
|
123
|
+
end
|
|
124
|
+
end
|
|
125
|
+
|
|
126
|
+
def table_keys
|
|
127
|
+
@table_keys ||= begin
|
|
128
|
+
r = ddb.describe_table(table_name: name)
|
|
129
|
+
r[:table][:key_schema].map(&:attribute_name)
|
|
130
|
+
end
|
|
131
|
+
end
|
|
132
|
+
|
|
133
|
+
def start_query(opts = {}, &block)
|
|
134
|
+
opts = @query.merge(table_name: name).merge!(opts)
|
|
135
|
+
logger&.debug("Querying DDB: #{opts.inspect}")
|
|
136
|
+
ddb.query(opts)
|
|
137
|
+
end
|
|
138
|
+
|
|
139
|
+
def dup_as(klass, opts = {})
|
|
140
|
+
table_keys # To populate keys once at top-level Dataset
|
|
141
|
+
attrs = Dataset.dry_initializer.attributes(self)
|
|
142
|
+
klass.new(**attrs.merge(opts))
|
|
143
|
+
end
|
|
144
|
+
|
|
145
|
+
# String modifiers
|
|
146
|
+
def stringify_keys(hash)
|
|
147
|
+
hash.each_with_object({}) { |(k, v), out| out[k.to_s] = v }
|
|
148
|
+
end
|
|
149
|
+
|
|
150
|
+
def dump_value(v)
|
|
151
|
+
return v.new_offset(0).iso8601(6) if v.is_a?(DateTime)
|
|
152
|
+
v.is_a?(Time) ? v.utc.iso8601(6) : v
|
|
153
|
+
end
|
|
154
|
+
end
|
|
155
|
+
|
|
156
|
+
# Batch get using an array of key queries
|
|
157
|
+
# [{ key => val }, { key => val }, ...]
|
|
158
|
+
class BatchGetDataset < Dataset
|
|
159
|
+
option :keys
|
|
160
|
+
|
|
161
|
+
# Query for records
|
|
162
|
+
def each_page(&block)
|
|
163
|
+
return enum_for(:each_page) if block.nil?
|
|
164
|
+
batch_get_each_page(@keys) do |items|
|
|
165
|
+
klass = Aws::DynamoDB::Types::QueryOutput
|
|
166
|
+
block.call(klass.new(items: items, count: items.size))
|
|
167
|
+
end
|
|
168
|
+
end
|
|
169
|
+
end
|
|
170
|
+
|
|
171
|
+
# Dataset queried via a Global Secondary Index
|
|
172
|
+
# Paginate through keys from Global Index and
|
|
173
|
+
# call BatchGetItem for keys from each page
|
|
174
|
+
class GlobalIndexDataset < Dataset
|
|
175
|
+
def each_page(&block)
|
|
176
|
+
return enum_for(:each_page) if block.nil?
|
|
177
|
+
if @query[:limit]
|
|
178
|
+
block.call(populated_results(start_query))
|
|
179
|
+
else
|
|
180
|
+
start_query(limit: 100).each_page do |p|
|
|
181
|
+
block.call(populated_results(p))
|
|
182
|
+
end
|
|
183
|
+
end
|
|
184
|
+
end
|
|
185
|
+
|
|
186
|
+
private def populated_results(result, &block)
|
|
187
|
+
klass = Aws::DynamoDB::Types::QueryOutput
|
|
188
|
+
keys = result.items.map { |h| hash_to_key(h) }
|
|
189
|
+
klass.new(result.to_hash.merge(items: [].tap do |out|
|
|
190
|
+
batch_get_each_page(keys) { |i| out.concat(i) }
|
|
191
|
+
end))
|
|
192
|
+
end
|
|
193
|
+
|
|
194
|
+
end
|
|
195
|
+
end
|
|
196
|
+
end
|
|
@@ -4,7 +4,7 @@ require 'rom/gateway'
|
|
|
4
4
|
module Rom
|
|
5
5
|
module Dynamo
|
|
6
6
|
class Gateway < ROM::Gateway
|
|
7
|
-
attr_reader :ddb, :options
|
|
7
|
+
attr_reader :ddb, :options, :logger
|
|
8
8
|
|
|
9
9
|
def initialize(uri)
|
|
10
10
|
uri = Addressable::URI.parse(uri)
|
|
@@ -24,7 +24,7 @@ module Rom
|
|
|
24
24
|
|
|
25
25
|
def dataset(name)
|
|
26
26
|
name = "#{@prefix}#{name}"
|
|
27
|
-
@datasets[name] ||= _has?(name) && Dataset.new(connection: @ddb, name: name)
|
|
27
|
+
@datasets[name] ||= _has?(name) && Dataset.new(connection: @ddb, name: name, logger: @logger)
|
|
28
28
|
end
|
|
29
29
|
|
|
30
30
|
def dataset?(name)
|
data/lib/rom/dynamo/relation.rb
CHANGED
|
@@ -3,180 +3,19 @@ module Rom
|
|
|
3
3
|
class Relation < ROM::Relation
|
|
4
4
|
include Enumerable
|
|
5
5
|
forward :restrict, :batch_restrict, :index_restrict
|
|
6
|
-
forward :limit, :reversed
|
|
6
|
+
forward :limit, :reversed, :offset
|
|
7
7
|
adapter :dynamo
|
|
8
|
-
end
|
|
9
|
-
|
|
10
|
-
class Dataset
|
|
11
|
-
include Enumerable
|
|
12
|
-
include Dry::Equalizer(:name, :connection)
|
|
13
|
-
extend Dry::Initializer[undefined: false]
|
|
14
|
-
EmptyQuery = { key_conditions: {}.freeze }.freeze
|
|
15
|
-
|
|
16
|
-
option :connection
|
|
17
|
-
option :name, proc(&:to_s)
|
|
18
|
-
option :table_keys, optional: true, reader: false
|
|
19
|
-
option :query, default: proc { EmptyQuery }, reader: false
|
|
20
|
-
alias_method :ddb, :connection
|
|
21
8
|
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
9
|
+
def each_page(&block)
|
|
10
|
+
return enum_for(:each_page) if block.nil?
|
|
11
|
+
dataset.each_page do |page|
|
|
12
|
+
items = page[:items].map { |t| output_schema[t] }
|
|
13
|
+
items = mapper.(items).to_a if auto_map?
|
|
14
|
+
hash = page.to_hash.merge(items: items)
|
|
15
|
+
hash[:last_evaluated_key] ||= nil
|
|
16
|
+
block.call(ROM::OpenStruct.new(hash))
|
|
28
17
|
end
|
|
29
18
|
end
|
|
30
|
-
|
|
31
|
-
def restrict(query = nil)
|
|
32
|
-
return self if query.nil?
|
|
33
|
-
dup_with_query(Dataset, query)
|
|
34
|
-
end
|
|
35
|
-
|
|
36
|
-
def batch_restrict(keys)
|
|
37
|
-
dup_as(BatchGetDataset, keys: keys.map do |k|
|
|
38
|
-
Hash[table_keys.zip(k.is_a?(Array) ? k : [k])]
|
|
39
|
-
end)
|
|
40
|
-
end
|
|
41
|
-
|
|
42
|
-
def index_restrict(index, query)
|
|
43
|
-
dup_with_query(GlobalIndexDataset, query, index_name: index.to_s)
|
|
44
|
-
end
|
|
45
|
-
|
|
46
|
-
############# PAGINATION #############
|
|
47
|
-
|
|
48
|
-
def limit(limit)
|
|
49
|
-
dup_with_query(self.class, nil, limit: limit.to_i)
|
|
50
|
-
end
|
|
51
|
-
|
|
52
|
-
def reversed
|
|
53
|
-
dup_with_query(self.class, nil, scan_index_forward: false)
|
|
54
|
-
end
|
|
55
|
-
|
|
56
|
-
############# WRITE #############
|
|
57
|
-
def insert(hash)
|
|
58
|
-
opts = { table_name: name, item: stringify_keys(hash) }
|
|
59
|
-
connection.put_item(opts).attributes
|
|
60
|
-
end
|
|
61
|
-
|
|
62
|
-
def delete(hash)
|
|
63
|
-
hash = stringify_keys(hash)
|
|
64
|
-
connection.delete_item({
|
|
65
|
-
table_name: name,
|
|
66
|
-
key: hash_to_key(hash),
|
|
67
|
-
expected: to_expected(hash),
|
|
68
|
-
}).attributes
|
|
69
|
-
end
|
|
70
|
-
|
|
71
|
-
def update(keys, hash)
|
|
72
|
-
connection.update_item({
|
|
73
|
-
table_name: name, key: hash_to_key(stringify_keys(keys)),
|
|
74
|
-
attribute_updates: hash.each_with_object({}) do |(k, v), out|
|
|
75
|
-
out[k] = { value: dump_value(v), action: 'PUT' } if !keys[k]
|
|
76
|
-
end
|
|
77
|
-
}).attributes
|
|
78
|
-
end
|
|
79
|
-
|
|
80
|
-
############# HELPERS #############
|
|
81
|
-
private
|
|
82
|
-
def batch_get_each_item(keys, &block)
|
|
83
|
-
!keys.empty? && ddb.batch_get_item({
|
|
84
|
-
request_items: { name => { keys: keys } },
|
|
85
|
-
}).each_page do |page|
|
|
86
|
-
out = page[:responses][name]
|
|
87
|
-
out.each(&block)
|
|
88
|
-
end
|
|
89
|
-
end
|
|
90
|
-
|
|
91
|
-
def dup_with_query(klass, key_hash, opts = {})
|
|
92
|
-
opts = @query.merge(opts)
|
|
93
|
-
|
|
94
|
-
if key_hash && !key_hash.empty?
|
|
95
|
-
conditions = @query[:key_conditions]
|
|
96
|
-
opts[:key_conditions] = conditions.merge(Hash[
|
|
97
|
-
key_hash.map do |key, value|
|
|
98
|
-
[key, {
|
|
99
|
-
attribute_value_list: [value],
|
|
100
|
-
comparison_operator: "EQ"
|
|
101
|
-
}]
|
|
102
|
-
end
|
|
103
|
-
]).freeze
|
|
104
|
-
end
|
|
105
|
-
|
|
106
|
-
dup_as(klass, query: opts.freeze)
|
|
107
|
-
end
|
|
108
|
-
|
|
109
|
-
def to_expected(hash)
|
|
110
|
-
hash && Hash[hash.map do |k, v|
|
|
111
|
-
[k, { value: v }]
|
|
112
|
-
end]
|
|
113
|
-
end
|
|
114
|
-
|
|
115
|
-
def hash_to_key(hash)
|
|
116
|
-
table_keys.each_with_object({}) do |k, out|
|
|
117
|
-
out[k] = hash[k] if hash.has_key?(k)
|
|
118
|
-
end
|
|
119
|
-
end
|
|
120
|
-
|
|
121
|
-
def table_keys
|
|
122
|
-
@table_keys ||= begin
|
|
123
|
-
r = ddb.describe_table(table_name: name)
|
|
124
|
-
r[:table][:key_schema].map(&:attribute_name)
|
|
125
|
-
end
|
|
126
|
-
end
|
|
127
|
-
|
|
128
|
-
def start_query(opts = {}, &block)
|
|
129
|
-
opts = @query.merge(table_name: name).merge!(opts)
|
|
130
|
-
puts "Querying DDB: #{opts.inspect}"
|
|
131
|
-
ddb.query(opts)
|
|
132
|
-
end
|
|
133
|
-
|
|
134
|
-
def dup_as(klass, opts = {})
|
|
135
|
-
table_keys # To populate keys once at top-level Dataset
|
|
136
|
-
attrs = Dataset.dry_initializer.attributes(self)
|
|
137
|
-
klass.new(attrs.merge(opts))
|
|
138
|
-
end
|
|
139
|
-
|
|
140
|
-
# String modifiers
|
|
141
|
-
def stringify_keys(hash)
|
|
142
|
-
hash.each_with_object({}) { |(k, v), out| out[k.to_s] = v }
|
|
143
|
-
end
|
|
144
|
-
|
|
145
|
-
def dump_value(v)
|
|
146
|
-
return v.new_offset(0).iso8601(6) if v.is_a?(DateTime)
|
|
147
|
-
v.is_a?(Time) ? v.utc.iso8601(6) : v
|
|
148
|
-
end
|
|
149
|
-
end
|
|
150
|
-
|
|
151
|
-
# Batch get using an array of key queries
|
|
152
|
-
# [{ key => val }, { key => val }, ...]
|
|
153
|
-
class BatchGetDataset < Dataset
|
|
154
|
-
option :keys
|
|
155
|
-
|
|
156
|
-
# Query for records
|
|
157
|
-
def each(&block)
|
|
158
|
-
batch_get_each_item(@keys, &block)
|
|
159
|
-
end
|
|
160
19
|
end
|
|
161
|
-
|
|
162
|
-
# Dataset queried via a Global Secondary Index
|
|
163
|
-
# Paginate through keys from Global Index and
|
|
164
|
-
# call BatchGetItem for keys from each page
|
|
165
|
-
class GlobalIndexDataset < Dataset
|
|
166
|
-
def each(&block)
|
|
167
|
-
if @query[:limit]
|
|
168
|
-
each_item(start_query, &block)
|
|
169
|
-
else
|
|
170
|
-
result = start_query(limit: 100)
|
|
171
|
-
result.each_page { |p| each_item(p, &block) }
|
|
172
|
-
end
|
|
173
|
-
end
|
|
174
|
-
|
|
175
|
-
private def each_item(result, &block)
|
|
176
|
-
keys = result[:items].map { |h| hash_to_key(h) }
|
|
177
|
-
batch_get_each_item(keys, &block)
|
|
178
|
-
end
|
|
179
|
-
end
|
|
180
|
-
|
|
181
20
|
end
|
|
182
21
|
end
|
data/lib/rom/dynamo/version.rb
CHANGED
data/lib/rom/dynamo.rb
CHANGED
|
@@ -1,13 +1,16 @@
|
|
|
1
1
|
require 'rom'
|
|
2
2
|
require 'date'
|
|
3
3
|
require 'aws-sdk-dynamodb'
|
|
4
|
-
require
|
|
4
|
+
require 'rom/dynamo/version'
|
|
5
|
+
require 'rom/dynamo/dataset'
|
|
5
6
|
require 'rom/dynamo/relation'
|
|
6
7
|
require 'rom/dynamo/commands'
|
|
7
|
-
require 'rom/dynamo/
|
|
8
|
+
require 'rom/dynamo/gateway'
|
|
8
9
|
|
|
9
10
|
# jRuby HACK: https://github.com/jruby/jruby/issues/3645#issuecomment-181660161
|
|
10
|
-
|
|
11
|
+
if RUBY_ENGINE == 'jruby'
|
|
12
|
+
module Aws; const_set(:DynamoDB, Aws::DynamoDB) end
|
|
13
|
+
end
|
|
11
14
|
|
|
12
15
|
# Register adapter with ROM-rb
|
|
13
16
|
ROM.register_adapter(:dynamo, Rom::Dynamo)
|
data/rom-dynamo.gemspec
CHANGED
|
@@ -23,13 +23,16 @@ Gem::Specification.new do |spec|
|
|
|
23
23
|
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
|
|
24
24
|
spec.require_paths = ["lib"]
|
|
25
25
|
|
|
26
|
+
# Ruby 2.4 and above
|
|
27
|
+
spec.required_ruby_version = Gem::Requirement.new('>= 2.4.0')
|
|
28
|
+
|
|
26
29
|
# Runtime
|
|
27
30
|
spec.add_runtime_dependency "addressable", "~> 2.3"
|
|
28
31
|
spec.add_runtime_dependency "rom", ">= 1.0", "< 6.0"
|
|
29
32
|
spec.add_runtime_dependency "aws-sdk-dynamodb", "~> 1.0"
|
|
30
33
|
|
|
31
34
|
# Development
|
|
32
|
-
spec.add_development_dependency "activesupport", ">= 4.0", "<
|
|
33
|
-
spec.add_development_dependency "bundler", "
|
|
35
|
+
spec.add_development_dependency "activesupport", ">= 4.0", "< 9.0"
|
|
36
|
+
spec.add_development_dependency "bundler", ">= 1.7"
|
|
34
37
|
spec.add_development_dependency "rake", "~> 13.0"
|
|
35
38
|
end
|
metadata
CHANGED
|
@@ -1,14 +1,14 @@
|
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
|
2
2
|
name: rom-dynamo
|
|
3
3
|
version: !ruby/object:Gem::Version
|
|
4
|
-
version: 0.
|
|
4
|
+
version: 0.16.0
|
|
5
5
|
platform: ruby
|
|
6
6
|
authors:
|
|
7
7
|
- Michael Rykov
|
|
8
8
|
autorequire:
|
|
9
9
|
bindir: exe
|
|
10
10
|
cert_chain: []
|
|
11
|
-
date:
|
|
11
|
+
date: 2021-04-01 00:00:00.000000000 Z
|
|
12
12
|
dependencies:
|
|
13
13
|
- !ruby/object:Gem::Dependency
|
|
14
14
|
name: addressable
|
|
@@ -67,7 +67,7 @@ dependencies:
|
|
|
67
67
|
version: '4.0'
|
|
68
68
|
- - "<"
|
|
69
69
|
- !ruby/object:Gem::Version
|
|
70
|
-
version: '
|
|
70
|
+
version: '9.0'
|
|
71
71
|
type: :development
|
|
72
72
|
prerelease: false
|
|
73
73
|
version_requirements: !ruby/object:Gem::Requirement
|
|
@@ -77,19 +77,19 @@ dependencies:
|
|
|
77
77
|
version: '4.0'
|
|
78
78
|
- - "<"
|
|
79
79
|
- !ruby/object:Gem::Version
|
|
80
|
-
version: '
|
|
80
|
+
version: '9.0'
|
|
81
81
|
- !ruby/object:Gem::Dependency
|
|
82
82
|
name: bundler
|
|
83
83
|
requirement: !ruby/object:Gem::Requirement
|
|
84
84
|
requirements:
|
|
85
|
-
- - "
|
|
85
|
+
- - ">="
|
|
86
86
|
- !ruby/object:Gem::Version
|
|
87
87
|
version: '1.7'
|
|
88
88
|
type: :development
|
|
89
89
|
prerelease: false
|
|
90
90
|
version_requirements: !ruby/object:Gem::Requirement
|
|
91
91
|
requirements:
|
|
92
|
-
- - "
|
|
92
|
+
- - ">="
|
|
93
93
|
- !ruby/object:Gem::Version
|
|
94
94
|
version: '1.7'
|
|
95
95
|
- !ruby/object:Gem::Dependency
|
|
@@ -113,6 +113,7 @@ executables: []
|
|
|
113
113
|
extensions: []
|
|
114
114
|
extra_rdoc_files: []
|
|
115
115
|
files:
|
|
116
|
+
- ".github/workflows/specs.yml"
|
|
116
117
|
- ".gitignore"
|
|
117
118
|
- ".rspec"
|
|
118
119
|
- ".travis.yml"
|
|
@@ -125,8 +126,9 @@ files:
|
|
|
125
126
|
- lib/rom-dynamo.rb
|
|
126
127
|
- lib/rom/dynamo.rb
|
|
127
128
|
- lib/rom/dynamo/commands.rb
|
|
129
|
+
- lib/rom/dynamo/dataset.rb
|
|
130
|
+
- lib/rom/dynamo/gateway.rb
|
|
128
131
|
- lib/rom/dynamo/relation.rb
|
|
129
|
-
- lib/rom/dynamo/repository.rb
|
|
130
132
|
- lib/rom/dynamo/version.rb
|
|
131
133
|
- rom-dynamo.gemspec
|
|
132
134
|
homepage: https://github.com/rykov/rom-dynamo
|
|
@@ -141,7 +143,7 @@ required_ruby_version: !ruby/object:Gem::Requirement
|
|
|
141
143
|
requirements:
|
|
142
144
|
- - ">="
|
|
143
145
|
- !ruby/object:Gem::Version
|
|
144
|
-
version:
|
|
146
|
+
version: 2.4.0
|
|
145
147
|
required_rubygems_version: !ruby/object:Gem::Requirement
|
|
146
148
|
requirements:
|
|
147
149
|
- - ">="
|