monday_ruby 1.0.0 → 1.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.env +1 -1
- data/.rspec +0 -1
- data/.rubocop.yml +19 -0
- data/.simplecov +1 -0
- data/CHANGELOG.md +49 -0
- data/CONTRIBUTING.md +165 -0
- data/README.md +167 -88
- data/docs/.vitepress/config.mjs +255 -0
- data/docs/.vitepress/theme/index.js +4 -0
- data/docs/.vitepress/theme/style.css +43 -0
- data/docs/README.md +80 -0
- data/docs/explanation/architecture.md +507 -0
- data/docs/explanation/best-practices/errors.md +478 -0
- data/docs/explanation/best-practices/performance.md +1084 -0
- data/docs/explanation/best-practices/rate-limiting.md +630 -0
- data/docs/explanation/best-practices/testing.md +820 -0
- data/docs/explanation/column-values.md +857 -0
- data/docs/explanation/design.md +795 -0
- data/docs/explanation/graphql.md +356 -0
- data/docs/explanation/migration/v1.md +808 -0
- data/docs/explanation/pagination.md +447 -0
- data/docs/guides/advanced/batch.md +1274 -0
- data/docs/guides/advanced/complex-queries.md +1114 -0
- data/docs/guides/advanced/errors.md +818 -0
- data/docs/guides/advanced/pagination.md +934 -0
- data/docs/guides/advanced/rate-limiting.md +981 -0
- data/docs/guides/authentication.md +286 -0
- data/docs/guides/boards/create.md +386 -0
- data/docs/guides/boards/delete.md +405 -0
- data/docs/guides/boards/duplicate.md +511 -0
- data/docs/guides/boards/query.md +530 -0
- data/docs/guides/boards/update.md +453 -0
- data/docs/guides/columns/create.md +452 -0
- data/docs/guides/columns/metadata.md +492 -0
- data/docs/guides/columns/query.md +455 -0
- data/docs/guides/columns/update-multiple.md +459 -0
- data/docs/guides/columns/update-values.md +509 -0
- data/docs/guides/files/add-to-column.md +40 -0
- data/docs/guides/files/add-to-update.md +37 -0
- data/docs/guides/files/clear-column.md +33 -0
- data/docs/guides/first-request.md +285 -0
- data/docs/guides/folders/manage.md +750 -0
- data/docs/guides/groups/items.md +626 -0
- data/docs/guides/groups/manage.md +501 -0
- data/docs/guides/installation.md +169 -0
- data/docs/guides/items/create.md +493 -0
- data/docs/guides/items/delete.md +514 -0
- data/docs/guides/items/query.md +605 -0
- data/docs/guides/items/subitems.md +483 -0
- data/docs/guides/items/update.md +699 -0
- data/docs/guides/updates/manage.md +619 -0
- data/docs/guides/use-cases/dashboard.md +1421 -0
- data/docs/guides/use-cases/import.md +1962 -0
- data/docs/guides/use-cases/task-management.md +1381 -0
- data/docs/guides/workspaces/manage.md +502 -0
- data/docs/index.md +69 -0
- data/docs/package-lock.json +2468 -0
- data/docs/package.json +13 -0
- data/docs/reference/client.md +540 -0
- data/docs/reference/configuration.md +586 -0
- data/docs/reference/errors.md +693 -0
- data/docs/reference/resources/account.md +208 -0
- data/docs/reference/resources/activity-log.md +369 -0
- data/docs/reference/resources/board-view.md +359 -0
- data/docs/reference/resources/board.md +393 -0
- data/docs/reference/resources/column.md +543 -0
- data/docs/reference/resources/file.md +236 -0
- data/docs/reference/resources/folder.md +386 -0
- data/docs/reference/resources/group.md +507 -0
- data/docs/reference/resources/item.md +348 -0
- data/docs/reference/resources/subitem.md +267 -0
- data/docs/reference/resources/update.md +259 -0
- data/docs/reference/resources/workspace.md +213 -0
- data/docs/reference/response.md +560 -0
- data/docs/tutorial/first-integration.md +713 -0
- data/lib/monday/client.rb +41 -2
- data/lib/monday/configuration.rb +13 -0
- data/lib/monday/deprecation.rb +23 -0
- data/lib/monday/error.rb +5 -2
- data/lib/monday/request.rb +19 -1
- data/lib/monday/resources/base.rb +4 -0
- data/lib/monday/resources/board.rb +52 -0
- data/lib/monday/resources/column.rb +6 -0
- data/lib/monday/resources/file.rb +56 -0
- data/lib/monday/resources/folder.rb +55 -0
- data/lib/monday/resources/group.rb +66 -0
- data/lib/monday/resources/item.rb +62 -0
- data/lib/monday/util.rb +33 -1
- data/lib/monday/version.rb +1 -1
- data/lib/monday_ruby.rb +1 -0
- metadata +92 -11
- data/monday_ruby.gemspec +0 -39
@@ -0,0 +1,1962 @@
# Data Import Guide

Learn how to import data from various sources (CSV, JSON, databases, APIs) into monday.com using the monday_ruby gem. This guide covers data validation, error handling, progress tracking, and resume functionality for production-ready import systems.

## Overview

This guide demonstrates building robust data import systems that can:

- Import from CSV, JSON, SQL databases, and REST APIs
- Validate data before import to catch errors early
- Handle large datasets with batching and rate limiting
- Track progress and resume failed imports
- Recover from errors gracefully
- Provide detailed import reports

## Prerequisites

```ruby
# Gemfile
gem 'monday_ruby'
gem 'csv'      # Built-in, for CSV imports
gem 'json'     # Built-in, for JSON imports
gem 'sequel'   # Optional, for database imports
gem 'httparty' # Optional, for API imports
```

Initialize the client:

```ruby
require 'monday_ruby'
require 'csv'
require 'json'

client = Monday::Client.new(token: ENV['MONDAY_API_TOKEN'])
```

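The gem also supports global configuration, so the token does not have to be passed to every client. A minimal sketch using the gem's `Monday.configure` block:

```ruby
# Alternative: configure the token globally once, then build clients
# without arguments.
Monday.configure do |config|
  config.token = ENV['MONDAY_API_TOKEN']
end

client = Monday::Client.new
```
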
## Import from CSV

### Basic CSV Import

Import a simple CSV file into a monday.com board:

```ruby
require 'monday_ruby'
require 'csv'
require 'json' # for JSON.generate

class CSVImporter
  def initialize(client, board_id)
    @client = client
    @board_id = board_id
  end

  def import(csv_file_path)
    results = { success: 0, failed: 0, errors: [] }

    CSV.foreach(csv_file_path, headers: true) do |row|
      begin
        create_item(row)
        results[:success] += 1
      rescue => e
        results[:failed] += 1
        results[:errors] << { row: row.to_h, error: e.message }
      end
    end

    results
  end

  private

  def create_item(row)
    # Map CSV columns to monday.com column values
    column_values = {
      "status" => { "label" => row["status"] },
      "text" => row["description"],
      "numbers" => row["amount"].to_f,
      "date" => row["due_date"]
    }

    @client.item.create(
      args: {
        board_id: @board_id,
        item_name: row["name"],
        column_values: JSON.generate(column_values)
      },
      select: ["id", "name"]
    )
  end
end

# Usage
client = Monday::Client.new(token: ENV['MONDAY_API_TOKEN'])
importer = CSVImporter.new(client, 123456789)
results = importer.import('data/tasks.csv')

puts "Imported: #{results[:success]}, Failed: #{results[:failed]}"
results[:errors].each do |error|
  puts "Error in row #{error[:row]}: #{error[:error]}"
end
```

**Example CSV file (data/tasks.csv):**

```csv
name,status,description,amount,due_date
Task 1,Working on it,First task description,100.50,2025-01-15
Task 2,Done,Second task description,250.75,2025-01-20
Task 3,Stuck,Third task description,75.00,2025-01-25
```

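The column IDs used above (`status`, `text`, `numbers`, `date`) are the defaults on a fresh board; your board's IDs may differ. A sketch that lists a board's actual column IDs so the mapping can be adjusted, assuming the gem's nested `select` syntax (a hash entry selects nested fields):

```ruby
# List column IDs, titles, and types for a board before writing a mapping.
response = client.board.query(
  args: { ids: [123456789] },
  select: ["id", { columns: ["id", "title", "type"] }]
)

response.body.dig("data", "boards", 0, "columns").each do |column|
  puts "#{column['id']}: #{column['title']} (#{column['type']})"
end
```
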
### Advanced CSV Import with Batching

For large CSV files, process rows in batches so you can report progress and throttle API requests:

```ruby
class BatchedCSVImporter
  BATCH_SIZE = 50

  def initialize(client, board_id)
    @client = client
    @board_id = board_id
  end

  def import(csv_file_path, &progress_callback)
    total_rows = CSV.read(csv_file_path).length - 1 # Subtract header
    processed = 0
    results = { success: 0, failed: 0, errors: [] }

    batch = []

    CSV.foreach(csv_file_path, headers: true).with_index do |row, index|
      batch << row

      if batch.size >= BATCH_SIZE || index == total_rows - 1
        batch_results = import_batch(batch)
        results[:success] += batch_results[:success]
        results[:failed] += batch_results[:failed]
        results[:errors].concat(batch_results[:errors])

        processed += batch.size
        progress_callback&.call(processed, total_rows)

        batch = []
        sleep(1) # Rate limiting: 1 second between batches
      end
    end

    results
  end

  private

  def import_batch(rows)
    results = { success: 0, failed: 0, errors: [] }

    rows.each do |row|
      begin
        create_item(row)
        results[:success] += 1
      rescue => e
        results[:failed] += 1
        results[:errors] << { row: row.to_h, error: e.message }
      end
    end

    results
  end

  def create_item(row)
    column_values = {
      "status" => { "label" => row["status"] },
      "text" => row["description"],
      "numbers" => row["amount"].to_f,
      "date" => row["due_date"]
    }

    @client.item.create(
      args: {
        board_id: @board_id,
        item_name: row["name"],
        column_values: JSON.generate(column_values)
      },
      select: ["id"]
    )
  end
end

# Usage with progress tracking
client = Monday::Client.new(token: ENV['MONDAY_API_TOKEN'])
importer = BatchedCSVImporter.new(client, 123456789)

results = importer.import('data/large_dataset.csv') do |processed, total|
  percentage = (processed.to_f / total * 100).round(2)
  puts "Progress: #{processed}/#{total} (#{percentage}%)"
end

puts "\nImport complete!"
puts "Success: #{results[:success]}, Failed: #{results[:failed]}"
```

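The fixed `sleep(1)` between batches is a blunt throttle. When batch processing time varies, a small monotonic-clock throttle keeps a minimum interval between batches without always paying the full second. A sketch in plain Ruby (nothing here is gem API):

```ruby
# Minimal throttle: enforce a minimum interval between calls, measured on
# the monotonic clock so wall-clock adjustments don't skew the delay.
class Throttle
  def initialize(min_interval)
    @min_interval = min_interval
    @last = nil
  end

  def wait
    now = Process.clock_gettime(Process::CLOCK_MONOTONIC)
    sleep(@min_interval - (now - @last)) if @last && (now - @last) < @min_interval
    @last = Process.clock_gettime(Process::CLOCK_MONOTONIC)
  end
end

# Inside the import loop, replace sleep(1) with throttle.wait
throttle = Throttle.new(1.0)
```
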
### CSV Import with Column Mapping

Allow flexible column mapping from CSV to monday.com:

```ruby
class ConfigurableCSVImporter
  def initialize(client, board_id, column_mapping)
    @client = client
    @board_id = board_id
    @column_mapping = column_mapping
  end

  def import(csv_file_path)
    results = { success: 0, failed: 0, errors: [] }

    CSV.foreach(csv_file_path, headers: true) do |row|
      begin
        create_item(row)
        results[:success] += 1
      rescue => e
        results[:failed] += 1
        results[:errors] << { row: row.to_h, error: e.message }
      end
    end

    results
  end

  private

  def create_item(row)
    # Use the item_name field from mapping
    item_name = row[@column_mapping[:item_name]]

    # Build column values based on mapping
    column_values = {}
    @column_mapping[:columns].each do |monday_column, config|
      csv_column = config[:csv_column]
      value = row[csv_column]

      next if value.nil? || value.strip.empty?

      column_values[monday_column] = format_value(value, config[:type])
    end

    @client.item.create(
      args: {
        board_id: @board_id,
        item_name: item_name,
        column_values: JSON.generate(column_values)
      },
      select: ["id"]
    )
  end

  def format_value(value, type)
    case type
    when :status
      { "label" => value }
    when :text
      value
    when :number
      value.to_f
    when :date
      value
    when :person
      { "personsAndTeams" => [{ "id" => value.to_i, "kind" => "person" }] }
    when :dropdown
      { "labels" => [value] }
    else
      value
    end
  end
end

# Define mapping configuration
mapping = {
  item_name: "Task Name",
  columns: {
    "status" => { csv_column: "Current Status", type: :status },
    "text" => { csv_column: "Notes", type: :text },
    "numbers" => { csv_column: "Budget", type: :number },
    "date" => { csv_column: "Deadline", type: :date },
    "person" => { csv_column: "Owner ID", type: :person },
    "dropdown" => { csv_column: "Priority", type: :dropdown }
  }
}

# Usage
client = Monday::Client.new(token: ENV['MONDAY_API_TOKEN'])
importer = ConfigurableCSVImporter.new(client, 123456789, mapping)
results = importer.import('data/custom_format.csv')
```

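For reference, a `data/custom_format.csv` consistent with the mapping above might look like this (the rows are illustrative; `Owner ID` holds a monday.com user ID):

```csv
Task Name,Current Status,Notes,Budget,Deadline,Owner ID,Priority
Redesign homepage,Working on it,First draft under review,1200,2025-02-01,12345678,High
Fix login bug,Stuck,Waiting on API keys,300,2025-01-28,87654321,Critical
```
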
## Import from JSON

### Basic JSON Import

Import from a JSON file or API response:

```ruby
require 'monday_ruby'
require 'json'

class JSONImporter
  def initialize(client, board_id)
    @client = client
    @board_id = board_id
  end

  def import(json_file_path)
    data = JSON.parse(File.read(json_file_path))
    results = { success: 0, failed: 0, errors: [] }

    data.each do |record|
      begin
        create_item(record)
        results[:success] += 1
      rescue => e
        results[:failed] += 1
        results[:errors] << { record: record, error: e.message }
      end
    end

    results
  end

  private

  def create_item(record)
    column_values = build_column_values(record)

    @client.item.create(
      args: {
        board_id: @board_id,
        item_name: record["name"],
        column_values: JSON.generate(column_values)
      },
      select: ["id", "name"]
    )
  end

  def build_column_values(record)
    {
      "status" => { "label" => record["status"] },
      "text" => record["description"],
      "numbers" => record["amount"],
      "date" => record["due_date"]
    }
  end
end

# Usage
client = Monday::Client.new(token: ENV['MONDAY_API_TOKEN'])
importer = JSONImporter.new(client, 123456789)
results = importer.import('data/tasks.json')
```

**Example JSON file (data/tasks.json):**

```json
[
  {
    "name": "Task 1",
    "status": "Working on it",
    "description": "First task description",
    "amount": 100.50,
    "due_date": "2025-01-15"
  },
  {
    "name": "Task 2",
    "status": "Done",
    "description": "Second task description",
    "amount": 250.75,
    "due_date": "2025-01-20"
  }
]
```

### Import Nested JSON Structures

Handle complex JSON with nested objects and arrays:

```ruby
class NestedJSONImporter
  def initialize(client, board_id)
    @client = client
    @board_id = board_id
  end

  def import(json_file_path)
    data = JSON.parse(File.read(json_file_path))
    results = { success: 0, failed: 0, errors: [] }

    data.each do |record|
      begin
        # Create main item
        item_response = create_main_item(record)
        # Response#body is the parsed GraphQL payload
        item_id = item_response.body.dig("data", "create_item", "id")

        # Create subitems if present
        if record["subtasks"] && !record["subtasks"].empty?
          create_subitems(item_id, record["subtasks"])
        end

        results[:success] += 1
      rescue => e
        results[:failed] += 1
        results[:errors] << { record: record, error: e.message }
      end
    end

    results
  end

  private

  def create_main_item(record)
    column_values = {
      "status" => { "label" => record["status"] },
      "text" => record["description"],
      "numbers" => record["budget"]["amount"],
      "dropdown" => { "labels" => record["tags"] }
    }

    @client.item.create(
      args: {
        board_id: @board_id,
        item_name: record["name"],
        column_values: JSON.generate(column_values)
      },
      select: ["id", "name"]
    )
  end

  def create_subitems(parent_id, subtasks)
    subtasks.each do |subtask|
      # Subitems hang off a parent item, so they go through the subitem
      # resource (create_item has no parent_item_id argument)
      @client.subitem.create(
        args: {
          parent_item_id: parent_id,
          item_name: subtask["name"]
        },
        select: ["id"]
      )
    end
  end
end

# Usage
client = Monday::Client.new(token: ENV['MONDAY_API_TOKEN'])
importer = NestedJSONImporter.new(client, 123456789)
results = importer.import('data/nested_tasks.json')
```

**Example nested JSON (data/nested_tasks.json):**

```json
[
  {
    "name": "Project Alpha",
    "status": "Working on it",
    "description": "Main project",
    "budget": {
      "amount": 5000,
      "currency": "USD"
    },
    "tags": ["urgent", "client-work"],
    "subtasks": [
      {"name": "Subtask 1"},
      {"name": "Subtask 2"}
    ]
  }
]
```

## Import from Database

### SQL Database Import

Import data from a SQL database using Sequel:

```ruby
require 'monday_ruby'
require 'sequel'

class DatabaseImporter
  BATCH_SIZE = 100

  def initialize(client, board_id, db_url)
    @client = client
    @board_id = board_id
    @db = Sequel.connect(db_url)
  end

  def import(table_name, where_clause: nil, &progress_callback)
    dataset = @db[table_name.to_sym]
    dataset = dataset.where(where_clause) if where_clause

    total_rows = dataset.count
    processed = 0
    results = { success: 0, failed: 0, errors: [] }

    dataset.each_slice(BATCH_SIZE) do |batch|
      batch.each do |row|
        begin
          create_item(row)
          results[:success] += 1
        rescue => e
          results[:failed] += 1
          results[:errors] << { row: row, error: e.message }
        end

        processed += 1
        progress_callback&.call(processed, total_rows)
      end

      sleep(1) # Rate limiting
    end

    results
  ensure
    @db.disconnect
  end

  private

  def create_item(row)
    column_values = {
      "status" => { "label" => row[:status] },
      "text" => row[:description],
      "numbers" => row[:amount].to_f,
      "date" => row[:due_date]&.strftime("%Y-%m-%d")
    }

    @client.item.create(
      args: {
        board_id: @board_id,
        item_name: row[:name],
        column_values: JSON.generate(column_values)
      },
      select: ["id"]
    )
  end
end

# Usage
client = Monday::Client.new(token: ENV['MONDAY_API_TOKEN'])
db_url = "postgres://user:password@localhost/mydb"

importer = DatabaseImporter.new(client, 123456789, db_url)
results = importer.import('tasks', where_clause: { active: true }) do |processed, total|
  puts "Imported #{processed}/#{total} records"
end
```

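Note that `import` disconnects in its `ensure` block, so a `DatabaseImporter` instance can only run one import. One alternative, sketched below, is to store the URL and scope the connection to each call with Sequel's block form, which disconnects automatically when the block returns:

```ruby
# Sketch: per-call connection. Assumes the initializer stores @db_url
# instead of connecting eagerly in initialize.
def import(table_name, where_clause: nil)
  Sequel.connect(@db_url) do |db|
    dataset = db[table_name.to_sym]
    dataset = dataset.where(where_clause) if where_clause
    dataset.each { |row| create_item(row) }
  end
end
```
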
### Database Import with Transformation

Transform database records before importing:

```ruby
class TransformingDatabaseImporter
  def initialize(client, board_id, db_url, transformer)
    @client = client
    @board_id = board_id
    @db = Sequel.connect(db_url)
    @transformer = transformer
  end

  def import(query)
    results = { success: 0, failed: 0, errors: [] }

    @db.fetch(query).each do |row|
      begin
        transformed_data = @transformer.call(row)
        create_item(transformed_data)
        results[:success] += 1
      rescue => e
        results[:failed] += 1
        results[:errors] << { row: row, error: e.message }
      end
    end

    results
  ensure
    @db.disconnect
  end

  private

  def create_item(data)
    @client.item.create(
      args: {
        board_id: @board_id,
        item_name: data[:name],
        column_values: JSON.generate(data[:columns])
      },
      select: ["id"]
    )
  end
end

# Define a transformer
transformer = lambda do |row|
  {
    name: "#{row[:first_name]} #{row[:last_name]}",
    columns: {
      "email" => row[:email],
      "text" => row[:notes],
      "status" => { "label" => row[:is_active] ? "Active" : "Inactive" },
      "date" => row[:created_at]&.strftime("%Y-%m-%d")
    }
  }
end

# Usage
client = Monday::Client.new(token: ENV['MONDAY_API_TOKEN'])
db_url = "postgres://user:password@localhost/mydb"

importer = TransformingDatabaseImporter.new(client, 123456789, db_url, transformer)
query = "SELECT * FROM users WHERE created_at > '2025-01-01'"
results = importer.import(query)
```

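When the query includes user-supplied values, prefer Sequel's placeholder form of `fetch` over interpolating values into the SQL string:

```ruby
# Bound parameter instead of an inlined literal; Sequel escapes the value.
# (The import method above would need a small change to pass bind values
# through to fetch.)
@db.fetch("SELECT * FROM users WHERE created_at > ?", Date.new(2025, 1, 1)).each do |row|
  # process row
end
```
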
## Import from External API

### REST API Import

Fetch data from an external API and import to monday.com:

```ruby
require 'monday_ruby'
require 'httparty'

class APIImporter
  def initialize(client, board_id, api_base_url, api_key: nil)
    @client = client
    @board_id = board_id
    @api_base_url = api_base_url
    @api_key = api_key
  end

  def import(endpoint)
    results = { success: 0, failed: 0, errors: [] }

    data = fetch_from_api(endpoint)

    data.each do |record|
      begin
        create_item(record)
        results[:success] += 1
      rescue => e
        results[:failed] += 1
        results[:errors] << { record: record, error: e.message }
      end
    end

    results
  end

  private

  def fetch_from_api(endpoint)
    url = "#{@api_base_url}/#{endpoint}"
    headers = {}
    headers['Authorization'] = "Bearer #{@api_key}" if @api_key

    response = HTTParty.get(url, headers: headers)

    if response.success?
      JSON.parse(response.body)
    else
      raise "API request failed: #{response.code} - #{response.message}"
    end
  end

  def create_item(record)
    column_values = transform_api_data(record)

    @client.item.create(
      args: {
        board_id: @board_id,
        item_name: record["title"],
        column_values: JSON.generate(column_values)
      },
      select: ["id"]
    )
  end

  def transform_api_data(record)
    {
      "status" => { "label" => record["status"] },
      "text" => record["description"],
      "link" => { "url" => record["url"], "text" => "View Original" }
    }
  end
end

# Usage
client = Monday::Client.new(token: ENV['MONDAY_API_TOKEN'])
importer = APIImporter.new(
  client,
  123456789,
  "https://api.example.com/v1",
  api_key: ENV['EXTERNAL_API_KEY']
)

results = importer.import('tasks')
```

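`HTTParty.get` falls back to Net::HTTP's default timeouts, which can stall an unattended import for a long while on a slow endpoint. Passing HTTParty's `timeout` option makes the fetch fail fast instead:

```ruby
# Fail fast on a dead or slow endpoint; timeout caps open and read time.
response = HTTParty.get(url, headers: headers, timeout: 15)
```
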
### API Import with Pagination

Handle paginated API responses:

```ruby
class PaginatedAPIImporter
  def initialize(client, board_id, api_base_url, api_key: nil)
    @client = client
    @board_id = board_id
    @api_base_url = api_base_url
    @api_key = api_key
  end

  def import(endpoint, page_size: 100, &progress_callback)
    results = { success: 0, failed: 0, errors: [], total_pages: 0 }
    page = 1

    loop do
      response_data = fetch_page(endpoint, page, page_size)
      break if response_data['items'].empty?

      results[:total_pages] = page

      response_data['items'].each do |record|
        begin
          create_item(record)
          results[:success] += 1
        rescue => e
          results[:failed] += 1
          results[:errors] << { record: record, error: e.message }
        end
      end

      progress_callback&.call(page, response_data['total_pages'])

      break unless response_data['has_more']
      page += 1
      sleep(1) # Rate limiting
    end

    results
  end

  private

  def fetch_page(endpoint, page, page_size)
    url = "#{@api_base_url}/#{endpoint}?page=#{page}&per_page=#{page_size}"
    headers = {}
    headers['Authorization'] = "Bearer #{@api_key}" if @api_key

    response = HTTParty.get(url, headers: headers)

    if response.success?
      JSON.parse(response.body)
    else
      raise "API request failed: #{response.code} - #{response.message}"
    end
  end

  def create_item(record)
    column_values = {
      "status" => { "label" => record["status"] },
      "text" => record["description"]
    }

    @client.item.create(
      args: {
        board_id: @board_id,
        item_name: record["title"],
        column_values: JSON.generate(column_values)
      },
      select: ["id"]
    )
  end
end

# Usage
client = Monday::Client.new(token: ENV['MONDAY_API_TOKEN'])
importer = PaginatedAPIImporter.new(
  client,
  123456789,
  "https://api.example.com/v1",
  api_key: ENV['EXTERNAL_API_KEY']
)

results = importer.import('tasks', page_size: 50) do |current_page, total_pages|
  puts "Processing page #{current_page}/#{total_pages}"
end
```

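The loop above assumes the external API wraps results in an envelope with `items`, `has_more`, and `total_pages` keys, for example:

```json
{
  "items": [
    { "title": "Task 1", "status": "Done", "description": "..." }
  ],
  "total_pages": 12,
  "has_more": true
}
```

APIs that paginate with cursors or `Link` headers need the `fetch_page` and `loop` logic adapted accordingly.
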
## Data Validation

### Pre-Import Validation

Validate data before importing to catch errors early:

```ruby
require 'date' # Date.parse lives in the date stdlib

class ValidatingImporter
  def initialize(client, board_id, validators)
    @client = client
    @board_id = board_id
    @validators = validators
  end

  def import(data)
    validation_results = validate_all(data)

    if validation_results[:invalid].any?
      return {
        success: 0,
        failed: validation_results[:invalid].count,
        errors: validation_results[:invalid],
        skipped_due_to_validation: true
      }
    end

    import_valid_data(validation_results[:valid])
  end

  private

  def validate_all(data)
    results = { valid: [], invalid: [] }

    data.each_with_index do |record, index|
      errors = validate_record(record)

      if errors.empty?
        results[:valid] << record
      else
        results[:invalid] << {
          index: index,
          record: record,
          errors: errors
        }
      end
    end

    results
  end

  def validate_record(record)
    errors = []

    @validators.each do |field, rules|
      value = record[field]

      if rules[:required] && (value.nil? || value.to_s.strip.empty?)
        errors << "#{field} is required"
      end

      if rules[:type] && value
        case rules[:type]
        when :number
          errors << "#{field} must be a number" unless value.to_s =~ /^\d+(\.\d+)?$/
        when :date
          errors << "#{field} must be a valid date" unless valid_date?(value)
        when :email
          errors << "#{field} must be a valid email" unless value =~ /\A[\w+\-.]+@[a-z\d\-]+(\.[a-z\d\-]+)*\.[a-z]+\z/i
        end
      end

      if rules[:max_length] && value && value.to_s.length > rules[:max_length]
        errors << "#{field} exceeds maximum length of #{rules[:max_length]}"
      end

      if rules[:allowed_values] && value && !rules[:allowed_values].include?(value)
        errors << "#{field} must be one of: #{rules[:allowed_values].join(', ')}"
      end
    end

    errors
  end

  def valid_date?(date_string)
    Date.parse(date_string.to_s)
    true
  rescue ArgumentError
    false
  end

  def import_valid_data(valid_records)
    results = { success: 0, failed: 0, errors: [] }

    valid_records.each do |record|
      begin
        create_item(record)
        results[:success] += 1
      rescue => e
        results[:failed] += 1
        results[:errors] << { record: record, error: e.message }
      end
    end

    results
  end

  def create_item(record)
    column_values = {
      "status" => { "label" => record["status"] },
      "text" => record["description"],
      "numbers" => record["amount"].to_f,
      "date" => record["due_date"]
    }

    @client.item.create(
      args: {
        board_id: @board_id,
        item_name: record["name"],
        column_values: JSON.generate(column_values)
      },
      select: ["id"]
    )
  end
end

# Define validation rules
validators = {
  "name" => {
    required: true,
    max_length: 255
  },
  "status" => {
    required: true,
    allowed_values: ["Working on it", "Done", "Stuck", "Not Started"]
  },
  "amount" => {
    type: :number
  },
  "due_date" => {
    type: :date
  }
}

# Usage
client = Monday::Client.new(token: ENV['MONDAY_API_TOKEN'])
importer = ValidatingImporter.new(client, 123456789, validators)

data = JSON.parse(File.read('data/tasks.json'))
results = importer.import(data)

if results[:skipped_due_to_validation]
  puts "Validation failed. Errors:"
  results[:errors].each do |error|
    puts "Record #{error[:index]}: #{error[:errors].join(', ')}"
  end
else
  puts "Import complete: #{results[:success]} success, #{results[:failed]} failed"
end
```

### Validation with Auto-Correction

Automatically fix common data issues:

```ruby
class AutoCorrectingImporter
  def initialize(client, board_id)
    @client = client
    @board_id = board_id
  end

  def import(data)
    results = { success: 0, failed: 0, corrected: 0, errors: [] }

    data.each do |record|
      begin
        corrected_record = auto_correct(record)
        results[:corrected] += 1 if corrected_record[:was_corrected]

        create_item(corrected_record[:data])
        results[:success] += 1
      rescue => e
        results[:failed] += 1
        results[:errors] << { record: record, error: e.message }
      end
    end

    results
  end

  private

  def auto_correct(record)
    was_corrected = false
    corrected = record.dup

    # Trim whitespace from all string values
    corrected.transform_values! do |value|
      if value.is_a?(String)
        trimmed = value.strip
        was_corrected = true if trimmed != value
        trimmed
      else
        value
      end
    end

    # Normalize status values
    if corrected["status"]
      normalized_status = normalize_status(corrected["status"])
      if normalized_status != corrected["status"]
        corrected["status"] = normalized_status
        was_corrected = true
      end
    end

    # Convert amount to proper format
    if corrected["amount"] && corrected["amount"].is_a?(String)
      # Remove currency symbols and commas
      cleaned_amount = corrected["amount"].gsub(/[$,]/, '')
      corrected["amount"] = cleaned_amount.to_f
      was_corrected = true
    end

    # Normalize date format
    if corrected["due_date"] && corrected["due_date"] =~ %r{^(\d{1,2})/(\d{1,2})/(\d{4})$}
      # Convert MM/DD/YYYY to YYYY-MM-DD
      month, day, year = $1, $2, $3
      corrected["due_date"] = "#{year}-#{month.rjust(2, '0')}-#{day.rjust(2, '0')}"
      was_corrected = true
    end

    { data: corrected, was_corrected: was_corrected }
  end

  def normalize_status(status)
    status_map = {
      "in progress" => "Working on it",
      "working" => "Working on it",
      "complete" => "Done",
      "completed" => "Done",
      "finished" => "Done",
      "blocked" => "Stuck",
      "waiting" => "Stuck"
    }

    status_map[status.downcase] || status
  end

  def create_item(record)
    column_values = {
      "status" => { "label" => record["status"] },
      "text" => record["description"],
      "numbers" => record["amount"],
      "date" => record["due_date"]
    }

    @client.item.create(
      args: {
        board_id: @board_id,
        item_name: record["name"],
        column_values: JSON.generate(column_values)
      },
      select: ["id"]
    )
  end
end

# Usage
client = Monday::Client.new(token: ENV['MONDAY_API_TOKEN'])
importer = AutoCorrectingImporter.new(client, 123456789)

data = JSON.parse(File.read('data/messy_tasks.json'))
results = importer.import(data)

puts "Imported: #{results[:success]}, Corrected: #{results[:corrected]}, Failed: #{results[:failed]}"
```

## Error Handling and Recovery

### Checkpoint-Based Import

Save progress and resume from last checkpoint on failure:

```ruby
require 'json'
require 'time' # for Time#iso8601

class CheckpointImporter
  def initialize(client, board_id, checkpoint_file)
    @client = client
    @board_id = board_id
    @checkpoint_file = checkpoint_file
  end

  def import(data)
    checkpoint = load_checkpoint
    start_index = checkpoint[:last_successful_index] + 1

    puts "Resuming from index #{start_index} (#{checkpoint[:success]} already imported)"

    results = {
      success: checkpoint[:success],
      failed: checkpoint[:failed],
      errors: checkpoint[:errors]
    }

    data[start_index..-1].each_with_index do |record, offset|
      current_index = start_index + offset

      begin
        create_item(record)
        results[:success] += 1
        save_checkpoint(current_index, results)
      rescue => e
        results[:failed] += 1
        results[:errors] << {
          index: current_index,
          record: record,
          error: e.message
        }
        save_checkpoint(current_index, results)
      end
    end

    # Clear checkpoint on successful completion
    File.delete(@checkpoint_file) if File.exist?(@checkpoint_file)

    results
  end

  def reset_checkpoint
    File.delete(@checkpoint_file) if File.exist?(@checkpoint_file)
    puts "Checkpoint cleared"
  end

  private

  def load_checkpoint
    if File.exist?(@checkpoint_file)
      JSON.parse(File.read(@checkpoint_file), symbolize_names: true)
    else
      {
        last_successful_index: -1,
        success: 0,
        failed: 0,
        errors: []
      }
    end
  end

  def save_checkpoint(index, results)
    checkpoint = {
      last_successful_index: index,
      success: results[:success],
      failed: results[:failed],
      errors: results[:errors],
      updated_at: Time.now.iso8601
    }

    File.write(@checkpoint_file, JSON.pretty_generate(checkpoint))
  end

  def create_item(record)
    column_values = {
      "status" => { "label" => record["status"] },
      "text" => record["description"]
    }

    @client.item.create(
      args: {
        board_id: @board_id,
        item_name: record["name"],
        column_values: JSON.generate(column_values)
      },
      select: ["id"]
    )
  end
end

# Usage
client = Monday::Client.new(token: ENV['MONDAY_API_TOKEN'])
importer = CheckpointImporter.new(client, 123456789, 'import_checkpoint.json')

data = JSON.parse(File.read('data/large_dataset.json'))

# First run (might fail midway)
begin
  results = importer.import(data)
  puts "Complete: #{results[:success]} success, #{results[:failed]} failed"
rescue Interrupt
  puts "\nImport interrupted. Run again to resume from checkpoint."
end

# Resume from checkpoint
results = importer.import(data)
puts "Complete: #{results[:success]} success, #{results[:failed]} failed"

# Reset checkpoint if you want to start fresh
# importer.reset_checkpoint
```

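`File.write` is not atomic, so an interrupt mid-write could leave a corrupt checkpoint behind. A common hardening, sketched here, is to write to a temporary file and rename it into place:

```ruby
# Atomic-ish checkpoint save: a rename within the same directory replaces
# the old file in one step, so a resume never reads half-written JSON.
def save_checkpoint(index, results)
  checkpoint = {
    last_successful_index: index,
    success: results[:success],
    failed: results[:failed],
    errors: results[:errors],
    updated_at: Time.now.iso8601
  }

  tmp_file = "#{@checkpoint_file}.tmp"
  File.write(tmp_file, JSON.pretty_generate(checkpoint))
  File.rename(tmp_file, @checkpoint_file)
end
```
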
### Retry with Exponential Backoff

Retry failed items with exponential backoff:

```ruby
class RetryingImporter
  MAX_RETRIES = 3
  INITIAL_RETRY_DELAY = 2 # seconds

  def initialize(client, board_id)
    @client = client
    @board_id = board_id
  end

  def import(data)
    results = { success: 0, failed: 0, errors: [] }

    data.each do |record|
      success = import_with_retry(record, results)
      results[:success] += 1 if success
    end

    results
  end

  private

  def import_with_retry(record, results)
    retries = 0

    begin
      create_item(record)
      true
    rescue => e
      retries += 1

      if retries <= MAX_RETRIES
        delay = INITIAL_RETRY_DELAY * (2 ** (retries - 1))
        puts "Error importing '#{record["name"]}': #{e.message}. Retrying in #{delay}s... (#{retries}/#{MAX_RETRIES})"
        sleep(delay)
        retry
      else
        results[:failed] += 1
        results[:errors] << {
          record: record,
          error: e.message,
          retries: retries
        }
        false
      end
    end
  end

  def create_item(record)
    column_values = {
      "status" => { "label" => record["status"] },
      "text" => record["description"]
    }

    @client.item.create(
      args: {
        board_id: @board_id,
        item_name: record["name"],
        column_values: JSON.generate(column_values)
      },
      select: ["id"]
    )
  end
end

# Usage
client = Monday::Client.new(token: ENV['MONDAY_API_TOKEN'])
importer = RetryingImporter.new(client, 123456789)

data = JSON.parse(File.read('data/tasks.json'))
results = importer.import(data)
```

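The bare `rescue` above retries every failure, including ones that will never succeed (for example, a `NoMethodError` from a malformed record). Since this release ships its own error classes (`lib/monday/error.rb` in the file list above), a sketch that retries only errors raised by the gem:

```ruby
# Retry only the gem's errors; let programming errors surface immediately.
# Monday::Error is assumed here to be the gem's base error class.
begin
  create_item(record)
rescue Monday::Error => e
  # transient API failure: apply the backoff-and-retry logic shown above
end
```
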
### Transaction-Like Rollback

Track created items and delete them if import fails:

```ruby
class TransactionalImporter
  def initialize(client, board_id)
    @client = client
    @board_id = board_id
  end

  def import(data, rollback_on_failure: true)
    created_items = []
    results = { success: 0, failed: 0, errors: [], rolled_back: false }

    begin
      data.each do |record|
        item_id = create_item(record)
        created_items << item_id
        results[:success] += 1
      end
    rescue => e
      results[:failed] += 1
      results[:errors] << { error: e.message }

      if rollback_on_failure
        rollback(created_items)
        results[:rolled_back] = true
        results[:success] = 0
      end
    end

    results
  end

  private

  def create_item(record)
    column_values = {
      "status" => { "label" => record["status"] },
      "text" => record["description"]
    }

    response = @client.item.create(
      args: {
        board_id: @board_id,
        item_name: record["name"],
        column_values: JSON.generate(column_values)
      },
      select: ["id"]
    )

    # Response#body is the parsed GraphQL payload
    response.body.dig("data", "create_item", "id")
  end

  def rollback(item_ids)
    puts "Rolling back #{item_ids.count} items..."

    item_ids.each do |item_id|
      begin
        @client.item.delete(args: { item_id: item_id })
      rescue => e
        puts "Warning: Failed to delete item #{item_id}: #{e.message}"
      end
    end

    puts "Rollback complete"
  end
end

# Usage
client = Monday::Client.new(token: ENV['MONDAY_API_TOKEN'])
importer = TransactionalImporter.new(client, 123456789)

data = JSON.parse(File.read('data/tasks.json'))
results = importer.import(data, rollback_on_failure: true)

if results[:rolled_back]
  puts "Import failed and was rolled back"
else
  puts "Import complete: #{results[:success]} items created"
end
```

|
|
1364
|
+
## Complete Import Tool
|
|
1365
|
+
|
|
1366
|
+
### Full-Featured CLI Import Script
|
|
1367
|
+
|
|
1368
|
+
A production-ready import tool with all features combined:
|
|
1369
|
+
|
|
1370
|
+
```ruby
|
|
1371
|
+
#!/usr/bin/env ruby
|
|
1372
|
+
|
|
1373
|
+
require 'monday_ruby'
|
|
1374
|
+
require 'csv'
|
|
1375
|
+
require 'json'
|
|
1376
|
+
require 'optparse'
|
|
1377
|
+
|
|
1378
|
+
class DataImportTool
|
|
1379
|
+
SUPPORTED_FORMATS = ['csv', 'json']
|
|
1380
|
+
BATCH_SIZE = 50
|
|
1381
|
+
|
|
1382
|
+
def initialize(client, options)
|
|
1383
|
+
@client = client
|
|
1384
|
+
@board_id = options[:board_id]
|
|
1385
|
+
@source_file = options[:source_file]
|
|
1386
|
+
@format = options[:format]
|
|
1387
|
+
@dry_run = options[:dry_run]
|
|
1388
|
+
@checkpoint_file = options[:checkpoint_file]
|
|
1389
|
+
@column_mapping = options[:column_mapping]
|
|
1390
|
+
@validate = options[:validate]
|
|
1391
|
+
end
|
|
1392
|
+
|
|
1393
|
+
def run
|
|
1394
|
+
puts "=" * 60
|
|
1395
|
+
puts "Data Import Tool"
|
|
1396
|
+
puts "=" * 60
|
|
1397
|
+
puts "Board ID: #{@board_id}"
|
|
1398
|
+
puts "Source: #{@source_file}"
|
|
1399
|
+
puts "Format: #{@format}"
|
|
1400
|
+
puts "Dry run: #{@dry_run ? 'Yes' : 'No'}"
|
|
1401
|
+
puts "Validation: #{@validate ? 'Enabled' : 'Disabled'}"
|
|
1402
|
+
puts "=" * 60
|
|
1403
|
+
puts
|
|
1404
|
+
|
|
1405
|
+
# Load data
|
|
1406
|
+
data = load_data
|
|
1407
|
+
puts "Loaded #{data.count} records"
|
|
1408
|
+
|
|
1409
|
+
# Validate
|
|
1410
|
+
if @validate
|
|
1411
|
+
validation_results = validate_data(data)
|
|
1412
|
+
|
|
1413
|
+
if validation_results[:invalid].any?
|
|
1414
|
+
puts "\nValidation failed for #{validation_results[:invalid].count} records:"
|
|
1415
|
+
validation_results[:invalid].first(5).each do |error|
|
|
1416
|
+
puts " - Record #{error[:index]}: #{error[:errors].join(', ')}"
|
|
1417
|
+
end
|
|
1418
|
+
puts " ... and #{validation_results[:invalid].count - 5} more" if validation_results[:invalid].count > 5
|
|
1419
|
+
|
|
1420
|
+
print "\nContinue with valid records only? (y/n): "
|
|
1421
|
+
return unless gets.chomp.downcase == 'y'
|
|
1422
|
+
|
|
1423
|
+
data = validation_results[:valid]
|
|
1424
|
+
puts "Proceeding with #{data.count} valid records"
|
|
1425
|
+
else
|
|
1426
|
+
puts "All records passed validation"
|
|
1427
|
+
end
|
|
1428
|
+
end
|
|
1429
|
+
|
|
1430
|
+
# Dry run preview
|
|
1431
|
+
if @dry_run
|
|
1432
|
+
preview_import(data)
|
|
1433
|
+
return
|
|
1434
|
+
end
|
|
1435
|
+
|
|
1436
|
+
# Actual import
|
|
1437
|
+
results = import_data(data)
|
|
1438
|
+
print_results(results)
|
|
1439
|
+
end
|
|
1440
|
+
|
|
1441
|
+
private
|
|
1442
|
+
|
|
1443
|
+
def load_data
|
|
1444
|
+
case @format
|
|
1445
|
+
when 'csv'
|
|
1446
|
+
load_csv
|
|
1447
|
+
when 'json'
|
|
1448
|
+
load_json
|
|
1449
|
+
else
|
|
1450
|
+
raise "Unsupported format: #{@format}"
|
|
1451
|
+
end
|
|
1452
|
+
end
|
|
1453
|
+
|
|
1454
|
+
def load_csv
|
|
1455
|
+
data = []
|
|
1456
|
+
CSV.foreach(@source_file, headers: true) do |row|
|
|
1457
|
+
data << row.to_h
|
|
1458
|
+
end
|
|
1459
|
+
data
|
|
1460
|
+
end
|
|
1461
|
+
|
|
1462
|
+
def load_json
|
|
1463
|
+
JSON.parse(File.read(@source_file))
|
|
1464
|
+
end
|
|
1465
|
+
|
|
1466
|
+
def validate_data(data)
|
|
1467
|
+
validators = {
|
|
1468
|
+
"name" => { required: true, max_length: 255 },
|
|
1469
|
+
"status" => { allowed_values: ["Working on it", "Done", "Stuck", "Not Started"] }
|
|
1470
|
+
}
|
|
1471
|
+
|
|
1472
|
+
results = { valid: [], invalid: [] }
|
|
1473
|
+
|
|
1474
|
+
data.each_with_index do |record, index|
|
|
1475
|
+
errors = []
|
|
1476
|
+
|
|
1477
|
+
validators.each do |field, rules|
|
|
1478
|
+
value = record[field]
|
|
1479
|
+
|
|
1480
|
+
if rules[:required] && (value.nil? || value.to_s.strip.empty?)
|
|
1481
|
+
errors << "#{field} is required"
|
|
1482
|
+
end
|
|
1483
|
+
|
|
1484
|
+
if rules[:max_length] && value && value.to_s.length > rules[:max_length]
|
|
1485
|
+
errors << "#{field} exceeds maximum length"
|
|
1486
|
+
end
|
|
1487
|
+
|
|
1488
|
+
if rules[:allowed_values] && value && !rules[:allowed_values].include?(value)
|
|
1489
|
+
errors << "#{field} has invalid value"
|
|
1490
|
+
end
|
|
1491
|
+
end
|
|
1492
|
+
|
|
1493
|
+
if errors.empty?
|
|
1494
|
+
results[:valid] << record
|
|
1495
|
+
else
|
|
1496
|
+
results[:invalid] << { index: index, record: record, errors: errors }
|
|
1497
|
+
end
|
|
1498
|
+
end
|
|
1499
|
+
|
|
1500
|
+
results
|
|
1501
|
+
end
|
|
1502
|
+
|
|
1503
|
+
def preview_import(data)
|
|
1504
|
+
puts "\n" + "=" * 60
|
|
1505
|
+
puts "DRY RUN - Preview of first 5 records:"
|
|
1506
|
+
puts "=" * 60
|
|
1507
|
+
|
|
1508
|
+
data.first(5).each_with_index do |record, index|
|
|
1509
|
+
puts "\nRecord #{index + 1}:"
|
|
1510
|
+
puts " Name: #{record['name']}"
|
|
1511
|
+
puts " Status: #{record['status']}"
|
|
1512
|
+
puts " Description: #{record['description']}"
|
|
1513
|
+
puts " Amount: #{record['amount']}"
|
|
1514
|
+
puts " Due Date: #{record['due_date']}"
|
|
1515
|
+
end
|
|
1516
|
+
|
|
1517
|
+
puts "\n... and #{data.count - 5} more records" if data.count > 5
|
|
1518
|
+
puts "\nNo items were created (dry run mode)"
|
|
1519
|
+
end
|
|
1520
|
+
|
|
1521
|
+
def import_data(data)
|
|
1522
|
+
checkpoint = load_checkpoint
|
|
1523
|
+
start_index = checkpoint[:last_successful_index] + 1
|
|
1524
|
+
|
|
1525
|
+
if start_index > 0
|
|
1526
|
+
puts "Resuming from checkpoint (#{checkpoint[:success]} already imported)"
|
|
1527
|
+
end
|
|
1528
|
+
|
|
1529
|
+
results = {
|
|
1530
|
+
success: checkpoint[:success],
|
|
1531
|
+
failed: checkpoint[:failed],
|
|
1532
|
+
errors: checkpoint[:errors]
|
|
1533
|
+
}
|
|
1534
|
+
|
|
1535
|
+
total = data.count
|
|
1536
|
+
|
|
1537
|
+
data[start_index..-1].each_with_index do |record, offset|
|
|
1538
|
+
current_index = start_index + offset
|
|
1539
|
+
|
|
1540
|
+
begin
|
|
1541
|
+
create_item(record)
|
|
1542
|
+
results[:success] += 1
|
|
1543
|
+
save_checkpoint(current_index, results)
|
|
1544
|
+
|
|
1545
|
+
# Progress update
|
|
1546
|
+
if (current_index + 1) % 10 == 0 || current_index == total - 1
|
|
1547
|
+
percentage = ((current_index + 1).to_f / total * 100).round(2)
|
|
1548
|
+
puts "Progress: #{current_index + 1}/#{total} (#{percentage}%)"
|
|
1549
|
+
end
|
|
1550
|
+
|
|
1551
|
+
# Rate limiting
|
|
1552
|
+
sleep(0.5) if (current_index + 1) % BATCH_SIZE == 0
|
|
1553
|
+
rescue => e
|
|
1554
|
+
results[:failed] += 1
|
|
1555
|
+
results[:errors] << {
|
|
1556
|
+
index: current_index,
|
|
1557
|
+
record: record,
|
|
1558
|
+
error: e.message
|
|
1559
|
+
}
|
|
1560
|
+
save_checkpoint(current_index, results)
|
|
1561
|
+
|
|
1562
|
+
puts "Error on record #{current_index + 1}: #{e.message}"
|
|
1563
|
+
end
|
|
1564
|
+
end
|
|
1565
|
+
|
|
1566
|
+
# Clear checkpoint on completion
|
|
1567
|
+
File.delete(@checkpoint_file) if File.exist?(@checkpoint_file)
|
|
1568
|
+
|
|
1569
|
+
results
|
|
1570
|
+
end
|
|
1571
|
+
|
|
1572
|
+
def create_item(record)
|
|
1573
|
+
# Apply column mapping if provided
|
|
1574
|
+
mapped_record = @column_mapping ? apply_mapping(record) : record
|
|
1575
|
+
|
|
1576
|
+
column_values = {
|
|
1577
|
+
"status" => { "label" => mapped_record["status"] },
|
|
1578
|
+
"text" => mapped_record["description"],
|
|
1579
|
+
"numbers" => mapped_record["amount"]&.to_f,
|
|
1580
|
+
"date" => mapped_record["due_date"]
|
|
1581
|
+
}
|
|
1582
|
+
|
|
1583
|
+
# Remove nil values
|
|
1584
|
+
column_values.reject! { |_, v| v.nil? }
|
|
1585
|
+
|
|
1586
|
+
@client.item.create(
|
|
1587
|
+
args: {
|
|
1588
|
+
board_id: @board_id,
|
|
1589
|
+
item_name: mapped_record["name"],
|
|
1590
|
+
column_values: JSON.generate(column_values)
|
|
1591
|
+
},
|
|
1592
|
+
select: ["id"]
|
|
1593
|
+
)
|
|
1594
|
+
end
|
|
1595
|
+
|
|
1596
|
+
def apply_mapping(record)
|
|
1597
|
+
mapped = {}
|
|
1598
|
+
@column_mapping.each do |target, source|
|
|
1599
|
+
mapped[target] = record[source]
|
|
1600
|
+
end
|
|
1601
|
+
mapped
|
|
1602
|
+
end
|
|
1603
|
+
|
|
1604
|
+
def load_checkpoint
|
|
1605
|
+
if @checkpoint_file && File.exist?(@checkpoint_file)
|
|
1606
|
+
JSON.parse(File.read(@checkpoint_file), symbolize_names: true)
|
|
1607
|
+
else
|
|
1608
|
+
{ last_successful_index: -1, success: 0, failed: 0, errors: [] }
|
|
1609
|
+
end
|
|
1610
|
+
end
|
|
1611
|
+
|
|
1612
|
+
def save_checkpoint(index, results)
|
|
1613
|
+
return unless @checkpoint_file
|
|
1614
|
+
|
|
1615
|
+
checkpoint = {
|
|
1616
|
+
last_successful_index: index,
|
|
1617
|
+
success: results[:success],
|
|
1618
|
+
failed: results[:failed],
|
|
1619
|
+
errors: results[:errors],
|
|
1620
|
+
updated_at: Time.now.iso8601
|
|
1621
|
+
}
|
|
1622
|
+
|
|
1623
|
+
File.write(@checkpoint_file, JSON.pretty_generate(checkpoint))
|
|
1624
|
+
end
|
|
1625
|
+
|
|
1626
|
+
def print_results(results)
|
|
1627
|
+
puts "\n" + "=" * 60
|
|
1628
|
+
puts "Import Complete"
|
|
1629
|
+
puts "=" * 60
|
|
1630
|
+
puts "Successfully imported: #{results[:success]}"
|
|
1631
|
+
puts "Failed: #{results[:failed]}"
|
|
1632
|
+
|
|
1633
|
+
if results[:errors].any?
|
|
1634
|
+
puts "\nErrors:"
|
|
1635
|
+
results[:errors].first(10).each do |error|
|
|
1636
|
+
puts " - Record #{error[:index]}: #{error[:error]}"
|
|
1637
|
+
end
|
|
1638
|
+
puts " ... and #{results[:errors].count - 10} more errors" if results[:errors].count > 10
|
|
1639
|
+
|
|
1640
|
+
# Save error report
|
|
1641
|
+
error_file = "import_errors_#{Time.now.strftime('%Y%m%d_%H%M%S')}.json"
|
|
1642
|
+
File.write(error_file, JSON.pretty_generate(results[:errors]))
|
|
1643
|
+
puts "\nFull error report saved to: #{error_file}"
|
|
1644
|
+
end
|
|
1645
|
+
|
|
1646
|
+
puts "=" * 60
|
|
1647
|
+
end
|
|
1648
|
+
end
|
|
1649
|
+
|
|
1650
|
+
# CLI Interface
|
|
1651
|
+
options = {
|
|
1652
|
+
checkpoint_file: 'import_checkpoint.json',
|
|
1653
|
+
validate: true
|
|
1654
|
+
}
|
|
1655
|
+
|
|
1656
|
+
OptionParser.new do |opts|
|
|
1657
|
+
opts.banner = "Usage: import_tool.rb [options]"
|
|
1658
|
+
|
|
1659
|
+
opts.on("-b", "--board-id ID", Integer, "Monday.com board ID (required)") do |v|
|
|
1660
|
+
options[:board_id] = v
|
|
1661
|
+
end
|
|
1662
|
+
|
|
1663
|
+
opts.on("-f", "--file PATH", "Path to source file (required)") do |v|
|
|
1664
|
+
options[:source_file] = v
|
|
1665
|
+
end
|
|
1666
|
+
|
|
1667
|
+
opts.on("-t", "--format FORMAT", DataImportTool::SUPPORTED_FORMATS, "File format: #{DataImportTool::SUPPORTED_FORMATS.join(', ')} (required)") do |v|
|
|
1668
|
+
options[:format] = v
|
|
1669
|
+
end
|
|
1670
|
+
|
|
1671
|
+
opts.on("-d", "--dry-run", "Preview import without creating items") do
|
|
1672
|
+
options[:dry_run] = true
|
|
1673
|
+
end
|
|
1674
|
+
|
|
1675
|
+
opts.on("-c", "--checkpoint FILE", "Checkpoint file path (default: import_checkpoint.json)") do |v|
|
|
1676
|
+
options[:checkpoint_file] = v
|
|
1677
|
+
end
|
|
1678
|
+
|
|
1679
|
+
opts.on("--no-validate", "Skip data validation") do
|
|
1680
|
+
options[:validate] = false
|
|
1681
|
+
end
|
|
1682
|
+
|
|
1683
|
+
opts.on("-h", "--help", "Show this help message") do
|
|
1684
|
+
puts opts
|
|
1685
|
+
exit
|
|
1686
|
+
end
|
|
1687
|
+
end.parse!
|
|
1688
|
+
|
|
1689
|
+
# Validate required options
|
|
1690
|
+
required = [:board_id, :source_file, :format]
|
|
1691
|
+
missing = required.select { |opt| options[opt].nil? }
|
|
1692
|
+
|
|
1693
|
+
if missing.any?
|
|
1694
|
+
puts "Error: Missing required options: #{missing.join(', ')}"
|
|
1695
|
+
puts "Run with --help for usage information"
|
|
1696
|
+
exit 1
|
|
1697
|
+
end
|
|
1698
|
+
|
|
1699
|
+
# Validate file exists
|
|
1700
|
+
unless File.exist?(options[:source_file])
|
|
1701
|
+
puts "Error: File not found: #{options[:source_file]}"
|
|
1702
|
+
exit 1
|
|
1703
|
+
end
|
|
1704
|
+
|
|
1705
|
+
# Run import
|
|
1706
|
+
begin
|
|
1707
|
+
client = Monday::Client.new(token: ENV['MONDAY_API_TOKEN'])
|
|
1708
|
+
tool = DataImportTool.new(client, options)
|
|
1709
|
+
tool.run
|
|
1710
|
+
rescue => e
|
|
1711
|
+
puts "Fatal error: #{e.message}"
|
|
1712
|
+
puts e.backtrace.first(5)
|
|
1713
|
+
exit 1
|
|
1714
|
+
end
|
|
1715
|
+
```

**Usage examples:**

```bash
# Dry run to preview import
ruby import_tool.rb \
  --board-id 123456789 \
  --file data/tasks.csv \
  --format csv \
  --dry-run

# Actual import with validation
ruby import_tool.rb \
  --board-id 123456789 \
  --file data/tasks.json \
  --format json

# Import without validation (faster)
ruby import_tool.rb \
  --board-id 123456789 \
  --file data/large_dataset.csv \
  --format csv \
  --no-validate

# Resume from checkpoint after interruption
ruby import_tool.rb \
  --board-id 123456789 \
  --file data/tasks.csv \
  --format csv \
  --checkpoint my_checkpoint.json
```
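
All of these commands assume the API token is available in the environment, since the script reads `ENV['MONDAY_API_TOKEN']`; export it first (the value shown is a placeholder):

```bash
export MONDAY_API_TOKEN="your-api-token"
```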

## Best Practices

### 1. Always Use Batching for Large Imports

Process records in batches to avoid memory issues and respect rate limits:

```ruby
# Bad: Load everything into memory
all_items = CSV.read('huge_file.csv')
all_items.each { |item| create_item(item) }

# Good: Process in batches
CSV.foreach('huge_file.csv', headers: true).each_slice(50) do |batch|
  batch.each { |item| create_item(item) }
  sleep(1) # Rate limiting
end
```

### 2. Implement Progress Tracking

For long-running imports, provide progress feedback:

```ruby
total = data.count
data.each_with_index do |record, index|
  create_item(record)

  if (index + 1) % 10 == 0
    percentage = ((index + 1).to_f / total * 100).round(2)
    puts "Progress: #{index + 1}/#{total} (#{percentage}%)"
  end
end
```

### 3. Validate Before Importing

Catch data issues before making API calls:

```ruby
# Validate all records first
invalid = data.select { |r| r['name'].nil? || r['name'].strip.empty? }

if invalid.any?
  puts "Found #{invalid.count} invalid records"
  return
end

# Then import
data.each { |record| create_item(record) }
```

### 4. Use Checkpoints for Resumability

Save progress regularly so imports can resume after failures:

```ruby
data.each_with_index do |record, index|
  create_item(record)
  save_checkpoint(index) if index % 10 == 0
end
```
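
The other half of resumability is reading the checkpoint back on startup. A minimal resume sketch, mirroring the `load_checkpoint`/`save_checkpoint` pair in the full tool above (with `import_checkpoint.json` as the assumed path):

```ruby
# Resume: read the last successful index and skip records already imported
checkpoint_file = 'import_checkpoint.json'
last_index =
  if File.exist?(checkpoint_file)
    JSON.parse(File.read(checkpoint_file), symbolize_names: true)[:last_successful_index]
  else
    -1
  end

data[(last_index + 1)..-1].to_a.each_with_index do |record, offset|
  index = last_index + 1 + offset
  create_item(record)
  save_checkpoint(index) if index % 10 == 0
end
```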

### 5. Handle Rate Limits

Add delays between batches to avoid hitting API rate limits:

```ruby
data.each_slice(50).with_index do |batch, batch_index|
  batch.each { |record| create_item(record) }
  sleep(1) if batch_index > 0 # Wait between batches
end
```

### 6. Log Errors Without Stopping

Continue processing even when individual items fail:

```ruby
errors = []

data.each do |record|
  begin
    create_item(record)
  rescue => e
    errors << { record: record, error: e.message }
    # Continue with next record
  end
end

# Report errors at the end
File.write('errors.json', JSON.pretty_generate(errors)) if errors.any?
```

### 7. Use Dry Run Mode for Testing

Always test imports with a dry run first:

```ruby
def import(data, dry_run: false)
  data.each do |record|
    if dry_run
      puts "Would create: #{record['name']}"
    else
      create_item(record)
    end
  end
end

# Test first
import(data, dry_run: true)

# Then run for real
import(data)
```

## Troubleshooting

### Import is Too Slow

**Problem:** Import takes too long for large datasets.

**Solutions:**

1. Increase batch size (but watch rate limits)
2. Reduce the `select` fields to the minimum required
3. Remove unnecessary sleeps between items
4. Consider parallel processing for independent items (see the sketch below)

```ruby
# Optimize by selecting only the ID
@client.item.create(
  args: { board_id: @board_id, item_name: name },
  select: ["id"] # Don't request unnecessary fields
)
```
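
For solution 4, a minimal sketch of thread-based parallelism. It assumes records are independent and that concurrent calls through the client are safe in your setup; both are assumptions to verify, and `THREAD_COUNT` is a hypothetical knob to keep well under the API's rate limits:

```ruby
THREAD_COUNT = 4 # hypothetical; tune against your rate-limit budget

queue = Queue.new
data.each { |record| queue << record }

workers = Array.new(THREAD_COUNT) do
  Thread.new do
    loop do
      record = begin
        queue.pop(true) # non-blocking pop; raises ThreadError once drained
      rescue ThreadError
        break
      end
      create_item(record)
    end
  end
end

workers.each(&:join)
```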

### Rate Limit Errors

**Problem:** Getting rate limit errors from the monday.com API.

**Solutions:**

1. Add delays between batches
2. Reduce batch size
3. Implement exponential backoff on rate limit errors (see the sketch after the example below)

```ruby
begin
  create_item(record)
rescue Monday::RateLimitError => e
  wait_time = e.retry_after || 60
  puts "Rate limited. Waiting #{wait_time} seconds..."
  sleep(wait_time)
  retry
end
```
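
When the error carries no `retry_after` hint, exponential backoff (solution 3) is a reasonable fallback. A sketch using the same `Monday::RateLimitError` class, with a cap on retries so a persistent failure eventually surfaces:

```ruby
attempts = 0
begin
  create_item(record)
rescue Monday::RateLimitError
  attempts += 1
  raise if attempts > 5 # give up after five retries
  sleep(2**attempts)    # back off: 2, 4, 8, 16, 32 seconds
  retry
end
```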

### Invalid Column Values

**Problem:** Items are created but column values are not set.

**Solutions:**

1. Verify column IDs match your board
2. Check that the column value format matches the column type
3. Ensure values are properly JSON-encoded

```ruby
# Check board structure first
response = @client.board.query(
  args: { ids: [@board_id] },
  select: ["columns { id title type }"]
)

columns = response.dig("data", "boards", 0, "columns")
puts "Available columns:"
columns.each do |col|
  puts " #{col['id']} (#{col['type']}): #{col['title']}"
end
```
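
Once the column IDs are confirmed, make sure each value uses the shape its column type expects. A sketch of common shapes, matching what `create_item` above sends; the column IDs here are illustrative, so substitute the IDs reported by the board query (see also the Column Values Reference under Related Resources):

```ruby
# Common column value shapes by column type
column_values = {
  "status"  => { "label" => "Done" },      # status: a label defined on the board
  "date"    => { "date" => "2024-06-01" }, # date: ISO 8601 date string
  "text"    => "Plain text value",         # text: bare string
  "numbers" => 42.5                        # numbers: numeric value
}

column_values_json = JSON.generate(column_values) # the API expects a JSON string
```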

### Memory Issues with Large Files

**Problem:** Running out of memory with large CSV/JSON files.

**Solutions:**

1. Use streaming parsers (`CSV.foreach` instead of `CSV.read`)
2. Process in batches
3. Don't store all results in memory

```ruby
# Bad: Loads entire file into memory
data = JSON.parse(File.read('huge_file.json'))

# Good: stream one record per line. Note that JSON.parse/JSON.load always
# read the whole document, so true streaming needs a line-delimited format
# (JSON Lines) -- or use CSV.foreach for CSV sources.
File.foreach('huge_file.jsonl').each_slice(100) do |batch|
  batch.each { |line| create_item(JSON.parse(line)) }
end
```

## Next Steps

- Read [Error Handling Guide](/guides/advanced/errors) for robust applications
- Check [Performance Optimization](/explanation/best-practices/performance) for scaling imports
- Learn about [Rate Limiting Strategy](/explanation/best-practices/rate-limiting) for large imports
- Explore [Batch Operations](/guides/advanced/batch) for bulk data processing

## Related Resources

- [monday.com API Rate Limits](https://developer.monday.com/api-reference/docs/rate-limits)
- [Column Values Reference](https://developer.monday.com/api-reference/docs/column-values)
- [Items API Documentation](https://developer.monday.com/api-reference/docs/items)