io-complyance-unify-sdk 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. checksums.yaml +7 -0
  2. data/CHANGELOG.md +26 -0
  3. data/README.md +595 -0
  4. data/lib/complyance/circuit_breaker.rb +99 -0
  5. data/lib/complyance/persistent_queue_manager.rb +474 -0
  6. data/lib/complyance/retry_strategy.rb +198 -0
  7. data/lib/complyance_sdk/config/retry_config.rb +127 -0
  8. data/lib/complyance_sdk/config/sdk_config.rb +212 -0
  9. data/lib/complyance_sdk/exceptions/circuit_breaker_open_error.rb +14 -0
  10. data/lib/complyance_sdk/exceptions/sdk_exception.rb +93 -0
  11. data/lib/complyance_sdk/generators/config_generator.rb +67 -0
  12. data/lib/complyance_sdk/generators/install_generator.rb +22 -0
  13. data/lib/complyance_sdk/generators/templates/complyance_initializer.rb +36 -0
  14. data/lib/complyance_sdk/http/authentication_middleware.rb +43 -0
  15. data/lib/complyance_sdk/http/client.rb +223 -0
  16. data/lib/complyance_sdk/http/logging_middleware.rb +153 -0
  17. data/lib/complyance_sdk/jobs/base_job.rb +63 -0
  18. data/lib/complyance_sdk/jobs/process_document_job.rb +92 -0
  19. data/lib/complyance_sdk/jobs/sidekiq_job.rb +165 -0
  20. data/lib/complyance_sdk/middleware/rack_middleware.rb +39 -0
  21. data/lib/complyance_sdk/models/country.rb +205 -0
  22. data/lib/complyance_sdk/models/country_policy_registry.rb +159 -0
  23. data/lib/complyance_sdk/models/document_type.rb +52 -0
  24. data/lib/complyance_sdk/models/environment.rb +144 -0
  25. data/lib/complyance_sdk/models/logical_doc_type.rb +228 -0
  26. data/lib/complyance_sdk/models/mode.rb +47 -0
  27. data/lib/complyance_sdk/models/operation.rb +47 -0
  28. data/lib/complyance_sdk/models/policy_result.rb +145 -0
  29. data/lib/complyance_sdk/models/purpose.rb +52 -0
  30. data/lib/complyance_sdk/models/source.rb +104 -0
  31. data/lib/complyance_sdk/models/source_ref.rb +130 -0
  32. data/lib/complyance_sdk/models/unify_request.rb +208 -0
  33. data/lib/complyance_sdk/models/unify_response.rb +198 -0
  34. data/lib/complyance_sdk/queue/persistent_queue_manager.rb +609 -0
  35. data/lib/complyance_sdk/railtie.rb +29 -0
  36. data/lib/complyance_sdk/retry/circuit_breaker.rb +159 -0
  37. data/lib/complyance_sdk/retry/retry_manager.rb +108 -0
  38. data/lib/complyance_sdk/retry/retry_strategy.rb +225 -0
  39. data/lib/complyance_sdk/version.rb +5 -0
  40. data/lib/complyance_sdk.rb +935 -0
  41. metadata +322 -0
@@ -0,0 +1,609 @@
+ # frozen_string_literal: true
+
+ require 'fileutils'
+ require 'json'
+ require 'logger'
+ require 'concurrent-ruby'
+
+ module ComplyanceSDK
+   module Queue
+     # Persistent queue manager for handling failed submissions with retry logic
+     # Ruby equivalent of the Java PersistentQueueManager
+     class PersistentQueueManager
+       QUEUE_DIR = 'complyance-queue'
+       PENDING_DIR = 'pending'
+       PROCESSING_DIR = 'processing'
+       FAILED_DIR = 'failed'
+       SUCCESS_DIR = 'success'
+
+       attr_reader :api_key, :local, :queue_base_path, :logger
+
+       # Initialize the persistent queue manager
+       #
+       # @param api_key [String] The API key
+       # @param local [Boolean] Whether running in local mode
+       # @param logger [Logger] Optional logger instance
+       def initialize(api_key, local = false, logger = nil)
+         @api_key = api_key
+         @local = local
+         @queue_base_path = File.join(Dir.home, QUEUE_DIR)
+         @logger = logger || Logger.new(STDOUT)
+         @processing_lock = Mutex.new
+         @running = false
+
+         initialize_queue_directories
+         @logger.info("PersistentQueueManager initialized with queue directory: #{@queue_base_path}")
+
+         # Automatically start processing and retry any existing failed submissions
+         start_processing
+         retry_failed_submissions
+       end
+
+       # Enqueue a failed submission for retry
+       #
+       # @param submission [Hash] The submission data
+       def enqueue(submission)
+         file_name = generate_file_name(submission)
+         file_path = File.join(@queue_base_path, PENDING_DIR, file_name)
+
+         # Check if file already exists (same document ID)
+         if File.exist?(file_path)
+           @logger.info("Document already exists in queue: #{file_name}. Skipping duplicate submission.")
+           return
+         end
+
+         # Extract the UnifyRequest JSON string from the submission
+         json_payload = submission[:payload] || submission['payload']
+
+         # Verify the payload is present and non-empty before using it
+         if json_payload.nil? || json_payload.strip.empty? || json_payload == '{}'
+           @logger.error("🔥 QUEUE: ERROR - Received empty or invalid payload: #{json_payload.inspect}")
+           raise RuntimeError, 'Cannot enqueue empty payload'
+         end
+
+         @logger.info("🔥 QUEUE: Received payload from submission with length: #{json_payload.length} characters")
+         @logger.info("🔥 QUEUE: Payload preview: #{json_payload[0..199]}")
+
+         # Parse the UnifyRequest JSON string to a proper JSON object
+         begin
+           unify_request_map = JSON.parse(json_payload)
+         rescue JSON::ParserError => e
+           @logger.error("🔥 QUEUE: Failed to parse JSON payload: #{e.message}")
+           raise RuntimeError, "Invalid JSON payload: #{e.message}"
+         end
+
+         # Create submission record with the parsed UnifyRequest as proper JSON object
+         source = submission[:source] || submission['source']
+         record = {
+           payload: unify_request_map, # Store as Hash instead of string
+           source_id: source[:id] || source['id'],
+           country: (submission[:country] || submission['country']).to_s,
+           document_type: (submission[:document_type] || submission['document_type']).to_s,
+           enqueued_at: Time.now.utc.strftime('%Y-%m-%dT%H:%M:%SZ'),
+           timestamp: Time.now.to_i
+         }
+
+         # Write to file
+         File.write(file_path, JSON.pretty_generate(record))
+
+         @logger.info("🔥 QUEUE: Stored record to file: #{file_name} with payload length: #{json_payload.length}")
+         @logger.info("Enqueued submission to persistent storage: #{file_name} for source: #{record[:source_id]}, country: #{record[:country]}")
+
+         # Start processing if not already running
+         start_processing
+       rescue => e
+         @logger.error("Failed to enqueue submission to persistent storage: #{e.message}")
+         raise RuntimeError, "Failed to persist submission: #{e.message}"
+       end
+
+       # Start processing pending submissions
+       def start_processing
+         return if @running
+
+         @running = true
+
+         # Start background thread for processing
+         @processing_thread = Thread.new do
+           loop do
+             break unless @running
+
+             begin
+               process_pending_submissions
+             rescue => e
+               @logger.error("Error in processing thread: #{e.message}")
+             end
+
+             sleep(30) # Process every 30 seconds
+           end
+         end
+
+         @logger.info('Started persistent queue processing')
+       end
+
+       # Stop processing pending submissions
+       def stop_processing
+         @running = false
+         @processing_thread&.join(5) # Wait up to 5 seconds for thread to finish
+         @logger.info('Stopped persistent queue processing')
+       end
+
+       # Manually trigger processing of pending submissions
+       def process_pending_submissions_now
+         @logger.info('Manually triggering processing of pending submissions')
+         process_pending_submissions
+       end
+
+       # Get queue status and statistics
+       #
+       # @return [Hash] Queue status information
+       def queue_status
+         {
+           pending_count: count_files_in_directory(PENDING_DIR),
+           processing_count: count_files_in_directory(PROCESSING_DIR),
+           failed_count: count_files_in_directory(FAILED_DIR),
+           success_count: count_files_in_directory(SUCCESS_DIR),
+           running: @running
+         }
+       end
+
+       # Retry failed submissions by moving them back to pending
+       def retry_failed_submissions
+         failed_dir = File.join(@queue_base_path, FAILED_DIR)
+         pending_dir = File.join(@queue_base_path, PENDING_DIR)
+
+         failed_files = Dir.glob(File.join(failed_dir, '*.json'))
+
+         if failed_files.empty?
+           @logger.info('No failed submissions to retry')
+           return
+         end
+
+         @logger.info("Retrying #{failed_files.length} failed submissions")
+
+         failed_files.each do |file_path|
+           file_name = File.basename(file_path)
+           pending_path = File.join(pending_dir, file_name)
+
+           FileUtils.mv(file_path, pending_path)
+           @logger.debug("Moved failed submission back to pending: #{file_name}")
+         end
+       rescue => e
+         @logger.error("Failed to retry failed submissions: #{e.message}")
+       end
+
+       # Clean up old success files
+       #
+       # @param days_to_keep [Integer] Number of days to keep success files
+       def cleanup_old_success_files(days_to_keep)
+         success_dir = File.join(@queue_base_path, SUCCESS_DIR)
+         cutoff_time = Time.now - (days_to_keep * 24 * 60 * 60)
+
+         old_files = Dir.glob(File.join(success_dir, '*.json')).select do |file_path|
+           File.mtime(file_path) < cutoff_time
+         end
+
+         old_files.each do |file_path|
+           File.delete(file_path)
+           @logger.debug("Cleaned up old success file: #{File.basename(file_path)}")
+         end
+
+         @logger.info("Cleaned up #{old_files.length} old success files") unless old_files.empty?
+       rescue => e
+         @logger.error("Failed to cleanup old success files: #{e.message}")
+       end
+
+       # Clear all files from the queue (emergency cleanup)
+       def clear_all_queues
+         @logger.info('Clearing all queue directories...')
+
+         [PENDING_DIR, PROCESSING_DIR, FAILED_DIR, SUCCESS_DIR].each do |dir_name|
+           clear_directory(dir_name)
+         end
+
+         @logger.info('All queue directories cleared successfully')
+       rescue => e
+         @logger.error("Error clearing queue directories: #{e.message}")
+         raise RuntimeError, "Failed to clear queues: #{e.message}"
+       end
+
+       # Clean up duplicate files across queue directories
+       def cleanup_duplicate_files
+         @logger.info('Cleaning up duplicate files across queue directories...')
+
+         file_map = {}
+
+         [PENDING_DIR, PROCESSING_DIR, FAILED_DIR, SUCCESS_DIR].each do |dir_name|
+           dir_path = File.join(@queue_base_path, dir_name)
+           next unless Dir.exist?(dir_path)
+
+           Dir.glob(File.join(dir_path, '*.json')).each do |file_path|
+             file_name = File.basename(file_path)
+             existing_file = file_map[file_name]
+
+             if existing_file
+               # File exists in multiple directories, keep the one with latest modification time
+               begin
+                 existing_time = File.mtime(existing_file)
+                 current_time = File.mtime(file_path)
+
+                 if current_time > existing_time
+                   # Delete the older file
+                   File.delete(existing_file)
+                   file_map[file_name] = file_path
+                   @logger.debug("Removed duplicate file (older): #{existing_file}")
+                 else
+                   # Delete the current file
+                   File.delete(file_path)
+                   @logger.debug("Removed duplicate file (older): #{file_path}")
+                 end
+               rescue => e
+                 @logger.warn("Could not compare modification times for duplicate file: #{file_name}")
+                 # Keep the existing file, delete current
+                 File.delete(file_path)
+               end
+             else
+               file_map[file_name] = file_path
+             end
+           end
+         end
+
+         @logger.info('Duplicate file cleanup completed')
+       rescue => e
+         @logger.error("Error during duplicate file cleanup: #{e.message}")
+       end
+
+       private
+
+       # Initialize queue directories
+       def initialize_queue_directories
+         [PENDING_DIR, PROCESSING_DIR, FAILED_DIR, SUCCESS_DIR].each do |dir_name|
+           dir_path = File.join(@queue_base_path, dir_name)
+           FileUtils.mkdir_p(dir_path)
+         end
+         @logger.debug('Queue directories initialized')
+       rescue => e
+         @logger.error("Failed to create queue directories: #{e.message}")
+         raise RuntimeError, "Failed to initialize persistent queue: #{e.message}"
+       end
+
+       # Generate file name for a submission
+       #
+       # @param submission [Hash] The submission data
+       # @return [String] The generated file name
+       def generate_file_name(submission)
+         # Extract document ID from payload to create unique reference
+         document_id = extract_document_id(submission[:payload] || submission['payload'])
+
+         # Generate filename using source and document ID for unique reference
+         source = submission[:source] || submission['source']
+         source_id = (source[:id] || source['id']).to_s.gsub(/[^a-zA-Z0-9]/, '_')
+         country = (submission[:country] || submission['country']).to_s
+         document_type = (submission[:document_type] || submission['document_type']).to_s
+
+         "#{source_id}_#{document_id}_#{country}_#{document_type}.json"
+       end
+
+       # Extract document ID from payload
+       #
+       # @param payload [String] The JSON payload string
+       # @return [String] The document ID or timestamp-based ID
+       def extract_document_id(payload)
+         # Parse the complete UnifyRequest JSON
+         request_map = JSON.parse(payload)
+
+         # Extract from payload.invoice_data.invoice_number
+         invoice_number = request_map.dig('payload', 'invoice_data', 'invoice_number')
+         return invoice_number if invoice_number
+
+         # Fallback to timestamp if no invoice number found
+         "doc_#{Time.now.to_i}"
+       rescue => e
+         @logger.warn("Failed to extract document ID from UnifyRequest payload, using timestamp: #{e.message}")
+         "doc_#{Time.now.to_i}"
+       end
+
+       # Process pending submissions
+       def process_pending_submissions
+         return unless @running
+
+         @processing_lock.synchronize do
+           pending_dir = File.join(@queue_base_path, PENDING_DIR)
+           pending_files = Dir.glob(File.join(pending_dir, '*.json'))
+
+           return if pending_files.empty?
+
+           @logger.debug("Found #{pending_files.length} pending submissions to process")
+
+           pending_files.each do |file_path|
+             # Check if file still exists before processing
+             next unless File.exist?(file_path)
+
+             process_submission_file(file_path)
+           end
+         end
+       rescue => e
+         @logger.error("Error processing pending submissions: #{e.message}")
+       end
+
+       # Process a single submission file
+       #
+       # @param file_path [String] Path to the submission file
+       def process_submission_file(file_path)
+         # Read submission record first (before moving)
+         record = JSON.parse(File.read(file_path), symbolize_names: true)
+
+         payload_json = JSON.pretty_generate(record[:payload])
+         @logger.info("🔥 QUEUE: Read record from file: #{File.basename(file_path)} with payload length: #{payload_json.length}")
+         @logger.info("🔥 QUEUE: Read payload preview: #{payload_json[0..199]}")
+
+         # Move to processing directory
+         processing_path = File.join(@queue_base_path, PROCESSING_DIR, File.basename(file_path))
+         FileUtils.mv(file_path, processing_path)
+
+         @logger.debug("Processing submission: #{File.basename(file_path)} for source: #{record[:source_id]}")
+
+         # Convert back to submission format
+         submission = convert_to_submission(record)
+
+         # Print the JSON payload being sent from queue
+         @logger.info('=== QUEUE SUBMISSION JSON ===')
+         @logger.info("File: #{File.basename(file_path)}")
+         @logger.info("Source: #{record[:source_id]}")
+         @logger.info("Country: #{record[:country]}")
+         @logger.info("Document Type: #{record[:document_type]}")
+         @logger.info("Enqueued At: #{record[:enqueued_at]}")
+         @logger.info('Payload JSON:')
+         @logger.info(JSON.pretty_generate(record[:payload]))
+         @logger.info('=== END QUEUE SUBMISSION JSON ===')
+
+         # Attempt to send using the stored UnifyRequest directly
+         begin
+           # Convert the stored Map back to UnifyRequest object
+           unify_request = convert_to_unify_request(record[:payload])
+
+           # Convert camelCase keys to snake_case for Ruby SDK compatibility
+           converted_request = convert_camelcase_to_snakecase(unify_request)
+
+           @logger.info('🔥 QUEUE: Successfully converted stored UnifyRequest Map to object')
+           @logger.info("🔥 QUEUE: Request ID: #{converted_request[:request_id]}")
+           @logger.info("🔥 QUEUE: Document Type: #{converted_request[:document_type]}")
+           @logger.info("🔥 QUEUE: Country: #{converted_request[:country]}")
+           @logger.info("🔥 QUEUE: Source: #{converted_request[:source][:id]}")
+           @logger.info("🔥 QUEUE: Operation: #{converted_request[:operation]}")
+           @logger.info("🔥 QUEUE: Mode: #{converted_request[:mode]}")
+           @logger.info("🔥 QUEUE: Purpose: #{converted_request[:purpose]}")
+
+           # Keep the environment as-is - no conversion
+           current_env = converted_request[:env]
+           @logger.info("🔥 QUEUE: Keeping environment as '#{current_env}'")
+
+           # Use the SDK's push_to_unify_request method to send the stored UnifyRequest
+           @logger.info('🔥 QUEUE: Sending stored UnifyRequest via SDK push_to_unify_request')
+           response = ComplyanceSDK.push_to_unify_request(converted_request)
+
+           # Log the complete response details
+           @logger.info('🔥 QUEUE: API Response received')
+           @logger.info("🔥 QUEUE: Response Status: #{response&.status || 'NULL'}")
+           @logger.info("🔥 QUEUE: Response Message: #{response&.message || 'NULL'}")
+
+           if response&.data
+             @logger.info('🔥 QUEUE: Response Data available')
+
+             # Log submission details if available
+             if response.data[:submission]
+               submission_response = response.data[:submission]
+               @logger.info("🔥 QUEUE: Submission ID: #{submission_response[:submission_id]}")
+               @logger.info("🔥 QUEUE: Submission Status: #{submission_response[:status]}")
+               @logger.info("🔥 QUEUE: Submission Country: #{submission_response[:country]}")
+               @logger.info("🔥 QUEUE: Submission Authority: #{submission_response[:authority]}")
+               @logger.info("🔥 QUEUE: Is Accepted: #{submission_response[:accepted]}")
+               @logger.info("🔥 QUEUE: Is Rejected: #{submission_response[:rejected]}")
+               @logger.info("🔥 QUEUE: Is Failed: #{submission_response[:failed]}")
+             end
+
+             # Log document details if available
+             if response.data[:document]
+               document = response.data[:document]
+               @logger.info("🔥 QUEUE: Document ID: #{document[:document_id]}")
+               @logger.info("🔥 QUEUE: Document Type: #{document[:document_type]}")
+               @logger.info("🔥 QUEUE: Document Status: #{document[:status]}")
+               @logger.info("🔥 QUEUE: Document Created At: #{document[:created_at]}")
+             end
+
+             # Log processing details if available
+             if response.data[:processing]
+               processing = response.data[:processing]
+               @logger.info("🔥 QUEUE: Processing Status: #{processing[:status]}")
+               @logger.info("🔥 QUEUE: Processing Purpose: #{processing[:purpose]}")
+               @logger.info("🔥 QUEUE: Processing Completed At: #{processing[:completed_at]}")
+             end
+           end
+
+           if response&.error
+             @logger.error('🔥 QUEUE: API Error Details:')
+             @logger.error("🔥 QUEUE: Error Code: #{response.error[:code]}")
+             @logger.error("🔥 QUEUE: Error Message: #{response.error[:message]}")
+             @logger.error("🔥 QUEUE: Error Suggestion: #{response.error[:suggestion]}")
+           end
+
+           # Check for success using the proper method and also check for HTTP 200 equivalent
+           is_success = false
+           status = response&.status || 'null'
+
+           @logger.info('🔥 QUEUE: Analyzing success criteria...')
+           @logger.info("🔥 QUEUE: Response Status: '#{status}'")
+
+           if response
+             # Use the proper success? method from UnifyResponse
+             response_success = response.success?
+             @logger.info("🔥 QUEUE: response.success?: #{response_success}")
+             is_success = response_success
+
+             # Also check if submission was accepted (for invoicing purpose)
+             if response.data&.dig(:submission)
+               submission_response = response.data[:submission]
+               submission_accepted = submission_response[:accepted] == true
+               status_accepted = submission_response[:status]&.downcase == 'accepted'
+               @logger.info("🔥 QUEUE: submission.accepted?: #{submission_accepted}")
+               @logger.info("🔥 QUEUE: submission.status == 'accepted': #{status_accepted}")
+               @logger.info("🔥 QUEUE: submission.status value: '#{submission_response[:status]}'")
+
+               is_success = is_success || submission_accepted || status_accepted
+             end
+
+             # Check for document creation success
+             if response.data&.dig(:document)
+               document = response.data[:document]
+               document_success = document[:status]&.downcase == 'success'
+               @logger.info("🔥 QUEUE: document.status == 'success': #{document_success}")
+               @logger.info("🔥 QUEUE: document.status value: '#{document[:status]}'")
+
+               is_success = is_success || document_success
+             end
+           end
+
+           @logger.info("🔥 QUEUE: Final is_success determination: #{is_success}")
+
+           if is_success
+             @logger.info('🔥 QUEUE: SUCCESS DETECTED - Removing file from queue')
+             @logger.info("🔥 QUEUE: File to remove: #{File.basename(processing_path)}")
+             # Remove the file completely - it's successfully processed
+             deleted = File.delete(processing_path) rescue false
+             @logger.info("🔥 QUEUE: File deletion result: #{deleted}")
+             @logger.info("🔥 QUEUE: Successfully processed and removed from queue: #{File.basename(file_path)}")
+             return # Exit successfully
+           else
+             # If response is not successful, treat as failure
+             @logger.warn('🔥 QUEUE: NON-SUCCESS DETECTED - Moving to failed directory')
+             @logger.warn("🔥 QUEUE: Response status: '#{status}'")
+             @logger.warn("🔥 QUEUE: File to move: #{File.basename(processing_path)}")
+             raise RuntimeError, "API returned non-success status: #{status}"
+           end
+         rescue => e
+           @logger.error("Failed to send queued submission via push_to_unify: #{e.message}")
+           # Move to failed directory instead of re-queuing
+           failed_path = File.join(@queue_base_path, FAILED_DIR, File.basename(processing_path))
+
+           # Check if file already exists in failed directory and handle it
+           if File.exist?(failed_path)
+             @logger.warn("File already exists in failed directory, deleting processing file: #{File.basename(processing_path)}")
+             File.delete(processing_path) rescue nil
+           else
+             FileUtils.mv(processing_path, failed_path)
+             @logger.info("Moved failed submission to: #{failed_path}")
+           end
+           raise e
+         end
+       rescue => e
+         @logger.warn("Failed to process submission: #{File.basename(file_path)} - Error: #{e.message}")
+
+         begin
+           # Move to failed directory
+           processing_path = File.join(@queue_base_path, PROCESSING_DIR, File.basename(file_path))
+           failed_path = File.join(@queue_base_path, FAILED_DIR, File.basename(file_path))
+
+           if File.exist?(processing_path)
+             # Check if file already exists in failed directory
+             if File.exist?(failed_path)
+               @logger.warn("File already exists in failed directory, deleting processing file: #{File.basename(file_path)}")
+               File.delete(processing_path) rescue nil
+             else
+               FileUtils.mv(processing_path, failed_path)
+               @logger.debug("Moved failed submission to failed directory: #{File.basename(file_path)}")
+             end
+           else
+             # If the file never reached processing, move it from pending to failed
+             pending_path = File.join(@queue_base_path, PENDING_DIR, File.basename(file_path))
+             if File.exist?(pending_path)
+               if File.exist?(failed_path)
+                 @logger.warn("File already exists in failed directory, deleting pending file: #{File.basename(file_path)}")
+                 File.delete(pending_path) rescue nil
+               else
+                 FileUtils.mv(pending_path, failed_path)
+                 @logger.debug("Moved failed submission from pending to failed directory: #{File.basename(file_path)}")
+               end
+             end
+           end
+         rescue => move_error
+           @logger.error("Failed to move submission to failed directory: #{File.basename(file_path)}: #{move_error.message}")
+         end
+       end
+
+       # Convert stored record back to submission format
+       #
+       # @param record [Hash] The stored record
+       # @return [Hash] The submission hash
+       def convert_to_submission(record)
+         # Convert Map back to JSON string for submission
+         json_payload = JSON.generate(record[:payload])
+
+         {
+           payload: json_payload,
+           source: { id: record[:source_id] },
+           country: record[:country].to_sym,
+           document_type: record[:document_type].to_sym
+         }
+       end
+
+       # Convert stored payload to UnifyRequest format
+       #
+       # @param payload_map [Hash] The stored payload map
+       # @return [Hash] The UnifyRequest hash
+       def convert_to_unify_request(payload_map)
+         # The stored payload IS the UnifyRequest data (top-level)
+         # The nested "payload" field contains the invoice data, not the UnifyRequest
+         payload_map
+       end
+
+       # Convert camelCase keys to snake_case for Ruby SDK compatibility
+       def convert_camelcase_to_snakecase(hash)
+         return hash unless hash.is_a?(Hash)
+
+         converted = {}
+         hash.each do |key, value|
+           # Convert camelCase to snake_case (insert the underscore only between a
+           # lowercase/digit and an uppercase letter, so PascalCase keys do not gain
+           # a leading underscore)
+           snake_key = key.to_s.gsub(/([a-z0-9])([A-Z])/, '\1_\2').downcase.to_sym
+
+           # Recursively convert nested hashes
+           if value.is_a?(Hash)
+             converted[snake_key] = convert_camelcase_to_snakecase(value)
+           elsif value.is_a?(Array)
+             converted[snake_key] = value.map do |item|
+               item.is_a?(Hash) ? convert_camelcase_to_snakecase(item) : item
+             end
+           else
+             converted[snake_key] = value
+           end
+         end
+
+         converted
+       end
+
+       # Count files in a directory
+       #
+       # @param dir_name [String] Directory name
+       # @return [Integer] Number of JSON files
+       def count_files_in_directory(dir_name)
+         dir_path = File.join(@queue_base_path, dir_name)
+         return 0 unless Dir.exist?(dir_path)
+
+         Dir.glob(File.join(dir_path, '*.json')).length
+       end
+
+       # Clear all files in a directory
+       #
+       # @param dir_name [String] Directory name
+       def clear_directory(dir_name)
+         dir_path = File.join(@queue_base_path, dir_name)
+         return unless Dir.exist?(dir_path)
+
+         files = Dir.glob(File.join(dir_path, '*.json'))
+
+         files.each do |file_path|
+           File.delete(file_path) rescue nil
+           @logger.debug("Deleted file: #{File.basename(file_path)}")
+         end
+
+         @logger.info("Cleared #{files.length} files from #{dir_name}")
+       end
+     end
+   end
+ end
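
For context, a minimal usage sketch of the queue manager above. The API key, source ID, and the country/document-type symbols are illustrative placeholders; the payload shape mirrors what extract_document_id reads (payload.invoice_data.invoice_number), and the background loop may pick the item up before queue_status is called:

require 'json'
require 'complyance_sdk'

# Hypothetical values throughout -- only the call shapes come from the class above.
queue = ComplyanceSDK::Queue::PersistentQueueManager.new('your-api-key')

queue.enqueue(
  # The payload is the UnifyRequest serialized as a JSON string; enqueue parses
  # it back into a Hash before persisting the record to ~/complyance-queue/pending.
  payload: JSON.generate(
    'documentType' => 'invoice',
    'payload' => { 'invoice_data' => { 'invoice_number' => 'INV-1001' } }
  ),
  source: { id: 'erp-01' },
  country: :SA,
  document_type: :tax_invoice
)

# Counts JSON files in each state directory, e.g. {:pending_count=>1, ...}
puts queue.queue_status.inspect

# Shuts down the 30-second background processing loop started in initialize.
queue.stop_processing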
@@ -0,0 +1,29 @@
+ # frozen_string_literal: true
+
+ require "rails"
+
+ module ComplyanceSDK
+   # Railtie for Rails integration
+   class Railtie < Rails::Railtie
+     initializer "complyance_sdk.configure" do |app|
+       # Auto-configure from Rails credentials if not already configured
+       ComplyanceSDK.configure_from_rails unless ComplyanceSDK.configured?
+
+       # Add middleware if configured
+       if ComplyanceSDK.configured? && defined?(ActionDispatch::Request)
+         require "complyance_sdk/middleware/rack_middleware"
+         app.middleware.use ComplyanceSDK::Middleware::RackMiddleware
+       end
+     end
+
+     # Add generators
+     generators do
+       require "complyance_sdk/generators/install_generator" if defined?(Rails::Generators)
+     end
+
+     # Add rake tasks
+     rake_tasks do
+       load "complyance_sdk/tasks/complyance.rake" if File.exist?(File.join(File.dirname(__FILE__), "tasks/complyance.rake"))
+     end
+   end
+ end
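
The Railtie falls back to configure_from_rails only when nothing has configured the SDK yet, so an explicit initializer takes precedence. A sketch of what such an initializer might look like, assuming the SDK exposes a configure block; the attribute names below are assumptions, and the gem's own complyance_initializer.rb template (listed in the files above) is the authoritative starting point:

# config/initializers/complyance.rb -- illustrative only; attribute names are
# assumed, not confirmed against the SDK's public configuration API.
ComplyanceSDK.configure do |config|
  config.api_key = Rails.application.credentials.dig(:complyance, :api_key)
  config.environment = Rails.env.production? ? :production : :sandbox
end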