multiwoven-integrations 0.17.0 → 0.18.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz: 1716352c3eca7da21c2291b6c0980c14c00e7acaaf2d1c806121d3b85c0628d8
-   data.tar.gz: a6096a5d2f71aa886d3e447d98b17c4e0d3104ec283ad1015770f9ae4aeeaed6
+   metadata.gz: a182e10fe6151cdfbffef960e34aaf8b14be263a2f03a29c71e247d9125938a8
+   data.tar.gz: 79e70b4fd2f81c406c2ca436bc45d422ad63ccd6489bf5c012a7b8ff601ddf21
  SHA512:
-   metadata.gz: 2b380bd351e7ab48a6675d05383ad57b15bf6d5f300d08064a36ffe32503280e97ae50f9a215229f431e5f84008117fe73924a4380c8bf3f242b7319ef1661e2
-   data.tar.gz: c5fb7462349698add62a5dfd25393b0c5655e0df671972afe56391734190b23d536a1e84f4e1dd03874aec5e0dc10533b4e887b585fbc6d43bcc7eba24be965d
+   metadata.gz: 3808200a5d1f51b873748399963d285053cd6830bcbeb1beb20f08d0eca91bea535a5c811092b767923282aa66ccc4adf6a754cf329b312ab5248475d6a9f042
+   data.tar.gz: 436bee5b004c3b32b7232e28f4e75eefeeacd40d524c6d6b5bf4d4b7e2475638eba06e9bc96820c227c36c6ff33b624172f81cb7a8ffe78ad27040ff7ffbdad1
lib/multiwoven/integrations/destination/amazon_s3/client.rb ADDED
@@ -0,0 +1,92 @@
+ # frozen_string_literal: true
+
+ module Multiwoven::Integrations::Destination
+   module AmazonS3
+     include Multiwoven::Integrations::Core
+     class Client < DestinationConnector
+       def check_connection(connection_config)
+         connection_config = connection_config.with_indifferent_access
+         conn = create_connection(connection_config)
+         conn.head_bucket(bucket: connection_config[:bucket_name])
+         ConnectionStatus.new(status: ConnectionStatusType["succeeded"]).to_multiwoven_message
+       rescue StandardError => e
+         ConnectionStatus.new(status: ConnectionStatusType["failed"], message: e.message).to_multiwoven_message
+       end
+
+       def discover(_connection_config = nil)
+         catalog_json = read_json(CATALOG_SPEC_PATH)
+         catalog = build_catalog(catalog_json)
+         catalog.to_multiwoven_message
+       rescue StandardError => e
+         handle_exception(e, {
+           context: "AMAZONS3:DISCOVER:EXCEPTION",
+           type: "error"
+         })
+       end
+
+       def write(sync_config, records, _action = "destination_insert")
+         records_size = records.size
+         log_message_array = []
+         write_success = upload_csv_content(sync_config, records)
+         write_failure = records_size - write_success
+         log_message_array << log_request_response("info", @args, @response)
+         tracking_message(write_success, write_failure, log_message_array)
+       rescue StandardError => e
+         handle_exception(e, {
+           context: "AMAZONS3:WRITE:EXCEPTION",
+           type: "error",
+           sync_id: sync_config.sync_id,
+           sync_run_id: sync_config.sync_run_id
+         })
+       end
+
+       private
+
+       def create_connection(connection_config)
+         Aws::S3::Client.new(
+           region: connection_config[:region],
+           access_key_id: connection_config[:access_key_id],
+           secret_access_key: connection_config[:secret_access_key]
+         )
+       end
+
+       def upload_csv_content(sync_config, records)
+         connection_config = sync_config.destination.connection_specification.with_indifferent_access
+         conn = create_connection(connection_config)
+         file_name = generate_local_file_name(connection_config)
+         csv_content = generate_csv_content(records)
+         begin
+           @args = ["create", connection_config[:bucket_name], "#{connection_config[:file_path]}#{file_name}", csv_content]
+           @response = conn.put_object(
+             bucket: connection_config[:bucket_name],
+             key: "#{connection_config[:file_path]}#{file_name}",
+             body: csv_content
+           )
+           write_success = records.size
+         rescue StandardError => e
+           handle_exception(e, {
+             context: "AMAZONS3:RECORD:WRITE:EXCEPTION",
+             type: "error",
+             sync_id: sync_config.sync_id,
+             sync_run_id: sync_config.sync_run_id
+           })
+           write_success = 0
+         end
+         write_success
+       end
+
+       def generate_csv_content(records)
+         CSV.generate do |csv|
+           headers = records.first.keys
+           csv << headers
+           records.each { |record| csv << record.values_at(*headers) }
+         end
+       end
+
+       def generate_local_file_name(connection_config)
+         timestamp = Time.now.strftime("%Y%m%d-%H%M%S")
+         "#{connection_config[:file_name]}_#{timestamp}.#{connection_config[:format_type]}"
+       end
+     end
+   end
+ end
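For orientation, here is a minimal sketch of exercising the new destination client's connection check. It assumes the gem and its aws-sdk-s3 dependency are installed; every credential and bucket value below is a placeholder, not a real account.

require "multiwoven/integrations"

# Placeholder credentials; the key names mirror the connector's spec.json below.
config = {
  "access_key_id" => "AKIA...",
  "secret_access_key" => "<secret>",
  "region" => "us-east-1",
  "bucket_name" => "my-example-bucket"
}

client = Multiwoven::Integrations::Destination::AmazonS3::Client.new
# check_connection issues a head_bucket call against the bucket and wraps the
# outcome (a succeeded/failed ConnectionStatus) in a multiwoven message.
p client.check_connection(config)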
lib/multiwoven/integrations/destination/amazon_s3/config/catalog.json ADDED
@@ -0,0 +1,16 @@
+ {
+   "request_rate_limit": 600,
+   "request_rate_limit_unit": "minute",
+   "request_rate_concurrency": 10,
+   "schema_mode": "schemaless",
+   "streams": [
+     {
+       "name": "create",
+       "batch_support": true,
+       "batch_size": 100000,
+       "action": "create",
+       "json_schema": {},
+       "supported_sync_modes": ["full_refresh","incremental"]
+     }
+   ]
+ }
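The catalog advertises batch_support with a batch_size of 100000, which suggests the host application is expected to hand records to Client#write in slices of at most 100,000 at a time. A rough, self-contained illustration of that slicing (record shape and counts are made up):

# Fabricated records purely to illustrate slicing by the catalog's batch_size.
records = Array.new(250_000) { |i| { "id" => i, "email" => "user#{i}@example.com" } }

records.each_slice(100_000).with_index(1) do |batch, n|
  # In a real sync, each batch would be passed to Client#write(sync_config, batch).
  puts "batch #{n}: #{batch.size} records" # => 100000, 100000, 50000
end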
lib/multiwoven/integrations/destination/amazon_s3/config/meta.json ADDED
@@ -0,0 +1,15 @@
+ {
+   "data": {
+     "name": "AmazonS3",
+     "title": "Amazon S3",
+     "connector_type": "source",
+     "category": "Data Lake",
+     "documentation_url": "https://docs.mutliwoven.com",
+     "github_issue_label": "source-amazons3",
+     "icon": "icon.svg",
+     "license": "MIT",
+     "release_stage": "alpha",
+     "support_level": "community",
+     "tags": ["language:ruby", "multiwoven"]
+   }
+ }
lib/multiwoven/integrations/destination/amazon_s3/config/spec.json ADDED
@@ -0,0 +1,56 @@
+ {
+   "documentation_url": "https://docs.multiwoven.com/integrations/destination/amazons3",
+   "stream_type": "static",
+   "connection_specification": {
+     "$schema": "http://json-schema.org/draft-07/schema#",
+     "title": "AmazonS3",
+     "required": ["access_key_id", "secret_access_key", "region", "bucket_name", "file_path", "file_name", "format_type" ],
+     "properties": {
+       "access_key_id": {
+         "description": "The AWS Access Key ID to use for authentication.",
+         "type": "string",
+         "title": "Personal Access Key",
+         "order": 0
+       },
+       "secret_access_key": {
+         "description": "The AWS Secret Access Key to use for authentication.",
+         "type": "string",
+         "multiwoven_secret": true,
+         "title": "Secret Access Key",
+         "order": 1
+       },
+       "region": {
+         "description": "AWS region.",
+         "type": "string",
+         "title": "Region",
+         "order": 2
+       },
+       "bucket_name": {
+         "title": "Bucket Name",
+         "description": "Amazon S3 bucket name.",
+         "type": "string",
+         "order": 3
+       },
+       "file_path": {
+         "title": "File Path",
+         "type": "string",
+         "description": "Path to the directory where files will be written.",
+         "order": 4
+       },
+       "file_name": {
+         "title": "File Name",
+         "type": "string",
+         "description": "Name of the file to be written.",
+         "order": 5
+       },
+       "format_type": {
+         "title": "File Format Type",
+         "type": "string",
+         "description": "Format of the data output.",
+         "order": 6,
+         "enum": ["csv"],
+         "default": "csv"
+       }
+     }
+   }
+ }
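Tying the spec to the client code above, a configuration satisfying the required fields could look like the hash below; all values are illustrative placeholders, and format_type is limited to "csv" by the enum.

# Placeholder values for a spec-conformant connection specification.
connection_specification = {
  "access_key_id" => "AKIA...",
  "secret_access_key" => "<secret>",
  "region" => "us-east-1",
  "bucket_name" => "my-example-bucket",
  "file_path" => "exports/",   # prefix prepended to the object key
  "file_name" => "users",      # base name; the client appends a timestamp
  "format_type" => "csv"
}

Given the client's generate_local_file_name and put_object calls, this configuration would write each sync run to an object key of the form exports/users_<YYYYmmdd-HHMMSS>.csv.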
lib/multiwoven/integrations/destination/amazon_s3/icon.svg ADDED
@@ -0,0 +1,34 @@
+ <svg xmlns="http://www.w3.org/2000/svg" width="428" height="512" viewBox="0 0 428 512">
+   <defs>
+     <style>
+       .cls-1 {
+         fill: #e25444;
+       }
+
+       .cls-1, .cls-2, .cls-3 {
+         fill-rule: evenodd;
+       }
+
+       .cls-2 {
+         fill: #7b1d13;
+       }
+
+       .cls-3 {
+         fill: #58150d;
+       }
+     </style>
+   </defs>
+   <path class="cls-1" d="M378,99L295,257l83,158,34-19V118Z"/>
+   <path class="cls-2" d="M378,99L212,118,127.5,257,212,396l166,19V99Z"/>
+   <path class="cls-3" d="M43,99L16,111V403l27,12L212,257Z"/>
+   <path class="cls-1" d="M42.637,98.667l169.587,47.111V372.444L42.637,415.111V98.667Z"/>
+   <path class="cls-3" d="M212.313,170.667l-72.008-11.556,72.008-81.778,71.83,81.778Z"/>
+   <path class="cls-3" d="M284.143,159.111l-71.919,11.733-71.919-11.733V77.333"/>
+   <path class="cls-3" d="M212.313,342.222l-72.008,13.334,72.008,70.222,71.83-70.222Z"/>
+   <path class="cls-2" d="M212,16L140,54V159l72.224-20.333Z"/>
+   <path class="cls-2" d="M212.224,196.444l-71.919,7.823V309.105l71.919,8.228V196.444Z"/>
+   <path class="cls-2" d="M212.224,373.333L140.305,355.3V458.363L212.224,496V373.333Z"/>
+   <path class="cls-1" d="M284.143,355.3l-71.919,18.038V496l71.919-37.637V355.3Z"/>
+   <path class="cls-1" d="M212.224,196.444l71.919,7.823V309.105l-71.919,8.228V196.444Z"/>
+   <path class="cls-1" d="M212,16l72,38V159l-72-20V16Z"/>
+ </svg>
@@ -2,7 +2,7 @@

  module Multiwoven
    module Integrations
-     VERSION = "0.17.0"
+     VERSION = "0.18.0"

      ENABLED_SOURCES = %w[
        Snowflake
@@ -46,6 +46,7 @@ module Multiwoven
        MicrosoftSql
        Mailchimp
        AISDataStore
+       AmazonS3
      ].freeze
    end
  end
@@ -98,6 +98,7 @@ require_relative "integrations/destination/microsoft_excel/client"
  require_relative "integrations/destination/microsoft_sql/client"
  require_relative "integrations/destination/mailchimp/client"
  require_relative "integrations/destination/ais_data_store/client"
+ require_relative "integrations/destination/amazon_s3/client"

  module Multiwoven
    module Integrations
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: multiwoven-integrations
  version: !ruby/object:Gem::Version
-   version: 0.17.0
+   version: 0.18.0
  platform: ruby
  authors:
  - Subin T P
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2024-12-30 00:00:00.000000000 Z
+ date: 2025-01-06 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: activesupport
@@ -596,6 +596,11 @@ files:
  - lib/multiwoven/integrations/destination/ais_data_store/config/meta.json
  - lib/multiwoven/integrations/destination/ais_data_store/config/spec.json
  - lib/multiwoven/integrations/destination/ais_data_store/icon.svg
+ - lib/multiwoven/integrations/destination/amazon_s3/client.rb
+ - lib/multiwoven/integrations/destination/amazon_s3/config/catalog.json
+ - lib/multiwoven/integrations/destination/amazon_s3/config/meta.json
+ - lib/multiwoven/integrations/destination/amazon_s3/config/spec.json
+ - lib/multiwoven/integrations/destination/amazon_s3/icon.svg
  - lib/multiwoven/integrations/destination/databricks_lakehouse/client.rb
  - lib/multiwoven/integrations/destination/databricks_lakehouse/config/meta.json
  - lib/multiwoven/integrations/destination/databricks_lakehouse/config/spec.json