multiwoven-integrations 0.15.10 → 0.16.0
- checksums.yaml +4 -4
- data/lib/multiwoven/integrations/core/constants.rb +2 -0
- data/lib/multiwoven/integrations/core/http_client.rb +2 -28
- data/lib/multiwoven/integrations/core/http_helper.rb +36 -0
- data/lib/multiwoven/integrations/core/source_connector.rb +22 -0
- data/lib/multiwoven/integrations/core/streaming_http_client.rb +21 -0
- data/lib/multiwoven/integrations/rollout.rb +2 -1
- data/lib/multiwoven/integrations/source/http_model/client.rb +63 -45
- data/lib/multiwoven/integrations/source/http_model/config/meta.json +1 -1
- data/lib/multiwoven/integrations/source/http_model/config/spec.json +11 -4
- data/lib/multiwoven/integrations/source/open_ai/client.rb +117 -0
- data/lib/multiwoven/integrations/source/open_ai/config/catalog.json +6 -0
- data/lib/multiwoven/integrations/source/open_ai/config/meta.json +15 -0
- data/lib/multiwoven/integrations/source/open_ai/config/spec.json +54 -0
- data/lib/multiwoven/integrations/source/open_ai/icon.svg +1 -0
- data/lib/multiwoven/integrations.rb +3 -0
- metadata +12 -5
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 85dd988a522326237c492ac96db325a16225f7e0ab96438fdc3b1475139e07e0
+  data.tar.gz: b36e2addc56a428a574684f317f1b1b10f2f864ea915f31564290f607604bf24
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 5b2ea53b9847be5f4c4d2618cfe1a89376f980aefff351cce6a7b63b8f76d56f3067180fab42d2d3d1977996e8b34308f085ffcb793aa6e62c894048f6116549
+  data.tar.gz: 9e320fedd61454ea428ff0935bf7fae584f21850f2a467b951dfc98c22b24a982f9fbf76007481416241c4b35c671bfd8a362d34236e2cb7ced1a2b5325b68ce
data/lib/multiwoven/integrations/core/http_client.rb CHANGED
@@ -3,40 +3,14 @@
 module Multiwoven
   module Integrations::Core
     class HttpClient
+      extend HttpHelper
       class << self
         def request(url, method, payload: nil, headers: {}, config: {})
           uri = URI(url)
-          http = Net::HTTP.new(uri.host, uri.port)
-          http.use_ssl = (uri.scheme == "https")
-
-          # Set timeout if provided
-          if config[:timeout]
-            timeout_value = config[:timeout].to_f
-            http.open_timeout = timeout_value
-            http.read_timeout = timeout_value
-          end
-
+          http = configure_http(uri, config)
           request = build_request(method, uri, payload, headers)
           http.request(request)
         end
-
-        private
-
-        def build_request(method, uri, payload, headers)
-          request_class = case method.upcase
-                          when Constants::HTTP_GET then Net::HTTP::Get
-                          when Constants::HTTP_POST then Net::HTTP::Post
-                          when Constants::HTTP_PUT then Net::HTTP::Put
-                          when Constants::HTTP_PATCH then Net::HTTP::Patch
-                          when Constants::HTTP_DELETE then Net::HTTP::Delete
-                          else raise ArgumentError, "Unsupported HTTP method: #{method}"
-                          end
-
-          request = request_class.new(uri)
-          headers.each { |key, value| request[key] = value }
-          request.body = payload.to_json if payload && %w[POST PUT PATCH].include?(method.upcase)
-          request
-        end
       end
     end
   end
data/lib/multiwoven/integrations/core/http_helper.rb ADDED
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+module Multiwoven
+  module Integrations::Core
+    module HttpHelper
+      def build_request(method, uri, payload, headers)
+        request_class = case method.upcase
+                        when Constants::HTTP_GET then Net::HTTP::Get
+                        when Constants::HTTP_POST then Net::HTTP::Post
+                        when Constants::HTTP_PUT then Net::HTTP::Put
+                        when Constants::HTTP_PATCH then Net::HTTP::Patch
+                        when Constants::HTTP_DELETE then Net::HTTP::Delete
+                        else raise ArgumentError, "Unsupported HTTP method: #{method}"
+                        end
+
+        request = request_class.new(uri)
+        headers.each { |key, value| request[key] = value }
+        request.body = payload.to_json if payload && %w[POST PUT PATCH].include?(method.upcase)
+        request
+      end
+
+      def configure_http(uri, config)
+        http = Net::HTTP.new(uri.host, uri.port)
+        http.use_ssl = (uri.scheme == "https")
+
+        if config[:timeout]
+          timeout_value = config[:timeout].to_f
+          http.open_timeout = timeout_value
+          http.read_timeout = timeout_value
+        end
+
+        http
+      end
+    end
+  end
+end
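With request construction and socket configuration shared through HttpHelper, the non-streaming client keeps the same public signature. A minimal sketch of a call against it, using a placeholder endpoint, headers, and payload that are not taken from the gem:

```ruby
require "multiwoven/integrations"

# Hypothetical endpoint and payload, shown only to illustrate the call shape.
response = Multiwoven::Integrations::Core::HttpClient.request(
  "https://api.example.com/v1/predict",
  "POST",
  payload: { "input" => "hello" },
  headers: { "Content-Type" => "application/json" },
  config: { timeout: 10 } # forwarded to HttpHelper#configure_http as open/read timeouts
)
puts response.code # Net::HTTPResponse, e.g. "200"
```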
data/lib/multiwoven/integrations/core/source_connector.rb CHANGED
@@ -39,6 +39,28 @@ module Multiwoven
         # Appending the LIMIT and OFFSET clauses to the SQL query
         "#{sql_query} LIMIT #{limit} OFFSET #{offset}"
       end
+
+      def send_request(options = {})
+        Multiwoven::Integrations::Core::HttpClient.request(
+          options[:url],
+          options[:http_method],
+          payload: options[:payload],
+          headers: options[:headers],
+          config: options[:config]
+        )
+      end
+
+      def send_streaming_request(options = {})
+        Multiwoven::Integrations::Core::StreamingHttpClient.request(
+          options[:url],
+          options[:http_method],
+          payload: options[:payload],
+          headers: options[:headers],
+          config: options[:config]
+        ) do |chunk|
+          yield chunk if block_given? # Pass each chunk for processing (streaming response)
+        end
+      end
     end
   end
 end
data/lib/multiwoven/integrations/core/streaming_http_client.rb ADDED
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module Multiwoven
+  module Integrations::Core
+    class StreamingHttpClient
+      extend HttpHelper
+      class << self
+        def request(url, method, payload: nil, headers: {}, config: {})
+          uri = URI(url)
+          http = configure_http(uri, config)
+          request = build_request(method, uri, payload, headers)
+          http.request(request) do |response|
+            response.read_body do |chunk|
+              yield chunk if block_given? # Pass each response chunk
+            end
+          end
+        end
+      end
+    end
+  end
+end
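The streaming client mirrors HttpClient.request but yields the response body in chunks instead of returning it whole. A minimal sketch of driving it directly, again with placeholder values:

```ruby
require "multiwoven/integrations"

# Placeholder URL and payload; each yielded chunk is a raw piece of the HTTP body.
Multiwoven::Integrations::Core::StreamingHttpClient.request(
  "https://api.example.com/v1/stream",
  "POST",
  payload: { "input" => "hello" },
  headers: { "Content-Type" => "application/json" },
  config: { timeout: 30 }
) do |chunk|
  print chunk # arrives as soon as the server flushes it
end
```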
data/lib/multiwoven/integrations/rollout.rb CHANGED
@@ -2,7 +2,7 @@
 
 module Multiwoven
   module Integrations
-    VERSION = "0.15.10"
+    VERSION = "0.16.0"
 
     ENABLED_SOURCES = %w[
       Snowflake
@@ -20,6 +20,7 @@ module Multiwoven
       AwsSagemakerModel
       VertexModel
       HttpModel
+      OpenAI
     ].freeze
 
     ENABLED_DESTINATIONS = %w[
data/lib/multiwoven/integrations/source/http_model/client.rb CHANGED
@@ -5,24 +5,17 @@ module Multiwoven::Integrations::Source
     include Multiwoven::Integrations::Core
     class Client < SourceConnector
       def check_connection(connection_config)
-        connection_config = connection_config
-
-
-
-
-
-
-
-
-          success_status
-        else
-          failure_status(nil)
-        end
+        connection_config = prepare_config(connection_config)
+        response = send_request(
+          url: connection_config[:url_host],
+          http_method: connection_config[:http_method],
+          payload: JSON.parse(connection_config[:request_format]),
+          headers: connection_config[:headers],
+          config: connection_config[:config]
+        )
+        success?(response) ? success_status : failure_status(nil)
       rescue StandardError => e
-        handle_exception(e, {
-          context: "HTTP MODEL:CHECK_CONNECTION:EXCEPTION",
-          type: "error"
-        })
+        handle_exception(e, { context: "HTTP MODEL:CHECK_CONNECTION:EXCEPTION", type: "error" })
         failure_status(e)
       end
 
@@ -31,40 +24,66 @@ module Multiwoven::Integrations::Source
         catalog = build_catalog(catalog_json)
         catalog.to_multiwoven_message
       rescue StandardError => e
-        handle_exception(e, {
-          context: "HTTP MODEL:DISCOVER:EXCEPTION",
-          type: "error"
-        })
+        handle_exception(e, { context: "HTTP MODEL:DISCOVER:EXCEPTION", type: "error" })
       end
 
       def read(sync_config)
-        connection_config = sync_config.source.connection_specification
-
+        connection_config = prepare_config(sync_config.source.connection_specification)
+        stream = connection_config[:is_stream] ||= false
         # The server checks the ConnectorQueryType.
         # If it's "ai_ml," the server calculates the payload and passes it as a query in the sync config model protocol.
         # This query is then sent to the AI/ML model.
-        payload =
-
+        payload = parse_json(sync_config.model.query)
+
+        if stream
+          run_model_stream(connection_config, payload) { |message| yield message if block_given? }
+        else
+          run_model(connection_config, payload)
+        end
       rescue StandardError => e
-        handle_exception(e, {
-          context: "HTTP MODEL:READ:EXCEPTION",
-          type: "error"
-        })
+        handle_exception(e, { context: "HTTP MODEL:READ:EXCEPTION", type: "error" })
       end
 
       private
 
+      def prepare_config(config)
+        config.with_indifferent_access.tap do |conf|
+          conf[:config][:timeout] ||= 30
+        end
+      end
+
+      def parse_json(json_string)
+        JSON.parse(json_string)
+      rescue JSON::ParserError => e
+        handle_exception(e, { context: "HTTP MODEL:PARSE_JSON:EXCEPTION", type: "error" })
+        {}
+      end
+
       def run_model(connection_config, payload)
-
-
-
-
-
-
-
+        response = send_request(
+          url: connection_config[:url_host],
+          http_method: connection_config[:http_method],
+          payload: payload,
+          headers: connection_config[:headers],
+          config: connection_config[:config]
+        )
         process_response(response)
       rescue StandardError => e
-        handle_exception(e, context: "HTTP MODEL:RUN_MODEL:EXCEPTION", type: "error")
+        handle_exception(e, { context: "HTTP MODEL:RUN_MODEL:EXCEPTION", type: "error" })
+      end
+
+      def run_model_stream(connection_config, payload)
+        send_streaming_request(
+          url: connection_config[:url_host],
+          http_method: connection_config[:http_method],
+          payload: payload,
+          headers: connection_config[:headers],
+          config: connection_config[:config]
+        ) do |chunk|
+          process_streaming_response(chunk) { |message| yield message if block_given? }
+        end
+      rescue StandardError => e
+        handle_exception(e, { context: "HTTP MODEL:RUN_STREAM_MODEL:EXCEPTION", type: "error" })
      end
 
       def process_response(response)
@@ -74,16 +93,15 @@ module Multiwoven::Integrations::Source
         else
           create_log_message("HTTP MODEL:RUN_MODEL", "error", "request failed: #{response.body}")
         end
+      rescue StandardError => e
+        handle_exception(e, { context: "HTTP MODEL:PROCESS_RESPONSE:EXCEPTION", type: "error" })
       end
 
-      def
-
-
-
-
-          headers: headers,
-          config: config
-        )
+      def process_streaming_response(chunk)
+        data = JSON.parse(chunk)
+        yield [RecordMessage.new(data: data, emitted_at: Time.now.to_i).to_multiwoven_message] if block_given?
+      rescue StandardError => e
+        handle_exception(e, { context: "HTTP MODEL:PROCESS_STREAMING_RESPONSE:EXCEPTION", type: "error" })
       end
     end
   end
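The refactor also concentrates default handling in two small private helpers. As a quick illustration of what prepare_config does (assuming ActiveSupport, which the gem already lists as a dependency, and using a made-up config hash), a missing timeout is defaulted to 30 and string and symbol keys become interchangeable:

```ruby
require "active_support/core_ext/hash/indifferent_access"

# Illustrative config; mirrors the body of prepare_config shown above.
config = { "url_host" => "https://api.example.com/v1/predict", "config" => {} }
prepared = config.with_indifferent_access.tap { |conf| conf[:config][:timeout] ||= 30 }

prepared[:config][:timeout] # => 30 (default applied because none was given)
prepared["url_host"]        # => "https://api.example.com/v1/predict"
prepared[:url_host]         # => same value via the symbol key
```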
data/lib/multiwoven/integrations/source/http_model/config/meta.json CHANGED
@@ -4,7 +4,7 @@
     "title": "HTTP Model Endpoint",
     "connector_type": "source",
     "category": "AI Model",
-    "documentation_url": "https://docs.
+    "documentation_url": "https://docs.mutltiwoven.com",
     "github_issue_label": "source-http-model",
     "icon": "icon.svg",
     "license": "MIT",
data/lib/multiwoven/integrations/source/http_model/config/spec.json CHANGED
@@ -19,10 +19,17 @@
         "title": "URL",
         "order": 1
       },
+      "is_stream": {
+        "type": "boolean",
+        "title": "Streaming Enabled",
+        "description": "Enables data streaming for such as chat, when supported by the model. When true, messages and model data are processed in chunks for immediate delivery, enhancing responsiveness. Default is false, processing only after the entire response is received.",
+        "default": false,
+        "order": 2
+      },
       "headers": {
         "title": "HTTP Headers",
         "description": "Custom headers to include in the HTTP request. Useful for authentication, content type specifications, and other request metadata.",
-        "order": 2,
+        "order": 3,
         "additionalProperties": {
           "type": "string"
         },
@@ -42,21 +49,21 @@
             "order": 0
           }
         },
-        "order": 3
+        "order": 4
      },
       "request_format": {
         "title": "Request Format",
         "description": "Sample Request Format",
         "type": "string",
         "x-request-format": true,
-        "order": 4
+        "order": 5
       },
       "response_format": {
         "title": "Response Format",
         "description": "Sample Response Format",
         "type": "string",
         "x-response-format": true,
-        "order": 5
+        "order": 6
       }
     }
 }
data/lib/multiwoven/integrations/source/open_ai/client.rb ADDED
@@ -0,0 +1,117 @@
+# frozen_string_literal: true
+
+module Multiwoven::Integrations::Source
+  module OpenAI
+    include Multiwoven::Integrations::Core
+    class Client < SourceConnector
+      def check_connection(connection_config)
+        connection_config = prepare_config(connection_config)
+        response = send_request(
+          url: OPEN_AI_URL,
+          http_method: HTTP_POST,
+          payload: JSON.parse(connection_config[:request_format]),
+          headers: auth_headers(connection_config[:api_key]),
+          config: connection_config[:config]
+        )
+        success?(response) ? success_status : failure_status(nil)
+      rescue StandardError => e
+        handle_exception(e, { context: "OPEN AI:CHECK_CONNECTION:EXCEPTION", type: "error" })
+        failure_status(e)
+      end
+
+      def discover(_connection_config = nil)
+        catalog_json = read_json(CATALOG_SPEC_PATH)
+        catalog = build_catalog(catalog_json)
+        catalog.to_multiwoven_message
+      rescue StandardError => e
+        handle_exception(e, { context: "OPEN AI:DISCOVER:EXCEPTION", type: "error" })
+      end
+
+      def read(sync_config)
+        connection_config = prepare_config(sync_config.source.connection_specification)
+        stream = connection_config[:is_stream] ||= false
+        # The server checks the ConnectorQueryType.
+        # If it's "ai_ml," the server calculates the payload and passes it as a query in the sync config model protocol.
+        # This query is then sent to the AI/ML model.
+        payload = parse_json(sync_config.model.query)
+
+        if stream
+          run_model_stream(connection_config, payload) { |message| yield message if block_given? }
+        else
+          run_model(connection_config, payload)
+        end
+      rescue StandardError => e
+        handle_exception(e, { context: "OPEN AI:READ:EXCEPTION", type: "error" })
+      end
+
+      private
+
+      def prepare_config(config)
+        config.with_indifferent_access.tap do |conf|
+          conf[:config][:timeout] ||= 30
+        end
+      end
+
+      def parse_json(json_string)
+        JSON.parse(json_string)
+      rescue JSON::ParserError => e
+        handle_exception(e, { context: "OPEN AI:PARSE_JSON:EXCEPTION", type: "error" })
+        {}
+      end
+
+      def run_model(connection_config, payload)
+        response = send_request(
+          url: OPEN_AI_URL,
+          http_method: HTTP_POST,
+          payload: payload,
+          headers: auth_headers(connection_config[:api_key]),
+          config: connection_config[:config]
+        )
+        process_response(response)
+      rescue StandardError => e
+        handle_exception(e, { context: "OPEN AI:RUN_MODEL:EXCEPTION", type: "error" })
+      end
+
+      def run_model_stream(connection_config, payload)
+        send_streaming_request(
+          url: OPEN_AI_URL,
+          http_method: HTTP_POST,
+          payload: payload,
+          headers: auth_headers(connection_config[:api_key]),
+          config: connection_config[:config]
+        ) do |chunk|
+          process_streaming_response(chunk) { |message| yield message if block_given? }
+        end
+      rescue StandardError => e
+        handle_exception(e, { context: "OPEN AI:RUN_STREAM_MODEL:EXCEPTION", type: "error" })
+      end
+
+      def process_response(response)
+        if success?(response)
+          data = JSON.parse(response.body)
+          [RecordMessage.new(data: data, emitted_at: Time.now.to_i).to_multiwoven_message]
+        else
+          create_log_message("OPEN AI:RUN_MODEL", "error", "request failed: #{response.body}")
+        end
+      rescue StandardError => e
+        handle_exception(e, { context: "OPEN AI:PROCESS_RESPONSE:EXCEPTION", type: "error" })
+      end
+
+      def extract_data_entries(chunk)
+        chunk.split(/^data: /).map(&:strip).reject(&:empty?)
+      end
+
+      def process_streaming_response(chunk)
+        data_entries = extract_data_entries(chunk)
+        data_entries.each do |entry|
+          next if entry == "[DONE]"
+
+          data = parse_json(entry)
+          yield [RecordMessage.new(data: data, emitted_at: Time.now.to_i).to_multiwoven_message] if block_given?
+        rescue StandardError => e
+          handle_exception(e, { context: "OPEN AI:PROCESS_STREAMING_RESPONSE:EXCEPTION", type: "error", entry: entry })
+        end
+      end
+    end
+  end
+end
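The streaming branch assumes OpenAI-style server-sent events: a chunk may carry several "data:" entries plus a closing "[DONE]" marker. A small worked example of what extract_data_entries and the loop in process_streaming_response do to one such chunk (the chunk text itself is illustrative, not captured output):

```ruby
require "json"

chunk = <<~SSE
  data: {"choices":[{"delta":{"content":"Hel"}}]}

  data: {"choices":[{"delta":{"content":"lo"}}]}

  data: [DONE]
SSE

# Same splitting logic as extract_data_entries above.
entries = chunk.split(/^data: /).map(&:strip).reject(&:empty?)
# => two JSON strings followed by "[DONE]"

entries.each do |entry|
  next if entry == "[DONE]"

  data = JSON.parse(entry)
  puts data.dig("choices", 0, "delta", "content") # prints "Hel", then "lo"
end
```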
data/lib/multiwoven/integrations/source/open_ai/config/meta.json ADDED
@@ -0,0 +1,15 @@
+{
+  "data": {
+    "name": "OpenAI",
+    "title": "OpenAI Model Endpoint",
+    "connector_type": "source",
+    "category": "AI Model",
+    "documentation_url": "https://docs.mutltiwoven.com",
+    "github_issue_label": "source-open-ai-model",
+    "icon": "icon.svg",
+    "license": "MIT",
+    "release_stage": "alpha",
+    "support_level": "community",
+    "tags": ["language:ruby", "multiwoven"]
+  }
+}
data/lib/multiwoven/integrations/source/open_ai/config/spec.json ADDED
@@ -0,0 +1,54 @@
+{
+  "documentation_url": "https://docs.multiwoven.com/integrations/source/open-ai-endpoint",
+  "stream_type": "user_defined",
+  "connector_query_type": "ai_ml",
+  "connection_specification": {
+    "$schema": "http://json-schema.org/draft-07/schema#",
+    "title": "Open AI Endpoint",
+    "type": "object",
+    "required": ["api_key", "request_format", "response_format"],
+    "properties": {
+      "api_key": {
+        "type": "string",
+        "multiwoven_secret": true,
+        "title": "API Key",
+        "order": 0
+      },
+      "is_stream": {
+        "type": "boolean",
+        "title": "Streaming Enabled",
+        "description": "Enables data streaming for such as chat, when supported by the model. When true, messages and model data are processed in chunks for immediate delivery, enhancing responsiveness. Default is false, processing only after the entire response is received.",
+        "default": false,
+        "order": 1
+      },
+      "config": {
+        "title": "",
+        "type": "object",
+        "properties": {
+          "timeout": {
+            "type": "string",
+            "default": "30",
+            "title": "HTTP Timeout",
+            "description": "The maximum time, in seconds, to wait for a response from the server before the request is canceled.",
+            "order": 0
+          }
+        },
+        "order": 2
+      },
+      "request_format": {
+        "title": "Request Format",
+        "description": "Sample Request Format",
+        "type": "string",
+        "x-request-format": true,
+        "order": 3
+      },
+      "response_format": {
+        "title": "Response Format",
+        "description": "Sample Response Format",
+        "type": "string",
+        "x-response-format": true,
+        "order": 4
+      }
+    }
+  }
+}
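Read alongside the client above, the spec translates into a connection specification roughly like the sketch below; the api_key, model name, and request/response samples are placeholder assumptions, not values shipped with the gem. With is_stream left at its default of false, the connector buffers the whole response and emits a single record instead of streaming chunks.

```ruby
# Hypothetical connection specification matching the schema above.
connection_specification = {
  "api_key" => "sk-...",            # stored as a multiwoven_secret
  "is_stream" => true,              # routes reads through run_model_stream
  "config" => { "timeout" => "30" },
  "request_format" => '{"model":"gpt-4o-mini","messages":[{"role":"user","content":"Hello"}]}',
  "response_format" => '{"choices":[{"message":{"content":"..."}}]}'
}
```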
data/lib/multiwoven/integrations/source/open_ai/icon.svg ADDED
@@ -0,0 +1 @@
<svg viewBox="0 0 1180 320" xmlns="http://www.w3.org/2000/svg"><path d="m367.44 153.84c0 52.32 33.6 88.8 80.16 88.8s80.16-36.48 80.16-88.8-33.6-88.8-80.16-88.8-80.16 36.48-80.16 88.8zm129.6 0c0 37.44-20.4 61.68-49.44 61.68s-49.44-24.24-49.44-61.68 20.4-61.68 49.44-61.68 49.44 24.24 49.44 61.68z"/><path d="m614.27 242.64c35.28 0 55.44-29.76 55.44-65.52s-20.16-65.52-55.44-65.52c-16.32 0-28.32 6.48-36.24 15.84v-13.44h-28.8v169.2h28.8v-56.4c7.92 9.36 19.92 15.84 36.24 15.84zm-36.96-69.12c0-23.76 13.44-36.72 31.2-36.72 20.88 0 32.16 16.32 32.16 40.32s-11.28 40.32-32.16 40.32c-17.76 0-31.2-13.2-31.2-36.48z"/><path d="m747.65 242.64c25.2 0 45.12-13.2 54-35.28l-24.72-9.36c-3.84 12.96-15.12 20.16-29.28 20.16-18.48 0-31.44-13.2-33.6-34.8h88.32v-9.6c0-34.56-19.44-62.16-55.92-62.16s-60 28.56-60 65.52c0 38.88 25.2 65.52 61.2 65.52zm-1.44-106.8c18.24 0 26.88 12 27.12 25.92h-57.84c4.32-17.04 15.84-25.92 30.72-25.92z"/><path d="m823.98 240h28.8v-73.92c0-18 13.2-27.6 26.16-27.6 15.84 0 22.08 11.28 22.08 26.88v74.64h28.8v-83.04c0-27.12-15.84-45.36-42.24-45.36-16.32 0-27.6 7.44-34.8 15.84v-13.44h-28.8z"/><path d="m1014.17 67.68-65.28 172.32h30.48l14.64-39.36h74.4l14.88 39.36h30.96l-65.28-172.32zm16.8 34.08 27.36 72h-54.24z"/><path d="m1163.69 68.18h-30.72v172.32h30.72z"/><path d="m297.06 130.97c7.26-21.79 4.76-45.66-6.85-65.48-17.46-30.4-52.56-46.04-86.84-38.68-15.25-17.18-37.16-26.95-60.13-26.81-35.04-.08-66.13 22.48-76.91 55.82-22.51 4.61-41.94 18.7-53.31 38.67-17.59 30.32-13.58 68.54 9.92 94.54-7.26 21.79-4.76 45.66 6.85 65.48 17.46 30.4 52.56 46.04 86.84 38.68 15.24 17.18 37.16 26.95 60.13 26.8 35.06.09 66.16-22.49 76.94-55.86 22.51-4.61 41.94-18.7 53.31-38.67 17.57-30.32 13.55-68.51-9.94-94.51zm-120.28 168.11c-14.03.02-27.62-4.89-38.39-13.88.49-.26 1.34-.73 1.89-1.07l63.72-36.8c3.26-1.85 5.26-5.32 5.24-9.07v-89.83l26.93 15.55c.29.14.48.42.52.74v74.39c-.04 33.08-26.83 59.9-59.91 59.97zm-128.84-55.03c-7.03-12.14-9.56-26.37-7.15-40.18.47.28 1.3.79 1.89 1.13l63.72 36.8c3.23 1.89 7.23 1.89 10.47 0l77.79-44.92v31.1c.02.32-.13.63-.38.83l-64.41 37.19c-28.69 16.52-65.33 6.7-81.92-21.95zm-16.77-139.09c7-12.16 18.05-21.46 31.21-26.29 0 .55-.03 1.52-.03 2.2v73.61c-.02 3.74 1.98 7.21 5.23 9.06l77.79 44.91-26.93 15.55c-.27.18-.61.21-.91.08l-64.42-37.22c-28.63-16.58-38.45-53.21-21.95-81.89zm221.26 51.49-77.79-44.92 26.93-15.54c.27-.18.61-.21.91-.08l64.42 37.19c28.68 16.57 38.51 53.26 21.94 81.94-7.01 12.14-18.05 21.44-31.2 26.28v-75.81c.03-3.74-1.96-7.2-5.2-9.06zm26.8-40.34c-.47-.29-1.3-.79-1.89-1.13l-63.72-36.8c-3.23-1.89-7.23-1.89-10.47 0l-77.79 44.92v-31.1c-.02-.32.13-.63.38-.83l64.41-37.16c28.69-16.55 65.37-6.7 81.91 22 6.99 12.12 9.52 26.31 7.15 40.1zm-168.51 55.43-26.94-15.55c-.29-.14-.48-.42-.52-.74v-74.39c.02-33.12 26.89-59.96 60.01-59.94 14.01 0 27.57 4.92 38.34 13.88-.49.26-1.33.73-1.89 1.07l-63.72 36.8c-3.26 1.85-5.26 5.31-5.24 9.06l-.04 89.79zm14.63-31.54 34.65-20.01 34.65 20v40.01l-34.65 20-34.65-20z"/></svg>
data/lib/multiwoven/integrations.rb CHANGED
@@ -52,7 +52,9 @@ require_relative "integrations/protocol/protocol"
 require_relative "integrations/core/base_connector"
 require_relative "integrations/core/source_connector"
 require_relative "integrations/core/destination_connector"
+require_relative "integrations/core/http_helper"
 require_relative "integrations/core/http_client"
+require_relative "integrations/core/streaming_http_client"
 require_relative "integrations/core/query_builder"
 
 # Source
@@ -71,6 +73,7 @@ require_relative "integrations/source/databrics_model/client"
 require_relative "integrations/source/aws_sagemaker_model/client"
 require_relative "integrations/source/google_vertex_model/client"
 require_relative "integrations/source/http_model/client"
+require_relative "integrations/source/open_ai/client"
 
 # Destination
 require_relative "integrations/destination/klaviyo/client"
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: multiwoven-integrations
 version: !ruby/object:Gem::Version
-  version: 0.15.10
+  version: 0.16.0
 platform: ruby
 authors:
 - Subin T P
-autorequire:
+autorequire:
 bindir: exe
 cert_chain: []
-date: 2024-12-
+date: 2024-12-19 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: activesupport
@@ -580,9 +580,11 @@ files:
 - lib/multiwoven/integrations/core/destination_connector.rb
 - lib/multiwoven/integrations/core/fullrefresher.rb
 - lib/multiwoven/integrations/core/http_client.rb
+- lib/multiwoven/integrations/core/http_helper.rb
 - lib/multiwoven/integrations/core/query_builder.rb
 - lib/multiwoven/integrations/core/rate_limiter.rb
 - lib/multiwoven/integrations/core/source_connector.rb
+- lib/multiwoven/integrations/core/streaming_http_client.rb
 - lib/multiwoven/integrations/core/utils.rb
 - lib/multiwoven/integrations/destination/airtable/client.rb
 - lib/multiwoven/integrations/destination/airtable/config/catalog.json
@@ -733,6 +735,11 @@ files:
 - lib/multiwoven/integrations/source/maria_db/config/meta.json
 - lib/multiwoven/integrations/source/maria_db/config/spec.json
 - lib/multiwoven/integrations/source/maria_db/icon.svg
+- lib/multiwoven/integrations/source/open_ai/client.rb
+- lib/multiwoven/integrations/source/open_ai/config/catalog.json
+- lib/multiwoven/integrations/source/open_ai/config/meta.json
+- lib/multiwoven/integrations/source/open_ai/config/spec.json
+- lib/multiwoven/integrations/source/open_ai/icon.svg
 - lib/multiwoven/integrations/source/oracle_db/client.rb
 - lib/multiwoven/integrations/source/oracle_db/config/meta.json
 - lib/multiwoven/integrations/source/oracle_db/config/spec.json
@@ -765,7 +772,7 @@ metadata:
   homepage_uri: https://www.multiwoven.com/
   source_code_uri: https://github.com/Multiwoven/multiwoven/tree/main/integrations
   changelog_uri: https://github.com/Multiwoven/multiwoven/blob/main/integrations/CHANGELOG.md
-post_install_message:
+post_install_message:
 rdoc_options: []
 require_paths:
 - lib
@@ -781,7 +788,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     version: '0'
 requirements: []
 rubygems_version: 3.4.1
-signing_key:
+signing_key:
 specification_version: 4
 summary: Integration suite for open source reverse ETL platform
 test_files: []