@motiadev/core 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +8 -0
- package/dist/index.js +24 -0
- package/dist/jest.config.d.ts +6 -0
- package/dist/src/config.types.d.ts +16 -0
- package/dist/src/config.types.js +2 -0
- package/dist/src/event-manager.d.ts +2 -0
- package/dist/src/event-manager.js +22 -0
- package/dist/src/flows-endpoint.d.ts +25 -0
- package/dist/src/flows-endpoint.js +81 -0
- package/dist/src/get-step-language.d.ts +1 -0
- package/dist/src/get-step-language.js +27 -0
- package/dist/src/guards.d.ts +4 -0
- package/dist/src/guards.js +9 -0
- package/dist/src/logger.d.ts +23 -0
- package/dist/src/logger.js +76 -0
- package/dist/src/node/logger.d.ts +8 -0
- package/dist/src/node/logger.js +23 -0
- package/dist/src/node/node-runner.d.ts +1 -0
- package/dist/src/node/node-runner.js +56 -0
- package/dist/src/node/rpc-state-manager.d.ts +10 -0
- package/dist/src/node/rpc-state-manager.js +21 -0
- package/dist/src/node/rpc.d.ts +7 -0
- package/dist/src/node/rpc.js +32 -0
- package/dist/src/python/get-config.py +42 -0
- package/dist/src/python/get-python-config.d.ts +2 -0
- package/dist/src/python/get-python-config.js +34 -0
- package/dist/src/python/logger.py +42 -0
- package/dist/src/python/python-runner.py +73 -0
- package/dist/src/python/rpc.py +80 -0
- package/dist/src/python/rpc_state_manager.py +18 -0
- package/dist/src/ruby/get-ruby-config.d.ts +2 -0
- package/dist/src/ruby/get-ruby-config.js +34 -0
- package/dist/src/ruby/get_config.rb +78 -0
- package/dist/src/ruby/logger.rb +55 -0
- package/dist/src/ruby/ruby_runner.rb +80 -0
- package/dist/src/ruby/state_adapter.rb +62 -0
- package/dist/src/server.d.ts +18 -0
- package/dist/src/server.js +73 -0
- package/dist/src/state/adapters/default-state-adapter.d.ts +17 -0
- package/dist/src/state/adapters/default-state-adapter.js +102 -0
- package/dist/src/state/adapters/redis-state-adapter.d.ts +19 -0
- package/dist/src/state/adapters/redis-state-adapter.js +44 -0
- package/dist/src/state/create-state-adapter.d.ts +8 -0
- package/dist/src/state/create-state-adapter.js +16 -0
- package/dist/src/state/state-adapter.d.ts +10 -0
- package/dist/src/state/state-adapter.js +2 -0
- package/dist/src/step-handler-rpc-processor.d.ts +12 -0
- package/dist/src/step-handler-rpc-processor.js +35 -0
- package/dist/src/step-handlers.d.ts +2 -0
- package/dist/src/step-handlers.js +98 -0
- package/dist/src/types.d.ts +109 -0
- package/dist/src/types.js +2 -0
- package/package.json +31 -0
- package/tsconfig.json +20 -0
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
import sys
|
|
2
|
+
import json
|
|
3
|
+
import importlib.util
|
|
4
|
+
import traceback
|
|
5
|
+
import os
|
|
6
|
+
from logger import Logger
|
|
7
|
+
from rpc import RpcSender
|
|
8
|
+
from rpc_state_manager import RpcStateManager
|
|
9
|
+
|
|
10
|
+
from typing import Any
|
|
11
|
+
|
|
12
|
+
def parse_args(arg: str) -> Any:
    """Parse a JSON CLI argument into attribute-accessible namespaces.

    Args:
        arg: JSON-encoded string, or None when the CLI argument was omitted.

    Returns:
        The parsed structure with every JSON object converted to a
        SimpleNamespace (so callers can write args.traceId), or the input
        unchanged when it is not valid JSON / not a string.
    """
    from types import SimpleNamespace

    try:
        # object_hook turns each JSON object into a SimpleNamespace for
        # attribute-style access downstream.
        return json.loads(arg, object_hook=lambda d: SimpleNamespace(**d))
    except (json.JSONDecodeError, TypeError):
        # TypeError covers arg=None (json.loads requires str/bytes/bytearray),
        # which happens when the runner is invoked without a payload argument;
        # previously this crashed instead of falling through.
        print('Error parsing args:', arg)
        return arg
|
|
20
|
+
|
|
21
|
+
class Context:
    # Execution context handed to a step's handler: bundles tracing info,
    # the RPC channel back to the Node parent, state access and logging.
    def __init__(self, args: Any, file_name: str):
        self.trace_id = args.traceId
        self.flows = args.flows
        self.file_name = file_name
        # One RPC sender shared by state manager and logger.
        self.sender = RpcSender()
        self.state = RpcStateManager(self.sender)
        self.logger = Logger(self.trace_id, self.flows, self.file_name, self.sender)

    async def emit(self, event: Any):
        # Forward an event to the Node parent over the RPC channel.
        await self.sender.send('emit', event)
|
|
32
|
+
|
|
33
|
+
async def run_python_module(file_path: str, args: Any) -> None:
    """Dynamically load a step module from the steps/ directory and run its
    `handler` coroutine, then terminate the process with an exit code.

    Args:
        file_path: path of the step file, relative to <cwd>/steps.
        args: parsed CLI payload; .data is passed to the handler.

    Exits 0 on success, 1 on any failure (error printed to stderr).
    """
    try:
        # Construct path relative to steps directory
        flows_dir = os.path.join(os.getcwd(), 'steps')
        module_path = os.path.join(flows_dir, file_path)

        # Load the module dynamically
        spec = importlib.util.spec_from_file_location("dynamic_module", module_path)
        if spec is None or spec.loader is None:
            raise ImportError(f"Could not load module from {module_path}")

        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)

        # Check if the handler function exists
        if not hasattr(module, 'handler'):
            raise AttributeError(f"Function 'handler' not found in module {module_path}")

        context = Context(args, file_path)
        # Start the RPC response-reader loop before the handler runs.
        context.sender.init()

        await module.handler(args.data, context)

        # exit with 0 to indicate success
        # (SystemExit derives from BaseException, so the `except Exception`
        # below does not swallow this deliberate exit)
        sys.exit(0)
    except Exception as error:
        print('Error running Python module:', file=sys.stderr)

        traceback.print_exc(file=sys.stderr)
        sys.exit(1)
|
|
63
|
+
|
|
64
|
+
if __name__ == "__main__":
    # CLI: python python-runner.py <file-path> [json-args]
    if len(sys.argv) < 2:
        print('Usage: python pythonRunner.py <file-path> <arg>', file=sys.stderr)
        sys.exit(1)

    file_path = sys.argv[1]
    # Payload argument is optional; may be None when omitted.
    arg = sys.argv[2] if len(sys.argv) > 2 else None

    import asyncio
    asyncio.run(run_python_module(file_path, parse_args(arg)))
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
import uuid
|
|
2
|
+
import asyncio
|
|
3
|
+
import os
|
|
4
|
+
import json
|
|
5
|
+
import sys
|
|
6
|
+
from typing import Any, Dict, Tuple
|
|
7
|
+
|
|
8
|
+
# get the FD from ENV
|
|
9
|
+
NODEIPCFD = int(os.environ["NODE_CHANNEL_FD"])
|
|
10
|
+
|
|
11
|
+
class RpcSender:
    """JSON-RPC client over the Node.js IPC pipe (NODE_CHANNEL_FD).

    Requests are written as newline-delimited JSON; responses arriving on
    the same pipe are matched back to their pending futures by request id.
    """

    def __init__(self):
        # request_id -> (future resolved by the response, method, args)
        self.pending_requests: Dict[str, Tuple[asyncio.Future, str, Any]] = {}

    def send_no_wait(self, method: str, args: Any) -> None:
        """Fire-and-forget request: carries no id, so no response is expected."""
        request = {
            'type': 'rpc_request',
            'method': method,
            'args': args
        }
        # encode message as json string + newline in bytes
        bytesMessage = (json.dumps(request) + "\n").encode('utf-8')
        # send message
        os.write(NODEIPCFD, bytesMessage)

    async def send(self, method: str, args: Any) -> Any:
        """Send a request and wait for the matching rpc_response.

        Returns the response's result payload (None for void responses).
        Raises RuntimeError when the peer answers with an error payload.
        """
        future = asyncio.Future()
        request_id = str(uuid.uuid4())
        self.pending_requests[request_id] = (future, method, args)

        request = {
            'type': 'rpc_request',
            'id': request_id,
            'method': method,
            'args': args
        }
        # encode message as json string + newline in bytes
        bytesMessage = (json.dumps(request) + "\n").encode('utf-8')
        # send message
        os.write(NODEIPCFD, bytesMessage)

        # Fix: await the future so callers receive the RPC result; the
        # original returned the unresolved Future object itself.
        return await future

    def init(self):
        """Start the background task that reads and dispatches responses."""
        def on_message(msg: Dict[str, Any]):
            if msg.get('type') == 'rpc_response':
                request_id = msg['id']

                if request_id in self.pending_requests:
                    future, _, _ = self.pending_requests[request_id]

                    if msg.get('error'):
                        # Fix: set_exception requires an exception instance,
                        # not a raw error payload.
                        future.set_exception(RuntimeError(str(msg['error'])))
                    elif 'result' in msg:
                        # Fix: membership test instead of truthiness, so
                        # falsy results (0, '', False) are still delivered.
                        future.set_result(msg['result'])
                    else:
                        # It's a void response
                        future.set_result(None)

                    del self.pending_requests[request_id]

        # Read messages from Node IPC file descriptor
        async def read_messages():
            loop = asyncio.get_running_loop()
            while True:
                # Fix: os.read blocks the whole event loop; run it in a
                # worker thread so pending futures can actually resolve
                # while we wait for pipe data.
                data = await loop.run_in_executor(None, os.read, NODEIPCFD, 4096)
                message = data.decode('utf-8')
                if not message:
                    continue

                # Parse messages (may be multiple due to buffering)
                for line in message.splitlines():
                    if line:
                        try:
                            msg = json.loads(line)
                            on_message(msg)
                        except json.JSONDecodeError:
                            pass

        # Start message reading loop
        asyncio.create_task(read_messages())
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
from typing import Any
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
class RpcStateManager:
    """Thin proxy that maps state operations onto RPC calls to the parent.

    Every method forwards to the sender's `send` with a `state.*` method
    name and a traceId-scoped payload.
    """

    def __init__(self, sender):
        # RPC channel used to reach the Node-side state manager.
        self.sender = sender

    async def get(self, trace_id: str, key: str) -> Any:
        payload = {'traceId': trace_id, 'key': key}
        return await self.sender.send('state.get', payload)

    async def set(self, trace_id: str, key: str, value: Any) -> None:
        payload = {'traceId': trace_id, 'key': key, 'value': value}
        await self.sender.send('state.set', payload)

    async def delete(self, trace_id: str, key: str) -> None:
        payload = {'traceId': trace_id, 'key': key}
        await self.sender.send('state.delete', payload)

    async def clear(self, trace_id: str) -> None:
        payload = {'traceId': trace_id}
        await self.sender.send('state.clear', payload)
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.getRubyConfig = void 0;
|
|
7
|
+
const child_process_1 = require("child_process");
|
|
8
|
+
const path_1 = __importDefault(require("path"));
|
|
9
|
+
const logger_1 = require("../logger");
|
|
10
|
+
// Spawn a Ruby child process that evaluates the given step file and sends
// its config object back over the Node IPC channel ('message' event).
// Resolves with the config; rejects on non-zero exit or a missing config.
const getRubyConfig = (file) => {
    const getConfig = path_1.default.join(__dirname, 'get_config.rb');
    return new Promise((resolve, reject) => {
        let config = null;
        // stdio slot 4 ('ipc') exposes NODE_CHANNEL_FD to the Ruby script,
        // which writes newline-delimited JSON on it.
        const child = (0, child_process_1.spawn)('ruby', [getConfig, file], {
            stdio: ['inherit', 'inherit', 'inherit', 'ipc'],
        });
        child.on('message', (message) => {
            logger_1.globalLogger.debug('[Ruby Config] Read config', { config: message });
            config = message;
        });
        child.on('close', (code) => {
            if (code !== 0) {
                reject(new Error(`Process exited with code ${code}`));
            }
            else if (!config) {
                // Process exited cleanly but never sent a config message.
                reject(new Error(`No config found for file ${file}`));
            }
            else {
                resolve(config);
            }
        });
    });
};
exports.getRubyConfig = getRubyConfig;
|
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
require 'json'
|
|
2
|
+
|
|
3
|
+
# Get the FD from ENV with a default fallback for testing
|
|
4
|
+
NODEIPCFD = (ENV['NODE_CHANNEL_FD'] || 1).to_i
|
|
5
|
+
|
|
6
|
+
# Write a single newline-delimited JSON message to the Node IPC channel.
# NODEIPCFD falls back to 1 (stdout) when NODE_CHANNEL_FD is unset.
def send_message(message)
  begin
    io = IO.new(NODEIPCFD, 'w')
    json_message = message.to_json + "\n"
    io.write(json_message)
    io.flush
  rescue Errno::EBADF => e
    warn "Error writing to IPC channel: #{e.message}"
    exit(1)
  ensure
    # Closing the IO also releases the underlying FD — this is one-shot.
    io.close if io && !io.closed?
  end
end

# Evaluate a step file in a fresh binding and return its `config` value as
# a symbol-keyed hash.
#
# SECURITY NOTE(review): this eval-executes arbitrary Ruby read from
# file_path; only trusted local step files should ever reach this script.
def extract_config(file_path)
  begin
    # Remove previous Config class if it exists
    Object.send(:remove_const, :Config) if Object.const_defined?(:Config)

    # Create a new binding for evaluation
    evaluation_binding = binding

    # Load and evaluate the file content in our binding
    file_content = File.read(file_path)
    evaluation_binding.eval(file_content)

    # Get the config variable from our binding
    config = evaluation_binding.eval('config')

    # Convert config instance to hash with symbol keys
    {
      type: config.type,
      name: config.name,
      subscribes: config.subscribes,
      emits: config.emits,
      input: config.input,
      flows: config.flows
    }
  rescue NameError => e
    # The step file did not define `config` (or one of its accessors).
    raise "Error accessing config: #{e.message}"
  rescue => e
    raise "Error processing config file: #{e.message}"
  end
end

# Main execution block: validate the CLI argument, extract the config and
# ship it to the parent process over IPC.
begin
  if ARGV.empty?
    warn 'Error: No file path provided'
    exit(1)
  end

  file_path = ARGV[0]

  unless File.exist?(file_path)
    warn "Error: File not found: #{file_path}"
    exit(1)
  end

  unless File.readable?(file_path)
    warn "Error: File is not readable: #{file_path}"
    exit(1)
  end

  # Extract and send config
  config = extract_config(file_path)
  send_message(config)

  exit(0)
rescue => e
  warn "Error: #{e.message}"
  exit(1)
end
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
require 'json'
|
|
2
|
+
require 'time'
|
|
3
|
+
|
|
4
|
+
# Structured logger for Ruby steps: each call prints one JSON object per
# line to stdout, which the Node parent process captures.
class CustomLogger
  def initialize(trace_id, flows, file_path)
    @trace_id = trace_id
    @flows = flows
    @file_name = File.basename(file_path)
  end

  # Emit one JSON log line with `msg` plus any extra fields from `args`.
  # NOTE(review): `level`, @trace_id, @flows and @file_name are accepted and
  # stored but never included in the emitted entry — confirm whether the
  # Node side enriches the line, or whether they should be merged in here.
  def log(level, message, args = nil)
    # Ensure message is not nested JSON or a stringified JSON object
    if message.is_a?(String) && message.strip.start_with?('{', '[')
      begin
        message = JSON.parse(message) # Parse if valid JSON
      rescue JSON::ParserError
        # Leave message as is if it's not valid JSON
      end
    end

    # Construct the base log entry
    log_entry = {
      msg: message
    }

    # Merge additional arguments if provided
    if args
      # NOTE(review): OpenStruct is referenced but 'ostruct' is not required
      # by this file — it relies on a caller having loaded it first.
      args = case args
             when OpenStruct then args.to_h
             when Hash then args
             else { data: args }
             end
      log_entry.merge!(args)
    end

    # Generate JSON output
    puts JSON.dump(log_entry)
  end

  def info(message, args = nil)
    log('info', message, args)
  end

  def error(message, args = nil)
    log('error', message, args)
  end

  def debug(message, args = nil)
    log('debug', message, args)
  end

  def warn(message, args = nil)
    log('warn', message, args)
  end
end
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
require 'json'
|
|
2
|
+
require 'io/console'
|
|
3
|
+
require 'ostruct'
|
|
4
|
+
require 'pathname'
|
|
5
|
+
require_relative 'logger'
|
|
6
|
+
require_relative 'state_adapter'
|
|
7
|
+
|
|
8
|
+
# Parse arguments as JSON or fallback to raw string
|
|
9
|
+
# Parse a JSON CLI argument into OpenStruct instances (attribute access).
#
# Returns the parsed structure, or the input unchanged when it is not valid
# JSON. Fix: JSON.parse(nil) raises TypeError (not JSON::ParserError), so
# an omitted CLI argument previously crashed here; it now falls through and
# returns nil.
def parse_args(arg)
  begin
    JSON.parse(arg, object_class: OpenStruct)
  rescue JSON::ParserError, TypeError
    puts "Error parsing args: #{arg}"
    arg
  end
end
|
|
17
|
+
|
|
18
|
+
# Get the file descriptor from ENV
|
|
19
|
+
NODE_CHANNEL_FD = ENV['NODE_CHANNEL_FD'].to_i
|
|
20
|
+
|
|
21
|
+
# Context class for managing the execution environment: exposes tracing
# info, state access and logging to the step's executor, plus `emit` to
# push events back to the Node parent.
class Context
  attr_reader :trace_id, :flows, :file_name, :state, :logger

  # Emit a message to the parent process via Node IPC
  def emit(text)
    message = (JSON.dump(text) + "\n").encode('utf-8')
    # NOTE(review): a fresh IO is opened on the channel FD for every emit
    # and never closed — confirm GC finalization does not close the FD
    # out from under later emits.
    IO.new(NODE_CHANNEL_FD, 'w').write(message)
  end

  def initialize(args, file_name)
    @trace_id = args.traceId
    @flows = args.flows
    @file_name = file_name
    # Safe-navigate into the optional stateConfig to find the manager URL;
    # create_internal_state_manager is provided by state_adapter.rb.
    @state = create_internal_state_manager(state_manager_url: args[:stateConfig]&.dig(:stateManagerUrl))
    @logger = CustomLogger.new(@trace_id, @flows, @file_name)
  end
end
|
|
39
|
+
|
|
40
|
+
# Dynamically load and execute a Ruby script: loads the step file, requires
# a top-level `executor` method, and invokes it with the parsed input data
# and a fresh Context. Exits 1 (with a stderr trace) on any failure.
def run_ruby_module(file_path, args)
  unless File.exist?(file_path)
    raise LoadError, "Could not load module from #{file_path}"
  end

  # Load the file dynamically
  load file_path

  # `load` defines top-level methods on Object, so `executor` is callable
  # here if the step file defined one.
  unless defined?(executor)
    raise NameError, "Function 'executor' not found in module #{file_path}"
  end

  context = Context.new(args, file_path)

  executor(args.data, context)
rescue => e
  $stderr.puts "Error running Ruby module: #{e.message}"
  $stderr.puts e.backtrace
  exit 1
end
|
|
61
|
+
|
|
62
|
+
# Entry point: ruby ruby_runner.rb <file-path> [json-arg]
if __FILE__ == $PROGRAM_NAME
  if ARGV.length < 1
    $stderr.puts 'Usage: ruby ruby-runner.rb <file-path> <arg>'
    exit 1
  end

  file_path = ARGV[0]
  # Payload argument is optional and may be nil.
  arg = ARGV[1] || nil

  begin
    parsed_args = parse_args(arg)
    run_ruby_module(file_path, parsed_args)
  rescue => e
    $stderr.puts "Error: #{e.message}"
    $stderr.puts e.backtrace
    exit 1
  end
end
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
require 'net/http'
require 'json'
require 'uri'
require 'logger'
# Fix: OpenStruct is used below (JSON.parse object_class) but 'ostruct' was
# never required — the first `get` call raised NameError.
require 'ostruct'

class StateManagerError < StandardError; end

LOGGER = Logger.new($stdout)

# Build a lambda(action, payload) that POSTs a state operation to the
# internal state-manager HTTP service at "#{state_manager_url}/#{action}".
# For 'get' it returns the parsed response's `data`; otherwise nil.
# Raises StateManagerError on any HTTP or transport failure.
def get_state_manager_handler(state_manager_url)
  lambda do |action, payload|
    begin
      uri = URI("#{state_manager_url}/#{action}")
      request = Net::HTTP::Post.new(uri)
      request['Content-Type'] = 'application/json'
      request['x-trace-id'] = payload[:traceId]
      # TODO: Add internal auth token for security
      # TODO: Encrypt the payload for security
      request.body = payload.to_json

      response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == 'https') do |http|
        http.request(request)
      end

      unless response.is_a?(Net::HTTPSuccess)
        error_message = "Failed posting state change: #{response.code} - #{response.message}"
        LOGGER.error(error_message)
        raise StateManagerError, error_message
      end

      if action == 'get'
        # OpenStruct gives attribute access to the parsed response body.
        result = JSON.parse(response.body, object_class: OpenStruct)
        return result.data
      end

      nil
    rescue StandardError => error
      LOGGER.error("[internal state manager] failed posting state change: #{error.message}")
      raise StateManagerError, "Failed posting state change: #{error.message}"
    end
  end
end

# Returns a hash of lambdas (:get, :set, :delete, :clear) that a Context
# uses as its state interface; each forwards to the HTTP handler above.
def create_internal_state_manager(state_manager_url:)
  handler = get_state_manager_handler(state_manager_url)

  {
    get: ->(trace_id, key) {
      result = handler.call('get', { traceId: trace_id, key: key })
      { data: result }
    },
    set: ->(trace_id, key, value) {
      handler.call('set', { traceId: trace_id, key: key, value: value })
    },
    delete: ->(trace_id, key) {
      handler.call('delete', { traceId: trace_id, key: key })
    },
    clear: ->(trace_id) {
      handler.call('clear', { traceId: trace_id })
    }
  }
end
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import { Express } from 'express';
import http from 'http';
import { Server as SocketIOServer } from 'socket.io';
import { StateAdapter } from './state/state-adapter';
import { EventManager, LockedData, Step } from './types';
/** Dependencies needed to wire up the HTTP + websocket server. */
type ServerOptions = {
    steps: Step[];
    flows: LockedData['flows'];
    eventManager: EventManager;
    state: StateAdapter;
};
/** Handles returned by createServer; the caller owns their lifecycle. */
type ServerOutput = {
    app: Express;
    server: http.Server;
    socketServer: SocketIOServer;
};
/**
 * Creates the Express app, HTTP server and Socket.IO server, registering
 * one route per API step plus the flows endpoint.
 */
export declare const createServer: (options: ServerOptions) => Promise<ServerOutput>;
export {};
|
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.createServer = void 0;
|
|
7
|
+
const body_parser_1 = __importDefault(require("body-parser"));
|
|
8
|
+
const crypto_1 = require("crypto");
|
|
9
|
+
const express_1 = __importDefault(require("express"));
|
|
10
|
+
const http_1 = __importDefault(require("http"));
|
|
11
|
+
const socket_io_1 = require("socket.io");
|
|
12
|
+
const flows_endpoint_1 = require("./flows-endpoint");
|
|
13
|
+
const guards_1 = require("./guards");
|
|
14
|
+
const logger_1 = require("./logger");
|
|
15
|
+
// Build the Express app + HTTP server + Socket.IO server, registering one
// route per API step (GET/POST only) and the flows endpoint.
const createServer = async (options) => {
    const { flows, steps, eventManager, state } = options;
    const app = (0, express_1.default)();
    const server = http_1.default.createServer(app);
    const io = new socket_io_1.Server(server);
    // Wrap a step's handler as an Express request handler with per-request
    // trace id, logger and emit function.
    const asyncHandler = (step, flows) => {
        return async (req, res) => {
            const traceId = (0, crypto_1.randomUUID)();
            const logger = new logger_1.Logger(traceId, flows, step.config.name, io);
            logger.debug('[API] Received request, processing step', { path: req.path, step });
            // require() is cached by Node, so this only loads the file once.
            const module = require(step.filePath);
            const handler = module.handler;
            const request = {
                body: req.body,
                headers: req.headers,
                pathParams: req.params,
                queryParams: req.query,
            };
            const emit = async ({ data, type }) => {
                await eventManager.emit({ data, type, traceId, flows, logger }, step.filePath);
            };
            try {
                // Handler returns { status, body, headers? } which is mapped
                // straight onto the HTTP response.
                const result = await handler(request, { emit, state, logger, traceId });
                if (result.headers) {
                    Object.entries(result.headers).forEach(([key, value]) => res.setHeader(key, value));
                }
                res.status(result.status);
                res.json(result.body);
            }
            catch (error) {
                logger.error('[API] Internal server error', { error });
                // NOTE(review): duplicates the logger call above on stdout —
                // confirm whether this console.log is intentionally kept.
                console.log(error);
                res.status(500).json({ error: 'Internal server error' });
            }
        };
    };
    app.use(body_parser_1.default.json());
    app.use(body_parser_1.default.urlencoded({ extended: true }));
    const apiSteps = steps.filter(guards_1.isApiStep);
    for (const step of apiSteps) {
        const { method, flows, path } = step.config;
        logger_1.globalLogger.debug('[API] Registering route', step.config);
        if (method === 'POST') {
            app.post(path, asyncHandler(step, flows));
        }
        else if (method === 'GET') {
            app.get(path, asyncHandler(step, flows));
        }
        else {
            // Fail fast at startup on unsupported HTTP methods.
            throw new Error(`Unsupported method: ${method}`);
        }
    }
    (0, flows_endpoint_1.flowsEndpoint)(flows, app);
    server.on('error', (error) => {
        console.error('Server error:', error);
    });
    // Note: the server is created but not listening; the caller binds it.
    return { app, server, socketServer: io };
};
exports.createServer = createServer;
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import { StateAdapter } from '../state-adapter';
/** Configuration for the file-backed state adapter. */
export type FileAdapterConfig = {
    filePath: string;
};
/**
 * StateAdapter implementation that persists per-trace key/value state to a
 * file on disk at the configured filePath.
 */
export declare class FileStateAdapter implements StateAdapter {
    private filePath;
    constructor(config: FileAdapterConfig);
    init(): Promise<void>;
    get(traceId: string, key: string): Promise<any>;
    set(traceId: string, key: string, value: any): Promise<void>;
    delete(traceId: string, key: string): Promise<void>;
    clear(traceId: string): Promise<void>;
    cleanup(): Promise<void>;
    private _makeKey;
    private _readFile;
    private _writeFile;
}
|