igniter 0.3.1 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. checksums.yaml +4 -4
  2. data/docs/DISTRIBUTED_CONTRACTS_V1.md +493 -0
  3. data/examples/distributed_workflow.rb +52 -0
  4. data/lib/igniter/compiler/compiled_graph.rb +12 -0
  5. data/lib/igniter/compiler/validation_pipeline.rb +3 -1
  6. data/lib/igniter/compiler/validators/await_validator.rb +53 -0
  7. data/lib/igniter/compiler/validators/dependencies_validator.rb +41 -1
  8. data/lib/igniter/compiler/validators/remote_validator.rb +58 -0
  9. data/lib/igniter/compiler.rb +2 -0
  10. data/lib/igniter/contract.rb +59 -8
  11. data/lib/igniter/dsl/contract_builder.rb +42 -4
  12. data/lib/igniter/errors.rb +6 -1
  13. data/lib/igniter/integrations/llm/config.rb +69 -0
  14. data/lib/igniter/integrations/llm/context.rb +74 -0
  15. data/lib/igniter/integrations/llm/executor.rb +159 -0
  16. data/lib/igniter/integrations/llm/providers/anthropic.rb +148 -0
  17. data/lib/igniter/integrations/llm/providers/base.rb +33 -0
  18. data/lib/igniter/integrations/llm/providers/ollama.rb +137 -0
  19. data/lib/igniter/integrations/llm/providers/openai.rb +153 -0
  20. data/lib/igniter/integrations/llm.rb +59 -0
  21. data/lib/igniter/integrations/rails/cable_adapter.rb +49 -0
  22. data/lib/igniter/integrations/rails/contract_job.rb +76 -0
  23. data/lib/igniter/integrations/rails/generators/contract/contract_generator.rb +22 -0
  24. data/lib/igniter/integrations/rails/generators/install/install_generator.rb +33 -0
  25. data/lib/igniter/integrations/rails/railtie.rb +25 -0
  26. data/lib/igniter/integrations/rails/webhook_concern.rb +49 -0
  27. data/lib/igniter/integrations/rails.rb +12 -0
  28. data/lib/igniter/model/await_node.rb +21 -0
  29. data/lib/igniter/model/remote_node.rb +26 -0
  30. data/lib/igniter/model.rb +2 -0
  31. data/lib/igniter/runtime/execution.rb +2 -2
  32. data/lib/igniter/runtime/input_validator.rb +5 -3
  33. data/lib/igniter/runtime/resolver.rb +43 -1
  34. data/lib/igniter/runtime/stores/active_record_store.rb +13 -1
  35. data/lib/igniter/runtime/stores/file_store.rb +50 -2
  36. data/lib/igniter/runtime/stores/memory_store.rb +55 -2
  37. data/lib/igniter/runtime/stores/redis_store.rb +13 -1
  38. data/lib/igniter/server/client.rb +123 -0
  39. data/lib/igniter/server/config.rb +27 -0
  40. data/lib/igniter/server/handlers/base.rb +105 -0
  41. data/lib/igniter/server/handlers/contracts_handler.rb +15 -0
  42. data/lib/igniter/server/handlers/event_handler.rb +28 -0
  43. data/lib/igniter/server/handlers/execute_handler.rb +37 -0
  44. data/lib/igniter/server/handlers/health_handler.rb +32 -0
  45. data/lib/igniter/server/handlers/status_handler.rb +27 -0
  46. data/lib/igniter/server/http_server.rb +109 -0
  47. data/lib/igniter/server/rack_app.rb +35 -0
  48. data/lib/igniter/server/registry.rb +56 -0
  49. data/lib/igniter/server/router.rb +75 -0
  50. data/lib/igniter/server.rb +67 -0
  51. data/lib/igniter/version.rb +1 -1
  52. data/lib/igniter.rb +4 -0
  53. metadata +36 -2
@@ -0,0 +1,148 @@
1
+ # frozen_string_literal: true
2
+
3
+ require "net/http"
4
+ require "json"
5
+ require "uri"
6
+
7
+ module Igniter
8
+ module LLM
9
+ module Providers
10
+ # Anthropic Claude provider.
11
+ # Requires ANTHROPIC_API_KEY environment variable or explicit api_key:.
12
+ #
13
+ # API docs: https://docs.anthropic.com/en/api/messages
14
+ #
15
+ # Key differences from OpenAI-compatible providers:
16
+ # - system prompt is a top-level field, not a message
17
+ # - response content is an array of typed blocks (text, tool_use)
18
+ # - tool definitions use input_schema instead of parameters
19
+ class Anthropic < Base # rubocop:disable Metrics/ClassLength
20
+ ANTHROPIC_VERSION = "2023-06-01"
21
+ API_BASE = "https://api.anthropic.com"
22
+
23
+ def initialize(api_key: ENV["ANTHROPIC_API_KEY"], base_url: API_BASE, timeout: 120)
24
+ super()
25
+ @api_key = api_key
26
+ @base_url = base_url.chomp("/")
27
+ @timeout = timeout
28
+ end
29
+
30
+ # Send a chat completion request.
31
+ # Extracts any system message from the messages array automatically.
32
+ def chat(messages:, model:, tools: [], **options) # rubocop:disable Metrics/MethodLength,Metrics/AbcSize
33
+ validate_api_key!
34
+
35
+ system_content, chat_messages = extract_system(messages)
36
+
37
+ body = {
38
+ model: model,
39
+ max_tokens: options.delete(:max_tokens) || 4096,
40
+ messages: chat_messages
41
+ }
42
+ body[:system] = system_content if system_content
43
+ body[:tools] = normalize_tools(tools) if tools.any?
44
+ body[:temperature] = options[:temperature] if options[:temperature]
45
+ body[:top_p] = options[:top_p] if options[:top_p]
46
+
47
+ response = post("/v1/messages", body)
48
+ parse_response(response)
49
+ end
50
+
51
+ private
52
+
53
+ def extract_system(messages)
54
+ system_msg = messages.find { |m| (m[:role] || m["role"]).to_s == "system" }
55
+ other = messages.reject { |m| (m[:role] || m["role"]).to_s == "system" }
56
+ system_content = system_msg ? (system_msg[:content] || system_msg["content"]) : nil
57
+ [system_content, normalize_messages(other)]
58
+ end
59
+
60
+ def parse_response(response) # rubocop:disable Metrics/MethodLength,Metrics/AbcSize,Metrics/CyclomaticComplexity,Metrics/PerceivedComplexity
61
+ content_blocks = response.fetch("content", [])
62
+
63
+ text_content = content_blocks
64
+ .select { |b| b["type"] == "text" }
65
+ .map { |b| b["text"] }
66
+ .join
67
+
68
+ tool_calls = content_blocks
69
+ .select { |b| b["type"] == "tool_use" }
70
+ .map do |b|
71
+ { name: b["name"].to_s, arguments: (b["input"] || {}).transform_keys(&:to_sym) }
72
+ end
73
+
74
+ usage = response.fetch("usage", {})
75
+ record_usage(
76
+ prompt_tokens: usage["input_tokens"] || 0,
77
+ completion_tokens: usage["output_tokens"] || 0
78
+ )
79
+
80
+ { role: :assistant, content: text_content, tool_calls: tool_calls }
81
+ end
82
+
83
+ def normalize_messages(messages)
84
+ messages.map do |m|
85
+ { "role" => (m[:role] || m["role"]).to_s, "content" => (m[:content] || m["content"]).to_s }
86
+ end
87
+ end
88
+
89
+ def normalize_tools(tools)
90
+ tools.map do |tool|
91
+ {
92
+ "name" => tool[:name].to_s,
93
+ "description" => tool[:description].to_s,
94
+ "input_schema" => tool.fetch(:parameters) { { "type" => "object", "properties" => {} } }
95
+ }
96
+ end
97
+ end
98
+
99
+ def post(path, body) # rubocop:disable Metrics/MethodLength,Metrics/AbcSize
100
+ uri = URI.parse("#{@base_url}#{path}")
101
+ http = Net::HTTP.new(uri.host, uri.port)
102
+ http.use_ssl = uri.scheme == "https"
103
+ http.read_timeout = @timeout
104
+ http.open_timeout = 10
105
+
106
+ request = Net::HTTP::Post.new(uri.path, headers)
107
+ request.body = JSON.generate(body)
108
+
109
+ response = http.request(request)
110
+ handle_response(response)
111
+ rescue Errno::ECONNREFUSED, Errno::EADDRNOTAVAIL, SocketError, Net::OpenTimeout => e
112
+ raise Igniter::LLM::ProviderError, "Cannot connect to Anthropic API: #{e.message}"
113
+ end
114
+
115
+ def headers
116
+ {
117
+ "Content-Type" => "application/json",
118
+ "x-api-key" => @api_key.to_s,
119
+ "anthropic-version" => ANTHROPIC_VERSION
120
+ }
121
+ end
122
+
123
+ def handle_response(response) # rubocop:disable Metrics/MethodLength
124
+ unless response.is_a?(Net::HTTPSuccess)
125
+ body = begin
126
+ JSON.parse(response.body)
127
+ rescue StandardError
128
+ {}
129
+ end
130
+ error_msg = body.dig("error", "message") || response.body.to_s.slice(0, 200)
131
+ raise Igniter::LLM::ProviderError, "Anthropic API error #{response.code}: #{error_msg}"
132
+ end
133
+
134
+ JSON.parse(response.body)
135
+ rescue JSON::ParserError => e
136
+ raise Igniter::LLM::ProviderError, "Anthropic returned invalid JSON: #{e.message}"
137
+ end
138
+
139
+ def validate_api_key!
140
+ return if @api_key && !@api_key.empty?
141
+
142
+ raise Igniter::LLM::ConfigurationError,
143
+ "Anthropic API key not configured. Set ANTHROPIC_API_KEY or pass api_key: to the provider."
144
+ end
145
+ end
146
+ end
147
+ end
148
+ end
@@ -0,0 +1,33 @@
# frozen_string_literal: true

module Igniter
  module LLM
    module Providers
      # Abstract superclass for LLM providers.
      #
      # Subclasses implement #chat; #complete is a convenience wrapper that
      # turns a single prompt (plus optional system prompt) into a chat call
      # and returns only the assistant's text content.
      class Base
        # Token usage recorded by the most recent request, or nil before the
        # first request. Frozen hash with :prompt_tokens, :completion_tokens
        # and :total_tokens.
        attr_reader :last_usage

        # Abstract: send a chat request and return a normalized response hash.
        def chat(messages:, model:, tools: [], **options)
          raise NotImplementedError, "#{self.class}#chat must be implemented"
        end

        # One-shot completion helper built on top of #chat.
        def complete(prompt:, model:, system: nil, **options)
          conversation = []
          conversation.push(role: "system", content: system) if system
          conversation.push(role: "user", content: prompt)
          chat(messages: conversation, model: model, **options)[:content]
        end

        private

        # Store the latest request's token counters on the instance.
        def record_usage(prompt_tokens: 0, completion_tokens: 0)
          @last_usage = {
            prompt_tokens: prompt_tokens,
            completion_tokens: completion_tokens,
            total_tokens: prompt_tokens + completion_tokens
          }.freeze
        end
      end
    end
  end
end
@@ -0,0 +1,137 @@
# frozen_string_literal: true

require "net/http"
require "json"
require "uri"

module Igniter
  module LLM
    module Providers
      # Ollama provider — calls the local Ollama REST API.
      # Requires Ollama to be running: https://ollama.com
      #
      # Ollama API docs: https://github.com/ollama/ollama/blob/main/docs/api.md
      class Ollama < Base
        # @param base_url [String] Ollama server root; trailing slash stripped
        # @param timeout [Integer] read timeout in seconds
        def initialize(base_url: "http://localhost:11434", timeout: 120)
          super()
          @base_url = base_url.chomp("/")
          @timeout = timeout
        end

        # Send a chat completion request.
        # Returns: { role: "assistant", content: "...", tool_calls: [...] }
        def chat(messages:, model:, tools: [], **options) # rubocop:disable Metrics/MethodLength
          body = {
            model: model,
            messages: normalize_messages(messages),
            stream: false,
            options: build_options(options)
          }.compact # drops options: nil when no generation options were given

          body[:tools] = normalize_tools(tools) if tools.any?

          response = post("/api/chat", body)

          message = response.fetch("message", {})
          record_usage(
            prompt_tokens: response["prompt_eval_count"] || 0,
            completion_tokens: response["eval_count"] || 0
          )

          {
            role: message.fetch("role", "assistant").to_sym,
            content: message.fetch("content", ""),
            tool_calls: parse_tool_calls(message["tool_calls"])
          }
        end

        # List the names of locally available models.
        def models
          get("/api/tags").fetch("models", []).map { |m| m["name"] }
        end

        private

        # Build an HTTP client with SSL (for https base URLs) and both open
        # and read timeouts configured. Previously #post never enabled SSL
        # and #get had no read timeout at all.
        def http_client(uri)
          http = Net::HTTP.new(uri.host, uri.port)
          http.use_ssl = uri.scheme == "https"
          http.read_timeout = @timeout
          http.open_timeout = 10
          http
        end

        # POST a JSON body; connection failures become ProviderError.
        def post(path, body)
          uri = URI.parse("#{@base_url}#{path}")
          request = Net::HTTP::Post.new(uri.path, { "Content-Type" => "application/json" })
          request.body = JSON.generate(body)

          response = http_client(uri).request(request)
          handle_response(response)
        rescue Errno::ECONNREFUSED, Errno::EADDRNOTAVAIL, SocketError, Net::OpenTimeout => e
          raise Igniter::LLM::ProviderError, "Cannot connect to Ollama at #{@base_url}: #{e.message}"
        end

        # GET now shares the client setup and the same connection-error
        # mapping as POST, so #models raises ProviderError instead of a raw
        # Errno::ECONNREFUSED when the server is down.
        def get(path)
          uri = URI.parse("#{@base_url}#{path}")
          response = http_client(uri).get(uri.path)
          handle_response(response)
        rescue Errno::ECONNREFUSED, Errno::EADDRNOTAVAIL, SocketError, Net::OpenTimeout => e
          raise Igniter::LLM::ProviderError, "Cannot connect to Ollama at #{@base_url}: #{e.message}"
        end

        # Parse a successful body, or raise with a truncated error body.
        def handle_response(response)
          unless response.is_a?(Net::HTTPSuccess)
            raise Igniter::LLM::ProviderError,
                  "Ollama API error #{response.code}: #{response.body.to_s.slice(0, 200)}"
          end

          JSON.parse(response.body)
        rescue JSON::ParserError => e
          raise Igniter::LLM::ProviderError, "Ollama returned invalid JSON: #{e.message}"
        end

        # Coerce messages to string-keyed role/content hashes.
        def normalize_messages(messages)
          messages.map do |msg|
            { "role" => msg[:role].to_s, "content" => msg[:content].to_s }
          end
        end

        # Convert tool definitions to Ollama's OpenAI-style function schema.
        def normalize_tools(tools)
          tools.map do |tool|
            {
              "type" => "function",
              "function" => {
                "name" => tool[:name].to_s,
                "description" => tool[:description].to_s,
                "parameters" => tool.fetch(:parameters, { type: "object", properties: {} })
              }
            }
          end
        end

        # Normalize raw tool_calls into [{ name:, arguments: }].
        def parse_tool_calls(raw)
          return [] unless raw.is_a?(Array)

          raw.map do |tc|
            fn = tc["function"] || tc
            {
              name: fn["name"].to_s,
              arguments: parse_arguments(fn["arguments"])
            }
          end
        end

        # Arguments may arrive as a Hash or a JSON string; anything else
        # (or unparsable JSON) becomes an empty hash.
        def parse_arguments(args)
          case args
          when Hash then args.transform_keys(&:to_sym)
          when String then JSON.parse(args).transform_keys(&:to_sym)
          else {}
          end
        rescue JSON::ParserError
          {}
        end

        # Whitelist generation options; returns nil when none were given so
        # the :options key is dropped by .compact in #chat.
        def build_options(opts)
          known = %i[temperature top_p top_k seed num_predict stop]
          filtered = opts.slice(*known)
          filtered.empty? ? nil : filtered.transform_keys(&:to_s)
        end
      end
    end
  end
end
@@ -0,0 +1,153 @@
# frozen_string_literal: true

require "net/http"
require "json"
require "uri"

module Igniter
  module LLM
    module Providers
      # OpenAI provider (also compatible with Azure OpenAI and any OpenAI-compatible API).
      # Requires OPENAI_API_KEY environment variable or explicit api_key:.
      #
      # API docs: https://platform.openai.com/docs/api-reference/chat
      #
      # Compatible with: OpenAI, Azure OpenAI, Groq, Together AI,
      # Mistral, DeepSeek, and any OpenAI-compatible endpoint.
      class OpenAI < Base # rubocop:disable Metrics/ClassLength
        API_BASE = "https://api.openai.com"

        # @param api_key [String, nil] bearer token (defaults to env var)
        # @param base_url [String] API root; trailing slash stripped
        # @param timeout [Integer] read timeout in seconds
        def initialize(api_key: ENV["OPENAI_API_KEY"], base_url: API_BASE, timeout: 120)
          super()
          @api_key = api_key
          @base_url = base_url.chomp("/")
          @timeout = timeout
        end

        # Send a chat completion request; returns the provider-neutral
        # { role:, content:, tool_calls: } hash.
        def chat(messages:, model:, tools: [], **options) # rubocop:disable Metrics/MethodLength,Metrics/AbcSize
          validate_api_key!

          payload = {
            model: model,
            messages: normalize_messages(messages)
          }
          payload[:tools] = normalize_tools(tools) if tools.any?
          # Forward only the recognized generation options, when present.
          %i[temperature top_p max_tokens seed stop].each do |key|
            payload[key] = options[key] if options.key?(key)
          end

          parse_response(post("/v1/chat/completions", payload))
        end

        private

        # Extract the first choice's message, record token usage, and map
        # into the normalized response shape.
        def parse_response(response) # rubocop:disable Metrics/MethodLength
          message = response.dig("choices", 0, "message") || {}
          usage = response.fetch("usage", {})

          record_usage(
            prompt_tokens: usage["prompt_tokens"] || 0,
            completion_tokens: usage["completion_tokens"] || 0
          )

          role = message["role"] || "assistant"
          {
            role: role.to_sym,
            content: message["content"].to_s,
            tool_calls: parse_tool_calls(message["tool_calls"])
          }
        end

        # Normalize raw tool_call entries into [{ name:, arguments: }].
        def parse_tool_calls(raw)
          return [] unless raw.is_a?(Array)

          raw.map do |call|
            function = call["function"] || {}
            { name: function["name"].to_s, arguments: parse_arguments(function["arguments"]) }
          end
        end

        # Tool arguments may be a Hash or a JSON string; anything else
        # (or unparsable JSON) becomes an empty hash.
        def parse_arguments(args)
          if args.is_a?(Hash)
            args.transform_keys(&:to_sym)
          elsif args.is_a?(String)
            JSON.parse(args).transform_keys(&:to_sym)
          else
            {}
          end
        rescue JSON::ParserError
          {}
        end

        # Accept symbol- or string-keyed messages; emit string-keyed hashes.
        def normalize_messages(messages)
          messages.map do |m|
            { "role" => (m[:role] || m["role"]).to_s, "content" => (m[:content] || m["content"]).to_s }
          end
        end

        # Wrap tool definitions in OpenAI's function-call schema.
        def normalize_tools(tools)
          tools.map do |tool|
            {
              "type" => "function",
              "function" => {
                "name" => tool[:name].to_s,
                "description" => tool[:description].to_s,
                "parameters" => tool.fetch(:parameters) { { "type" => "object", "properties" => {} } }
              }
            }
          end
        end

        # POST a JSON body and return the parsed response, mapping
        # connection-level failures to ProviderError.
        def post(path, body) # rubocop:disable Metrics/MethodLength,Metrics/AbcSize
          uri = URI.parse("#{@base_url}#{path}")

          client = Net::HTTP.new(uri.host, uri.port)
          client.use_ssl = uri.scheme == "https"
          client.read_timeout = @timeout
          client.open_timeout = 10

          req = Net::HTTP::Post.new(uri.path, headers)
          req.body = JSON.generate(body)

          handle_response(client.request(req))
        rescue Errno::ECONNREFUSED, Errno::EADDRNOTAVAIL, SocketError, Net::OpenTimeout => e
          raise Igniter::LLM::ProviderError, "Cannot connect to OpenAI API at #{@base_url}: #{e.message}"
        end

        # Standard bearer-token auth headers.
        def headers
          {
            "Content-Type" => "application/json",
            "Authorization" => "Bearer #{@api_key}"
          }
        end

        # Parse a successful body, or raise with the API's error message
        # (falling back to a truncated raw body).
        def handle_response(response) # rubocop:disable Metrics/MethodLength
          unless response.is_a?(Net::HTTPSuccess)
            details = begin
              JSON.parse(response.body)
            rescue StandardError
              {}
            end
            error_msg = details.dig("error", "message") || response.body.to_s.slice(0, 200)
            raise Igniter::LLM::ProviderError, "OpenAI API error #{response.code}: #{error_msg}"
          end

          JSON.parse(response.body)
        rescue JSON::ParserError => e
          raise Igniter::LLM::ProviderError, "OpenAI returned invalid JSON: #{e.message}"
        end

        # Fail fast with a configuration error before making any request.
        def validate_api_key!
          return if @api_key && !@api_key.empty?

          raise Igniter::LLM::ConfigurationError,
                "OpenAI API key not configured. Set OPENAI_API_KEY or pass api_key: to the provider."
        end
      end
    end
  end
end
@@ -0,0 +1,59 @@
# frozen_string_literal: true

require "igniter"
require_relative "llm/config"
require_relative "llm/context"
require_relative "llm/providers/base"
require_relative "llm/providers/ollama"
require_relative "llm/providers/anthropic"
require_relative "llm/providers/openai"
require_relative "llm/executor"

module Igniter
  # LLM integration namespace: configuration, error types, and a memoized
  # registry of provider instances.
  module LLM
    # Base error for all LLM-related failures.
    class Error < Igniter::Error; end
    # Raised when a provider call fails (network, HTTP status, bad JSON).
    class ProviderError < Error; end
    # Raised for invalid or missing configuration (unknown provider, no key).
    class ConfigurationError < Error; end

    AVAILABLE_PROVIDERS = Config::PROVIDERS

    class << self
      # Global LLM configuration, lazily created.
      def config
        @config ||= Config.new
      end

      # Yield the config for mutation. Cached provider instances are
      # discarded afterwards so reconfiguration takes effect even when
      # providers were already built (previously stale instances survived
      # until reset_providers! was called manually).
      def configure
        yield config
        reset_providers!
      end

      # Returns a memoized provider instance for the given provider name.
      def provider_instance(name)
        @provider_instances ||= {}
        @provider_instances[name.to_sym] ||= build_provider(name.to_sym)
      end

      # Reset cached provider instances (useful after reconfiguration).
      def reset_providers!
        @provider_instances = nil
      end

      private

      # Construct a provider from the current configuration.
      def build_provider(name)
        case name
        when :ollama
          cfg = config.ollama
          Providers::Ollama.new(base_url: cfg.base_url, timeout: cfg.timeout)
        when :anthropic
          cfg = config.anthropic
          Providers::Anthropic.new(api_key: cfg.api_key, base_url: cfg.base_url, timeout: cfg.timeout)
        when :openai
          cfg = config.openai
          Providers::OpenAI.new(api_key: cfg.api_key, base_url: cfg.base_url, timeout: cfg.timeout)
        else
          raise ConfigurationError, "Unknown LLM provider: #{name}. Available: #{AVAILABLE_PROVIDERS.inspect}"
        end
      end
    end
  end
end
@@ -0,0 +1,49 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Igniter
4
+ module Rails
5
+ # ActionCable channel mixin for streaming contract execution events.
6
+ #
7
+ # Usage:
8
+ # class OrderChannel < ApplicationCable::Channel
9
+ # include Igniter::Rails::CableAdapter
10
+ #
11
+ # subscribed do
12
+ # stream_contract(OrderContract, execution_id: params[:execution_id])
13
+ # end
14
+ # end
15
+ #
16
+ # Broadcasts events as:
17
+ # { type: "node_succeeded", node: "payment", status: "succeeded", payload: { ... } }
18
+ module CableAdapter
19
+ def stream_contract(contract_class, execution_id:, store: nil)
20
+ resolved_store = store || Igniter.execution_store
21
+ snapshot = resolved_store.fetch(execution_id)
22
+ instance = contract_class.restore(snapshot)
23
+
24
+ instance.subscribe do |event|
25
+ broadcast_igniter_event(event, execution_id)
26
+ end
27
+
28
+ @_igniter_executions ||= []
29
+ @_igniter_executions << instance
30
+ rescue Igniter::ResolutionError => e
31
+ transmit({ type: "error", message: e.message })
32
+ end
33
+
34
+ private
35
+
36
+ def broadcast_igniter_event(event, execution_id)
37
+ transmit({
38
+ type: event.type.to_s,
39
+ execution_id: execution_id,
40
+ node: event.node_name,
41
+ path: event.path,
42
+ status: event.status,
43
+ payload: event.payload,
44
+ timestamp: event.timestamp&.iso8601
45
+ }.compact)
46
+ end
47
+ end
48
+ end
49
+ end
@@ -0,0 +1,76 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Igniter
4
+ module Rails
5
+ # Base ActiveJob class for async contract execution.
6
+ #
7
+ # Usage:
8
+ # class ProcessOrderJob < Igniter::Rails::ContractJob
9
+ # contract OrderContract
10
+ # end
11
+ #
12
+ # ProcessOrderJob.perform_later(order_id: "ord-123")
13
+ # ProcessOrderJob.perform_now(order_id: "ord-123")
14
+ #
15
+ # The job starts the contract and persists it to the configured store.
16
+ # If the contract has correlation keys, the execution can be resumed
17
+ # later via Contract.deliver_event.
18
+ class ContractJob
19
+ # No dependency on ActiveJob here — this class acts as a blueprint.
20
+ # When Rails is present, subclasses inherit from ApplicationJob automatically
21
+ # if the user adds `< ApplicationJob` (the recommended pattern).
22
+
23
+ class << self
24
+ def contract(klass = nil)
25
+ @contract_class = klass if klass
26
+ @contract_class
27
+ end
28
+
29
+ def store(store_instance = nil)
30
+ @store = store_instance if store_instance
31
+ @store || Igniter.execution_store
32
+ end
33
+
34
+ # Wraps perform in an ActiveJob-compatible interface.
35
+ # Call this when Rails is available to get ActiveJob queueing.
36
+ def perform_later(**inputs)
37
+ if defined?(::ActiveJob::Base)
38
+ ActiveJobAdapter.perform_later(contract_class: contract, inputs: inputs, store: store)
39
+ else
40
+ perform_now(**inputs)
41
+ end
42
+ end
43
+
44
+ def perform_now(**inputs)
45
+ contract.start(inputs, store: store)
46
+ end
47
+ end
48
+
49
+ # Included by ActiveJobAdapter to bridge ActiveJob lifecycle.
50
+ module Perform
51
+ def perform(contract_class_name:, inputs:, store_class: nil, store_config: nil)
52
+ klass = Object.const_get(contract_class_name)
53
+ resolved_store = resolve_store(store_class, store_config)
54
+ klass.start(inputs.transform_keys(&:to_sym), store: resolved_store)
55
+ end
56
+
57
+ private
58
+
59
+ def resolve_store(store_class, _store_config)
60
+ return Igniter.execution_store unless store_class
61
+
62
+ Object.const_get(store_class).new
63
+ end
64
+ end
65
+ end
66
+
67
+ # ActiveJob adapter — only defined when ActiveJob is available.
68
+ if defined?(::ActiveJob::Base)
69
+ class ActiveJobAdapter < ::ActiveJob::Base
70
+ include ContractJob::Perform
71
+
72
+ queue_as :igniter
73
+ end
74
+ end
75
+ end
76
+ end
@@ -0,0 +1,22 @@
1
+ # frozen_string_literal: true
2
+
3
+ require "rails/generators"
4
+
5
+ module Igniter
6
+ module Rails
7
+ module Generators
8
+ class ContractGenerator < ::Rails::Generators::NamedBase
9
+ source_root File.expand_path("templates", __dir__)
10
+ desc "Creates an Igniter contract."
11
+
12
+ class_option :correlate_by, type: :array, default: [], desc: "Correlation key names"
13
+ class_option :inputs, type: :array, default: [], desc: "Input names"
14
+ class_option :outputs, type: :array, default: ["result"], desc: "Output names"
15
+
16
+ def create_contract
17
+ template "contract.rb.tt", File.join("app/contracts", class_path, "#{file_name}_contract.rb")
18
+ end
19
+ end
20
+ end
21
+ end
22
+ end