spikard 0.3.2 → 0.3.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/LICENSE +1 -1
- data/README.md +659 -659
- data/ext/spikard_rb/Cargo.toml +17 -17
- data/ext/spikard_rb/extconf.rb +10 -10
- data/ext/spikard_rb/src/lib.rs +6 -6
- data/lib/spikard/app.rb +386 -386
- data/lib/spikard/background.rb +27 -27
- data/lib/spikard/config.rb +396 -396
- data/lib/spikard/converters.rb +13 -13
- data/lib/spikard/handler_wrapper.rb +113 -113
- data/lib/spikard/provide.rb +214 -214
- data/lib/spikard/response.rb +173 -173
- data/lib/spikard/schema.rb +243 -243
- data/lib/spikard/sse.rb +111 -111
- data/lib/spikard/streaming_response.rb +44 -44
- data/lib/spikard/testing.rb +221 -221
- data/lib/spikard/upload_file.rb +131 -131
- data/lib/spikard/version.rb +5 -5
- data/lib/spikard/websocket.rb +59 -59
- data/lib/spikard.rb +43 -43
- data/sig/spikard.rbs +360 -360
- data/vendor/crates/spikard-core/Cargo.toml +40 -40
- data/vendor/crates/spikard-core/src/bindings/mod.rs +3 -3
- data/vendor/crates/spikard-core/src/bindings/response.rs +133 -133
- data/vendor/crates/spikard-core/src/debug.rs +63 -63
- data/vendor/crates/spikard-core/src/di/container.rs +726 -726
- data/vendor/crates/spikard-core/src/di/dependency.rs +273 -273
- data/vendor/crates/spikard-core/src/di/error.rs +118 -118
- data/vendor/crates/spikard-core/src/di/factory.rs +538 -538
- data/vendor/crates/spikard-core/src/di/graph.rs +545 -545
- data/vendor/crates/spikard-core/src/di/mod.rs +192 -192
- data/vendor/crates/spikard-core/src/di/resolved.rs +411 -411
- data/vendor/crates/spikard-core/src/di/value.rs +283 -283
- data/vendor/crates/spikard-core/src/errors.rs +39 -39
- data/vendor/crates/spikard-core/src/http.rs +153 -153
- data/vendor/crates/spikard-core/src/lib.rs +29 -29
- data/vendor/crates/spikard-core/src/lifecycle.rs +422 -422
- data/vendor/crates/spikard-core/src/parameters.rs +722 -722
- data/vendor/crates/spikard-core/src/problem.rs +310 -310
- data/vendor/crates/spikard-core/src/request_data.rs +189 -189
- data/vendor/crates/spikard-core/src/router.rs +249 -249
- data/vendor/crates/spikard-core/src/schema_registry.rs +183 -183
- data/vendor/crates/spikard-core/src/type_hints.rs +304 -304
- data/vendor/crates/spikard-core/src/validation.rs +699 -699
- data/vendor/crates/spikard-http/Cargo.toml +58 -58
- data/vendor/crates/spikard-http/src/auth.rs +247 -247
- data/vendor/crates/spikard-http/src/background.rs +249 -249
- data/vendor/crates/spikard-http/src/bindings/mod.rs +3 -3
- data/vendor/crates/spikard-http/src/bindings/response.rs +1 -1
- data/vendor/crates/spikard-http/src/body_metadata.rs +8 -8
- data/vendor/crates/spikard-http/src/cors.rs +490 -490
- data/vendor/crates/spikard-http/src/debug.rs +63 -63
- data/vendor/crates/spikard-http/src/di_handler.rs +423 -423
- data/vendor/crates/spikard-http/src/handler_response.rs +190 -190
- data/vendor/crates/spikard-http/src/handler_trait.rs +228 -228
- data/vendor/crates/spikard-http/src/handler_trait_tests.rs +284 -284
- data/vendor/crates/spikard-http/src/lib.rs +529 -529
- data/vendor/crates/spikard-http/src/lifecycle/adapter.rs +149 -149
- data/vendor/crates/spikard-http/src/lifecycle.rs +428 -428
- data/vendor/crates/spikard-http/src/middleware/mod.rs +285 -285
- data/vendor/crates/spikard-http/src/middleware/multipart.rs +86 -86
- data/vendor/crates/spikard-http/src/middleware/urlencoded.rs +147 -147
- data/vendor/crates/spikard-http/src/middleware/validation.rs +287 -287
- data/vendor/crates/spikard-http/src/openapi/mod.rs +309 -309
- data/vendor/crates/spikard-http/src/openapi/parameter_extraction.rs +190 -190
- data/vendor/crates/spikard-http/src/openapi/schema_conversion.rs +308 -308
- data/vendor/crates/spikard-http/src/openapi/spec_generation.rs +195 -195
- data/vendor/crates/spikard-http/src/parameters.rs +1 -1
- data/vendor/crates/spikard-http/src/problem.rs +1 -1
- data/vendor/crates/spikard-http/src/query_parser.rs +369 -369
- data/vendor/crates/spikard-http/src/response.rs +399 -399
- data/vendor/crates/spikard-http/src/router.rs +1 -1
- data/vendor/crates/spikard-http/src/schema_registry.rs +1 -1
- data/vendor/crates/spikard-http/src/server/handler.rs +87 -87
- data/vendor/crates/spikard-http/src/server/lifecycle_execution.rs +98 -98
- data/vendor/crates/spikard-http/src/server/mod.rs +805 -805
- data/vendor/crates/spikard-http/src/server/request_extraction.rs +119 -119
- data/vendor/crates/spikard-http/src/sse.rs +447 -447
- data/vendor/crates/spikard-http/src/testing/form.rs +14 -14
- data/vendor/crates/spikard-http/src/testing/multipart.rs +60 -60
- data/vendor/crates/spikard-http/src/testing/test_client.rs +285 -285
- data/vendor/crates/spikard-http/src/testing.rs +377 -377
- data/vendor/crates/spikard-http/src/type_hints.rs +1 -1
- data/vendor/crates/spikard-http/src/validation.rs +1 -1
- data/vendor/crates/spikard-http/src/websocket.rs +324 -324
- data/vendor/crates/spikard-rb/Cargo.toml +42 -42
- data/vendor/crates/spikard-rb/build.rs +8 -8
- data/vendor/crates/spikard-rb/src/background.rs +63 -63
- data/vendor/crates/spikard-rb/src/config.rs +294 -294
- data/vendor/crates/spikard-rb/src/conversion.rs +453 -453
- data/vendor/crates/spikard-rb/src/di.rs +409 -409
- data/vendor/crates/spikard-rb/src/handler.rs +625 -625
- data/vendor/crates/spikard-rb/src/lib.rs +2771 -2771
- data/vendor/crates/spikard-rb/src/lifecycle.rs +274 -274
- data/vendor/crates/spikard-rb/src/server.rs +283 -283
- data/vendor/crates/spikard-rb/src/sse.rs +231 -231
- data/vendor/crates/spikard-rb/src/test_client.rs +404 -404
- data/vendor/crates/spikard-rb/src/test_sse.rs +143 -143
- data/vendor/crates/spikard-rb/src/test_websocket.rs +221 -221
- data/vendor/crates/spikard-rb/src/websocket.rs +233 -233
- data/vendor/spikard-core/Cargo.toml +40 -40
- data/vendor/spikard-core/src/bindings/mod.rs +3 -3
- data/vendor/spikard-core/src/bindings/response.rs +133 -133
- data/vendor/spikard-core/src/debug.rs +63 -63
- data/vendor/spikard-core/src/di/container.rs +726 -726
- data/vendor/spikard-core/src/di/dependency.rs +273 -273
- data/vendor/spikard-core/src/di/error.rs +118 -118
- data/vendor/spikard-core/src/di/factory.rs +538 -538
- data/vendor/spikard-core/src/di/graph.rs +545 -545
- data/vendor/spikard-core/src/di/mod.rs +192 -192
- data/vendor/spikard-core/src/di/resolved.rs +411 -411
- data/vendor/spikard-core/src/di/value.rs +283 -283
- data/vendor/spikard-core/src/http.rs +153 -153
- data/vendor/spikard-core/src/lib.rs +28 -28
- data/vendor/spikard-core/src/lifecycle.rs +422 -422
- data/vendor/spikard-core/src/parameters.rs +719 -719
- data/vendor/spikard-core/src/problem.rs +310 -310
- data/vendor/spikard-core/src/request_data.rs +189 -189
- data/vendor/spikard-core/src/router.rs +249 -249
- data/vendor/spikard-core/src/schema_registry.rs +183 -183
- data/vendor/spikard-core/src/type_hints.rs +304 -304
- data/vendor/spikard-core/src/validation.rs +699 -699
- data/vendor/spikard-http/Cargo.toml +58 -58
- data/vendor/spikard-http/src/auth.rs +247 -247
- data/vendor/spikard-http/src/background.rs +249 -249
- data/vendor/spikard-http/src/bindings/mod.rs +3 -3
- data/vendor/spikard-http/src/bindings/response.rs +1 -1
- data/vendor/spikard-http/src/body_metadata.rs +8 -8
- data/vendor/spikard-http/src/cors.rs +490 -490
- data/vendor/spikard-http/src/debug.rs +63 -63
- data/vendor/spikard-http/src/di_handler.rs +423 -423
- data/vendor/spikard-http/src/handler_response.rs +190 -190
- data/vendor/spikard-http/src/handler_trait.rs +228 -228
- data/vendor/spikard-http/src/handler_trait_tests.rs +284 -284
- data/vendor/spikard-http/src/lib.rs +529 -529
- data/vendor/spikard-http/src/lifecycle/adapter.rs +149 -149
- data/vendor/spikard-http/src/lifecycle.rs +428 -428
- data/vendor/spikard-http/src/middleware/mod.rs +285 -285
- data/vendor/spikard-http/src/middleware/multipart.rs +86 -86
- data/vendor/spikard-http/src/middleware/urlencoded.rs +147 -147
- data/vendor/spikard-http/src/middleware/validation.rs +287 -287
- data/vendor/spikard-http/src/openapi/mod.rs +309 -309
- data/vendor/spikard-http/src/openapi/parameter_extraction.rs +190 -190
- data/vendor/spikard-http/src/openapi/schema_conversion.rs +308 -308
- data/vendor/spikard-http/src/openapi/spec_generation.rs +195 -195
- data/vendor/spikard-http/src/parameters.rs +1 -1
- data/vendor/spikard-http/src/problem.rs +1 -1
- data/vendor/spikard-http/src/query_parser.rs +369 -369
- data/vendor/spikard-http/src/response.rs +399 -399
- data/vendor/spikard-http/src/router.rs +1 -1
- data/vendor/spikard-http/src/schema_registry.rs +1 -1
- data/vendor/spikard-http/src/server/handler.rs +80 -80
- data/vendor/spikard-http/src/server/lifecycle_execution.rs +98 -98
- data/vendor/spikard-http/src/server/mod.rs +805 -805
- data/vendor/spikard-http/src/server/request_extraction.rs +119 -119
- data/vendor/spikard-http/src/sse.rs +447 -447
- data/vendor/spikard-http/src/testing/form.rs +14 -14
- data/vendor/spikard-http/src/testing/multipart.rs +60 -60
- data/vendor/spikard-http/src/testing/test_client.rs +285 -285
- data/vendor/spikard-http/src/testing.rs +377 -377
- data/vendor/spikard-http/src/type_hints.rs +1 -1
- data/vendor/spikard-http/src/validation.rs +1 -1
- data/vendor/spikard-http/src/websocket.rs +324 -324
- data/vendor/spikard-rb/Cargo.toml +42 -42
- data/vendor/spikard-rb/build.rs +8 -8
- data/vendor/spikard-rb/src/background.rs +63 -63
- data/vendor/spikard-rb/src/config.rs +294 -294
- data/vendor/spikard-rb/src/conversion.rs +392 -392
- data/vendor/spikard-rb/src/di.rs +409 -409
- data/vendor/spikard-rb/src/handler.rs +534 -534
- data/vendor/spikard-rb/src/lib.rs +2020 -2020
- data/vendor/spikard-rb/src/lifecycle.rs +267 -267
- data/vendor/spikard-rb/src/server.rs +283 -283
- data/vendor/spikard-rb/src/sse.rs +231 -231
- data/vendor/spikard-rb/src/test_client.rs +404 -404
- data/vendor/spikard-rb/src/test_sse.rs +143 -143
- data/vendor/spikard-rb/src/test_websocket.rs +221 -221
- data/vendor/spikard-rb/src/websocket.rs +233 -233
- metadata +1 -1
|
@@ -1,2020 +1,2020 @@
|
|
|
1
|
-
#![allow(deprecated)]
|
|
2
|
-
|
|
3
|
-
//! Spikard Ruby bindings using Magnus FFI.
|
|
4
|
-
//!
|
|
5
|
-
//! This crate provides Ruby bindings for the Spikard HTTP toolkit, allowing
|
|
6
|
-
//! Ruby developers to build and test HTTP services with Rust performance.
|
|
7
|
-
//!
|
|
8
|
-
//! ## Modules
|
|
9
|
-
//!
|
|
10
|
-
//! - `test_client`: TestClient wrapper for integration testing
|
|
11
|
-
//! - `handler`: RubyHandler trait implementation
|
|
12
|
-
//! - `di`: Dependency injection bridge for Ruby types
|
|
13
|
-
//! - `config`: ServerConfig extraction from Ruby objects
|
|
14
|
-
//! - `conversion`: Ruby ↔ Rust type conversions
|
|
15
|
-
//! - `server`: HTTP server setup and lifecycle management
|
|
16
|
-
//! - `background`: Background task management
|
|
17
|
-
//! - `lifecycle`: Lifecycle hook implementations
|
|
18
|
-
//! - `sse`: Server-Sent Events support
|
|
19
|
-
//! - `test_sse`: SSE testing utilities
|
|
20
|
-
//! - `websocket`: WebSocket support
|
|
21
|
-
//! - `test_websocket`: WebSocket testing utilities
|
|
22
|
-
|
|
23
|
-
mod background;
|
|
24
|
-
mod config;
|
|
25
|
-
mod conversion;
|
|
26
|
-
mod di;
|
|
27
|
-
mod handler;
|
|
28
|
-
mod lifecycle;
|
|
29
|
-
mod server;
|
|
30
|
-
mod sse;
|
|
31
|
-
mod test_client;
|
|
32
|
-
mod test_sse;
|
|
33
|
-
mod test_websocket;
|
|
34
|
-
mod websocket;
|
|
35
|
-
|
|
36
|
-
use async_stream::stream;
|
|
37
|
-
use axum::body::Body;
|
|
38
|
-
use axum::http::{HeaderName, HeaderValue, Method, Request, Response, StatusCode};
|
|
39
|
-
use axum_test::{TestServer, TestServerConfig, Transport};
|
|
40
|
-
use bytes::Bytes;
|
|
41
|
-
use cookie::Cookie;
|
|
42
|
-
use magnus::prelude::*;
|
|
43
|
-
use magnus::value::{InnerValue, Opaque};
|
|
44
|
-
use magnus::{
|
|
45
|
-
Error, Module, RArray, RHash, RString, Ruby, TryConvert, Value, function, gc::Marker, method, r_hash::ForEach,
|
|
46
|
-
};
|
|
47
|
-
use once_cell::sync::Lazy;
|
|
48
|
-
use serde_json::{Map as JsonMap, Value as JsonValue};
|
|
49
|
-
use spikard_http::ParameterValidator;
|
|
50
|
-
use spikard_http::problem::ProblemDetails;
|
|
51
|
-
use spikard_http::testing::{
|
|
52
|
-
MultipartFilePart, SnapshotError, build_multipart_body, encode_urlencoded_body, snapshot_response,
|
|
53
|
-
};
|
|
54
|
-
use spikard_http::{Handler, HandlerResponse, HandlerResult, RequestData};
|
|
55
|
-
use spikard_http::{Route, RouteMetadata, SchemaValidator};
|
|
56
|
-
use std::cell::RefCell;
|
|
57
|
-
use std::collections::HashMap;
|
|
58
|
-
use std::io;
|
|
59
|
-
use std::pin::Pin;
|
|
60
|
-
use std::sync::Arc;
|
|
61
|
-
use tokio::runtime::{Builder, Runtime};
|
|
62
|
-
|
|
63
|
-
static GLOBAL_RUNTIME: Lazy<Runtime> = Lazy::new(|| {
|
|
64
|
-
Builder::new_current_thread()
|
|
65
|
-
.enable_all()
|
|
66
|
-
.build()
|
|
67
|
-
.expect("Failed to initialise global Tokio runtime")
|
|
68
|
-
});
|
|
69
|
-
|
|
70
|
-
#[derive(Default)]
|
|
71
|
-
#[magnus::wrap(class = "Spikard::Native::TestClient", free_immediately, mark)]
|
|
72
|
-
struct NativeTestClient {
|
|
73
|
-
inner: RefCell<Option<ClientInner>>,
|
|
74
|
-
}
|
|
75
|
-
|
|
76
|
-
struct ClientInner {
|
|
77
|
-
http_server: Arc<TestServer>,
|
|
78
|
-
transport_server: Arc<TestServer>,
|
|
79
|
-
/// Keep Ruby handler closures alive for GC; accessed via the `mark` hook.
|
|
80
|
-
#[allow(dead_code)]
|
|
81
|
-
handlers: Vec<RubyHandler>,
|
|
82
|
-
}
|
|
83
|
-
|
|
84
|
-
struct RequestConfig {
|
|
85
|
-
query: Option<JsonValue>,
|
|
86
|
-
headers: HashMap<String, String>,
|
|
87
|
-
cookies: HashMap<String, String>,
|
|
88
|
-
body: Option<RequestBody>,
|
|
89
|
-
}
|
|
90
|
-
|
|
91
|
-
enum RequestBody {
|
|
92
|
-
Json(JsonValue),
|
|
93
|
-
Form(JsonValue),
|
|
94
|
-
Raw(String),
|
|
95
|
-
Multipart {
|
|
96
|
-
form_data: Vec<(String, String)>,
|
|
97
|
-
files: Vec<MultipartFilePart>,
|
|
98
|
-
},
|
|
99
|
-
}
|
|
100
|
-
|
|
101
|
-
#[derive(Clone)]
|
|
102
|
-
struct RubyHandler {
|
|
103
|
-
inner: Arc<RubyHandlerInner>,
|
|
104
|
-
}
|
|
105
|
-
|
|
106
|
-
struct RubyHandlerInner {
|
|
107
|
-
handler_proc: Opaque<Value>,
|
|
108
|
-
handler_name: String,
|
|
109
|
-
method: String,
|
|
110
|
-
path: String,
|
|
111
|
-
json_module: Opaque<Value>,
|
|
112
|
-
request_validator: Option<Arc<SchemaValidator>>,
|
|
113
|
-
response_validator: Option<Arc<SchemaValidator>>,
|
|
114
|
-
parameter_validator: Option<ParameterValidator>,
|
|
115
|
-
#[cfg(feature = "di")]
|
|
116
|
-
handler_dependencies: Vec<String>,
|
|
117
|
-
}
|
|
118
|
-
|
|
119
|
-
struct HandlerResponsePayload {
|
|
120
|
-
status: u16,
|
|
121
|
-
headers: HashMap<String, String>,
|
|
122
|
-
body: Option<JsonValue>,
|
|
123
|
-
raw_body: Option<Vec<u8>>,
|
|
124
|
-
}
|
|
125
|
-
|
|
126
|
-
enum RubyHandlerResult {
|
|
127
|
-
Payload(HandlerResponsePayload),
|
|
128
|
-
Streaming(StreamingResponsePayload),
|
|
129
|
-
}
|
|
130
|
-
|
|
131
|
-
struct StreamingResponsePayload {
|
|
132
|
-
enumerator: Arc<Opaque<Value>>,
|
|
133
|
-
status: u16,
|
|
134
|
-
headers: HashMap<String, String>,
|
|
135
|
-
}
|
|
136
|
-
|
|
137
|
-
impl StreamingResponsePayload {
|
|
138
|
-
fn into_response(self) -> Result<HandlerResponse, Error> {
|
|
139
|
-
let ruby = Ruby::get().map_err(|_| {
|
|
140
|
-
Error::new(
|
|
141
|
-
Ruby::get().unwrap().exception_runtime_error(),
|
|
142
|
-
"Ruby VM unavailable while building streaming response",
|
|
143
|
-
)
|
|
144
|
-
})?;
|
|
145
|
-
|
|
146
|
-
let status = StatusCode::from_u16(self.status).map_err(|err| {
|
|
147
|
-
Error::new(
|
|
148
|
-
ruby.exception_arg_error(),
|
|
149
|
-
format!("Invalid streaming status code {}: {}", self.status, err),
|
|
150
|
-
)
|
|
151
|
-
})?;
|
|
152
|
-
|
|
153
|
-
let header_pairs = self
|
|
154
|
-
.headers
|
|
155
|
-
.into_iter()
|
|
156
|
-
.map(|(name, value)| {
|
|
157
|
-
let header_name = HeaderName::from_bytes(name.as_bytes()).map_err(|err| {
|
|
158
|
-
Error::new(
|
|
159
|
-
ruby.exception_arg_error(),
|
|
160
|
-
format!("Invalid header name '{name}': {err}"),
|
|
161
|
-
)
|
|
162
|
-
})?;
|
|
163
|
-
let header_value = HeaderValue::from_str(&value).map_err(|err| {
|
|
164
|
-
Error::new(
|
|
165
|
-
ruby.exception_arg_error(),
|
|
166
|
-
format!("Invalid header value for '{name}': {err}"),
|
|
167
|
-
)
|
|
168
|
-
})?;
|
|
169
|
-
Ok((header_name, header_value))
|
|
170
|
-
})
|
|
171
|
-
.collect::<Result<Vec<_>, Error>>()?;
|
|
172
|
-
|
|
173
|
-
let enumerator = self.enumerator.clone();
|
|
174
|
-
let body_stream = stream! {
|
|
175
|
-
loop {
|
|
176
|
-
match poll_stream_chunk(&enumerator) {
|
|
177
|
-
Ok(Some(bytes)) => yield Ok(bytes),
|
|
178
|
-
Ok(None) => break,
|
|
179
|
-
Err(err) => {
|
|
180
|
-
yield Err(Box::new(err));
|
|
181
|
-
break;
|
|
182
|
-
}
|
|
183
|
-
}
|
|
184
|
-
}
|
|
185
|
-
};
|
|
186
|
-
|
|
187
|
-
let mut response = HandlerResponse::stream(body_stream).with_status(status);
|
|
188
|
-
for (name, value) in header_pairs {
|
|
189
|
-
response = response.with_header(name, value);
|
|
190
|
-
}
|
|
191
|
-
Ok(response)
|
|
192
|
-
}
|
|
193
|
-
}
|
|
194
|
-
|
|
195
|
-
fn poll_stream_chunk(enumerator: &Arc<Opaque<Value>>) -> Result<Option<Bytes>, io::Error> {
|
|
196
|
-
let ruby = Ruby::get().map_err(|err| io::Error::other(err.to_string()))?;
|
|
197
|
-
let enum_value = enumerator.get_inner_with(&ruby);
|
|
198
|
-
match enum_value.funcall::<_, _, Value>("next", ()) {
|
|
199
|
-
Ok(chunk) => ruby_value_to_bytes(chunk).map(Some),
|
|
200
|
-
Err(err) => {
|
|
201
|
-
if err.is_kind_of(ruby.exception_stop_iteration()) {
|
|
202
|
-
Ok(None)
|
|
203
|
-
} else {
|
|
204
|
-
Err(io::Error::other(err.to_string()))
|
|
205
|
-
}
|
|
206
|
-
}
|
|
207
|
-
}
|
|
208
|
-
}
|
|
209
|
-
|
|
210
|
-
fn ruby_value_to_bytes(value: Value) -> Result<Bytes, io::Error> {
|
|
211
|
-
if let Ok(str_value) = RString::try_convert(value) {
|
|
212
|
-
let slice = unsafe { str_value.as_slice() };
|
|
213
|
-
return Ok(Bytes::copy_from_slice(slice));
|
|
214
|
-
}
|
|
215
|
-
|
|
216
|
-
if let Ok(vec_bytes) = Vec::<u8>::try_convert(value) {
|
|
217
|
-
return Ok(Bytes::from(vec_bytes));
|
|
218
|
-
}
|
|
219
|
-
|
|
220
|
-
Err(io::Error::other("Streaming chunks must be Strings or Arrays of bytes"))
|
|
221
|
-
}
|
|
222
|
-
|
|
223
|
-
struct TestResponseData {
|
|
224
|
-
status: u16,
|
|
225
|
-
headers: HashMap<String, String>,
|
|
226
|
-
body_text: Option<String>,
|
|
227
|
-
}
|
|
228
|
-
|
|
229
|
-
#[derive(Debug)]
|
|
230
|
-
struct NativeRequestError(String);
|
|
231
|
-
|
|
232
|
-
impl NativeTestClient {
|
|
233
|
-
#[allow(clippy::too_many_arguments)]
|
|
234
|
-
fn initialize(
|
|
235
|
-
ruby: &Ruby,
|
|
236
|
-
this: &Self,
|
|
237
|
-
routes_json: String,
|
|
238
|
-
handlers: Value,
|
|
239
|
-
config_value: Value,
|
|
240
|
-
ws_handlers: Value,
|
|
241
|
-
sse_producers: Value,
|
|
242
|
-
dependencies: Value,
|
|
243
|
-
) -> Result<(), Error> {
|
|
244
|
-
let metadata: Vec<RouteMetadata> = serde_json::from_str(&routes_json)
|
|
245
|
-
.map_err(|err| Error::new(ruby.exception_arg_error(), format!("Invalid routes JSON: {err}")))?;
|
|
246
|
-
|
|
247
|
-
let handlers_hash = RHash::from_value(handlers).ok_or_else(|| {
|
|
248
|
-
Error::new(
|
|
249
|
-
ruby.exception_arg_error(),
|
|
250
|
-
"handlers parameter must be a Hash of handler_name => Proc",
|
|
251
|
-
)
|
|
252
|
-
})?;
|
|
253
|
-
|
|
254
|
-
let json_module = ruby
|
|
255
|
-
.class_object()
|
|
256
|
-
.const_get("JSON")
|
|
257
|
-
.map_err(|_| Error::new(ruby.exception_runtime_error(), "JSON module not available"))?;
|
|
258
|
-
|
|
259
|
-
let mut server_config = extract_server_config(ruby, config_value)?;
|
|
260
|
-
|
|
261
|
-
// Extract and register dependencies
|
|
262
|
-
#[cfg(feature = "di")]
|
|
263
|
-
{
|
|
264
|
-
if !dependencies.is_nil() {
|
|
265
|
-
match build_dependency_container(ruby, dependencies) {
|
|
266
|
-
Ok(container) => {
|
|
267
|
-
server_config.di_container = Some(Arc::new(container));
|
|
268
|
-
}
|
|
269
|
-
Err(err) => {
|
|
270
|
-
return Err(Error::new(
|
|
271
|
-
ruby.exception_runtime_error(),
|
|
272
|
-
format!("Failed to build DI container: {}", err),
|
|
273
|
-
));
|
|
274
|
-
}
|
|
275
|
-
}
|
|
276
|
-
}
|
|
277
|
-
}
|
|
278
|
-
|
|
279
|
-
let schema_registry = spikard_http::SchemaRegistry::new();
|
|
280
|
-
let mut prepared_routes = Vec::with_capacity(metadata.len());
|
|
281
|
-
let mut handler_refs = Vec::with_capacity(metadata.len());
|
|
282
|
-
let mut route_metadata_vec = Vec::with_capacity(metadata.len());
|
|
283
|
-
|
|
284
|
-
for meta in metadata.clone() {
|
|
285
|
-
let handler_value = fetch_handler(ruby, &handlers_hash, &meta.handler_name)?;
|
|
286
|
-
let route = Route::from_metadata(meta.clone(), &schema_registry)
|
|
287
|
-
.map_err(|err| Error::new(ruby.exception_runtime_error(), format!("Failed to build route: {err}")))?;
|
|
288
|
-
|
|
289
|
-
let handler = RubyHandler::new(&route, handler_value, json_module)?;
|
|
290
|
-
prepared_routes.push((route, Arc::new(handler.clone()) as Arc<dyn spikard_http::Handler>));
|
|
291
|
-
handler_refs.push(handler);
|
|
292
|
-
route_metadata_vec.push(meta);
|
|
293
|
-
}
|
|
294
|
-
|
|
295
|
-
let mut router = spikard_http::server::build_router_with_handlers_and_config(
|
|
296
|
-
prepared_routes,
|
|
297
|
-
server_config,
|
|
298
|
-
route_metadata_vec,
|
|
299
|
-
)
|
|
300
|
-
.map_err(|err| Error::new(ruby.exception_runtime_error(), format!("Failed to build router: {err}")))?;
|
|
301
|
-
|
|
302
|
-
let mut ws_endpoints = Vec::new();
|
|
303
|
-
if !ws_handlers.is_nil() {
|
|
304
|
-
let ws_hash = RHash::from_value(ws_handlers)
|
|
305
|
-
.ok_or_else(|| Error::new(ruby.exception_arg_error(), "WebSocket handlers must be a Hash"))?;
|
|
306
|
-
|
|
307
|
-
ws_hash.foreach(|path: String, factory: Value| -> Result<ForEach, Error> {
|
|
308
|
-
let handler_instance = factory.funcall::<_, _, Value>("call", ()).map_err(|e| {
|
|
309
|
-
Error::new(
|
|
310
|
-
ruby.exception_runtime_error(),
|
|
311
|
-
format!("Failed to create WebSocket handler: {}", e),
|
|
312
|
-
)
|
|
313
|
-
})?;
|
|
314
|
-
|
|
315
|
-
let ws_state = crate::websocket::create_websocket_state(ruby, handler_instance)?;
|
|
316
|
-
|
|
317
|
-
ws_endpoints.push((path, ws_state));
|
|
318
|
-
|
|
319
|
-
Ok(ForEach::Continue)
|
|
320
|
-
})?;
|
|
321
|
-
}
|
|
322
|
-
|
|
323
|
-
let mut sse_endpoints = Vec::new();
|
|
324
|
-
if !sse_producers.is_nil() {
|
|
325
|
-
let sse_hash = RHash::from_value(sse_producers)
|
|
326
|
-
.ok_or_else(|| Error::new(ruby.exception_arg_error(), "SSE producers must be a Hash"))?;
|
|
327
|
-
|
|
328
|
-
sse_hash.foreach(|path: String, factory: Value| -> Result<ForEach, Error> {
|
|
329
|
-
let producer_instance = factory.funcall::<_, _, Value>("call", ()).map_err(|e| {
|
|
330
|
-
Error::new(
|
|
331
|
-
ruby.exception_runtime_error(),
|
|
332
|
-
format!("Failed to create SSE producer: {}", e),
|
|
333
|
-
)
|
|
334
|
-
})?;
|
|
335
|
-
|
|
336
|
-
let sse_state = crate::sse::create_sse_state(ruby, producer_instance)?;
|
|
337
|
-
|
|
338
|
-
sse_endpoints.push((path, sse_state));
|
|
339
|
-
|
|
340
|
-
Ok(ForEach::Continue)
|
|
341
|
-
})?;
|
|
342
|
-
}
|
|
343
|
-
|
|
344
|
-
use axum::routing::get;
|
|
345
|
-
for (path, ws_state) in ws_endpoints {
|
|
346
|
-
router = router.route(
|
|
347
|
-
&path,
|
|
348
|
-
get(spikard_http::websocket_handler::<crate::websocket::RubyWebSocketHandler>).with_state(ws_state),
|
|
349
|
-
);
|
|
350
|
-
}
|
|
351
|
-
|
|
352
|
-
for (path, sse_state) in sse_endpoints {
|
|
353
|
-
router = router.route(
|
|
354
|
-
&path,
|
|
355
|
-
get(spikard_http::sse_handler::<crate::sse::RubySseEventProducer>).with_state(sse_state),
|
|
356
|
-
);
|
|
357
|
-
}
|
|
358
|
-
|
|
359
|
-
let http_server = GLOBAL_RUNTIME
|
|
360
|
-
.block_on(async { TestServer::new(router.clone()) })
|
|
361
|
-
.map_err(|err| {
|
|
362
|
-
Error::new(
|
|
363
|
-
ruby.exception_runtime_error(),
|
|
364
|
-
format!("Failed to initialise test server: {err}"),
|
|
365
|
-
)
|
|
366
|
-
})?;
|
|
367
|
-
|
|
368
|
-
let ws_config = TestServerConfig {
|
|
369
|
-
transport: Some(Transport::HttpRandomPort),
|
|
370
|
-
..Default::default()
|
|
371
|
-
};
|
|
372
|
-
let transport_server = GLOBAL_RUNTIME
|
|
373
|
-
.block_on(async { TestServer::new_with_config(router, ws_config) })
|
|
374
|
-
.map_err(|err| {
|
|
375
|
-
Error::new(
|
|
376
|
-
ruby.exception_runtime_error(),
|
|
377
|
-
format!("Failed to initialise WebSocket transport server: {err}"),
|
|
378
|
-
)
|
|
379
|
-
})?;
|
|
380
|
-
|
|
381
|
-
*this.inner.borrow_mut() = Some(ClientInner {
|
|
382
|
-
http_server: Arc::new(http_server),
|
|
383
|
-
transport_server: Arc::new(transport_server),
|
|
384
|
-
handlers: handler_refs,
|
|
385
|
-
});
|
|
386
|
-
|
|
387
|
-
Ok(())
|
|
388
|
-
}
|
|
389
|
-
|
|
390
|
-
fn request(ruby: &Ruby, this: &Self, method: String, path: String, options: Value) -> Result<Value, Error> {
|
|
391
|
-
let inner_borrow = this.inner.borrow();
|
|
392
|
-
let inner = inner_borrow
|
|
393
|
-
.as_ref()
|
|
394
|
-
.ok_or_else(|| Error::new(ruby.exception_runtime_error(), "TestClient not initialised"))?;
|
|
395
|
-
let method_upper = method.to_ascii_uppercase();
|
|
396
|
-
let http_method = Method::from_bytes(method_upper.as_bytes()).map_err(|err| {
|
|
397
|
-
Error::new(
|
|
398
|
-
ruby.exception_arg_error(),
|
|
399
|
-
format!("Unsupported method {method_upper}: {err}"),
|
|
400
|
-
)
|
|
401
|
-
})?;
|
|
402
|
-
|
|
403
|
-
let request_config = parse_request_config(ruby, options)?;
|
|
404
|
-
|
|
405
|
-
let response = GLOBAL_RUNTIME
|
|
406
|
-
.block_on(execute_request(
|
|
407
|
-
inner.http_server.clone(),
|
|
408
|
-
http_method,
|
|
409
|
-
path.clone(),
|
|
410
|
-
request_config,
|
|
411
|
-
))
|
|
412
|
-
.map_err(|err| {
|
|
413
|
-
Error::new(
|
|
414
|
-
ruby.exception_runtime_error(),
|
|
415
|
-
format!("Request failed for {method_upper} {path}: {}", err.0),
|
|
416
|
-
)
|
|
417
|
-
})?;
|
|
418
|
-
|
|
419
|
-
response_to_ruby(ruby, response)
|
|
420
|
-
}
|
|
421
|
-
|
|
422
|
-
fn close(&self) -> Result<(), Error> {
|
|
423
|
-
*self.inner.borrow_mut() = None;
|
|
424
|
-
Ok(())
|
|
425
|
-
}
|
|
426
|
-
|
|
427
|
-
fn websocket(ruby: &Ruby, this: &Self, path: String) -> Result<Value, Error> {
|
|
428
|
-
let inner_borrow = this.inner.borrow();
|
|
429
|
-
let inner = inner_borrow
|
|
430
|
-
.as_ref()
|
|
431
|
-
.ok_or_else(|| Error::new(ruby.exception_runtime_error(), "TestClient not initialised"))?;
|
|
432
|
-
|
|
433
|
-
let server = Arc::clone(&inner.transport_server);
|
|
434
|
-
|
|
435
|
-
drop(inner_borrow);
|
|
436
|
-
|
|
437
|
-
let handle =
|
|
438
|
-
GLOBAL_RUNTIME.spawn(async move { spikard_http::testing::connect_websocket(&server, &path).await });
|
|
439
|
-
|
|
440
|
-
let ws = GLOBAL_RUNTIME.block_on(async {
|
|
441
|
-
handle
|
|
442
|
-
.await
|
|
443
|
-
.map_err(|e| Error::new(ruby.exception_runtime_error(), format!("WebSocket task failed: {}", e)))
|
|
444
|
-
})?;
|
|
445
|
-
|
|
446
|
-
let ws_conn = test_websocket::WebSocketTestConnection::new(ws);
|
|
447
|
-
Ok(ruby.obj_wrap(ws_conn).as_value())
|
|
448
|
-
}
|
|
449
|
-
|
|
450
|
-
fn sse(ruby: &Ruby, this: &Self, path: String) -> Result<Value, Error> {
|
|
451
|
-
let inner_borrow = this.inner.borrow();
|
|
452
|
-
let inner = inner_borrow
|
|
453
|
-
.as_ref()
|
|
454
|
-
.ok_or_else(|| Error::new(ruby.exception_runtime_error(), "TestClient not initialised"))?;
|
|
455
|
-
|
|
456
|
-
let response = GLOBAL_RUNTIME
|
|
457
|
-
.block_on(async {
|
|
458
|
-
let axum_response = inner.transport_server.get(&path).await;
|
|
459
|
-
snapshot_response(axum_response).await
|
|
460
|
-
})
|
|
461
|
-
.map_err(|e| Error::new(ruby.exception_runtime_error(), format!("SSE request failed: {}", e)))?;
|
|
462
|
-
|
|
463
|
-
test_sse::sse_stream_from_response(ruby, &response)
|
|
464
|
-
}
|
|
465
|
-
}
|
|
466
|
-
|
|
467
|
-
impl ClientInner {}
|
|
468
|
-
|
|
469
|
-
impl RubyHandler {
|
|
470
|
-
fn new(route: &Route, handler_value: Value, json_module: Value) -> Result<Self, Error> {
|
|
471
|
-
Ok(Self {
|
|
472
|
-
inner: Arc::new(RubyHandlerInner {
|
|
473
|
-
handler_proc: Opaque::from(handler_value),
|
|
474
|
-
handler_name: route.handler_name.clone(),
|
|
475
|
-
method: route.method.as_str().to_string(),
|
|
476
|
-
path: route.path.clone(),
|
|
477
|
-
json_module: Opaque::from(json_module),
|
|
478
|
-
request_validator: route.request_validator.clone(),
|
|
479
|
-
response_validator: route.response_validator.clone(),
|
|
480
|
-
parameter_validator: route.parameter_validator.clone(),
|
|
481
|
-
#[cfg(feature = "di")]
|
|
482
|
-
handler_dependencies: route.handler_dependencies.clone(),
|
|
483
|
-
}),
|
|
484
|
-
})
|
|
485
|
-
}
|
|
486
|
-
|
|
487
|
-
/// Create a new RubyHandler for server mode
|
|
488
|
-
///
|
|
489
|
-
/// This is used by run_server to create handlers from Ruby Procs
|
|
490
|
-
fn new_for_server(
|
|
491
|
-
_ruby: &Ruby,
|
|
492
|
-
handler_value: Value,
|
|
493
|
-
handler_name: String,
|
|
494
|
-
method: String,
|
|
495
|
-
path: String,
|
|
496
|
-
json_module: Value,
|
|
497
|
-
route: &Route,
|
|
498
|
-
) -> Result<Self, Error> {
|
|
499
|
-
Ok(Self {
|
|
500
|
-
inner: Arc::new(RubyHandlerInner {
|
|
501
|
-
handler_proc: Opaque::from(handler_value),
|
|
502
|
-
handler_name,
|
|
503
|
-
method,
|
|
504
|
-
path,
|
|
505
|
-
json_module: Opaque::from(json_module),
|
|
506
|
-
request_validator: route.request_validator.clone(),
|
|
507
|
-
response_validator: route.response_validator.clone(),
|
|
508
|
-
parameter_validator: route.parameter_validator.clone(),
|
|
509
|
-
#[cfg(feature = "di")]
|
|
510
|
-
handler_dependencies: route.handler_dependencies.clone(),
|
|
511
|
-
}),
|
|
512
|
-
})
|
|
513
|
-
}
|
|
514
|
-
|
|
515
|
-
/// Required by Ruby GC; invoked through the magnus mark hook.
|
|
516
|
-
#[allow(dead_code)]
|
|
517
|
-
fn mark(&self, marker: &Marker) {
|
|
518
|
-
if let Ok(ruby) = Ruby::get() {
|
|
519
|
-
let proc_val = self.inner.handler_proc.get_inner_with(&ruby);
|
|
520
|
-
marker.mark(proc_val);
|
|
521
|
-
}
|
|
522
|
-
}
|
|
523
|
-
|
|
524
|
-
fn handle(&self, request_data: RequestData) -> HandlerResult {
|
|
525
|
-
if let Some(validator) = &self.inner.request_validator
|
|
526
|
-
&& let Err(errors) = validator.validate(&request_data.body)
|
|
527
|
-
{
|
|
528
|
-
let problem = ProblemDetails::from_validation_error(&errors);
|
|
529
|
-
let error_json = problem_to_json(&problem);
|
|
530
|
-
return Err((problem.status_code(), error_json));
|
|
531
|
-
}
|
|
532
|
-
|
|
533
|
-
let validated_params = if let Some(validator) = &self.inner.parameter_validator {
|
|
534
|
-
let raw_query_strings: HashMap<String, String> = request_data
|
|
535
|
-
.raw_query_params
|
|
536
|
-
.as_ref()
|
|
537
|
-
.iter()
|
|
538
|
-
.filter_map(|(k, v)| v.first().map(|first| (k.clone(), first.clone())))
|
|
539
|
-
.collect();
|
|
540
|
-
|
|
541
|
-
match validator.validate_and_extract(
|
|
542
|
-
&request_data.query_params,
|
|
543
|
-
&raw_query_strings,
|
|
544
|
-
request_data.path_params.as_ref(),
|
|
545
|
-
request_data.headers.as_ref(),
|
|
546
|
-
request_data.cookies.as_ref(),
|
|
547
|
-
) {
|
|
548
|
-
Ok(value) => Some(value),
|
|
549
|
-
Err(errors) => {
|
|
550
|
-
let problem = ProblemDetails::from_validation_error(&errors);
|
|
551
|
-
return Err((problem.status_code(), problem_to_json(&problem)));
|
|
552
|
-
}
|
|
553
|
-
}
|
|
554
|
-
} else {
|
|
555
|
-
None
|
|
556
|
-
};
|
|
557
|
-
|
|
558
|
-
let ruby = Ruby::get().map_err(|_| {
|
|
559
|
-
(
|
|
560
|
-
StatusCode::INTERNAL_SERVER_ERROR,
|
|
561
|
-
"Ruby VM unavailable while invoking handler".to_string(),
|
|
562
|
-
)
|
|
563
|
-
})?;
|
|
564
|
-
|
|
565
|
-
let request_value = build_ruby_request(&ruby, &self.inner, &request_data, validated_params.as_ref())
|
|
566
|
-
.map_err(|err| (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()))?;
|
|
567
|
-
|
|
568
|
-
let handler_proc = self.inner.handler_proc.get_inner_with(&ruby);
|
|
569
|
-
|
|
570
|
-
// Extract resolved dependencies (if any) and convert to Ruby keyword arguments
|
|
571
|
-
#[cfg(feature = "di")]
|
|
572
|
-
let handler_result = {
|
|
573
|
-
if let Some(deps) = &request_data.dependencies {
|
|
574
|
-
// Build keyword arguments hash from dependencies
|
|
575
|
-
// ONLY include dependencies that the handler actually declared
|
|
576
|
-
let kwargs_hash = ruby.hash_new();
|
|
577
|
-
|
|
578
|
-
// Check if all required handler dependencies are present
|
|
579
|
-
// If any are missing, return error BEFORE calling handler
|
|
580
|
-
for key in &self.inner.handler_dependencies {
|
|
581
|
-
if !deps.contains(key) {
|
|
582
|
-
// Handler requires a dependency that was not resolved
|
|
583
|
-
// This should have been caught by DI system, but safety check here
|
|
584
|
-
return Err((
|
|
585
|
-
StatusCode::INTERNAL_SERVER_ERROR,
|
|
586
|
-
format!(
|
|
587
|
-
"Handler '{}' requires dependency '{}' which was not resolved",
|
|
588
|
-
self.inner.handler_name, key
|
|
589
|
-
),
|
|
590
|
-
));
|
|
591
|
-
}
|
|
592
|
-
}
|
|
593
|
-
|
|
594
|
-
// Filter dependencies: only pass those declared by the handler
|
|
595
|
-
for key in &self.inner.handler_dependencies {
|
|
596
|
-
if let Some(value) = deps.get_arc(key) {
|
|
597
|
-
// Check what type of dependency this is and extract Ruby value
|
|
598
|
-
let ruby_val = if let Some(wrapper) = value.downcast_ref::<crate::di::RubyValueWrapper>() {
|
|
599
|
-
// It's a Ruby value wrapper (singleton with preserved mutations)
|
|
600
|
-
// Get the raw Ruby value directly to preserve object identity
|
|
601
|
-
wrapper.get_value(&ruby)
|
|
602
|
-
} else if let Some(json) = value.downcast_ref::<serde_json::Value>() {
|
|
603
|
-
// It's already JSON (non-singleton or value dependency)
|
|
604
|
-
// Convert JSON to Ruby value
|
|
605
|
-
match crate::di::json_to_ruby(&ruby, json) {
|
|
606
|
-
Ok(val) => val,
|
|
607
|
-
Err(e) => {
|
|
608
|
-
return Err((
|
|
609
|
-
StatusCode::INTERNAL_SERVER_ERROR,
|
|
610
|
-
format!("Failed to convert dependency '{}' to Ruby: {}", key, e),
|
|
611
|
-
));
|
|
612
|
-
}
|
|
613
|
-
}
|
|
614
|
-
} else {
|
|
615
|
-
return Err((
|
|
616
|
-
StatusCode::INTERNAL_SERVER_ERROR,
|
|
617
|
-
format!(
|
|
618
|
-
"Unknown dependency type for '{}': expected RubyValueWrapper or JSON",
|
|
619
|
-
key
|
|
620
|
-
),
|
|
621
|
-
));
|
|
622
|
-
};
|
|
623
|
-
|
|
624
|
-
// Add to kwargs hash
|
|
625
|
-
let key_sym = ruby.to_symbol(key);
|
|
626
|
-
if let Err(e) = kwargs_hash.aset(key_sym, ruby_val) {
|
|
627
|
-
return Err((
|
|
628
|
-
StatusCode::INTERNAL_SERVER_ERROR,
|
|
629
|
-
format!("Failed to add dependency '{}': {}", key, e),
|
|
630
|
-
));
|
|
631
|
-
}
|
|
632
|
-
}
|
|
633
|
-
}
|
|
634
|
-
|
|
635
|
-
// Call handler with request and dependencies as keyword arguments
|
|
636
|
-
// Ruby 3.x requires keyword arguments to be passed differently than Ruby 2.x
|
|
637
|
-
// We'll create a Ruby lambda that calls the handler with ** splat operator
|
|
638
|
-
//
|
|
639
|
-
// Equivalent Ruby code:
|
|
640
|
-
// lambda { |req, kwargs| handler_proc.call(req, **kwargs) }.call(request, kwargs_hash)
|
|
641
|
-
|
|
642
|
-
let wrapper_code = ruby
|
|
643
|
-
.eval::<Value>(
|
|
644
|
-
r#"
|
|
645
|
-
lambda do |proc, request, kwargs|
|
|
646
|
-
proc.call(request, **kwargs)
|
|
647
|
-
end
|
|
648
|
-
"#,
|
|
649
|
-
)
|
|
650
|
-
.map_err(|e| {
|
|
651
|
-
(
|
|
652
|
-
StatusCode::INTERNAL_SERVER_ERROR,
|
|
653
|
-
format!("Failed to create kwarg wrapper: {}", e),
|
|
654
|
-
)
|
|
655
|
-
})?;
|
|
656
|
-
|
|
657
|
-
wrapper_code.funcall("call", (handler_proc, request_value, kwargs_hash))
|
|
658
|
-
} else {
|
|
659
|
-
// No dependencies, call with just request
|
|
660
|
-
handler_proc.funcall("call", (request_value,))
|
|
661
|
-
}
|
|
662
|
-
};
|
|
663
|
-
|
|
664
|
-
#[cfg(not(feature = "di"))]
|
|
665
|
-
let handler_result = handler_proc.funcall("call", (request_value,));
|
|
666
|
-
|
|
667
|
-
let response_value = match handler_result {
|
|
668
|
-
Ok(value) => value,
|
|
669
|
-
Err(err) => {
|
|
670
|
-
return Err((
|
|
671
|
-
StatusCode::INTERNAL_SERVER_ERROR,
|
|
672
|
-
format!("Handler '{}' failed: {}", self.inner.handler_name, err),
|
|
673
|
-
));
|
|
674
|
-
}
|
|
675
|
-
};
|
|
676
|
-
|
|
677
|
-
let handler_result = interpret_handler_response(&ruby, &self.inner, response_value).map_err(|err| {
|
|
678
|
-
(
|
|
679
|
-
StatusCode::INTERNAL_SERVER_ERROR,
|
|
680
|
-
format!(
|
|
681
|
-
"Failed to interpret response from '{}': {}",
|
|
682
|
-
self.inner.handler_name, err
|
|
683
|
-
),
|
|
684
|
-
)
|
|
685
|
-
})?;
|
|
686
|
-
|
|
687
|
-
let payload = match handler_result {
|
|
688
|
-
RubyHandlerResult::Streaming(streaming) => {
|
|
689
|
-
let response = streaming.into_response().map_err(|err| {
|
|
690
|
-
(
|
|
691
|
-
StatusCode::INTERNAL_SERVER_ERROR,
|
|
692
|
-
format!("Failed to build streaming response: {}", err),
|
|
693
|
-
)
|
|
694
|
-
})?;
|
|
695
|
-
return Ok(response.into_response());
|
|
696
|
-
}
|
|
697
|
-
RubyHandlerResult::Payload(payload) => payload,
|
|
698
|
-
};
|
|
699
|
-
|
|
700
|
-
if let (Some(validator), Some(body)) = (&self.inner.response_validator, payload.body.as_ref())
|
|
701
|
-
&& let Err(errors) = validator.validate(body)
|
|
702
|
-
{
|
|
703
|
-
let problem = ProblemDetails::from_validation_error(&errors);
|
|
704
|
-
return Err((StatusCode::INTERNAL_SERVER_ERROR, problem_to_json(&problem)));
|
|
705
|
-
}
|
|
706
|
-
|
|
707
|
-
let HandlerResponsePayload {
|
|
708
|
-
status,
|
|
709
|
-
headers,
|
|
710
|
-
body,
|
|
711
|
-
raw_body,
|
|
712
|
-
} = payload;
|
|
713
|
-
|
|
714
|
-
let mut response_builder = axum::http::Response::builder().status(status);
|
|
715
|
-
let mut has_content_type = false;
|
|
716
|
-
|
|
717
|
-
for (name, value) in headers.iter() {
|
|
718
|
-
if name.eq_ignore_ascii_case("content-type") {
|
|
719
|
-
has_content_type = true;
|
|
720
|
-
}
|
|
721
|
-
let header_name = HeaderName::from_bytes(name.as_bytes()).map_err(|err| {
|
|
722
|
-
(
|
|
723
|
-
StatusCode::INTERNAL_SERVER_ERROR,
|
|
724
|
-
format!("Invalid header name '{name}': {err}"),
|
|
725
|
-
)
|
|
726
|
-
})?;
|
|
727
|
-
let header_value = HeaderValue::from_str(value).map_err(|err| {
|
|
728
|
-
(
|
|
729
|
-
StatusCode::INTERNAL_SERVER_ERROR,
|
|
730
|
-
format!("Invalid header value for '{name}': {err}"),
|
|
731
|
-
)
|
|
732
|
-
})?;
|
|
733
|
-
|
|
734
|
-
response_builder = response_builder.header(header_name, header_value);
|
|
735
|
-
}
|
|
736
|
-
|
|
737
|
-
if !has_content_type && body.is_some() {
|
|
738
|
-
response_builder = response_builder.header(
|
|
739
|
-
HeaderName::from_static("content-type"),
|
|
740
|
-
HeaderValue::from_static("application/json"),
|
|
741
|
-
);
|
|
742
|
-
}
|
|
743
|
-
|
|
744
|
-
let body_bytes = if let Some(raw) = raw_body {
|
|
745
|
-
raw
|
|
746
|
-
} else if let Some(json_value) = body {
|
|
747
|
-
serde_json::to_vec(&json_value).map_err(|err| {
|
|
748
|
-
(
|
|
749
|
-
StatusCode::INTERNAL_SERVER_ERROR,
|
|
750
|
-
format!("Failed to serialise response body: {err}"),
|
|
751
|
-
)
|
|
752
|
-
})?
|
|
753
|
-
} else {
|
|
754
|
-
Vec::new()
|
|
755
|
-
};
|
|
756
|
-
|
|
757
|
-
response_builder.body(Body::from(body_bytes)).map_err(|err| {
|
|
758
|
-
(
|
|
759
|
-
StatusCode::INTERNAL_SERVER_ERROR,
|
|
760
|
-
format!("Failed to build response: {err}"),
|
|
761
|
-
)
|
|
762
|
-
})
|
|
763
|
-
}
|
|
764
|
-
}
|
|
765
|
-
|
|
766
|
-
impl Handler for RubyHandler {
|
|
767
|
-
fn call(
|
|
768
|
-
&self,
|
|
769
|
-
_req: axum::http::Request<Body>,
|
|
770
|
-
request_data: RequestData,
|
|
771
|
-
) -> Pin<Box<dyn std::future::Future<Output = HandlerResult> + Send + '_>> {
|
|
772
|
-
let handler = self.clone();
|
|
773
|
-
Box::pin(async move { handler.handle(request_data) })
|
|
774
|
-
}
|
|
775
|
-
}
|
|
776
|
-
|
|
777
|
-
async fn execute_request(
|
|
778
|
-
server: Arc<TestServer>,
|
|
779
|
-
method: Method,
|
|
780
|
-
path: String,
|
|
781
|
-
config: RequestConfig,
|
|
782
|
-
) -> Result<TestResponseData, NativeRequestError> {
|
|
783
|
-
let mut request = match method {
|
|
784
|
-
Method::GET => server.get(&path),
|
|
785
|
-
Method::POST => server.post(&path),
|
|
786
|
-
Method::PUT => server.put(&path),
|
|
787
|
-
Method::PATCH => server.patch(&path),
|
|
788
|
-
Method::DELETE => server.delete(&path),
|
|
789
|
-
Method::HEAD => server.method(Method::HEAD, &path),
|
|
790
|
-
Method::OPTIONS => server.method(Method::OPTIONS, &path),
|
|
791
|
-
Method::TRACE => server.method(Method::TRACE, &path),
|
|
792
|
-
other => return Err(NativeRequestError(format!("Unsupported HTTP method {other}"))),
|
|
793
|
-
};
|
|
794
|
-
|
|
795
|
-
if let Some(query) = config.query {
|
|
796
|
-
request = request.add_query_params(&query);
|
|
797
|
-
}
|
|
798
|
-
|
|
799
|
-
for (name, value) in config.headers {
|
|
800
|
-
request = request.add_header(name.as_str(), value.as_str());
|
|
801
|
-
}
|
|
802
|
-
|
|
803
|
-
for (name, value) in config.cookies {
|
|
804
|
-
request = request.add_cookie(Cookie::new(name, value));
|
|
805
|
-
}
|
|
806
|
-
|
|
807
|
-
if let Some(body) = config.body {
|
|
808
|
-
match body {
|
|
809
|
-
RequestBody::Json(json_value) => {
|
|
810
|
-
request = request.json(&json_value);
|
|
811
|
-
}
|
|
812
|
-
RequestBody::Form(form_value) => {
|
|
813
|
-
let encoded = encode_urlencoded_body(&form_value)
|
|
814
|
-
.map_err(|err| NativeRequestError(format!("Failed to encode form body: {err}")))?;
|
|
815
|
-
request = request
|
|
816
|
-
.content_type("application/x-www-form-urlencoded")
|
|
817
|
-
.bytes(Bytes::from(encoded));
|
|
818
|
-
}
|
|
819
|
-
RequestBody::Raw(raw) => {
|
|
820
|
-
request = request.bytes(Bytes::from(raw));
|
|
821
|
-
}
|
|
822
|
-
RequestBody::Multipart { form_data, files } => {
|
|
823
|
-
let (multipart_body, boundary) = build_multipart_body(&form_data, &files);
|
|
824
|
-
request = request
|
|
825
|
-
.content_type(&format!("multipart/form-data; boundary={}", boundary))
|
|
826
|
-
.bytes(Bytes::from(multipart_body));
|
|
827
|
-
}
|
|
828
|
-
}
|
|
829
|
-
}
|
|
830
|
-
|
|
831
|
-
let response = request.await;
|
|
832
|
-
let snapshot = snapshot_response(response).await.map_err(snapshot_err_to_native)?;
|
|
833
|
-
let body_text = if snapshot.body.is_empty() {
|
|
834
|
-
None
|
|
835
|
-
} else {
|
|
836
|
-
Some(String::from_utf8_lossy(&snapshot.body).into_owned())
|
|
837
|
-
};
|
|
838
|
-
|
|
839
|
-
Ok(TestResponseData {
|
|
840
|
-
status: snapshot.status,
|
|
841
|
-
headers: snapshot.headers,
|
|
842
|
-
body_text,
|
|
843
|
-
})
|
|
844
|
-
}
|
|
845
|
-
|
|
846
|
-
fn snapshot_err_to_native(err: SnapshotError) -> NativeRequestError {
|
|
847
|
-
NativeRequestError(err.to_string())
|
|
848
|
-
}
|
|
849
|
-
|
|
850
|
-
fn parse_request_config(ruby: &Ruby, options: Value) -> Result<RequestConfig, Error> {
|
|
851
|
-
if options.is_nil() {
|
|
852
|
-
return Ok(RequestConfig {
|
|
853
|
-
query: None,
|
|
854
|
-
headers: HashMap::new(),
|
|
855
|
-
cookies: HashMap::new(),
|
|
856
|
-
body: None,
|
|
857
|
-
});
|
|
858
|
-
}
|
|
859
|
-
|
|
860
|
-
let hash = RHash::from_value(options)
|
|
861
|
-
.ok_or_else(|| Error::new(ruby.exception_arg_error(), "request options must be a Hash"))?;
|
|
862
|
-
|
|
863
|
-
let json_module = ruby
|
|
864
|
-
.class_object()
|
|
865
|
-
.const_get("JSON")
|
|
866
|
-
.map_err(|_| Error::new(ruby.exception_runtime_error(), "JSON module not available"))?;
|
|
867
|
-
|
|
868
|
-
let query = if let Some(value) = get_kw(ruby, hash, "query") {
|
|
869
|
-
if value.is_nil() {
|
|
870
|
-
None
|
|
871
|
-
} else {
|
|
872
|
-
Some(ruby_value_to_json(ruby, json_module, value)?)
|
|
873
|
-
}
|
|
874
|
-
} else {
|
|
875
|
-
None
|
|
876
|
-
};
|
|
877
|
-
|
|
878
|
-
let headers = if let Some(value) = get_kw(ruby, hash, "headers") {
|
|
879
|
-
if value.is_nil() {
|
|
880
|
-
HashMap::new()
|
|
881
|
-
} else {
|
|
882
|
-
let hash = RHash::try_convert(value)?;
|
|
883
|
-
hash.to_hash_map::<String, String>()?
|
|
884
|
-
}
|
|
885
|
-
} else {
|
|
886
|
-
HashMap::new()
|
|
887
|
-
};
|
|
888
|
-
|
|
889
|
-
let cookies = if let Some(value) = get_kw(ruby, hash, "cookies") {
|
|
890
|
-
if value.is_nil() {
|
|
891
|
-
HashMap::new()
|
|
892
|
-
} else {
|
|
893
|
-
let hash = RHash::try_convert(value)?;
|
|
894
|
-
hash.to_hash_map::<String, String>()?
|
|
895
|
-
}
|
|
896
|
-
} else {
|
|
897
|
-
HashMap::new()
|
|
898
|
-
};
|
|
899
|
-
|
|
900
|
-
let files_opt = get_kw(ruby, hash, "files");
|
|
901
|
-
let has_files = files_opt.is_some() && !files_opt.unwrap().is_nil();
|
|
902
|
-
|
|
903
|
-
let body = if has_files {
|
|
904
|
-
let files_value = files_opt.unwrap();
|
|
905
|
-
let files = extract_files(ruby, files_value)?;
|
|
906
|
-
|
|
907
|
-
let mut form_data = Vec::new();
|
|
908
|
-
if let Some(data_value) = get_kw(ruby, hash, "data")
|
|
909
|
-
&& !data_value.is_nil()
|
|
910
|
-
{
|
|
911
|
-
let data_hash = RHash::try_convert(data_value)?;
|
|
912
|
-
|
|
913
|
-
let keys_array: RArray = data_hash.funcall("keys", ())?;
|
|
914
|
-
|
|
915
|
-
for i in 0..keys_array.len() {
|
|
916
|
-
let key_val = keys_array.entry::<Value>(i as isize)?;
|
|
917
|
-
let field_name = String::try_convert(key_val)?;
|
|
918
|
-
let value = data_hash
|
|
919
|
-
.get(key_val)
|
|
920
|
-
.ok_or_else(|| Error::new(ruby.exception_runtime_error(), "Failed to get hash value"))?;
|
|
921
|
-
|
|
922
|
-
if let Some(array) = RArray::from_value(value) {
|
|
923
|
-
for j in 0..array.len() {
|
|
924
|
-
let item = array.entry::<Value>(j as isize)?;
|
|
925
|
-
let item_str = String::try_convert(item)?;
|
|
926
|
-
form_data.push((field_name.clone(), item_str));
|
|
927
|
-
}
|
|
928
|
-
} else {
|
|
929
|
-
let value_str = String::try_convert(value)?;
|
|
930
|
-
form_data.push((field_name, value_str));
|
|
931
|
-
}
|
|
932
|
-
}
|
|
933
|
-
}
|
|
934
|
-
|
|
935
|
-
Some(RequestBody::Multipart { form_data, files })
|
|
936
|
-
} else if let Some(value) = get_kw(ruby, hash, "json") {
|
|
937
|
-
if value.is_nil() {
|
|
938
|
-
None
|
|
939
|
-
} else {
|
|
940
|
-
Some(RequestBody::Json(ruby_value_to_json(ruby, json_module, value)?))
|
|
941
|
-
}
|
|
942
|
-
} else if let Some(value) = get_kw(ruby, hash, "data") {
|
|
943
|
-
if value.is_nil() {
|
|
944
|
-
None
|
|
945
|
-
} else {
|
|
946
|
-
Some(RequestBody::Form(ruby_value_to_json(ruby, json_module, value)?))
|
|
947
|
-
}
|
|
948
|
-
} else if let Some(value) = get_kw(ruby, hash, "raw_body") {
|
|
949
|
-
if value.is_nil() {
|
|
950
|
-
None
|
|
951
|
-
} else {
|
|
952
|
-
Some(RequestBody::Raw(String::try_convert(value)?))
|
|
953
|
-
}
|
|
954
|
-
} else {
|
|
955
|
-
None
|
|
956
|
-
};
|
|
957
|
-
|
|
958
|
-
Ok(RequestConfig {
|
|
959
|
-
query,
|
|
960
|
-
headers,
|
|
961
|
-
cookies,
|
|
962
|
-
body,
|
|
963
|
-
})
|
|
964
|
-
}
|
|
965
|
-
|
|
966
|
-
fn build_ruby_request(
|
|
967
|
-
ruby: &Ruby,
|
|
968
|
-
handler: &RubyHandlerInner,
|
|
969
|
-
request_data: &RequestData,
|
|
970
|
-
validated_params: Option<&JsonValue>,
|
|
971
|
-
) -> Result<Value, Error> {
|
|
972
|
-
let hash = ruby.hash_new();
|
|
973
|
-
|
|
974
|
-
hash.aset(ruby.intern("method"), ruby.str_new(&handler.method))?;
|
|
975
|
-
hash.aset(ruby.intern("path"), ruby.str_new(&handler.path))?;
|
|
976
|
-
|
|
977
|
-
let path_params = map_to_ruby_hash(ruby, request_data.path_params.as_ref())?;
|
|
978
|
-
hash.aset(ruby.intern("path_params"), path_params)?;
|
|
979
|
-
|
|
980
|
-
let query_value = json_to_ruby(ruby, &request_data.query_params)?;
|
|
981
|
-
hash.aset(ruby.intern("query"), query_value)?;
|
|
982
|
-
|
|
983
|
-
let raw_query = multimap_to_ruby_hash(ruby, request_data.raw_query_params.as_ref())?;
|
|
984
|
-
hash.aset(ruby.intern("raw_query"), raw_query)?;
|
|
985
|
-
|
|
986
|
-
let headers = map_to_ruby_hash(ruby, request_data.headers.as_ref())?;
|
|
987
|
-
hash.aset(ruby.intern("headers"), headers)?;
|
|
988
|
-
|
|
989
|
-
let cookies = map_to_ruby_hash(ruby, request_data.cookies.as_ref())?;
|
|
990
|
-
hash.aset(ruby.intern("cookies"), cookies)?;
|
|
991
|
-
|
|
992
|
-
let body_value = json_to_ruby(ruby, &request_data.body)?;
|
|
993
|
-
hash.aset(ruby.intern("body"), body_value)?;
|
|
994
|
-
|
|
995
|
-
let params_value = if let Some(validated) = validated_params {
|
|
996
|
-
json_to_ruby(ruby, validated)?
|
|
997
|
-
} else {
|
|
998
|
-
build_default_params(ruby, request_data)?
|
|
999
|
-
};
|
|
1000
|
-
hash.aset(ruby.intern("params"), params_value)?;
|
|
1001
|
-
|
|
1002
|
-
Ok(hash.as_value())
|
|
1003
|
-
}
|
|
1004
|
-
|
|
1005
|
-
fn build_default_params(ruby: &Ruby, request_data: &RequestData) -> Result<Value, Error> {
|
|
1006
|
-
let mut map = JsonMap::new();
|
|
1007
|
-
|
|
1008
|
-
for (key, value) in request_data.path_params.as_ref() {
|
|
1009
|
-
map.insert(key.clone(), JsonValue::String(value.clone()));
|
|
1010
|
-
}
|
|
1011
|
-
|
|
1012
|
-
if let JsonValue::Object(obj) = &request_data.query_params {
|
|
1013
|
-
for (key, value) in obj {
|
|
1014
|
-
map.insert(key.clone(), value.clone());
|
|
1015
|
-
}
|
|
1016
|
-
}
|
|
1017
|
-
|
|
1018
|
-
for (key, value) in request_data.headers.as_ref() {
|
|
1019
|
-
map.insert(key.clone(), JsonValue::String(value.clone()));
|
|
1020
|
-
}
|
|
1021
|
-
|
|
1022
|
-
for (key, value) in request_data.cookies.as_ref() {
|
|
1023
|
-
map.insert(key.clone(), JsonValue::String(value.clone()));
|
|
1024
|
-
}
|
|
1025
|
-
|
|
1026
|
-
json_to_ruby(ruby, &JsonValue::Object(map))
|
|
1027
|
-
}
|
|
1028
|
-
|
|
1029
|
-
fn interpret_handler_response(
|
|
1030
|
-
ruby: &Ruby,
|
|
1031
|
-
handler: &RubyHandlerInner,
|
|
1032
|
-
value: Value,
|
|
1033
|
-
) -> Result<RubyHandlerResult, Error> {
|
|
1034
|
-
if value.is_nil() {
|
|
1035
|
-
return Ok(RubyHandlerResult::Payload(HandlerResponsePayload {
|
|
1036
|
-
status: 200,
|
|
1037
|
-
headers: HashMap::new(),
|
|
1038
|
-
body: None,
|
|
1039
|
-
raw_body: None,
|
|
1040
|
-
}));
|
|
1041
|
-
}
|
|
1042
|
-
|
|
1043
|
-
if is_streaming_response(ruby, value)? {
|
|
1044
|
-
let stream_value: Value = value.funcall("stream", ())?;
|
|
1045
|
-
let status: i64 = value.funcall("status_code", ())?;
|
|
1046
|
-
let headers_value: Value = value.funcall("headers", ())?;
|
|
1047
|
-
|
|
1048
|
-
let status_u16 = u16::try_from(status).map_err(|_| {
|
|
1049
|
-
Error::new(
|
|
1050
|
-
ruby.exception_arg_error(),
|
|
1051
|
-
"StreamingResponse status_code must be between 0 and 65535",
|
|
1052
|
-
)
|
|
1053
|
-
})?;
|
|
1054
|
-
|
|
1055
|
-
let headers = value_to_string_map(ruby, headers_value)?;
|
|
1056
|
-
|
|
1057
|
-
return Ok(RubyHandlerResult::Streaming(StreamingResponsePayload {
|
|
1058
|
-
enumerator: Arc::new(Opaque::from(stream_value)),
|
|
1059
|
-
status: status_u16,
|
|
1060
|
-
headers,
|
|
1061
|
-
}));
|
|
1062
|
-
}
|
|
1063
|
-
|
|
1064
|
-
let status_symbol = ruby.intern("status_code");
|
|
1065
|
-
if value.respond_to(status_symbol, false)? {
|
|
1066
|
-
let status: i64 = value.funcall("status_code", ())?;
|
|
1067
|
-
let status_u16 = u16::try_from(status)
|
|
1068
|
-
.map_err(|_| Error::new(ruby.exception_arg_error(), "status_code must be between 0 and 65535"))?;
|
|
1069
|
-
|
|
1070
|
-
let headers_value: Value = value.funcall("headers", ())?;
|
|
1071
|
-
let headers = if headers_value.is_nil() {
|
|
1072
|
-
HashMap::new()
|
|
1073
|
-
} else {
|
|
1074
|
-
let hash = RHash::try_convert(headers_value)?;
|
|
1075
|
-
hash.to_hash_map::<String, String>()?
|
|
1076
|
-
};
|
|
1077
|
-
|
|
1078
|
-
let content_value: Value = value.funcall("content", ())?;
|
|
1079
|
-
let mut raw_body = None;
|
|
1080
|
-
let body = if content_value.is_nil() {
|
|
1081
|
-
None
|
|
1082
|
-
} else if let Ok(str_value) = RString::try_convert(content_value) {
|
|
1083
|
-
let slice = unsafe { str_value.as_slice() };
|
|
1084
|
-
raw_body = Some(slice.to_vec());
|
|
1085
|
-
None
|
|
1086
|
-
} else {
|
|
1087
|
-
Some(ruby_value_to_json(
|
|
1088
|
-
ruby,
|
|
1089
|
-
handler.json_module.get_inner_with(ruby),
|
|
1090
|
-
content_value,
|
|
1091
|
-
)?)
|
|
1092
|
-
};
|
|
1093
|
-
|
|
1094
|
-
return Ok(RubyHandlerResult::Payload(HandlerResponsePayload {
|
|
1095
|
-
status: status_u16,
|
|
1096
|
-
headers,
|
|
1097
|
-
body,
|
|
1098
|
-
raw_body,
|
|
1099
|
-
}));
|
|
1100
|
-
}
|
|
1101
|
-
|
|
1102
|
-
if let Ok(str_value) = RString::try_convert(value) {
|
|
1103
|
-
let slice = unsafe { str_value.as_slice() };
|
|
1104
|
-
return Ok(RubyHandlerResult::Payload(HandlerResponsePayload {
|
|
1105
|
-
status: 200,
|
|
1106
|
-
headers: HashMap::new(),
|
|
1107
|
-
body: None,
|
|
1108
|
-
raw_body: Some(slice.to_vec()),
|
|
1109
|
-
}));
|
|
1110
|
-
}
|
|
1111
|
-
|
|
1112
|
-
let body_json = ruby_value_to_json(ruby, handler.json_module.get_inner_with(ruby), value)?;
|
|
1113
|
-
|
|
1114
|
-
Ok(RubyHandlerResult::Payload(HandlerResponsePayload {
|
|
1115
|
-
status: 200,
|
|
1116
|
-
headers: HashMap::new(),
|
|
1117
|
-
body: Some(body_json),
|
|
1118
|
-
raw_body: None,
|
|
1119
|
-
}))
|
|
1120
|
-
}
|
|
1121
|
-
|
|
1122
|
-
fn value_to_string_map(ruby: &Ruby, value: Value) -> Result<HashMap<String, String>, Error> {
|
|
1123
|
-
if value.is_nil() {
|
|
1124
|
-
return Ok(HashMap::new());
|
|
1125
|
-
}
|
|
1126
|
-
let hash = RHash::try_convert(value)?;
|
|
1127
|
-
hash.to_hash_map::<String, String>().map_err(|err| {
|
|
1128
|
-
Error::new(
|
|
1129
|
-
ruby.exception_arg_error(),
|
|
1130
|
-
format!("Expected headers hash of strings: {}", err),
|
|
1131
|
-
)
|
|
1132
|
-
})
|
|
1133
|
-
}
|
|
1134
|
-
|
|
1135
|
-
fn is_streaming_response(ruby: &Ruby, value: Value) -> Result<bool, Error> {
|
|
1136
|
-
let stream_sym = ruby.intern("stream");
|
|
1137
|
-
let status_sym = ruby.intern("status_code");
|
|
1138
|
-
Ok(value.respond_to(stream_sym, false)? && value.respond_to(status_sym, false)?)
|
|
1139
|
-
}
|
|
1140
|
-
|
|
1141
|
-
fn response_to_ruby(ruby: &Ruby, response: TestResponseData) -> Result<Value, Error> {
|
|
1142
|
-
let hash = ruby.hash_new();
|
|
1143
|
-
|
|
1144
|
-
hash.aset(
|
|
1145
|
-
ruby.intern("status_code"),
|
|
1146
|
-
ruby.integer_from_i64(response.status as i64),
|
|
1147
|
-
)?;
|
|
1148
|
-
|
|
1149
|
-
let headers_hash = ruby.hash_new();
|
|
1150
|
-
for (key, value) in response.headers {
|
|
1151
|
-
headers_hash.aset(ruby.str_new(&key), ruby.str_new(&value))?;
|
|
1152
|
-
}
|
|
1153
|
-
hash.aset(ruby.intern("headers"), headers_hash)?;
|
|
1154
|
-
|
|
1155
|
-
if let Some(body) = response.body_text {
|
|
1156
|
-
let body_value = ruby.str_new(&body);
|
|
1157
|
-
hash.aset(ruby.intern("body"), body_value)?;
|
|
1158
|
-
hash.aset(ruby.intern("body_text"), body_value)?;
|
|
1159
|
-
} else {
|
|
1160
|
-
hash.aset(ruby.intern("body"), ruby.qnil())?;
|
|
1161
|
-
hash.aset(ruby.intern("body_text"), ruby.qnil())?;
|
|
1162
|
-
}
|
|
1163
|
-
|
|
1164
|
-
Ok(hash.as_value())
|
|
1165
|
-
}
|
|
1166
|
-
|
|
1167
|
-
fn ruby_value_to_json(ruby: &Ruby, json_module: Value, value: Value) -> Result<JsonValue, Error> {
|
|
1168
|
-
if value.is_nil() {
|
|
1169
|
-
return Ok(JsonValue::Null);
|
|
1170
|
-
}
|
|
1171
|
-
|
|
1172
|
-
let json_string: String = json_module.funcall("generate", (value,))?;
|
|
1173
|
-
serde_json::from_str(&json_string).map_err(|err| {
|
|
1174
|
-
Error::new(
|
|
1175
|
-
ruby.exception_runtime_error(),
|
|
1176
|
-
format!("Failed to convert Ruby value to JSON: {err}"),
|
|
1177
|
-
)
|
|
1178
|
-
})
|
|
1179
|
-
}
|
|
1180
|
-
|
|
1181
|
-
fn json_to_ruby(ruby: &Ruby, value: &JsonValue) -> Result<Value, Error> {
|
|
1182
|
-
match value {
|
|
1183
|
-
JsonValue::Null => Ok(ruby.qnil().as_value()),
|
|
1184
|
-
JsonValue::Bool(b) => Ok(if *b {
|
|
1185
|
-
ruby.qtrue().as_value()
|
|
1186
|
-
} else {
|
|
1187
|
-
ruby.qfalse().as_value()
|
|
1188
|
-
}),
|
|
1189
|
-
JsonValue::Number(num) => {
|
|
1190
|
-
if let Some(i) = num.as_i64() {
|
|
1191
|
-
Ok(ruby.integer_from_i64(i).as_value())
|
|
1192
|
-
} else if let Some(f) = num.as_f64() {
|
|
1193
|
-
Ok(ruby.float_from_f64(f).as_value())
|
|
1194
|
-
} else {
|
|
1195
|
-
Ok(ruby.qnil().as_value())
|
|
1196
|
-
}
|
|
1197
|
-
}
|
|
1198
|
-
JsonValue::String(str_val) => Ok(ruby.str_new(str_val).as_value()),
|
|
1199
|
-
JsonValue::Array(items) => {
|
|
1200
|
-
let array = ruby.ary_new();
|
|
1201
|
-
for item in items {
|
|
1202
|
-
array.push(json_to_ruby(ruby, item)?)?;
|
|
1203
|
-
}
|
|
1204
|
-
Ok(array.as_value())
|
|
1205
|
-
}
|
|
1206
|
-
JsonValue::Object(map) => {
|
|
1207
|
-
let hash = ruby.hash_new();
|
|
1208
|
-
for (key, item) in map {
|
|
1209
|
-
hash.aset(ruby.str_new(key), json_to_ruby(ruby, item)?)?;
|
|
1210
|
-
}
|
|
1211
|
-
Ok(hash.as_value())
|
|
1212
|
-
}
|
|
1213
|
-
}
|
|
1214
|
-
}
|
|
1215
|
-
|
|
1216
|
-
fn map_to_ruby_hash(ruby: &Ruby, map: &HashMap<String, String>) -> Result<Value, Error> {
|
|
1217
|
-
let hash = ruby.hash_new();
|
|
1218
|
-
for (key, value) in map {
|
|
1219
|
-
hash.aset(ruby.str_new(key), ruby.str_new(value))?;
|
|
1220
|
-
}
|
|
1221
|
-
Ok(hash.as_value())
|
|
1222
|
-
}
|
|
1223
|
-
|
|
1224
|
-
fn multimap_to_ruby_hash(ruby: &Ruby, map: &HashMap<String, Vec<String>>) -> Result<Value, Error> {
|
|
1225
|
-
let hash = ruby.hash_new();
|
|
1226
|
-
for (key, values) in map {
|
|
1227
|
-
let array = ruby.ary_new();
|
|
1228
|
-
for value in values {
|
|
1229
|
-
array.push(ruby.str_new(value))?;
|
|
1230
|
-
}
|
|
1231
|
-
hash.aset(ruby.str_new(key), array)?;
|
|
1232
|
-
}
|
|
1233
|
-
Ok(hash.as_value())
|
|
1234
|
-
}
|
|
1235
|
-
|
|
1236
|
-
fn problem_to_json(problem: &ProblemDetails) -> String {
|
|
1237
|
-
problem
|
|
1238
|
-
.to_json_pretty()
|
|
1239
|
-
.unwrap_or_else(|err| format!("Failed to serialise problem details: {err}"))
|
|
1240
|
-
}
|
|
1241
|
-
|
|
1242
|
-
fn get_kw(ruby: &Ruby, hash: RHash, name: &str) -> Option<Value> {
|
|
1243
|
-
let sym = ruby.intern(name);
|
|
1244
|
-
hash.get(sym).or_else(|| hash.get(name))
|
|
1245
|
-
}
|
|
1246
|
-
|
|
1247
|
-
fn fetch_handler(ruby: &Ruby, handlers: &RHash, name: &str) -> Result<Value, Error> {
|
|
1248
|
-
let symbol_key = ruby.intern(name);
|
|
1249
|
-
if let Some(value) = handlers.get(symbol_key) {
|
|
1250
|
-
return Ok(value);
|
|
1251
|
-
}
|
|
1252
|
-
|
|
1253
|
-
let string_key = ruby.str_new(name);
|
|
1254
|
-
if let Some(value) = handlers.get(string_key) {
|
|
1255
|
-
return Ok(value);
|
|
1256
|
-
}
|
|
1257
|
-
|
|
1258
|
-
Err(Error::new(
|
|
1259
|
-
ruby.exception_name_error(),
|
|
1260
|
-
format!("Handler '{name}' not provided"),
|
|
1261
|
-
))
|
|
1262
|
-
}
|
|
1263
|
-
|
|
1264
|
-
/// GC mark hook so Ruby keeps handler closures alive.
|
|
1265
|
-
#[allow(dead_code)]
|
|
1266
|
-
fn mark(client: &NativeTestClient, marker: &Marker) {
|
|
1267
|
-
let inner_ref = client.inner.borrow();
|
|
1268
|
-
if let Some(inner) = inner_ref.as_ref() {
|
|
1269
|
-
for handler in &inner.handlers {
|
|
1270
|
-
handler.mark(marker);
|
|
1271
|
-
}
|
|
1272
|
-
}
|
|
1273
|
-
}
|
|
1274
|
-
|
|
1275
|
-
/// Return the Spikard version.
|
|
1276
|
-
fn version() -> String {
|
|
1277
|
-
env!("CARGO_PKG_VERSION").to_string()
|
|
1278
|
-
}
|
|
1279
|
-
|
|
1280
|
-
/// Build dependency container from Ruby dependencies
|
|
1281
|
-
///
|
|
1282
|
-
/// Converts Ruby dependencies (values and factories) to Rust DependencyContainer
|
|
1283
|
-
#[cfg(feature = "di")]
|
|
1284
|
-
fn build_dependency_container(
|
|
1285
|
-
ruby: &Ruby,
|
|
1286
|
-
dependencies: Value,
|
|
1287
|
-
) -> Result<spikard_core::di::DependencyContainer, Error> {
|
|
1288
|
-
use spikard_core::di::DependencyContainer;
|
|
1289
|
-
use std::sync::Arc;
|
|
1290
|
-
|
|
1291
|
-
if dependencies.is_nil() {
|
|
1292
|
-
return Ok(DependencyContainer::new());
|
|
1293
|
-
}
|
|
1294
|
-
|
|
1295
|
-
let mut container = DependencyContainer::new();
|
|
1296
|
-
let deps_hash = RHash::try_convert(dependencies)?;
|
|
1297
|
-
|
|
1298
|
-
deps_hash.foreach(|key: String, value: Value| -> Result<ForEach, Error> {
|
|
1299
|
-
// Check if this is a factory (has a 'type' field set to :factory)
|
|
1300
|
-
if let Ok(dep_hash) = RHash::try_convert(value) {
|
|
1301
|
-
let dep_type: Option<String> = get_kw(ruby, dep_hash, "type").and_then(|v| {
|
|
1302
|
-
// Handle both symbol and string types
|
|
1303
|
-
if let Ok(sym) = magnus::Symbol::try_convert(v) {
|
|
1304
|
-
Some(sym.name().ok()?.to_string())
|
|
1305
|
-
} else {
|
|
1306
|
-
String::try_convert(v).ok()
|
|
1307
|
-
}
|
|
1308
|
-
});
|
|
1309
|
-
|
|
1310
|
-
match dep_type.as_deref() {
|
|
1311
|
-
Some("factory") => {
|
|
1312
|
-
// Factory dependency
|
|
1313
|
-
let factory = get_kw(ruby, dep_hash, "factory")
|
|
1314
|
-
.ok_or_else(|| Error::new(ruby.exception_runtime_error(), "Factory missing 'factory' key"))?;
|
|
1315
|
-
|
|
1316
|
-
let depends_on: Vec<String> = get_kw(ruby, dep_hash, "depends_on")
|
|
1317
|
-
.and_then(|v| Vec::<String>::try_convert(v).ok())
|
|
1318
|
-
.unwrap_or_default();
|
|
1319
|
-
|
|
1320
|
-
let singleton: bool = get_kw(ruby, dep_hash, "singleton")
|
|
1321
|
-
.and_then(|v| bool::try_convert(v).ok())
|
|
1322
|
-
.unwrap_or(false);
|
|
1323
|
-
|
|
1324
|
-
let cacheable: bool = get_kw(ruby, dep_hash, "cacheable")
|
|
1325
|
-
.and_then(|v| bool::try_convert(v).ok())
|
|
1326
|
-
.unwrap_or(true);
|
|
1327
|
-
|
|
1328
|
-
let factory_dep =
|
|
1329
|
-
crate::di::RubyFactoryDependency::new(key.clone(), factory, depends_on, singleton, cacheable);
|
|
1330
|
-
|
|
1331
|
-
container.register(key.clone(), Arc::new(factory_dep)).map_err(|e| {
|
|
1332
|
-
Error::new(
|
|
1333
|
-
ruby.exception_runtime_error(),
|
|
1334
|
-
format!("Failed to register factory '{}': {}", key, e),
|
|
1335
|
-
)
|
|
1336
|
-
})?;
|
|
1337
|
-
}
|
|
1338
|
-
Some("value") => {
|
|
1339
|
-
// Value dependency
|
|
1340
|
-
let value_data = get_kw(ruby, dep_hash, "value").ok_or_else(|| {
|
|
1341
|
-
Error::new(ruby.exception_runtime_error(), "Value dependency missing 'value' key")
|
|
1342
|
-
})?;
|
|
1343
|
-
|
|
1344
|
-
let value_dep = crate::di::RubyValueDependency::new(key.clone(), value_data);
|
|
1345
|
-
|
|
1346
|
-
container.register(key.clone(), Arc::new(value_dep)).map_err(|e| {
|
|
1347
|
-
Error::new(
|
|
1348
|
-
ruby.exception_runtime_error(),
|
|
1349
|
-
format!("Failed to register value '{}': {}", key, e),
|
|
1350
|
-
)
|
|
1351
|
-
})?;
|
|
1352
|
-
}
|
|
1353
|
-
_ => {
|
|
1354
|
-
return Err(Error::new(
|
|
1355
|
-
ruby.exception_runtime_error(),
|
|
1356
|
-
format!("Invalid dependency type for '{}'", key),
|
|
1357
|
-
));
|
|
1358
|
-
}
|
|
1359
|
-
}
|
|
1360
|
-
} else {
|
|
1361
|
-
// Treat as raw value
|
|
1362
|
-
let value_dep = crate::di::RubyValueDependency::new(key.clone(), value);
|
|
1363
|
-
container.register(key.clone(), Arc::new(value_dep)).map_err(|e| {
|
|
1364
|
-
Error::new(
|
|
1365
|
-
ruby.exception_runtime_error(),
|
|
1366
|
-
format!("Failed to register value '{}': {}", key, e),
|
|
1367
|
-
)
|
|
1368
|
-
})?;
|
|
1369
|
-
}
|
|
1370
|
-
|
|
1371
|
-
Ok(ForEach::Continue)
|
|
1372
|
-
})?;
|
|
1373
|
-
|
|
1374
|
-
Ok(container)
|
|
1375
|
-
}
|
|
1376
|
-
|
|
1377
|
-
/// Helper to extract an optional string from a Ruby Hash
|
|
1378
|
-
fn get_optional_string_from_hash(hash: RHash, key: &str) -> Result<Option<String>, Error> {
|
|
1379
|
-
match hash.get(String::from(key)) {
|
|
1380
|
-
Some(v) if !v.is_nil() => Ok(Some(String::try_convert(v)?)),
|
|
1381
|
-
_ => Ok(None),
|
|
1382
|
-
}
|
|
1383
|
-
}
|
|
1384
|
-
|
|
1385
|
-
/// Helper to extract a required string from a Ruby Hash
|
|
1386
|
-
fn get_required_string_from_hash(hash: RHash, key: &str, ruby: &Ruby) -> Result<String, Error> {
|
|
1387
|
-
let value = hash
|
|
1388
|
-
.get(String::from(key))
|
|
1389
|
-
.ok_or_else(|| Error::new(ruby.exception_arg_error(), format!("missing required key '{}'", key)))?;
|
|
1390
|
-
if value.is_nil() {
|
|
1391
|
-
return Err(Error::new(
|
|
1392
|
-
ruby.exception_arg_error(),
|
|
1393
|
-
format!("key '{}' cannot be nil", key),
|
|
1394
|
-
));
|
|
1395
|
-
}
|
|
1396
|
-
String::try_convert(value)
|
|
1397
|
-
}
|
|
1398
|
-
|
|
1399
|
-
fn extract_files(ruby: &Ruby, files_value: Value) -> Result<Vec<MultipartFilePart>, Error> {
|
|
1400
|
-
let files_hash = RHash::try_convert(files_value)?;
|
|
1401
|
-
|
|
1402
|
-
let keys_array: RArray = files_hash.funcall("keys", ())?;
|
|
1403
|
-
let mut result = Vec::new();
|
|
1404
|
-
|
|
1405
|
-
for i in 0..keys_array.len() {
|
|
1406
|
-
let key_val = keys_array.entry::<Value>(i as isize)?;
|
|
1407
|
-
let field_name = String::try_convert(key_val)?;
|
|
1408
|
-
let value = files_hash
|
|
1409
|
-
.get(key_val)
|
|
1410
|
-
.ok_or_else(|| Error::new(ruby.exception_runtime_error(), "Failed to get hash value"))?;
|
|
1411
|
-
|
|
1412
|
-
if let Some(outer_array) = RArray::from_value(value) {
|
|
1413
|
-
if outer_array.is_empty() {
|
|
1414
|
-
continue;
|
|
1415
|
-
}
|
|
1416
|
-
|
|
1417
|
-
let first_elem = outer_array.entry::<Value>(0)?;
|
|
1418
|
-
|
|
1419
|
-
if RArray::from_value(first_elem).is_some() {
|
|
1420
|
-
for j in 0..outer_array.len() {
|
|
1421
|
-
let file_array = outer_array.entry::<Value>(j as isize)?;
|
|
1422
|
-
let file_data = extract_single_file(ruby, &field_name, file_array)?;
|
|
1423
|
-
result.push(file_data);
|
|
1424
|
-
}
|
|
1425
|
-
} else {
|
|
1426
|
-
let file_data = extract_single_file(ruby, &field_name, value)?;
|
|
1427
|
-
result.push(file_data);
|
|
1428
|
-
}
|
|
1429
|
-
}
|
|
1430
|
-
}
|
|
1431
|
-
|
|
1432
|
-
Ok(result)
|
|
1433
|
-
}
|
|
1434
|
-
|
|
1435
|
-
/// Extract a single file from Ruby array [filename, content, content_type (optional)]
|
|
1436
|
-
fn extract_single_file(ruby: &Ruby, field_name: &str, array_value: Value) -> Result<MultipartFilePart, Error> {
|
|
1437
|
-
let array = RArray::from_value(array_value)
|
|
1438
|
-
.ok_or_else(|| Error::new(ruby.exception_arg_error(), "file must be an Array [filename, content]"))?;
|
|
1439
|
-
|
|
1440
|
-
if array.len() < 2 {
|
|
1441
|
-
return Err(Error::new(
|
|
1442
|
-
ruby.exception_arg_error(),
|
|
1443
|
-
"file Array must have at least 2 elements: [filename, content]",
|
|
1444
|
-
));
|
|
1445
|
-
}
|
|
1446
|
-
|
|
1447
|
-
let filename: String = String::try_convert(array.shift()?)?;
|
|
1448
|
-
let content_str: String = String::try_convert(array.shift()?)?;
|
|
1449
|
-
let content = content_str.into_bytes();
|
|
1450
|
-
|
|
1451
|
-
let content_type: Option<String> = if !array.is_empty() {
|
|
1452
|
-
String::try_convert(array.shift()?).ok()
|
|
1453
|
-
} else {
|
|
1454
|
-
None
|
|
1455
|
-
};
|
|
1456
|
-
|
|
1457
|
-
Ok(MultipartFilePart {
|
|
1458
|
-
field_name: field_name.to_string(),
|
|
1459
|
-
filename,
|
|
1460
|
-
content,
|
|
1461
|
-
content_type,
|
|
1462
|
-
})
|
|
1463
|
-
}
|
|
1464
|
-
|
|
1465
|
-
/// Extract ServerConfig from Ruby ServerConfig object
|
|
1466
|
-
fn extract_server_config(ruby: &Ruby, config_value: Value) -> Result<spikard_http::ServerConfig, Error> {
|
|
1467
|
-
use spikard_http::{
|
|
1468
|
-
ApiKeyConfig, CompressionConfig, ContactInfo, JwtConfig, LicenseInfo, OpenApiConfig, RateLimitConfig,
|
|
1469
|
-
ServerInfo, StaticFilesConfig,
|
|
1470
|
-
};
|
|
1471
|
-
use std::collections::HashMap;
|
|
1472
|
-
|
|
1473
|
-
let host: String = config_value.funcall("host", ())?;
|
|
1474
|
-
|
|
1475
|
-
let port: u32 = config_value.funcall("port", ())?;
|
|
1476
|
-
|
|
1477
|
-
let workers: usize = config_value.funcall("workers", ())?;
|
|
1478
|
-
|
|
1479
|
-
let enable_request_id: bool = config_value.funcall("enable_request_id", ())?;
|
|
1480
|
-
|
|
1481
|
-
let max_body_size_value: Value = config_value.funcall("max_body_size", ())?;
|
|
1482
|
-
let max_body_size = if max_body_size_value.is_nil() {
|
|
1483
|
-
None
|
|
1484
|
-
} else {
|
|
1485
|
-
Some(u64::try_convert(max_body_size_value)? as usize)
|
|
1486
|
-
};
|
|
1487
|
-
|
|
1488
|
-
let request_timeout_value: Value = config_value.funcall("request_timeout", ())?;
|
|
1489
|
-
let request_timeout = if request_timeout_value.is_nil() {
|
|
1490
|
-
None
|
|
1491
|
-
} else {
|
|
1492
|
-
Some(u64::try_convert(request_timeout_value)?)
|
|
1493
|
-
};
|
|
1494
|
-
|
|
1495
|
-
let graceful_shutdown: bool = config_value.funcall("graceful_shutdown", ())?;
|
|
1496
|
-
|
|
1497
|
-
let shutdown_timeout: u64 = config_value.funcall("shutdown_timeout", ())?;
|
|
1498
|
-
|
|
1499
|
-
let compression_value: Value = config_value.funcall("compression", ())?;
|
|
1500
|
-
let compression = if compression_value.is_nil() {
|
|
1501
|
-
None
|
|
1502
|
-
} else {
|
|
1503
|
-
let gzip: bool = compression_value.funcall("gzip", ())?;
|
|
1504
|
-
let brotli: bool = compression_value.funcall("brotli", ())?;
|
|
1505
|
-
let min_size: usize = compression_value.funcall("min_size", ())?;
|
|
1506
|
-
let quality: u32 = compression_value.funcall("quality", ())?;
|
|
1507
|
-
Some(CompressionConfig {
|
|
1508
|
-
gzip,
|
|
1509
|
-
brotli,
|
|
1510
|
-
min_size,
|
|
1511
|
-
quality,
|
|
1512
|
-
})
|
|
1513
|
-
};
|
|
1514
|
-
|
|
1515
|
-
let rate_limit_value: Value = config_value.funcall("rate_limit", ())?;
|
|
1516
|
-
let rate_limit = if rate_limit_value.is_nil() {
|
|
1517
|
-
None
|
|
1518
|
-
} else {
|
|
1519
|
-
let per_second: u64 = rate_limit_value.funcall("per_second", ())?;
|
|
1520
|
-
let burst: u32 = rate_limit_value.funcall("burst", ())?;
|
|
1521
|
-
let ip_based: bool = rate_limit_value.funcall("ip_based", ())?;
|
|
1522
|
-
Some(RateLimitConfig {
|
|
1523
|
-
per_second,
|
|
1524
|
-
burst,
|
|
1525
|
-
ip_based,
|
|
1526
|
-
})
|
|
1527
|
-
};
|
|
1528
|
-
|
|
1529
|
-
let jwt_auth_value: Value = config_value.funcall("jwt_auth", ())?;
|
|
1530
|
-
let jwt_auth = if jwt_auth_value.is_nil() {
|
|
1531
|
-
None
|
|
1532
|
-
} else {
|
|
1533
|
-
let secret: String = jwt_auth_value.funcall("secret", ())?;
|
|
1534
|
-
let algorithm: String = jwt_auth_value.funcall("algorithm", ())?;
|
|
1535
|
-
let audience_value: Value = jwt_auth_value.funcall("audience", ())?;
|
|
1536
|
-
let audience = if audience_value.is_nil() {
|
|
1537
|
-
None
|
|
1538
|
-
} else {
|
|
1539
|
-
Some(Vec::<String>::try_convert(audience_value)?)
|
|
1540
|
-
};
|
|
1541
|
-
let issuer_value: Value = jwt_auth_value.funcall("issuer", ())?;
|
|
1542
|
-
let issuer = if issuer_value.is_nil() {
|
|
1543
|
-
None
|
|
1544
|
-
} else {
|
|
1545
|
-
Some(String::try_convert(issuer_value)?)
|
|
1546
|
-
};
|
|
1547
|
-
let leeway: u64 = jwt_auth_value.funcall("leeway", ())?;
|
|
1548
|
-
Some(JwtConfig {
|
|
1549
|
-
secret,
|
|
1550
|
-
algorithm,
|
|
1551
|
-
audience,
|
|
1552
|
-
issuer,
|
|
1553
|
-
leeway,
|
|
1554
|
-
})
|
|
1555
|
-
};
|
|
1556
|
-
|
|
1557
|
-
let api_key_auth_value: Value = config_value.funcall("api_key_auth", ())?;
|
|
1558
|
-
let api_key_auth = if api_key_auth_value.is_nil() {
|
|
1559
|
-
None
|
|
1560
|
-
} else {
|
|
1561
|
-
let keys: Vec<String> = api_key_auth_value.funcall("keys", ())?;
|
|
1562
|
-
let header_name: String = api_key_auth_value.funcall("header_name", ())?;
|
|
1563
|
-
Some(ApiKeyConfig { keys, header_name })
|
|
1564
|
-
};
|
|
1565
|
-
|
|
1566
|
-
let static_files_value: Value = config_value.funcall("static_files", ())?;
|
|
1567
|
-
let static_files_array = RArray::from_value(static_files_value)
|
|
1568
|
-
.ok_or_else(|| Error::new(ruby.exception_type_error(), "static_files must be an Array"))?;
|
|
1569
|
-
|
|
1570
|
-
let mut static_files = Vec::new();
|
|
1571
|
-
for i in 0..static_files_array.len() {
|
|
1572
|
-
let sf_value = static_files_array.entry::<Value>(i as isize)?;
|
|
1573
|
-
let directory: String = sf_value.funcall("directory", ())?;
|
|
1574
|
-
let route_prefix: String = sf_value.funcall("route_prefix", ())?;
|
|
1575
|
-
let index_file: bool = sf_value.funcall("index_file", ())?;
|
|
1576
|
-
let cache_control_value: Value = sf_value.funcall("cache_control", ())?;
|
|
1577
|
-
let cache_control = if cache_control_value.is_nil() {
|
|
1578
|
-
None
|
|
1579
|
-
} else {
|
|
1580
|
-
Some(String::try_convert(cache_control_value)?)
|
|
1581
|
-
};
|
|
1582
|
-
static_files.push(StaticFilesConfig {
|
|
1583
|
-
directory,
|
|
1584
|
-
route_prefix,
|
|
1585
|
-
index_file,
|
|
1586
|
-
cache_control,
|
|
1587
|
-
});
|
|
1588
|
-
}
|
|
1589
|
-
|
|
1590
|
-
let openapi_value: Value = config_value.funcall("openapi", ())?;
|
|
1591
|
-
let openapi = if openapi_value.is_nil() {
|
|
1592
|
-
None
|
|
1593
|
-
} else {
|
|
1594
|
-
let enabled: bool = openapi_value.funcall("enabled", ())?;
|
|
1595
|
-
let title: String = openapi_value.funcall("title", ())?;
|
|
1596
|
-
let version: String = openapi_value.funcall("version", ())?;
|
|
1597
|
-
let description_value: Value = openapi_value.funcall("description", ())?;
|
|
1598
|
-
let description = if description_value.is_nil() {
|
|
1599
|
-
None
|
|
1600
|
-
} else {
|
|
1601
|
-
Some(String::try_convert(description_value)?)
|
|
1602
|
-
};
|
|
1603
|
-
let swagger_ui_path: String = openapi_value.funcall("swagger_ui_path", ())?;
|
|
1604
|
-
let redoc_path: String = openapi_value.funcall("redoc_path", ())?;
|
|
1605
|
-
let openapi_json_path: String = openapi_value.funcall("openapi_json_path", ())?;
|
|
1606
|
-
|
|
1607
|
-
let contact_value: Value = openapi_value.funcall("contact", ())?;
|
|
1608
|
-
let contact = if contact_value.is_nil() {
|
|
1609
|
-
None
|
|
1610
|
-
} else if let Some(contact_hash) = RHash::from_value(contact_value) {
|
|
1611
|
-
let name = get_optional_string_from_hash(contact_hash, "name")?;
|
|
1612
|
-
let email = get_optional_string_from_hash(contact_hash, "email")?;
|
|
1613
|
-
let url = get_optional_string_from_hash(contact_hash, "url")?;
|
|
1614
|
-
Some(ContactInfo { name, email, url })
|
|
1615
|
-
} else {
|
|
1616
|
-
let name_value: Value = contact_value.funcall("name", ())?;
|
|
1617
|
-
let email_value: Value = contact_value.funcall("email", ())?;
|
|
1618
|
-
let url_value: Value = contact_value.funcall("url", ())?;
|
|
1619
|
-
Some(ContactInfo {
|
|
1620
|
-
name: if name_value.is_nil() {
|
|
1621
|
-
None
|
|
1622
|
-
} else {
|
|
1623
|
-
Some(String::try_convert(name_value)?)
|
|
1624
|
-
},
|
|
1625
|
-
email: if email_value.is_nil() {
|
|
1626
|
-
None
|
|
1627
|
-
} else {
|
|
1628
|
-
Some(String::try_convert(email_value)?)
|
|
1629
|
-
},
|
|
1630
|
-
url: if url_value.is_nil() {
|
|
1631
|
-
None
|
|
1632
|
-
} else {
|
|
1633
|
-
Some(String::try_convert(url_value)?)
|
|
1634
|
-
},
|
|
1635
|
-
})
|
|
1636
|
-
};
|
|
1637
|
-
|
|
1638
|
-
let license_value: Value = openapi_value.funcall("license", ())?;
|
|
1639
|
-
let license = if license_value.is_nil() {
|
|
1640
|
-
None
|
|
1641
|
-
} else if let Some(license_hash) = RHash::from_value(license_value) {
|
|
1642
|
-
let name = get_required_string_from_hash(license_hash, "name", ruby)?;
|
|
1643
|
-
let url = get_optional_string_from_hash(license_hash, "url")?;
|
|
1644
|
-
Some(LicenseInfo { name, url })
|
|
1645
|
-
} else {
|
|
1646
|
-
let name: String = license_value.funcall("name", ())?;
|
|
1647
|
-
let url_value: Value = license_value.funcall("url", ())?;
|
|
1648
|
-
let url = if url_value.is_nil() {
|
|
1649
|
-
None
|
|
1650
|
-
} else {
|
|
1651
|
-
Some(String::try_convert(url_value)?)
|
|
1652
|
-
};
|
|
1653
|
-
Some(LicenseInfo { name, url })
|
|
1654
|
-
};
|
|
1655
|
-
|
|
1656
|
-
let servers_value: Value = openapi_value.funcall("servers", ())?;
|
|
1657
|
-
let servers_array = RArray::from_value(servers_value)
|
|
1658
|
-
.ok_or_else(|| Error::new(ruby.exception_type_error(), "servers must be an Array"))?;
|
|
1659
|
-
|
|
1660
|
-
let mut servers = Vec::new();
|
|
1661
|
-
for i in 0..servers_array.len() {
|
|
1662
|
-
let server_value = servers_array.entry::<Value>(i as isize)?;
|
|
1663
|
-
|
|
1664
|
-
let (url, description) = if let Some(server_hash) = RHash::from_value(server_value) {
|
|
1665
|
-
let url = get_required_string_from_hash(server_hash, "url", ruby)?;
|
|
1666
|
-
let description = get_optional_string_from_hash(server_hash, "description")?;
|
|
1667
|
-
(url, description)
|
|
1668
|
-
} else {
|
|
1669
|
-
let url: String = server_value.funcall("url", ())?;
|
|
1670
|
-
let description_value: Value = server_value.funcall("description", ())?;
|
|
1671
|
-
let description = if description_value.is_nil() {
|
|
1672
|
-
None
|
|
1673
|
-
} else {
|
|
1674
|
-
Some(String::try_convert(description_value)?)
|
|
1675
|
-
};
|
|
1676
|
-
(url, description)
|
|
1677
|
-
};
|
|
1678
|
-
|
|
1679
|
-
servers.push(ServerInfo { url, description });
|
|
1680
|
-
}
|
|
1681
|
-
|
|
1682
|
-
let security_schemes = HashMap::new();
|
|
1683
|
-
|
|
1684
|
-
Some(OpenApiConfig {
|
|
1685
|
-
enabled,
|
|
1686
|
-
title,
|
|
1687
|
-
version,
|
|
1688
|
-
description,
|
|
1689
|
-
swagger_ui_path,
|
|
1690
|
-
redoc_path,
|
|
1691
|
-
openapi_json_path,
|
|
1692
|
-
contact,
|
|
1693
|
-
license,
|
|
1694
|
-
servers,
|
|
1695
|
-
security_schemes,
|
|
1696
|
-
})
|
|
1697
|
-
};
|
|
1698
|
-
|
|
1699
|
-
Ok(spikard_http::ServerConfig {
|
|
1700
|
-
host,
|
|
1701
|
-
port: port as u16,
|
|
1702
|
-
workers,
|
|
1703
|
-
enable_request_id,
|
|
1704
|
-
max_body_size,
|
|
1705
|
-
request_timeout,
|
|
1706
|
-
compression,
|
|
1707
|
-
rate_limit,
|
|
1708
|
-
jwt_auth,
|
|
1709
|
-
api_key_auth,
|
|
1710
|
-
static_files,
|
|
1711
|
-
graceful_shutdown,
|
|
1712
|
-
shutdown_timeout,
|
|
1713
|
-
background_tasks: spikard_http::BackgroundTaskConfig::default(),
|
|
1714
|
-
openapi,
|
|
1715
|
-
lifecycle_hooks: None,
|
|
1716
|
-
di_container: None,
|
|
1717
|
-
})
|
|
1718
|
-
}
|
|
1719
|
-
|
|
1720
|
-
/// Start the Spikard HTTP server from Ruby
|
|
1721
|
-
///
|
|
1722
|
-
/// Creates an Axum HTTP server in a dedicated background thread with its own Tokio runtime.
|
|
1723
|
-
///
|
|
1724
|
-
/// # Arguments
|
|
1725
|
-
///
|
|
1726
|
-
/// * `routes_json` - JSON string containing route metadata
|
|
1727
|
-
/// * `handlers` - Ruby Hash mapping handler_name => Proc
|
|
1728
|
-
/// * `config` - Ruby ServerConfig object with all middleware settings
|
|
1729
|
-
/// * `hooks_value` - Lifecycle hooks
|
|
1730
|
-
/// * `ws_handlers` - WebSocket handlers
|
|
1731
|
-
/// * `sse_producers` - SSE producers
|
|
1732
|
-
/// * `dependencies` - Dependency injection container
|
|
1733
|
-
///
|
|
1734
|
-
/// # Example (Ruby)
|
|
1735
|
-
///
|
|
1736
|
-
/// ```ruby
|
|
1737
|
-
/// config = Spikard::ServerConfig.new(host: '0.0.0.0', port: 8000)
|
|
1738
|
-
/// Spikard::Native.run_server(routes_json, handlers, config, hooks, ws, sse, deps)
|
|
1739
|
-
/// ```
|
|
1740
|
-
#[allow(clippy::too_many_arguments)]
|
|
1741
|
-
fn run_server(
|
|
1742
|
-
ruby: &Ruby,
|
|
1743
|
-
routes_json: String,
|
|
1744
|
-
handlers: Value,
|
|
1745
|
-
config_value: Value,
|
|
1746
|
-
hooks_value: Value,
|
|
1747
|
-
ws_handlers: Value,
|
|
1748
|
-
sse_producers: Value,
|
|
1749
|
-
dependencies: Value,
|
|
1750
|
-
) -> Result<(), Error> {
|
|
1751
|
-
use spikard_http::{SchemaRegistry, Server};
|
|
1752
|
-
use tracing::{error, info, warn};
|
|
1753
|
-
|
|
1754
|
-
let mut config = extract_server_config(ruby, config_value)?;
|
|
1755
|
-
|
|
1756
|
-
let host = config.host.clone();
|
|
1757
|
-
let port = config.port;
|
|
1758
|
-
|
|
1759
|
-
let metadata: Vec<RouteMetadata> = serde_json::from_str(&routes_json)
|
|
1760
|
-
.map_err(|err| Error::new(ruby.exception_arg_error(), format!("Invalid routes JSON: {}", err)))?;
|
|
1761
|
-
|
|
1762
|
-
let handlers_hash = RHash::from_value(handlers).ok_or_else(|| {
|
|
1763
|
-
Error::new(
|
|
1764
|
-
ruby.exception_arg_error(),
|
|
1765
|
-
"handlers parameter must be a Hash of handler_name => Proc",
|
|
1766
|
-
)
|
|
1767
|
-
})?;
|
|
1768
|
-
|
|
1769
|
-
let json_module = ruby
|
|
1770
|
-
.class_object()
|
|
1771
|
-
.funcall::<_, _, Value>("const_get", ("JSON",))
|
|
1772
|
-
.map_err(|err| Error::new(ruby.exception_name_error(), format!("JSON module not found: {}", err)))?;
|
|
1773
|
-
|
|
1774
|
-
let schema_registry = SchemaRegistry::new();
|
|
1775
|
-
|
|
1776
|
-
let mut routes_with_handlers: Vec<(Route, Arc<dyn spikard_http::Handler>)> = Vec::new();
|
|
1777
|
-
|
|
1778
|
-
for route_meta in metadata {
|
|
1779
|
-
let route = Route::from_metadata(route_meta.clone(), &schema_registry)
|
|
1780
|
-
.map_err(|e| Error::new(ruby.exception_runtime_error(), format!("Failed to create route: {}", e)))?;
|
|
1781
|
-
|
|
1782
|
-
let handler_key = ruby.str_new(&route_meta.handler_name);
|
|
1783
|
-
let handler_value: Value = match handlers_hash.lookup(handler_key) {
|
|
1784
|
-
Ok(val) => val,
|
|
1785
|
-
Err(_) => {
|
|
1786
|
-
return Err(Error::new(
|
|
1787
|
-
ruby.exception_arg_error(),
|
|
1788
|
-
format!("Handler '{}' not found in handlers hash", route_meta.handler_name),
|
|
1789
|
-
));
|
|
1790
|
-
}
|
|
1791
|
-
};
|
|
1792
|
-
|
|
1793
|
-
let ruby_handler = RubyHandler::new_for_server(
|
|
1794
|
-
ruby,
|
|
1795
|
-
handler_value,
|
|
1796
|
-
route_meta.handler_name.clone(),
|
|
1797
|
-
route_meta.method.clone(),
|
|
1798
|
-
route_meta.path.clone(),
|
|
1799
|
-
json_module,
|
|
1800
|
-
&route,
|
|
1801
|
-
)?;
|
|
1802
|
-
|
|
1803
|
-
routes_with_handlers.push((route, Arc::new(ruby_handler) as Arc<dyn spikard_http::Handler>));
|
|
1804
|
-
}
|
|
1805
|
-
|
|
1806
|
-
let lifecycle_hooks = if !hooks_value.is_nil() {
|
|
1807
|
-
let hooks_hash = RHash::from_value(hooks_value)
|
|
1808
|
-
.ok_or_else(|| Error::new(ruby.exception_arg_error(), "lifecycle_hooks parameter must be a Hash"))?;
|
|
1809
|
-
|
|
1810
|
-
let mut hooks = spikard_http::LifecycleHooks::new();
|
|
1811
|
-
type RubyHookVec = Vec<Arc<dyn spikard_http::lifecycle::LifecycleHook<Request<Body>, Response<Body>>>>;
|
|
1812
|
-
|
|
1813
|
-
let extract_hooks = |key: &str| -> Result<RubyHookVec, Error> {
|
|
1814
|
-
let key_sym = ruby.to_symbol(key);
|
|
1815
|
-
if let Some(hooks_array) = hooks_hash.get(key_sym)
|
|
1816
|
-
&& !hooks_array.is_nil()
|
|
1817
|
-
{
|
|
1818
|
-
let array = RArray::from_value(hooks_array)
|
|
1819
|
-
.ok_or_else(|| Error::new(ruby.exception_type_error(), format!("{} must be an Array", key)))?;
|
|
1820
|
-
|
|
1821
|
-
let mut result = Vec::new();
|
|
1822
|
-
let len = array.len();
|
|
1823
|
-
for i in 0..len {
|
|
1824
|
-
let hook_value: Value = array.entry(i as isize)?;
|
|
1825
|
-
let name = format!("{}_{}", key, i);
|
|
1826
|
-
let ruby_hook = lifecycle::RubyLifecycleHook::new(name, hook_value);
|
|
1827
|
-
result.push(Arc::new(ruby_hook)
|
|
1828
|
-
as Arc<
|
|
1829
|
-
dyn spikard_http::lifecycle::LifecycleHook<Request<Body>, Response<Body>>,
|
|
1830
|
-
>);
|
|
1831
|
-
}
|
|
1832
|
-
return Ok(result);
|
|
1833
|
-
}
|
|
1834
|
-
Ok(Vec::new())
|
|
1835
|
-
};
|
|
1836
|
-
|
|
1837
|
-
for hook in extract_hooks("on_request")? {
|
|
1838
|
-
hooks.add_on_request(hook);
|
|
1839
|
-
}
|
|
1840
|
-
|
|
1841
|
-
for hook in extract_hooks("pre_validation")? {
|
|
1842
|
-
hooks.add_pre_validation(hook);
|
|
1843
|
-
}
|
|
1844
|
-
|
|
1845
|
-
for hook in extract_hooks("pre_handler")? {
|
|
1846
|
-
hooks.add_pre_handler(hook);
|
|
1847
|
-
}
|
|
1848
|
-
|
|
1849
|
-
for hook in extract_hooks("on_response")? {
|
|
1850
|
-
hooks.add_on_response(hook);
|
|
1851
|
-
}
|
|
1852
|
-
|
|
1853
|
-
for hook in extract_hooks("on_error")? {
|
|
1854
|
-
hooks.add_on_error(hook);
|
|
1855
|
-
}
|
|
1856
|
-
|
|
1857
|
-
Some(hooks)
|
|
1858
|
-
} else {
|
|
1859
|
-
None
|
|
1860
|
-
};
|
|
1861
|
-
|
|
1862
|
-
config.lifecycle_hooks = lifecycle_hooks.map(Arc::new);
|
|
1863
|
-
|
|
1864
|
-
// Extract and register dependencies
|
|
1865
|
-
#[cfg(feature = "di")]
|
|
1866
|
-
{
|
|
1867
|
-
if !dependencies.is_nil() {
|
|
1868
|
-
match build_dependency_container(ruby, dependencies) {
|
|
1869
|
-
Ok(container) => {
|
|
1870
|
-
config.di_container = Some(Arc::new(container));
|
|
1871
|
-
}
|
|
1872
|
-
Err(err) => {
|
|
1873
|
-
return Err(Error::new(
|
|
1874
|
-
ruby.exception_runtime_error(),
|
|
1875
|
-
format!("Failed to build DI container: {}", err),
|
|
1876
|
-
));
|
|
1877
|
-
}
|
|
1878
|
-
}
|
|
1879
|
-
}
|
|
1880
|
-
}
|
|
1881
|
-
|
|
1882
|
-
Server::init_logging();
|
|
1883
|
-
|
|
1884
|
-
info!("Starting Spikard server on {}:{}", host, port);
|
|
1885
|
-
info!("Registered {} routes", routes_with_handlers.len());
|
|
1886
|
-
|
|
1887
|
-
let mut app_router = Server::with_handlers(config.clone(), routes_with_handlers)
|
|
1888
|
-
.map_err(|e| Error::new(ruby.exception_runtime_error(), format!("Failed to build router: {}", e)))?;
|
|
1889
|
-
|
|
1890
|
-
let mut ws_endpoints = Vec::new();
|
|
1891
|
-
if !ws_handlers.is_nil() {
|
|
1892
|
-
let ws_hash = RHash::from_value(ws_handlers)
|
|
1893
|
-
.ok_or_else(|| Error::new(ruby.exception_arg_error(), "WebSocket handlers must be a Hash"))?;
|
|
1894
|
-
|
|
1895
|
-
ws_hash.foreach(|path: String, factory: Value| -> Result<ForEach, Error> {
|
|
1896
|
-
let handler_instance = factory.funcall::<_, _, Value>("call", ()).map_err(|e| {
|
|
1897
|
-
Error::new(
|
|
1898
|
-
ruby.exception_runtime_error(),
|
|
1899
|
-
format!("Failed to create WebSocket handler: {}", e),
|
|
1900
|
-
)
|
|
1901
|
-
})?;
|
|
1902
|
-
|
|
1903
|
-
let ws_state = crate::websocket::create_websocket_state(ruby, handler_instance)?;
|
|
1904
|
-
|
|
1905
|
-
ws_endpoints.push((path, ws_state));
|
|
1906
|
-
|
|
1907
|
-
Ok(ForEach::Continue)
|
|
1908
|
-
})?;
|
|
1909
|
-
}
|
|
1910
|
-
|
|
1911
|
-
let mut sse_endpoints = Vec::new();
|
|
1912
|
-
if !sse_producers.is_nil() {
|
|
1913
|
-
let sse_hash = RHash::from_value(sse_producers)
|
|
1914
|
-
.ok_or_else(|| Error::new(ruby.exception_arg_error(), "SSE producers must be a Hash"))?;
|
|
1915
|
-
|
|
1916
|
-
sse_hash.foreach(|path: String, factory: Value| -> Result<ForEach, Error> {
|
|
1917
|
-
let producer_instance = factory.funcall::<_, _, Value>("call", ()).map_err(|e| {
|
|
1918
|
-
Error::new(
|
|
1919
|
-
ruby.exception_runtime_error(),
|
|
1920
|
-
format!("Failed to create SSE producer: {}", e),
|
|
1921
|
-
)
|
|
1922
|
-
})?;
|
|
1923
|
-
|
|
1924
|
-
let sse_state = crate::sse::create_sse_state(ruby, producer_instance)?;
|
|
1925
|
-
|
|
1926
|
-
sse_endpoints.push((path, sse_state));
|
|
1927
|
-
|
|
1928
|
-
Ok(ForEach::Continue)
|
|
1929
|
-
})?;
|
|
1930
|
-
}
|
|
1931
|
-
|
|
1932
|
-
use axum::routing::get;
|
|
1933
|
-
for (path, ws_state) in ws_endpoints {
|
|
1934
|
-
info!("Registered WebSocket endpoint: {}", path);
|
|
1935
|
-
app_router = app_router.route(
|
|
1936
|
-
&path,
|
|
1937
|
-
get(spikard_http::websocket_handler::<crate::websocket::RubyWebSocketHandler>).with_state(ws_state),
|
|
1938
|
-
);
|
|
1939
|
-
}
|
|
1940
|
-
|
|
1941
|
-
for (path, sse_state) in sse_endpoints {
|
|
1942
|
-
info!("Registered SSE endpoint: {}", path);
|
|
1943
|
-
app_router = app_router.route(
|
|
1944
|
-
&path,
|
|
1945
|
-
get(spikard_http::sse_handler::<crate::sse::RubySseEventProducer>).with_state(sse_state),
|
|
1946
|
-
);
|
|
1947
|
-
}
|
|
1948
|
-
|
|
1949
|
-
let addr = format!("{}:{}", config.host, config.port);
|
|
1950
|
-
let socket_addr: std::net::SocketAddr = addr.parse().map_err(|e| {
|
|
1951
|
-
Error::new(
|
|
1952
|
-
ruby.exception_arg_error(),
|
|
1953
|
-
format!("Invalid socket address {}: {}", addr, e),
|
|
1954
|
-
)
|
|
1955
|
-
})?;
|
|
1956
|
-
|
|
1957
|
-
let runtime = tokio::runtime::Builder::new_current_thread()
|
|
1958
|
-
.enable_all()
|
|
1959
|
-
.build()
|
|
1960
|
-
.map_err(|e| {
|
|
1961
|
-
Error::new(
|
|
1962
|
-
ruby.exception_runtime_error(),
|
|
1963
|
-
format!("Failed to create Tokio runtime: {}", e),
|
|
1964
|
-
)
|
|
1965
|
-
})?;
|
|
1966
|
-
|
|
1967
|
-
let background_config = config.background_tasks.clone();
|
|
1968
|
-
|
|
1969
|
-
runtime.block_on(async move {
|
|
1970
|
-
let listener = tokio::net::TcpListener::bind(socket_addr)
|
|
1971
|
-
.await
|
|
1972
|
-
.unwrap_or_else(|_| panic!("Failed to bind to {}", socket_addr));
|
|
1973
|
-
|
|
1974
|
-
info!("Server listening on {}", socket_addr);
|
|
1975
|
-
|
|
1976
|
-
let background_runtime = spikard_http::BackgroundRuntime::start(background_config.clone()).await;
|
|
1977
|
-
crate::background::install_handle(background_runtime.handle());
|
|
1978
|
-
|
|
1979
|
-
let serve_result = axum::serve(listener, app_router).await;
|
|
1980
|
-
|
|
1981
|
-
crate::background::clear_handle();
|
|
1982
|
-
|
|
1983
|
-
if let Err(err) = background_runtime.shutdown().await {
|
|
1984
|
-
warn!("Failed to drain background tasks during shutdown: {:?}", err);
|
|
1985
|
-
}
|
|
1986
|
-
|
|
1987
|
-
if let Err(e) = serve_result {
|
|
1988
|
-
error!("Server error: {}", e);
|
|
1989
|
-
}
|
|
1990
|
-
});
|
|
1991
|
-
|
|
1992
|
-
Ok(())
|
|
1993
|
-
}
|
|
1994
|
-
|
|
1995
|
-
#[magnus::init]
|
|
1996
|
-
pub fn init(ruby: &Ruby) -> Result<(), Error> {
|
|
1997
|
-
let spikard = ruby.define_module("Spikard")?;
|
|
1998
|
-
spikard.define_singleton_method("version", function!(version, 0))?;
|
|
1999
|
-
let native = match spikard.const_get("Native") {
|
|
2000
|
-
Ok(module) => module,
|
|
2001
|
-
Err(_) => spikard.define_module("Native")?,
|
|
2002
|
-
};
|
|
2003
|
-
|
|
2004
|
-
native.define_singleton_method("run_server", function!(run_server, 7))?;
|
|
2005
|
-
native.define_singleton_method("background_run", function!(background::background_run, 1))?;
|
|
2006
|
-
|
|
2007
|
-
let class = native.define_class("TestClient", ruby.class_object())?;
|
|
2008
|
-
class.define_alloc_func::<NativeTestClient>();
|
|
2009
|
-
class.define_method("initialize", method!(NativeTestClient::initialize, 6))?;
|
|
2010
|
-
class.define_method("request", method!(NativeTestClient::request, 3))?;
|
|
2011
|
-
class.define_method("websocket", method!(NativeTestClient::websocket, 1))?;
|
|
2012
|
-
class.define_method("sse", method!(NativeTestClient::sse, 1))?;
|
|
2013
|
-
class.define_method("close", method!(NativeTestClient::close, 0))?;
|
|
2014
|
-
|
|
2015
|
-
let spikard_module = ruby.define_module("Spikard")?;
|
|
2016
|
-
test_websocket::init(ruby, &spikard_module)?;
|
|
2017
|
-
test_sse::init(ruby, &spikard_module)?;
|
|
2018
|
-
|
|
2019
|
-
Ok(())
|
|
2020
|
-
}
|
|
1
|
+
#![allow(deprecated)]
|
|
2
|
+
|
|
3
|
+
//! Spikard Ruby bindings using Magnus FFI.
|
|
4
|
+
//!
|
|
5
|
+
//! This crate provides Ruby bindings for the Spikard HTTP toolkit, allowing
|
|
6
|
+
//! Ruby developers to build and test HTTP services with Rust performance.
|
|
7
|
+
//!
|
|
8
|
+
//! ## Modules
|
|
9
|
+
//!
|
|
10
|
+
//! - `test_client`: TestClient wrapper for integration testing
|
|
11
|
+
//! - `handler`: RubyHandler trait implementation
|
|
12
|
+
//! - `di`: Dependency injection bridge for Ruby types
|
|
13
|
+
//! - `config`: ServerConfig extraction from Ruby objects
|
|
14
|
+
//! - `conversion`: Ruby ↔ Rust type conversions
|
|
15
|
+
//! - `server`: HTTP server setup and lifecycle management
|
|
16
|
+
//! - `background`: Background task management
|
|
17
|
+
//! - `lifecycle`: Lifecycle hook implementations
|
|
18
|
+
//! - `sse`: Server-Sent Events support
|
|
19
|
+
//! - `test_sse`: SSE testing utilities
|
|
20
|
+
//! - `websocket`: WebSocket support
|
|
21
|
+
//! - `test_websocket`: WebSocket testing utilities
|
|
22
|
+
|
|
23
|
+
mod background;
|
|
24
|
+
mod config;
|
|
25
|
+
mod conversion;
|
|
26
|
+
mod di;
|
|
27
|
+
mod handler;
|
|
28
|
+
mod lifecycle;
|
|
29
|
+
mod server;
|
|
30
|
+
mod sse;
|
|
31
|
+
mod test_client;
|
|
32
|
+
mod test_sse;
|
|
33
|
+
mod test_websocket;
|
|
34
|
+
mod websocket;
|
|
35
|
+
|
|
36
|
+
use async_stream::stream;
|
|
37
|
+
use axum::body::Body;
|
|
38
|
+
use axum::http::{HeaderName, HeaderValue, Method, Request, Response, StatusCode};
|
|
39
|
+
use axum_test::{TestServer, TestServerConfig, Transport};
|
|
40
|
+
use bytes::Bytes;
|
|
41
|
+
use cookie::Cookie;
|
|
42
|
+
use magnus::prelude::*;
|
|
43
|
+
use magnus::value::{InnerValue, Opaque};
|
|
44
|
+
use magnus::{
|
|
45
|
+
Error, Module, RArray, RHash, RString, Ruby, TryConvert, Value, function, gc::Marker, method, r_hash::ForEach,
|
|
46
|
+
};
|
|
47
|
+
use once_cell::sync::Lazy;
|
|
48
|
+
use serde_json::{Map as JsonMap, Value as JsonValue};
|
|
49
|
+
use spikard_http::ParameterValidator;
|
|
50
|
+
use spikard_http::problem::ProblemDetails;
|
|
51
|
+
use spikard_http::testing::{
|
|
52
|
+
MultipartFilePart, SnapshotError, build_multipart_body, encode_urlencoded_body, snapshot_response,
|
|
53
|
+
};
|
|
54
|
+
use spikard_http::{Handler, HandlerResponse, HandlerResult, RequestData};
|
|
55
|
+
use spikard_http::{Route, RouteMetadata, SchemaValidator};
|
|
56
|
+
use std::cell::RefCell;
|
|
57
|
+
use std::collections::HashMap;
|
|
58
|
+
use std::io;
|
|
59
|
+
use std::pin::Pin;
|
|
60
|
+
use std::sync::Arc;
|
|
61
|
+
use tokio::runtime::{Builder, Runtime};
|
|
62
|
+
|
|
63
|
+
static GLOBAL_RUNTIME: Lazy<Runtime> = Lazy::new(|| {
|
|
64
|
+
Builder::new_current_thread()
|
|
65
|
+
.enable_all()
|
|
66
|
+
.build()
|
|
67
|
+
.expect("Failed to initialise global Tokio runtime")
|
|
68
|
+
});
|
|
69
|
+
|
|
70
|
+
#[derive(Default)]
|
|
71
|
+
#[magnus::wrap(class = "Spikard::Native::TestClient", free_immediately, mark)]
|
|
72
|
+
struct NativeTestClient {
|
|
73
|
+
inner: RefCell<Option<ClientInner>>,
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
struct ClientInner {
|
|
77
|
+
http_server: Arc<TestServer>,
|
|
78
|
+
transport_server: Arc<TestServer>,
|
|
79
|
+
/// Keep Ruby handler closures alive for GC; accessed via the `mark` hook.
|
|
80
|
+
#[allow(dead_code)]
|
|
81
|
+
handlers: Vec<RubyHandler>,
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
struct RequestConfig {
|
|
85
|
+
query: Option<JsonValue>,
|
|
86
|
+
headers: HashMap<String, String>,
|
|
87
|
+
cookies: HashMap<String, String>,
|
|
88
|
+
body: Option<RequestBody>,
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
enum RequestBody {
|
|
92
|
+
Json(JsonValue),
|
|
93
|
+
Form(JsonValue),
|
|
94
|
+
Raw(String),
|
|
95
|
+
Multipart {
|
|
96
|
+
form_data: Vec<(String, String)>,
|
|
97
|
+
files: Vec<MultipartFilePart>,
|
|
98
|
+
},
|
|
99
|
+
}
|
|
100
|
+
|
|
101
|
+
#[derive(Clone)]
|
|
102
|
+
struct RubyHandler {
|
|
103
|
+
inner: Arc<RubyHandlerInner>,
|
|
104
|
+
}
|
|
105
|
+
|
|
106
|
+
struct RubyHandlerInner {
|
|
107
|
+
handler_proc: Opaque<Value>,
|
|
108
|
+
handler_name: String,
|
|
109
|
+
method: String,
|
|
110
|
+
path: String,
|
|
111
|
+
json_module: Opaque<Value>,
|
|
112
|
+
request_validator: Option<Arc<SchemaValidator>>,
|
|
113
|
+
response_validator: Option<Arc<SchemaValidator>>,
|
|
114
|
+
parameter_validator: Option<ParameterValidator>,
|
|
115
|
+
#[cfg(feature = "di")]
|
|
116
|
+
handler_dependencies: Vec<String>,
|
|
117
|
+
}
|
|
118
|
+
|
|
119
|
+
struct HandlerResponsePayload {
|
|
120
|
+
status: u16,
|
|
121
|
+
headers: HashMap<String, String>,
|
|
122
|
+
body: Option<JsonValue>,
|
|
123
|
+
raw_body: Option<Vec<u8>>,
|
|
124
|
+
}
|
|
125
|
+
|
|
126
|
+
enum RubyHandlerResult {
|
|
127
|
+
Payload(HandlerResponsePayload),
|
|
128
|
+
Streaming(StreamingResponsePayload),
|
|
129
|
+
}
|
|
130
|
+
|
|
131
|
+
/// Streaming response description: body chunks are pulled lazily from a
/// Ruby enumerator via `poll_stream_chunk`.
struct StreamingResponsePayload {
    // Ruby Enumerator yielding body chunks; `next` is called per chunk.
    enumerator: Arc<Opaque<Value>>,
    status: u16,
    headers: HashMap<String, String>,
}
|
|
136
|
+
|
|
137
|
+
impl StreamingResponsePayload {
|
|
138
|
+
fn into_response(self) -> Result<HandlerResponse, Error> {
|
|
139
|
+
let ruby = Ruby::get().map_err(|_| {
|
|
140
|
+
Error::new(
|
|
141
|
+
Ruby::get().unwrap().exception_runtime_error(),
|
|
142
|
+
"Ruby VM unavailable while building streaming response",
|
|
143
|
+
)
|
|
144
|
+
})?;
|
|
145
|
+
|
|
146
|
+
let status = StatusCode::from_u16(self.status).map_err(|err| {
|
|
147
|
+
Error::new(
|
|
148
|
+
ruby.exception_arg_error(),
|
|
149
|
+
format!("Invalid streaming status code {}: {}", self.status, err),
|
|
150
|
+
)
|
|
151
|
+
})?;
|
|
152
|
+
|
|
153
|
+
let header_pairs = self
|
|
154
|
+
.headers
|
|
155
|
+
.into_iter()
|
|
156
|
+
.map(|(name, value)| {
|
|
157
|
+
let header_name = HeaderName::from_bytes(name.as_bytes()).map_err(|err| {
|
|
158
|
+
Error::new(
|
|
159
|
+
ruby.exception_arg_error(),
|
|
160
|
+
format!("Invalid header name '{name}': {err}"),
|
|
161
|
+
)
|
|
162
|
+
})?;
|
|
163
|
+
let header_value = HeaderValue::from_str(&value).map_err(|err| {
|
|
164
|
+
Error::new(
|
|
165
|
+
ruby.exception_arg_error(),
|
|
166
|
+
format!("Invalid header value for '{name}': {err}"),
|
|
167
|
+
)
|
|
168
|
+
})?;
|
|
169
|
+
Ok((header_name, header_value))
|
|
170
|
+
})
|
|
171
|
+
.collect::<Result<Vec<_>, Error>>()?;
|
|
172
|
+
|
|
173
|
+
let enumerator = self.enumerator.clone();
|
|
174
|
+
let body_stream = stream! {
|
|
175
|
+
loop {
|
|
176
|
+
match poll_stream_chunk(&enumerator) {
|
|
177
|
+
Ok(Some(bytes)) => yield Ok(bytes),
|
|
178
|
+
Ok(None) => break,
|
|
179
|
+
Err(err) => {
|
|
180
|
+
yield Err(Box::new(err));
|
|
181
|
+
break;
|
|
182
|
+
}
|
|
183
|
+
}
|
|
184
|
+
}
|
|
185
|
+
};
|
|
186
|
+
|
|
187
|
+
let mut response = HandlerResponse::stream(body_stream).with_status(status);
|
|
188
|
+
for (name, value) in header_pairs {
|
|
189
|
+
response = response.with_header(name, value);
|
|
190
|
+
}
|
|
191
|
+
Ok(response)
|
|
192
|
+
}
|
|
193
|
+
}
|
|
194
|
+
|
|
195
|
+
fn poll_stream_chunk(enumerator: &Arc<Opaque<Value>>) -> Result<Option<Bytes>, io::Error> {
|
|
196
|
+
let ruby = Ruby::get().map_err(|err| io::Error::other(err.to_string()))?;
|
|
197
|
+
let enum_value = enumerator.get_inner_with(&ruby);
|
|
198
|
+
match enum_value.funcall::<_, _, Value>("next", ()) {
|
|
199
|
+
Ok(chunk) => ruby_value_to_bytes(chunk).map(Some),
|
|
200
|
+
Err(err) => {
|
|
201
|
+
if err.is_kind_of(ruby.exception_stop_iteration()) {
|
|
202
|
+
Ok(None)
|
|
203
|
+
} else {
|
|
204
|
+
Err(io::Error::other(err.to_string()))
|
|
205
|
+
}
|
|
206
|
+
}
|
|
207
|
+
}
|
|
208
|
+
}
|
|
209
|
+
|
|
210
|
+
fn ruby_value_to_bytes(value: Value) -> Result<Bytes, io::Error> {
|
|
211
|
+
if let Ok(str_value) = RString::try_convert(value) {
|
|
212
|
+
let slice = unsafe { str_value.as_slice() };
|
|
213
|
+
return Ok(Bytes::copy_from_slice(slice));
|
|
214
|
+
}
|
|
215
|
+
|
|
216
|
+
if let Ok(vec_bytes) = Vec::<u8>::try_convert(value) {
|
|
217
|
+
return Ok(Bytes::from(vec_bytes));
|
|
218
|
+
}
|
|
219
|
+
|
|
220
|
+
Err(io::Error::other("Streaming chunks must be Strings or Arrays of bytes"))
|
|
221
|
+
}
|
|
222
|
+
|
|
223
|
+
/// Plain-data snapshot of an HTTP response, handed back to Ruby.
struct TestResponseData {
    status: u16,
    headers: HashMap<String, String>,
    // `None` when the response body was empty; otherwise the body decoded
    // as (lossy) UTF-8 text.
    body_text: Option<String>,
}
|
|
228
|
+
|
|
229
|
+
/// Error message produced while executing a test request; converted into a
/// Ruby `RuntimeError` by the caller (see `NativeTestClient::request`).
#[derive(Debug)]
struct NativeRequestError(String);
|
|
231
|
+
|
|
232
|
+
impl NativeTestClient {
    /// Ruby `#initialize`: build routes, handlers and the in-process servers.
    ///
    /// * `routes_json` — JSON array of `RouteMetadata`.
    /// * `handlers` — Hash of handler_name => Proc.
    /// * `config_value` — server configuration (parsed by `extract_server_config`).
    /// * `ws_handlers` / `sse_producers` — Hash of path => factory Proc, or nil.
    /// * `dependencies` — DI definitions, or nil; only used with the `di` feature.
    ///
    /// On success, stores a `ClientInner` (two `TestServer`s plus the handler
    /// references kept alive for GC) in `self.inner`.
    #[allow(clippy::too_many_arguments)]
    fn initialize(
        ruby: &Ruby,
        this: &Self,
        routes_json: String,
        handlers: Value,
        config_value: Value,
        ws_handlers: Value,
        sse_producers: Value,
        dependencies: Value,
    ) -> Result<(), Error> {
        let metadata: Vec<RouteMetadata> = serde_json::from_str(&routes_json)
            .map_err(|err| Error::new(ruby.exception_arg_error(), format!("Invalid routes JSON: {err}")))?;

        let handlers_hash = RHash::from_value(handlers).ok_or_else(|| {
            Error::new(
                ruby.exception_arg_error(),
                "handlers parameter must be a Hash of handler_name => Proc",
            )
        })?;

        let json_module = ruby
            .class_object()
            .const_get("JSON")
            .map_err(|_| Error::new(ruby.exception_runtime_error(), "JSON module not available"))?;

        let mut server_config = extract_server_config(ruby, config_value)?;

        // Extract and register dependencies
        #[cfg(feature = "di")]
        {
            if !dependencies.is_nil() {
                match build_dependency_container(ruby, dependencies) {
                    Ok(container) => {
                        server_config.di_container = Some(Arc::new(container));
                    }
                    Err(err) => {
                        return Err(Error::new(
                            ruby.exception_runtime_error(),
                            format!("Failed to build DI container: {}", err),
                        ));
                    }
                }
            }
        }

        let schema_registry = spikard_http::SchemaRegistry::new();
        let mut prepared_routes = Vec::with_capacity(metadata.len());
        let mut handler_refs = Vec::with_capacity(metadata.len());
        let mut route_metadata_vec = Vec::with_capacity(metadata.len());

        // One pass per route: look up its Ruby proc, build the Route, and
        // wrap the proc as a `RubyHandler` for the router.
        for meta in metadata.clone() {
            let handler_value = fetch_handler(ruby, &handlers_hash, &meta.handler_name)?;
            let route = Route::from_metadata(meta.clone(), &schema_registry)
                .map_err(|err| Error::new(ruby.exception_runtime_error(), format!("Failed to build route: {err}")))?;

            let handler = RubyHandler::new(&route, handler_value, json_module)?;
            prepared_routes.push((route, Arc::new(handler.clone()) as Arc<dyn spikard_http::Handler>));
            handler_refs.push(handler);
            route_metadata_vec.push(meta);
        }

        let mut router = spikard_http::server::build_router_with_handlers_and_config(
            prepared_routes,
            server_config,
            route_metadata_vec,
        )
        .map_err(|err| Error::new(ruby.exception_runtime_error(), format!("Failed to build router: {err}")))?;

        // WebSocket endpoints: each factory proc is called once here to
        // produce the handler instance for that path.
        let mut ws_endpoints = Vec::new();
        if !ws_handlers.is_nil() {
            let ws_hash = RHash::from_value(ws_handlers)
                .ok_or_else(|| Error::new(ruby.exception_arg_error(), "WebSocket handlers must be a Hash"))?;

            ws_hash.foreach(|path: String, factory: Value| -> Result<ForEach, Error> {
                let handler_instance = factory.funcall::<_, _, Value>("call", ()).map_err(|e| {
                    Error::new(
                        ruby.exception_runtime_error(),
                        format!("Failed to create WebSocket handler: {}", e),
                    )
                })?;

                let ws_state = crate::websocket::create_websocket_state(ruby, handler_instance)?;

                ws_endpoints.push((path, ws_state));

                Ok(ForEach::Continue)
            })?;
        }

        // SSE endpoints: same factory-call pattern as WebSockets above.
        let mut sse_endpoints = Vec::new();
        if !sse_producers.is_nil() {
            let sse_hash = RHash::from_value(sse_producers)
                .ok_or_else(|| Error::new(ruby.exception_arg_error(), "SSE producers must be a Hash"))?;

            sse_hash.foreach(|path: String, factory: Value| -> Result<ForEach, Error> {
                let producer_instance = factory.funcall::<_, _, Value>("call", ()).map_err(|e| {
                    Error::new(
                        ruby.exception_runtime_error(),
                        format!("Failed to create SSE producer: {}", e),
                    )
                })?;

                let sse_state = crate::sse::create_sse_state(ruby, producer_instance)?;

                sse_endpoints.push((path, sse_state));

                Ok(ForEach::Continue)
            })?;
        }

        use axum::routing::get;
        for (path, ws_state) in ws_endpoints {
            router = router.route(
                &path,
                get(spikard_http::websocket_handler::<crate::websocket::RubyWebSocketHandler>).with_state(ws_state),
            );
        }

        for (path, sse_state) in sse_endpoints {
            router = router.route(
                &path,
                get(spikard_http::sse_handler::<crate::sse::RubySseEventProducer>).with_state(sse_state),
            );
        }

        // In-process server for plain HTTP requests.
        let http_server = GLOBAL_RUNTIME
            .block_on(async { TestServer::new(router.clone()) })
            .map_err(|err| {
                Error::new(
                    ruby.exception_runtime_error(),
                    format!("Failed to initialise test server: {err}"),
                )
            })?;

        // Second server on a real random port — WebSocket/SSE connections
        // need an actual socket rather than the in-process transport.
        let ws_config = TestServerConfig {
            transport: Some(Transport::HttpRandomPort),
            ..Default::default()
        };
        let transport_server = GLOBAL_RUNTIME
            .block_on(async { TestServer::new_with_config(router, ws_config) })
            .map_err(|err| {
                Error::new(
                    ruby.exception_runtime_error(),
                    format!("Failed to initialise WebSocket transport server: {err}"),
                )
            })?;

        // `handler_refs` is stored so the GC mark hook keeps the procs alive.
        *this.inner.borrow_mut() = Some(ClientInner {
            http_server: Arc::new(http_server),
            transport_server: Arc::new(transport_server),
            handlers: handler_refs,
        });

        Ok(())
    }

    /// Execute one HTTP request and return the response as a Ruby value.
    ///
    /// `method` is case-insensitive; `options` is a keyword hash parsed by
    /// `parse_request_config` (query/headers/cookies/body), or nil.
    fn request(ruby: &Ruby, this: &Self, method: String, path: String, options: Value) -> Result<Value, Error> {
        let inner_borrow = this.inner.borrow();
        let inner = inner_borrow
            .as_ref()
            .ok_or_else(|| Error::new(ruby.exception_runtime_error(), "TestClient not initialised"))?;
        let method_upper = method.to_ascii_uppercase();
        let http_method = Method::from_bytes(method_upper.as_bytes()).map_err(|err| {
            Error::new(
                ruby.exception_arg_error(),
                format!("Unsupported method {method_upper}: {err}"),
            )
        })?;

        let request_config = parse_request_config(ruby, options)?;

        // Block the Ruby thread while the async request runs on the runtime.
        let response = GLOBAL_RUNTIME
            .block_on(execute_request(
                inner.http_server.clone(),
                http_method,
                path.clone(),
                request_config,
            ))
            .map_err(|err| {
                Error::new(
                    ruby.exception_runtime_error(),
                    format!("Request failed for {method_upper} {path}: {}", err.0),
                )
            })?;

        response_to_ruby(ruby, response)
    }

    /// Drop both servers and the retained handler procs. Idempotent.
    fn close(&self) -> Result<(), Error> {
        *self.inner.borrow_mut() = None;
        Ok(())
    }

    /// Open a WebSocket connection to `path` on the transport server and
    /// return a wrapped `WebSocketTestConnection` Ruby object.
    fn websocket(ruby: &Ruby, this: &Self, path: String) -> Result<Value, Error> {
        let inner_borrow = this.inner.borrow();
        let inner = inner_borrow
            .as_ref()
            .ok_or_else(|| Error::new(ruby.exception_runtime_error(), "TestClient not initialised"))?;

        let server = Arc::clone(&inner.transport_server);

        // Release the RefCell borrow before blocking on the runtime.
        drop(inner_borrow);

        let handle =
            GLOBAL_RUNTIME.spawn(async move { spikard_http::testing::connect_websocket(&server, &path).await });

        let ws = GLOBAL_RUNTIME.block_on(async {
            handle
                .await
                .map_err(|e| Error::new(ruby.exception_runtime_error(), format!("WebSocket task failed: {}", e)))
        })?;

        let ws_conn = test_websocket::WebSocketTestConnection::new(ws);
        Ok(ruby.obj_wrap(ws_conn).as_value())
    }

    /// Perform a GET against an SSE endpoint and return the event stream as
    /// a Ruby value (built by `test_sse::sse_stream_from_response`).
    fn sse(ruby: &Ruby, this: &Self, path: String) -> Result<Value, Error> {
        let inner_borrow = this.inner.borrow();
        let inner = inner_borrow
            .as_ref()
            .ok_or_else(|| Error::new(ruby.exception_runtime_error(), "TestClient not initialised"))?;

        let response = GLOBAL_RUNTIME
            .block_on(async {
                let axum_response = inner.transport_server.get(&path).await;
                snapshot_response(axum_response).await
            })
            .map_err(|e| Error::new(ruby.exception_runtime_error(), format!("SSE request failed: {}", e)))?;

        test_sse::sse_stream_from_response(ruby, &response)
    }
}
|
|
466
|
+
|
|
467
|
+
// Intentionally empty: placeholder for future `ClientInner` methods.
impl ClientInner {}
|
|
468
|
+
|
|
469
|
+
impl RubyHandler {
    /// Wrap a Ruby proc as the handler for `route` (test-client mode).
    /// Validators and DI dependency names are copied out of the route.
    fn new(route: &Route, handler_value: Value, json_module: Value) -> Result<Self, Error> {
        Ok(Self {
            inner: Arc::new(RubyHandlerInner {
                handler_proc: Opaque::from(handler_value),
                handler_name: route.handler_name.clone(),
                method: route.method.as_str().to_string(),
                path: route.path.clone(),
                json_module: Opaque::from(json_module),
                request_validator: route.request_validator.clone(),
                response_validator: route.response_validator.clone(),
                parameter_validator: route.parameter_validator.clone(),
                #[cfg(feature = "di")]
                handler_dependencies: route.handler_dependencies.clone(),
            }),
        })
    }

    /// Create a new RubyHandler for server mode
    ///
    /// This is used by run_server to create handlers from Ruby Procs.
    /// Unlike `new`, identity fields (name/method/path) are passed explicitly
    /// rather than read from the route.
    fn new_for_server(
        _ruby: &Ruby,
        handler_value: Value,
        handler_name: String,
        method: String,
        path: String,
        json_module: Value,
        route: &Route,
    ) -> Result<Self, Error> {
        Ok(Self {
            inner: Arc::new(RubyHandlerInner {
                handler_proc: Opaque::from(handler_value),
                handler_name,
                method,
                path,
                json_module: Opaque::from(json_module),
                request_validator: route.request_validator.clone(),
                response_validator: route.response_validator.clone(),
                parameter_validator: route.parameter_validator.clone(),
                #[cfg(feature = "di")]
                handler_dependencies: route.handler_dependencies.clone(),
            }),
        })
    }

    /// Required by Ruby GC; invoked through the magnus mark hook.
    /// Marks the held proc so the GC does not collect it.
    #[allow(dead_code)]
    fn mark(&self, marker: &Marker) {
        if let Ok(ruby) = Ruby::get() {
            let proc_val = self.inner.handler_proc.get_inner_with(&ruby);
            marker.mark(proc_val);
        }
    }

    /// Run the full request pipeline for one request:
    /// 1. request-body schema validation (if configured)
    /// 2. parameter validation/extraction (if configured)
    /// 3. build the Ruby request hash and invoke the proc
    ///    (with DI keyword arguments under the `di` feature)
    /// 4. interpret the proc's return value (buffered or streaming)
    /// 5. response-body schema validation, then build the axum response
    ///
    /// Errors are returned as `(status, body-json-string)` pairs.
    fn handle(&self, request_data: RequestData) -> HandlerResult {
        if let Some(validator) = &self.inner.request_validator
            && let Err(errors) = validator.validate(&request_data.body)
        {
            let problem = ProblemDetails::from_validation_error(&errors);
            let error_json = problem_to_json(&problem);
            return Err((problem.status_code(), error_json));
        }

        let validated_params = if let Some(validator) = &self.inner.parameter_validator {
            // The validator wants single-valued query strings; keep only the
            // first value of each raw query parameter.
            let raw_query_strings: HashMap<String, String> = request_data
                .raw_query_params
                .as_ref()
                .iter()
                .filter_map(|(k, v)| v.first().map(|first| (k.clone(), first.clone())))
                .collect();

            match validator.validate_and_extract(
                &request_data.query_params,
                &raw_query_strings,
                request_data.path_params.as_ref(),
                request_data.headers.as_ref(),
                request_data.cookies.as_ref(),
            ) {
                Ok(value) => Some(value),
                Err(errors) => {
                    let problem = ProblemDetails::from_validation_error(&errors);
                    return Err((problem.status_code(), problem_to_json(&problem)));
                }
            }
        } else {
            None
        };

        let ruby = Ruby::get().map_err(|_| {
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                "Ruby VM unavailable while invoking handler".to_string(),
            )
        })?;

        let request_value = build_ruby_request(&ruby, &self.inner, &request_data, validated_params.as_ref())
            .map_err(|err| (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()))?;

        let handler_proc = self.inner.handler_proc.get_inner_with(&ruby);

        // Extract resolved dependencies (if any) and convert to Ruby keyword arguments
        #[cfg(feature = "di")]
        let handler_result = {
            if let Some(deps) = &request_data.dependencies {
                // Build keyword arguments hash from dependencies
                // ONLY include dependencies that the handler actually declared
                let kwargs_hash = ruby.hash_new();

                // Check if all required handler dependencies are present
                // If any are missing, return error BEFORE calling handler
                for key in &self.inner.handler_dependencies {
                    if !deps.contains(key) {
                        // Handler requires a dependency that was not resolved
                        // This should have been caught by DI system, but safety check here
                        return Err((
                            StatusCode::INTERNAL_SERVER_ERROR,
                            format!(
                                "Handler '{}' requires dependency '{}' which was not resolved",
                                self.inner.handler_name, key
                            ),
                        ));
                    }
                }

                // Filter dependencies: only pass those declared by the handler
                for key in &self.inner.handler_dependencies {
                    if let Some(value) = deps.get_arc(key) {
                        // Check what type of dependency this is and extract Ruby value
                        let ruby_val = if let Some(wrapper) = value.downcast_ref::<crate::di::RubyValueWrapper>() {
                            // It's a Ruby value wrapper (singleton with preserved mutations)
                            // Get the raw Ruby value directly to preserve object identity
                            wrapper.get_value(&ruby)
                        } else if let Some(json) = value.downcast_ref::<serde_json::Value>() {
                            // It's already JSON (non-singleton or value dependency)
                            // Convert JSON to Ruby value
                            match crate::di::json_to_ruby(&ruby, json) {
                                Ok(val) => val,
                                Err(e) => {
                                    return Err((
                                        StatusCode::INTERNAL_SERVER_ERROR,
                                        format!("Failed to convert dependency '{}' to Ruby: {}", key, e),
                                    ));
                                }
                            }
                        } else {
                            return Err((
                                StatusCode::INTERNAL_SERVER_ERROR,
                                format!(
                                    "Unknown dependency type for '{}': expected RubyValueWrapper or JSON",
                                    key
                                ),
                            ));
                        };

                        // Add to kwargs hash
                        let key_sym = ruby.to_symbol(key);
                        if let Err(e) = kwargs_hash.aset(key_sym, ruby_val) {
                            return Err((
                                StatusCode::INTERNAL_SERVER_ERROR,
                                format!("Failed to add dependency '{}': {}", key, e),
                            ));
                        }
                    }
                }

                // Call handler with request and dependencies as keyword arguments
                // Ruby 3.x requires keyword arguments to be passed differently than Ruby 2.x
                // We'll create a Ruby lambda that calls the handler with ** splat operator
                //
                // Equivalent Ruby code:
                // lambda { |req, kwargs| handler_proc.call(req, **kwargs) }.call(request, kwargs_hash)

                // NOTE(review): this evaluates the wrapper lambda on every
                // request that has dependencies; caching it would avoid the
                // repeated eval — confirm before changing.
                let wrapper_code = ruby
                    .eval::<Value>(
                        r#"
                        lambda do |proc, request, kwargs|
                          proc.call(request, **kwargs)
                        end
                        "#,
                    )
                    .map_err(|e| {
                        (
                            StatusCode::INTERNAL_SERVER_ERROR,
                            format!("Failed to create kwarg wrapper: {}", e),
                        )
                    })?;

                wrapper_code.funcall("call", (handler_proc, request_value, kwargs_hash))
            } else {
                // No dependencies, call with just request
                handler_proc.funcall("call", (request_value,))
            }
        };

        #[cfg(not(feature = "di"))]
        let handler_result = handler_proc.funcall("call", (request_value,));

        let response_value = match handler_result {
            Ok(value) => value,
            Err(err) => {
                return Err((
                    StatusCode::INTERNAL_SERVER_ERROR,
                    format!("Handler '{}' failed: {}", self.inner.handler_name, err),
                ));
            }
        };

        let handler_result = interpret_handler_response(&ruby, &self.inner, response_value).map_err(|err| {
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                format!(
                    "Failed to interpret response from '{}': {}",
                    self.inner.handler_name, err
                ),
            )
        })?;

        // Streaming responses short-circuit: no response-schema validation
        // is applied to streamed bodies.
        let payload = match handler_result {
            RubyHandlerResult::Streaming(streaming) => {
                let response = streaming.into_response().map_err(|err| {
                    (
                        StatusCode::INTERNAL_SERVER_ERROR,
                        format!("Failed to build streaming response: {}", err),
                    )
                })?;
                return Ok(response.into_response());
            }
            RubyHandlerResult::Payload(payload) => payload,
        };

        if let (Some(validator), Some(body)) = (&self.inner.response_validator, payload.body.as_ref())
            && let Err(errors) = validator.validate(body)
        {
            let problem = ProblemDetails::from_validation_error(&errors);
            return Err((StatusCode::INTERNAL_SERVER_ERROR, problem_to_json(&problem)));
        }

        let HandlerResponsePayload {
            status,
            headers,
            body,
            raw_body,
        } = payload;

        let mut response_builder = axum::http::Response::builder().status(status);
        let mut has_content_type = false;

        for (name, value) in headers.iter() {
            if name.eq_ignore_ascii_case("content-type") {
                has_content_type = true;
            }
            let header_name = HeaderName::from_bytes(name.as_bytes()).map_err(|err| {
                (
                    StatusCode::INTERNAL_SERVER_ERROR,
                    format!("Invalid header name '{name}': {err}"),
                )
            })?;
            let header_value = HeaderValue::from_str(value).map_err(|err| {
                (
                    StatusCode::INTERNAL_SERVER_ERROR,
                    format!("Invalid header value for '{name}': {err}"),
                )
            })?;

            response_builder = response_builder.header(header_name, header_value);
        }

        // Default to JSON when the handler returned a JSON body and did not
        // set a content type itself.
        if !has_content_type && body.is_some() {
            response_builder = response_builder.header(
                HeaderName::from_static("content-type"),
                HeaderValue::from_static("application/json"),
            );
        }

        // Raw bytes take precedence over the JSON body.
        let body_bytes = if let Some(raw) = raw_body {
            raw
        } else if let Some(json_value) = body {
            serde_json::to_vec(&json_value).map_err(|err| {
                (
                    StatusCode::INTERNAL_SERVER_ERROR,
                    format!("Failed to serialise response body: {err}"),
                )
            })?
        } else {
            Vec::new()
        };

        response_builder.body(Body::from(body_bytes)).map_err(|err| {
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                format!("Failed to build response: {err}"),
            )
        })
    }
}
|
|
765
|
+
|
|
766
|
+
impl Handler for RubyHandler {
|
|
767
|
+
fn call(
|
|
768
|
+
&self,
|
|
769
|
+
_req: axum::http::Request<Body>,
|
|
770
|
+
request_data: RequestData,
|
|
771
|
+
) -> Pin<Box<dyn std::future::Future<Output = HandlerResult> + Send + '_>> {
|
|
772
|
+
let handler = self.clone();
|
|
773
|
+
Box::pin(async move { handler.handle(request_data) })
|
|
774
|
+
}
|
|
775
|
+
}
|
|
776
|
+
|
|
777
|
+
/// Execute one HTTP request against the in-process test server.
///
/// Maps `method` onto the matching `TestServer` builder call, applies the
/// query/header/cookie options, encodes the body according to the
/// `RequestBody` variant, and snapshots the response into plain data.
///
/// # Errors
/// Returns `NativeRequestError` for unsupported methods, form-encoding
/// failures, or response-snapshot failures.
async fn execute_request(
    server: Arc<TestServer>,
    method: Method,
    path: String,
    config: RequestConfig,
) -> Result<TestResponseData, NativeRequestError> {
    let mut request = match method {
        Method::GET => server.get(&path),
        Method::POST => server.post(&path),
        Method::PUT => server.put(&path),
        Method::PATCH => server.patch(&path),
        Method::DELETE => server.delete(&path),
        Method::HEAD => server.method(Method::HEAD, &path),
        Method::OPTIONS => server.method(Method::OPTIONS, &path),
        Method::TRACE => server.method(Method::TRACE, &path),
        // Anything else (e.g. CONNECT or extension methods) is rejected.
        other => return Err(NativeRequestError(format!("Unsupported HTTP method {other}"))),
    };

    if let Some(query) = config.query {
        request = request.add_query_params(&query);
    }

    for (name, value) in config.headers {
        request = request.add_header(name.as_str(), value.as_str());
    }

    for (name, value) in config.cookies {
        request = request.add_cookie(Cookie::new(name, value));
    }

    if let Some(body) = config.body {
        match body {
            RequestBody::Json(json_value) => {
                request = request.json(&json_value);
            }
            RequestBody::Form(form_value) => {
                let encoded = encode_urlencoded_body(&form_value)
                    .map_err(|err| NativeRequestError(format!("Failed to encode form body: {err}")))?;
                request = request
                    .content_type("application/x-www-form-urlencoded")
                    .bytes(Bytes::from(encoded));
            }
            RequestBody::Raw(raw) => {
                request = request.bytes(Bytes::from(raw));
            }
            RequestBody::Multipart { form_data, files } => {
                // The boundary is generated alongside the body and must match
                // the one declared in the content type.
                let (multipart_body, boundary) = build_multipart_body(&form_data, &files);
                request = request
                    .content_type(&format!("multipart/form-data; boundary={}", boundary))
                    .bytes(Bytes::from(multipart_body));
            }
        }
    }

    let response = request.await;
    let snapshot = snapshot_response(response).await.map_err(snapshot_err_to_native)?;
    // Empty body becomes None; otherwise decode lossily as UTF-8 text.
    let body_text = if snapshot.body.is_empty() {
        None
    } else {
        Some(String::from_utf8_lossy(&snapshot.body).into_owned())
    };

    Ok(TestResponseData {
        status: snapshot.status,
        headers: snapshot.headers,
        body_text,
    })
}
|
|
845
|
+
|
|
846
|
+
fn snapshot_err_to_native(err: SnapshotError) -> NativeRequestError {
|
|
847
|
+
NativeRequestError(err.to_string())
|
|
848
|
+
}
|
|
849
|
+
|
|
850
|
+
fn parse_request_config(ruby: &Ruby, options: Value) -> Result<RequestConfig, Error> {
|
|
851
|
+
if options.is_nil() {
|
|
852
|
+
return Ok(RequestConfig {
|
|
853
|
+
query: None,
|
|
854
|
+
headers: HashMap::new(),
|
|
855
|
+
cookies: HashMap::new(),
|
|
856
|
+
body: None,
|
|
857
|
+
});
|
|
858
|
+
}
|
|
859
|
+
|
|
860
|
+
let hash = RHash::from_value(options)
|
|
861
|
+
.ok_or_else(|| Error::new(ruby.exception_arg_error(), "request options must be a Hash"))?;
|
|
862
|
+
|
|
863
|
+
let json_module = ruby
|
|
864
|
+
.class_object()
|
|
865
|
+
.const_get("JSON")
|
|
866
|
+
.map_err(|_| Error::new(ruby.exception_runtime_error(), "JSON module not available"))?;
|
|
867
|
+
|
|
868
|
+
let query = if let Some(value) = get_kw(ruby, hash, "query") {
|
|
869
|
+
if value.is_nil() {
|
|
870
|
+
None
|
|
871
|
+
} else {
|
|
872
|
+
Some(ruby_value_to_json(ruby, json_module, value)?)
|
|
873
|
+
}
|
|
874
|
+
} else {
|
|
875
|
+
None
|
|
876
|
+
};
|
|
877
|
+
|
|
878
|
+
let headers = if let Some(value) = get_kw(ruby, hash, "headers") {
|
|
879
|
+
if value.is_nil() {
|
|
880
|
+
HashMap::new()
|
|
881
|
+
} else {
|
|
882
|
+
let hash = RHash::try_convert(value)?;
|
|
883
|
+
hash.to_hash_map::<String, String>()?
|
|
884
|
+
}
|
|
885
|
+
} else {
|
|
886
|
+
HashMap::new()
|
|
887
|
+
};
|
|
888
|
+
|
|
889
|
+
let cookies = if let Some(value) = get_kw(ruby, hash, "cookies") {
|
|
890
|
+
if value.is_nil() {
|
|
891
|
+
HashMap::new()
|
|
892
|
+
} else {
|
|
893
|
+
let hash = RHash::try_convert(value)?;
|
|
894
|
+
hash.to_hash_map::<String, String>()?
|
|
895
|
+
}
|
|
896
|
+
} else {
|
|
897
|
+
HashMap::new()
|
|
898
|
+
};
|
|
899
|
+
|
|
900
|
+
let files_opt = get_kw(ruby, hash, "files");
|
|
901
|
+
let has_files = files_opt.is_some() && !files_opt.unwrap().is_nil();
|
|
902
|
+
|
|
903
|
+
let body = if has_files {
|
|
904
|
+
let files_value = files_opt.unwrap();
|
|
905
|
+
let files = extract_files(ruby, files_value)?;
|
|
906
|
+
|
|
907
|
+
let mut form_data = Vec::new();
|
|
908
|
+
if let Some(data_value) = get_kw(ruby, hash, "data")
|
|
909
|
+
&& !data_value.is_nil()
|
|
910
|
+
{
|
|
911
|
+
let data_hash = RHash::try_convert(data_value)?;
|
|
912
|
+
|
|
913
|
+
let keys_array: RArray = data_hash.funcall("keys", ())?;
|
|
914
|
+
|
|
915
|
+
for i in 0..keys_array.len() {
|
|
916
|
+
let key_val = keys_array.entry::<Value>(i as isize)?;
|
|
917
|
+
let field_name = String::try_convert(key_val)?;
|
|
918
|
+
let value = data_hash
|
|
919
|
+
.get(key_val)
|
|
920
|
+
.ok_or_else(|| Error::new(ruby.exception_runtime_error(), "Failed to get hash value"))?;
|
|
921
|
+
|
|
922
|
+
if let Some(array) = RArray::from_value(value) {
|
|
923
|
+
for j in 0..array.len() {
|
|
924
|
+
let item = array.entry::<Value>(j as isize)?;
|
|
925
|
+
let item_str = String::try_convert(item)?;
|
|
926
|
+
form_data.push((field_name.clone(), item_str));
|
|
927
|
+
}
|
|
928
|
+
} else {
|
|
929
|
+
let value_str = String::try_convert(value)?;
|
|
930
|
+
form_data.push((field_name, value_str));
|
|
931
|
+
}
|
|
932
|
+
}
|
|
933
|
+
}
|
|
934
|
+
|
|
935
|
+
Some(RequestBody::Multipart { form_data, files })
|
|
936
|
+
} else if let Some(value) = get_kw(ruby, hash, "json") {
|
|
937
|
+
if value.is_nil() {
|
|
938
|
+
None
|
|
939
|
+
} else {
|
|
940
|
+
Some(RequestBody::Json(ruby_value_to_json(ruby, json_module, value)?))
|
|
941
|
+
}
|
|
942
|
+
} else if let Some(value) = get_kw(ruby, hash, "data") {
|
|
943
|
+
if value.is_nil() {
|
|
944
|
+
None
|
|
945
|
+
} else {
|
|
946
|
+
Some(RequestBody::Form(ruby_value_to_json(ruby, json_module, value)?))
|
|
947
|
+
}
|
|
948
|
+
} else if let Some(value) = get_kw(ruby, hash, "raw_body") {
|
|
949
|
+
if value.is_nil() {
|
|
950
|
+
None
|
|
951
|
+
} else {
|
|
952
|
+
Some(RequestBody::Raw(String::try_convert(value)?))
|
|
953
|
+
}
|
|
954
|
+
} else {
|
|
955
|
+
None
|
|
956
|
+
};
|
|
957
|
+
|
|
958
|
+
Ok(RequestConfig {
|
|
959
|
+
query,
|
|
960
|
+
headers,
|
|
961
|
+
cookies,
|
|
962
|
+
body,
|
|
963
|
+
})
|
|
964
|
+
}
|
|
965
|
+
|
|
966
|
+
fn build_ruby_request(
|
|
967
|
+
ruby: &Ruby,
|
|
968
|
+
handler: &RubyHandlerInner,
|
|
969
|
+
request_data: &RequestData,
|
|
970
|
+
validated_params: Option<&JsonValue>,
|
|
971
|
+
) -> Result<Value, Error> {
|
|
972
|
+
let hash = ruby.hash_new();
|
|
973
|
+
|
|
974
|
+
hash.aset(ruby.intern("method"), ruby.str_new(&handler.method))?;
|
|
975
|
+
hash.aset(ruby.intern("path"), ruby.str_new(&handler.path))?;
|
|
976
|
+
|
|
977
|
+
let path_params = map_to_ruby_hash(ruby, request_data.path_params.as_ref())?;
|
|
978
|
+
hash.aset(ruby.intern("path_params"), path_params)?;
|
|
979
|
+
|
|
980
|
+
let query_value = json_to_ruby(ruby, &request_data.query_params)?;
|
|
981
|
+
hash.aset(ruby.intern("query"), query_value)?;
|
|
982
|
+
|
|
983
|
+
let raw_query = multimap_to_ruby_hash(ruby, request_data.raw_query_params.as_ref())?;
|
|
984
|
+
hash.aset(ruby.intern("raw_query"), raw_query)?;
|
|
985
|
+
|
|
986
|
+
let headers = map_to_ruby_hash(ruby, request_data.headers.as_ref())?;
|
|
987
|
+
hash.aset(ruby.intern("headers"), headers)?;
|
|
988
|
+
|
|
989
|
+
let cookies = map_to_ruby_hash(ruby, request_data.cookies.as_ref())?;
|
|
990
|
+
hash.aset(ruby.intern("cookies"), cookies)?;
|
|
991
|
+
|
|
992
|
+
let body_value = json_to_ruby(ruby, &request_data.body)?;
|
|
993
|
+
hash.aset(ruby.intern("body"), body_value)?;
|
|
994
|
+
|
|
995
|
+
let params_value = if let Some(validated) = validated_params {
|
|
996
|
+
json_to_ruby(ruby, validated)?
|
|
997
|
+
} else {
|
|
998
|
+
build_default_params(ruby, request_data)?
|
|
999
|
+
};
|
|
1000
|
+
hash.aset(ruby.intern("params"), params_value)?;
|
|
1001
|
+
|
|
1002
|
+
Ok(hash.as_value())
|
|
1003
|
+
}
|
|
1004
|
+
|
|
1005
|
+
fn build_default_params(ruby: &Ruby, request_data: &RequestData) -> Result<Value, Error> {
|
|
1006
|
+
let mut map = JsonMap::new();
|
|
1007
|
+
|
|
1008
|
+
for (key, value) in request_data.path_params.as_ref() {
|
|
1009
|
+
map.insert(key.clone(), JsonValue::String(value.clone()));
|
|
1010
|
+
}
|
|
1011
|
+
|
|
1012
|
+
if let JsonValue::Object(obj) = &request_data.query_params {
|
|
1013
|
+
for (key, value) in obj {
|
|
1014
|
+
map.insert(key.clone(), value.clone());
|
|
1015
|
+
}
|
|
1016
|
+
}
|
|
1017
|
+
|
|
1018
|
+
for (key, value) in request_data.headers.as_ref() {
|
|
1019
|
+
map.insert(key.clone(), JsonValue::String(value.clone()));
|
|
1020
|
+
}
|
|
1021
|
+
|
|
1022
|
+
for (key, value) in request_data.cookies.as_ref() {
|
|
1023
|
+
map.insert(key.clone(), JsonValue::String(value.clone()));
|
|
1024
|
+
}
|
|
1025
|
+
|
|
1026
|
+
json_to_ruby(ruby, &JsonValue::Object(map))
|
|
1027
|
+
}
|
|
1028
|
+
|
|
1029
|
+
/// Interpret the value returned by a Ruby handler and classify it as a
/// streaming response or a regular payload.
///
/// Resolution order (first match wins):
/// 1. `nil`            -> empty 200 payload
/// 2. streaming duck   -> `RubyHandlerResult::Streaming` (responds to both
///    `stream` and `status_code`; see `is_streaming_response`)
/// 3. response duck    -> object responding to `status_code`; its `content`
///    becomes either a raw byte body (String) or a JSON body (anything else)
/// 4. Ruby String      -> raw 200 byte body
/// 5. anything else    -> serialised to JSON as a 200 body
fn interpret_handler_response(
    ruby: &Ruby,
    handler: &RubyHandlerInner,
    value: Value,
) -> Result<RubyHandlerResult, Error> {
    // nil handler return: empty 200 response with no body.
    if value.is_nil() {
        return Ok(RubyHandlerResult::Payload(HandlerResponsePayload {
            status: 200,
            headers: HashMap::new(),
            body: None,
            raw_body: None,
        }));
    }

    // Streaming responses are detected by duck-typing before the generic
    // `status_code` check below, since they respond to `status_code` too.
    if is_streaming_response(ruby, value)? {
        let stream_value: Value = value.funcall("stream", ())?;
        let status: i64 = value.funcall("status_code", ())?;
        let headers_value: Value = value.funcall("headers", ())?;

        let status_u16 = u16::try_from(status).map_err(|_| {
            Error::new(
                ruby.exception_arg_error(),
                "StreamingResponse status_code must be between 0 and 65535",
            )
        })?;

        let headers = value_to_string_map(ruby, headers_value)?;

        // The enumerator is wrapped in Opaque so it can cross thread
        // boundaries; consumers re-enter the Ruby VM to drain it.
        return Ok(RubyHandlerResult::Streaming(StreamingResponsePayload {
            enumerator: Arc::new(Opaque::from(stream_value)),
            status: status_u16,
            headers,
        }));
    }

    // Generic response object: anything that responds to `status_code`.
    let status_symbol = ruby.intern("status_code");
    if value.respond_to(status_symbol, false)? {
        let status: i64 = value.funcall("status_code", ())?;
        let status_u16 = u16::try_from(status)
            .map_err(|_| Error::new(ruby.exception_arg_error(), "status_code must be between 0 and 65535"))?;

        let headers_value: Value = value.funcall("headers", ())?;
        let headers = if headers_value.is_nil() {
            HashMap::new()
        } else {
            let hash = RHash::try_convert(headers_value)?;
            hash.to_hash_map::<String, String>()?
        };

        // String content becomes a raw byte body; any other non-nil content
        // is serialised through the handler's JSON module.
        let content_value: Value = value.funcall("content", ())?;
        let mut raw_body = None;
        let body = if content_value.is_nil() {
            None
        } else if let Ok(str_value) = RString::try_convert(content_value) {
            // SAFETY: the borrowed slice is copied into an owned Vec
            // immediately, before any further Ruby allocation could move or
            // mutate the backing string — TODO confirm no GC compaction can
            // intervene between as_slice and to_vec.
            let slice = unsafe { str_value.as_slice() };
            raw_body = Some(slice.to_vec());
            None
        } else {
            Some(ruby_value_to_json(
                ruby,
                handler.json_module.get_inner_with(ruby),
                content_value,
            )?)
        };

        return Ok(RubyHandlerResult::Payload(HandlerResponsePayload {
            status: status_u16,
            headers,
            body,
            raw_body,
        }));
    }

    // Bare Ruby String: raw 200 byte body.
    if let Ok(str_value) = RString::try_convert(value) {
        // SAFETY: same as above — the slice is copied to owned bytes
        // immediately after borrowing.
        let slice = unsafe { str_value.as_slice() };
        return Ok(RubyHandlerResult::Payload(HandlerResponsePayload {
            status: 200,
            headers: HashMap::new(),
            body: None,
            raw_body: Some(slice.to_vec()),
        }));
    }

    // Fallback: serialise any other Ruby value to JSON as a 200 body.
    let body_json = ruby_value_to_json(ruby, handler.json_module.get_inner_with(ruby), value)?;

    Ok(RubyHandlerResult::Payload(HandlerResponsePayload {
        status: 200,
        headers: HashMap::new(),
        body: Some(body_json),
        raw_body: None,
    }))
}
|
|
1121
|
+
|
|
1122
|
+
fn value_to_string_map(ruby: &Ruby, value: Value) -> Result<HashMap<String, String>, Error> {
|
|
1123
|
+
if value.is_nil() {
|
|
1124
|
+
return Ok(HashMap::new());
|
|
1125
|
+
}
|
|
1126
|
+
let hash = RHash::try_convert(value)?;
|
|
1127
|
+
hash.to_hash_map::<String, String>().map_err(|err| {
|
|
1128
|
+
Error::new(
|
|
1129
|
+
ruby.exception_arg_error(),
|
|
1130
|
+
format!("Expected headers hash of strings: {}", err),
|
|
1131
|
+
)
|
|
1132
|
+
})
|
|
1133
|
+
}
|
|
1134
|
+
|
|
1135
|
+
/// Duck-type check: a handler result is treated as a streaming response
/// when it responds to both `stream` and `status_code`.
fn is_streaming_response(ruby: &Ruby, value: Value) -> Result<bool, Error> {
    // Short-circuit like the `&&` form: skip the second probe when the
    // first already failed.
    if !value.respond_to(ruby.intern("stream"), false)? {
        return Ok(false);
    }
    value.respond_to(ruby.intern("status_code"), false)
}
|
|
1140
|
+
|
|
1141
|
+
/// Convert a `TestResponseData` into the Ruby hash returned by the
/// native test client.
///
/// The hash exposes `status_code`, `headers`, and the textual body under
/// both `body` and `body_text` (both `nil` when there was no text body).
fn response_to_ruby(ruby: &Ruby, response: TestResponseData) -> Result<Value, Error> {
    let result = ruby.hash_new();

    result.aset(
        ruby.intern("status_code"),
        ruby.integer_from_i64(response.status as i64),
    )?;

    let headers = ruby.hash_new();
    for (name, value) in response.headers {
        headers.aset(ruby.str_new(&name), ruby.str_new(&value))?;
    }
    result.aset(ruby.intern("headers"), headers)?;

    // Mirror the body under both keys so callers can use either accessor.
    match response.body_text {
        Some(text) => {
            let body = ruby.str_new(&text);
            result.aset(ruby.intern("body"), body)?;
            result.aset(ruby.intern("body_text"), body)?;
        }
        None => {
            result.aset(ruby.intern("body"), ruby.qnil())?;
            result.aset(ruby.intern("body_text"), ruby.qnil())?;
        }
    }

    Ok(result.as_value())
}
|
|
1166
|
+
|
|
1167
|
+
fn ruby_value_to_json(ruby: &Ruby, json_module: Value, value: Value) -> Result<JsonValue, Error> {
|
|
1168
|
+
if value.is_nil() {
|
|
1169
|
+
return Ok(JsonValue::Null);
|
|
1170
|
+
}
|
|
1171
|
+
|
|
1172
|
+
let json_string: String = json_module.funcall("generate", (value,))?;
|
|
1173
|
+
serde_json::from_str(&json_string).map_err(|err| {
|
|
1174
|
+
Error::new(
|
|
1175
|
+
ruby.exception_runtime_error(),
|
|
1176
|
+
format!("Failed to convert Ruby value to JSON: {err}"),
|
|
1177
|
+
)
|
|
1178
|
+
})
|
|
1179
|
+
}
|
|
1180
|
+
|
|
1181
|
+
fn json_to_ruby(ruby: &Ruby, value: &JsonValue) -> Result<Value, Error> {
|
|
1182
|
+
match value {
|
|
1183
|
+
JsonValue::Null => Ok(ruby.qnil().as_value()),
|
|
1184
|
+
JsonValue::Bool(b) => Ok(if *b {
|
|
1185
|
+
ruby.qtrue().as_value()
|
|
1186
|
+
} else {
|
|
1187
|
+
ruby.qfalse().as_value()
|
|
1188
|
+
}),
|
|
1189
|
+
JsonValue::Number(num) => {
|
|
1190
|
+
if let Some(i) = num.as_i64() {
|
|
1191
|
+
Ok(ruby.integer_from_i64(i).as_value())
|
|
1192
|
+
} else if let Some(f) = num.as_f64() {
|
|
1193
|
+
Ok(ruby.float_from_f64(f).as_value())
|
|
1194
|
+
} else {
|
|
1195
|
+
Ok(ruby.qnil().as_value())
|
|
1196
|
+
}
|
|
1197
|
+
}
|
|
1198
|
+
JsonValue::String(str_val) => Ok(ruby.str_new(str_val).as_value()),
|
|
1199
|
+
JsonValue::Array(items) => {
|
|
1200
|
+
let array = ruby.ary_new();
|
|
1201
|
+
for item in items {
|
|
1202
|
+
array.push(json_to_ruby(ruby, item)?)?;
|
|
1203
|
+
}
|
|
1204
|
+
Ok(array.as_value())
|
|
1205
|
+
}
|
|
1206
|
+
JsonValue::Object(map) => {
|
|
1207
|
+
let hash = ruby.hash_new();
|
|
1208
|
+
for (key, item) in map {
|
|
1209
|
+
hash.aset(ruby.str_new(key), json_to_ruby(ruby, item)?)?;
|
|
1210
|
+
}
|
|
1211
|
+
Ok(hash.as_value())
|
|
1212
|
+
}
|
|
1213
|
+
}
|
|
1214
|
+
}
|
|
1215
|
+
|
|
1216
|
+
/// Convert a `HashMap<String, String>` into a Ruby Hash of
/// String => String.
fn map_to_ruby_hash(ruby: &Ruby, map: &HashMap<String, String>) -> Result<Value, Error> {
    let out = ruby.hash_new();
    map.iter()
        .try_for_each(|(key, value)| out.aset(ruby.str_new(key), ruby.str_new(value)))?;
    Ok(out.as_value())
}
|
|
1223
|
+
|
|
1224
|
+
/// Convert a multi-valued string map into a Ruby Hash of
/// String => Array[String].
fn multimap_to_ruby_hash(ruby: &Ruby, map: &HashMap<String, Vec<String>>) -> Result<Value, Error> {
    let out = ruby.hash_new();
    for (key, values) in map {
        let list = ruby.ary_new();
        values.iter().try_for_each(|item| list.push(ruby.str_new(item)))?;
        out.aset(ruby.str_new(key), list)?;
    }
    Ok(out.as_value())
}
|
|
1235
|
+
|
|
1236
|
+
fn problem_to_json(problem: &ProblemDetails) -> String {
|
|
1237
|
+
problem
|
|
1238
|
+
.to_json_pretty()
|
|
1239
|
+
.unwrap_or_else(|err| format!("Failed to serialise problem details: {err}"))
|
|
1240
|
+
}
|
|
1241
|
+
|
|
1242
|
+
/// Look up a keyword argument in a Ruby Hash, trying the Symbol key first
/// and falling back to the String key.
fn get_kw(ruby: &Ruby, hash: RHash, name: &str) -> Option<Value> {
    if let Some(found) = hash.get(ruby.intern(name)) {
        return Some(found);
    }
    hash.get(name)
}
|
|
1246
|
+
|
|
1247
|
+
fn fetch_handler(ruby: &Ruby, handlers: &RHash, name: &str) -> Result<Value, Error> {
|
|
1248
|
+
let symbol_key = ruby.intern(name);
|
|
1249
|
+
if let Some(value) = handlers.get(symbol_key) {
|
|
1250
|
+
return Ok(value);
|
|
1251
|
+
}
|
|
1252
|
+
|
|
1253
|
+
let string_key = ruby.str_new(name);
|
|
1254
|
+
if let Some(value) = handlers.get(string_key) {
|
|
1255
|
+
return Ok(value);
|
|
1256
|
+
}
|
|
1257
|
+
|
|
1258
|
+
Err(Error::new(
|
|
1259
|
+
ruby.exception_name_error(),
|
|
1260
|
+
format!("Handler '{name}' not provided"),
|
|
1261
|
+
))
|
|
1262
|
+
}
|
|
1263
|
+
|
|
1264
|
+
/// GC mark hook so Ruby keeps handler closures alive.
///
/// Invoked by Ruby's garbage collector for the native test client; marks
/// every registered handler so the Procs they hold are not collected while
/// the native side still references them.
#[allow(dead_code)]
fn mark(client: &NativeTestClient, marker: &Marker) {
    // `inner` is None after the client has been torn down — nothing to mark.
    let inner_ref = client.inner.borrow();
    if let Some(inner) = inner_ref.as_ref() {
        for handler in &inner.handlers {
            handler.mark(marker);
        }
    }
}
|
|
1274
|
+
|
|
1275
|
+
/// Return the Spikard version.
|
|
1276
|
+
fn version() -> String {
|
|
1277
|
+
env!("CARGO_PKG_VERSION").to_string()
|
|
1278
|
+
}
|
|
1279
|
+
|
|
1280
|
+
/// Build dependency container from Ruby dependencies
///
/// Converts Ruby dependencies (values and factories) to Rust DependencyContainer.
///
/// Each entry in the `dependencies` Hash is interpreted as follows:
/// - a Hash with `type: :factory` registers a `RubyFactoryDependency`
///   (with optional `depends_on`, `singleton`, `cacheable` keys);
/// - a Hash with `type: :value` registers a `RubyValueDependency` from its
///   `value` key;
/// - a Hash with any other / missing `type` is rejected with a RuntimeError
///   — NOTE(review): this means a plain Hash *value* dependency cannot be
///   passed directly; confirm callers always wrap hashes in `type: :value`;
/// - any non-Hash value is registered directly as a raw value dependency.
#[cfg(feature = "di")]
fn build_dependency_container(
    ruby: &Ruby,
    dependencies: Value,
) -> Result<spikard_core::di::DependencyContainer, Error> {
    use spikard_core::di::DependencyContainer;
    use std::sync::Arc;

    // nil means "no dependencies": return an empty container.
    if dependencies.is_nil() {
        return Ok(DependencyContainer::new());
    }

    let mut container = DependencyContainer::new();
    let deps_hash = RHash::try_convert(dependencies)?;

    deps_hash.foreach(|key: String, value: Value| -> Result<ForEach, Error> {
        // Check if this is a factory (has a 'type' field set to :factory)
        if let Ok(dep_hash) = RHash::try_convert(value) {
            let dep_type: Option<String> = get_kw(ruby, dep_hash, "type").and_then(|v| {
                // Handle both symbol and string types
                if let Ok(sym) = magnus::Symbol::try_convert(v) {
                    Some(sym.name().ok()?.to_string())
                } else {
                    String::try_convert(v).ok()
                }
            });

            match dep_type.as_deref() {
                Some("factory") => {
                    // Factory dependency
                    let factory = get_kw(ruby, dep_hash, "factory")
                        .ok_or_else(|| Error::new(ruby.exception_runtime_error(), "Factory missing 'factory' key"))?;

                    // Optional metadata; conversion failures fall back to the
                    // defaults (no deps, non-singleton, cacheable).
                    let depends_on: Vec<String> = get_kw(ruby, dep_hash, "depends_on")
                        .and_then(|v| Vec::<String>::try_convert(v).ok())
                        .unwrap_or_default();

                    let singleton: bool = get_kw(ruby, dep_hash, "singleton")
                        .and_then(|v| bool::try_convert(v).ok())
                        .unwrap_or(false);

                    let cacheable: bool = get_kw(ruby, dep_hash, "cacheable")
                        .and_then(|v| bool::try_convert(v).ok())
                        .unwrap_or(true);

                    let factory_dep =
                        crate::di::RubyFactoryDependency::new(key.clone(), factory, depends_on, singleton, cacheable);

                    container.register(key.clone(), Arc::new(factory_dep)).map_err(|e| {
                        Error::new(
                            ruby.exception_runtime_error(),
                            format!("Failed to register factory '{}': {}", key, e),
                        )
                    })?;
                }
                Some("value") => {
                    // Value dependency
                    let value_data = get_kw(ruby, dep_hash, "value").ok_or_else(|| {
                        Error::new(ruby.exception_runtime_error(), "Value dependency missing 'value' key")
                    })?;

                    let value_dep = crate::di::RubyValueDependency::new(key.clone(), value_data);

                    container.register(key.clone(), Arc::new(value_dep)).map_err(|e| {
                        Error::new(
                            ruby.exception_runtime_error(),
                            format!("Failed to register value '{}': {}", key, e),
                        )
                    })?;
                }
                _ => {
                    // Unknown or absent `type` on a Hash entry: reject.
                    return Err(Error::new(
                        ruby.exception_runtime_error(),
                        format!("Invalid dependency type for '{}'", key),
                    ));
                }
            }
        } else {
            // Treat as raw value
            let value_dep = crate::di::RubyValueDependency::new(key.clone(), value);
            container.register(key.clone(), Arc::new(value_dep)).map_err(|e| {
                Error::new(
                    ruby.exception_runtime_error(),
                    format!("Failed to register value '{}': {}", key, e),
                )
            })?;
        }

        Ok(ForEach::Continue)
    })?;

    Ok(container)
}
|
|
1376
|
+
|
|
1377
|
+
/// Helper to extract an optional string from a Ruby Hash
|
|
1378
|
+
fn get_optional_string_from_hash(hash: RHash, key: &str) -> Result<Option<String>, Error> {
|
|
1379
|
+
match hash.get(String::from(key)) {
|
|
1380
|
+
Some(v) if !v.is_nil() => Ok(Some(String::try_convert(v)?)),
|
|
1381
|
+
_ => Ok(None),
|
|
1382
|
+
}
|
|
1383
|
+
}
|
|
1384
|
+
|
|
1385
|
+
/// Helper to extract a required string from a Ruby Hash
|
|
1386
|
+
fn get_required_string_from_hash(hash: RHash, key: &str, ruby: &Ruby) -> Result<String, Error> {
|
|
1387
|
+
let value = hash
|
|
1388
|
+
.get(String::from(key))
|
|
1389
|
+
.ok_or_else(|| Error::new(ruby.exception_arg_error(), format!("missing required key '{}'", key)))?;
|
|
1390
|
+
if value.is_nil() {
|
|
1391
|
+
return Err(Error::new(
|
|
1392
|
+
ruby.exception_arg_error(),
|
|
1393
|
+
format!("key '{}' cannot be nil", key),
|
|
1394
|
+
));
|
|
1395
|
+
}
|
|
1396
|
+
String::try_convert(value)
|
|
1397
|
+
}
|
|
1398
|
+
|
|
1399
|
+
/// Extract multipart file parts from a Ruby Hash of
/// field_name => file-array(s).
///
/// Each value may be either a single file array `[filename, content, ct?]`
/// or an array of such arrays (multiple files under one field name). The
/// two cases are distinguished by whether the first element is itself an
/// Array. Non-Array values are silently skipped.
fn extract_files(ruby: &Ruby, files_value: Value) -> Result<Vec<MultipartFilePart>, Error> {
    let files_hash = RHash::try_convert(files_value)?;

    // Iterate the hash via its `keys` array so the lookup order matches
    // Ruby's insertion order.
    let keys_array: RArray = files_hash.funcall("keys", ())?;
    let mut result = Vec::new();

    for i in 0..keys_array.len() {
        let key_val = keys_array.entry::<Value>(i as isize)?;
        let field_name = String::try_convert(key_val)?;
        let value = files_hash
            .get(key_val)
            .ok_or_else(|| Error::new(ruby.exception_runtime_error(), "Failed to get hash value"))?;

        if let Some(outer_array) = RArray::from_value(value) {
            if outer_array.is_empty() {
                continue;
            }

            // If the first element is itself an Array, the value is a list
            // of file arrays; otherwise the value IS the file array.
            let first_elem = outer_array.entry::<Value>(0)?;

            if RArray::from_value(first_elem).is_some() {
                for j in 0..outer_array.len() {
                    let file_array = outer_array.entry::<Value>(j as isize)?;
                    let file_data = extract_single_file(ruby, &field_name, file_array)?;
                    result.push(file_data);
                }
            } else {
                let file_data = extract_single_file(ruby, &field_name, value)?;
                result.push(file_data);
            }
        }
    }

    Ok(result)
}
|
|
1434
|
+
|
|
1435
|
+
/// Extract a single file from Ruby array [filename, content, content_type (optional)]
|
|
1436
|
+
fn extract_single_file(ruby: &Ruby, field_name: &str, array_value: Value) -> Result<MultipartFilePart, Error> {
|
|
1437
|
+
let array = RArray::from_value(array_value)
|
|
1438
|
+
.ok_or_else(|| Error::new(ruby.exception_arg_error(), "file must be an Array [filename, content]"))?;
|
|
1439
|
+
|
|
1440
|
+
if array.len() < 2 {
|
|
1441
|
+
return Err(Error::new(
|
|
1442
|
+
ruby.exception_arg_error(),
|
|
1443
|
+
"file Array must have at least 2 elements: [filename, content]",
|
|
1444
|
+
));
|
|
1445
|
+
}
|
|
1446
|
+
|
|
1447
|
+
let filename: String = String::try_convert(array.shift()?)?;
|
|
1448
|
+
let content_str: String = String::try_convert(array.shift()?)?;
|
|
1449
|
+
let content = content_str.into_bytes();
|
|
1450
|
+
|
|
1451
|
+
let content_type: Option<String> = if !array.is_empty() {
|
|
1452
|
+
String::try_convert(array.shift()?).ok()
|
|
1453
|
+
} else {
|
|
1454
|
+
None
|
|
1455
|
+
};
|
|
1456
|
+
|
|
1457
|
+
Ok(MultipartFilePart {
|
|
1458
|
+
field_name: field_name.to_string(),
|
|
1459
|
+
filename,
|
|
1460
|
+
content,
|
|
1461
|
+
content_type,
|
|
1462
|
+
})
|
|
1463
|
+
}
|
|
1464
|
+
|
|
1465
|
+
/// Extract ServerConfig from Ruby ServerConfig object
///
/// Reads every field off the Ruby config object via method calls
/// (`funcall`), translating nil-able sub-objects (compression, rate limit,
/// JWT, API key, OpenAPI) into the corresponding `Option<...>` Rust config
/// structs. Sub-objects may be either plain Hashes or objects exposing
/// accessor methods, where both branches exist below.
fn extract_server_config(ruby: &Ruby, config_value: Value) -> Result<spikard_http::ServerConfig, Error> {
    use spikard_http::{
        ApiKeyConfig, CompressionConfig, ContactInfo, JwtConfig, LicenseInfo, OpenApiConfig, RateLimitConfig,
        ServerInfo, StaticFilesConfig,
    };
    use std::collections::HashMap;

    // Scalar server settings.
    let host: String = config_value.funcall("host", ())?;

    // NOTE(review): `port` is read as u32 but stored as u16 below via
    // `port as u16`, which silently truncates values > 65535 — consider
    // `u16::try_from`.
    let port: u32 = config_value.funcall("port", ())?;

    let workers: usize = config_value.funcall("workers", ())?;

    let enable_request_id: bool = config_value.funcall("enable_request_id", ())?;

    // Optional request limits: nil disables them.
    let max_body_size_value: Value = config_value.funcall("max_body_size", ())?;
    let max_body_size = if max_body_size_value.is_nil() {
        None
    } else {
        Some(u64::try_convert(max_body_size_value)? as usize)
    };

    let request_timeout_value: Value = config_value.funcall("request_timeout", ())?;
    let request_timeout = if request_timeout_value.is_nil() {
        None
    } else {
        Some(u64::try_convert(request_timeout_value)?)
    };

    let graceful_shutdown: bool = config_value.funcall("graceful_shutdown", ())?;

    let shutdown_timeout: u64 = config_value.funcall("shutdown_timeout", ())?;

    // Optional compression settings.
    let compression_value: Value = config_value.funcall("compression", ())?;
    let compression = if compression_value.is_nil() {
        None
    } else {
        let gzip: bool = compression_value.funcall("gzip", ())?;
        let brotli: bool = compression_value.funcall("brotli", ())?;
        let min_size: usize = compression_value.funcall("min_size", ())?;
        let quality: u32 = compression_value.funcall("quality", ())?;
        Some(CompressionConfig {
            gzip,
            brotli,
            min_size,
            quality,
        })
    };

    // Optional rate limiting.
    let rate_limit_value: Value = config_value.funcall("rate_limit", ())?;
    let rate_limit = if rate_limit_value.is_nil() {
        None
    } else {
        let per_second: u64 = rate_limit_value.funcall("per_second", ())?;
        let burst: u32 = rate_limit_value.funcall("burst", ())?;
        let ip_based: bool = rate_limit_value.funcall("ip_based", ())?;
        Some(RateLimitConfig {
            per_second,
            burst,
            ip_based,
        })
    };

    // Optional JWT authentication.
    let jwt_auth_value: Value = config_value.funcall("jwt_auth", ())?;
    let jwt_auth = if jwt_auth_value.is_nil() {
        None
    } else {
        let secret: String = jwt_auth_value.funcall("secret", ())?;
        let algorithm: String = jwt_auth_value.funcall("algorithm", ())?;
        let audience_value: Value = jwt_auth_value.funcall("audience", ())?;
        let audience = if audience_value.is_nil() {
            None
        } else {
            Some(Vec::<String>::try_convert(audience_value)?)
        };
        let issuer_value: Value = jwt_auth_value.funcall("issuer", ())?;
        let issuer = if issuer_value.is_nil() {
            None
        } else {
            Some(String::try_convert(issuer_value)?)
        };
        let leeway: u64 = jwt_auth_value.funcall("leeway", ())?;
        Some(JwtConfig {
            secret,
            algorithm,
            audience,
            issuer,
            leeway,
        })
    };

    // Optional API key authentication.
    let api_key_auth_value: Value = config_value.funcall("api_key_auth", ())?;
    let api_key_auth = if api_key_auth_value.is_nil() {
        None
    } else {
        let keys: Vec<String> = api_key_auth_value.funcall("keys", ())?;
        let header_name: String = api_key_auth_value.funcall("header_name", ())?;
        Some(ApiKeyConfig { keys, header_name })
    };

    // Static file mounts: always an Array (possibly empty).
    let static_files_value: Value = config_value.funcall("static_files", ())?;
    let static_files_array = RArray::from_value(static_files_value)
        .ok_or_else(|| Error::new(ruby.exception_type_error(), "static_files must be an Array"))?;

    let mut static_files = Vec::new();
    for i in 0..static_files_array.len() {
        let sf_value = static_files_array.entry::<Value>(i as isize)?;
        let directory: String = sf_value.funcall("directory", ())?;
        let route_prefix: String = sf_value.funcall("route_prefix", ())?;
        let index_file: bool = sf_value.funcall("index_file", ())?;
        let cache_control_value: Value = sf_value.funcall("cache_control", ())?;
        let cache_control = if cache_control_value.is_nil() {
            None
        } else {
            Some(String::try_convert(cache_control_value)?)
        };
        static_files.push(StaticFilesConfig {
            directory,
            route_prefix,
            index_file,
            cache_control,
        });
    }

    // Optional OpenAPI documentation settings.
    let openapi_value: Value = config_value.funcall("openapi", ())?;
    let openapi = if openapi_value.is_nil() {
        None
    } else {
        let enabled: bool = openapi_value.funcall("enabled", ())?;
        let title: String = openapi_value.funcall("title", ())?;
        let version: String = openapi_value.funcall("version", ())?;
        let description_value: Value = openapi_value.funcall("description", ())?;
        let description = if description_value.is_nil() {
            None
        } else {
            Some(String::try_convert(description_value)?)
        };
        let swagger_ui_path: String = openapi_value.funcall("swagger_ui_path", ())?;
        let redoc_path: String = openapi_value.funcall("redoc_path", ())?;
        let openapi_json_path: String = openapi_value.funcall("openapi_json_path", ())?;

        // Contact info: accept either a Hash or an object with accessors.
        let contact_value: Value = openapi_value.funcall("contact", ())?;
        let contact = if contact_value.is_nil() {
            None
        } else if let Some(contact_hash) = RHash::from_value(contact_value) {
            let name = get_optional_string_from_hash(contact_hash, "name")?;
            let email = get_optional_string_from_hash(contact_hash, "email")?;
            let url = get_optional_string_from_hash(contact_hash, "url")?;
            Some(ContactInfo { name, email, url })
        } else {
            let name_value: Value = contact_value.funcall("name", ())?;
            let email_value: Value = contact_value.funcall("email", ())?;
            let url_value: Value = contact_value.funcall("url", ())?;
            Some(ContactInfo {
                name: if name_value.is_nil() {
                    None
                } else {
                    Some(String::try_convert(name_value)?)
                },
                email: if email_value.is_nil() {
                    None
                } else {
                    Some(String::try_convert(email_value)?)
                },
                url: if url_value.is_nil() {
                    None
                } else {
                    Some(String::try_convert(url_value)?)
                },
            })
        };

        // License info: same Hash-or-object duality; `name` is required.
        let license_value: Value = openapi_value.funcall("license", ())?;
        let license = if license_value.is_nil() {
            None
        } else if let Some(license_hash) = RHash::from_value(license_value) {
            let name = get_required_string_from_hash(license_hash, "name", ruby)?;
            let url = get_optional_string_from_hash(license_hash, "url")?;
            Some(LicenseInfo { name, url })
        } else {
            let name: String = license_value.funcall("name", ())?;
            let url_value: Value = license_value.funcall("url", ())?;
            let url = if url_value.is_nil() {
                None
            } else {
                Some(String::try_convert(url_value)?)
            };
            Some(LicenseInfo { name, url })
        };

        // Server list: each entry is a Hash or an object with accessors.
        let servers_value: Value = openapi_value.funcall("servers", ())?;
        let servers_array = RArray::from_value(servers_value)
            .ok_or_else(|| Error::new(ruby.exception_type_error(), "servers must be an Array"))?;

        let mut servers = Vec::new();
        for i in 0..servers_array.len() {
            let server_value = servers_array.entry::<Value>(i as isize)?;

            let (url, description) = if let Some(server_hash) = RHash::from_value(server_value) {
                let url = get_required_string_from_hash(server_hash, "url", ruby)?;
                let description = get_optional_string_from_hash(server_hash, "description")?;
                (url, description)
            } else {
                let url: String = server_value.funcall("url", ())?;
                let description_value: Value = server_value.funcall("description", ())?;
                let description = if description_value.is_nil() {
                    None
                } else {
                    Some(String::try_convert(description_value)?)
                };
                (url, description)
            };

            servers.push(ServerInfo { url, description });
        }

        // Security schemes are not configurable from Ruby yet.
        let security_schemes = HashMap::new();

        Some(OpenApiConfig {
            enabled,
            title,
            version,
            description,
            swagger_ui_path,
            redoc_path,
            openapi_json_path,
            contact,
            license,
            servers,
            security_schemes,
        })
    };

    Ok(spikard_http::ServerConfig {
        host,
        port: port as u16,
        workers,
        enable_request_id,
        max_body_size,
        request_timeout,
        compression,
        rate_limit,
        jwt_auth,
        api_key_auth,
        static_files,
        graceful_shutdown,
        shutdown_timeout,
        background_tasks: spikard_http::BackgroundTaskConfig::default(),
        openapi,
        lifecycle_hooks: None,
        di_container: None,
    })
}
|
|
1719
|
+
|
|
1720
|
+
/// Start the Spikard HTTP server from Ruby
|
|
1721
|
+
///
|
|
1722
|
+
/// Creates an Axum HTTP server in a dedicated background thread with its own Tokio runtime.
|
|
1723
|
+
///
|
|
1724
|
+
/// # Arguments
|
|
1725
|
+
///
|
|
1726
|
+
/// * `routes_json` - JSON string containing route metadata
|
|
1727
|
+
/// * `handlers` - Ruby Hash mapping handler_name => Proc
|
|
1728
|
+
/// * `config` - Ruby ServerConfig object with all middleware settings
|
|
1729
|
+
/// * `hooks_value` - Lifecycle hooks
|
|
1730
|
+
/// * `ws_handlers` - WebSocket handlers
|
|
1731
|
+
/// * `sse_producers` - SSE producers
|
|
1732
|
+
/// * `dependencies` - Dependency injection container
|
|
1733
|
+
///
|
|
1734
|
+
/// # Example (Ruby)
|
|
1735
|
+
///
|
|
1736
|
+
/// ```ruby
|
|
1737
|
+
/// config = Spikard::ServerConfig.new(host: '0.0.0.0', port: 8000)
|
|
1738
|
+
/// Spikard::Native.run_server(routes_json, handlers, config, hooks, ws, sse, deps)
|
|
1739
|
+
/// ```
|
|
1740
|
+
#[allow(clippy::too_many_arguments)]
|
|
1741
|
+
fn run_server(
|
|
1742
|
+
ruby: &Ruby,
|
|
1743
|
+
routes_json: String,
|
|
1744
|
+
handlers: Value,
|
|
1745
|
+
config_value: Value,
|
|
1746
|
+
hooks_value: Value,
|
|
1747
|
+
ws_handlers: Value,
|
|
1748
|
+
sse_producers: Value,
|
|
1749
|
+
dependencies: Value,
|
|
1750
|
+
) -> Result<(), Error> {
|
|
1751
|
+
use spikard_http::{SchemaRegistry, Server};
|
|
1752
|
+
use tracing::{error, info, warn};
|
|
1753
|
+
|
|
1754
|
+
let mut config = extract_server_config(ruby, config_value)?;
|
|
1755
|
+
|
|
1756
|
+
let host = config.host.clone();
|
|
1757
|
+
let port = config.port;
|
|
1758
|
+
|
|
1759
|
+
let metadata: Vec<RouteMetadata> = serde_json::from_str(&routes_json)
|
|
1760
|
+
.map_err(|err| Error::new(ruby.exception_arg_error(), format!("Invalid routes JSON: {}", err)))?;
|
|
1761
|
+
|
|
1762
|
+
let handlers_hash = RHash::from_value(handlers).ok_or_else(|| {
|
|
1763
|
+
Error::new(
|
|
1764
|
+
ruby.exception_arg_error(),
|
|
1765
|
+
"handlers parameter must be a Hash of handler_name => Proc",
|
|
1766
|
+
)
|
|
1767
|
+
})?;
|
|
1768
|
+
|
|
1769
|
+
let json_module = ruby
|
|
1770
|
+
.class_object()
|
|
1771
|
+
.funcall::<_, _, Value>("const_get", ("JSON",))
|
|
1772
|
+
.map_err(|err| Error::new(ruby.exception_name_error(), format!("JSON module not found: {}", err)))?;
|
|
1773
|
+
|
|
1774
|
+
let schema_registry = SchemaRegistry::new();
|
|
1775
|
+
|
|
1776
|
+
let mut routes_with_handlers: Vec<(Route, Arc<dyn spikard_http::Handler>)> = Vec::new();
|
|
1777
|
+
|
|
1778
|
+
for route_meta in metadata {
|
|
1779
|
+
let route = Route::from_metadata(route_meta.clone(), &schema_registry)
|
|
1780
|
+
.map_err(|e| Error::new(ruby.exception_runtime_error(), format!("Failed to create route: {}", e)))?;
|
|
1781
|
+
|
|
1782
|
+
let handler_key = ruby.str_new(&route_meta.handler_name);
|
|
1783
|
+
let handler_value: Value = match handlers_hash.lookup(handler_key) {
|
|
1784
|
+
Ok(val) => val,
|
|
1785
|
+
Err(_) => {
|
|
1786
|
+
return Err(Error::new(
|
|
1787
|
+
ruby.exception_arg_error(),
|
|
1788
|
+
format!("Handler '{}' not found in handlers hash", route_meta.handler_name),
|
|
1789
|
+
));
|
|
1790
|
+
}
|
|
1791
|
+
};
|
|
1792
|
+
|
|
1793
|
+
let ruby_handler = RubyHandler::new_for_server(
|
|
1794
|
+
ruby,
|
|
1795
|
+
handler_value,
|
|
1796
|
+
route_meta.handler_name.clone(),
|
|
1797
|
+
route_meta.method.clone(),
|
|
1798
|
+
route_meta.path.clone(),
|
|
1799
|
+
json_module,
|
|
1800
|
+
&route,
|
|
1801
|
+
)?;
|
|
1802
|
+
|
|
1803
|
+
routes_with_handlers.push((route, Arc::new(ruby_handler) as Arc<dyn spikard_http::Handler>));
|
|
1804
|
+
}
|
|
1805
|
+
|
|
1806
|
+
let lifecycle_hooks = if !hooks_value.is_nil() {
|
|
1807
|
+
let hooks_hash = RHash::from_value(hooks_value)
|
|
1808
|
+
.ok_or_else(|| Error::new(ruby.exception_arg_error(), "lifecycle_hooks parameter must be a Hash"))?;
|
|
1809
|
+
|
|
1810
|
+
let mut hooks = spikard_http::LifecycleHooks::new();
|
|
1811
|
+
type RubyHookVec = Vec<Arc<dyn spikard_http::lifecycle::LifecycleHook<Request<Body>, Response<Body>>>>;
|
|
1812
|
+
|
|
1813
|
+
let extract_hooks = |key: &str| -> Result<RubyHookVec, Error> {
|
|
1814
|
+
let key_sym = ruby.to_symbol(key);
|
|
1815
|
+
if let Some(hooks_array) = hooks_hash.get(key_sym)
|
|
1816
|
+
&& !hooks_array.is_nil()
|
|
1817
|
+
{
|
|
1818
|
+
let array = RArray::from_value(hooks_array)
|
|
1819
|
+
.ok_or_else(|| Error::new(ruby.exception_type_error(), format!("{} must be an Array", key)))?;
|
|
1820
|
+
|
|
1821
|
+
let mut result = Vec::new();
|
|
1822
|
+
let len = array.len();
|
|
1823
|
+
for i in 0..len {
|
|
1824
|
+
let hook_value: Value = array.entry(i as isize)?;
|
|
1825
|
+
let name = format!("{}_{}", key, i);
|
|
1826
|
+
let ruby_hook = lifecycle::RubyLifecycleHook::new(name, hook_value);
|
|
1827
|
+
result.push(Arc::new(ruby_hook)
|
|
1828
|
+
as Arc<
|
|
1829
|
+
dyn spikard_http::lifecycle::LifecycleHook<Request<Body>, Response<Body>>,
|
|
1830
|
+
>);
|
|
1831
|
+
}
|
|
1832
|
+
return Ok(result);
|
|
1833
|
+
}
|
|
1834
|
+
Ok(Vec::new())
|
|
1835
|
+
};
|
|
1836
|
+
|
|
1837
|
+
for hook in extract_hooks("on_request")? {
|
|
1838
|
+
hooks.add_on_request(hook);
|
|
1839
|
+
}
|
|
1840
|
+
|
|
1841
|
+
for hook in extract_hooks("pre_validation")? {
|
|
1842
|
+
hooks.add_pre_validation(hook);
|
|
1843
|
+
}
|
|
1844
|
+
|
|
1845
|
+
for hook in extract_hooks("pre_handler")? {
|
|
1846
|
+
hooks.add_pre_handler(hook);
|
|
1847
|
+
}
|
|
1848
|
+
|
|
1849
|
+
for hook in extract_hooks("on_response")? {
|
|
1850
|
+
hooks.add_on_response(hook);
|
|
1851
|
+
}
|
|
1852
|
+
|
|
1853
|
+
for hook in extract_hooks("on_error")? {
|
|
1854
|
+
hooks.add_on_error(hook);
|
|
1855
|
+
}
|
|
1856
|
+
|
|
1857
|
+
Some(hooks)
|
|
1858
|
+
} else {
|
|
1859
|
+
None
|
|
1860
|
+
};
|
|
1861
|
+
|
|
1862
|
+
config.lifecycle_hooks = lifecycle_hooks.map(Arc::new);
|
|
1863
|
+
|
|
1864
|
+
// Extract and register dependencies
|
|
1865
|
+
#[cfg(feature = "di")]
|
|
1866
|
+
{
|
|
1867
|
+
if !dependencies.is_nil() {
|
|
1868
|
+
match build_dependency_container(ruby, dependencies) {
|
|
1869
|
+
Ok(container) => {
|
|
1870
|
+
config.di_container = Some(Arc::new(container));
|
|
1871
|
+
}
|
|
1872
|
+
Err(err) => {
|
|
1873
|
+
return Err(Error::new(
|
|
1874
|
+
ruby.exception_runtime_error(),
|
|
1875
|
+
format!("Failed to build DI container: {}", err),
|
|
1876
|
+
));
|
|
1877
|
+
}
|
|
1878
|
+
}
|
|
1879
|
+
}
|
|
1880
|
+
}
|
|
1881
|
+
|
|
1882
|
+
Server::init_logging();
|
|
1883
|
+
|
|
1884
|
+
info!("Starting Spikard server on {}:{}", host, port);
|
|
1885
|
+
info!("Registered {} routes", routes_with_handlers.len());
|
|
1886
|
+
|
|
1887
|
+
let mut app_router = Server::with_handlers(config.clone(), routes_with_handlers)
|
|
1888
|
+
.map_err(|e| Error::new(ruby.exception_runtime_error(), format!("Failed to build router: {}", e)))?;
|
|
1889
|
+
|
|
1890
|
+
let mut ws_endpoints = Vec::new();
|
|
1891
|
+
if !ws_handlers.is_nil() {
|
|
1892
|
+
let ws_hash = RHash::from_value(ws_handlers)
|
|
1893
|
+
.ok_or_else(|| Error::new(ruby.exception_arg_error(), "WebSocket handlers must be a Hash"))?;
|
|
1894
|
+
|
|
1895
|
+
ws_hash.foreach(|path: String, factory: Value| -> Result<ForEach, Error> {
|
|
1896
|
+
let handler_instance = factory.funcall::<_, _, Value>("call", ()).map_err(|e| {
|
|
1897
|
+
Error::new(
|
|
1898
|
+
ruby.exception_runtime_error(),
|
|
1899
|
+
format!("Failed to create WebSocket handler: {}", e),
|
|
1900
|
+
)
|
|
1901
|
+
})?;
|
|
1902
|
+
|
|
1903
|
+
let ws_state = crate::websocket::create_websocket_state(ruby, handler_instance)?;
|
|
1904
|
+
|
|
1905
|
+
ws_endpoints.push((path, ws_state));
|
|
1906
|
+
|
|
1907
|
+
Ok(ForEach::Continue)
|
|
1908
|
+
})?;
|
|
1909
|
+
}
|
|
1910
|
+
|
|
1911
|
+
let mut sse_endpoints = Vec::new();
|
|
1912
|
+
if !sse_producers.is_nil() {
|
|
1913
|
+
let sse_hash = RHash::from_value(sse_producers)
|
|
1914
|
+
.ok_or_else(|| Error::new(ruby.exception_arg_error(), "SSE producers must be a Hash"))?;
|
|
1915
|
+
|
|
1916
|
+
sse_hash.foreach(|path: String, factory: Value| -> Result<ForEach, Error> {
|
|
1917
|
+
let producer_instance = factory.funcall::<_, _, Value>("call", ()).map_err(|e| {
|
|
1918
|
+
Error::new(
|
|
1919
|
+
ruby.exception_runtime_error(),
|
|
1920
|
+
format!("Failed to create SSE producer: {}", e),
|
|
1921
|
+
)
|
|
1922
|
+
})?;
|
|
1923
|
+
|
|
1924
|
+
let sse_state = crate::sse::create_sse_state(ruby, producer_instance)?;
|
|
1925
|
+
|
|
1926
|
+
sse_endpoints.push((path, sse_state));
|
|
1927
|
+
|
|
1928
|
+
Ok(ForEach::Continue)
|
|
1929
|
+
})?;
|
|
1930
|
+
}
|
|
1931
|
+
|
|
1932
|
+
use axum::routing::get;
|
|
1933
|
+
for (path, ws_state) in ws_endpoints {
|
|
1934
|
+
info!("Registered WebSocket endpoint: {}", path);
|
|
1935
|
+
app_router = app_router.route(
|
|
1936
|
+
&path,
|
|
1937
|
+
get(spikard_http::websocket_handler::<crate::websocket::RubyWebSocketHandler>).with_state(ws_state),
|
|
1938
|
+
);
|
|
1939
|
+
}
|
|
1940
|
+
|
|
1941
|
+
for (path, sse_state) in sse_endpoints {
|
|
1942
|
+
info!("Registered SSE endpoint: {}", path);
|
|
1943
|
+
app_router = app_router.route(
|
|
1944
|
+
&path,
|
|
1945
|
+
get(spikard_http::sse_handler::<crate::sse::RubySseEventProducer>).with_state(sse_state),
|
|
1946
|
+
);
|
|
1947
|
+
}
|
|
1948
|
+
|
|
1949
|
+
let addr = format!("{}:{}", config.host, config.port);
|
|
1950
|
+
let socket_addr: std::net::SocketAddr = addr.parse().map_err(|e| {
|
|
1951
|
+
Error::new(
|
|
1952
|
+
ruby.exception_arg_error(),
|
|
1953
|
+
format!("Invalid socket address {}: {}", addr, e),
|
|
1954
|
+
)
|
|
1955
|
+
})?;
|
|
1956
|
+
|
|
1957
|
+
let runtime = tokio::runtime::Builder::new_current_thread()
|
|
1958
|
+
.enable_all()
|
|
1959
|
+
.build()
|
|
1960
|
+
.map_err(|e| {
|
|
1961
|
+
Error::new(
|
|
1962
|
+
ruby.exception_runtime_error(),
|
|
1963
|
+
format!("Failed to create Tokio runtime: {}", e),
|
|
1964
|
+
)
|
|
1965
|
+
})?;
|
|
1966
|
+
|
|
1967
|
+
let background_config = config.background_tasks.clone();
|
|
1968
|
+
|
|
1969
|
+
runtime.block_on(async move {
|
|
1970
|
+
let listener = tokio::net::TcpListener::bind(socket_addr)
|
|
1971
|
+
.await
|
|
1972
|
+
.unwrap_or_else(|_| panic!("Failed to bind to {}", socket_addr));
|
|
1973
|
+
|
|
1974
|
+
info!("Server listening on {}", socket_addr);
|
|
1975
|
+
|
|
1976
|
+
let background_runtime = spikard_http::BackgroundRuntime::start(background_config.clone()).await;
|
|
1977
|
+
crate::background::install_handle(background_runtime.handle());
|
|
1978
|
+
|
|
1979
|
+
let serve_result = axum::serve(listener, app_router).await;
|
|
1980
|
+
|
|
1981
|
+
crate::background::clear_handle();
|
|
1982
|
+
|
|
1983
|
+
if let Err(err) = background_runtime.shutdown().await {
|
|
1984
|
+
warn!("Failed to drain background tasks during shutdown: {:?}", err);
|
|
1985
|
+
}
|
|
1986
|
+
|
|
1987
|
+
if let Err(e) = serve_result {
|
|
1988
|
+
error!("Server error: {}", e);
|
|
1989
|
+
}
|
|
1990
|
+
});
|
|
1991
|
+
|
|
1992
|
+
Ok(())
|
|
1993
|
+
}
|
|
1994
|
+
|
|
1995
|
+
#[magnus::init]
|
|
1996
|
+
pub fn init(ruby: &Ruby) -> Result<(), Error> {
|
|
1997
|
+
let spikard = ruby.define_module("Spikard")?;
|
|
1998
|
+
spikard.define_singleton_method("version", function!(version, 0))?;
|
|
1999
|
+
let native = match spikard.const_get("Native") {
|
|
2000
|
+
Ok(module) => module,
|
|
2001
|
+
Err(_) => spikard.define_module("Native")?,
|
|
2002
|
+
};
|
|
2003
|
+
|
|
2004
|
+
native.define_singleton_method("run_server", function!(run_server, 7))?;
|
|
2005
|
+
native.define_singleton_method("background_run", function!(background::background_run, 1))?;
|
|
2006
|
+
|
|
2007
|
+
let class = native.define_class("TestClient", ruby.class_object())?;
|
|
2008
|
+
class.define_alloc_func::<NativeTestClient>();
|
|
2009
|
+
class.define_method("initialize", method!(NativeTestClient::initialize, 6))?;
|
|
2010
|
+
class.define_method("request", method!(NativeTestClient::request, 3))?;
|
|
2011
|
+
class.define_method("websocket", method!(NativeTestClient::websocket, 1))?;
|
|
2012
|
+
class.define_method("sse", method!(NativeTestClient::sse, 1))?;
|
|
2013
|
+
class.define_method("close", method!(NativeTestClient::close, 0))?;
|
|
2014
|
+
|
|
2015
|
+
let spikard_module = ruby.define_module("Spikard")?;
|
|
2016
|
+
test_websocket::init(ruby, &spikard_module)?;
|
|
2017
|
+
test_sse::init(ruby, &spikard_module)?;
|
|
2018
|
+
|
|
2019
|
+
Ok(())
|
|
2020
|
+
}
|