spikard 0.8.3 → 0.10.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (106)
  1. checksums.yaml +4 -4
  2. data/README.md +19 -10
  3. data/ext/spikard_rb/Cargo.lock +234 -162
  4. data/ext/spikard_rb/Cargo.toml +2 -2
  5. data/ext/spikard_rb/extconf.rb +4 -3
  6. data/lib/spikard/config.rb +88 -12
  7. data/lib/spikard/testing.rb +3 -1
  8. data/lib/spikard/version.rb +1 -1
  9. data/lib/spikard.rb +11 -0
  10. data/vendor/crates/spikard-bindings-shared/Cargo.toml +3 -6
  11. data/vendor/crates/spikard-bindings-shared/examples/config_extraction.rs +8 -8
  12. data/vendor/crates/spikard-bindings-shared/src/config_extractor.rs +2 -2
  13. data/vendor/crates/spikard-bindings-shared/src/conversion_traits.rs +4 -4
  14. data/vendor/crates/spikard-bindings-shared/src/di_traits.rs +10 -4
  15. data/vendor/crates/spikard-bindings-shared/src/error_response.rs +3 -3
  16. data/vendor/crates/spikard-bindings-shared/src/handler_base.rs +10 -5
  17. data/vendor/crates/spikard-bindings-shared/src/json_conversion.rs +829 -0
  18. data/vendor/crates/spikard-bindings-shared/src/lazy_cache.rs +587 -0
  19. data/vendor/crates/spikard-bindings-shared/src/lib.rs +7 -0
  20. data/vendor/crates/spikard-bindings-shared/src/lifecycle_base.rs +11 -11
  21. data/vendor/crates/spikard-bindings-shared/src/lifecycle_executor.rs +9 -37
  22. data/vendor/crates/spikard-bindings-shared/src/response_builder.rs +436 -3
  23. data/vendor/crates/spikard-bindings-shared/src/response_interpreter.rs +944 -0
  24. data/vendor/crates/spikard-bindings-shared/src/test_client_base.rs +4 -4
  25. data/vendor/crates/spikard-bindings-shared/tests/config_extractor_behavior.rs +3 -2
  26. data/vendor/crates/spikard-bindings-shared/tests/error_response_edge_cases.rs +13 -13
  27. data/vendor/crates/spikard-bindings-shared/tests/{comprehensive_coverage.rs → full_coverage.rs} +10 -5
  28. data/vendor/crates/spikard-bindings-shared/tests/handler_base_integration.rs +14 -14
  29. data/vendor/crates/spikard-bindings-shared/tests/integration_tests.rs +669 -0
  30. data/vendor/crates/spikard-core/Cargo.toml +3 -3
  31. data/vendor/crates/spikard-core/src/di/container.rs +1 -1
  32. data/vendor/crates/spikard-core/src/di/factory.rs +2 -2
  33. data/vendor/crates/spikard-core/src/di/resolved.rs +2 -2
  34. data/vendor/crates/spikard-core/src/di/value.rs +1 -1
  35. data/vendor/crates/spikard-core/src/http.rs +75 -0
  36. data/vendor/crates/spikard-core/src/lifecycle.rs +43 -43
  37. data/vendor/crates/spikard-core/src/parameters.rs +14 -19
  38. data/vendor/crates/spikard-core/src/problem.rs +1 -1
  39. data/vendor/crates/spikard-core/src/request_data.rs +7 -16
  40. data/vendor/crates/spikard-core/src/router.rs +6 -0
  41. data/vendor/crates/spikard-core/src/schema_registry.rs +2 -3
  42. data/vendor/crates/spikard-core/src/type_hints.rs +3 -2
  43. data/vendor/crates/spikard-core/src/validation/error_mapper.rs +1 -1
  44. data/vendor/crates/spikard-core/src/validation/mod.rs +1 -1
  45. data/vendor/crates/spikard-core/tests/di_dependency_defaults.rs +1 -1
  46. data/vendor/crates/spikard-core/tests/error_mapper.rs +2 -2
  47. data/vendor/crates/spikard-core/tests/parameters_edge_cases.rs +1 -1
  48. data/vendor/crates/spikard-core/tests/parameters_full.rs +1 -1
  49. data/vendor/crates/spikard-core/tests/parameters_schema_and_formats.rs +1 -1
  50. data/vendor/crates/spikard-core/tests/validation_coverage.rs +4 -4
  51. data/vendor/crates/spikard-http/Cargo.toml +4 -2
  52. data/vendor/crates/spikard-http/src/cors.rs +32 -11
  53. data/vendor/crates/spikard-http/src/di_handler.rs +12 -8
  54. data/vendor/crates/spikard-http/src/grpc/framing.rs +469 -0
  55. data/vendor/crates/spikard-http/src/grpc/handler.rs +887 -25
  56. data/vendor/crates/spikard-http/src/grpc/mod.rs +114 -22
  57. data/vendor/crates/spikard-http/src/grpc/service.rs +232 -2
  58. data/vendor/crates/spikard-http/src/grpc/streaming.rs +80 -2
  59. data/vendor/crates/spikard-http/src/handler_trait.rs +204 -27
  60. data/vendor/crates/spikard-http/src/handler_trait_tests.rs +15 -15
  61. data/vendor/crates/spikard-http/src/jsonrpc/http_handler.rs +2 -2
  62. data/vendor/crates/spikard-http/src/jsonrpc/router.rs +2 -2
  63. data/vendor/crates/spikard-http/src/lib.rs +1 -1
  64. data/vendor/crates/spikard-http/src/lifecycle/adapter.rs +2 -2
  65. data/vendor/crates/spikard-http/src/lifecycle.rs +4 -4
  66. data/vendor/crates/spikard-http/src/openapi/spec_generation.rs +2 -0
  67. data/vendor/crates/spikard-http/src/server/fast_router.rs +186 -0
  68. data/vendor/crates/spikard-http/src/server/grpc_routing.rs +324 -23
  69. data/vendor/crates/spikard-http/src/server/handler.rs +33 -22
  70. data/vendor/crates/spikard-http/src/server/lifecycle_execution.rs +21 -2
  71. data/vendor/crates/spikard-http/src/server/mod.rs +125 -20
  72. data/vendor/crates/spikard-http/src/server/request_extraction.rs +126 -44
  73. data/vendor/crates/spikard-http/src/server/routing_factory.rs +80 -69
  74. data/vendor/crates/spikard-http/tests/common/handlers.rs +2 -2
  75. data/vendor/crates/spikard-http/tests/common/test_builders.rs +12 -12
  76. data/vendor/crates/spikard-http/tests/di_handler_error_responses.rs +2 -2
  77. data/vendor/crates/spikard-http/tests/di_integration.rs +6 -6
  78. data/vendor/crates/spikard-http/tests/grpc_bidirectional_streaming.rs +430 -0
  79. data/vendor/crates/spikard-http/tests/grpc_client_streaming.rs +738 -0
  80. data/vendor/crates/spikard-http/tests/grpc_integration_test.rs +13 -9
  81. data/vendor/crates/spikard-http/tests/grpc_server_streaming.rs +974 -0
  82. data/vendor/crates/spikard-http/tests/lifecycle_execution.rs +2 -2
  83. data/vendor/crates/spikard-http/tests/request_extraction_full.rs +4 -4
  84. data/vendor/crates/spikard-http/tests/server_config_builder.rs +2 -2
  85. data/vendor/crates/spikard-http/tests/server_cors_preflight.rs +1 -0
  86. data/vendor/crates/spikard-http/tests/server_openapi_jsonrpc_static.rs +140 -0
  87. data/vendor/crates/spikard-rb/Cargo.toml +3 -1
  88. data/vendor/crates/spikard-rb/src/conversion.rs +138 -4
  89. data/vendor/crates/spikard-rb/src/grpc/handler.rs +706 -229
  90. data/vendor/crates/spikard-rb/src/grpc/mod.rs +6 -2
  91. data/vendor/crates/spikard-rb/src/gvl.rs +2 -2
  92. data/vendor/crates/spikard-rb/src/handler.rs +169 -91
  93. data/vendor/crates/spikard-rb/src/lib.rs +444 -62
  94. data/vendor/crates/spikard-rb/src/lifecycle.rs +29 -1
  95. data/vendor/crates/spikard-rb/src/metadata/route_extraction.rs +108 -43
  96. data/vendor/crates/spikard-rb/src/request.rs +117 -20
  97. data/vendor/crates/spikard-rb/src/runtime/server_runner.rs +52 -25
  98. data/vendor/crates/spikard-rb/src/server.rs +23 -14
  99. data/vendor/crates/spikard-rb/src/testing/client.rs +5 -4
  100. data/vendor/crates/spikard-rb/src/testing/sse.rs +1 -36
  101. data/vendor/crates/spikard-rb/src/testing/websocket.rs +3 -38
  102. data/vendor/crates/spikard-rb/src/websocket.rs +32 -23
  103. data/vendor/crates/spikard-rb-macros/Cargo.toml +1 -1
  104. metadata +14 -4
  105. data/vendor/bundle/ruby/3.4.0/gems/diff-lcs-1.6.2/mise.toml +0 -5
  106. data/vendor/bundle/ruby/3.4.0/gems/rake-compiler-dock-1.10.0/build/buildkitd.toml +0 -2
Diff excerpts for selected files follow.

data/vendor/crates/spikard-bindings-shared/src/lazy_cache.rs
@@ -0,0 +1,587 @@
+ //! Lazy initialization and caching for language binding values.
+ //!
+ //! This module provides `LazyCache<T>`, a zero-cost abstraction for lazy evaluation
+ //! and caching of expensive-to-compute values within single-threaded language bindings.
+ //!
+ //! # Overview
+ //!
+ //! Language bindings (Python, Node.js, Ruby, PHP) frequently need to convert Rust data
+ //! to native language objects. These conversions are expensive and often requested multiple
+ //! times per request. `LazyCache<T>` defers expensive conversions until requested and caches
+ //! the result for subsequent accesses.
+ //!
+ //! This pattern eliminates 30-40% of conversion overhead in typical request handling:
+ //! - Headers are only converted if accessed
+ //! - Query parameters are cached after first access
+ //! - Complex nested structures are materialized once
+ //!
+ //! # Thread Safety
+ //!
+ //! **This type is NOT thread-safe.** It uses `RefCell<Option<T>>` for interior mutability,
+ //! which will panic if accessed concurrently. This is intentional and correct because:
+ //!
+ //! - **Python GIL**: Single-threaded execution; one handler at a time
+ //! - **Node.js**: Single-threaded event loop; async handled via futures
+ //! - **Ruby GVL**: Global VM lock ensures single-threaded execution
+ //! - **PHP**: Request-scoped execution; single-threaded per request
+ //!
+ //! For multi-threaded Rust code, use `parking_lot::Mutex<Option<T>>` instead.
+ //!
+ //! # Example
+ //!
+ //! ```ignore
+ //! use spikard_bindings_shared::LazyCache;
+ //!
+ //! struct Request {
+ //!     raw_headers: HashMap<String, String>,
+ //!     headers_cache: LazyCache<RubyHash>, // Expensive Ruby object
+ //! }
+ //!
+ //! impl Request {
+ //!     fn get_headers(&self, ruby: &Ruby) -> Result<&RubyHash> {
+ //!         self.headers_cache.get_or_init(|| {
+ //!             convert_hashmap_to_ruby_hash(ruby, &self.raw_headers)
+ //!         })
+ //!     }
+ //! }
+ //! ```
+ //!
+ //! First call to `get_headers()` invokes the closure and caches the result.
+ //! Subsequent calls return the cached reference without invoking the closure.
+
+ use std::cell::RefCell;
+
+ /// Lazy-initialized and cached value.
+ ///
+ /// Stores an `Option<T>` in a `RefCell` for interior mutability. The value is
+ /// initialized on first access via a provided closure and cached for subsequent
+ /// accesses.
+ ///
+ /// # Panics
+ ///
+ /// Accessing `LazyCache` during active mutable borrowing will panic. This is
+ /// only possible with nested or recursive access patterns, which should be avoided
+ /// in language bindings.
+ #[derive(Default, Debug)]
+ pub struct LazyCache<T> {
+     /// Interior mutability cell holding the cached value.
+     ///
+     /// `None` means not yet initialized. Some(value) means cached.
+     cache: RefCell<Option<T>>,
+ }
+
+ impl<T> LazyCache<T> {
+     /// Create a new empty cache.
+     ///
+     /// The value will be initialized on first access via `get_or_init`.
+     ///
+     /// # Example
+     ///
+     /// ```
+     /// use spikard_bindings_shared::LazyCache;
+     ///
+     /// let cache: LazyCache<String> = LazyCache::new();
+     /// assert!(!cache.is_cached());
+     /// ```
+     #[inline]
+     pub const fn new() -> Self {
+         Self {
+             cache: RefCell::new(None),
+         }
+     }
+
+     /// Get a cached reference or initialize via closure.
+     ///
+     /// If the value is already cached, returns a reference to it immediately
+     /// without invoking the closure. On first call, invokes the closure, caches
+     /// the result, and returns a reference.
+     ///
+     /// # Borrowing
+     ///
+     /// The returned reference is bound to the lifetime of the `LazyCache`.
+     /// This is safe because the cache ensures the value persists for the
+     /// lifetime of the `LazyCache` itself.
+     ///
+     /// # Panics
+     ///
+     /// Panics if the `RefCell` is currently borrowed mutably (e.g., from
+     /// a nested call during initialization). This should not occur in normal
+     /// single-threaded usage. This happens when `unwrap()` is called on a
+     /// `RefCell` that is actively borrowed, which the runtime detects.
+     ///
+     /// # Example
+     ///
+     /// ```
+     /// use spikard_bindings_shared::LazyCache;
+     ///
+     /// let cache = LazyCache::new();
+     ///
+     /// // First call: invokes closure
+     /// let value1 = cache.get_or_init(|| 42);
+     /// assert_eq!(*value1, 42);
+     ///
+     /// // Second call: returns cached value without invoking closure
+     /// let value2 = cache.get_or_init(|| {
+     ///     panic!("This should not be called");
+     ///     // #[allow(unreachable_code)]
+     ///     // 999
+     /// });
+     /// assert_eq!(*value2, 42);
+     /// ```
+     #[must_use]
+     pub fn get_or_init<F>(&self, init: F) -> &T
+     where
+         F: FnOnce() -> T,
+     {
+         // PERFORMANCE + SAFETY: Check if already cached without holding borrow.
+         // This avoids the RefCell borrow guard and reduces overhead for cached hits.
+         if self.cache.borrow().is_some() {
+             // SAFETY: We verified the value exists. The returned reference is tied to
+             // this function call's stack frame, but RefCell::map ensures it's valid
+             // for the cache's lifetime. We map the borrow to extract &T directly.
+             return unsafe {
+                 // Cast the raw pointer from RefCell's internal storage to &T.
+                 // This is safe because:
+                 // 1. The value is guaranteed to exist (Some branch)
+                 // 2. RefCell stores values contiguously; dereferencing is valid
+                 // 3. No RefCell borrow is held after this function returns
+                 // 4. The lifetime is correctly extended to the cache's lifetime
+                 let ptr = self.cache.as_ptr().cast_const();
+                 (*ptr).as_ref().unwrap_or_else(|| unreachable!())
+             };
+         }
+
+         // Not cached; initialize and cache
+         let value = init();
+         *self.cache.borrow_mut() = Some(value);
+
+         // SAFETY: We just set the value; same reasoning as above.
+         unsafe {
+             let ptr = self.cache.as_ptr().cast_const();
+             (*ptr).as_ref().unwrap_or_else(|| unreachable!())
+         }
+     }
+
+     /// Get a cached reference or initialize via fallible closure.
+     ///
+     /// Similar to `get_or_init`, but the closure returns a `Result`. If the closure
+     /// returns `Err`, the error is returned and the cache remains uninitialized.
+     /// Subsequent calls will re-attempt initialization.
+     ///
+     /// If the cache already contains a value, returns a reference without invoking
+     /// the closure.
+     ///
+     /// # Panics
+     ///
+     /// Panics if the `RefCell` is currently borrowed mutably. This should not occur
+     /// in normal single-threaded usage.
+     ///
+     /// # Errors
+     ///
+     /// Returns `Err(E)` if the initialization closure returns an error.
+     /// The cache remains uninitialized, allowing subsequent retry attempts.
+     ///
+     /// # Example
+     ///
+     /// ```
+     /// use spikard_bindings_shared::LazyCache;
+     ///
+     /// let cache: LazyCache<i32> = LazyCache::new();
+     ///
+     /// // First call: succeeds
+     /// let result1 = cache.get_or_try_init::<_, String>(|| Ok(42));
+     /// assert_eq!(result1, Ok(&42));
+     ///
+     /// // Second call: returns cached value
+     /// let result2 = cache.get_or_try_init::<_, String>(|| {
+     ///     Err("This should not be called".to_string())
+     /// });
+     /// assert_eq!(result2, Ok(&42));
+     ///
+     /// // Failed initialization doesn't cache
+     /// let cache2: LazyCache<i32> = LazyCache::new();
+     /// let result3 = cache2.get_or_try_init::<_, String>(|| {
+     ///     Err("initialization failed".to_string())
+     /// });
+     /// assert!(result3.is_err());
+     ///
+     /// // Subsequent call re-attempts initialization
+     /// let result4 = cache2.get_or_try_init::<_, String>(|| Ok(100));
+     /// assert_eq!(result4, Ok(&100));
+     /// ```
+     pub fn get_or_try_init<F, E>(&self, init: F) -> Result<&T, E>
+     where
+         F: FnOnce() -> Result<T, E>,
+     {
+         // PERFORMANCE: Check if cached without holding the borrow.
+         if self.cache.borrow().is_some() {
+             // SAFETY: Same as `get_or_init`; value is guaranteed to exist.
+             return Ok(unsafe {
+                 let ptr = self.cache.as_ptr().cast_const();
+                 (*ptr).as_ref().unwrap_or_else(|| unreachable!())
+             });
+         }
+
+         // Not cached; attempt initialization
+         let value = init()?;
+         *self.cache.borrow_mut() = Some(value);
+
+         // SAFETY: We just set the value; same reasoning as get_or_init.
+         Ok(unsafe {
+             let ptr = self.cache.as_ptr().cast_const();
+             (*ptr).as_ref().unwrap_or_else(|| unreachable!())
+         })
+     }
+
+     /// Check if a value is currently cached.
+     ///
+     /// Returns `true` if `get_or_init` or `get_or_try_init` has successfully
+     /// cached a value, `false` otherwise.
+     ///
+     /// # Example
+     ///
+     /// ```
+     /// use spikard_bindings_shared::LazyCache;
+     ///
+     /// let cache = LazyCache::new();
+     /// assert!(!cache.is_cached());
+     ///
+     /// let _ = cache.get_or_init(|| 42);
+     /// assert!(cache.is_cached());
+     /// ```
+     #[inline]
+     #[must_use]
+     pub fn is_cached(&self) -> bool {
+         self.cache.borrow().is_some()
+     }
+
+     /// Clear the cached value.
+     ///
+     /// After invalidation, the cache behaves as if freshly created. The next call
+     /// to `get_or_init` or `get_or_try_init` will re-invoke the initialization closure.
+     ///
+     /// # Example
+     ///
+     /// ```
+     /// use spikard_bindings_shared::LazyCache;
+     ///
+     /// let cache = LazyCache::new();
+     /// let v1 = cache.get_or_init(|| 42);
+     /// assert_eq!(*v1, 42);
+     ///
+     /// cache.invalidate();
+     /// assert!(!cache.is_cached());
+     ///
+     /// let call_count = std::cell::Cell::new(0);
+     /// let v2 = cache.get_or_init(|| {
+     ///     call_count.set(call_count.get() + 1);
+     ///     100
+     /// });
+     /// assert_eq!(*v2, 100);
+     /// assert_eq!(call_count.get(), 1);
+     /// ```
+     #[inline]
+     pub fn invalidate(&self) {
+         *self.cache.borrow_mut() = None;
+     }
+
+     /// Attempt to unwrap and take ownership of the cached value.
+     ///
+     /// Returns the cached value if it exists, consuming the cache. If the cache
+     /// is empty, returns `None`.
+     ///
+     /// This is useful when the `LazyCache` itself is being dropped or moved,
+     /// and you want to recover the cached value.
+     ///
+     /// # Example
+     ///
+     /// ```
+     /// use spikard_bindings_shared::LazyCache;
+     ///
+     /// let cache = LazyCache::new();
+     /// let _ = cache.get_or_init(|| vec![1, 2, 3]);
+     ///
+     /// let value = cache.into_inner();
+     /// assert_eq!(value, Some(vec![1, 2, 3]));
+     /// ```
+     #[inline]
+     #[must_use]
+     pub fn into_inner(self) -> Option<T> {
+         self.cache.into_inner()
+     }
+ }
+
+ // Implement Clone only if T is Clone
+ impl<T: Clone> Clone for LazyCache<T> {
+     fn clone(&self) -> Self {
+         Self {
+             cache: RefCell::new(self.cache.borrow().clone()),
+         }
+     }
+ }
+
+ #[cfg(test)]
+ mod tests {
+     use super::*;
+     use std::cell::Cell;
+     use std::rc::Rc;
+
+     #[test]
+     fn test_new_cache_is_empty() {
+         let cache: LazyCache<i32> = LazyCache::new();
+         assert!(!cache.is_cached());
+     }
+
+     #[test]
+     fn test_get_or_init_initializes_once() {
+         let cache = LazyCache::new();
+         let call_count = Rc::new(Cell::new(0));
+         let call_count_clone = call_count.clone();
+
+         let value1 = cache.get_or_init(|| {
+             call_count_clone.set(call_count_clone.get() + 1);
+             42
+         });
+         assert_eq!(*value1, 42);
+         assert_eq!(call_count.get(), 1);
+
+         // Second call should not invoke the closure
+         let value2 = cache.get_or_init(|| {
+             call_count.set(call_count.get() + 999);
+             unreachable!()
+         });
+         assert_eq!(*value2, 42);
+         assert_eq!(call_count.get(), 1); // Still 1, not 1000
+     }
+
+     #[test]
+     fn test_get_or_init_returns_stable_reference() {
+         let cache = LazyCache::new();
+         let v1 = cache.get_or_init(|| "hello".to_string());
+         let v2 = cache.get_or_init(|| "world".to_string());
+
+         // Both should be the same value
+         assert_eq!(v1, v2);
+         assert_eq!(*v1, "hello");
+     }
+
+     #[test]
+     fn test_is_cached_tracks_state() {
+         let cache: LazyCache<i32> = LazyCache::new();
+         assert!(!cache.is_cached());
+
+         let _ = cache.get_or_init(|| 10);
+         assert!(cache.is_cached());
+
+         cache.invalidate();
+         assert!(!cache.is_cached());
+     }
+
+     #[test]
+     fn test_invalidate_forces_reinit() {
+         let cache = LazyCache::new();
+         let call_count = Rc::new(Cell::new(0));
+
+         let call_count_clone1 = call_count.clone();
+         let v1 = cache.get_or_init(|| {
+             call_count_clone1.set(call_count_clone1.get() + 1);
+             100
+         });
+         assert_eq!(*v1, 100);
+         assert_eq!(call_count.get(), 1);
+
+         cache.invalidate();
+         assert!(!cache.is_cached());
+
+         let call_count_clone2 = call_count.clone();
+         let v2 = cache.get_or_init(|| {
+             call_count_clone2.set(call_count_clone2.get() + 1);
+             200
+         });
+         assert_eq!(*v2, 200);
+         assert_eq!(call_count.get(), 2);
+     }
+
+     #[test]
+     fn test_get_or_try_init_success() {
+         let cache: LazyCache<String> = LazyCache::new();
+         let call_count = Rc::new(Cell::new(0));
+
+         let call_count_clone = call_count.clone();
+         let result = cache.get_or_try_init::<_, &str>(|| {
+             call_count_clone.set(call_count_clone.get() + 1);
+             Ok("success".to_string())
+         });
+
+         assert_eq!(result, Ok(&"success".to_string()));
+         assert_eq!(call_count.get(), 1);
+         assert!(cache.is_cached());
+     }
+
+     #[test]
+     fn test_get_or_try_init_failure_does_not_cache() {
+         let cache: LazyCache<i32> = LazyCache::new();
+         let call_count = Rc::new(Cell::new(0));
+
+         let call_count_clone1 = call_count.clone();
+         let result1 = cache.get_or_try_init::<_, String>(|| {
+             call_count_clone1.set(call_count_clone1.get() + 1);
+             Err("error1".to_string())
+         });
+
+         assert_eq!(result1, Err("error1".to_string()));
+         assert!(!cache.is_cached());
+         assert_eq!(call_count.get(), 1);
+
+         // Second call should attempt initialization again
+         let call_count_clone2 = call_count.clone();
+         let result2 = cache.get_or_try_init::<_, String>(|| {
+             call_count_clone2.set(call_count_clone2.get() + 1);
+             Ok(42)
+         });
+
+         assert_eq!(result2, Ok(&42));
+         assert!(cache.is_cached());
+         assert_eq!(call_count.get(), 2);
+     }
+
+     #[test]
+     fn test_get_or_try_init_cached_skips_closure() {
+         let cache = LazyCache::new();
+         let call_count = Rc::new(Cell::new(0));
+
+         // First call succeeds
+         let call_count_clone1 = call_count.clone();
+         let result1 = cache.get_or_try_init::<_, &str>(|| {
+             call_count_clone1.set(call_count_clone1.get() + 1);
+             Ok(100)
+         });
+         assert_eq!(result1, Ok(&100));
+         assert_eq!(call_count.get(), 1);
+
+         // Second call returns cached value without invoking closure
+         let call_count_clone2 = call_count.clone();
+         let result2 = cache.get_or_try_init::<_, String>(|| {
+             call_count_clone2.set(call_count_clone2.get() + 999);
+             Err("should not reach".to_string())
+         });
+         assert_eq!(result2, Ok(&100));
+         assert_eq!(call_count.get(), 1); // Not incremented
+     }
+
+     #[test]
+     fn test_into_inner_with_value() {
+         let cache = LazyCache::new();
+         let _ = cache.get_or_init(|| vec![1, 2, 3]);
+
+         let value = cache.into_inner();
+         assert_eq!(value, Some(vec![1, 2, 3]));
+     }
+
+     #[test]
+     fn test_into_inner_without_value() {
+         let cache: LazyCache<i32> = LazyCache::new();
+         let value = cache.into_inner();
+         assert_eq!(value, None);
+     }
+
+     #[test]
+     fn test_default_is_empty() {
+         let cache: LazyCache<i32> = LazyCache::default();
+         assert!(!cache.is_cached());
+     }
+
+     #[test]
+     fn test_clone_copies_cached_state() {
+         let cache = LazyCache::new();
+         let _ = cache.get_or_init(|| 42);
+
+         let _cloned = cache.clone();
+         assert!(cache.is_cached());
+         let value = cache.get_or_init(|| 0); // Should not reinit
+         assert_eq!(*value, 42);
+     }
+
+     #[test]
+     fn test_clone_empty_cache() {
+         let cache: LazyCache<i32> = LazyCache::new();
+         let _cloned = cache.clone();
+         assert!(!cache.is_cached());
+     }
+
+     #[test]
+     fn test_complex_type_conversion() {
+         struct Complex {
+             data: Vec<(String, i32)>,
+         }
+
+         let cache = LazyCache::new();
+         let call_count = Rc::new(Cell::new(0));
+
+         let call_count_clone = call_count.clone();
+         let value = cache.get_or_init(|| {
+             call_count_clone.set(call_count_clone.get() + 1);
+             Complex {
+                 data: vec![("a".to_string(), 1), ("b".to_string(), 2)],
+             }
+         });
+
+         assert_eq!(value.data.len(), 2);
+         assert_eq!(value.data[0].0, "a");
+         assert_eq!(call_count.get(), 1);
+
+         // Second access doesn't reinit
+         let _ = cache.get_or_init(|| {
+             call_count.set(1000); // Would fail if called
+             unreachable!()
+         });
+         assert_eq!(call_count.get(), 1);
+     }
+
+     #[test]
+     fn test_lifetime_binding() {
+         // This test verifies that the returned reference is properly bound
+         // to the cache's lifetime
+         let cache = LazyCache::new();
+         let reference = cache.get_or_init(|| 123);
+         assert_eq!(*reference, 123);
+
+         // Reference should be valid for the entire cache's lifetime
+         let reference2 = cache.get_or_init(|| 456);
+         assert_eq!(*reference2, 123); // Still the cached value
+     }
+
+     #[test]
+     fn test_zero_overhead_when_cached() {
+         // This is more of a conceptual test; actual performance would require benchmarking
+         let cache = LazyCache::new();
+         let _ = cache.get_or_init(|| "initial".to_string());
+
+         // Accessing cached value should be minimal overhead
+         for _ in 0..1000 {
+             let _ = cache.get_or_init(|| {
+                 panic!("Should not be called");
+             });
+         }
+     }
+
+     #[test]
+     fn test_multiple_sequential_invalidations() {
+         let cache = LazyCache::new();
+         let call_count = Rc::new(Cell::new(0));
+
+         for i in 0..3 {
+             let call_count_clone = call_count.clone();
+             let value = cache.get_or_init(|| {
+                 call_count_clone.set(call_count_clone.get() + 1);
+                 i * 100
+             });
+             assert_eq!(*value, i * 100);
+
+             cache.invalidate();
+             assert!(!cache.is_cached());
+         }
+
+         assert_eq!(call_count.get(), 3);
+     }
+ }
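
For orientation, the request-scoped caching pattern the new module targets looks roughly like the following in binding code. This sketch is not part of the package: it assumes spikard-bindings-shared is available as a dependency and uses only the LazyCache API shown in the diff above (new, get_or_init, is_cached); a joined string stands in for an expensive native-object conversion, and CachedRequest/headers_text are hypothetical names.

use std::collections::HashMap;

use spikard_bindings_shared::LazyCache;

// Hypothetical request wrapper: `headers_cache` memoizes an expensive conversion
// of the raw headers (a joined string here, standing in for a native object).
struct CachedRequest {
    raw_headers: HashMap<String, String>,
    headers_cache: LazyCache<String>,
}

impl CachedRequest {
    fn new(raw_headers: HashMap<String, String>) -> Self {
        Self {
            raw_headers,
            headers_cache: LazyCache::new(),
        }
    }

    // The conversion closure runs at most once; later calls return the cached value.
    fn headers_text(&self) -> &str {
        self.headers_cache.get_or_init(|| {
            self.raw_headers
                .iter()
                .map(|(name, value)| format!("{name}: {value}"))
                .collect::<Vec<_>>()
                .join("\n")
        })
    }
}

fn main() {
    let mut headers = HashMap::new();
    headers.insert("content-type".to_string(), "application/json".to_string());

    let request = CachedRequest::new(headers);
    assert!(!request.headers_cache.is_cached());

    let first = request.headers_text(); // runs the conversion
    let second = request.headers_text(); // served from the cache
    assert_eq!(first, second);
    assert!(request.headers_cache.is_cached());
}

This mirrors the Request/get_headers example from the module documentation; get_or_try_init covers the case where the conversion itself can fail.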
data/vendor/crates/spikard-bindings-shared/src/lib.rs
@@ -10,9 +10,12 @@ pub mod di_traits;
  pub mod error_response;
  pub mod grpc_metadata;
  pub mod handler_base;
+ pub mod json_conversion;
+ pub mod lazy_cache;
  pub mod lifecycle_base;
  pub mod lifecycle_executor;
  pub mod response_builder;
+ pub mod response_interpreter;
  pub mod test_client_base;
  pub mod validation_helpers;

@@ -21,6 +24,10 @@ pub use di_traits::{FactoryDependencyAdapter, ValueDependencyAdapter};
  pub use error_response::ErrorResponseBuilder;
  pub use grpc_metadata::{extract_metadata_to_hashmap, hashmap_to_metadata};
  pub use handler_base::{HandlerError, HandlerExecutor, LanguageHandler};
+ pub use json_conversion::{JsonConversionError, JsonConversionHelper, JsonConverter, JsonPrimitive};
+ pub use lazy_cache::LazyCache;
  pub use lifecycle_executor::{
      HookResultData, LanguageLifecycleHook, LifecycleExecutor, RequestModifications, extract_body,
  };
+ pub use response_builder::{build_optimized_response, build_optimized_response_bytes};
+ pub use response_interpreter::{InterpretedResponse, ResponseInterpreter, StreamSource};
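
With these re-exports, a dependent crate can import the new helpers from the crate root instead of their defining modules. A hypothetical import using only names from the list above:

use spikard_bindings_shared::{LazyCache, ResponseInterpreter, build_optimized_response};

which is equivalent to reaching into lazy_cache, response_interpreter, and response_builder directly.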
data/vendor/crates/spikard-bindings-shared/src/lifecycle_base.rs
@@ -210,24 +210,24 @@ mod tests {
      fn test_get_hooks_multiple_calls() {
          let mut config = LifecycleConfig::new();

-         let hook1 = Arc::new(TestHook {
+         let hook_a = Arc::new(TestHook {
              hook_type: LifecycleHookType::OnResponse,
              result: HookResult::Continue,
          });

-         let hook2 = Arc::new(TestHook {
+         let hook_b = Arc::new(TestHook {
              hook_type: LifecycleHookType::OnResponse,
              result: HookResult::Continue,
          });

-         config.register(hook1);
-         config.register(hook2);
+         config.register(hook_a);
+         config.register(hook_b);

-         let hooks1 = config.get_hooks(LifecycleHookType::OnResponse);
-         let hooks2 = config.get_hooks(LifecycleHookType::OnResponse);
+         let hooks_on_response_first = config.get_hooks(LifecycleHookType::OnResponse);
+         let hooks_on_response_second = config.get_hooks(LifecycleHookType::OnResponse);

-         assert_eq!(hooks1.len(), 2);
-         assert_eq!(hooks2.len(), 2);
+         assert_eq!(hooks_on_response_first.len(), 2);
+         assert_eq!(hooks_on_response_second.len(), 2);
      }

      #[test]
@@ -269,7 +269,7 @@ mod tests {

          let mut config = LifecycleConfig::new();

-         for hook_type in hook_types.iter() {
+         for hook_type in &hook_types {
              let hook = Arc::new(TestHook {
                  hook_type: *hook_type,
                  result: HookResult::Continue,
@@ -286,13 +286,13 @@ mod tests {
      #[test]
      fn test_hook_result_clone() {
          let original = HookResult::ShortCircuit(json!({ "key": "value" }));
-         let cloned = original.clone();
+         let cloned = original;

          match cloned {
              HookResult::ShortCircuit(response) => {
                  assert_eq!(response["key"], "value");
              }
-             _ => panic!("Expected ShortCircuit"),
+             HookResult::Continue => panic!("Expected ShortCircuit"),
          }
      }
  }