@typokit/transform-native 0.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +148 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +290 -0
- package/dist/index.js.map +1 -0
- package/package.json +42 -0
- package/src/env.d.ts +40 -0
- package/src/index.d.ts +135 -0
- package/src/index.test.ts +878 -0
- package/src/index.ts +437 -0
- package/src/lib.rs +388 -0
- package/src/openapi_generator.rs +525 -0
- package/src/output_pipeline.rs +234 -0
- package/src/parser.rs +105 -0
- package/src/route_compiler.rs +615 -0
- package/src/schema_differ.rs +393 -0
- package/src/test_stub_generator.rs +318 -0
- package/src/type_extractor.rs +370 -0
- package/src/typia_bridge.rs +179 -0
|
@@ -0,0 +1,234 @@
|
|
|
1
|
+
use std::collections::HashMap;
|
|
2
|
+
use std::fs;
|
|
3
|
+
use sha2::{Sha256, Digest};
|
|
4
|
+
|
|
5
|
+
use crate::parser;
|
|
6
|
+
use crate::route_compiler;
|
|
7
|
+
use crate::openapi_generator;
|
|
8
|
+
use crate::test_stub_generator;
|
|
9
|
+
use crate::typia_bridge;
|
|
10
|
+
use crate::type_extractor::TypeMetadata;
|
|
11
|
+
|
|
12
|
+
/// Result of running the full output pipeline (see [`run_pipeline`]).
#[derive(Debug, Clone)]
pub struct PipelineResult {
    /// Hex-encoded SHA-256 hash over all input source files (paths and
    /// contents); intended to be used as a cache key.
    pub content_hash: String,
    /// Extracted type metadata, keyed by type name (SchemaTypeMap-compatible).
    pub types: HashMap<String, TypeMetadata>,
    /// Compiled route radix tree, serialized as TypeScript source.
    pub compiled_routes: String,
    /// OpenAPI 3.1.0 specification, serialized as a JSON string.
    pub openapi_spec: String,
    /// Generated contract test stubs as TypeScript source.
    pub test_stubs: String,
    /// Per-type inputs ready to hand to the Typia bridge callback.
    pub validator_inputs: Vec<typia_bridge::TypeValidatorInput>,
}
|
|
28
|
+
|
|
29
|
+
/// Compute a SHA-256 content hash of the given file paths.
|
|
30
|
+
///
|
|
31
|
+
/// The hash is computed over sorted file paths and their contents to ensure
|
|
32
|
+
/// deterministic results regardless of input order. Returns a hex-encoded hash string.
|
|
33
|
+
pub fn compute_content_hash(file_paths: &[String]) -> Result<String, String> {
|
|
34
|
+
let mut hasher = Sha256::new();
|
|
35
|
+
|
|
36
|
+
// Sort paths for deterministic ordering
|
|
37
|
+
let mut sorted: Vec<&String> = file_paths.iter().collect();
|
|
38
|
+
sorted.sort();
|
|
39
|
+
|
|
40
|
+
for path in sorted {
|
|
41
|
+
// Include the path itself in the hash for rename detection
|
|
42
|
+
hasher.update(path.as_bytes());
|
|
43
|
+
let content = fs::read(path)
|
|
44
|
+
.map_err(|e| format!("Failed to read file {}: {}", path, e))?;
|
|
45
|
+
hasher.update(&content);
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
let hash = hasher.finalize();
|
|
49
|
+
Ok(hex::encode(hash))
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
/// Run the full output pipeline: parse types, compile routes, generate OpenAPI,
|
|
53
|
+
/// generate test stubs, and prepare validator inputs.
|
|
54
|
+
///
|
|
55
|
+
/// Returns a PipelineResult containing all generated outputs plus a content hash
|
|
56
|
+
/// that can be used for caching.
|
|
57
|
+
pub fn run_pipeline(
|
|
58
|
+
type_file_paths: &[String],
|
|
59
|
+
route_file_paths: &[String],
|
|
60
|
+
) -> Result<PipelineResult, String> {
|
|
61
|
+
// 1. Compute content hash of all input files
|
|
62
|
+
let mut all_paths: Vec<String> = Vec::new();
|
|
63
|
+
all_paths.extend_from_slice(type_file_paths);
|
|
64
|
+
all_paths.extend_from_slice(route_file_paths);
|
|
65
|
+
let content_hash = compute_content_hash(&all_paths)?;
|
|
66
|
+
|
|
67
|
+
// 2. Parse and extract types
|
|
68
|
+
let types = parser::parse_and_extract_types(type_file_paths)?;
|
|
69
|
+
|
|
70
|
+
// 3. Compile route contracts into radix tree
|
|
71
|
+
let mut all_route_entries = Vec::new();
|
|
72
|
+
for path in route_file_paths {
|
|
73
|
+
let source = fs::read_to_string(path)
|
|
74
|
+
.map_err(|e| format!("Failed to read file {}: {}", path, e))?;
|
|
75
|
+
let parsed = parser::parse_typescript(path, &source)?;
|
|
76
|
+
let entries = route_compiler::extract_route_contracts(&parsed.module);
|
|
77
|
+
all_route_entries.extend(entries);
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
let tree = route_compiler::build_radix_tree(&all_route_entries)?;
|
|
81
|
+
let compiled_routes = route_compiler::serialize_to_typescript(&tree);
|
|
82
|
+
|
|
83
|
+
// 4. Generate OpenAPI spec
|
|
84
|
+
let openapi_spec = openapi_generator::generate_openapi(&all_route_entries, &types);
|
|
85
|
+
|
|
86
|
+
// 5. Generate test stubs
|
|
87
|
+
let test_stubs = test_stub_generator::generate_test_stubs(&all_route_entries);
|
|
88
|
+
|
|
89
|
+
// 6. Prepare validator inputs
|
|
90
|
+
let validator_inputs = typia_bridge::prepare_validator_inputs(&types);
|
|
91
|
+
|
|
92
|
+
Ok(PipelineResult {
|
|
93
|
+
content_hash,
|
|
94
|
+
types,
|
|
95
|
+
compiled_routes,
|
|
96
|
+
openapi_spec,
|
|
97
|
+
test_stubs,
|
|
98
|
+
validator_inputs,
|
|
99
|
+
})
|
|
100
|
+
}
|
|
101
|
+
|
|
102
|
+
#[cfg(test)]
mod tests {
    use super::*;
    use std::io::Write;
    use std::sync::atomic::{AtomicU64, Ordering};

    // Monotonic counter so two files created within the same clock tick
    // (pid + nanos alone can collide on coarse clocks) get distinct names.
    static FILE_COUNTER: AtomicU64 = AtomicU64::new(0);

    /// Write `content` to a uniquely named temp file and return its path.
    fn create_temp_file(content: &str) -> String {
        let dir = std::env::temp_dir();
        let file_name = format!(
            "typokit-pipeline-test-{}-{}-{}.ts",
            std::process::id(),
            std::time::SystemTime::now()
                .duration_since(std::time::UNIX_EPOCH)
                .unwrap()
                .as_nanos(),
            FILE_COUNTER.fetch_add(1, Ordering::Relaxed),
        );
        let path = dir.join(file_name);
        let mut file = fs::File::create(&path).unwrap();
        file.write_all(content.as_bytes()).unwrap();
        path.to_string_lossy().to_string()
    }

    /// Best-effort removal of a temp file; failures are ignored.
    fn cleanup(path: &str) {
        let _ = fs::remove_file(path);
    }

    #[test]
    fn test_content_hash_deterministic() {
        let f1 = create_temp_file("interface A { id: string; }");
        let f2 = create_temp_file("interface B { id: string; }");

        let hash1 = compute_content_hash(&[f1.clone(), f2.clone()]).unwrap();
        let hash2 = compute_content_hash(&[f2.clone(), f1.clone()]).unwrap();
        // Same files in different order should produce same hash.
        assert_eq!(hash1, hash2);

        cleanup(&f1);
        cleanup(&f2);
    }

    #[test]
    fn test_content_hash_changes_on_modification() {
        let f = create_temp_file("interface A { id: string; }");
        let hash1 = compute_content_hash(&[f.clone()]).unwrap();

        // Rewriting the file with different content must change the hash.
        fs::write(&f, "interface A { id: string; name: string; }").unwrap();
        let hash2 = compute_content_hash(&[f.clone()]).unwrap();

        assert_ne!(hash1, hash2);
        cleanup(&f);
    }

    #[test]
    fn test_run_pipeline() {
        let type_file = create_temp_file(
            r#"
/**
 * @table users
 */
interface User {
    /** @id @generated */
    id: string;
    name: string;
    email: string;
    age?: number;
}
"#,
        );

        let route_file = create_temp_file(
            r#"
interface UsersRoutes {
    "GET /users": RouteContract<void, void, void, void>;
    "POST /users": RouteContract<void, void, void, void>;
    "GET /users/:id": RouteContract<{ id: string }, void, void, void>;
}
"#,
        );

        let result = run_pipeline(&[type_file.clone()], &[route_file.clone()]).unwrap();

        // Content hash is computed.
        assert!(!result.content_hash.is_empty());
        assert_eq!(result.content_hash.len(), 64); // SHA-256 hex

        // Types are extracted.
        assert!(result.types.contains_key("User"));

        // Routes are compiled.
        assert!(result.compiled_routes.contains("routeTree"));
        assert!(result.compiled_routes.contains("users"));

        // OpenAPI spec is generated.
        assert!(result.openapi_spec.contains("3.1.0"));
        assert!(result.openapi_spec.contains("/users"));

        // Test stubs are generated.
        assert!(result.test_stubs.contains("GET /users"));
        assert!(result.test_stubs.contains("POST /users"));

        // Validator inputs are prepared.
        assert_eq!(result.validator_inputs.len(), 1);
        assert_eq!(result.validator_inputs[0].name, "User");

        cleanup(&type_file);
        cleanup(&route_file);
    }

    #[test]
    fn test_run_pipeline_empty_routes() {
        // A pipeline with no route files should still extract types and
        // produce an (empty) route tree.
        let type_file = create_temp_file(
            r#"
interface User {
    id: string;
    name: string;
}
"#,
        );

        let result = run_pipeline(&[type_file.clone()], &[]).unwrap();

        assert!(result.types.contains_key("User"));
        assert!(result.compiled_routes.contains("routeTree"));
        assert_eq!(result.validator_inputs.len(), 1);

        cleanup(&type_file);
    }

    #[test]
    fn test_run_pipeline_nonexistent_file() {
        // Unreadable inputs surface as an Err rather than a panic.
        let result = run_pipeline(&["nonexistent.ts".to_string()], &[]);
        assert!(result.is_err());
    }
}
|
package/src/parser.rs
ADDED
|
@@ -0,0 +1,105 @@
|
|
|
1
|
+
use std::collections::HashMap;
|
|
2
|
+
use std::fs;
|
|
3
|
+
use swc_common::{
|
|
4
|
+
comments::SingleThreadedComments,
|
|
5
|
+
sync::Lrc,
|
|
6
|
+
FileName, SourceMap,
|
|
7
|
+
};
|
|
8
|
+
use swc_ecma_ast::EsVersion;
|
|
9
|
+
use swc_ecma_parser::{lexer::Lexer, Parser, Syntax, TsSyntax, StringInput};
|
|
10
|
+
|
|
11
|
+
use crate::type_extractor::TypeMetadata;
|
|
12
|
+
|
|
13
|
+
/// Parsed result for a single TypeScript file.
pub struct ParsedFile {
    /// Path the source was read from (also embedded in parse-error messages).
    pub path: String,
    /// The parsed SWC module AST.
    pub module: swc_ecma_ast::Module,
    /// Comments collected during parsing; consumed later by the type
    /// extractor (e.g. to read JSDoc tags such as `@table` and `@id`).
    pub comments: SingleThreadedComments,
}
|
|
19
|
+
|
|
20
|
+
/// Parse a TypeScript source string into an AST with comments
|
|
21
|
+
pub fn parse_typescript(path: &str, source: &str) -> Result<ParsedFile, String> {
|
|
22
|
+
let cm: Lrc<SourceMap> = Default::default();
|
|
23
|
+
let comments = SingleThreadedComments::default();
|
|
24
|
+
|
|
25
|
+
let fm = cm.new_source_file(Lrc::new(FileName::Custom(path.to_string())), source.to_string());
|
|
26
|
+
|
|
27
|
+
let lexer = Lexer::new(
|
|
28
|
+
Syntax::Typescript(TsSyntax {
|
|
29
|
+
tsx: false,
|
|
30
|
+
decorators: true,
|
|
31
|
+
..Default::default()
|
|
32
|
+
}),
|
|
33
|
+
EsVersion::latest(),
|
|
34
|
+
StringInput::from(&*fm),
|
|
35
|
+
Some(&comments),
|
|
36
|
+
);
|
|
37
|
+
|
|
38
|
+
let mut parser = Parser::new_from(lexer);
|
|
39
|
+
let module = parser.parse_module().map_err(|e| format!("Parse error in {}: {:?}", path, e))?;
|
|
40
|
+
|
|
41
|
+
Ok(ParsedFile {
|
|
42
|
+
path: path.to_string(),
|
|
43
|
+
module,
|
|
44
|
+
comments,
|
|
45
|
+
})
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
/// Parse multiple TypeScript files and extract type metadata from all of them
|
|
49
|
+
pub fn parse_and_extract_types(file_paths: &[String]) -> Result<HashMap<String, TypeMetadata>, String> {
|
|
50
|
+
let mut schema_map: HashMap<String, TypeMetadata> = HashMap::new();
|
|
51
|
+
|
|
52
|
+
for path in file_paths {
|
|
53
|
+
let source = fs::read_to_string(path)
|
|
54
|
+
.map_err(|e| format!("Failed to read file {}: {}", path, e))?;
|
|
55
|
+
|
|
56
|
+
let parsed = parse_typescript(path, &source)?;
|
|
57
|
+
let types = crate::type_extractor::extract_types(&parsed.module, &parsed.comments);
|
|
58
|
+
|
|
59
|
+
for (name, metadata) in types {
|
|
60
|
+
schema_map.insert(name, metadata);
|
|
61
|
+
}
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
Ok(schema_map)
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
#[cfg(test)]
mod tests {
    use super::*;

    // Happy path: a plain interface parses without error.
    #[test]
    fn test_parse_simple_interface() {
        let source = r#"
interface User {
    id: string;
    name: string;
}
"#;
        let result = parse_typescript("test.ts", source);
        assert!(result.is_ok());
    }

    // JSDoc blocks (e.g. @table / @id tags) must not break parsing; comment
    // contents are collected alongside the AST.
    #[test]
    fn test_parse_with_jsdoc() {
        let source = r#"
/**
 * @table users
 */
interface User {
    /** @id */
    id: string;
    name: string;
}
"#;
        let result = parse_typescript("test.ts", source);
        assert!(result.is_ok());
    }

    // Malformed TypeScript surfaces as Err rather than a panic.
    #[test]
    fn test_parse_invalid_syntax() {
        let source = "interface { invalid }";
        let result = parse_typescript("test.ts", source);
        assert!(result.is_err());
    }
}
|