@typokit/plugin-axum 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +81 -0
- package/dist/index.d.ts +28 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +186 -0
- package/dist/index.js.map +1 -0
- package/index.darwin-arm64.node +0 -0
- package/index.darwin-x64.node +0 -0
- package/index.linux-arm64-gnu.node +0 -0
- package/index.linux-x64-gnu.node +0 -0
- package/index.linux-x64-musl.node +0 -0
- package/index.win32-x64-msvc.node +0 -0
- package/package.json +57 -0
- package/src/index.ts +309 -0
- package/src/lib.rs +80 -0
- package/src/rust_codegen/database.rs +898 -0
- package/src/rust_codegen/handlers.rs +1111 -0
- package/src/rust_codegen/middleware.rs +156 -0
- package/src/rust_codegen/mod.rs +91 -0
- package/src/rust_codegen/project.rs +593 -0
- package/src/rust_codegen/router.rs +385 -0
- package/src/rust_codegen/services.rs +476 -0
- package/src/rust_codegen/structs.rs +1363 -0
|
@@ -0,0 +1,898 @@
|
|
|
1
|
+
use std::collections::{BTreeMap, HashMap};
|
|
2
|
+
|
|
3
|
+
use typokit_transform_native::type_extractor::{PropertyMetadata, TypeMetadata};
|
|
4
|
+
use super::GeneratedOutput;
|
|
5
|
+
|
|
6
|
+
/// Generate the sqlx database layer from @table-annotated entities.
|
|
7
|
+
///
|
|
8
|
+
/// Produces:
|
|
9
|
+
/// - `.typokit/db/mod.rs` — PgPool connection setup
|
|
10
|
+
/// - `.typokit/db/repository.rs` — CRUD functions per @table entity
|
|
11
|
+
/// - `.typokit/migrations/{timestamp}_create_{table}.sql` — CREATE TABLE migrations
|
|
12
|
+
pub fn generate_database(type_map: &HashMap<String, TypeMetadata>) -> Vec<GeneratedOutput> {
|
|
13
|
+
let mut outputs = Vec::new();
|
|
14
|
+
|
|
15
|
+
// Filter to entities with @table annotation, sorted for determinism
|
|
16
|
+
let table_entities: BTreeMap<String, &TypeMetadata> = type_map
|
|
17
|
+
.iter()
|
|
18
|
+
.filter(|(_, meta)| is_table_entity(meta))
|
|
19
|
+
.map(|(name, meta)| (name.clone(), meta))
|
|
20
|
+
.collect();
|
|
21
|
+
|
|
22
|
+
// Always generate db/mod.rs
|
|
23
|
+
outputs.push(generate_db_mod());
|
|
24
|
+
|
|
25
|
+
// Generate repository only if there are table entities
|
|
26
|
+
if !table_entities.is_empty() {
|
|
27
|
+
outputs.push(generate_repository(&table_entities));
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
// Generate migration files
|
|
31
|
+
for (idx, (_name, meta)) in table_entities.iter().enumerate() {
|
|
32
|
+
outputs.push(generate_migration(meta, idx));
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
outputs
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
// ─────────────────────────── db/mod.rs ────────────────────────────────────────
|
|
39
|
+
|
|
40
|
+
/// Generate the `.typokit/db/mod.rs` with PgPool connection setup.
|
|
41
|
+
fn generate_db_mod() -> GeneratedOutput {
|
|
42
|
+
let content = r#"// AUTO-GENERATED by @typokit/transform-native — DO NOT EDIT
|
|
43
|
+
|
|
44
|
+
pub mod repository;
|
|
45
|
+
|
|
46
|
+
use sqlx::postgres::PgPool;
|
|
47
|
+
|
|
48
|
+
/// Connect to PostgreSQL and return a connection pool.
|
|
49
|
+
pub async fn connect(database_url: &str) -> PgPool {
|
|
50
|
+
PgPool::connect(database_url)
|
|
51
|
+
.await
|
|
52
|
+
.expect("Failed to connect to database")
|
|
53
|
+
}
|
|
54
|
+
"#;
|
|
55
|
+
|
|
56
|
+
GeneratedOutput {
|
|
57
|
+
path: ".typokit/db/mod.rs".to_string(),
|
|
58
|
+
content: content.to_string(),
|
|
59
|
+
overwrite: true,
|
|
60
|
+
}
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
// ─────────────────────────── db/repository.rs ─────────────────────────────────
|
|
64
|
+
|
|
65
|
+
/// Generate the `.typokit/db/repository.rs` with CRUD functions per @table entity.
|
|
66
|
+
fn generate_repository(table_entities: &BTreeMap<String, &TypeMetadata>) -> GeneratedOutput {
|
|
67
|
+
let mut output = String::new();
|
|
68
|
+
|
|
69
|
+
output.push_str("// AUTO-GENERATED by @typokit/transform-native — DO NOT EDIT\n\n");
|
|
70
|
+
output.push_str("use sqlx::postgres::PgPool;\n");
|
|
71
|
+
|
|
72
|
+
// Check if any entity has @generated uuid or now fields
|
|
73
|
+
let needs_uuid = table_entities.values().any(|meta| {
|
|
74
|
+
meta.properties.values().any(|p| is_generated_uuid(p))
|
|
75
|
+
});
|
|
76
|
+
let needs_chrono = table_entities.values().any(|meta| {
|
|
77
|
+
meta.properties.values().any(|p| is_generated_now(p))
|
|
78
|
+
});
|
|
79
|
+
|
|
80
|
+
if needs_uuid {
|
|
81
|
+
output.push_str("use uuid::Uuid;\n");
|
|
82
|
+
}
|
|
83
|
+
if needs_chrono {
|
|
84
|
+
output.push_str("use chrono::Utc;\n");
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
output.push_str("use crate::models;\n");
|
|
88
|
+
output.push('\n');
|
|
89
|
+
|
|
90
|
+
for (_name, meta) in table_entities {
|
|
91
|
+
output.push_str(&generate_entity_crud(meta));
|
|
92
|
+
output.push('\n');
|
|
93
|
+
}
|
|
94
|
+
|
|
95
|
+
GeneratedOutput {
|
|
96
|
+
path: ".typokit/db/repository.rs".to_string(),
|
|
97
|
+
content: output,
|
|
98
|
+
overwrite: true,
|
|
99
|
+
}
|
|
100
|
+
}
|
|
101
|
+
|
|
102
|
+
/// Generate CRUD functions for a single @table entity.
///
/// Emits up to five async repository functions as source text:
/// `find_all_*` (paginated), `find_*_by_id`, `create_*`, `update_*`
/// (skipped when there are no updatable columns), and `delete_*`.
/// All SQL uses positional bind placeholders; @generated fields are
/// filled server-side in `create_*` (UUID v4 / `Utc::now()`).
fn generate_entity_crud(meta: &TypeMetadata) -> String {
    let mut output = String::new();
    let entity_name = &meta.name;
    let entity_snake = to_snake_case(entity_name);
    let table_name = get_table_name(meta);

    // Resolve the primary-key column; fall back to "id" / String when no
    // @id annotation (or "id" property) exists on the entity.
    let id_prop = find_id_property(meta);
    let id_field_name = id_prop
        .as_ref()
        .map(|(name, _)| name.as_str())
        .unwrap_or("id");
    let id_column = id_field_name;
    let id_rust_type = id_prop
        .as_ref()
        .map(|(_, prop)| sql_id_rust_type(prop))
        .unwrap_or_else(|| "String".to_string());

    // Sorted properties for deterministic output
    let sorted_props = sorted_properties(meta);

    // Properties for INSERT (exclude @generated fields)
    let insert_props: Vec<(&str, &PropertyMetadata)> = sorted_props
        .iter()
        .filter(|(_, p)| !is_generated(p))
        .copied()
        .collect();

    // Properties for UPDATE (exclude @id and @generated fields)
    let update_props: Vec<(&str, &PropertyMetadata)> = sorted_props
        .iter()
        .filter(|(name, p)| !is_id_property(name, p) && !is_generated(p))
        .copied()
        .collect();

    // Generated UUID fields
    let generated_uuid_props: Vec<(&str, &PropertyMetadata)> = sorted_props
        .iter()
        .filter(|(_, p)| is_generated_uuid(p))
        .copied()
        .collect();

    // Generated now fields
    let generated_now_props: Vec<(&str, &PropertyMetadata)> = sorted_props
        .iter()
        .filter(|(_, p)| is_generated_now(p))
        .copied()
        .collect();

    // ─── find_all ────────────────────────────────────────────────────────
    output.push_str(&format!(
        "/// List all {} with pagination.\n",
        entity_snake
    ));
    output.push_str(&format!(
        "pub async fn find_all_{entity_snake}(\n pool: &PgPool,\n page: u32,\n page_size: u32,\n) -> sqlx::Result<Vec<models::{entity_name}>> {{\n"
    ));
    // NOTE(review): the emitted offset computation is u32 math; (page-1)*page_size
    // can overflow for extreme page values — confirm acceptable for callers.
    output.push_str(&format!(
        " let offset = (page.saturating_sub(1)) * page_size;\n"
    ));
    output.push_str(&format!(
        " sqlx::query_as::<_, models::{entity_name}>(\n \"SELECT * FROM {table_name} ORDER BY {id_column} LIMIT $1 OFFSET $2\"\n )\n"
    ));
    output.push_str(" .bind(page_size as i64)\n");
    output.push_str(" .bind(offset as i64)\n");
    output.push_str(" .fetch_all(pool)\n");
    output.push_str(" .await\n");
    output.push_str("}\n\n");

    // ─── find_by_id ──────────────────────────────────────────────────────
    output.push_str(&format!(
        "/// Find a single {} by ID.\n",
        entity_snake
    ));
    output.push_str(&format!(
        "pub async fn find_{entity_snake}_by_id(\n pool: &PgPool,\n id: &{id_rust_type},\n) -> sqlx::Result<Option<models::{entity_name}>> {{\n"
    ));
    output.push_str(&format!(
        " sqlx::query_as::<_, models::{entity_name}>(\n \"SELECT * FROM {table_name} WHERE {id_column} = $1\"\n )\n"
    ));
    output.push_str(" .bind(id)\n");
    output.push_str(" .fetch_optional(pool)\n");
    output.push_str(" .await\n");
    output.push_str("}\n\n");

    // ─── create ──────────────────────────────────────────────────────────
    // Build the column list: user-supplied INSERT columns + generated columns.
    // Placeholder numbering must match the bind order below: insert_props,
    // then generated uuid, then generated now.
    let mut all_insert_columns: Vec<String> = Vec::new();
    let mut all_insert_placeholders: Vec<String> = Vec::new();
    let mut placeholder_idx: usize = 1;

    // User-supplied columns
    for (col_name, _) in &insert_props {
        all_insert_columns.push(col_name.to_string());
        all_insert_placeholders.push(format!("${}", placeholder_idx));
        placeholder_idx += 1;
    }

    // @generated uuid columns
    for (col_name, _) in &generated_uuid_props {
        all_insert_columns.push(col_name.to_string());
        all_insert_placeholders.push(format!("${}", placeholder_idx));
        placeholder_idx += 1;
    }

    // @generated now columns
    for (col_name, _) in &generated_now_props {
        all_insert_columns.push(col_name.to_string());
        all_insert_placeholders.push(format!("${}", placeholder_idx));
        placeholder_idx += 1;
    }

    let columns_str = all_insert_columns.join(", ");
    let placeholders_str = all_insert_placeholders.join(", ");

    // Determine the input struct name — use the entity name without generated fields
    // Use the type directly if it has an input variant; otherwise accept individual params
    // NOTE(review): assumes structs.rs emits a `{Entity}WithoutId` input type — confirm.
    let input_struct = format!("{}WithoutId", entity_name);

    output.push_str(&format!(
        "/// Create a new {}.\n",
        entity_snake
    ));
    output.push_str(&format!(
        "pub async fn create_{entity_snake}(\n pool: &PgPool,\n input: &models::{input_struct},\n) -> sqlx::Result<models::{entity_name}> {{\n"
    ));

    // Generate UUID and now values for @generated fields
    for (col_name, _) in &generated_uuid_props {
        let field = to_snake_case(col_name);
        output.push_str(&format!(
            " let {field} = Uuid::new_v4().to_string();\n"
        ));
    }
    for (col_name, _) in &generated_now_props {
        let field = to_snake_case(col_name);
        output.push_str(&format!(
            " let {field} = Utc::now();\n"
        ));
    }

    output.push_str(&format!(
        " sqlx::query_as::<_, models::{entity_name}>(\n \"INSERT INTO {table_name} ({columns_str}) VALUES ({placeholders_str}) RETURNING *\"\n )\n"
    ));

    // Bind user-supplied input fields
    for (col_name, _) in &insert_props {
        let field = to_snake_case(col_name);
        output.push_str(&format!(" .bind(&input.{field})\n"));
    }

    // Bind generated fields
    for (col_name, _) in &generated_uuid_props {
        let field = to_snake_case(col_name);
        output.push_str(&format!(" .bind(&{field})\n"));
    }
    for (col_name, _) in &generated_now_props {
        let field = to_snake_case(col_name);
        output.push_str(&format!(" .bind(&{field})\n"));
    }

    output.push_str(" .fetch_one(pool)\n");
    output.push_str(" .await\n");
    output.push_str("}\n\n");

    // ─── update ──────────────────────────────────────────────────────────
    // Skipped entirely when every column is either the PK or @generated.
    if !update_props.is_empty() {
        let set_clauses: Vec<String> = update_props
            .iter()
            .enumerate()
            .map(|(i, (col_name, _))| format!("{} = ${}", col_name, i + 1))
            .collect();
        let set_str = set_clauses.join(", ");
        // The id is bound last, after all SET values.
        let id_placeholder = format!("${}", update_props.len() + 1);

        output.push_str(&format!(
            "/// Update an existing {} by ID.\n",
            entity_snake
        ));
        output.push_str(&format!(
            "pub async fn update_{entity_snake}(\n pool: &PgPool,\n id: &{id_rust_type},\n input: &models::{input_struct},\n) -> sqlx::Result<Option<models::{entity_name}>> {{\n"
        ));
        output.push_str(&format!(
            " sqlx::query_as::<_, models::{entity_name}>(\n \"UPDATE {table_name} SET {set_str} WHERE {id_column} = {id_placeholder} RETURNING *\"\n )\n"
        ));

        for (col_name, _) in &update_props {
            let field = to_snake_case(col_name);
            output.push_str(&format!(" .bind(&input.{field})\n"));
        }
        output.push_str(" .bind(id)\n");
        output.push_str(" .fetch_optional(pool)\n");
        output.push_str(" .await\n");
        output.push_str("}\n\n");
    }

    // ─── delete ──────────────────────────────────────────────────────────
    output.push_str(&format!(
        "/// Delete a {} by ID.\n",
        entity_snake
    ));
    output.push_str(&format!(
        "pub async fn delete_{entity_snake}(\n pool: &PgPool,\n id: &{id_rust_type},\n) -> sqlx::Result<Option<models::{entity_name}>> {{\n"
    ));
    output.push_str(&format!(
        " sqlx::query_as::<_, models::{entity_name}>(\n \"DELETE FROM {table_name} WHERE {id_column} = $1 RETURNING *\"\n )\n"
    ));
    output.push_str(" .bind(id)\n");
    output.push_str(" .fetch_optional(pool)\n");
    output.push_str(" .await\n");
    output.push_str("}\n");

    output
}
|
|
316
|
+
|
|
317
|
+
// ─────────────────────────── migrations ───────────────────────────────────────
|
|
318
|
+
|
|
319
|
+
/// Generate a SQL migration file for a @table entity.
|
|
320
|
+
fn generate_migration(meta: &TypeMetadata, index: usize) -> GeneratedOutput {
|
|
321
|
+
let table_name = get_table_name(meta);
|
|
322
|
+
let entity_snake = to_snake_case(&meta.name);
|
|
323
|
+
|
|
324
|
+
// Use a sequential timestamp based on index for ordering
|
|
325
|
+
let timestamp = format!("00000000000{}", index + 1);
|
|
326
|
+
|
|
327
|
+
let mut sql = String::new();
|
|
328
|
+
sql.push_str(&format!(
|
|
329
|
+
"-- AUTO-GENERATED by @typokit/transform-native — DO NOT EDIT\n\n"
|
|
330
|
+
));
|
|
331
|
+
sql.push_str(&format!(
|
|
332
|
+
"CREATE TABLE IF NOT EXISTS {} (\n",
|
|
333
|
+
table_name
|
|
334
|
+
));
|
|
335
|
+
|
|
336
|
+
let sorted_props = sorted_properties(meta);
|
|
337
|
+
let unique_fields = collect_unique_fields(meta);
|
|
338
|
+
|
|
339
|
+
for (i, (col_name, prop)) in sorted_props.iter().enumerate() {
|
|
340
|
+
let pg_type = ts_type_to_postgres(&prop.type_str, col_name, &prop.jsdoc);
|
|
341
|
+
let not_null = if prop.optional { "" } else { " NOT NULL" };
|
|
342
|
+
|
|
343
|
+
let is_pk = is_id_property(col_name, prop);
|
|
344
|
+
let pk_suffix = if is_pk { " PRIMARY KEY" } else { "" };
|
|
345
|
+
|
|
346
|
+
let unique_suffix = if unique_fields.contains(&col_name.to_string()) && !is_pk {
|
|
347
|
+
" UNIQUE"
|
|
348
|
+
} else {
|
|
349
|
+
""
|
|
350
|
+
};
|
|
351
|
+
|
|
352
|
+
let trailing = if i < sorted_props.len() - 1 { "," } else { "" };
|
|
353
|
+
|
|
354
|
+
sql.push_str(&format!(
|
|
355
|
+
" {} {}{}{}{}{}\n",
|
|
356
|
+
col_name, pg_type, not_null, pk_suffix, unique_suffix, trailing
|
|
357
|
+
));
|
|
358
|
+
}
|
|
359
|
+
|
|
360
|
+
sql.push_str(");\n");
|
|
361
|
+
|
|
362
|
+
GeneratedOutput {
|
|
363
|
+
path: format!(".typokit/migrations/{}_create_{}.sql", timestamp, entity_snake),
|
|
364
|
+
content: sql,
|
|
365
|
+
overwrite: true,
|
|
366
|
+
}
|
|
367
|
+
}
|
|
368
|
+
|
|
369
|
+
// ─────────────────────────── Helpers ──────────────────────────────────────────
|
|
370
|
+
|
|
371
|
+
/// Check if a TypeMetadata has the @table JSDoc annotation.
|
|
372
|
+
fn is_table_entity(meta: &TypeMetadata) -> bool {
|
|
373
|
+
meta.jsdoc
|
|
374
|
+
.as_ref()
|
|
375
|
+
.map(|j| j.contains_key("table"))
|
|
376
|
+
.unwrap_or(false)
|
|
377
|
+
}
|
|
378
|
+
|
|
379
|
+
/// Get the SQL table name from @table annotation value, or fall back to snake_case.
|
|
380
|
+
fn get_table_name(meta: &TypeMetadata) -> String {
|
|
381
|
+
meta.jsdoc
|
|
382
|
+
.as_ref()
|
|
383
|
+
.and_then(|j| j.get("table"))
|
|
384
|
+
.filter(|v| !v.is_empty())
|
|
385
|
+
.map(|v| v.to_string())
|
|
386
|
+
.unwrap_or_else(|| to_snake_case(&meta.name) + "s")
|
|
387
|
+
}
|
|
388
|
+
|
|
389
|
+
/// Find the @id property of a type (or fall back to "id" by name convention).
|
|
390
|
+
fn find_id_property(meta: &TypeMetadata) -> Option<(String, PropertyMetadata)> {
|
|
391
|
+
// First look for explicit @id annotation
|
|
392
|
+
for (name, prop) in &meta.properties {
|
|
393
|
+
if prop
|
|
394
|
+
.jsdoc
|
|
395
|
+
.as_ref()
|
|
396
|
+
.map(|j| j.contains_key("id"))
|
|
397
|
+
.unwrap_or(false)
|
|
398
|
+
{
|
|
399
|
+
return Some((name.clone(), prop.clone()));
|
|
400
|
+
}
|
|
401
|
+
}
|
|
402
|
+
// Fall back to property named "id"
|
|
403
|
+
meta.properties
|
|
404
|
+
.get("id")
|
|
405
|
+
.map(|p| ("id".to_string(), p.clone()))
|
|
406
|
+
}
|
|
407
|
+
|
|
408
|
+
/// Check if a property is the @id field.
|
|
409
|
+
fn is_id_property(name: &str, prop: &PropertyMetadata) -> bool {
|
|
410
|
+
prop.jsdoc
|
|
411
|
+
.as_ref()
|
|
412
|
+
.map(|j| j.contains_key("id"))
|
|
413
|
+
.unwrap_or(false)
|
|
414
|
+
|| name == "id"
|
|
415
|
+
}
|
|
416
|
+
|
|
417
|
+
/// Check if a property has @generated annotation.
|
|
418
|
+
fn is_generated(prop: &PropertyMetadata) -> bool {
|
|
419
|
+
prop.jsdoc
|
|
420
|
+
.as_ref()
|
|
421
|
+
.map(|j| j.contains_key("generated"))
|
|
422
|
+
.unwrap_or(false)
|
|
423
|
+
}
|
|
424
|
+
|
|
425
|
+
/// Check if a property is a @generated uuid field.
|
|
426
|
+
fn is_generated_uuid(prop: &PropertyMetadata) -> bool {
|
|
427
|
+
if let Some(jsdoc) = &prop.jsdoc {
|
|
428
|
+
if let Some(val) = jsdoc.get("generated") {
|
|
429
|
+
return val == "uuid" || (jsdoc.contains_key("id") && val.is_empty());
|
|
430
|
+
}
|
|
431
|
+
// @generated on an @id string field implies uuid
|
|
432
|
+
if jsdoc.contains_key("generated") && jsdoc.contains_key("id") {
|
|
433
|
+
return true;
|
|
434
|
+
}
|
|
435
|
+
}
|
|
436
|
+
false
|
|
437
|
+
}
|
|
438
|
+
|
|
439
|
+
/// Check if a property is a @generated now field (timestamp auto-fill).
|
|
440
|
+
fn is_generated_now(prop: &PropertyMetadata) -> bool {
|
|
441
|
+
if let Some(jsdoc) = &prop.jsdoc {
|
|
442
|
+
if let Some(val) = jsdoc.get("generated") {
|
|
443
|
+
return val == "now";
|
|
444
|
+
}
|
|
445
|
+
}
|
|
446
|
+
false
|
|
447
|
+
}
|
|
448
|
+
|
|
449
|
+
/// Determine the Rust type for an @id field in function signatures.
|
|
450
|
+
fn sql_id_rust_type(prop: &PropertyMetadata) -> String {
|
|
451
|
+
match prop.type_str.as_str() {
|
|
452
|
+
"string" => "str".to_string(),
|
|
453
|
+
"number" => "i64".to_string(),
|
|
454
|
+
_ => "str".to_string(),
|
|
455
|
+
}
|
|
456
|
+
}
|
|
457
|
+
|
|
458
|
+
/// Map a TypeScript type to a PostgreSQL column type.
///
/// Two JSDoc overrides take precedence: `@id` string fields become TEXT and
/// `@integer` numbers become BIGINT. Number columns whose names look like
/// pagination/count fields map to INTEGER, other numbers to DOUBLE
/// PRECISION; arrays are stored as JSONB and anything unrecognized falls
/// back to TEXT.
fn ts_type_to_postgres(
    ts_type: &str,
    prop_name: &str,
    jsdoc: &Option<HashMap<String, String>>,
) -> String {
    // Annotation-driven overrides win over the structural mapping.
    if let Some(tags) = jsdoc {
        if ts_type == "string" && tags.contains_key("id") {
            return "TEXT".to_string();
        }
        if tags.contains_key("integer") {
            return "BIGINT".to_string();
        }
    }

    // Field names that by convention hold whole numbers.
    const INTEGER_NAMES: &[&str] = &[
        "page", "pageSize", "page_size", "limit", "offset", "perPage",
        "per_page", "total", "totalPages", "total_pages", "count", "size",
    ];

    let column_type = match ts_type {
        "string" => "TEXT",
        "boolean" => "BOOLEAN",
        "Date" | "DateTime" => "TIMESTAMPTZ",
        "number" if INTEGER_NAMES.contains(&prop_name) => "INTEGER",
        "number" => "DOUBLE PRECISION",
        t if t.ends_with("[]") || (t.starts_with("Array<") && t.ends_with('>')) => "JSONB",
        _ => "TEXT",
    };
    column_type.to_string()
}
|
|
496
|
+
|
|
497
|
+
/// Collect field names that have @unique annotation.
|
|
498
|
+
fn collect_unique_fields(meta: &TypeMetadata) -> Vec<String> {
|
|
499
|
+
meta.properties
|
|
500
|
+
.iter()
|
|
501
|
+
.filter(|(_, prop)| {
|
|
502
|
+
prop.jsdoc
|
|
503
|
+
.as_ref()
|
|
504
|
+
.map(|j| j.contains_key("unique"))
|
|
505
|
+
.unwrap_or(false)
|
|
506
|
+
})
|
|
507
|
+
.map(|(name, _)| name.clone())
|
|
508
|
+
.collect()
|
|
509
|
+
}
|
|
510
|
+
|
|
511
|
+
/// Return sorted properties as Vec for deterministic output.
|
|
512
|
+
fn sorted_properties(meta: &TypeMetadata) -> Vec<(&str, &PropertyMetadata)> {
|
|
513
|
+
let mut props: Vec<(&str, &PropertyMetadata)> = meta
|
|
514
|
+
.properties
|
|
515
|
+
.iter()
|
|
516
|
+
.map(|(k, v)| (k.as_str(), v))
|
|
517
|
+
.collect();
|
|
518
|
+
props.sort_by_key(|(name, _)| name.to_string());
|
|
519
|
+
props
|
|
520
|
+
}
|
|
521
|
+
|
|
522
|
+
/// Convert a camelCase or PascalCase string to snake_case.
///
/// Each uppercase letter is lowercased and, unless it starts the string,
/// preceded by an underscore. Runs of capitals therefore expand letter by
/// letter (e.g. "HTTPServer" -> "h_t_t_p_server"), matching the naming
/// used throughout the generated output.
fn to_snake_case(s: &str) -> String {
    let mut result = String::with_capacity(s.len() + 4);
    for (idx, ch) in s.chars().enumerate() {
        if !ch.is_uppercase() {
            result.push(ch);
            continue;
        }
        if idx > 0 {
            result.push('_');
        }
        // to_lowercase never yields an empty iterator, so the fallback is moot.
        result.push(ch.to_lowercase().next().unwrap_or(ch));
    }
    result
}
|
|
537
|
+
|
|
538
|
+
#[cfg(test)]
|
|
539
|
+
mod tests {
|
|
540
|
+
use super::*;
|
|
541
|
+
use typokit_transform_native::type_extractor::{PropertyMetadata, TypeMetadata};
|
|
542
|
+
|
|
543
|
+
fn make_table_type(
|
|
544
|
+
name: &str,
|
|
545
|
+
table_name: &str,
|
|
546
|
+
props: Vec<(&str, &str, bool, Vec<(&str, &str)>)>,
|
|
547
|
+
) -> TypeMetadata {
|
|
548
|
+
let mut properties = HashMap::new();
|
|
549
|
+
for (pname, ptype, optional, jsdoc_tags) in props {
|
|
550
|
+
let jsdoc = if jsdoc_tags.is_empty() {
|
|
551
|
+
None
|
|
552
|
+
} else {
|
|
553
|
+
let mut map = HashMap::new();
|
|
554
|
+
for (key, value) in jsdoc_tags {
|
|
555
|
+
map.insert(key.to_string(), value.to_string());
|
|
556
|
+
}
|
|
557
|
+
Some(map)
|
|
558
|
+
};
|
|
559
|
+
properties.insert(
|
|
560
|
+
pname.to_string(),
|
|
561
|
+
PropertyMetadata {
|
|
562
|
+
type_str: ptype.to_string(),
|
|
563
|
+
optional,
|
|
564
|
+
jsdoc,
|
|
565
|
+
},
|
|
566
|
+
);
|
|
567
|
+
}
|
|
568
|
+
let mut type_jsdoc = HashMap::new();
|
|
569
|
+
type_jsdoc.insert("table".to_string(), table_name.to_string());
|
|
570
|
+
TypeMetadata {
|
|
571
|
+
name: name.to_string(),
|
|
572
|
+
properties,
|
|
573
|
+
jsdoc: Some(type_jsdoc),
|
|
574
|
+
}
|
|
575
|
+
}
|
|
576
|
+
|
|
577
|
+
fn make_non_table_type(name: &str) -> TypeMetadata {
|
|
578
|
+
let mut properties = HashMap::new();
|
|
579
|
+
properties.insert(
|
|
580
|
+
"value".to_string(),
|
|
581
|
+
PropertyMetadata {
|
|
582
|
+
type_str: "string".to_string(),
|
|
583
|
+
optional: false,
|
|
584
|
+
jsdoc: None,
|
|
585
|
+
},
|
|
586
|
+
);
|
|
587
|
+
TypeMetadata {
|
|
588
|
+
name: name.to_string(),
|
|
589
|
+
properties,
|
|
590
|
+
jsdoc: None,
|
|
591
|
+
}
|
|
592
|
+
}
|
|
593
|
+
|
|
594
|
+
fn user_type() -> TypeMetadata {
|
|
595
|
+
make_table_type(
|
|
596
|
+
"User",
|
|
597
|
+
"users",
|
|
598
|
+
vec![
|
|
599
|
+
("id", "string", false, vec![("id", ""), ("generated", "uuid")]),
|
|
600
|
+
("email", "string", false, vec![("unique", ""), ("format", "email")]),
|
|
601
|
+
("name", "string", false, vec![]),
|
|
602
|
+
("createdAt", "Date", false, vec![("generated", "now")]),
|
|
603
|
+
],
|
|
604
|
+
)
|
|
605
|
+
}
|
|
606
|
+
|
|
607
|
+
#[test]
|
|
608
|
+
fn test_is_table_entity() {
|
|
609
|
+
let user = user_type();
|
|
610
|
+
assert!(is_table_entity(&user));
|
|
611
|
+
assert!(!is_table_entity(&make_non_table_type("Config")));
|
|
612
|
+
}
|
|
613
|
+
|
|
614
|
+
#[test]
|
|
615
|
+
fn test_get_table_name() {
|
|
616
|
+
let user = user_type();
|
|
617
|
+
assert_eq!(get_table_name(&user), "users");
|
|
618
|
+
|
|
619
|
+
// Empty @table value falls back to snake_case + "s"
|
|
620
|
+
let mut meta = make_non_table_type("BlogPost");
|
|
621
|
+
let mut jsdoc = HashMap::new();
|
|
622
|
+
jsdoc.insert("table".to_string(), String::new());
|
|
623
|
+
meta.jsdoc = Some(jsdoc);
|
|
624
|
+
assert_eq!(get_table_name(&meta), "blog_posts");
|
|
625
|
+
}
|
|
626
|
+
|
|
627
|
+
#[test]
|
|
628
|
+
fn test_find_id_property() {
|
|
629
|
+
let user = user_type();
|
|
630
|
+
let id = find_id_property(&user);
|
|
631
|
+
assert!(id.is_some());
|
|
632
|
+
let (name, _) = id.unwrap();
|
|
633
|
+
assert_eq!(name, "id");
|
|
634
|
+
}
|
|
635
|
+
|
|
636
|
+
#[test]
|
|
637
|
+
fn test_is_generated_uuid() {
|
|
638
|
+
let prop = PropertyMetadata {
|
|
639
|
+
type_str: "string".to_string(),
|
|
640
|
+
optional: false,
|
|
641
|
+
jsdoc: Some({
|
|
642
|
+
let mut m = HashMap::new();
|
|
643
|
+
m.insert("id".to_string(), String::new());
|
|
644
|
+
m.insert("generated".to_string(), "uuid".to_string());
|
|
645
|
+
m
|
|
646
|
+
}),
|
|
647
|
+
};
|
|
648
|
+
assert!(is_generated_uuid(&prop));
|
|
649
|
+
}
|
|
650
|
+
|
|
651
|
+
#[test]
|
|
652
|
+
fn test_is_generated_now() {
|
|
653
|
+
let prop = PropertyMetadata {
|
|
654
|
+
type_str: "Date".to_string(),
|
|
655
|
+
optional: false,
|
|
656
|
+
jsdoc: Some({
|
|
657
|
+
let mut m = HashMap::new();
|
|
658
|
+
m.insert("generated".to_string(), "now".to_string());
|
|
659
|
+
m
|
|
660
|
+
}),
|
|
661
|
+
};
|
|
662
|
+
assert!(is_generated_now(&prop));
|
|
663
|
+
assert!(!is_generated_uuid(&prop));
|
|
664
|
+
}
|
|
665
|
+
|
|
666
|
+
#[test]
|
|
667
|
+
fn test_ts_type_to_postgres_mapping() {
|
|
668
|
+
assert_eq!(ts_type_to_postgres("string", "name", &None), "TEXT");
|
|
669
|
+
assert_eq!(ts_type_to_postgres("number", "age", &None), "DOUBLE PRECISION");
|
|
670
|
+
assert_eq!(ts_type_to_postgres("boolean", "active", &None), "BOOLEAN");
|
|
671
|
+
assert_eq!(ts_type_to_postgres("Date", "createdAt", &None), "TIMESTAMPTZ");
|
|
672
|
+
assert_eq!(ts_type_to_postgres("string[]", "tags", &None), "JSONB");
|
|
673
|
+
|
|
674
|
+
// Integer override
|
|
675
|
+
let jsdoc = Some({
|
|
676
|
+
let mut m = HashMap::new();
|
|
677
|
+
m.insert("integer".to_string(), String::new());
|
|
678
|
+
m
|
|
679
|
+
});
|
|
680
|
+
assert_eq!(ts_type_to_postgres("number", "score", &jsdoc), "BIGINT");
|
|
681
|
+
|
|
682
|
+
// Pagination heuristic
|
|
683
|
+
assert_eq!(ts_type_to_postgres("number", "page", &None), "INTEGER");
|
|
684
|
+
}
|
|
685
|
+
|
|
686
|
+
#[test]
|
|
687
|
+
fn test_generate_database_filters_non_table_entities() {
|
|
688
|
+
let mut type_map = HashMap::new();
|
|
689
|
+
type_map.insert("User".to_string(), user_type());
|
|
690
|
+
type_map.insert("Config".to_string(), make_non_table_type("Config"));
|
|
691
|
+
|
|
692
|
+
let outputs = generate_database(&type_map);
|
|
693
|
+
|
|
694
|
+
// Should have db/mod.rs, db/repository.rs, and one migration
|
|
695
|
+
assert!(outputs.iter().any(|o| o.path == ".typokit/db/mod.rs"));
|
|
696
|
+
assert!(outputs.iter().any(|o| o.path == ".typokit/db/repository.rs"));
|
|
697
|
+
assert!(outputs.iter().any(|o| o.path.contains("migrations") && o.path.contains("user")));
|
|
698
|
+
assert_eq!(outputs.len(), 3);
|
|
699
|
+
}
|
|
700
|
+
|
|
701
|
+
#[test]
|
|
702
|
+
fn test_generate_db_mod_content() {
|
|
703
|
+
let output = generate_db_mod();
|
|
704
|
+
assert_eq!(output.path, ".typokit/db/mod.rs");
|
|
705
|
+
assert!(output.overwrite);
|
|
706
|
+
assert!(output.content.contains("pub async fn connect(database_url: &str) -> PgPool"));
|
|
707
|
+
assert!(output.content.contains("pub mod repository;"));
|
|
708
|
+
}
|
|
709
|
+
|
|
710
|
+
#[test]
|
|
711
|
+
fn test_generate_repository_crud_functions() {
|
|
712
|
+
let mut type_map = HashMap::new();
|
|
713
|
+
type_map.insert("User".to_string(), user_type());
|
|
714
|
+
|
|
715
|
+
let table_entities: BTreeMap<String, &TypeMetadata> = type_map
|
|
716
|
+
.iter()
|
|
717
|
+
.filter(|(_, meta)| is_table_entity(meta))
|
|
718
|
+
.map(|(name, meta)| (name.clone(), meta))
|
|
719
|
+
.collect();
|
|
720
|
+
|
|
721
|
+
let output = generate_repository(&table_entities);
|
|
722
|
+
let content = &output.content;
|
|
723
|
+
|
|
724
|
+
assert!(content.contains("pub async fn find_all_user("));
|
|
725
|
+
assert!(content.contains("pub async fn find_user_by_id("));
|
|
726
|
+
assert!(content.contains("pub async fn create_user("));
|
|
727
|
+
assert!(content.contains("pub async fn update_user("));
|
|
728
|
+
assert!(content.contains("pub async fn delete_user("));
|
|
729
|
+
}
|
|
730
|
+
|
|
731
|
+
#[test]
|
|
732
|
+
fn test_repository_uses_correct_table_name() {
|
|
733
|
+
let mut type_map = HashMap::new();
|
|
734
|
+
type_map.insert("User".to_string(), user_type());
|
|
735
|
+
|
|
736
|
+
let table_entities: BTreeMap<String, &TypeMetadata> = type_map
|
|
737
|
+
.iter()
|
|
738
|
+
.filter(|(_, meta)| is_table_entity(meta))
|
|
739
|
+
.map(|(name, meta)| (name.clone(), meta))
|
|
740
|
+
.collect();
|
|
741
|
+
|
|
742
|
+
let output = generate_repository(&table_entities);
|
|
743
|
+
assert!(output.content.contains("FROM users"));
|
|
744
|
+
}
|
|
745
|
+
|
|
746
|
+
#[test]
|
|
747
|
+
fn test_repository_generated_uuid() {
|
|
748
|
+
let mut type_map = HashMap::new();
|
|
749
|
+
type_map.insert("User".to_string(), user_type());
|
|
750
|
+
|
|
751
|
+
let table_entities: BTreeMap<String, &TypeMetadata> = type_map
|
|
752
|
+
.iter()
|
|
753
|
+
.filter(|(_, meta)| is_table_entity(meta))
|
|
754
|
+
.map(|(name, meta)| (name.clone(), meta))
|
|
755
|
+
.collect();
|
|
756
|
+
|
|
757
|
+
let output = generate_repository(&table_entities);
|
|
758
|
+
assert!(output.content.contains("Uuid::new_v4().to_string()"));
|
|
759
|
+
assert!(output.content.contains("use uuid::Uuid;"));
|
|
760
|
+
}
|
|
761
|
+
|
|
762
|
+
#[test]
fn test_repository_generated_now() {
    // Generated-timestamp fields are filled via Utc::now, and the
    // repository file must import chrono for that call.
    let mut types = HashMap::new();
    types.insert("User".to_string(), user_type());

    let entities: BTreeMap<String, &TypeMetadata> = types
        .iter()
        .filter_map(|(name, meta)| is_table_entity(meta).then(|| (name.clone(), meta)))
        .collect();

    let repo = generate_repository(&entities);
    for needle in ["Utc::now()", "use chrono::Utc;"] {
        assert!(repo.content.contains(needle));
    }
}
#[test]
fn test_generate_migration_creates_table() {
    // A @table entity yields an overwritable migration file under
    // migrations/ whose SQL declares the expected columns.
    let migration = generate_migration(&user_type(), 0);

    assert!(migration.path.contains("migrations"));
    assert!(migration.path.contains("create_user"));
    assert!(migration.overwrite);

    let sql = &migration.content;
    for needle in [
        "CREATE TABLE IF NOT EXISTS users",
        "id TEXT NOT NULL PRIMARY KEY",
        "email TEXT NOT NULL UNIQUE",
        "name TEXT NOT NULL",
        "TIMESTAMPTZ",
    ] {
        assert!(sql.contains(needle));
    }
}
#[test]
fn test_migration_column_types() {
    // Property-type → Postgres column-type mapping; the ("integer", "")
    // annotation switches a number column from DOUBLE PRECISION to BIGINT.
    let product = make_table_type(
        "Product",
        "products",
        vec![
            ("id", "string", false, vec![("id", "")]),
            ("name", "string", false, vec![]),
            ("price", "number", false, vec![]),
            ("active", "boolean", false, vec![]),
            ("createdAt", "Date", false, vec![]),
            ("count", "number", false, vec![("integer", "")]),
        ],
    );

    let sql = generate_migration(&product, 0).content;
    for needle in [
        "id TEXT NOT NULL PRIMARY KEY",
        "name TEXT NOT NULL",
        "price DOUBLE PRECISION NOT NULL",
        "active BOOLEAN NOT NULL",
        "TIMESTAMPTZ NOT NULL",
        "count BIGINT NOT NULL",
    ] {
        assert!(sql.contains(needle));
    }
}
#[test]
fn test_generate_database_all_overwrite_true() {
    // Every file emitted by generate_database carries overwrite: true.
    let mut types = HashMap::new();
    types.insert("User".to_string(), user_type());

    for file in &generate_database(&types) {
        assert!(file.overwrite, "Expected overwrite: true for {}", file.path);
    }
}
#[test]
fn test_generate_database_empty_type_map() {
    let files = generate_database(&HashMap::new());

    // Should still produce db/mod.rs
    assert!(files.iter().any(|f| f.path == ".typokit/db/mod.rs"));
    // No repository or migrations since there are no table entities
    assert!(files.iter().all(|f| !f.path.contains("repository")));
}
#[test]
fn test_generate_database_deterministic() {
    // Two runs over the same input must produce identical paths and
    // contents — HashMap iteration order must not leak into the output.
    let mut types = HashMap::new();
    types.insert("User".to_string(), user_type());
    types.insert(
        "Todo".to_string(),
        make_table_type(
            "Todo",
            "todos",
            vec![
                ("id", "string", false, vec![("id", ""), ("generated", "uuid")]),
                ("title", "string", false, vec![]),
                ("completed", "boolean", false, vec![]),
            ],
        ),
    );

    let first = generate_database(&types);
    let second = generate_database(&types);

    assert_eq!(first.len(), second.len());
    for (a, b) in first.iter().zip(&second) {
        assert_eq!(a.path, b.path);
        assert_eq!(a.content, b.content);
    }
}
#[test]
fn test_find_all_pagination() {
    // The find_all query pages its results via LIMIT/OFFSET and exposes
    // page / page_size parameters.
    let crud = generate_entity_crud(&user_type());
    for needle in ["LIMIT $1 OFFSET $2", "page: u32", "page_size: u32"] {
        assert!(crud.contains(needle));
    }
}
#[test]
fn test_create_excludes_generated_fields_from_input() {
    let crud = generate_entity_crud(&user_type());

    // The INSERT should include id and createdAt (via generated values)
    // but the input struct should NOT have those fields bound from input
    for generated_value in ["Uuid::new_v4()", "Utc::now()"] {
        assert!(crud.contains(generated_value));
    }
}
#[test]
fn test_delete_returns_optional() {
    // delete uses DELETE ... RETURNING * with fetch_optional, so the
    // removed row comes back as an Option.
    let crud = generate_entity_crud(&user_type());
    for needle in [
        "delete_user",
        "DELETE FROM users WHERE id = $1 RETURNING *",
        "fetch_optional",
    ] {
        assert!(crud.contains(needle));
    }
}
}
|