sera-2 1.21.2-py3-none-any.whl → 1.24.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
sera/libs/search_helper.py ADDED
@@ -0,0 +1,359 @@
+ from __future__ import annotations
+
+ from datetime import datetime, timezone
+ from enum import Enum
+ from typing import TYPE_CHECKING, Annotated, Any, Literal, Optional, Sequence
+
+ import msgspec
+ from litestar import status_codes
+ from litestar.exceptions import HTTPException
+
+ from sera.misc import to_snake_case
+ from sera.models import Cardinality, Class, DataProperty, ObjectProperty
+ from sera.typing import FieldName, doc
+
+ if TYPE_CHECKING:
+     from sera.libs.base_service import QueryResult
+
+ """Helpers for implementing search functionality via HTTP POST requests."""
+
+
+ class QueryOp(str, Enum):
+     lt = "lt"
+     lte = "lte"
+     gt = "gt"
+     gte = "gte"
+     eq = "eq"
+     ne = "ne"
+     # select records whose values are in the given list
+     in_ = "in"
+     not_in = "not_in"
+     # for full-text search
+     fuzzy = "fuzzy"
+
+
+ class QueryClause(msgspec.Struct):
+     field: FieldName
+     op: QueryOp
+     value: Annotated[Any, doc("query value")]
+
+
+ class FieldOrderClause(msgspec.Struct):
+     field: FieldName
+     order: Annotated[Literal["asc", "desc"], doc("order direction, 'asc' or 'desc'")]
+     prop: Optional[str] = None
+
+
+ class FieldGroupClause(msgspec.Struct):
+     field: FieldName
+     prop: Optional[str] = None
+
+
+ # query conditions written in CNF (conjunctive normal form)
+ QueryCondition = Annotated[
+     Sequence[QueryClause],
+     doc("query operations written in CNF form"),
+ ]
+
+
+ class JoinCondition(msgspec.Struct):
+     # name of the property in the primary class
+     prop: str
+     join_type: Annotated[
+         Literal["inner", "left", "full"],
+         doc("join type, 'inner', 'left', or 'full'"),
+     ] = msgspec.field(default="inner")
+     fields: Sequence[FieldName] = msgspec.field(default_factory=list)
+     conditions: QueryCondition = msgspec.field(default_factory=list)
+
+
+ class Query(msgspec.Struct):
+     # list of fields to return in the results
+     fields: Sequence[FieldName] = msgspec.field(default_factory=list)
+     # conditions to filter the records
+     conditions: QueryCondition = msgspec.field(default_factory=list)
+     # sort the records by one or more fields
+     sorted_by: Sequence[FieldOrderClause] = msgspec.field(default_factory=list)
+     # group the records by one or more fields
+     group_by: Sequence[FieldGroupClause] = msgspec.field(default_factory=list)
+     # join with other classes
+     join_conditions: Sequence[JoinCondition] = msgspec.field(default_factory=list)
+     # whether to return unique records
+     unique: bool = False
+     # maximum number of records to return
+     limit: Annotated[int, msgspec.Meta(le=1000, ge=1)] = 10
+     # number of records to skip before returning results
+     offset: Annotated[int, msgspec.Meta(ge=0)] = 0
+     # whether to return the total number of records that match the query
+     return_total: bool = False
+
+     def validate_and_normalize(
+         self,
+         cls: Class,
+         allowed_fields: set[str],
+         allowed_join_fields: Optional[dict[str, set[str]]] = None,
+         debug: bool = False,
+     ):
+         """Validate the query against the schema and normalize the field values.
+
+         Args:
+             cls: The class schema
+             allowed_fields: The set of allowed search field names
+             allowed_join_fields: The dict of allowed search fields in joined tables
+             debug: Whether to enable debug mode
+         """
+         if allowed_join_fields is None:
+             allowed_join_fields = {}
+
+         for field_name in self.fields:
+             if field_name not in cls.properties:
+                 if debug:
+                     raise HTTPException(
+                         status_code=status_codes.HTTP_400_BAD_REQUEST,
+                         detail=f"Invalid field name: {field_name}",
+                     )
+                 continue
+         for groupfield in self.group_by:
+             if groupfield.field not in allowed_fields:
+                 if debug:
+                     raise HTTPException(
+                         status_code=status_codes.HTTP_400_BAD_REQUEST,
+                         detail=f"Invalid group by field name: {groupfield.field}",
+                     )
+                 continue
+         for sortfield in self.sorted_by:
+             if sortfield.field not in allowed_fields:
+                 if debug:
+                     raise HTTPException(
+                         status_code=status_codes.HTTP_400_BAD_REQUEST,
+                         detail=f"Invalid sort by field name: {sortfield.field}",
+                     )
+                 continue
+         for clause in self.conditions:
+             # TODO: check if the operation is allowed for the field
+             if clause.field not in allowed_fields:
+                 if debug:
+                     raise HTTPException(
+                         status_code=status_codes.HTTP_400_BAD_REQUEST,
+                         detail=f"Invalid condition field name: {clause.field}",
+                     )
+                 continue
+
+             # normalize the value based on the field type
+             prop = cls.properties[clause.field]
+             assert isinstance(prop, DataProperty)
+             if prop.datatype.pytype.is_enum_type():
+                 # skip enum types -- their values are used directly as strings
+                 continue
+             clause.value = FieldTypeValidator.typemap[prop.datatype.pytype.type](
+                 clause.field, clause.value
+             )
+
+         for join_clause in self.join_conditions:
+             if join_clause.prop not in allowed_join_fields:
+                 if debug:
+                     raise HTTPException(
+                         status_code=status_codes.HTTP_400_BAD_REQUEST,
+                         detail=f"Invalid join property: {join_clause.prop}",
+                     )
+                 continue
+
+             target_prop = cls.properties[join_clause.prop]
+             if (
+                 isinstance(target_prop, DataProperty)
+                 and target_prop.db is not None
+                 and target_prop.db.foreign_key is not None
+             ):
+                 # handle the case where the ID column is also a foreign key
+                 target_class = target_prop.db.foreign_key
+             elif (
+                 isinstance(target_prop, ObjectProperty)
+                 and target_prop.target.db is not None
+             ):
+                 target_class = target_prop.target
+             else:
+                 if debug:
+                     raise HTTPException(
+                         status_code=status_codes.HTTP_400_BAD_REQUEST,
+                         detail=f"Invalid join property: {join_clause.prop}",
+                     )
+                 continue
+
+             for field in join_clause.fields:
+                 if field not in target_class.properties:
+                     if debug:
+                         raise HTTPException(
+                             status_code=status_codes.HTTP_400_BAD_REQUEST,
+                             detail=f"Invalid join field: {join_clause.prop}.{field}",
+                         )
+                     continue
+             for condition in join_clause.conditions:
+                 if condition.field not in allowed_join_fields[join_clause.prop]:
+                     if debug:
+                         raise HTTPException(
+                             status_code=status_codes.HTTP_400_BAD_REQUEST,
+                             detail=f"Invalid join condition field: {join_clause.prop}.{condition.field}",
+                         )
+                     continue
+
+                 # normalize the value based on the field type
+                 target_prop = target_class.properties[condition.field]
+                 assert isinstance(target_prop, DataProperty)
+                 if target_prop.datatype.pytype.is_enum_type():
+                     # skip enum types -- their values are used directly as strings
+                     continue
+                 condition.value = FieldTypeValidator.typemap[
+                     target_prop.datatype.pytype.type
+                 ](condition.field, condition.value)
+
+     def prepare_results(
+         self, cls: Class, dataschema: dict[str, type], result: QueryResult
+     ) -> dict:
+         output = {}
+         if result.total is not None:
+             output["total"] = result.total
+
+         for join_clause in self.join_conditions:
+             prop = cls.properties[join_clause.prop]
+             if (
+                 isinstance(prop, DataProperty)
+                 and prop.db is not None
+                 and prop.db.foreign_key is not None
+             ):
+                 target_cls = prop.db.foreign_key
+                 cardinality = Cardinality.ONE_TO_ONE
+                 # the property storing the SQLAlchemy relationship of the foreign key
+                 source_relprop = prop.name + "_relobj"
+             else:
+                 assert isinstance(prop, ObjectProperty)
+                 target_cls = prop.target
+                 cardinality = prop.cardinality
+                 source_relprop = prop.name
+
+             target_name = target_cls.name
+             assoc_targetrel_name = to_snake_case(target_name)
+
+             deser_func = dataschema[target_name].from_db
+
+             if target_name not in output:
+                 output[target_name] = []
+
+             if cardinality == Cardinality.MANY_TO_MANY:
+                 # for many-to-many, we go through a middle object (the association
+                 # table); because it's a list, we don't need to handle outer joins,
+                 # as the list never contains null values
+                 output[target_name].extend(
+                     deser_func(getattr(item, assoc_targetrel_name))
+                     for record in result.records
+                     for item in getattr(record, source_relprop)
+                 )
+             elif cardinality == Cardinality.ONE_TO_MANY:
+                 # A -> B is 1:N with A.id stored in B; this is not supported in
+                 # SERA yet, so we do not need to implement it
+                 raise NotImplementedError()
+             else:
+                 if join_clause.join_type != "inner":
+                     # outer joins can yield null values; bind the walrus target in
+                     # the filter so it is assigned before it is tested
+                     output[target_name].extend(
+                         deser_func(val)
+                         for record in result.records
+                         if (val := getattr(record, source_relprop)) is not None
+                     )
+                 else:
+                     output[target_name].extend(
+                         deser_func(getattr(record, source_relprop))
+                         for record in result.records
+                     )
+
+         deser_func = dataschema[cls.name].from_db
+         output[cls.name] = [deser_func(record) for record in result.records]
+
+         return output
+
+
+ class FieldTypeValidator:
+
+     @staticmethod
+     def normalize_str(field: str, val: Any):
+         if not isinstance(val, str):
+             raise HTTPException(
+                 status_code=status_codes.HTTP_400_BAD_REQUEST,
+                 detail=f"Invalid value for field '{field}': Expected string",
+             )
+         return val
+
+     @staticmethod
+     def normalize_int(field: str, val: Any):
+         if not isinstance(val, int):
+             raise HTTPException(
+                 status_code=status_codes.HTTP_400_BAD_REQUEST,
+                 detail=f"Invalid value for field '{field}': Expected int",
+             )
+         return val
+
+     @staticmethod
+     def normalize_float(field: str, val: Any):
+         if not isinstance(val, float):
+             raise HTTPException(
+                 status_code=status_codes.HTTP_400_BAD_REQUEST,
+                 detail=f"Invalid value for field '{field}': Expected float",
+             )
+         return val
+
+     @staticmethod
+     def normalize_bool(field: str, val: Any):
+         if not isinstance(val, bool):
+             raise HTTPException(
+                 status_code=status_codes.HTTP_400_BAD_REQUEST,
+                 detail=f"Invalid value for field '{field}': Expected bool",
+             )
+         return val
+
+     @staticmethod
+     def normalize_date(field: str, val: Any):
+         if not isinstance(val, str):
+             raise HTTPException(
+                 status_code=status_codes.HTTP_400_BAD_REQUEST,
+                 detail=f"Invalid value for field '{field}': Expected date string",
+             )
+
+         try:
+             # parse an ISO-format date string into a date object
+             parsed_date = (
+                 datetime.fromisoformat(val.replace("Z", "+00:00"))
+                 .astimezone(timezone.utc)
+                 .date()
+             )
+             return parsed_date
+         except ValueError:
+             raise HTTPException(
+                 status_code=status_codes.HTTP_400_BAD_REQUEST,
+                 detail=f"Invalid date format for field '{field}': Expected ISO format (YYYY-MM-DD or YYYY-MM-DDTHH:MM:SS)",
+             )
+
+     @staticmethod
+     def normalize_datetime(field: str, val: Any):
+         if not isinstance(val, str):
+             raise HTTPException(
+                 status_code=status_codes.HTTP_400_BAD_REQUEST,
+                 detail=f"Invalid value for field '{field}': Expected datetime string",
+             )
+
+         try:
+             # parse an ISO-format datetime string into a datetime object
+             parsed_dt = datetime.fromisoformat(val.replace("Z", "+00:00")).astimezone(
+                 timezone.utc
+             )
+             return parsed_dt
+         except ValueError:
+             raise HTTPException(
+                 status_code=status_codes.HTTP_400_BAD_REQUEST,
+                 detail=f"Invalid datetime format for field '{field}': Expected ISO format (YYYY-MM-DDTHH:MM:SS)",
+             )
+
+     typemap = {
+         "str": normalize_str,
+         "int": normalize_int,
+         "float": normalize_float,
+         "bool": normalize_bool,
+         "date": normalize_date,
+         "datetime": normalize_datetime,
+     }
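For orientation, a payload decoded into the new Query struct looks like the following. This is a minimal sketch: the struct, operator, and field-clause names come from the definitions above, while the entity fields ("name", "created_at") and the standalone msgspec decoding call are illustrative assumptions, not code shipped in the package.

import msgspec

from sera.libs.search_helper import Query, QueryOp

# hypothetical search payload for the generated POST /q endpoint;
# the entity fields "name" and "created_at" are made up for illustration
payload = b'''{
    "fields": ["id", "name"],
    "conditions": [
        {"field": "name", "op": "fuzzy", "value": "ada"},
        {"field": "created_at", "op": "gte", "value": "2024-01-01T00:00:00Z"}
    ],
    "sorted_by": [{"field": "created_at", "order": "desc"}],
    "limit": 20,
    "return_total": true
}'''

# msgspec validates the shape, the enum values, and the declared
# limit/offset bounds while decoding
query = msgspec.json.decode(payload, type=Query)
assert query.conditions[0].op is QueryOp.fuzzy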
sera/make/make_app.py CHANGED
@@ -7,6 +7,7 @@ from typing import Annotated
  
  from codegen.models import DeferredVar, PredefinedFn, Program, expr, stmt
  from loguru import logger
+
  from sera.make.make_python_api import make_python_api
  from sera.make.make_python_model import (
      make_python_data_model,
@@ -14,10 +15,8 @@ from sera.make.make_python_model import (
      make_python_relational_model,
  )
  from sera.make.make_python_services import make_python_service_structure
- from sera.make.make_typescript_model import (
-     make_typescript_data_model,
-     make_typescript_enum,
- )
+ from sera.make.make_typescript_model import make_typescript_data_model
+ from sera.make.ts_frontend.make_enums import make_typescript_enums
  from sera.misc import Formatter
  from sera.models import App, DataCollection, parse_schema
  from sera.typing import Language
@@ -170,7 +169,7 @@ def make_app(
          # generate services
          make_python_service_structure(app, collections)
      elif language == Language.Typescript:
-         make_typescript_enum(schema, app.models)
+         make_typescript_enums(schema, app.models)
          make_typescript_data_model(schema, app.models)
  
      Formatter.get_instance().process()
sera/make/make_python_api.py CHANGED
@@ -22,7 +22,7 @@ def make_python_api(app: App, collections: Sequence[DataCollection]):
      route = app.api.pkg("routes").pkg(collection.get_pymodule_name())
  
      controllers = []
-     controllers.append(make_python_get_api(collection, route))
+     controllers.append(make_python_search_api(collection, route))
      controllers.append(make_python_get_by_id_api(collection, route))
      controllers.append(make_python_has_api(collection, route))
      controllers.append(make_python_create_api(collection, route))
@@ -58,6 +58,12 @@ def make_python_api(app: App, collections: Sequence[DataCollection]):
                          ]
                      ),
                  ),
+                 PredefinedFn.keyword_assignment(
+                     "tags",
+                     PredefinedFn.list(
+                         [expr.ExprConstant(collection.get_pymodule_name())]
+                     ),
+                 ),
              ],
          ),
      ),
@@ -107,7 +113,7 @@ def make_main(target_pkg: Package, routes: Sequence[Module]):
      outmod.write(program)
  
  
- def make_python_get_api(
+ def make_python_search_api(
      collection: DataCollection, target_pkg: Package
  ) -> tuple[Module, str]:
      """Make an endpoint for querying resources"""
@@ -117,10 +123,8 @@
      import_helper = ImportHelper(program, GLOBAL_IDENTS)
  
      program.import_("__future__.annotations", True)
-     program.import_("typing.Annotated", True)
-     program.import_("litestar.get", True)
-     program.import_("litestar.Request", True)
-     program.import_("litestar.params.Parameter", True)
+     program.import_("litestar.post", True)
+     program.import_(app.config.path + ".schema", True)
      program.import_(app.config.path + ".API_DEBUG", True)
      program.import_(
          app.services.path
@@ -128,34 +132,43 @@
          True,
      )
      program.import_(
-         app.models.data.path + f".{collection.get_pymodule_name()}.{collection.name}",
+         app.models.path + ".data_schema.dataschema",
          True,
      )
-     program.import_("sera.libs.api_helper.parse_query", True)
-
-     func_name = "get_"
+     program.import_("sera.libs.search_helper.Query", True)
  
-     queryable_fields = []
-     for propname, (
-         convert_func,
-         convert_func_import,
-     ) in collection.get_queryable_fields():
-         program.import_(convert_func_import, True)
-         queryable_fields.append(
-             (expr.ExprConstant(propname), expr.ExprIdent(convert_func))
-         )
+     func_name = "search"
  
      program.root(
          stmt.LineBreak(),
-         lambda ast00: ast00.assign(
+         lambda ast: ast.assign(
              DeferredVar.simple("QUERYABLE_FIELDS"),
-             PredefinedFn.dict(queryable_fields),
+             PredefinedFn.set(
+                 [
+                     expr.ExprConstant(propname)
+                     for propname in collection.get_queryable_fields()
+                 ]
+             ),
+         ),
+         stmt.LineBreak(),
+         lambda ast: ast.assign(
+             DeferredVar.simple("JOIN_QUERYABLE_FIELDS"),
+             PredefinedFn.dict(
+                 [
+                     (
+                         expr.ExprConstant(propname),
+                         PredefinedFn.set([expr.ExprConstant(f) for f in fields]),
+                     )
+                     for propname, fields in collection.get_join_queryable_fields().items()
+                 ]
+             ),
          ),
+         stmt.LineBreak(),
          stmt.PythonDecoratorStatement(
              expr.ExprFuncCall(
-                 expr.ExprIdent("get"),
+                 expr.ExprIdent("post"),
                  [
-                     expr.ExprConstant("/"),
+                     expr.ExprConstant("/q"),
                  ],
              )
          ),
@@ -163,44 +176,8 @@
              func_name,
              [
                  DeferredVar.simple(
-                     "limit",
-                     expr.ExprIdent(
-                         'Annotated[int, Parameter(default=10, description="The maximum number of records to return")]'
-                     ),
-                 ),
-                 DeferredVar.simple(
-                     "offset",
-                     type=expr.ExprIdent(
-                         'Annotated[int, Parameter(default=0, description="The number of records to skip before returning results")]'
-                     ),
-                 ),
-                 DeferredVar.simple(
-                     "unique",
-                     expr.ExprIdent(
-                         'Annotated[bool, Parameter(default=False, description="Whether to return unique results only")]'
-                     ),
-                 ),
-                 DeferredVar.simple(
-                     "sorted_by",
-                     expr.ExprIdent(
-                         "Annotated[list[str], Parameter(default=tuple(), description=\"list of field names to sort by, prefix a field with '-' to sort that field in descending order\")]"
-                     ),
-                 ),
-                 DeferredVar.simple(
-                     "group_by",
-                     expr.ExprIdent(
-                         'Annotated[list[str], Parameter(default=tuple(), description="list of field names to group by")]'
-                     ),
-                 ),
-                 DeferredVar.simple(
-                     "fields",
-                     expr.ExprIdent(
-                         'Annotated[list[str], Parameter(default=tuple(), description="list of field names to include in the results")]'
-                     ),
-                 ),
-                 DeferredVar.simple(
-                     "request",
-                     expr.ExprIdent("Request"),
+                     "data",
+                     expr.ExprIdent("Query"),
                  ),
                  DeferredVar.simple(
                      "session",
@@ -223,19 +200,27 @@
                      [],
                  ),
              ),
-             lambda ast101: ast101.assign(
-                 DeferredVar.simple("query", expr.ExprIdent("ServiceQuery")),
+             stmt.SingleExprStatement(
                  expr.ExprFuncCall(
-                     expr.ExprIdent("parse_query"),
+                     PredefinedFn.attr_getter(
+                         expr.ExprIdent("data"),
+                         expr.ExprIdent("validate_and_normalize"),
+                     ),
                      [
-                         expr.ExprIdent("request"),
+                         PredefinedFn.item_getter(
+                             PredefinedFn.attr_getter(
+                                 expr.ExprIdent("schema"), expr.ExprIdent("classes")
+                             ),
+                             expr.ExprConstant(collection.cls.name),
+                         ),
                          expr.ExprIdent("QUERYABLE_FIELDS"),
+                         expr.ExprIdent("JOIN_QUERYABLE_FIELDS"),
                          PredefinedFn.keyword_assignment(
                              "debug",
                              expr.ExprIdent("API_DEBUG"),
                          ),
                      ],
-                 ),
+                 )
             ),
             lambda ast102: ast102.assign(
                 DeferredVar.simple("result"),
@@ -243,67 +228,34 @@
                      expr.ExprFuncCall(
                          PredefinedFn.attr_getter(
                              expr.ExprIdent("service"),
-                             expr.ExprIdent("get"),
+                             expr.ExprIdent("search"),
                          ),
-                         [
-                             expr.ExprIdent("query"),
-                             PredefinedFn.keyword_assignment(
-                                 "limit", expr.ExprIdent("limit")
-                             ),
-                             PredefinedFn.keyword_assignment(
-                                 "offset", expr.ExprIdent("offset")
-                             ),
-                             PredefinedFn.keyword_assignment(
-                                 "unique", expr.ExprIdent("unique")
-                             ),
-                             PredefinedFn.keyword_assignment(
-                                 "sorted_by", expr.ExprIdent("sorted_by")
-                             ),
-                             PredefinedFn.keyword_assignment(
-                                 "group_by", expr.ExprIdent("group_by")
-                             ),
-                             PredefinedFn.keyword_assignment(
-                                 "fields", expr.ExprIdent("fields")
-                             ),
-                             PredefinedFn.keyword_assignment(
-                                 "session", expr.ExprIdent("session")
-                             ),
-                         ],
+                         [expr.ExprIdent("data"), expr.ExprIdent("session")],
                      )
                 ),
             ),
             lambda ast103: ast103.return_(
-                 PredefinedFn.dict(
+                 expr.ExprFuncCall(
+                     PredefinedFn.attr_getter(
+                         expr.ExprIdent("data"),
+                         expr.ExprIdent("prepare_results"),
+                     ),
                      [
-                         (
-                             PredefinedFn.attr_getter(
-                                 expr.ExprIdent(collection.name),
-                                 expr.ExprIdent("__name__"),
-                             ),
-                             PredefinedFn.map_list(
-                                 PredefinedFn.attr_getter(
-                                     expr.ExprIdent("result"), expr.ExprIdent("records")
-                                 ),
-                                 lambda item: expr.ExprMethodCall(
-                                     expr.ExprIdent(collection.name),
-                                     "from_db",
-                                     [item],
-                                 ),
-                             ),
-                         ),
-                         (
-                             expr.ExprConstant("total"),
+                         PredefinedFn.item_getter(
                              PredefinedFn.attr_getter(
-                                 expr.ExprIdent("result"), expr.ExprIdent("total")
+                                 expr.ExprIdent("schema"), expr.ExprIdent("classes")
                              ),
+                             expr.ExprConstant(collection.cls.name),
                          ),
-                     ]
+                         expr.ExprIdent("dataschema"),
+                         expr.ExprIdent("result"),
+                     ],
                  )
             ),
         ),
     )
  
-     outmod = target_pkg.module("get")
+     outmod = target_pkg.module("search")
      outmod.write(program)
  
      return outmod, func_name
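Taken together, these codegen changes replace the generated GET endpoint and its many query parameters with a single POST /q endpoint that accepts a Query struct. As a rough hand-written sketch, not actual generator output, the emitted route module for a hypothetical Book collection would look roughly like this; the myapp.* module paths, the Book and BookService names, and the service wiring are assumptions, while the /q route, the Query parameter, and the validate_and_normalize / search / prepare_results sequence follow from the diff above.

from __future__ import annotations

from typing import Any

from litestar import post

from myapp.config import API_DEBUG, schema        # assumed app config module
from myapp.models.data_schema import dataschema   # assumed app schema module
from myapp.services.book import BookService       # assumed service class
from sera.libs.search_helper import Query

QUERYABLE_FIELDS = {"title", "year"}
JOIN_QUERYABLE_FIELDS = {"author": {"name"}}


@post("/q")
async def search(data: Query, session: Any) -> dict:
    service = BookService.get_instance()  # assumed wiring; real output differs
    # validate requested fields/conditions against the schema, normalize values
    data.validate_and_normalize(
        schema.classes["Book"],
        QUERYABLE_FIELDS,
        JOIN_QUERYABLE_FIELDS,
        debug=API_DEBUG,
    )
    result = await service.search(data, session)
    # deserialize matched records (and joined records) into the response payload
    return data.prepare_results(schema.classes["Book"], dataschema, result)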