polyapi-python 0.3.1.dev1__py3-none-any.whl → 0.3.1.dev2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
polyapi/parser.py ADDED
@@ -0,0 +1,519 @@
1
+ import ast
2
+ import json
3
+ import types
4
+ import sys
5
+ import re
6
+ from typing import Dict, List, Mapping, Optional, Tuple, Any
7
+ from typing import _TypedDictMeta as BaseTypedDict # type: ignore
8
+ from typing_extensions import _TypedDictMeta # type: ignore
9
+ from stdlib_list import stdlib_list
10
+ from pydantic import TypeAdapter
11
+ from importlib.metadata import packages_distributions
12
+ from polyapi.constants import PYTHON_TO_JSONSCHEMA_TYPE_MAP
13
+ from polyapi.utils import print_red
14
+ from polyapi.deployables import Deployment, DeployableRecord, get_deployable_file_revision
15
+
16
+
17
+ # these libraries are already installed in the base docker image
18
+ # and shouldnt be included in additional requirements
19
+ BASE_REQUIREMENTS = {
20
+ "polyapi",
21
+ "requests",
22
+ "typing_extensions",
23
+ "jsonschema-gentypes",
24
+ "pydantic",
25
+ "cloudevents",
26
+ }
27
+ all_stdlib_symbols = stdlib_list(".".join([str(v) for v in sys.version_info[0:2]]))
28
+ BASE_REQUIREMENTS.update(
29
+ all_stdlib_symbols
30
+ ) # dont need to pip install stuff in the python standard library
31
+
32
+
33
+ def _parse_sphinx_docstring(docstring: str) -> Dict[str, Any]:
34
+ """
35
+ Parses a Sphinx-style docstring to extract parameters, return values, and descriptions.
36
+
37
+ :param docstring: Docstring content in reST format.
38
+ :type docstring: str
39
+ :return: A dictionary with descriptions, parameters, and return values.
40
+ :rtype: Dict[str, Any]
41
+ """
42
+ lines = docstring.split('\n')
43
+ description = []
44
+ params = {}
45
+ returns = {
46
+ "description": "",
47
+ "type": "Any"
48
+ }
49
+ current_section = None
50
+
51
+ for line in lines:
52
+ stripped_line = line.strip()
53
+ if stripped_line.startswith(":param "):
54
+ # Example line: :param x: This is x
55
+ param_name, _, param_desc = stripped_line[7:].partition(":")
56
+ param_name = param_name.strip()
57
+ if param_name in params:
58
+ params[param_name]["description"] = param_desc.strip()
59
+ else:
60
+ params[param_name] = { "name": param_name, "type": "", "description": param_desc.strip() }
61
+ current_section = param_name
62
+
63
+ elif stripped_line.startswith(":type "):
64
+ # Example line: :type x: int
65
+ param_name, _, param_type = stripped_line[6:].partition(":")
66
+ param_name = param_name.strip()
67
+ if param_name in params:
68
+ params[param_name]["type"] = param_type.strip()
69
+ else:
70
+ params[param_name] = { "name": param_name, "type": param_type.strip(), "description": "" }
71
+
72
+ elif stripped_line.startswith(":returns: "):
73
+ # Example line: :returns: This returns x
74
+ return_desc = stripped_line[10:].strip()
75
+ returns["description"] = return_desc
76
+ current_section = "returns"
77
+
78
+ elif stripped_line.startswith(":rtype: "):
79
+ # Example line: :rtype: int
80
+ return_type = stripped_line[8:].strip()
81
+ returns["type"] = return_type
82
+
83
+ elif current_section and not stripped_line.startswith(":"):
84
+ # Append continued description lines to the last param or return section
85
+ if current_section == "returns":
86
+ returns["description"] += ' ' + stripped_line
87
+ else:
88
+ params[current_section]["description"] += " " + stripped_line
89
+
90
+ elif not stripped_line.startswith(":"):
91
+ # Normal description line
92
+ description.append(stripped_line)
93
+
94
+ return {
95
+ "description": '\n'.join(description).strip(),
96
+ "params": list(params.values()),
97
+ "returns": returns
98
+ }
99
+
100
+
101
+ def _parse_google_docstring(docstring: str) -> Dict[str, Any]:
102
+ import re
103
+ lines = docstring.split('\n')
104
+ mode = None
105
+ params = {}
106
+ parsed = {
107
+ 'description': [],
108
+ 'params': [],
109
+ 'returns': {'description': []},
110
+ 'raises': {}
111
+ }
112
+ current_key = None
113
+
114
+ # Regex to capture the parts of the parameter and the start of type/exception sections
115
+ arg_pattern = re.compile(r'^\s*(\w+)\s*(\(.*?\))?:(.*)')
116
+ section_pattern = re.compile(r'^\s*(Args|Returns|Raises):')
117
+
118
+ for line in lines:
119
+ line = line.rstrip()
120
+ section_match = section_pattern.match(line)
121
+
122
+ if section_match:
123
+ mode = section_match.group(1).lower()
124
+ continue
125
+
126
+ if mode == 'args':
127
+ arg_match = arg_pattern.match(line)
128
+ if arg_match:
129
+ current_key = arg_match.group(1)
130
+ type_desc = arg_match.group(2) if arg_match.group(2) else ''
131
+ description = arg_match.group(3).strip()
132
+ params[current_key] = {'name': current_key, 'type': type_desc.strip('() '), 'description': [description]}
133
+ elif current_key:
134
+ params[current_key]['description'].append(line.strip())
135
+
136
+ elif mode == 'returns':
137
+ if not parsed['returns']['description']:
138
+ ret_type, _, desc = line.partition(':')
139
+ parsed['returns']['type'] = ret_type.strip()
140
+ parsed['returns']['description'].append(desc.strip())
141
+ else:
142
+ parsed['returns']['description'].append(line.strip())
143
+
144
+ elif mode == 'raises':
145
+ if ':' in line:
146
+ exc_type, desc = line.split(':', 1)
147
+ parsed['raises'][exc_type.strip()] = desc.strip()
148
+ elif current_key:
149
+ parsed['raises'][current_key] += ' ' + line.strip()
150
+
151
+ elif mode is None:
152
+ parsed['description'].append(line.strip())
153
+
154
+ # Consolidate descriptions
155
+ parsed['description'] = ' '.join(parsed['description']).strip()
156
+ parsed['returns']['description'] = ' '.join(parsed['returns']['description']).strip()
157
+ parsed['params'] = [{ **v, 'description': ' '.join(v['description']).strip() } for v in params.values()]
158
+
159
+ return parsed
160
+
161
def _get_schemas(code: str) -> List[Dict]:
    """Execute the user's code and collect JSON schemas for every
    typing_extensions TypedDict it defines.

    Exits the process with an error if a plain `typing.TypedDict` is used,
    since it cannot be introspected as richly as the typing_extensions one.
    """
    namespace = types.SimpleNamespace()
    # NOTE: runs the user's own function code to discover its type definitions.
    exec(code, namespace.__dict__)

    collected: List[Dict] = []
    for symbol, value in namespace.__dict__.items():
        if isinstance(value, BaseTypedDict):
            print_red("ERROR")
            print_red("\nERROR DETAILS: ")
            print(
                "It looks like you have used TypedDict in a custom function. Please use `from typing_extensions import TypedDict` instead. The `typing_extensions` version is more powerful and better allows us to provide rich types for your function."
            )
            sys.exit(1)
        if (
            isinstance(value, type)
            and isinstance(value, _TypedDictMeta)
            and symbol != "TypedDict"
        ):
            collected.append(TypeAdapter(value).json_schema())
    return collected
180
+
181
+
182
def get_jsonschema_type(python_type: str):
    """Map a Python type name string to the JSON-schema type string used by the server.

    Unknown names fall through unchanged on the assumption they are custom
    (schema-backed) types.
    """
    if python_type == "Any":
        return "Any"

    if python_type == "List":
        return "array"

    if python_type.startswith("List["):
        # the actual type will be returned as return_type_schema
        inner = python_type[5:-1]
        if inner == "Any":
            return "any[]"
        if inner in ("int", "float", "str", "bool"):
            return f"{PYTHON_TO_JSONSCHEMA_TYPE_MAP.get(inner)}[]"
        # complex item type: the accompanying schema will describe it
        return "object"

    if python_type.startswith("Dict"):
        return "object"

    # Plain scalar mapping, else assume a custom type name.
    return PYTHON_TO_JSONSCHEMA_TYPE_MAP.get(python_type) or python_type
210
+
211
+
212
def get_python_type_from_ast(expr: ast.expr) -> str:
    """Best-effort translation of an annotation AST node into a Python type string.

    Handles plain names (``int``), ``List[...]`` with a simple named item type,
    and ``Dict[key, value]``; anything else degrades to ``"Any"``.

    :param expr: the annotation expression node
    :return: a type string such as ``"int"``, ``"List[str]"``, ``"Dict[str, int]"``
    """
    if isinstance(expr, ast.Name):
        return str(expr.id)
    if isinstance(expr, ast.Subscript):
        container = getattr(expr.value, "id", "")
        if container == "List":
            # Only a simple named item type is recognized; nested subscripts become Any.
            item = getattr(expr.slice, "id", "Any")
            return f"List[{item}]"
        if container == "Dict":
            if expr.slice and isinstance(expr.slice, ast.Tuple):
                # FIX: use .elts — Tuple.dims is a deprecated backward-compat
                # alias slated for removal.
                key = get_python_type_from_ast(expr.slice.elts[0])
                value = get_python_type_from_ast(expr.slice.elts[1])
                return f"Dict[{key}, {value}]"
            return "Dict"
        return "Any"
    return "Any"
231
+
232
+
233
+ def _get_type_schema(json_type: str, python_type: str, schemas: List[Dict]):
234
+ if python_type.startswith("List["):
235
+ subtype = python_type[5:-1]
236
+ for schema in schemas:
237
+ if schema["title"] == subtype:
238
+ return {"type": "array", "items": schema}
239
+
240
+ # subtype somehow not in schema, just call it any
241
+ return None
242
+ else:
243
+ for schema in schemas:
244
+ if schema["title"] == json_type:
245
+ return schema
246
+
247
+
248
+ def _get_type(expr: ast.expr | None, schemas: List[Dict]) -> Tuple[str, Dict | None]:
249
+ if not expr:
250
+ return "any", "Any", None
251
+ python_type = get_python_type_from_ast(expr)
252
+ json_type = get_jsonschema_type(python_type)
253
+ return json_type, python_type, _get_type_schema(json_type, python_type, schemas)
254
+
255
+
256
+
257
def _get_req_name_if_not_in_base(
    n: Optional[str], pip_name_lookup: Mapping[str, List[str]]
) -> Optional[str]:
    """Map an imported module name to its pip distribution name, or None when the
    module is part of the base image / standard library.

    :param n: imported module name (possibly dotted, e.g. ``foo.bar``)
    :param pip_name_lookup: import-name -> [distribution names] mapping
    :return: the pip requirement name, or None when no install is needed
    """
    if not n:
        return None

    # `import foo.bar` depends on the top-level package's distribution.
    if "." in n:
        n = n.split(".", 1)[0]

    if n in BASE_REQUIREMENTS:
        return None
    # The pip name and the import name may differ (kube_hunter vs kube-hunter).
    # FIX: fall back to the import name itself instead of raising KeyError for
    # modules with no installed distribution (e.g. local modules).
    return pip_name_lookup.get(n, [n])[0]
270
+
271
+
272
+ def _parse_deploy_comment(comment: str) -> Optional[Deployment]:
273
+ # Poly deployed @ 2024-08-29T22:46:46.791Z - test.weeklyReport - https://develop-k8s.polyapi.io/canopy/polyui/collections/server-functions/f0630f95-eac8-4c7d-9d23-639d39034bb6 - e3b0c44
274
+ pattern = r"^\s*(?:#\s*)*Poly deployed @ (\S+) - (\S+)\.([^.]+) - (https?:\/\/[^\/]+)\/\S+\/(\S+)s\/(\S+) - (\S+)$"
275
+ match = re.match(pattern, comment)
276
+ if not match:
277
+ return None
278
+
279
+ deployed, context, name, instance, deploy_type, id, file_revision = match.groups()
280
+
281
+ # Local development puts canopy on a different port than the poly-server
282
+ if instance.endswith("localhost:3000"):
283
+ instance = instance.replace(":3000', ':8000")
284
+
285
+ return {
286
+ "name": name,
287
+ "context": context,
288
+ "type": deploy_type,
289
+ "id": id,
290
+ "deployed": deployed,
291
+ "fileRevision": file_revision,
292
+ "instance": instance
293
+ }
294
+
295
+
296
+ def _parse_dict(node):
297
+ """Recursively parse an ast.Dict node into a Python dictionary."""
298
+ result = {}
299
+ for key, value in zip(node.keys, node.values):
300
+ parsed_key = _parse_value(key) # Keys can be other expressions too
301
+ parsed_value = _parse_value(value)
302
+ result[parsed_key] = parsed_value
303
+ return result
304
+
305
+
306
+ def _parse_value(value):
307
+ """Parse a value from different possible AST nodes to Python data."""
308
+ if isinstance(value, ast.Constant):
309
+ return value.value # Handles str, int, float, NoneType, etc.
310
+ elif isinstance(value, ast.Dict):
311
+ return _parse_dict(value)
312
+ elif isinstance(value, ast.List):
313
+ return [_parse_value(item) for item in value.elts]
314
+ elif isinstance(value, ast.Name):
315
+ return value.id # Could be a variable reference
316
+ else:
317
+ return None
318
+
319
+
320
def parse_function_code(code: str, name: Optional[str] = "", context: Optional[str] = ""):
    """Parse a deployable's source code into a DeployableRecord.

    Extracts the polyConfig, docstring-derived types, imports (as pip
    dependencies), and previously written deploy-receipt comments.

    :param code: full source of the deployable file
    :param name: optional function name override (else taken from polyConfig)
    :param context: optional context override (else taken from polyConfig)
    :return: the populated DeployableRecord
    """
    schemas = _get_schemas(code)

    # the pip name and the import name might be different
    # e.g. kube_hunter is the import name, but the pip name is kube-hunter
    # see https://stackoverflow.com/a/75144378
    pip_name_lookup = packages_distributions()

    deployable: DeployableRecord = {
        "context": context,
        "name": name,
        "description": "",
        "config": {},
        "gitRevision": "",
        "fileRevision": "",
        "file": "",
        "types": {
            "description": "",
            "params": [],
            "returns": {
                "type": "",
                "description": "",
            }
        },
        "typeSchemas": {},
        "dependencies": [],
        "deployments": [],
        "deploymentCommentRanges": [],
        "docStartIndex": -1,
        "docEndIndex": -1,
        "dirty": False,
    }

    class FunctionParserVisitor(ast.NodeVisitor):
        """
        Custom visitor so that we can keep track of the global offsets of text
        so we can easily generate replacements later.
        """

        def __init__(self):
            self._name = name
            # Keep line endings so offsets index into the raw source exactly.
            self._lines = code.splitlines(keepends=True)
            self._current_offset = 0
            # _line_offsets[i] == character offset of the start of line i.
            self._line_offsets = [0]
            for i in range(1, len(self._lines)):
                self._line_offsets.append(
                    self._line_offsets[i - 1] + len(self._lines[i - 1])
                )

            self._extract_deploy_comments()

        def visit_AnnAssign(self, node):
            """Visit an assignment and check if it's defining a polyConfig."""
            self.generic_visit(node)  # Continue to visit children first

            if (
                isinstance(node.target, ast.Name)
                and node.target.id == "polyConfig"
                and isinstance(node.annotation, ast.Name)
            ):
                # We've found a polyConfig dictionary assignment
                if node.annotation.id == "PolyServerFunction":
                    deployable["type"] = "server-function"
                elif node.annotation.id == "PolyClientFunction":
                    # BUGFIX: was mislabeled "server-function" (copy-paste).
                    deployable["type"] = "client-function"
                else:
                    print_red("ERROR")
                    # BUGFIX: removed stray `$` (JS template-literal syntax).
                    print(f"Unsupported polyConfig type '{node.annotation.id}'")
                    sys.exit(1)
                deployable["config"] = _parse_dict(node.value)
                self._name = deployable["config"]["name"]

        def _extract_docstring_from_function(self, node: ast.FunctionDef):
            """Locate the function's docstring, record its char range, and parse it."""
            start_lineno = (node.body[0].lineno if node.body else node.lineno) - 1
            start_offset = self._line_offsets[start_lineno]
            end_offset = start_offset
            deployable["docStartIndex"] = start_offset
            deployable["docEndIndex"] = end_offset

            docstring = ast.get_docstring(node)
            # Handle the case where there is no docstring at all, and the case
            # where the docstring exists but is empty (bare quotes on the line).
            # BUGFIX: `type(docstring) is None` was always False; compare the
            # value itself.
            if docstring is None or (
                not docstring
                and '"""' not in self._lines[start_lineno]
                and "'''" not in self._lines[start_lineno]
            ):
                return None

            # Support both types of triple quotation marks
            pattern = '"""'
            str_offset = self._lines[start_lineno].find(pattern)
            if str_offset == -1:
                pattern = "'''"
                str_offset = self._lines[start_lineno].find(pattern)
            start_offset += str_offset
            # Determine end_offset for multiline or single line doc strings by
            # searching until we hit the end of the opening pattern. We do this
            # manually because the docstring from the ast excludes the
            # quotation marks and whitespace.
            if self._lines[start_lineno].find(pattern, str_offset + 3) == -1:
                end_offset = start_offset
                for i in range(start_lineno + 1, len(self._lines)):
                    end_offset = self._line_offsets[i]
                    str_offset = self._lines[i].find(pattern)
                    if str_offset >= 0:
                        end_offset += str_offset + 3
                        break
            else:
                end_offset += len(self._lines[start_lineno]) - 1

            deployable["docStartIndex"] = start_offset
            deployable["docEndIndex"] = end_offset

            # Check if the docstring is likely Google Docstring format
            # https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html
            if 'Args:' in docstring or 'Returns:' in docstring:
                deployable["types"] = _parse_google_docstring(docstring)
            else:
                deployable["types"]["description"] = docstring.strip()

        def _extract_deploy_comments(self):
            """Collect deploy-receipt comments from the top-of-file comment block."""
            for i in range(len(self._lines)):
                line = self._lines[i].strip()
                if line and not line.startswith("#"):
                    # Stop at the first non-comment, non-blank line.
                    return
                deployment = _parse_deploy_comment(line)
                if deployment:
                    deployable["deployments"].append(deployment)
                    # NOTE(review): second element is the stripped line's length,
                    # not an end offset — confirm consumers expect [start, length].
                    deployable["deploymentCommentRanges"].append([self._line_offsets[i], len(line)])

        def visit_Import(self, node: ast.Import):
            # TODO maybe handle `import foo.bar` case?
            for name in node.names:
                req = _get_req_name_if_not_in_base(name.name, pip_name_lookup)
                if req:
                    deployable["dependencies"].append(req)

        def visit_ImportFrom(self, node: ast.ImportFrom):
            if node.module:
                req = _get_req_name_if_not_in_base(node.module, pip_name_lookup)
                if req:
                    deployable["dependencies"].append(req)

        def visit_FunctionDef(self, node: ast.FunctionDef):
            if node.name != self._name:
                return
            # Parse docstring which may contain param types and descriptions
            self._extract_docstring_from_function(node)
            docstring_params = deployable["types"]["params"]
            parsed_params = []
            # Parse params from the actual function and merge in docstring data.
            for arg in node.args.args:
                _, python_type, type_schema = _get_type(arg.annotation, schemas)
                json_arg = {
                    "name": arg.arg,
                    "type": python_type,
                    "description": "",
                }
                if type_schema:
                    json_arg["typeSchema"] = json.dumps(type_schema)

                if docstring_params:
                    # BUGFIX: next() previously had no default and raised
                    # StopIteration when a param was missing from the docstring.
                    type_index = next(
                        (i for i, d in enumerate(docstring_params) if d["name"] == arg.arg),
                        -1,
                    )
                    if type_index >= 0:
                        json_arg["description"] = docstring_params[type_index]["description"]
                        if docstring_params[type_index]["type"] != python_type:
                            deployable["dirty"] = True
                    else:
                        # Param absent from the docstring: docs need regeneration.
                        deployable["dirty"] = True

                parsed_params.append(json_arg)
            deployable["types"]["params"] = parsed_params
            if node.returns:
                _, python_type, return_type_schema = _get_type(node.returns, schemas)
                if deployable["types"]["returns"]["type"] != python_type:
                    deployable["dirty"] = True
                deployable["types"]["returns"]["type"] = python_type
                # NOTE(review): stored as a raw dict while param schemas are
                # json.dumps'd strings — confirm which form the server expects.
                deployable["types"]["returns"]["typeSchema"] = return_type_schema
            else:
                deployable["types"]["returns"]["type"] = "Any"

        def generic_visit(self, node):
            # Track the global character offset of the last positioned node.
            if hasattr(node, 'lineno') and hasattr(node, 'col_offset'):
                self._current_offset = self._line_offsets[node.lineno - 1] + node.col_offset
            super().generic_visit(node)

    tree = ast.parse(code)
    visitor = FunctionParserVisitor()
    visitor.visit(tree)

    # Setting some top-level config values for convenience
    deployable["context"] = context or deployable["config"].get("context", "")
    deployable["name"] = name or deployable["config"].get("name", "")
    deployable["disableAi"] = deployable["config"].get("disableAi", False)
    deployable["description"] = deployable["types"].get("description", "")
    if not deployable["name"]:
        print_red("ERROR")
        print("Function config is missing a name.")
        sys.exit(1)

    deployable["fileRevision"] = get_deployable_file_revision(code)

    return deployable
519
+
polyapi/prepare.py ADDED
@@ -0,0 +1,135 @@
1
+ import os
2
+ import sys
3
+ from typing import List, Tuple, Literal
4
+ import requests
5
+
6
+ from polyapi.parser import parse_function_code
7
+ from polyapi.deployables import (
8
+ prepare_deployable_directory, write_cache_revision,
9
+ save_deployable_records, get_all_deployable_files,
10
+ is_cache_up_to_date, get_git_revision,
11
+ write_updated_deployable, DeployableRecord
12
+ )
13
+
14
class FunctionArgumentDto:
    """Plain DTO describing one function argument sent to the description API."""

    def __init__(self, name, type, description=None):
        # Argument name, type string, and optional human-readable description.
        self.name = name
        self.type = type
        self.description = description
19
+
20
def get_function_description(deploy_type: Literal["server-function", "client-function"], description: str, arguments, code: str) -> str:
    """Dispatch to the description generator matching the deployable type.

    :raises ValueError: for any unrecognized deployable type
    """
    generators = {
        "server-function": get_server_function_description,
        "client-function": get_client_function_description,
    }
    generator = generators.get(deploy_type)
    if generator is None:
        raise ValueError("Unsupported deployable type")
    return generator(description, arguments, code)
27
+
28
def get_server_function_description(description: str, arguments, code: str) -> str:
    """Call the docs service to generate a server-function description.

    NOTE(review): the URL is a placeholder ("your-api-url") — wire up the real
    endpoint before relying on this.
    """
    data = {"description": description, "arguments": arguments, "code": code}
    # FIX: timeout added — requests.post with no timeout can hang indefinitely.
    response = requests.post("http://your-api-url/server-function-description", json=data, timeout=30)
    return response.json()
33
+
34
def get_client_function_description(description: str, arguments, code: str) -> str:
    """Call the docs service to generate a client-function description.

    NOTE(review): the URL is a placeholder ("your-api-url") — wire up the real
    endpoint before relying on this.
    """
    data = {"description": description, "arguments": arguments, "code": code}
    # FIX: timeout added — requests.post with no timeout can hang indefinitely.
    response = requests.post("http://your-api-url/client-function-description", json=data, timeout=30)
    return response.json()
39
+
40
def fill_in_missing_function_details(deployable: "DeployableRecord", code: str) -> "DeployableRecord":
    """Use the docs service to fill any missing descriptions on a function deployable.

    Mutates and returns the same record. Generation failures are logged and
    swallowed so preparation can continue without AI docs.

    :param deployable: the parsed deployable record (mutated in place)
    :param code: the deployable's full source, sent to the docs service
    """
    is_missing_descriptions = (
        not deployable["types"]["description"] or
        not deployable["types"]["returns"]["description"] or
        any(not param["description"] for param in deployable["types"]["params"])
    )
    if is_missing_descriptions:
        try:
            ai_generated = get_function_description(
                deployable["type"],
                deployable["types"]["description"],
                [{"name": p["name"], "type": p["type"], "description": p.get("description")} for p in deployable["types"]["params"]],
                code
            )
            if not deployable["types"]["description"] and ai_generated.get("description"):
                deployable["types"]["description"] = ai_generated["description"]
                deployable["dirty"] = True

            # Merge AI argument descriptions onto the parsed params positionally.
            # BUGFIX: the old zip() truncated params to the AI list's length,
            # silently dropping every param when the service returned none.
            # NOTE(review): merged param descriptions don't set "dirty" — confirm
            # whether that's intentional.
            ai_args = ai_generated.get("arguments", [])
            merged = []
            for idx, p in enumerate(deployable["types"]["params"]):
                ai_arg = ai_args[idx] if idx < len(ai_args) else None
                if ai_arg and ai_arg.get("description"):
                    merged.append({**p, "description": ai_arg["description"]})
                else:
                    merged.append(p)
            deployable["types"]["params"] = merged
        except Exception as e:
            print(f"Failed to generate descriptions due to: {str(e)}")
    return deployable
65
+
66
def fill_in_missing_details(deployable: DeployableRecord, code: str) -> DeployableRecord:
    """Route a deployable to the detail-filling routine for its type.

    :raises ValueError: for deployable types with no filling routine
    """
    deploy_type = deployable["type"]
    if deploy_type not in ("server-function", "client-function"):
        raise ValueError(f'Unsupported deployable type: "{deploy_type}"')
    return fill_in_missing_function_details(deployable, code)
71
+
72
+
73
def get_base_url() -> str:
    """Placeholder for resolving the project base directory; currently the cwd."""
    return "."
76
+
77
def get_all_deployables(disable_docs: bool, disable_ai: bool, git_revision: str) -> List[DeployableRecord]:
    """Discover, parse, and (optionally) AI-augment every deployable under the base dir.

    Duplicate context.name pairs are reported and skipped (first one wins).
    """
    print("Searching for poly deployables.")
    base_url = get_base_url() or "."
    candidates = get_all_deployable_files({"includeDirs": [base_url]})
    plural = "s" if len(candidates) != 1 else ""
    print(f'Found {len(candidates)} possible deployable file{plural}.')

    found = {}
    for candidate in candidates:
        deployable, code = parse_deployable(candidate, base_url, git_revision)
        full_name = f'{deployable["context"]}.{deployable["name"]}'
        kind = deployable["type"].replace("-", " ")
        if full_name in found:
            print(f'ERROR: Prepared {kind} {full_name}: DUPLICATE')
            continue
        if not disable_ai and not deployable.get("disableAi", False):
            deployable = fill_in_missing_details(deployable, code)
        found[full_name] = deployable
        status = "UPDATED" if deployable.get("dirty", False) and not disable_docs else "OK"
        print(f'Prepared {kind} {full_name}: {status}')

    return list(found.values())
97
+
98
def parse_deployable(file_path: str, base_url: str, git_revision: str) -> Tuple[DeployableRecord, str]:
    """Read a candidate file and parse it into a (deployable record, source) pair."""
    with open(file_path, "r", encoding="utf-8") as fh:
        source = fh.read()

    record = parse_function_code(source)
    record["gitRevision"] = git_revision
    record["file"] = file_path
    return record, source
109
+
110
def prepare_deployables(lazy: bool = False, disable_docs: bool = False, disable_ai: bool = False) -> None:
    """Parse all deployables, write back any dirty files, and cache the records.

    :param lazy: skip all work when the deployables cache is already up to date
    :param disable_docs: don't write regenerated docstrings back to files
    :param disable_ai: don't call the AI docs service for missing descriptions
    """
    if lazy and is_cache_up_to_date():
        print("Poly deployments are prepared.")
        return

    print("Preparing Poly deployments...")

    prepare_deployable_directory()
    git_revision = get_git_revision()
    # Parse deployable files
    parsed_deployables = get_all_deployables(disable_docs, disable_ai, git_revision)
    if not parsed_deployables:
        print("No deployable files found. Did you define a `polyConfig` within your deployment?")
        # FIX: was `return sys.exit(1)` — sys.exit never returns, so the
        # `return` was dead and misleading.
        sys.exit(1)
    dirty_deployables = [d for d in parsed_deployables if d["dirty"]]
    if dirty_deployables:
        # Write back deployables files with updated comments
        print(f'Fixing {len(dirty_deployables)} deployable file{"" if len(dirty_deployables) == 1 else "s"}.')
        # NOTE: write_updated_deployable has side effects that update deployable.fileRevision
        # which is in both this list and parsed_deployables
        for deployable in dirty_deployables:
            write_updated_deployable(deployable, disable_docs)

    print("Poly deployments are prepared.")
    save_deployable_records(parsed_deployables)
    write_cache_revision(git_revision)
    # BUGFIX: message previously said "mode_modules".
    print("Cached deployables and generated typedefs into node_modules/.poly/deployables directory.")