opentf-toolkit-nightly 0.63.0.dev1421__py3-none-any.whl → 0.63.0.dev1431__py3-none-any.whl

This diff compares the contents of two publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
opentf/commons/__init__.py CHANGED
@@ -44,7 +44,6 @@ from .exceptions import ConfigError
  from .pubsub import make_dispatchqueue, make_event, publish, subscribe, unsubscribe
  from .schemas import *

-
  ########################################################################
  # Constants

opentf/commons/auth.py CHANGED
@@ -25,7 +25,6 @@ import re
  from .config import ConfigError
  from .schemas import validate_schema

-
  ########################################################################

  POLICY = 'abac.opentestfactory.org/v1alpha1/Policy'
opentf/commons/config.py CHANGED
@@ -27,7 +27,6 @@ import yaml
  from .exceptions import ConfigError
  from .schemas import read_and_validate, SERVICECONFIG

-
  ########################################################################

  NOTIFICATION_LOGGER_EXCLUSIONS = 'eventbus'
opentf/commons/meta.py CHANGED
@@ -16,7 +16,6 @@

  from typing import Dict, Iterable, Optional, Tuple

-
  ########################################################################
  ## Constants

opentf/commons/pubsub.py CHANGED
@@ -28,7 +28,6 @@ from requests import delete, post, Response

  from .schemas import SUBSCRIPTION, validate_schema

-
  ########################################################################
  # Publishers & Subscribers Helpers

opentf/commons/schemas.py CHANGED
@@ -27,7 +27,6 @@ from yaml import safe_load

  import opentf.schemas

-
  ########################################################################
  # Schemas

opentf/schemas/opentestfactory.org/v1alpha1/InsightCollector.json CHANGED
@@ -64,6 +64,12 @@
      },
      "output-prefix": {
        "type": "string"
+     },
+     "verbose": {
+       "type": "string"
+     },
+     "show-notifications": {
+       "type": "string"
      }
    },
    "additionalProperties": false
opentf/scripts/startup.py CHANGED
@@ -14,7 +14,7 @@

  """Startup script for allinone images."""

- from typing import Any, Dict, List, Optional, Set, Tuple, Union
+ from typing import Any, Dict, List, Optional, Tuple, Union

  from datetime import datetime
  from importlib.metadata import version
@@ -35,7 +35,6 @@ import yaml

  import opentf

-
  LOGGING_FORMAT = '[%(asctime)s] %(levelname)s in startup: %(message)s'
  if os.environ.get('DEBUG') or os.environ.get('OPENTF_DEBUG'):
      logging.basicConfig(level=logging.DEBUG, format=LOGGING_FORMAT)
@@ -64,7 +63,6 @@ ENVIRONMENT_VARIABLES = {
      'OPENTF_BASE_URL': None,
      'OPENTF_CONTEXT': 'allinone',
      'OPENTF_DEBUG': 'INFO',
-     'OPENTF_DISABLED_PLUGINS': None,
      'OPENTF_EVENTBUS_WARMUPDELAY': 2,
      'OPENTF_EVENTBUS_WARMUPURL': 'http://127.0.0.1:38368/subscriptions',
      'OPENTF_EVENTBUSCONFIG': 'conf/eventbus.yaml',
@@ -221,7 +219,7 @@ SERVICES = set()


  def parse_and_start(
-     paths: List[str], item: str, disabled: Optional[Set[str]] = None
+     paths: List[str], item: str, disabled: Optional[List[str]] = None
  ) -> List[Any]:
      """Lookup item manifests and start them if not disabled."""
      result = []
@@ -302,7 +300,7 @@ def maybe_start_otelcol():
          else ''
      )
      cmd = '"/usr/local/bin/otelcol --config=file:/app/otelcol-config.yaml $OTELCOL_EXTRA_OPTIONS"'
-     logging.info('Starting OpenTelemetry Collector%s...', options_msg)
+     logging.info(f'Starting OpenTelemetry Collector{options_msg}...')
      pid = subprocess.Popen(f'sh -c {cmd}', shell=True)
      logging.debug('(pid is %d.)', pid.pid)
      return [pid]
@@ -390,17 +388,17 @@ def start_services(conf: Dict[str, Any]) -> List[Any]:


  def start_plugins(conf: Dict[str, Any]) -> List[Any]:
      """Lookup plugins and start them."""
-     disabled = set(conf.get('disabled', {}))
+     if disabled := conf.get('disabled'):
+         disabled = [plugin.lower() for plugin in disabled]
+     else:
+         disabled = []
      if (v := _get_env('OPENTF_ALLURE_ENABLED')) is None or (
          v.lower() not in ('true', 'yes', 'on', '1')
      ):
-         disabled.add('allure.collector')
-         disabled.add('result.aggregator')
-     if v := _get_env('OPENTF_DISABLED_PLUGINS'):
-         disabled |= set(v.split(','))
+         disabled.append('allure.collector')
+         disabled.append('result.aggregator')
      if aggregated := conf.get('aggregated'):
-         disabled |= set(aggregated)
-     disabled = {plugin.strip().lower() for plugin in disabled if plugin.strip()}
+         disabled += [plugin.lower() for plugin in aggregated]
      plugins = []
      for entry in conf['plugins']:
          plugins += parse_and_start(_expand(entry), PLUGIN_DESCRIPTOR, disabled)
@@ -524,10 +522,10 @@ def _ensure_abac_if_defined(name, value):
      if value:
          if not OPENTF_AUTHORIZATION_MODE:
              logging.error(
-                 '%s is defined but OPENTF_AUTHORIZATION_MODE is undefined.'
-                 ' OPENTF_AUTHORIZATION_MODE must include "ABAC" to use %s.',
-                 name,
-                 name,
+                 '{0} is defined but OPENTF_AUTHORIZATION_MODE is undefined.'
+                 ' OPENTF_AUTHORIZATION_MODE must include "ABAC" to use {0}.'.format(
+                     name
+                 )
              )
              sys.exit(1)
          if 'ABAC' not in OPENTF_AUTHORIZATION_MODE.split(','):
opentf/toolkit/__init__.py CHANGED
@@ -49,7 +49,6 @@ from opentf.commons import (
  from opentf.commons.meta import read_category_labels, maybe_set_category_labels
  from opentf.toolkit import core

-
  ########################################################################

  SUBSCRIPTION_KEY = '__subscription uuid__'
opentf/toolkit/channels.py CHANGED
@@ -14,10 +14,8 @@

  """Toolkit helpers for channels plugins."""

-
  from typing import Any, Callable, Dict, List, NamedTuple, Optional, Set, Tuple, Union

- from copy import deepcopy
  from datetime import datetime
  from shlex import quote

@@ -47,6 +45,7 @@ STOPCOMMANDS_COMMAND = re.compile(r'^::stop-commands::(\w+)$')
  ADDMASK_COMMAND = re.compile(r'^::add-mask::(.*)$')
  PUT_FILE_COMMAND = re.compile(r'^::put\s+file=(.*?)\s*::(.*?)\s*$')

+
  ## step sequence IDs

  CHANNEL_REQUEST = -1
@@ -210,31 +209,26 @@ def _add_default_variables(
      script.append(VARIABLE_MAKER[runner_os]('CI', 'true'))


- def get_opentf_variables_path(metadata: Dict[str, Any]) -> str:
+ def _get_opentf_variables_path(metadata: Dict[str, Any]) -> str:
      return VARIABLES_TEMPLATE[metadata['channel_os']].format(
          job_id=metadata['job_id'], root=metadata['channel_temp']
      )


- def _read_opentf_variables(lines: List[str]) -> Dict[str, str]:
-     variables = {}
-     for line in lines:
-         if '=' not in line:
-             continue
-         line = line.strip()
-         if set_export := OPENTF_VARIABLES_REGEX.match(line):
-             line = set_export.group(2)
-         if line.startswith('"'):
-             line = line[1:-1]
-         key, _, value = line.partition('=')
-         if OPENTF_VARIABLES_NAME_REGEX.match(key):
-             variables[key] = value
-     return variables
-
-
  def _get_opentf_variables(path: str) -> Dict[str, str]:
+     variables = {}
      with open(path, 'r') as f:
-         variables = _read_opentf_variables(f.readlines())
+         for line in f.readlines():
+             if '=' not in line:
+                 continue
+             line = line.strip()
+             if set_export := OPENTF_VARIABLES_REGEX.match(line):
+                 line = set_export.group(2)
+             if line.startswith('"'):
+                 line = line[1:-1]
+             key, _, value = line.partition('=')
+             if OPENTF_VARIABLES_NAME_REGEX.match(key):
+                 variables[key] = value
      try:
          os.remove(path)
      except FileNotFoundError:
@@ -313,10 +307,6 @@ class JobState:
      def __init__(self) -> None:
          self.stop_command: Optional[str] = None
          self.masks: List[str] = []
-         self.output_commands: List[str] = []
-         self.attachments: List[str] = []
-         self.attachments_metadata: Dict[str, Any] = {}
-         self.artifacts_resp: bool = False


  def mask(line: str, state: JobState) -> str:
@@ -336,9 +326,6 @@ def _as_log(line: str, jobstate: JobState):
      return mask(line, jobstate).rstrip()


- ## artifacts
-
-
  def process_upload(result: Dict[str, Any]) -> Dict[str, Any]:
      """Process ExecutionResult event containing .metadata.upload flag.

@@ -434,13 +421,12 @@ def _download_artifacts(

  def process_output(
      event: Dict[str, Any],
-     resp: Optional[int],
+     resp: int,
      stdout: List[str],
      stderr: List[str],
      jobstate: JobState,
      _get: Callable[[str, str], None],
      _put: Callable[[str, str], None],
-     variables: Optional[List[str]] = None,
  ) -> Dict[str, Any]:
      """Process output, filling structures.

@@ -545,16 +531,9 @@ def process_output(
      outputs = {}
      logs: List[str] = []
      attachments: List[str] = []
-     attachments_metadata: Dict[str, Any] = {}
-
-     is_channel_notify = False
-     step_sequence_id = metadata['step_sequence_id']
-     if resp is None:
-         resp, is_channel_notify = 0, True
-     if not is_channel_notify and jobstate.output_commands:
-         stdout.extend(jobstate.output_commands)
-         jobstate.output_commands.clear()
+     attachments_metadata = {}

+     has_artifacts = False
      for line in stdout:
          # Parsing stdout for workflow commands
          if jobstate.stop_command:
@@ -562,14 +541,10 @@ def process_output(
              jobstate.stop_command = None
              continue

-         if is_channel_notify and SETOUTPUT_COMMAND.match(line):
-             jobstate.output_commands.append(line)
-             continue
-
          if wcmd := ATTACH_COMMAND.match(line):
              resp = _attach(wcmd.group(2), wcmd.group(1))
          elif wcmd := UPLOAD_COMMAND.match(line):
-             jobstate.artifacts_resp = True
+             has_artifacts = True
              resp = _attach(wcmd.group(2), wcmd.group(1), is_artifact=True)
          elif wcmd := DOWNLOAD_COMMAND.match(line):
              resp = _download(wcmd.group(2), wcmd.group(1))
@@ -587,48 +562,23 @@ def process_output(
      for line in stderr:
          logs.append(mask(line, jobstate).rstrip())

-     if is_channel_notify:
-         notify = make_event(EXECUTIONRESULT, metadata=deepcopy(metadata), status=resp)
-         notify['metadata']['step_sequence_id_origin'] = metadata['step_sequence_id']
-         notify['metadata']['step_sequence_id'] = CHANNEL_NOTIFY
-         notify['logs'] = logs
-         if notify['metadata'].get('artifacts'):
-             del notify['metadata']['artifacts']
-         if attachments:
-             jobstate.attachments.extend(attachments)
-             jobstate.attachments_metadata.update(attachments_metadata)
-         return notify
-
      if metadata.get('artifacts'):
          del metadata['artifacts']

-     opentf_variables = None
-     if step_sequence_id != CHANNEL_RELEASE:
-         if variables:
-             opentf_variables = _read_opentf_variables(variables)
-         else:
-             _attach(
-                 get_opentf_variables_path(metadata), f'type={OPENTF_VARIABLES_TYPE}'
-             )
+     if metadata['step_sequence_id'] != CHANNEL_RELEASE:
+         _attach(_get_opentf_variables_path(metadata), f'type={OPENTF_VARIABLES_TYPE}')

      result = make_event(EXECUTIONRESULT, metadata=metadata, status=resp)
      if outputs:
          result['outputs'] = outputs
      if logs:
          result['logs'] = logs
-     if attachments or jobstate.attachments:
-         result['attachments'] = jobstate.attachments + attachments
-         result['metadata']['attachments'] = {
-             **jobstate.attachments_metadata,
-             **attachments_metadata,
-         }
-         jobstate.attachments.clear()
-         jobstate.attachments_metadata = {}
-     if jobstate.artifacts_resp:
+     if attachments:
+         result['attachments'] = attachments
+         result['metadata']['attachments'] = attachments_metadata
+     if has_artifacts:
          result['metadata']['upload'] = resp
-         jobstate.artifacts_resp = False
-     if opentf_variables:
-         result['variables'] = opentf_variables
+
      return result


opentf/toolkit/core.py CHANGED
@@ -14,7 +14,6 @@

  """Toolkit core functions."""

-
  from typing import Any, Dict, Iterable, List, NoReturn, Optional

  from copy import deepcopy
@@ -34,7 +33,6 @@ from opentf.commons import (
  )
  from opentf.commons.exceptions import ExecutionError

-
  ########################################################################
  # Default Plugin registration

@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: opentf-toolkit-nightly
- Version: 0.63.0.dev1421
+ Version: 0.63.0.dev1431
  Summary: OpenTestFactory Orchestrator Toolkit
  Home-page: https://gitlab.com/henixdevelopment/open-source/opentestfactory/python-toolkit
  Author: Martin Lafaix
@@ -17,7 +17,7 @@ Requires-Python: >= 3.9.0
  Description-Content-Type: text/markdown
  License-File: LICENSE
  Requires-Dist: requests>=2.32
- Requires-Dist: PyJWT[crypto]>=2.10
+ Requires-Dist: PyJWT[crypto]>=2.9
  Requires-Dist: PyYAML>=6
  Requires-Dist: Flask<4,>=3
  Requires-Dist: jsonschema>=4.23
@@ -1,11 +1,11 @@
- opentf/commons/__init__.py,sha256=arFXmrY-ViO8htpI53s2kz4sGNawutTjFPn0jNsfx8Y,23004
- opentf/commons/auth.py,sha256=yUmAoZPk9Aru2UVT5xSjH96u9DOKPk17AeL1_12mjBM,16399
- opentf/commons/config.py,sha256=r84DKgZNr50Wgwv2iBmDY3nNIS-7Dq2JtliR9btTGts,10454
+ opentf/commons/__init__.py,sha256=efUb_2GNOu3AHQci0ORDrXSL9Cs5iZ9Bt93LrYv5KZk,23003
+ opentf/commons/auth.py,sha256=R7xPWZjvAIXebrqNnUIE6UmwOYal8UyjS3ME3Yk0-IY,16398
+ opentf/commons/config.py,sha256=E-6akcwoI88XdN1sMCdY-IYd_Ajva4mpi5FLGGBKcrE,10453
  opentf/commons/exceptions.py,sha256=7dhUXO8iyAbqVwlUKxZhgRzGqVcb7LkG39hFlm-VxIA,2407
  opentf/commons/expressions.py,sha256=jM_YKXVOFhvOE2aE2IuacuvxhIsOYTFs2oQkpcbWR6g,19645
- opentf/commons/meta.py,sha256=ygSO3mE2d-Ux62abzK1wYk86noT4R5Tumd90nyZo0MU,3322
- opentf/commons/pubsub.py,sha256=M0bvajR9raUP-xe5mfRjdrweZyHQw1_Qsy56gS-Sck4,7676
- opentf/commons/schemas.py,sha256=LT8SlkUcJGtqbnUUDT0U1KsQqTEXjl-ShMny332DkMQ,14042
+ opentf/commons/meta.py,sha256=lfAX4PYDCwgbMBIatpj-CM9UdQxVnoAc0WaiPFdEISk,3321
+ opentf/commons/pubsub.py,sha256=Tp1awPvX15iRdQGVV06BYIgYJGXmZ1zr_3ITRA4T5QQ,7675
+ opentf/commons/schemas.py,sha256=D79M5BKqTXzov1LiDcgGx_grzGmqaPupcALcpBA4v9E,14041
  opentf/commons/selectors.py,sha256=2mmnvfZ13KizBQLsIvHXPU0Qtf6hkIvJpYdejNRszUs,7203
  opentf/schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  opentf/schemas/abac.opentestfactory.org/v1alpha1/Policy.json,sha256=JXsfNAPSEYggeyaDutSQBeG38o4Bmcr70dPLWWeqIh8,2105
@@ -29,7 +29,7 @@ opentf/schemas/opentestfactory.org/v1alpha1/ExecutionError.json,sha256=tz8ZggvjS
  opentf/schemas/opentestfactory.org/v1alpha1/ExecutionResult.json,sha256=UeWc4TfRY3G1CnMapFxXWRunaXzZdxOIle3DxURSf-A,3287
  opentf/schemas/opentestfactory.org/v1alpha1/GeneratorCommand.json,sha256=uxbqDhP4newgz-85TnGKbchx448QEQ8WB5PXpcJy2ME,1754
  opentf/schemas/opentestfactory.org/v1alpha1/GeneratorResult.json,sha256=LkHLGt2uam1Q5Ux0zP_O9oFgxBMCjD3Th3BsfsXxd1g,6633
- opentf/schemas/opentestfactory.org/v1alpha1/InsightCollector.json,sha256=mPYt6vuRlW2nq_hOHP1ssk1vXiaOKugzMwRiPm3FzTw,17940
+ opentf/schemas/opentestfactory.org/v1alpha1/InsightCollector.json,sha256=CuYa9SODRHrljnP4zztKZQyr-oiB4v_juAoNtIonmJM,18265
  opentf/schemas/opentestfactory.org/v1alpha1/Notification.json,sha256=V-Yd7yQR6r8135cDrnh0W-ugQvtSvKpHQiNRoMP1N9g,8496
  opentf/schemas/opentestfactory.org/v1alpha1/PluginMetadata.json,sha256=BLklO7CObT4OpAEsQT60WJ1ttOcG71hIYzgN-e7Ch9k,2803
  opentf/schemas/opentestfactory.org/v1alpha1/ProviderCommand.json,sha256=soe0imdnnq1mfGEpcLJvF3JVUIrF-7FFECc7CzNzobI,2875
@@ -56,13 +56,12 @@ opentf/schemas/opentestfactory.org/v1beta1/ServiceConfig.json,sha256=m5ZgWAKbutu
  opentf/schemas/opentestfactory.org/v1beta1/Workflow.json,sha256=QZ8mM9PhzsI9gTmwmKTWYNoRn--rtcM3L0PzgnPBfMU,15424
  opentf/schemas/opentestfactory.org/v1beta2/ServiceConfig.json,sha256=rEvK2YWL5lG94_qYgR_GnLWNsaQhaQ-2kuZdWJr5NnY,3517
  opentf/scripts/launch_java_service.sh,sha256=S0jAaCuv2sZy0Gf2NGBuPX-eD531rcM-b0fNyhmzSjw,2423
- opentf/scripts/startup.py,sha256=DLanDaXutUTYcG2PwoJ34QH-5G0TwfLUY_xy1VkVOqA,23202
- opentf/toolkit/__init__.py,sha256=YnH66dmePAIU7dq_xWFYTIEUrsL9qV9f82LRDiBzbzs,22057
- opentf/toolkit/channels.py,sha256=BQh5ztQmIKpxns6ozDNto4YpegktydPZyhOO9F3g-2Q,27731
- opentf/toolkit/core.py,sha256=jMBDIYZ8Qn3BvsysfKoG0iTtjOnZsggetpH3eXygCsI,9636
- opentf/toolkit/models.py,sha256=PNfXVQbeyOwDfaNrLjcfhYm6duMSlNWBtZsWZcs53ag,6583
- opentf_toolkit_nightly-0.63.0.dev1421.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
- opentf_toolkit_nightly-0.63.0.dev1421.dist-info/METADATA,sha256=YBdxneEq5WD_IKStBxx6xDLMaoU15v3yOzNn-Qe0KmY,2215
- opentf_toolkit_nightly-0.63.0.dev1421.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- opentf_toolkit_nightly-0.63.0.dev1421.dist-info/top_level.txt,sha256=_gPuE6GTT6UNXy1DjtmQSfCcZb_qYA2vWmjg7a30AGk,7
- opentf_toolkit_nightly-0.63.0.dev1421.dist-info/RECORD,,
+ opentf/scripts/startup.py,sha256=h6JL2L378P6PlJnGS2r1py8pnDTvBUqGj73F8jJgW_k,23118
+ opentf/toolkit/__init__.py,sha256=z-r5CuVcMpBC3rH04YbL9_aqWVS8HsEe1F1shTSv4Bo,22056
+ opentf/toolkit/channels.py,sha256=Tp9YlxqYM0yFy3WHXgbEnLBsXqdrTqrKypV5i8ysb5A,25903
+ opentf/toolkit/core.py,sha256=n1vLlWOjXI6RBmVaj6ozv_isYuVk-xTTKv3Ah694A7o,9634
+ opentf_toolkit_nightly-0.63.0.dev1431.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ opentf_toolkit_nightly-0.63.0.dev1431.dist-info/METADATA,sha256=xzhJ_wWDUZsGzuchyVivZ4f0dR6st70c3Crs95gjtms,2214
+ opentf_toolkit_nightly-0.63.0.dev1431.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+ opentf_toolkit_nightly-0.63.0.dev1431.dist-info/top_level.txt,sha256=_gPuE6GTT6UNXy1DjtmQSfCcZb_qYA2vWmjg7a30AGk,7
+ opentf_toolkit_nightly-0.63.0.dev1431.dist-info/RECORD,,
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (80.9.0)
+ Generator: setuptools (80.10.2)
  Root-Is-Purelib: true
  Tag: py3-none-any

opentf/toolkit/models.py DELETED
@@ -1,208 +0,0 @@
- # Copyright (c) 2025 Henix, Henix.fr
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
- """Handling models from plugins configuration files."""
-
- from typing import Any
-
- import os
-
- from opentf.commons import read_and_validate
- from opentf.toolkit import watch_file
-
- Model = dict[str, Any]
- Spec = dict[str, Any]
-
- ########################################################################
-
- IMG_MODELS = []
-
- ########################################################################
- ### Configuration loader helpers
-
-
- def deduplicate(
-     plugin,
-     models: list[Model],
- ) -> tuple[list[Model], set[str]]:
-     """Deduplicate models in a list.
-
-     # Required parameter
-
-     - models: a list of dictionaries (models), in increasing priority order.
-
-     # Returned value
-
-     A tuple containing a list of deduplicated models and a possibly empty
-     list of warnings.
-     """
-     seen = {}
-     name, kind = None, None
-     warnings = set()
-     for model in reversed(models):
-         key = (name, kind) = model.get('name'), model.get('kind')
-         if key not in seen:
-             seen[key] = model
-         else:
-             if model.get('.source') != 'default':
-                 msg = f'Duplicate definitions found for {plugin.name} {kind+' ' if kind else ''}"{name}", only the definition with the highest priority will be used.'
-                 warnings.add(msg)
-     if warnings:
-         for msg in warnings:
-             plugin.logger.warning(msg)
-     return list(reversed(list(seen.values()))), warnings
-
-
- def filter_listdir(plugin, path: str, kinds: tuple[str, ...]) -> list[str]:
-     """listdir-like, filtering for files with specified extensions."""
-     files = [
-         f
-         for f in os.listdir(path)
-         if os.path.isfile(os.path.join(path, f)) and f.endswith(kinds)
-     ]
-     if not files:
-         plugin.logger.debug('No %s files provided in %s.', ', '.join(kinds), path)
-     return sorted(files)
-
-
- def _read_models(
-     plugin, schema: str, configfile: str, config_key: str
- ) -> list[Model] | None:
-     """Read plugin models JSON or YAML and return models list."""
-     try:
-         models = read_and_validate(schema, configfile)
-     except ValueError as err:
-         plugin.logger.error(
-             'Invalid %s definition file "%s": %s. Ignoring.',
-             plugin.name,
-             configfile,
-             str(err),
-         )
-         return None
-
-     return models[config_key]
-
-
- def _load_image_models(
-     plugin, config_path: str, config_key: str, schema: str, default_models: list[Model]
- ) -> list[dict[str, Any]]:
-     """Load models from `CONFIG_PATH` directory.
-
-     Storing models and possible warnings in plugin.config['CONFIG'].
-     """
-     models = default_models
-     for config_file in filter_listdir(plugin, config_path, ('.yaml', '.yml')):
-         filepath = os.path.join(config_path, config_file)
-         try:
-             if not (img_models := _read_models(plugin, schema, filepath, config_key)):
-                 continue
-             plugin.logger.debug(
-                 'Loading %s models from file "%s".', plugin.name, config_file
-             )
-             models.extend(img_models)
-         except Exception as err:
-             raise ValueError(
-                 f'Failed to load {plugin.name} models from file "{config_file}": {str(err)}.'
-             )
-     models, warnings = deduplicate(plugin, models)
-     plugin.config['CONFIG'][config_key] = models
-     plugin.config['CONFIG']['warnings'] = warnings
-     return models
-
-
- def _refresh_configuration(
-     _, configfile: str, schema: str, plugin, config_key: str
- ) -> None:
-     """Read plugin models from environment variable specified file.
-
-     Storing models in .config['CONFIG'], using the following entries:
-
-     - {config_key}: a list of models
-     - warnings: a list of duplicate models warnings
-     """
-     try:
-         config = plugin.config['CONFIG']
-         models = IMG_MODELS.copy()
-         plugin.logger.info(
-             f'Reading {plugin.name} models definition from {configfile}.'
-         )
-         env_models = _read_models(plugin, schema, configfile, config_key) or []
-         models.extend(env_models)
-         config[config_key], config['warnings'] = deduplicate(plugin, models)
-     except Exception as err:
-         plugin.logger.error(
-             'Error while reading %s "%s" definition: %s.',
-             plugin.name,
-             configfile,
-             str(err),
-         )
-
-
- ########################################################################
- ### Configuration loader
-
-
- def load_and_watch_models(
-     plugin,
-     config_path: str,
-     config_key: str,
-     schema: str,
-     default_models: list[Model],
-     env_var: str,
- ) -> None:
-     """Load plugin configuration models.
-
-     Plugin configuration models are loaded from configuration files path
-     and filepath specified by the environment variable. File specified by the
-     environment variable is watched for modifications. Models list is stored
-     in `plugin.config['CONFIG'][{config_key}]` entry.
-
-     # Required parameters
-
-     - plugin: a Flask plugin
-     - config_path: a string, configuration models path, should be a directory
-     - config_key: a string, plugin configuration key name
-     - schema: a string, plugin models validation schema
-     - default_models: a list of plugin-specific default models
-     - env_var: a string, environment variable name
-
-     # Raised exception
-
-     ValueError is raised if configuration files path is not found or
-     is not a directory.
-     """
-     if not os.path.isdir(config_path):
-         raise ValueError(
-             f'Configuration files path "{config_path}" not found or not a directory.'
-         )
-
-     IMG_MODELS.extend(
-         _load_image_models(plugin, config_path, config_key, schema, default_models)
-     )
-
-     if os.environ.get(env_var):
-         watch_file(
-             plugin,
-             os.environ[env_var],
-             _refresh_configuration,
-             schema,
-             plugin,
-             config_key,
-         )
-
-     plugin.logger.info(
-         'Loading default %s definitions and definitions from "%s".',
-         plugin.name,
-         config_path,
-     )