opentf-toolkit-nightly 0.57.0.dev1051__py3-none-any.whl → 0.57.0.dev1071__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
opentf/commons/datasources.py CHANGED
@@ -14,7 +14,7 @@
 
 """Datasources (testcases, tags and jobs) retrieval helpers"""
 
-from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple
+from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Union
 
 from datetime import datetime
 
@@ -22,6 +22,7 @@ from datetime import datetime
 from flask import current_app
 
 from opentf.commons.expressions import evaluate_bool
+from opentf.commons.selectors import match_selectors
 
 
 ########################################################################
@@ -99,9 +100,11 @@ def parse_testcase_name(full_name: str) -> Tuple[str, str]:
 ## Datasource: Testcases
 
 
-def in_scope(expr: str, contexts: Dict[str, Any]) -> bool:
+def in_scope(expr: Union[str, bool], contexts: Dict[str, Any]) -> bool:
     """Safely evaluate datasource scope."""
     try:
+        if isinstance(expr, bool):
+            return expr
         return evaluate_bool(expr, contexts)
     except ValueError as err:
         raise ValueError(f'Invalid conditional {expr}: {err}.')
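As the hunk above shows, `in_scope` now accepts a literal boolean scope and returns it unchanged, only falling back to `evaluate_bool` for string expressions. A minimal illustration (not part of the diff; the empty context dict is a placeholder):

```python
from opentf.commons.datasources import in_scope

# The bool branch added above short-circuits: no expression evaluation,
# and therefore no ValueError, can occur for literal scopes.
assert in_scope(True, {}) is True
assert in_scope(False, {}) is False

# String scopes keep the previous behaviour: they are evaluated against the
# provided contexts, and an invalid conditional still raises ValueError.
```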
@@ -399,9 +402,7 @@ def _get_testresult_labels(
 
 
 def _make_testcase_from_testresult(
-    item: Dict[str, Any],
-    labels: Dict[str, Any],
-    scope: str,
+    item: Dict[str, Any], labels: Dict[str, Any], scope: Union[str, bool]
 ) -> Dict[str, Any]:
     suite_name, testcase_name = parse_testcase_name(item['name'])
     item_data = {
@@ -434,8 +435,55 @@ def _make_testcase_from_testresult(
     return testcase
 
 
+def _get_max_count(state: Dict[str, Any]) -> int:
+    if state['reset']:
+        return state['per_page'] * state['page']
+    return state['per_page']
+
+
+def _extract_testcases(
+    testresults: List[Dict[str, Any]],
+    state: Dict[str, Any],
+    scope: Union[str, bool],
+    events: List[Dict[str, Any]],
+) -> Dict[str, Dict[str, Any]]:
+    testcases = {}
+    items = 0
+    for i, testresult in enumerate(
+        testresults[state['last_notification_used'] :],
+        start=state['last_notification_used'],
+    ):
+        if i == state['last_notification_used']:
+            last_testresult_used = state['last_testresult_used']
+        else:
+            last_testresult_used = 0
+        execution_id = testresult['metadata']['attachment_origin'][0]
+        labels = _get_testresult_labels(execution_id, events)
+        if not labels:
+            continue
+        for j, item in enumerate(
+            testresult['spec']['testResults'][last_testresult_used:],
+            start=last_testresult_used,
+        ):
+            testcase = _make_testcase_from_testresult(item, labels, scope)
+            if not testcase:
+                continue
+            if not match_selectors(testcase, state['fieldselector']):
+                continue
+            testcases[item['id']] = testcase
+            items += 1
+            if items > _get_max_count(state):
+                state['last_notification_used'] = i
+                state['last_testresult_used'] = j
+                return testcases
+
+        state['last_notification_used'] = i + 1
+        state['last_testresult_used'] = 0
+    return testcases
+
+
 def get_testcases(
-    events: List[Dict[str, Any]], scope: str = 'true'
+    events: List[Dict[str, Any]], scope: Union[str, bool] = True, state=None
 ) -> Dict[str, Dict[str, Any]]:
     """Extract metadata for each test result.
 
@@ -489,6 +537,9 @@ def get_testcases(
     A _ValueError_ exception is raised if there were no test results in
     `events` or some scope errors occured retrieving test results.
     """
+    if not state:
+        raise ValueError('No workflow cache state received from observer.')
+
     if _uses_inception(events):
         testresults = _get_inception_testresults(events)
     else:
@@ -497,17 +548,10 @@ def get_testcases(
     if not testresults:
         raise ValueError('No test results in events.')
 
-    testcases = {}
-    for testresult in testresults:
-        execution_id = testresult['metadata']['attachment_origin'][0]
-        labels = _get_testresult_labels(execution_id, events)
-        if not labels:
-            continue
-        for item in testresult['spec']['testResults']:
-            if testcase := _make_testcase_from_testresult(item, labels, scope):
-                testcases[item['id']] = testcase
+    testcases = _extract_testcases(testresults, state, scope, events)
     if not testcases:
         raise ValueError(f'No test cases matching scope `{scope}`.')
+
     return testcases
 
 
@@ -538,7 +582,9 @@ def _make_tag_datasource(tag: str, parent: Dict[str, Any]) -> Dict[str, Any]:
     }
 
 
-def get_tags(events: List[Dict[str, Any]], scope: str = 'true') -> Dict[str, Any]:
+def get_tags(
+    events: List[Dict[str, Any]], scope: Union[str, bool] = True, state=None
+) -> Dict[str, Any]:
     """Extract metadata for each execution environment tag.
 
     # Required parameters:
@@ -574,7 +620,7 @@ def get_tags(events: List[Dict[str, Any]], scope: str = 'true') -> Dict[str, Any
             'No job events found in workflow. Cannot extract data for tags.'
         )
     try:
-        testcases = get_testcases(events, scope)
+        testcases = get_testcases(events, scope, state)
     except ValueError as err:
         if str(err).startswith('[SCOPE ERROR] '):
             raise ValueError(str(err))
@@ -711,7 +757,9 @@ def _make_job_datasource(
     }
 
 
-def get_jobs(events: List[Dict[str, Any]], scope: str = 'true') -> Dict[str, Any]:
+def get_jobs(
+    events: List[Dict[str, Any]], scope: Union[str, bool] = True, state=None
+) -> Dict[str, Any]:
     """Extract metadata for each job.
 
     # Required parameters:
@@ -764,7 +812,7 @@ def get_jobs(events: List[Dict[str, Any]], scope: str = 'true') -> Dict[str, Any
         )
 
     try:
-        testcases = get_testcases(events, scope)
+        testcases = get_testcases(events, scope, state)
     except ValueError as err:
         if str(err).startswith('[SCOPE ERROR] '):
             raise ValueError(str(err))
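Taken together, the datasources changes make `get_testcases` (and the `get_tags`/`get_jobs` wrappers) paginated and selector-aware: callers must now pass the observer's workflow cache `state`, and `_extract_testcases` resumes from the `last_notification_used`/`last_testresult_used` cursors, filters through `match_selectors`, and stops once `per_page` items (or `per_page * page` after a reset) have been collected. A hedged sketch of a caller, with the `state` keys inferred from the hunks above (the real cache kept by the observer may carry additional fields):

```python
from opentf.commons.datasources import get_testcases

# Illustrative state only: these are the keys read by _get_max_count() and
# _extract_testcases() in this diff, with example values.
state = {
    'page': 1,                    # requested page
    'per_page': 100,              # page size
    'reset': False,               # True re-collects pages 1..page in one pass
    'fieldselector': [],          # selectors handed to match_selectors()
    'last_notification_used': 0,  # resume cursor: notification index
    'last_testresult_used': 0,    # resume cursor: test result index
}

# `events` is the list of workflow events the caller already holds.
testcases = get_testcases(events, scope=True, state=state)

# After the call the two cursors point at the next unread test result, so a
# follow-up call with the same state continues where this one stopped.
```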
opentf/commons/pubsub.py CHANGED
@@ -108,14 +108,14 @@ def _do(req, path: str, eventbus: Dict[str, Any], **kwargs) -> Response:
     )
 
 
-def _dispatch_events(dispatch_queue: Queue, app) -> None:
+def _dispatch_events(dispatch_queue: Queue, fn, app) -> None:
     """Async event dispatch thread handler."""
     delay = 0
     while True:
         try:
             publication = dispatch_queue.get()
             try:
-                publish(publication, app.config['CONTEXT'])
+                fn(publication, app.config['CONTEXT'])
                 delay = 0
             except Exception:
                 dispatch_queue.put(publication)
@@ -125,34 +125,6 @@ def _dispatch_events(dispatch_queue: Queue, app) -> None:
             app.logger.error(f'Internal error while dispatching publication: {err}.')
 
 
-def make_dispatchqueue(app) -> Queue:
-    """Make an asynchronous dispatch queue.
-
-    Handles publication failures by waiting for an increasing delay and
-    re-attempting publication.
-
-    The delay is at most 60 seconds.
-
-    # Required parameters
-
-    - app: a flask app
-
-    # Returned value
-
-    A _queue_. Events pushed to this queue will be published.
-    """
-    queue = Queue()
-    app.logger.debug('Starting events dispatch thread.')
-    try:
-        threading.Thread(
-            target=_dispatch_events, args=[queue, app], daemon=True
-        ).start()
-        return queue
-    except Exception as err:
-        app.logger.error('Cound not start events dispatch thread: %s.', str(err))
-        sys.exit(2)
-
-
 def subscribe(
     kind: Optional[str],
     target: str,
@@ -262,3 +234,35 @@ def publish(publication: Any, context: Dict[str, Any]) -> Response:
     if isinstance(publication, dict) and 'metadata' in publication:
         publication['metadata']['creationTimestamp'] = datetime.now().isoformat()
     return _do(post, '/publications', context['eventbus'], json=publication)
+
+
+def make_dispatchqueue(app, fn=publish) -> Queue:
+    """Make an asynchronous dispatch queue.
+
+    Handles publication failures by waiting for an increasing delay and
+    re-attempting publication.
+
+    The delay is at most 60 seconds.
+
+    # Required parameters
+
+    - app: a flask app
+
+    # Optional parameters
+
+    - fn: a function (`publish` by default)
+
+    # Returned value
+
+    A _queue_. Events pushed to this queue will be published.
+    """
+    queue = Queue()
+    app.logger.debug('Starting events dispatch thread.')
+    try:
+        threading.Thread(
+            target=_dispatch_events, args=[queue, fn, app], daemon=True
+        ).start()
+        return queue
+    except Exception as err:
+        app.logger.error('Cound not start events dispatch thread: %s.', str(err))
+        sys.exit(2)
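`make_dispatchqueue` now lives below `publish` so that `publish` can serve as its default `fn`, and the dispatch thread calls the injected `fn` rather than `publish` directly. A small sketch of what that injection point allows, for instance substituting a recording publisher while exercising the queue (the `app` object and `record_publication` helper are hypothetical, not part of the toolkit):

```python
from opentf.commons.pubsub import make_dispatchqueue

published = []

def record_publication(publication, context):
    # Same call shape as publish(publication, context); here we only record.
    published.append(publication)

# `app` is the plugin's Flask application, with CONTEXT in app.config.
queue = make_dispatchqueue(app, fn=record_publication)
queue.put({'kind': 'Notification', 'metadata': {'name': 'demo'}})

# The daemon thread drains the queue and calls record_publication() with the
# app's CONTEXT, re-queueing and retrying with a growing delay on failure.
```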
opentf/toolkit/__init__.py CHANGED
@@ -40,6 +40,7 @@ from opentf.commons import (
     CHANNEL_HOOKS,
     validate_schema,
     make_status_response,
+    make_dispatchqueue,
 )
 from opentf.toolkit import core
 
@@ -51,6 +52,7 @@ KIND_KEY = '__kind key__'
 INPUTS_KEY = '__inputs key__'
 WATCHEDFILES_KEY = '__watched files__'
 WATCHEDFILES_EVENT_KEY = '__watched files event__'
+DISPATCHQUEUE_KEY = '__dispatch queue__'
 
 WATCHDOG_POLLING_DELAY_SECONDS = 30
 WATCHDOG_POLLING_DELAY_KEY = 'watchdog_polling_delay_seconds'
@@ -613,11 +615,12 @@ def make_plugin(
                 f'Not a valid {kind} request: Missing metadata section',
             )
 
-        valid, extra = validate_schema(kind, body)
-        if not valid:
-            return make_status_response(
-                'BadRequest', f'Not a valid {kind} request: {extra}.'
-            )
+        if request.remote_addr != '127.0.0.1':
+            valid, extra = validate_schema(kind, body)
+            if not valid:
+                return make_status_response(
+                    'BadRequest', f'Not a valid {kind} request: {extra}.'
+                )
 
         if workflow_id := body.get('metadata', {}).get('workflow_id'):
             g.workflow_id = workflow_id
@@ -673,8 +676,10 @@ def make_plugin(
 
     if kind == PROVIDERCOMMAND:
         _maybe_add_hook_watcher(plugin, schema)
+        plugin.config[DISPATCHQUEUE_KEY] = make_dispatchqueue(plugin)
     elif kind == EXECUTIONCOMMAND:
         _maybe_add_hook_watcher(plugin, CHANNEL_HOOKS)
+        plugin.config[DISPATCHQUEUE_KEY] = make_dispatchqueue(plugin)
 
     core.register_defaultplugin(plugin)
 
opentf/toolkit/core.py CHANGED
@@ -27,7 +27,6 @@ from opentf.commons import (
     GENERATORRESULT,
     PROVIDERRESULT,
     EXECUTIONERROR,
-    publish,
     make_event,
     make_uuid,
 )
@@ -114,7 +113,7 @@ class ExecutionError(Exception):
 
 def publish_event(event) -> None:
     """Publish event."""
-    publish(event, context=_getplugin().config['CONTEXT'])
+    _getplugin().config['__dispatch queue__'].put(event)
 
 
 def publish_error(error_details) -> None:
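With the toolkit wiring above, plugins created for provider or execution commands get a dispatch queue stored under `DISPATCHQUEUE_KEY` ('__dispatch queue__'), and `core.publish_event` now enqueues on that queue instead of calling `publish` synchronously. A rough sketch of the resulting flow from a handler (the event payload is illustrative):

```python
from opentf.toolkit import core

# Inside a provider/execution plugin handler, publishing is now
# fire-and-forget: the event lands on the plugin's dispatch queue and the
# background thread created by make_dispatchqueue() performs the real
# publish() call, retrying if the event bus is temporarily unavailable.
event = {'kind': 'ProviderResult', 'metadata': {'name': 'example'}}
core.publish_event(event)  # equivalent to plugin.config['__dispatch queue__'].put(event)
```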
METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: opentf-toolkit-nightly
-Version: 0.57.0.dev1051
+Version: 0.57.0.dev1071
 Summary: OpenTestFactory Orchestrator Toolkit
 Home-page: https://gitlab.com/henixdevelopment/open-source/opentestfactory/python-toolkit
 Author: Martin Lafaix
@@ -16,14 +16,14 @@ Classifier: License :: OSI Approved :: Apache Software License
 Requires-Python: >= 3.8.0
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: requests >=2.31
-Requires-Dist: PyJWT[crypto] <2.9,>=2.7
-Requires-Dist: PyYAML >=6
-Requires-Dist: Flask >=2.2.3
-Requires-Dist: jsonschema >=4.17
-Requires-Dist: toposort >=1.10
-Requires-Dist: waitress >=2.1.2
-Requires-Dist: paste >=3.5.2
+Requires-Dist: requests>=2.31
+Requires-Dist: PyJWT[crypto]<2.9,>=2.7
+Requires-Dist: PyYAML>=6
+Requires-Dist: Flask>=2.2.3
+Requires-Dist: jsonschema>=4.17
+Requires-Dist: toposort>=1.10
+Requires-Dist: waitress>=2.1.2
+Requires-Dist: paste>=3.5.2
 
 # opentf-toolkit
 
RECORD CHANGED
@@ -1,9 +1,9 @@
 opentf/commons/__init__.py,sha256=Uq-7WvkMoBiF3C1KnhwIL4LCKpT8EvomnuG4MBYpIhs,21994
 opentf/commons/auth.py,sha256=bM2Z3kxm2Wku1lKXaRAIg37LHvXWAXIZIqjplDfN2P8,15899
 opentf/commons/config.py,sha256=dyus4K5Zdmcftc3Y9Z1YRkzA1KwiRLHoeAlg2_A49QM,7876
-opentf/commons/datasources.py,sha256=4ye-TMtaE88O8GVcWx-FtKXOC8aIZLteR6wfIr7Do8U,25232
+opentf/commons/datasources.py,sha256=LjIjZbf08u1VllPN4fDss0OAg-_7gtRqgpIZ2tLuiHo,26807
 opentf/commons/expressions.py,sha256=jM_YKXVOFhvOE2aE2IuacuvxhIsOYTFs2oQkpcbWR6g,19645
-opentf/commons/pubsub.py,sha256=Y3vOeGNcI4_-uYwBy2grxmn1Oq5r89tyRZZX3mjgiAA,7254
+opentf/commons/pubsub.py,sha256=DVrSara5FRfNdPBwXKUkTobqGki0RPDehylTEFcJnFc,7341
 opentf/commons/schemas.py,sha256=YSCvlmqc7satt-OqIoYXnmhOyo9h8wIpNyKaBAY4u9c,4039
 opentf/commons/selectors.py,sha256=DEpLgRAr5HXSpSYI4liXP2hLUTvOSexFa9Vfa1xIQTk,7134
 opentf/schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -46,11 +46,11 @@ opentf/schemas/opentestfactory.org/v1beta1/Workflow.json,sha256=QZ8mM9PhzsI9gTmw
 opentf/schemas/opentestfactory.org/v1beta2/ServiceConfig.json,sha256=rEvK2YWL5lG94_qYgR_GnLWNsaQhaQ-2kuZdWJr5NnY,3517
 opentf/scripts/launch_java_service.sh,sha256=S0jAaCuv2sZy0Gf2NGBuPX-eD531rcM-b0fNyhmzSjw,2423
 opentf/scripts/startup.py,sha256=Da2zo93pBWbdRmj-wgekgLcF94rpNc3ZkbvR8R0w8XY,21279
-opentf/toolkit/__init__.py,sha256=0265hKrPC2trSQcTEIiobFCUzeXGRLbIm-T30NP-Kio,23216
+opentf/toolkit/__init__.py,sha256=4UbExlqRO8Ew7GYRrMdEDruMIB0zTLSsoVCKfW3vPnQ,23488
 opentf/toolkit/channels.py,sha256=6xcVKHUK2FdyVKIQmPQbakngfVuQDzCcD_lInOdKpro,17171
-opentf/toolkit/core.py,sha256=Uc5cRwyi6bs7WVmgvQLTvEa6bXjZ3KfCKWHSdIeUy98,9621
-opentf_toolkit_nightly-0.57.0.dev1051.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-opentf_toolkit_nightly-0.57.0.dev1051.dist-info/METADATA,sha256=dlte0IlbCdEPnhzDEE5pyG-A3PxBkdB0No4Ubh64BB8,1951
-opentf_toolkit_nightly-0.57.0.dev1051.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-opentf_toolkit_nightly-0.57.0.dev1051.dist-info/top_level.txt,sha256=_gPuE6GTT6UNXy1DjtmQSfCcZb_qYA2vWmjg7a30AGk,7
-opentf_toolkit_nightly-0.57.0.dev1051.dist-info/RECORD,,
+opentf/toolkit/core.py,sha256=GdmEJ0ikdMdpViEpR4jP-viqfvBUHnpiFCOXwLGThxg,9606
+opentf_toolkit_nightly-0.57.0.dev1071.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+opentf_toolkit_nightly-0.57.0.dev1071.dist-info/METADATA,sha256=C46QttPC75AZio20HOPCH_uxLwdUoKhC-jNIIFKfgP4,1943
+opentf_toolkit_nightly-0.57.0.dev1071.dist-info/WHEEL,sha256=eOLhNAGa2EW3wWl_TU484h7q1UNgy0JXjjoqKoxAAQc,92
+opentf_toolkit_nightly-0.57.0.dev1071.dist-info/top_level.txt,sha256=_gPuE6GTT6UNXy1DjtmQSfCcZb_qYA2vWmjg7a30AGk,7
+opentf_toolkit_nightly-0.57.0.dev1071.dist-info/RECORD,,
WHEEL CHANGED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.43.0)
+Generator: bdist_wheel (0.44.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 