opentf-toolkit-nightly 0.62.0.dev1283__py3-none-any.whl → 0.62.0.dev1291__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
- opentf/commons/__init__.py +57 -26
- opentf/commons/config.py +117 -33
- opentf/commons/schemas.py +43 -14
- opentf/toolkit/__init__.py +31 -19
- {opentf_toolkit_nightly-0.62.0.dev1283.dist-info → opentf_toolkit_nightly-0.62.0.dev1291.dist-info}/METADATA +1 -1
- {opentf_toolkit_nightly-0.62.0.dev1283.dist-info → opentf_toolkit_nightly-0.62.0.dev1291.dist-info}/RECORD +9 -9
- {opentf_toolkit_nightly-0.62.0.dev1283.dist-info → opentf_toolkit_nightly-0.62.0.dev1291.dist-info}/LICENSE +0 -0
- {opentf_toolkit_nightly-0.62.0.dev1283.dist-info → opentf_toolkit_nightly-0.62.0.dev1291.dist-info}/WHEEL +0 -0
- {opentf_toolkit_nightly-0.62.0.dev1283.dist-info → opentf_toolkit_nightly-0.62.0.dev1291.dist-info}/top_level.txt +0 -0
opentf/commons/__init__.py
CHANGED

@@ -465,8 +465,9 @@ def get_context_parameter(
     - `{app.name.upper()}_{name.upper()}` environment variable
     - `{name.upper()}` environment variable (if spec is shared)
     - `name` in configuration context
-    -
+    - for each deprecated name, in order, repeat the three steps above
     - `default` if not None
+    - `spec['default']` if spec defines a default value
 
     # Required parameters
 
@@ -482,9 +483,29 @@ def get_context_parameter(
 
     An integer if the parameter has a specification and is expected to
     be of type int. The actual parameter type otherwise.
+
+    # Spec format
+
+    (Optional, in `spec.contextParameters` in the service descriptor.)
+
+    ```yaml
+    - name: parameter_name
+      deprecatedNames: [alternative_parameter_names]
+      descriptiveName: parameter description
+      shared: true
+      type: int
+      default: 66
+      minValue: 10
+      maxValue: 100
+    ```
+
+    If `minValue` and/or `maxValue` are defined, the parameter must be
+    within the range.
+
+    If `type` is defined, the parameter must be of the specified type.
     """
 
-    def _maybe_validate(v)
+    def _maybe_validate(v):
         newv, reason = validator(name, v) if validator else (v, None)
         lhs = f' {spec["descriptiveName"]} ({name}):' if spec else f' {name}:'
         if newv != v:
@@ -493,36 +514,46 @@ def get_context_parameter(
             app.logger.info(f'{lhs} {newv}')
         return newv
 
-    def _fatal(
-        app.logger.error(
+    def _fatal(msg: str) -> NoReturn:
+        app.logger.error(msg)
         sys.exit(2)
 
     spec = _get_contextparameter_spec(app, name)
-
-
-
-
-
-
-
+    shared = spec and spec.get('shared')
+    deprecateds: List[str] = spec.get('deprecatedNames', []) if spec else []
+
+    for alternative in [name] + deprecateds:
+        val = os.environ.get(alternative.upper()) if shared else None
+        val = os.environ.get(f'{app.name.upper()}_{alternative.upper()}', val)
+        val = val if val is not None else app.config['CONTEXT'].get(alternative)
+        if val is not None:
+            if alternative != name:
+                app.logger.warning(
+                    f' "{alternative}" is deprecated. Consider using "{name}" instead.'
+                )
+            break
+    else:
+        val = default
+
+    if val is None and spec:
+        val = spec.get('default')
+    if val is None:
         _fatal(
-            'Context parameter
-            name,
+            f'Context parameter "{name}" not in current context and no default value specified.'
        )
 
-
-
-
-
-
-
-
-
-
-
-
-
-            _fatal(f'{desc} must be less that {spec["maxValue"]+1}.')
+    if spec:
+        if spec.get('type') == 'int':
+            try:
+                val = int(val)
+            except ValueError as err:
+                _fatal(f'Context parameter "{name}" not an integer: {err}.')
+        desc = spec['descriptiveName'][0].upper() + spec['descriptiveName'][1:]
+        if 'minValue' in spec and val < spec['minValue']:
+            _fatal(f'{desc} must be greater than {spec["minValue"]-1}.')
+        if 'maxValue' in spec and val > spec['maxValue']:
+            _fatal(f'{desc} must be less that {spec["maxValue"]+1}.')
+
     return _maybe_validate(val)
 
 
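The new resolution order is easiest to see end to end. The sketch below is a hypothetical, simplified re-implementation of the lookup described in the updated docstring (the function name `resolve_context_parameter` and the `myservice` example are illustrative, not part of `opentf.commons`): the service-prefixed environment variable is checked first, then the shared one, then the configuration context, deprecated names are tried after the canonical one, and `spec['default']` is the final fallback.

import os
from typing import Any, Dict, Optional


def resolve_context_parameter(
    service: str,
    name: str,
    context: Dict[str, Any],
    spec: Optional[Dict[str, Any]] = None,
    default: Optional[Any] = None,
) -> Optional[Any]:
    """Simplified, hypothetical re-implementation of the lookup above."""
    spec = spec or {}
    shared = spec.get('shared', False)
    for alternative in [name] + spec.get('deprecatedNames', []):
        # Service-prefixed variable wins, then the shared one, then the context.
        val = os.environ.get(alternative.upper()) if shared else None
        val = os.environ.get(f'{service.upper()}_{alternative.upper()}', val)
        if val is None:
            val = context.get(alternative)
        if val is not None:
            return val
    return default if default is not None else spec.get('default')


if __name__ == '__main__':
    spec = {
        'name': 'polling_delay',
        'deprecatedNames': ['poll_delay'],
        'shared': True,
        'type': 'int',
        'default': 66,
    }
    os.environ['MYSERVICE_POLL_DELAY'] = '30'  # deprecated, service-prefixed name
    print(resolve_context_parameter('myservice', 'polling_delay', {}, spec))  # -> '30'

In the real function the deprecated match would also emit the deprecation warning shown in the diff before the value is validated and range-checked.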
opentf/commons/config.py
CHANGED

@@ -25,14 +25,14 @@ from logging.config import dictConfig
 import yaml
 
 from .exceptions import ConfigError
-from .schemas import
+from .schemas import read_and_validate, SERVICECONFIG
 
 
 ########################################################################
 
 NOTIFICATION_LOGGER_EXCLUSIONS = 'eventbus'
 
-DEFAULT_CONTEXT = {
+DEFAULT_CONTEXT: Dict[str, Any] = {
     'host': '127.0.0.1',
     'port': 443,
     'ssl_context': 'adhoc',
@@ -46,6 +46,21 @@ DEBUG_LEVELS = {'CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG', 'NOTSET'}
 
 
 def make_argparser(description: str, configfile: str) -> argparse.ArgumentParser:
+    """Make an argument parser.
+
+    The configured argument parser includes definitions for all commons
+    command-line parameters. It can be extended with additional
+    parameters.
+
+    # Required parameters
+
+    - description: a string, the parser description
+    - configfile: a string, the default configuration file name
+
+    # Returned value
+
+    An argument parser, configured.
+    """
     parser = argparse.ArgumentParser(description=description)
     parser.add_argument('--descriptor', help='alternate descriptor file')
     parser.add_argument(
@@ -98,7 +113,34 @@ def make_argparser(description: str, configfile: str) -> argparse.ArgumentParser
 
 
 def configure_logging(name: str, debug_level: str) -> None:
-
+    """Configure logging.
+
+    The logging configuration is driven by the `debug_level` parameter.
+
+    A `wsgi` handler is defined, that will log messages to the WSGI
+    stream.
+
+    You can use the `OPENTF_LOGGING_REDIRECT` environment variable to
+    redirect the logs to a specific stream.
+
+    If `name` is not in `NOTIFICATION_LOGGER_EXCLUSIONS`, an `eventbus`
+    handler is added. It will post log messages to the event bus as
+    `Notification` events.
+
+    The configured format is:
+
+    `[%(asctime)s] %(levelname)s in {name}: %(message)s`
+
+    # Required parameters
+
+    - name: a string, the service name
+    - debug_level: a string, the log level
+
+    # Returned value
+
+    None.
+    """
+    logging_conf: Dict[str, Any] = {
        'version': 1,
        'formatters': {
            'default': {
@@ -126,41 +168,57 @@ def configure_logging(name: str, debug_level: str) -> None:
     dictConfig(logging_conf)
 
 
-def _read_configfile(
-    argsconfig: Optional[str], configfile: str
-) -> Tuple[str, Dict[str, Any]]:
-    try:
-        filename = argsconfig or configfile
-        with open(filename, 'r', encoding='utf-8') as cnf:
-            config = yaml.safe_load(cnf)
-        if not isinstance(config, dict):
-            raise ValueError('Config file is not an object.')
-        return filename, config
-    except Exception as err:
-        raise ConfigError(f'Could not get configfile "{filename}", aborting: {err}.')
-
-
 def read_config(
-
-
+    altconfig: Optional[str],
+    altcontext: Optional[str],
     configfile: str,
-    defaultcontext,
-    schema,
+    defaultcontext: Optional[Dict[str, Any]],
+    schema: Optional[str],
 ) -> Tuple[Dict[str, Any], Dict[str, Any]]:
-
-
+    """Read service configuration.
+
+    If not None, the `alt` parameters are used to override the default
+    ones.
+
+    # Required parameters
+
+    - altconfig: a string (the configuration file location) or None
+    - altcontext: a string (the context name) or None
+    - configfile: a string, the configuration file name
+    - defaultcontext: a dictionary (the default context) or None
+    - schema: a string (the schema that validates the configuration)
+      or None
+
+    If `altcontext` is None, returns the current context as specified
+    in the service configuration.
+
+    If `schema` is None, uses SERVICECONFIG as the default schema.
+
+    # Returned value
+
+    A pair of dictionaries. The first item is the context, the second
+    item is the complete configuration.
+
+    # Raised exceptions
+
+    A _ConfigError_ exception is raised if the configuration file cannot
+    be read or if the configuration file is invalid.
+    """
+    if altconfig is None and not os.path.isfile(configfile):
+        if altcontext:
             raise ConfigError(
                 'Cannot specify a context when using default configuration.'
             )
         context = defaultcontext or DEFAULT_CONTEXT
         config = {}
     else:
-        configfile
-
-
-
+        configfile = altconfig or configfile
+        try:
+            config = read_and_validate(configfile, schema or SERVICECONFIG)
+        except (ValueError, OSError) as err:
+            raise ConfigError(f'Could not read configfile "{configfile}": {err}.')
 
-    context_name =
+    context_name = altcontext or config['current-context']
     try:
         context = get_named(context_name, config['contexts'])['context']
     except ValueError as err:
@@ -169,11 +227,32 @@ def read_config(
 
 
 def read_descriptor(
-
+    altdescriptor: Optional[str], descriptor: Optional[str]
 ) -> Tuple[str, List[Dict[str, Any]]]:
+    """Read service descriptor.
+
+    If `altdescriptor` is None, the descriptor is read from the module
+    where the function is called. If `descriptor` is None, the default
+    descriptor name is `service.yaml`.
+
+    # Required parameter
+
+    - altdescriptor: a string, the descriptor file location or None
+    - descriptor: a string, the descriptor file name or None
+
+    # Returned value
+
+    A pair (filename, manifests) where filename is the descriptor file
+    name and manifests is a list of dictionaries.
+
+    # Raised exceptions
+
+    A _ConfigError_ exception is raised if the descriptor file cannot
+    be read.
+    """
     try:
-        if
-        filename =
+        if altdescriptor:
+            filename = altdescriptor
         else:
             for frame in inspect.stack():
                 if frame.frame.f_code.co_name == '<module>':
@@ -187,12 +266,17 @@ def read_descriptor(
         with open(filename, 'r', encoding='utf-8') as definition:
             manifests = list(yaml.safe_load_all(definition))
         return filename, manifests
+    except ConfigError:
+        raise
     except Exception as err:
         raise ConfigError(f'Could not get descriptor "{filename}", aborting: {err}.')
 
 
 def get_named(name: str, entries: List[Dict[str, Any]]) -> Dict[str, Any]:
-    """Get an entry from a list of
+    """Get an entry from a list of dictionaries.
+
+    Matching entries are those with a 'name' entry equal to the
+    requested name.
 
     # Required parameters
 
@@ -219,7 +303,7 @@ def get_named(name: str, entries: List[Dict[str, Any]]) -> Dict[str, Any]:
 def get_debug_level(name: str) -> str:
     """Get service log level.
 
-    Driven by environment variables. If `{
+    Driven by environment variables. If `{name}_DEBUG_LEVEL` is
     defined, this value is used. If not, if `DEBUG_LEVEL` is set, then
     it is used. Otherwise, returns `INFO`.
 
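`read_config` now delegates file reading and validation to `schemas.read_and_validate` and falls back to `DEFAULT_CONTEXT` when no configuration file is present. The sketch below is a minimal, hedged illustration of those two paths; the import path is inferred from the file layout and the file name `conf.yaml` is illustrative, not verified API documentation.

# Minimal sketch of the new read_config() behaviour (assumed import path).
from opentf.commons.config import read_config

# No alternate configuration file, no alternate context, and 'conf.yaml'
# absent from disk: the new guard returns DEFAULT_CONTEXT and an empty
# configuration instead of failing.
context, config = read_config(None, None, 'conf.yaml', None, None)
print(context['host'], context['port'])  # 127.0.0.1 443

# When the file exists, it is now read through schemas.read_and_validate()
# and checked against the SERVICECONFIG schema (or the schema passed as the
# last argument) before the current context is extracted via get_named().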
opentf/commons/schemas.py
CHANGED

@@ -1,4 +1,4 @@
-# Copyright (c) Henix, Henix.fr
+# Copyright (c) 2023 Henix, Henix.fr
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,13 +14,14 @@
 
 """Helpers for the OpenTestFactory schemas."""
 
-from typing import Any, Dict, Tuple
+from typing import Any, Dict, Optional, Tuple
 
 import json
 import logging
 import os
 
 from jsonschema import Draft201909Validator, ValidationError
+from yaml import safe_load
 
 import opentf.schemas
 
@@ -68,8 +69,8 @@ INSIGHT_COLLECTOR = 'opentestfactory.org/v1alpha1/InsightCollector'
 ########################################################################
 # JSON Schema Helpers
 
-_schemas = {}
-_validators = {}
+_schemas: Dict[str, Dict[str, Any]] = {}
+_validators: Dict[str, Draft201909Validator] = {}
 
 SCHEMAS_ROOT_DIRECTORY = list(opentf.schemas.__path__)[0]
 
@@ -99,19 +100,21 @@ def get_schema(name: str) -> Dict[str, Any]:
         ) as schema:
             _schemas[name] = json.loads(schema.read())
     except Exception as err:
-        logging.error('Could not read schema %s: %s', name, err)
+        logging.error('Could not read schema "%s": %s', name, err)
         raise
     return _schemas[name]
 
 
-def _validator(schema: str):
+def _validator(schema: str) -> Draft201909Validator:
     if schema not in _validators:
         _validators[schema] = Draft201909Validator(get_schema(schema))
     return _validators[schema]
 
 
-def validate_schema(
-
+def validate_schema(
+    schema: str, instance: Dict[str, Any]
+) -> Tuple[bool, Optional[str]]:
+    """Return (True, None) if instance validates schema.
 
     # Required parameters
 
@@ -122,14 +125,40 @@ def validate_schema(schema, instance) -> Tuple[bool, Any]:
 
     A (bool, Optional[str]) pair. If `instance` is a valid instance of
     `schema`, returns `(True, None)`. If not, returns `(False, error)`.
-
-    # Raised exceptions
-
-    If an error occurs while reading the schema, the initial exception
-    is logged and raised again.
     """
     try:
         _validator(schema).validate(instance=instance)
     except ValidationError as err:
-        return False, err
+        return False, str(err)
     return True, None
+
+
+def read_and_validate(filename: str, schema: str) -> Dict[str, Any]:
+    """Read and validate a configuration file.
+
+    # Required parameters
+
+    - filename: a string, the configuration file name
+    - schema: a string, the schema to validate the configuration
+
+    # Returned value
+
+    A dictionary, the configuration.
+
+    # Raised exceptions
+
+    An _OSError_ exception is raised if the configuration file cannot
+    be read.
+
+    A _ValueError_ exception is raised if the configuration file is
+    invalid.
+    """
+    with open(filename, 'r', encoding='utf-8') as cnf:
+        config = safe_load(cnf)
+
+    if not isinstance(config, dict):
+        raise ValueError('Config file is not an object.')
+    valid, extra = validate_schema(schema or SERVICECONFIG, config)
+    if not valid:
+        raise ValueError(f'Config file "{filename}" is invalid: {extra}.')
+    return config
opentf/toolkit/__init__.py
CHANGED

@@ -23,7 +23,7 @@ import sys
 from collections import defaultdict
 from time import sleep
 
-from flask import request, g
+from flask import Flask, request, g
 
 import yaml
 
@@ -121,7 +121,7 @@ def _maybe_get_item(cache: Dict[Any, Any], labels: Dict[str, str]) -> Optional[A
 
 
 def _ensure_inputs_match(
-    plugin, labels: Dict[str, str], inputs: Dict[str, Any]
+    plugin: Flask, labels: Dict[str, str], inputs: Dict[str, Any]
 ) -> None:
     """Check inputs.
 
@@ -207,7 +207,7 @@ INVALID_HOOKS_DEFINITION_TEMPLATE = {
 }
 
 
-def _maybe_add_hook_watcher(plugin, schema: str) -> None:
+def _maybe_add_hook_watcher(plugin: Flask, schema: str) -> None:
     if plugin.config['CONTEXT'][KIND_KEY] == EXECUTIONCOMMAND:
         type_ = 'CHANNEL'
     else:
@@ -235,7 +235,7 @@ def _maybe_add_hook_watcher(plugin, schema: str) -> None:
 
 
 def _read_hooks_definition(
-    plugin, hooksfile: str, schema: str, invalid: Dict[str, Any]
+    plugin: Flask, hooksfile: str, schema: str, invalid: Dict[str, Any]
 ) -> None:
     """Read hooks definition file.
 
@@ -285,7 +285,9 @@ def _read_hooks_definition(
 # Dispatchers
 
 
-def _dispatch_providercommand(
+def _dispatch_providercommand(
+    plugin: Flask, handler: Handler, body: Dict[str, Any]
+) -> None:
     """Provider plugin dispatcher.
 
     `handler` is expected to return either a list of steps or raise a
@@ -308,7 +310,7 @@ def _dispatch_providercommand(plugin, handler: Handler, body: Dict[str, Any]) ->
         core.publish_error(f'Unexpected execution error: {err}.')
 
 
-def _dispatch_executioncommand(_, handler: Handler, body: Dict[str, Any]):
+def _dispatch_executioncommand(_, handler: Handler, body: Dict[str, Any]) -> None:
     """Channel plugin dispatcher."""
     try:
         handler(body)
@@ -316,7 +318,9 @@ def _dispatch_executioncommand(_, handler: Handler, body: Dict[str, Any]):
         core.publish_error(f'Unexpected execution error: {err}.')
 
 
-def _dispatch_generatorcommand(
+def _dispatch_generatorcommand(
+    plugin: Flask, handler: Handler, body: Dict[str, Any]
+) -> None:
     """Generator plugin dispatcher."""
     try:
         labels = body['metadata'].get('labels', {})
@@ -339,7 +343,7 @@ def _dispatch_generatorcommand(plugin, handler: Handler, body: Dict[str, Any]):
 # Watchdog
 
 
-def _run_handlers(plugin, file, handlers):
+def _run_handlers(plugin: Flask, file, handlers) -> None:
     """Run file handlers."""
     for handler, args, kwargs in handlers:
         try:
@@ -350,7 +354,7 @@ def _run_handlers(plugin, file, handlers):
             )
 
 
-def _watchdog(plugin, polling_delay):
+def _watchdog(plugin: Flask, polling_delay: int) -> None:
     """Watch changes and call handlers when appropriate."""
     files_stat = defaultdict(float)
     files_handlers = plugin.config[WATCHEDFILES_KEY]
@@ -373,7 +377,7 @@ def _watchdog(plugin, polling_delay):
         plugin.config[WATCHEDFILES_EVENT_KEY].clear()
 
 
-def _start_watchdog(plugin) -> None:
+def _start_watchdog(plugin: Flask) -> None:
     """Set up a watchdog that monitors specified files for changes."""
     polling_delay = max(
         WATCHDOG_POLLING_DELAY_SECONDS,
@@ -386,7 +390,7 @@ def _start_watchdog(plugin) -> None:
     ).start()
 
 
-def watch_file(plugin, path: str, handler, *args, **kwargs) -> None:
+def watch_file(plugin: Flask, path: str, handler, *args, **kwargs) -> None:
     """Watch file changes.
 
     There can be more than one handler watching a given file. A handler
@@ -420,12 +424,18 @@ def watch_file(plugin, path: str, handler, *args, **kwargs) -> None:
     plugin.config[WATCHEDFILES_EVENT_KEY].set()
 
 
-def _watchnotifier(
+def _watchnotifier(
+    plugin: Flask,
+    polling_delay: int,
+    check: Callable[..., bool],
+    items,
+    notify: Callable[[], None],
+):
     reference = {}
     while True:
         sleep(polling_delay)
         try:
-            statuses = {item: check(item) for item in items
+            statuses = {item: check(item) for item in list(items)}
             if statuses != reference:
                 notify()
                 reference = statuses
@@ -435,7 +445,9 @@ def _watchnotifier(plugin, polling_delay, check, items, notify):
             )
 
 
-def watch_and_notify(
+def watch_and_notify(
+    plugin: Flask, status: Callable[..., Any], items, notify: Callable[[], None]
+) -> None:
     """Watch statuses changes in items.
 
     Check item status change at regular interval, call notify if
@@ -444,11 +456,11 @@ def watch_and_notify(plugin, status, items, notify):
     # Required parameters
 
     - plugin: a Flask application
-    - status: a function taking an item and returning a
+    - status: a function taking an item and returning a value
    - items: an iterable
    - notify: a function of no arguments
    """
-    polling_delay = get_context_parameter(plugin, '
+    polling_delay = get_context_parameter(plugin, 'availability_check_delay_seconds')
 
     plugin.logger.debug('Starting watch notifier thread.')
     threading.Thread(
@@ -459,7 +471,7 @@ def watch_and_notify(plugin, status, items, notify):
 
 
 def _subscribe(
-    plugin,
+    plugin: Flask,
     cat_prefix: Optional[str],
     cat: Optional[str],
     cat_version: Optional[str],
@@ -499,7 +511,7 @@ def _subscribe(
     return subscribe(kind=kind, target='inbox', app=plugin, labels=labels)
 
 
-def run_plugin(plugin):
+def run_plugin(plugin: Flask) -> None:
     """Start and run plugin.
 
     Subscribe to the relevant events before startup and tries to
@@ -553,7 +565,7 @@ def make_plugin(
     schema=None,
     configfile=None,
     args: Optional[Any] = None,
-):
+) -> Flask:
     """Create and return a new plugin service.
 
     One and only one of `channel`, `generator`, `provider`, `providers`,
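Most of the changes in this file add `Flask` type annotations; the one behavioural change is in `_watchnotifier`, which now snapshots `items` with `list(items)` before building the status dictionary, presumably so that a collection mutated by another thread cannot break the comprehension mid-iteration. The standalone sketch below illustrates that polling pattern; it re-implements the loop for illustration and is not the toolkit's code.

import threading
from time import sleep
from typing import Any, Callable, Dict, Iterable


def watch_loop(
    polling_delay: int,
    check: Callable[[Any], bool],
    items: Iterable[Any],
    notify: Callable[[], None],
) -> None:
    """Poll item statuses and call notify() whenever any status changes."""
    reference: Dict[Any, bool] = {}
    while True:
        sleep(polling_delay)
        # list(items) snapshots the iterable, as the updated _watchnotifier does,
        # so a concurrent append to the underlying collection is harmless here.
        statuses = {item: check(item) for item in list(items)}
        if statuses != reference:
            notify()
            reference = statuses


if __name__ == '__main__':
    targets = ['channel-a', 'channel-b']
    threading.Thread(
        target=watch_loop,
        args=(1, lambda item: True, targets, lambda: print('status changed')),
        daemon=True,
    ).start()
    sleep(3)  # the notifier fires once, on the first poll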
{opentf_toolkit_nightly-0.62.0.dev1283.dist-info → opentf_toolkit_nightly-0.62.0.dev1291.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: opentf-toolkit-nightly
-Version: 0.62.0.dev1283
+Version: 0.62.0.dev1291
 Summary: OpenTestFactory Orchestrator Toolkit
 Home-page: https://gitlab.com/henixdevelopment/open-source/opentestfactory/python-toolkit
 Author: Martin Lafaix
{opentf_toolkit_nightly-0.62.0.dev1283.dist-info → opentf_toolkit_nightly-0.62.0.dev1291.dist-info}/RECORD
CHANGED

@@ -1,10 +1,10 @@
-opentf/commons/__init__.py,sha256=
+opentf/commons/__init__.py,sha256=_LTxZRG9KKhmx6T5Tn19un_FBE2WH_1ZV_cG43MYAcI,23441
 opentf/commons/auth.py,sha256=gXRp_0Tf3bfd65F4QiQmh6C6vR9y3ugag_0DSvozJFk,15898
-opentf/commons/config.py,sha256=
+opentf/commons/config.py,sha256=RVSSdQhMle4oCo_z_AR2EQ4U6sUjSxw-qVBtjKuJVfo,10219
 opentf/commons/exceptions.py,sha256=7dhUXO8iyAbqVwlUKxZhgRzGqVcb7LkG39hFlm-VxIA,2407
 opentf/commons/expressions.py,sha256=jM_YKXVOFhvOE2aE2IuacuvxhIsOYTFs2oQkpcbWR6g,19645
 opentf/commons/pubsub.py,sha256=M0bvajR9raUP-xe5mfRjdrweZyHQw1_Qsy56gS-Sck4,7676
-opentf/commons/schemas.py,sha256=
+opentf/commons/schemas.py,sha256=u1TdoGFNWoslWV7foBqXPgkGTQtRZWtXTr_PEd_TsFY,5189
 opentf/commons/selectors.py,sha256=R7y1E_5yP1zsz40eNy1buk464SaKVH5dCcRZnEYkdpU,7136
 opentf/schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 opentf/schemas/abac.opentestfactory.org/v1alpha1/Policy.json,sha256=JXsfNAPSEYggeyaDutSQBeG38o4Bmcr70dPLWWeqIh8,2105
@@ -55,11 +55,11 @@ opentf/schemas/opentestfactory.org/v1beta1/Workflow.json,sha256=QZ8mM9PhzsI9gTmw
 opentf/schemas/opentestfactory.org/v1beta2/ServiceConfig.json,sha256=rEvK2YWL5lG94_qYgR_GnLWNsaQhaQ-2kuZdWJr5NnY,3517
 opentf/scripts/launch_java_service.sh,sha256=S0jAaCuv2sZy0Gf2NGBuPX-eD531rcM-b0fNyhmzSjw,2423
 opentf/scripts/startup.py,sha256=vOGxl7xBcp1-_LsAKiOmeOqFl2iT81A2XRrXBLUrNi4,22785
-opentf/toolkit/__init__.py,sha256=
+opentf/toolkit/__init__.py,sha256=_I003tfzaNTpMxS_CgqMIrgud66Rofz4pfFk0Jbjwls,23320
 opentf/toolkit/channels.py,sha256=whLfPVT5PksVlprmoeb2ktaZ3KEhqyryUCVWBJq7PeY,24308
 opentf/toolkit/core.py,sha256=fqnGgaYnuVcd4fyeNIwpc0QtyUo7jsKeVgdkBfY3iqo,9443
-opentf_toolkit_nightly-0.62.0.
-opentf_toolkit_nightly-0.62.0.
-opentf_toolkit_nightly-0.62.0.
-opentf_toolkit_nightly-0.62.0.
-opentf_toolkit_nightly-0.62.0.
+opentf_toolkit_nightly-0.62.0.dev1291.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+opentf_toolkit_nightly-0.62.0.dev1291.dist-info/METADATA,sha256=jitqIOoKF4yrCa7b44WWH6WzZUdHPKOOpauF1rESCLc,2192
+opentf_toolkit_nightly-0.62.0.dev1291.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
+opentf_toolkit_nightly-0.62.0.dev1291.dist-info/top_level.txt,sha256=_gPuE6GTT6UNXy1DjtmQSfCcZb_qYA2vWmjg7a30AGk,7
+opentf_toolkit_nightly-0.62.0.dev1291.dist-info/RECORD,,

{opentf_toolkit_nightly-0.62.0.dev1283.dist-info → opentf_toolkit_nightly-0.62.0.dev1291.dist-info}/LICENSE
File without changes

{opentf_toolkit_nightly-0.62.0.dev1283.dist-info → opentf_toolkit_nightly-0.62.0.dev1291.dist-info}/WHEEL
File without changes

{opentf_toolkit_nightly-0.62.0.dev1283.dist-info → opentf_toolkit_nightly-0.62.0.dev1291.dist-info}/top_level.txt
File without changes