crossplane-function-pythonic 0.0.7a0__py3-none-any.whl → 0.0.7b0__py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these versions as they appear in their public registries.
@@ -0,0 +1,260 @@
+"""A Crossplane composition function."""
+
+import asyncio
+import base64
+import builtins
+import importlib
+import inspect
+import logging
+import sys
+
+import grpc
+import crossplane.function.response
+from crossplane.function.proto.v1 import run_function_pb2 as fnv1
+from crossplane.function.proto.v1 import run_function_pb2_grpc as grpcv1
+from .. import pythonic
+
+builtins.BaseComposite = pythonic.BaseComposite
+builtins.append = pythonic.append
+builtins.Map = pythonic.Map
+builtins.List = pythonic.List
+builtins.Unknown = pythonic.Unknown
+builtins.Yaml = pythonic.Yaml
+builtins.Json = pythonic.Json
+builtins.B64Encode = pythonic.B64Encode
+builtins.B64Decode = pythonic.B64Decode
+
+logger = logging.getLogger(__name__)
+
+
+class FunctionRunner(grpcv1.FunctionRunnerService):
+    """A FunctionRunner handles gRPC RunFunctionRequests."""
+
+    def __init__(self, debug=False):
+        """Create a new FunctionRunner."""
+        self.debug = debug
+        self.clazzes = {}
+
+    def invalidate_module(self, module):
+        self.clazzes.clear()
+        if module in sys.modules:
+            del sys.modules[module]
+        importlib.invalidate_caches()
+
+    async def RunFunction(
+        self, request: fnv1.RunFunctionRequest, _: grpc.aio.ServicerContext
+    ) -> fnv1.RunFunctionResponse:
+        try:
+            return await self.run_function(request)
+        except:
+            logger.exception('Exception thrown in run function')
+            raise
+
+    async def run_function(self, request):
+        composite = request.observed.composite.resource
+        name = list(reversed(composite['apiVersion'].split('/')[0].split('.')))
+        name.append(composite['kind'])
+        name.append(composite['metadata']['name'])
+        logger = logging.getLogger('.'.join(name))
+        if 'iteration' in request.context:
+            request.context['iteration'] = request.context['iteration'] + 1
+        else:
+            request.context['iteration'] = 1
+        logger.debug(f"Starting compose, {ordinal(request.context['iteration'])} pass")
+
+        response = crossplane.function.response.to(request)
+
+        if composite['apiVersion'] == 'pythonic.fortra.com/v1alpha1' and composite['kind'] == 'Composite':
+            if 'composite' not in composite['spec']:
+                logger.error('Missing spec "composite"')
+                crossplane.function.response.fatal(response, 'Missing spec "composite"')
+                return response
+            composite = composite['spec']['composite']
+        else:
+            if 'composite' not in request.input:
+                logger.error('Missing input "composite"')
+                crossplane.function.response.fatal(response, 'Missing input "composite"')
+                return response
+            composite = request.input['composite']
+
+        clazz = self.clazzes.get(composite)
+        if not clazz:
+            if '\n' in composite:
+                module = Module()
+                try:
+                    exec(composite, module.__dict__)
+                except Exception as e:
+                    logger.exception('Exec exception')
+                    crossplane.function.response.fatal(response, f"Exec exception: {e}")
+                    return response
+                composite = ['<script>', 'Composite']
+            else:
+                composite = composite.rsplit('.', 1)
+                if len(composite) == 1:
+                    logger.error(f"Composite class name does not include module: {composite[0]}")
+                    crossplane.function.response.fatal(response, f"Composite class name does not include module: {composite[0]}")
+                    return response
+                try:
+                    module = importlib.import_module(composite[0])
+                except Exception as e:
+                    logger.error(str(e))
+                    crossplane.function.response.fatal(response, f"Import module exception: {e}")
+                    return response
+            clazz = getattr(module, composite[1], None)
+            if not clazz:
+                logger.error(f"{composite[0]} did not define: {composite[1]}")
+                crossplane.function.response.fatal(response, f"{composite[0]} did not define: {composite[1]}")
+                return response
+            composite = '.'.join(composite)
+            if not inspect.isclass(clazz):
+                logger.error(f"{composite} is not a class")
+                crossplane.function.response.fatal(response, f"{composite} is not a class")
+                return response
+            if not issubclass(clazz, BaseComposite):
+                logger.error(f"{composite} is not a subclass of BaseComposite")
+                crossplane.function.response.fatal(response, f"{composite} is not a subclass of BaseComposite")
+                return response
+            self.clazzes[composite] = clazz
+
+        try:
+            composite = clazz(request, response, logger)
+        except Exception as e:
+            logger.exception('Instantiate exception')
+            crossplane.function.response.fatal(response, f"Instantiate exception: {e}")
+            return response
+
+        try:
+            result = composite.compose()
+            if asyncio.iscoroutine(result):
+                await result
+        except Exception as e:
+            logger.exception('Compose exception')
+            crossplane.function.response.fatal(response, f"Compose exception: {e}")
+            return response
+
+        requested = []
+        for name, required in composite.requireds:
+            if required.apiVersion and required.kind:
+                r = Map(apiVersion=required.apiVersion, kind=required.kind)
+                if required.namespace:
+                    r.namespace = required.namespace
+                if required.matchName:
+                    r.matchName = required.matchName
+                for key, value in required.matchLabels:
+                    r.matchLabels[key] = value
+                if r != composite.context._requireds[name]:
+                    composite.context._requireds[name] = r
+                    requested.append(name)
+        if requested:
+            logger.info(f"Requireds requested: {','.join(requested)}")
+            return response
+
+        unknownResources = []
+        warningResources = []
+        fatalResources = []
+        for name, resource in sorted(entry for entry in composite.resources):
+            unknowns = resource.desired._getUnknowns
+            if unknowns:
+                unknownResources.append(name)
+                warning = False
+                fatal = False
+                if resource.observed:
+                    warningResources.append(name)
+                    warning = True
+                    if resource.unknownsFatal or (resource.unknownsFatal is None and composite.unknownsFatal):
+                        fatalResources.append(name)
+                        fatal = True
+                if self.debug:
+                    for destination, source in sorted(unknowns.items()):
+                        destination = self.trimFullName(destination)
+                        source = self.trimFullName(source)
+                        if fatal:
+                            logger.error(f'Observed unknown: {destination} = {source}')
+                        elif warning:
+                            logger.warning(f'Observed unknown: {destination} = {source}')
+                        else:
+                            logger.debug(f'Desired unknown: {destination} = {source}')
+                if resource.observed:
+                    resource.desired._patchUnknowns(resource.observed)
+                else:
+                    del composite.resources[name]
+
+        if fatalResources:
+            level = logger.error
+            reason = 'FatalUnknowns'
+            message = f"Observed resources with unknowns: {','.join(fatalResources)}"
+            status = False
+            event = composite.events.fatal
+        elif warningResources:
+            level = logger.warning
+            reason = 'ObservedUnknowns'
+            message = f"Observed resources with unknowns: {','.join(warningResources)}"
+            status = False
+            event = composite.events.warning
+        elif unknownResources:
+            level = logger.info
+            reason = 'DesiredUnknowns'
+            message = f"Desired resources with unknowns: {','.join(unknownResources)}"
+            status = False
+            event = composite.events.info
+        else:
+            level = None
+            reason = 'AllComposed'
+            message = 'All resources are composed'
+            status = True
+            event = None
+        if not self.debug and level:
+            level(message)
+        composite.conditions.ResourcesComposed(reason, message, status)
+        if event:
+            event(reason, message)
+
+        for name, resource in composite.resources:
+            if resource.autoReady or (resource.autoReady is None and composite.autoReady):
+                if resource.ready is None:
+                    if resource.conditions.Ready.status:
+                        resource.ready = True
+
+        logger.info('Completed compose')
+        return response
+
+    def trimFullName(self, name):
+        name = name.split('.')
+        for values in (
+            ('request', 'observed', 'resources', None, 'resource'),
+            ('request', 'extra_resources', None, 'items', 'resource'),
+            ('response', 'desired', 'resources', None, 'resource'),
+        ):
+            if len(values) <= len(name):
+                for ix, value in enumerate(values):
+                    if value and value != name[ix] and not name[ix].startswith(f"{value}["):
+                        break
+                else:
+                    ix = 0
+                    for value in values:
+                        if value:
+                            if value == name[ix]:
+                                del name[ix]
+                            elif ix:
+                                name[ix-1] += name[ix][len(value):]
+                                del name[ix]
+                            else:
+                                name[ix] = name[ix][len(value):]
+                                ix += 1
+                        else:
+                            ix += 1
+                    break
+        return '.'.join(name)
+
+
+def ordinal(ix):
+    ix = int(ix)
+    if 11 <= (ix % 100) <= 13:
+        suffix = 'th'
+    else:
+        suffix = ['th', 'st', 'nd', 'rd', 'th'][min(ix % 10, 4)]
+    return str(ix) + suffix
+
+
+class Module:
+    pass
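
Note on usage: the FunctionRunner above accepts the composition's "composite" value either as a dotted "module.Class" name or as an inline multi-line script. An inline script is exec()'d and must define a class named Composite that subclasses the BaseComposite injected into builtins; the class is instantiated with the request, the response, and a logger, and its compose() method is then called (and awaited if it returns a coroutine). A minimal sketch of such a script, with the body left as a placeholder because the pythonic helper API itself is not part of this diff:

    class Composite(BaseComposite):
        async def compose(self):
            # Inspect observed state and declare desired resources here using the
            # pythonic helper API (not shown in this diff). compose() may also be a
            # plain method; the runner only awaits it when it returns a coroutine.
            pass
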
@@ -0,0 +1,187 @@
+"""The composition function's main CLI."""
+
+import argparse
+import asyncio
+import logging
+import os
+import pathlib
+import shlex
+import signal
+import sys
+import traceback
+
+import crossplane.function.logging
+import crossplane.function.proto.v1.run_function_pb2_grpc as grpcv1
+import grpc
+import pip._internal.cli.main
+
+from . import function
+
+
+def main():
+    try:
+        asyncio.run(Main().main())
+    except:
+        traceback.print_exc()
+        sys.exit(1)
+
+
+class Main:
+    async def main(self):
+        parser = argparse.ArgumentParser('Fortra Crossplane Function')
+        parser.add_argument(
+            '--debug', '-d',
+            action='store_true',
+            help='Emit debug logs.',
+        )
+        parser.add_argument(
+            '--log-name-width',
+            type=int,
+            default=40,
+            help='Width of the logger name in the log output, default 40',
+        )
+        parser.add_argument(
+            '--address',
+            default='0.0.0.0:9443',
+            help='Address at which to listen for gRPC connections, default: 0.0.0.0:9443',
+        )
+        parser.add_argument(
+            '--tls-certs-dir',
+            default=os.getenv('TLS_SERVER_CERTS_DIR'),
+            help='Serve using mTLS certificates.',
+        )
+        parser.add_argument(
+            '--insecure',
+            action='store_true',
+            help='Run without mTLS credentials. Ignored if --tls-certs-dir is supplied.',
+        )
+        parser.add_argument(
+            '--packages',
+            action='store_true',
+            help='Discover Python packages from function-pythonic ConfigMaps.'
+        )
+        parser.add_argument(
+            '--packages-secrets',
+            action='store_true',
+            help='Also discover Python packages from function-pythonic Secrets.'
+        )
+        parser.add_argument(
+            '--packages-namespace',
+            action='append',
+            default=[],
+            help='Namespaces to discover function-pythonic ConfigMaps and Secrets in, default is cluster-wide.',
+        )
+        parser.add_argument(
+            '--packages-dir',
+            default='./pythonic-packages',
+            help='Directory in which to store discovered function-pythonic ConfigMaps and Secrets, default "<cwd>/pythonic-packages"'
+        )
+        parser.add_argument(
+            '--pip-install',
+            help='Arguments passed to "pip install" to install additional Python packages.'
+        )
+        parser.add_argument(
+            '--python-path',
+            action='append',
+            default=[],
+            help='File system directories to add to the Python path',
+        )
+        parser.add_argument(
+            '--allow-oversize-protos',
+            action='store_true',
+            help='Allow oversized protobuf messages'
+        )
+        args = parser.parse_args()
+
+        self.configure_logging(args)
+
+        if args.pip_install:
+            pip._internal.cli.main.main(['install', *shlex.split(args.pip_install)])
+
+        # Do not write .pyc files; this allows read-only volumes and volumes with a mismatched uid.
+        sys.dont_write_bytecode = True
+        for path in reversed(args.python_path):
+            sys.path.insert(0, str(pathlib.Path(path).resolve()))
+
+        if args.allow_oversize_protos:
+            from google.protobuf.internal import api_implementation
+            if api_implementation._c_module:
+                api_implementation._c_module.SetAllowOversizeProtos(True)
+
+        grpc.aio.init_grpc_aio()
+        grpc_runner = function.FunctionRunner(args.debug)
+        grpc_server = grpc.aio.server()
+        grpcv1.add_FunctionRunnerServiceServicer_to_server(grpc_runner, grpc_server)
+        if args.tls_certs_dir:
+            certs = pathlib.Path(args.tls_certs_dir)
+            grpc_server.add_secure_port(
+                args.address,
+                grpc.ssl_server_credentials(
+                    private_key_certificate_chain_pairs=[(
+                        (certs / 'tls.key').read_bytes(),
+                        (certs / 'tls.crt').read_bytes(),
+                    )],
+                    root_certificates=(certs / 'ca.crt').read_bytes(),
+                    require_client_auth=True,
+                ),
+            )
+        else:
+            if not args.insecure:
+                raise ValueError('Either --tls-certs-dir or --insecure must be specified')
+            grpc_server.add_insecure_port(args.address)
+        await grpc_server.start()
+
+        if args.packages:
+            from . import packages
+            async with asyncio.TaskGroup() as tasks:
+                tasks.create_task(grpc_server.wait_for_termination())
+                tasks.create_task(packages.operator(
+                    args.packages_secrets,
+                    args.packages_namespace,
+                    args.packages_dir,
+                    grpc_server,
+                    grpc_runner,
+                ))
+        else:
+            def stop():
+                asyncio.ensure_future(grpc_server.stop(5))
+            loop = asyncio.get_event_loop()
+            loop.add_signal_handler(signal.SIGINT, stop)
+            loop.add_signal_handler(signal.SIGTERM, stop)
+            await grpc_server.wait_for_termination()
+
+    def configure_logging(self, args):
+        formatter = Formatter(args.log_name_width)
+        handler = logging.StreamHandler()
+        handler.setFormatter(formatter)
+        logger = logging.getLogger()
+        logger.addHandler(handler)
+        logger.setLevel(logging.DEBUG if args.debug else logging.INFO)
+
+
+class Formatter(logging.Formatter):
+    def __init__(self, name_width):
+        super(Formatter, self).__init__(
+            f"[{{asctime}}.{{msecs:03.0f}}] {{sname:{name_width}.{name_width}}} [{{levelname:8.8}}] {{message}}",
+            '%Y-%m-%d %H:%M:%S',
+            '{',
+        )
+        self.name_width = name_width
+
+    def format(self, record):
+        record.sname = record.name
+        extra = len(record.sname) - self.name_width
+        if extra > 0:
+            names = record.sname.split('.')
+            for ix, name in enumerate(names):
+                if len(name) > extra:
+                    names[ix] = name[extra:]
+                    break
+                names[ix] = name[:1]
+                extra -= len(name) - 1
+            record.sname = '.'.join(names)
+        return super(Formatter, self).format(record)
+
+
+if __name__ == '__main__':
+    main()
@@ -0,0 +1,158 @@
+
+import base64
+import importlib
+import logging
+import pathlib
+import sys
+
+import kopf
+
+
+PACKAGE_LABEL = {'function-pythonic.package': kopf.PRESENT}
+PACKAGES_DIR = None
+GRPC_SERVER = None
+GRPC_RUNNER = None
+
+
+def operator(packages_secrets, packages_namespace, packages_dir, grpc_server, grpc_runner):
+    logging.getLogger('kopf.objects').setLevel(logging.INFO)
+    global PACKAGES_DIR, GRPC_SERVER, GRPC_RUNNER
+    PACKAGES_DIR = packages_dir
+    GRPC_SERVER = grpc_server
+    GRPC_RUNNER = grpc_runner
+    PACKAGES_DIR = pathlib.Path(packages_dir).expanduser().resolve()
+    sys.path.insert(0, str(PACKAGES_DIR))
+    if packages_secrets:
+        kopf.on.create('', 'v1', 'secrets', labels=PACKAGE_LABEL)(create)
+        kopf.on.resume('', 'v1', 'secrets', labels=PACKAGE_LABEL)(create)
+        kopf.on.update('', 'v1', 'secrets', labels=PACKAGE_LABEL)(update)
+        kopf.on.delete('', 'v1', 'secrets', labels=PACKAGE_LABEL)(delete)
+    return kopf.operator(
+        standalone=True,
+        clusterwide=not packages_namespace,
+        namespaces=packages_namespace,
+    )
+
+
+@kopf.on.startup()
+async def startup(settings, **_):
+    settings.scanning.disabled = True
+
+
+@kopf.on.cleanup()
+async def cleanup(**_):
+    await GRPC_SERVER.stop(5)
+
+
+@kopf.on.create('', 'v1', 'configmaps', labels=PACKAGE_LABEL)
+@kopf.on.resume('', 'v1', 'configmaps', labels=PACKAGE_LABEL)
+async def create(body, logger, **_):
+    package_dir, package = get_package_dir(body)
+    if package_dir:
+        package_dir.mkdir(parents=True, exist_ok=True)
+        secret = body['kind'] == 'Secret'
+        invalidate = False
+        for name, text in body.get('data', {}).items():
+            package_file = package_dir / name
+            if secret:
+                package_file.write_bytes(base64.b64decode(text.encode('utf-8')))
+            else:
+                package_file.write_text(text)
+            if package_file.suffixes == ['.py']:
+                module = '.'.join(package + [package_file.stem])
+                GRPC_RUNNER.invalidate_module(module)
+                logger.info(f"Created module: {module}")
+            else:
+                logger.info(f"Created file: {'/'.join(package + [name])}")
+
+
+@kopf.on.update('', 'v1', 'configmaps', labels=PACKAGE_LABEL)
+async def update(body, old, logger, **_):
+    old_package_dir, old_package = get_package_dir(old)
+    if old_package_dir:
+        old_data = old.get('data', {})
+    else:
+        old_data = {}
+    old_names = set(old_data.keys())
+    package_dir, package = get_package_dir(body, logger)
+    if package_dir:
+        package_dir.mkdir(parents=True, exist_ok=True)
+        secret = body['kind'] == 'Secret'
+        for name, text in body.get('data', {}).items():
+            package_file = package_dir / name
+            if package_dir == old_package_dir and text == old_data.get(name, None):
+                action = 'Unchanged'
+            else:
+                if secret:
+                    package_file.write_bytes(base64.b64decode(text.encode('utf-8')))
+                else:
+                    package_file.write_text(text)
+                action = 'Updated' if package_dir == old_package_dir and name in old_names else 'Created'
+            if package_file.suffixes == ['.py']:
+                module = '.'.join(package + [package_file.stem])
+                if action != 'Unchanged':
+                    GRPC_RUNNER.invalidate_module(module)
+                logger.info(f"{action} module: {module}")
+            else:
+                logger.info(f"{action} file: {'/'.join(package + [name])}")
+            if package_dir == old_package_dir:
+                old_names.discard(name)
+    if old_package_dir:
+        for name in old_names:
+            package_file = old_package_dir / name
+            package_file.unlink(missing_ok=True)
+            if package_file.suffixes == ['.py']:
+                module = '.'.join(old_package + [package_file.stem])
+                GRPC_RUNNER.invalidate_module(module)
+                logger.info(f"Removed module: {module}")
+            else:
+                logger.info(f"Removed file: {'/'.join(old_package + [name])}")
+        while old_package and old_package_dir.is_dir() and not list(old_package_dir.iterdir()):
+            old_package_dir.rmdir()
+            module = '.'.join(old_package)
+            GRPC_RUNNER.invalidate_module(module)
+            logger.info(f"Removed package: {module}")
+            old_package_dir = old_package_dir.parent
+            old_package.pop()
+
+
+@kopf.on.delete('', 'v1', 'configmaps', labels=PACKAGE_LABEL)
+async def delete(old, logger, **_):
+    package_dir, package = get_package_dir(old)
+    if package_dir:
+        for name in old.get('data', {}).keys():
+            package_file = package_dir / name
+            package_file.unlink(missing_ok=True)
+            if package_file.suffixes == ['.py']:
+                module = '.'.join(package + [package_file.stem])
+                GRPC_RUNNER.invalidate_module(module)
+                logger.info(f"Deleted module: {module}")
+            else:
+                logger.info(f"Deleted file: {'/'.join(package + [name])}")
+        while package and package_dir.is_dir() and not list(package_dir.iterdir()):
+            package_dir.rmdir()
+            module = '.'.join(package)
+            GRPC_RUNNER.invalidate_module(module)
+            logger.info(f"Deleted package: {module}")
+            package_dir = package_dir.parent
+            package.pop()
+
+
+def get_package_dir(body, logger=None):
+    package = body.get('metadata', {}).get('labels', {}).get('function-pythonic.package', None)
+    if package is None:
+        if logger:
+            logger.error('function-pythonic.package label is missing')
+        return None, None
+    package_dir = PACKAGES_DIR
+    if package == '':
+        package = []
+    else:
+        package = package.split('.')
+        for segment in package:
+            if not segment.isidentifier():
+                if logger:
+                    logger.error('Package has invalid package name: %s', package)
+                return None, None
+            package_dir = package_dir / segment
+    return package_dir, package
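
The packages operator above maps labeled ConfigMaps (and, with --packages-secrets, Secrets) onto a directory tree that is added to sys.path: each data key is written as a file under a directory derived from the function-pythonic.package label, and keys ending in .py become importable modules whose cached composite classes are invalidated when they change. A small sketch of that mapping, using made-up names (acme.platform and network.py) and the default packages directory:

    import pathlib

    # Illustrative only: mirrors get_package_dir() for a ConfigMap labeled
    # function-pythonic.package=acme.platform that carries a "network.py" data key.
    packages_dir = pathlib.Path('./pythonic-packages').resolve()    # --packages-dir default
    package = 'acme.platform'.split('.')                            # ['acme', 'platform']
    package_file = packages_dir.joinpath(*package) / 'network.py'   # .../acme/platform/network.py
    module = '.'.join(package + [package_file.stem])                # 'acme.platform.network'
    print(package_file, module)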