ob-metaflow 2.14.3.1__py2.py3-none-any.whl → 2.15.3.1__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of ob-metaflow has been flagged as potentially problematic; review the advisory details in the registry listing before upgrading.

Files changed (29):
  1. metaflow/__init__.py +6 -0
  2. metaflow/cmd/code/__init__.py +230 -0
  3. metaflow/cmd/develop/stub_generator.py +5 -2
  4. metaflow/cmd/main_cli.py +1 -0
  5. metaflow/cmd/make_wrapper.py +48 -0
  6. metaflow/extension_support/plugins.py +1 -0
  7. metaflow/metaflow_config.py +2 -0
  8. metaflow/metaflow_environment.py +3 -1
  9. metaflow/mflog/__init__.py +4 -3
  10. metaflow/plugins/__init__.py +14 -0
  11. metaflow/plugins/aws/batch/batch_cli.py +4 -4
  12. metaflow/plugins/aws/batch/batch_decorator.py +8 -0
  13. metaflow/plugins/kubernetes/kubernetes_decorator.py +2 -1
  14. metaflow/plugins/kubernetes/kubernetes_jobsets.py +2 -0
  15. metaflow/plugins/pypi/bootstrap.py +18 -27
  16. metaflow/plugins/pypi/conda_environment.py +8 -8
  17. metaflow/plugins/pypi/parsers.py +268 -0
  18. metaflow/plugins/pypi/utils.py +18 -0
  19. metaflow/runner/subprocess_manager.py +12 -6
  20. metaflow/version.py +1 -1
  21. ob_metaflow-2.15.3.1.data/data/share/metaflow/devtools/Makefile +332 -0
  22. ob_metaflow-2.15.3.1.data/data/share/metaflow/devtools/Tiltfile +626 -0
  23. ob_metaflow-2.15.3.1.data/data/share/metaflow/devtools/pick_services.sh +104 -0
  24. {ob_metaflow-2.14.3.1.dist-info → ob_metaflow-2.15.3.1.dist-info}/METADATA +3 -3
  25. {ob_metaflow-2.14.3.1.dist-info → ob_metaflow-2.15.3.1.dist-info}/RECORD +29 -23
  26. {ob_metaflow-2.14.3.1.dist-info → ob_metaflow-2.15.3.1.dist-info}/WHEEL +1 -1
  27. {ob_metaflow-2.14.3.1.dist-info → ob_metaflow-2.15.3.1.dist-info}/entry_points.txt +1 -0
  28. {ob_metaflow-2.14.3.1.dist-info → ob_metaflow-2.15.3.1.dist-info}/LICENSE +0 -0
  29. {ob_metaflow-2.14.3.1.dist-info → ob_metaflow-2.15.3.1.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,626 @@
1
# Tiltfile: spin up a local Kubernetes development stack for Metaflow.
#
# Usage:
#   $ tilt up      # start the development environment
#   $ tilt down    # stop and clean everything up
#
# TODO:
# 1. move away from temporary images
# 2. introduce kueue and jobsets
# 3. lock versions

# Require a Tilt version with the features used below, and only allow
# the local minikube context so we never touch a real cluster.
version_settings(constraint='>=0.22.2')
allow_k8s_contexts('minikube')
16
+
17
# Dependency graph: each component maps to the components it requires.
components = {
    "metadata-service": ["postgresql"],
    "ui": ["postgresql", "minio"],
    "minio": [],
    "postgresql": [],
    "argo-workflows": [],
    "argo-events": ["argo-workflows"],
}

# SERVICES selects which components to run: empty or "all" means
# everything, otherwise a comma-separated list of component names.
services_env = os.getenv("SERVICES", "").strip().lower()

if services_env:
    if services_env == "all":
        requested_components = list(components.keys())
    else:
        requested_components = services_env.split(",")
else:
    requested_components = list(components.keys())

# Metaflow configuration accumulated by the sections below and written
# out at the end by write_config_files().
metaflow_config = {}
metaflow_config["METAFLOW_KUBERNETES_NAMESPACE"] = "default"

# AWS credentials file contents, populated by the MinIO section.
# Fix: this must be a string, not a list — write_config_files() calls
# .strip() on it and interpolates it with %s, and the MinIO section
# assigns a string. The empty string keeps the same falsy "not set"
# semantics as the old [].
aws_config = ""
40
+
41
# Build the shell command that materializes the generated configuration
# under .devtools/: always the Metaflow JSON config, plus an AWS
# credentials file when the MinIO section populated one.
def write_config_files():
    script = 'cat > .devtools/config_local.json <<EOF\n%s\nEOF\n' % encode_json(metaflow_config)
    if aws_config and aws_config.strip():
        script += 'cat > .devtools/aws_config <<EOF\n%s\nEOF\n' % aws_config.strip()
    return script
53
+
54
+ load('ext://helm_resource', 'helm_resource', 'helm_repo')
55
+ load('ext://helm_remote', 'helm_remote')
56
+
57
+
58
# Depth-first walk of the dependency graph. Returns `component` plus all
# of its transitive dependencies, dependencies first, with duplicates
# collapsed. The accumulator is shared across recursive calls.
def resolve(component, resolved=None):
    acc = resolved if resolved != None else []
    if component not in acc:
        for dep in components.get(component, []):
            resolve(dep, acc)
        acc.append(component)
    return acc
68
+
69
# Collect every known component name: all graph keys plus anything that
# appears as a dependency.
valid_components = []
for name in components.keys():
    if name not in valid_components:
        valid_components.append(name)
for dep_list in components.values():
    for dep in dep_list:
        if dep not in valid_components:
            valid_components.append(dep)

# Expand the requested components with their transitive dependencies,
# rejecting anything we do not recognize.
enabled_components = []
for name in requested_components:
    if name not in valid_components:
        fail("Unknown component: " + name)
    for resolved_name in resolve(name):
        if resolved_name not in enabled_components:
            enabled_components.append(resolved_name)

# Print a friendly summary when running `tilt up`.
if config.tilt_subcommand == 'up':
    print("\n📦 Components to install:")
    for name in enabled_components:
        print("• " + name)
        if components.get(name):
            print(" ↳ requires: " + ", ".join(components[name]))
93
+
94
# Resources whose readiness gates the final config-generation step.
config_resources = []

#################################################
# MINIO
#################################################
if "minio" in enabled_components:
    helm_remote(
        'minio',
        repo_name='minio-s3',
        repo_url='https://charts.min.io/',
        set=[
            'rootUser=rootuser',
            'rootPassword=rootpass123',
            # TODO: perturb the bucket name to avoid conflicts
            'buckets[0].name=metaflow-test',
            'buckets[0].policy=none',
            'buckets[0].purge=false',
            'mode=standalone',
            'replicas=1',
            'persistence.enabled=false',
            'resources.requests.memory=128Mi',
            'resources.requests.cpu=50m',
            'resources.limits.memory=256Mi',
            'resources.limits.cpu=100m',
        ],
    )

    k8s_resource(
        'minio',
        port_forwards=['9000:9000', '9001:9001'],
        links=[
            link('http://localhost:9000', 'MinIO API'),
            link('http://localhost:9001/login', 'MinIO Console (rootuser/rootpass123)'),
        ],
        labels=['minio'],
    )

    k8s_resource('minio-post-job', labels=['minio'])

    # Secret injected into Metaflow pods so they can reach MinIO via the
    # standard AWS environment variables.
    minio_secret = {
        'apiVersion': 'v1',
        'kind': 'Secret',
        'metadata': {'name': 'minio-secret'},
        'type': 'Opaque',
        'stringData': {
            'AWS_ACCESS_KEY_ID': 'rootuser',
            'AWS_SECRET_ACCESS_KEY': 'rootpass123',
            'AWS_ENDPOINT_URL_S3': 'http://minio.default.svc.cluster.local:9000',
        },
    }
    k8s_yaml(encode_yaml(minio_secret))

    metaflow_config["METAFLOW_DEFAULT_DATASTORE"] = "s3"
    metaflow_config["METAFLOW_DATASTORE_SYSROOT_S3"] = "s3://metaflow-test/metaflow"
    metaflow_config["METAFLOW_KUBERNETES_SECRETS"] = "minio-secret"

    aws_config = """[default]
aws_access_key_id = rootuser
aws_secret_access_key = rootpass123
endpoint_url = http://localhost:9000
"""
    config_resources.append('minio')
161
+
162
#################################################
# POSTGRESQL
#################################################
if "postgresql" in enabled_components:
    helm_remote(
        'postgresql',
        version='12.5.6',
        repo_name='postgresql',
        repo_url='https://charts.bitnami.com/bitnami',
        set=[
            'auth.username=metaflow',
            'auth.password=metaflow123',
            'auth.database=metaflow',
            # Keep the footprint small and startup/teardown fast: no
            # persistence, tight resource limits, no extra volumes.
            'primary.persistence.enabled=false',
            'primary.resources.requests.memory=128Mi',
            'primary.resources.requests.cpu=50m',
            'primary.resources.limits.memory=256Mi',
            'primary.resources.limits.cpu=100m',
            'primary.terminationGracePeriodSeconds=1',
            'primary.podSecurityContext.enabled=false',
            'primary.containerSecurityContext.enabled=false',
            'volumePermissions.enabled=false',
            'shmVolume.enabled=false',
            'primary.extraVolumes=null',
            'primary.extraVolumeMounts=null',
        ],
    )

    k8s_resource(
        'postgresql',
        port_forwards=['5432:5432'],
        links=[
            link('postgresql://metaflow:metaflow@localhost:5432/metaflow', 'PostgreSQL Connection'),
        ],
        labels=['postgresql'],
        resource_deps=components['postgresql'],
    )

    config_resources.append('postgresql')
201
+
202
#################################################
# ARGO WORKFLOWS
#################################################
if "argo-workflows" in enabled_components:
    helm_remote(
        'argo-workflows',
        repo_name='argo',
        repo_url='https://argoproj.github.io/argo-helm',
        set=[
            # No auth locally; fast probes and small resource footprint.
            'server.extraArgs[0]=--auth-mode=server',
            'workflow.serviceAccount.create=true',
            'workflow.rbac.create=true',
            'server.livenessProbe.initialDelaySeconds=1',
            'server.readinessProbe.initialDelaySeconds=1',
            'server.resources.requests.memory=128Mi',
            'server.resources.requests.cpu=50m',
            'server.resources.limits.memory=256Mi',
            'server.resources.limits.cpu=100m',
            'controller.resources.requests.memory=128Mi',
            'controller.resources.requests.cpu=50m',
            'controller.resources.limits.memory=256Mi',
            'controller.resources.limits.cpu=100m',
        ],
    )

    # Allow pods in the default namespace to report workflow task results.
    task_results_role = {
        'apiVersion': 'rbac.authorization.k8s.io/v1',
        'kind': 'Role',
        'metadata': {
            'name': 'argo-workflowtaskresults-role',
            'namespace': 'default',
        },
        'rules': [{
            'apiGroups': ['argoproj.io'],
            'resources': ['workflowtaskresults'],
            'verbs': ['create', 'patch', 'get', 'list'],
        }],
    }
    k8s_yaml(encode_yaml(task_results_role))

    task_results_binding = {
        'apiVersion': 'rbac.authorization.k8s.io/v1',
        'kind': 'RoleBinding',
        'metadata': {
            'name': 'default-argo-workflowtaskresults-binding',
            'namespace': 'default',
        },
        'subjects': [{
            'kind': 'ServiceAccount',
            'name': 'default',
            'namespace': 'default',
        }],
        'roleRef': {
            'kind': 'Role',
            'name': 'argo-workflowtaskresults-role',
            'apiGroup': 'rbac.authorization.k8s.io',
        },
    }
    k8s_yaml(encode_yaml(task_results_binding))

    k8s_resource(
        workload='argo-workflows-server',
        port_forwards=['2746:2746'],
        links=[
            link('http://localhost:2746', 'Argo Workflows UI'),
        ],
        labels=['argo-workflows'],
        resource_deps=components['argo-workflows'],
    )

    k8s_resource(
        workload='argo-workflows-workflow-controller',
        labels=['argo-workflows'],
        resource_deps=components['argo-workflows'],
    )

    config_resources.append('argo-workflows-workflow-controller')
    config_resources.append('argo-workflows-server')
278
+
279
#################################################
# ARGO EVENTS
#################################################
if "argo-events" in enabled_components:
    helm_remote(
        'argo-events',
        repo_name='argo',
        repo_url='https://argoproj.github.io/argo-helm',
        set=[
            'crds.install=true',
            'controller.metrics.enabled=true',
            'controller.livenessProbe.initialDelaySeconds=1',
            'controller.readinessProbe.initialDelaySeconds=1',
            'controller.resources.requests.memory=64Mi',
            'controller.resources.requests.cpu=25m',
            'controller.resources.limits.memory=128Mi',
            'controller.resources.limits.cpu=50m',
            'configs.jetstream.streamConfig.maxAge=72h',
            'configs.jetstream.streamConfig.replicas=1',
            'controller.rbac.enabled=true',
            'controller.rbac.namespaced=false',
            'controller.serviceAccount.create=true',
            'controller.serviceAccount.name=argo-events-events-controller-sa',
            # Two JetStream version entries: a floating "latest" and a
            # pinned 2.9.15 (the version the EventBus below selects).
            'configs.jetstream.versions[0].configReloaderImage=natsio/nats-server-config-reloader:latest',
            'configs.jetstream.versions[0].metricsExporterImage=natsio/prometheus-nats-exporter:latest',
            'configs.jetstream.versions[0].natsImage=nats:latest',
            'configs.jetstream.versions[0].startCommand=/nats-server',
            'configs.jetstream.versions[0].version=latest',
            'configs.jetstream.versions[1].configReloaderImage=natsio/nats-server-config-reloader:latest',
            'configs.jetstream.versions[1].metricsExporterImage=natsio/prometheus-nats-exporter:latest',
            'configs.jetstream.versions[1].natsImage=nats:2.9.15',
            'configs.jetstream.versions[1].startCommand=/nats-server',
            'configs.jetstream.versions[1].version=2.9.15',
        ],
    )

    # Service account that sensors use to submit workflows.
    operate_workflow_sa = {
        'apiVersion': 'v1',
        'kind': 'ServiceAccount',
        'metadata': {
            'name': 'operate-workflow-sa',
            'namespace': 'default',
        },
    }
    k8s_yaml(encode_yaml(operate_workflow_sa))

    operate_workflow_role = {
        'apiVersion': 'rbac.authorization.k8s.io/v1',
        'kind': 'Role',
        'metadata': {
            'name': 'operate-workflow-role',
            'namespace': 'default',
        },
        'rules': [{
            'apiGroups': ['argoproj.io'],
            'resources': [
                'workflows',
                'workflowtemplates',
                'cronworkflows',
                'clusterworkflowtemplates',
            ],
            'verbs': ['*'],
        }],
    }
    k8s_yaml(encode_yaml(operate_workflow_role))

    operate_workflow_binding = {
        'apiVersion': 'rbac.authorization.k8s.io/v1',
        'kind': 'RoleBinding',
        'metadata': {
            'name': 'operate-workflow-role-binding',
            'namespace': 'default',
        },
        'roleRef': {
            'apiGroup': 'rbac.authorization.k8s.io',
            'kind': 'Role',
            'name': 'operate-workflow-role',
        },
        'subjects': [{
            'kind': 'ServiceAccount',
            'name': 'operate-workflow-sa',
        }],
    }
    k8s_yaml(encode_yaml(operate_workflow_binding))

    # Read-only visibility into event sources for the Argo Workflows UI.
    view_events_role = {
        'apiVersion': 'rbac.authorization.k8s.io/v1',
        'kind': 'Role',
        'metadata': {
            'name': 'view-events-role',
            'namespace': 'default',
        },
        'rules': [{
            'apiGroups': ['argoproj.io'],
            'resources': [
                'eventsources',
                'eventbuses',
                'sensors',
            ],
            'verbs': [
                'get',
                'list',
                'watch',
            ],
        }],
    }
    k8s_yaml(encode_yaml(view_events_role))

    view_events_binding = {
        'apiVersion': 'rbac.authorization.k8s.io/v1',
        'kind': 'RoleBinding',
        'metadata': {
            'name': 'view-events-role-binding',
            'namespace': 'default',
        },
        'roleRef': {
            'apiGroup': 'rbac.authorization.k8s.io',
            'kind': 'Role',
            'name': 'view-events-role',
        },
        'subjects': [{
            'kind': 'ServiceAccount',
            'name': 'argo-workflows',
            'namespace': 'default',
        }],
    }
    k8s_yaml(encode_yaml(view_events_binding))

    event_bus = {
        'apiVersion': 'argoproj.io/v1alpha1',
        'kind': 'EventBus',
        'metadata': {
            'name': 'default',
            'namespace': 'default',
        },
        'spec': {
            'jetstream': {
                'version': '2.9.15',
                'replicas': 3,
                'containerTemplate': {
                    'resources': {
                        'limits': {
                            'cpu': '100m',
                            'memory': '128Mi',
                        },
                        'requests': {
                            'cpu': '100m',
                            'memory': '128Mi',
                        },
                    },
                },
            },
        },
    }
    k8s_yaml(encode_yaml(event_bus))

    # Webhook event source that Metaflow posts events to.
    event_source = {
        'apiVersion': 'argoproj.io/v1alpha1',
        'kind': 'EventSource',
        'metadata': {
            'name': 'argo-events-webhook',
            'namespace': 'default',
        },
        'spec': {
            'template': {
                'container': {
                    'resources': {
                        'requests': {
                            'cpu': '25m',
                            'memory': '50Mi',
                        },
                        'limits': {
                            'cpu': '25m',
                            'memory': '50Mi',
                        },
                    },
                },
            },
            'service': {
                'ports': [
                    {
                        'port': 12000,
                        'targetPort': 12000,
                    },
                ],
            },
            'webhook': {
                'metaflow-event': {
                    'port': '12000',
                    'endpoint': '/metaflow-event',
                    'method': 'POST',
                },
            },
        },
    }
    k8s_yaml(encode_yaml(event_source))

    # Create a custom service and port-forward it because tilt :/
    webhook_svc = {
        'apiVersion': 'v1',
        'kind': 'Service',
        'metadata': {
            'name': 'argo-events-webhook-eventsource-svc-tilt',
            'namespace': 'default',
        },
        'spec': {
            'ports': [{
                'port': 12000,
                'protocol': 'TCP',
                'targetPort': 12000,
            }],
            'selector': {
                'controller': 'eventsource-controller',
                'eventsource-name': 'argo-events-webhook',
                'owner-name': 'argo-events-webhook',
            },
            'type': 'ClusterIP',
        },
    }
    k8s_yaml(encode_yaml(webhook_svc))

    # Wait for the service and a Running event-source pod, then keep a
    # kubectl port-forward alive for the webhook endpoint.
    local_resource(
        name='argo-events-webhook-eventsource-svc',
        serve_cmd='while ! kubectl get service/argo-events-webhook-eventsource-svc-tilt >/dev/null 2>&1 || ! kubectl get pods -l eventsource-name=argo-events-webhook -o jsonpath="{.items[*].status.phase}" | grep -q "Running"; do sleep 5; done && kubectl port-forward service/argo-events-webhook-eventsource-svc-tilt 12000:12000',
        links=[
            link('http://localhost:12000/metaflow-event', 'Argo Events Webhook'),
        ],
        labels=['argo-events'],
    )

    k8s_resource(
        'argo-events-controller-manager',
        labels=['argo-events'],
    )

    metaflow_config["METAFLOW_ARGO_EVENTS_EVENT"] = "metaflow-event"
    metaflow_config["METAFLOW_ARGO_EVENTS_EVENT_BUS"] = "default"
    metaflow_config["METAFLOW_ARGO_EVENTS_EVENT_SOURCE"] = "argo-events-webhook"
    metaflow_config["METAFLOW_ARGO_EVENTS_SERVICE_ACCOUNT"] = "operate-workflow-sa"
    metaflow_config["METAFLOW_ARGO_EVENTS_WEBHOOK_AUTH"] = "service"
    metaflow_config["METAFLOW_ARGO_EVENTS_INTERNAL_WEBHOOK_URL"] = "http://argo-events-webhook-eventsource-svc:12000/metaflow-event"
    metaflow_config["METAFLOW_ARGO_EVENTS_WEBHOOK_URL"] = "http://localhost:12000/metaflow-event"

    config_resources.append('argo-events-controller-manager')
    config_resources.append('argo-events-webhook-eventsource-svc')
518
+
519
#################################################
# METADATA SERVICE
#################################################
if "metadata-service" in enabled_components:
    helm_remote(
        'metaflow-service',
        repo_name='metaflow-tools',
        repo_url='https://outerbounds.github.io/metaflow-tools',
        set=[
            'metadatadb.user=metaflow',
            'metadatadb.password=metaflow123',
            'metadatadb.database=metaflow',
            'metadatadb.host=postgresql',
            'image.repository=public.ecr.aws/p7g1e3j4/metaflow-service',
            'image.tag=2.4.13-fbcc7d04',
            'resources.requests.cpu=25m',
            'resources.requests.memory=64Mi',
            'resources.limits.cpu=50m',
            'resources.limits.memory=128Mi',
        ],
    )

    k8s_resource(
        'metaflow-service',
        port_forwards=['8080:8080'],
        links=[link('http://localhost:8080/ping', 'Ping Metaflow Service')],
        labels=['metadata-service'],
        resource_deps=components['metadata-service'],
    )

    # Host-side clients talk to the port-forward; in-cluster tasks use
    # the internal service DNS name.
    metaflow_config["METAFLOW_DEFAULT_METADATA"] = "service"
    metaflow_config["METAFLOW_SERVICE_URL"] = "http://localhost:8080"
    metaflow_config["METAFLOW_SERVICE_INTERNAL_URL"] = "http://metaflow-service.default.svc.cluster.local:8080"

    config_resources.append('metaflow-service')
554
+
555
#################################################
# METAFLOW UI
#################################################
if "ui" in enabled_components:
    helm_remote(
        'metaflow-ui',
        repo_name='metaflow-tools',
        repo_url='https://outerbounds.github.io/metaflow-tools',
        set=[
            'uiBackend.metadatadb.user=metaflow',
            'uiBackend.metadatadb.password=metaflow123',
            'uiBackend.metadatadb.name=metaflow',
            'uiBackend.metadatadb.host=postgresql',
            'uiBackend.metaflowDatastoreSysRootS3=s3://metaflow-test',
            'uiBackend.metaflowS3EndpointURL=http://minio.default.svc.cluster.local:9000',
            'uiBackend.image.name=public.ecr.aws/p7g1e3j4/metaflow-service',
            'uiBackend.image.tag=2.4.13-fbcc7d04',
            # MinIO credentials for the cache/artifact reads.
            'uiBackend.env[0].name=AWS_ACCESS_KEY_ID',
            'uiBackend.env[0].value=rootuser',
            'uiBackend.env[1].name=AWS_SECRET_ACCESS_KEY',
            'uiBackend.env[1].value=rootpass123',
            # TODO: configure lower cache limits
            'uiBackend.resources.requests.cpu=100m',
            'uiBackend.resources.requests.memory=256Mi',
            'uiStatic.metaflowUIBackendURL=http://localhost:8083/api',
            'uiStatic.image.name=public.ecr.aws/outerbounds/metaflow_ui',
            'uiStatic.image.tag=v1.3.13-5-g5dd049e',
            'uiStatic.resources.requests.cpu=25m',
            'uiStatic.resources.requests.memory=64Mi',
            'uiStatic.resources.limits.cpu=50m',
            'uiStatic.resources.limits.memory=128Mi',
        ],
    )

    # Static frontend on :3000, backend API on :8083.
    k8s_resource(
        'metaflow-ui-static',
        port_forwards=['3000:3000'],
        links=[link('http://localhost:3000', 'Metaflow UI')],
        labels=['metaflow-ui'],
        resource_deps=components['ui'],
    )

    k8s_resource(
        'metaflow-ui',
        port_forwards=['8083:8083'],
        links=[link('http://localhost:3000', 'Metaflow UI')],
        labels=['metaflow-ui'],
        resource_deps=components['ui'],
    )

    metaflow_config["METAFLOW_UI_URL"] = "http://localhost:3000"

    config_resources.append('metaflow-ui')
    config_resources.append('metaflow-ui-static')
609
+
610
# Detect the cluster node architecture so conda solves for the right
# linux platform (arm64 nodes -> linux-aarch64, everything else ->
# linux-64).
arch_cmd = '''
ARCH=$(kubectl get nodes -o jsonpath='{.items[0].status.nodeInfo.architecture}')
case "$ARCH" in
  arm64) echo linux-aarch64 ;;
  amd64) echo linux-64 ;;
  *) echo linux-64 ;;
esac
'''

# For @conda/@pypi emulation
metaflow_config["METAFLOW_KUBERNETES_CONDA_ARCH"] = str(local(arch_cmd)).strip()

# Write the accumulated configuration once every gating resource is up.
local_resource(
    name="generate-configs",
    cmd=write_config_files(),
    resource_deps=config_resources,
)