mage-ai 0.8.91__py3-none-any.whl → 0.8.93__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mage-ai might be problematic.

Files changed (103)
  1. mage_ai/api/policies/BlockPolicy.py +1 -0
  2. mage_ai/api/policies/PipelinePolicy.py +6 -0
  3. mage_ai/api/presenters/BlockPresenter.py +1 -0
  4. mage_ai/api/presenters/PipelinePresenter.py +2 -0
  5. mage_ai/data_preparation/decorators.py +4 -0
  6. mage_ai/data_preparation/executors/block_executor.py +68 -3
  7. mage_ai/data_preparation/models/block/__init__.py +212 -67
  8. mage_ai/data_preparation/models/block/constants.py +3 -1
  9. mage_ai/data_preparation/models/constants.py +8 -8
  10. mage_ai/data_preparation/models/pipeline.py +83 -5
  11. mage_ai/data_preparation/repo_manager.py +5 -2
  12. mage_ai/data_preparation/shared/constants.py +2 -1
  13. mage_ai/data_preparation/templates/conditionals/base.jinja +11 -0
  14. mage_ai/data_preparation/templates/constants.py +7 -0
  15. mage_ai/io/mssql.py +11 -1
  16. mage_ai/io/sql.py +8 -1
  17. mage_ai/orchestration/db/migrations/versions/dfe49d040487_add_condition_failed_status_to_block_.py +39 -0
  18. mage_ai/orchestration/db/models/schedules.py +5 -1
  19. mage_ai/orchestration/pipeline_scheduler.py +27 -17
  20. mage_ai/server/api/downloads.py +64 -0
  21. mage_ai/server/constants.py +1 -1
  22. mage_ai/server/execution_manager.py +3 -2
  23. mage_ai/server/frontend_dist/404.html +2 -2
  24. mage_ai/server/frontend_dist/404.html.html +2 -2
  25. mage_ai/server/frontend_dist/_next/static/chunks/1424-321c8c08a2b05c19.js +1 -0
  26. mage_ai/server/frontend_dist/_next/static/chunks/2786-2b3ad2cf216fae42.js +1 -0
  27. mage_ai/server/frontend_dist/_next/static/chunks/3714-3bd2a8c979d6d820.js +1 -0
  28. mage_ai/server/frontend_dist/_next/static/chunks/3752-8f15fe0ca9c23cf4.js +1 -0
  29. mage_ai/server/frontend_dist/_next/static/chunks/{4476-cdae7a65db573bb7.js → 4476-c1a62e69cd8e14d5.js} +1 -1
  30. mage_ai/server/frontend_dist/_next/static/chunks/pages/_app-8aaee96edc252aa3.js +1 -0
  31. mage_ai/server/frontend_dist/_next/static/chunks/pages/manage-88c03376d807012e.js +1 -0
  32. mage_ai/server/frontend_dist/_next/static/chunks/pages/pipeline-runs-915825c19bf42fa1.js +1 -0
  33. mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/backfills-bf2d83dabe1bd25a.js +1 -0
  34. mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/edit-f0940870ff5a17f6.js +1 -0
  35. mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/logs-8ee12ce8362ed576.js +1 -0
  36. mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/monitors/block-runs-a64f7a0aba0f481d.js +1 -0
  37. mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/monitors/block-runtime-3a3a115ab1a86e2f.js +1 -0
  38. mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/runs/[run]-160881dab5ef66d8.js +1 -0
  39. mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/runs-69d63c14abf8cf68.js +1 -0
  40. mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/syncs-6092226e191dd720.js +1 -0
  41. mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines-549f4708f2912a7a.js +1 -0
  42. mage_ai/server/frontend_dist/_next/static/zlCBBK90aKYZtPlYLj9_T/_buildManifest.js +1 -0
  43. mage_ai/server/frontend_dist/files.html +2 -2
  44. mage_ai/server/frontend_dist/index.html +2 -2
  45. mage_ai/server/frontend_dist/manage/users/[user].html +2 -2
  46. mage_ai/server/frontend_dist/manage/users.html +2 -2
  47. mage_ai/server/frontend_dist/manage.html +2 -2
  48. mage_ai/server/frontend_dist/pipeline-runs.html +2 -2
  49. mage_ai/server/frontend_dist/pipelines/[pipeline]/backfills/[...slug].html +2 -2
  50. mage_ai/server/frontend_dist/pipelines/[pipeline]/backfills.html +2 -2
  51. mage_ai/server/frontend_dist/pipelines/[pipeline]/edit.html +2 -2
  52. mage_ai/server/frontend_dist/pipelines/[pipeline]/logs.html +2 -2
  53. mage_ai/server/frontend_dist/pipelines/[pipeline]/monitors/block-runs.html +2 -2
  54. mage_ai/server/frontend_dist/pipelines/[pipeline]/monitors/block-runtime.html +2 -2
  55. mage_ai/server/frontend_dist/pipelines/[pipeline]/monitors.html +2 -2
  56. mage_ai/server/frontend_dist/pipelines/[pipeline]/runs/[run].html +2 -2
  57. mage_ai/server/frontend_dist/pipelines/[pipeline]/runs.html +2 -2
  58. mage_ai/server/frontend_dist/pipelines/[pipeline]/settings.html +2 -2
  59. mage_ai/server/frontend_dist/pipelines/[pipeline]/syncs.html +2 -2
  60. mage_ai/server/frontend_dist/pipelines/[pipeline]/triggers/[...slug].html +2 -2
  61. mage_ai/server/frontend_dist/pipelines/[pipeline]/triggers.html +2 -2
  62. mage_ai/server/frontend_dist/pipelines/[pipeline].html +2 -2
  63. mage_ai/server/frontend_dist/pipelines.html +2 -2
  64. mage_ai/server/frontend_dist/settings/account/profile.html +2 -2
  65. mage_ai/server/frontend_dist/settings/workspace/preferences.html +2 -2
  66. mage_ai/server/frontend_dist/settings/workspace/sync-data.html +2 -2
  67. mage_ai/server/frontend_dist/settings/workspace/users.html +2 -2
  68. mage_ai/server/frontend_dist/settings.html +2 -2
  69. mage_ai/server/frontend_dist/sign-in.html +2 -2
  70. mage_ai/server/frontend_dist/terminal.html +2 -2
  71. mage_ai/server/frontend_dist/test.html +2 -2
  72. mage_ai/server/frontend_dist/triggers.html +2 -2
  73. mage_ai/server/server.py +8 -0
  74. mage_ai/server/websocket_server.py +3 -2
  75. mage_ai/services/spark/config.py +8 -2
  76. mage_ai/services/spark/spark.py +64 -22
  77. mage_ai/shared/environments.py +4 -8
  78. mage_ai/tests/api/operations/test_syncs.py +1 -1
  79. mage_ai/tests/data_preparation/models/test_pipeline.py +11 -1
  80. {mage_ai-0.8.91.dist-info → mage_ai-0.8.93.dist-info}/METADATA +1 -1
  81. {mage_ai-0.8.91.dist-info → mage_ai-0.8.93.dist-info}/RECORD +87 -83
  82. mage_ai/server/frontend_dist/_next/static/chunks/1424-c6b0d89ffb4a10b9.js +0 -1
  83. mage_ai/server/frontend_dist/_next/static/chunks/3714-c70e815b08e3d9be.js +0 -1
  84. mage_ai/server/frontend_dist/_next/static/chunks/3752-bd78037feb0a755f.js +0 -1
  85. mage_ai/server/frontend_dist/_next/static/chunks/pages/_app-aa11738683e2250f.js +0 -1
  86. mage_ai/server/frontend_dist/_next/static/chunks/pages/manage-06aa8a8f1ca2e8d8.js +0 -1
  87. mage_ai/server/frontend_dist/_next/static/chunks/pages/pipeline-runs-3260a2dac8df672e.js +0 -1
  88. mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/backfills-f08b51d9dc56eab5.js +0 -1
  89. mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/edit-43e71712d3fc0299.js +0 -1
  90. mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/logs-264439be4f197741.js +0 -1
  91. mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/monitors/block-runs-91ba61b9030eff1f.js +0 -1
  92. mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/monitors/block-runtime-0bbae5456b0e6e82.js +0 -1
  93. mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/runs/[run]-86d1477c6671ea30.js +0 -1
  94. mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/runs-4b0c098074dd3e6d.js +0 -1
  95. mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/syncs-891c3d3f7a2b634b.js +0 -1
  96. mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines-f4d470fe28b74de7.js +0 -1
  97. mage_ai/server/frontend_dist/_next/static/j-J6532RA0pcVgjHCeKSz/_buildManifest.js +0 -1
  98. /mage_ai/server/frontend_dist/_next/static/{j-J6532RA0pcVgjHCeKSz → zlCBBK90aKYZtPlYLj9_T}/_middlewareManifest.js +0 -0
  99. /mage_ai/server/frontend_dist/_next/static/{j-J6532RA0pcVgjHCeKSz → zlCBBK90aKYZtPlYLj9_T}/_ssgManifest.js +0 -0
  100. {mage_ai-0.8.91.dist-info → mage_ai-0.8.93.dist-info}/LICENSE +0 -0
  101. {mage_ai-0.8.91.dist-info → mage_ai-0.8.93.dist-info}/WHEEL +0 -0
  102. {mage_ai-0.8.91.dist-info → mage_ai-0.8.93.dist-info}/entry_points.txt +0 -0
  103. {mage_ai-0.8.91.dist-info → mage_ai-0.8.93.dist-info}/top_level.txt +0 -0
mage_ai/data_preparation/models/pipeline.py CHANGED
@@ -41,7 +41,6 @@ from mage_ai.shared.strings import format_enum
  from mage_ai.shared.utils import clean_name

  CYCLE_DETECTION_ERR_MESSAGE = 'A cycle was detected in this pipeline'
- METADATA_FILE_NAME = 'metadata.yaml'


  class Pipeline:
@@ -142,7 +141,7 @@ class Pipeline:
  # Copy pipeline files from template folder
  copy_template_directory('pipeline', pipeline_path)
  # Update metadata.yaml with pipeline config
- with open(os.path.join(pipeline_path, METADATA_FILE_NAME), 'w') as fp:
+ with open(os.path.join(pipeline_path, PIPELINE_CONFIG_FILE), 'w') as fp:
  yaml.dump(dict(
  name=name,
  uuid=uuid,
@@ -291,7 +290,7 @@ class Pipeline:
  @classmethod
  def is_valid_pipeline(self, pipeline_path):
  return os.path.isdir(pipeline_path) and os.path.exists(
- os.path.join(pipeline_path, METADATA_FILE_NAME)
+ os.path.join(pipeline_path, PIPELINE_CONFIG_FILE)
  )

  def block_deletable(self, block, widget=False):
@@ -424,8 +423,10 @@ class Pipeline:

  self.block_configs = config.get('blocks') or []
  self.callback_configs = config.get('callbacks') or []
+ self.conditional_configs = config.get('conditionals') or []
  self.executor_type = config.get('executor_type')
  self.executor_config = config.get('executor_confid') or dict()
+ self.spark_config = config.get('spark_config') or dict()
  self.widget_configs = config.get('widgets') or []

  self.variables = config.get('variables')
@@ -453,6 +454,7 @@ class Pipeline:

  blocks = [build_shared_args_kwargs(c) for c in self.block_configs]
  callbacks = [build_shared_args_kwargs(c) for c in self.callback_configs]
+ conditionals = [build_shared_args_kwargs(c) for c in self.conditional_configs]
  widgets = [build_shared_args_kwargs(c) for c in self.widget_configs]
  all_blocks = blocks + callbacks + widgets

@@ -466,6 +468,11 @@ class Pipeline:
  callbacks,
  all_blocks,
  )
+ self.conditionals_by_uuid = self.__initialize_blocks_by_uuid(
+ self.conditional_configs,
+ conditionals,
+ all_blocks,
+ )
  self.widgets_by_uuid = self.__initialize_blocks_by_uuid(
  self.widget_configs,
  widgets,
@@ -493,8 +500,16 @@ class Pipeline:
  blocks_with_callbacks[upstream_block.uuid] = []
  blocks_with_callbacks[upstream_block.uuid].append(callback_block)

+ blocks_with_conditionals = {}
+ for conditional_block in self.conditionals_by_uuid.values():
+ for upstream_block in conditional_block.upstream_blocks:
+ if upstream_block.uuid not in blocks_with_conditionals:
+ blocks_with_conditionals[upstream_block.uuid] = []
+ blocks_with_conditionals[upstream_block.uuid].append(conditional_block)
+
  for block in self.blocks_by_uuid.values():
  block.callback_blocks = blocks_with_callbacks.get(block.uuid, [])
+ block.conditional_blocks = blocks_with_conditionals.get(block.uuid, [])

  self.validate('A cycle was detected in the loaded pipeline')

@@ -532,8 +547,13 @@ class Pipeline:
  updated_at=self.updated_at,
  uuid=self.uuid,
  )
+
  if self.variables is not None:
  base['variables'] = self.variables
+
+ if self.spark_config is not None:
+ base['spark_config'] = self.spark_config
+
  return base

  def to_dict(
@@ -553,11 +573,13 @@ class Pipeline:

  blocks_data = [b.to_dict(**shared_kwargs) for b in self.blocks_by_uuid.values()]
  callbacks_data = [b.to_dict(**shared_kwargs) for b in self.callbacks_by_uuid.values()]
+ conditionals_data = [b.to_dict(**shared_kwargs) for b in self.conditionals_by_uuid.values()]
  widgets_data = [b.to_dict(**shared_kwargs) for b in self.widgets_by_uuid.values()]

  data = dict(
  blocks=blocks_data,
  callbacks=callbacks_data,
+ conditionals=conditionals_data,
  widgets=widgets_data,
  )

@@ -594,6 +616,7 @@ class Pipeline:
  include_block_metadata: bool = False,
  include_block_tags: bool = False,
  include_callback_blocks: bool = False,
+ include_conditional_blocks: bool = False,
  include_content: bool = False,
  include_extensions: bool = False,
  include_outputs: bool = False,
@@ -604,6 +627,7 @@ class Pipeline:
  include_block_tags=include_block_tags,
  include_block_metadata=include_block_metadata,
  include_callback_blocks=include_callback_blocks,
+ include_conditional_blocks=include_conditional_blocks,
  include_content=include_content,
  include_outputs=include_outputs,
  sample_count=sample_count,
@@ -614,6 +638,9 @@ class Pipeline:
  callbacks_data = await asyncio.gather(
  *[b.to_dict_async(**shared_kwargs) for b in self.callbacks_by_uuid.values()]
  )
+ conditionals_data = await asyncio.gather(
+ *[b.to_dict_async(**shared_kwargs) for b in self.conditionals_by_uuid.values()]
+ )
  widgets_data = await asyncio.gather(
  *[b.to_dict_async(
  include_content=include_content,
@@ -624,6 +651,7 @@ class Pipeline:
  data = dict(
  blocks=blocks_data,
  callbacks=callbacks_data,
+ conditionals=conditionals_data,
  widgets=widgets_data,
  )

@@ -747,6 +775,9 @@ class Pipeline:
  if 'callbacks' in data:
  arr.append(('callbacks', data['callbacks'], self.callbacks_by_uuid))

+ if 'conditionals' in data:
+ arr.append(('conditionals', data['conditionals'], self.conditionals_by_uuid))
+
  if 'widgets' in data:
  arr.append(('widgets', data['widgets'], self.widgets_by_uuid))

@@ -930,6 +961,13 @@ class Pipeline:
  upstream_blocks=self.get_blocks(upstream_block_uuids),
  priority=priority,
  )
+ elif BlockType.CONDITIONAL == block.type:
+ self.conditionals_by_uuid = self.__add_block_to_mapping(
+ self.conditionals_by_uuid,
+ block,
+ upstream_blocks=self.get_blocks(upstream_block_uuids),
+ priority=priority,
+ )
  else:
  self.blocks_by_uuid = self.__add_block_to_mapping(
  self.blocks_by_uuid,
@@ -957,6 +995,8 @@ class Pipeline:
  mapping = self.extensions.get(extension_uuid, {}).get('blocks_by_uuid', {})
  elif BlockType.CALLBACK == block_type:
  mapping = self.callbacks_by_uuid
+ elif BlockType.CONDITIONAL == block_type:
+ mapping = self.conditionals_by_uuid
  else:
  mapping = self.blocks_by_uuid

@@ -984,6 +1024,8 @@ class Pipeline:
  block_uuid in self.extensions[extension_uuid]['blocks_by_uuid']
  elif BlockType.CALLBACK == block_type:
  return block_uuid in self.callbacks_by_uuid
+ elif BlockType.CONDITIONAL == block_type:
+ return block_uuid in self.conditionals_by_uuid

  return block_uuid in self.blocks_by_uuid

@@ -991,6 +1033,7 @@ class Pipeline:
  self,
  block: Block,
  callback_block_uuids: List[str] = None,
+ conditional_block_uuids: List[str] = None,
  upstream_block_uuids: List[str] = None,
  widget: bool = False,
  ):
@@ -998,6 +1041,7 @@ class Pipeline:

  extension_uuid = block.extension_uuid
  is_callback = BlockType.CALLBACK == block.type
+ is_conditional = BlockType.CONDITIONAL == block.type
  is_extension = BlockType.EXTENSION == block.type

  if upstream_block_uuids is not None:
@@ -1010,6 +1054,8 @@ class Pipeline:
  mapping = self.extensions[extension_uuid].get('blocks_by_uuid', {})
  elif is_callback:
  mapping = self.callbacks_by_uuid
+ elif is_conditional:
+ mapping = self.conditionals_by_uuid
  else:
  mapping = self.blocks_by_uuid

@@ -1029,8 +1075,8 @@ class Pipeline:
  new_upstream_block_uuids = set(upstream_block_uuids)
  if curr_upstream_block_uuids != new_upstream_block_uuids:
  # Only set upstream block’s downstream to the current block if current block
- # is not an extension block and not a callback block
- if not is_extension and not is_callback:
+ # is not an extension block and not a callback/conditional block
+ if not is_extension and not is_callback and not is_conditional:
  # There are currently no upstream blocks that are widgets (e.g. chart)
  upstream_blocks_added = self.get_blocks(
  new_upstream_block_uuids - curr_upstream_block_uuids,
@@ -1066,6 +1112,22 @@ class Pipeline:
  # Callback blocks don’t have an upstream.
  # The normal block will know about the callback block via the callback_blocks field.
  block.update_callback_blocks(callback_blocks)
+ elif conditional_block_uuids is not None:
+ conditional_blocks = []
+ for conditional_block_uuid in conditional_block_uuids:
+ conditional_block = self.callbacks_by_uuid.get(conditional_block_uuid)
+ if not conditional_block:
+ raise Exception(
+ f'Conditional block {conditional_block_uuid}'
+ f' is not in the {self.uuid} pipeline.',
+ )
+
+ conditional_blocks.append(conditional_block)
+
+ # Conditional blocks don’t have an upstream.
+ # The normal block will know about the conditional block via the
+ # conditional_blocks field.
+ block.update_conditional_blocks(conditional_blocks)
  else:
  save_kwargs['block_uuid'] = block.uuid

@@ -1079,6 +1141,8 @@ class Pipeline:
  })
  elif is_callback:
  self.callbacks_by_uuid[block.uuid] = block
+ elif is_conditional:
+ self.conditionals_by_uuid[block.uuid] = block
  else:
  self.blocks_by_uuid[block.uuid] = block

@@ -1117,6 +1181,10 @@ class Pipeline:
  self.callbacks_by_uuid = {
  new_uuid if k == old_uuid else k: v for k, v in self.callbacks_by_uuid.items()
  }
+ elif BlockType.CONDITIONAL == block.type:
+ self.conditionals_by_uuid = {
+ new_uuid if k == old_uuid else k: v for k, v in self.conditionals_by_uuid.items()
+ }
  elif old_uuid in self.blocks_by_uuid:
  self.blocks_by_uuid = {
  new_uuid if k == old_uuid else k: v for k, v in self.blocks_by_uuid.items()
@@ -1159,6 +1227,7 @@ class Pipeline:
  force: bool = False,
  ) -> None:
  is_callback = BlockType.CALLBACK == block.type
+ is_conditional = BlockType.CONDITIONAL == block.type
  is_extension = BlockType.EXTENSION == block.type

  mapping = {}
@@ -1168,6 +1237,8 @@ class Pipeline:
  mapping = self.extensions.get(block.extension_uuid, {}).get('blocks_by_uuid', {})
  elif is_callback:
  mapping = self.callbacks_by_uuid
+ elif is_conditional:
+ mapping = self.conditionals_by_uuid
  else:
  mapping = self.blocks_by_uuid

@@ -1211,6 +1282,8 @@ class Pipeline:
  del self.extensions[block.extension_uuid]['blocks_by_uuid'][block.uuid]
  elif is_callback:
  del self.callbacks_by_uuid[block.uuid]
+ elif is_conditional:
+ del self.conditionals_by_uuid[block.uuid]
  else:
  del self.blocks_by_uuid[block.uuid]

@@ -1247,6 +1320,8 @@ class Pipeline:
  self.extensions[extension_uuid]['blocks_by_uuid'][block_uuid] = block
  elif BlockType.CALLBACK == block.type:
  current_pipeline.callbacks_by_uuid[block_uuid] = block
+ elif BlockType.CONDITIONAL == block.type:
+ current_pipeline.conditionals_by_uuid[block_uuid] = block
  else:
  current_pipeline.blocks_by_uuid[block_uuid] = block
  pipeline_dict = current_pipeline.to_dict(include_extensions=True)
@@ -1302,6 +1377,8 @@ class Pipeline:
  self.extensions[extension_uuid]['blocks_by_uuid'][block_uuid] = block
  elif BlockType.CALLBACK == block_type:
  current_pipeline.callbacks_by_uuid[block_uuid] = block
+ elif BlockType.CONDITIONAL == block_type:
+ current_pipeline.conditionals_by_uuid[block_uuid] = block
  else:
  current_pipeline.blocks_by_uuid[block_uuid] = block
  pipeline_dict = current_pipeline.to_dict(include_extensions=True)
@@ -1373,6 +1450,7 @@ class Pipeline:

  combined_blocks.update(self.widgets_by_uuid)
  combined_blocks.update(self.callbacks_by_uuid)
+ combined_blocks.update(self.conditionals_by_uuid)
  combined_blocks.update(self.blocks_by_uuid)
  status = {uuid: 'unvisited' for uuid in combined_blocks}
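Net effect of the pipeline.py changes: a pipeline's config gains two optional sections, conditionals (loaded, serialized, renamed, deleted, and cycle-checked exactly like callbacks and widgets, and attached to regular blocks through a new conditional_blocks field) and a top-level spark_config. A partial, hypothetical sketch of the shape Pipeline.to_dict() could now produce; the block names and config values below are made up for illustration and the real output also carries fields such as name and uuid:

    # Hypothetical, partial illustration only (not taken from the package).
    pipeline_dict = dict(
        blocks=[
            dict(uuid='load_data', type='data_loader', upstream_blocks=[]),
        ],
        callbacks=[],
        conditionals=[  # new top-level section, serialized like callbacks/widgets
            dict(uuid='check_freshness', type='conditional', upstream_blocks=['load_data']),
        ],
        widgets=[],
        spark_config=dict(app_name='my_spark_app'),  # new: persisted when set on the pipeline
        variables=dict(env='dev'),
    )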
 
mage_ai/data_preparation/repo_manager.py CHANGED
@@ -9,11 +9,14 @@ import ruamel.yaml
  import yaml
  from jinja2 import Template

- from mage_ai.data_preparation.shared.constants import REPO_PATH_ENV_VAR
+ from mage_ai.data_preparation.shared.constants import (
+ MAGE_DATA_DIR_ENV_VAR,
+ REPO_PATH_ENV_VAR,
+ )
  from mage_ai.data_preparation.templates.utils import copy_template_directory
  from mage_ai.shared.environments import is_test

- MAGE_DATA_DIR_ENV_VAR = 'MAGE_DATA_DIR'
+
  if is_test():
  DEFAULT_MAGE_DATA_DIR = './'
  else:
mage_ai/data_preparation/shared/constants.py CHANGED
@@ -1,2 +1,3 @@
- REPO_PATH_ENV_VAR = 'MAGE_REPO_PATH'
+ MAGE_DATA_DIR_ENV_VAR = 'MAGE_DATA_DIR'
  MANAGE_ENV_VAR = 'MAGE_IS_MANAGE_INSTANCE'
+ REPO_PATH_ENV_VAR = 'MAGE_REPO_PATH'
mage_ai/data_preparation/templates/conditionals/base.jinja ADDED
@@ -0,0 +1,11 @@
+ {% block imports %}
+ if 'condition' not in globals():
+ from mage_ai.data_preparation.decorators import condition
+ {% endblock %}
+
+
+ {% block content %}
+ @condition
+ def evaluate_condition(*args, **kwargs) -> bool:
+ return True
+ {% endblock %}
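The template above is what a new conditional block is scaffolded from: a single function decorated with @condition that returns a boolean. A hedged sketch of how such a generated file might be filled in; the weekday rule is hypothetical and not part of this release, and when the function returns False the downstream block is presumably skipped and recorded with the new CONDITION_FAILED block run status introduced elsewhere in this diff:

    # Hypothetical conditional block built from the template above.
    if 'condition' not in globals():
        from mage_ai.data_preparation.decorators import condition


    @condition
    def evaluate_condition(*args, **kwargs) -> bool:
        # Illustrative rule: only let downstream blocks run on weekdays.
        from datetime import datetime

        return datetime.utcnow().weekday() < 5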
mage_ai/data_preparation/templates/constants.py CHANGED
@@ -98,6 +98,13 @@ TEMPLATES = [
  name='Trigger pipeline',
  path='callbacks/orchestration/triggers/default.jinja',
  ),
+ dict(
+ block_type=BlockType.CONDITIONAL,
+ description='Base template with empty functions.',
+ language=BlockLanguage.PYTHON,
+ name='Base template',
+ path='conditionals/base.jinja',
+ ),
  ]

  TEMPLATES_BY_UUID = index_by(lambda x: x['name'], TEMPLATES)
mage_ai/io/mssql.py CHANGED
@@ -65,6 +65,16 @@ class MSSQL(BaseSQL):
  )
  self._ctx = pyodbc.connect(connection_string)

+ def build_create_schema_command(
+ self,
+ schema_name: str
+ ) -> str:
+ return '\n'.join([
+ 'IF NOT EXISTS (',
+ f'SELECT * FROM information_schema.schemata WHERE schema_name = \'{schema_name}\')',
+ f'BEGIN EXEC(\'CREATE SCHEMA {schema_name}\') END'
+ ])
+
  def build_create_table_as_command(
  self,
  table_name: str,
@@ -79,7 +89,7 @@ class MSSQL(BaseSQL):
  with self.conn.cursor() as cur:
  cur.execute('\n'.join([
  'SELECT TOP 1 * FROM information_schema.tables ',
- f'WHERE table_name = \'{table_name}\'',
+ f'WHERE table_schema = \'{schema_name}\' AND table_name = \'{table_name}\'',
  ]))
  return len(cur.fetchall()) >= 1
 
mage_ai/io/sql.py CHANGED
@@ -39,6 +39,12 @@ class BaseSQL(BaseSQLConnection):
  """
  raise Exception('Subclasses must override this method.')

+ def build_create_schema_command(
+ self,
+ schema_name: str
+ ) -> str:
+ return f'CREATE SCHEMA IF NOT EXISTS {schema_name};'
+
  def build_create_table_command(
  self,
  dtypes: Mapping[str, str],
@@ -256,7 +262,8 @@
 
  with self.conn.cursor() as cur:
  if schema_name:
- cur.execute(f'CREATE SCHEMA IF NOT EXISTS {schema_name};')
+ query = self.build_create_schema_command(schema_name)
+ cur.execute(query)

  should_create_table = not table_exists
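With this hook in place, the export path asks the connector for its schema DDL instead of hard-coding the Postgres-style statement, which is what lets the MSSQL class above substitute T-SQL that has no CREATE SCHEMA IF NOT EXISTS. A small sketch of the strings the two implementations return; the schema name analytics is hypothetical:

    schema_name = 'analytics'  # hypothetical schema name

    # BaseSQL default (ANSI/Postgres-style):
    base_command = f'CREATE SCHEMA IF NOT EXISTS {schema_name};'

    # MSSQL override, assembled from the three joined parts shown above:
    mssql_command = '\n'.join([
        'IF NOT EXISTS (',
        f"SELECT * FROM information_schema.schemata WHERE schema_name = '{schema_name}')",
        f"BEGIN EXEC('CREATE SCHEMA {schema_name}') END",
    ])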
 
mage_ai/orchestration/db/migrations/versions/dfe49d040487_add_condition_failed_status_to_block_.py ADDED
@@ -0,0 +1,39 @@
+ """Add condition_failed status to block run status
+
+ Revision ID: dfe49d040487
+ Revises: e7beb59b44f9
+ Create Date: 2023-06-12 10:25:55.734358
+
+ """
+ from alembic import op
+ import sqlalchemy as sa
+
+
+ # revision identifiers, used by Alembic.
+ revision = 'dfe49d040487'
+ down_revision = 'e7beb59b44f9'
+ branch_labels = None
+ depends_on = None
+
+
+ def upgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ bind = op.get_bind()
+ if bind.engine.name == 'postgresql':
+ with op.get_context().autocommit_block():
+ op.execute("ALTER TYPE blockrunstatus ADD VALUE 'CONDITION_FAILED'")
+ # ### end Alembic commands ###
+
+
+ def downgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ bind = op.get_bind()
+ if bind.engine.name == 'postgresql':
+ op.execute("ALTER TYPE blockrunstatus RENAME TO blockrunstatus_old")
+ op.execute("CREATE TYPE blockrunstatus AS ENUM('INITIAL', 'QUEUED', 'RUNNING', 'COMPLETED', 'FAILED', 'CANCELLED', 'UPSTREAM_FAILED')")
+ op.execute((
+ "ALTER TABLE block_run ALTER COLUMN status TYPE blockrunstatus USING "
+ "status::text::blockrunstatus"
+ ))
+ op.execute("DROP TYPE blockrunstatus_old")
+ # ### end Alembic commands ###
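The upgrade only touches Postgres, and ALTER TYPE ... ADD VALUE cannot run inside the usual migration transaction, hence the autocommit_block. A hedged way to confirm the new enum value after upgrading; the connection URL is a placeholder for whatever Postgres database mage-ai is configured to use:

    import sqlalchemy as sa

    # Placeholder URL; point it at the database mage-ai uses.
    engine = sa.create_engine('postgresql://user:password@localhost:5432/mage')
    with engine.connect() as conn:
        rows = conn.execute(sa.text('SELECT unnest(enum_range(NULL::blockrunstatus))'))
        values = [row[0] for row in rows]
        # 'CONDITION_FAILED' should appear in the list after the upgrade.
        print(values)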
mage_ai/orchestration/db/models/schedules.py CHANGED
@@ -388,7 +388,10 @@ class PipelineRun(BaseModel):
  )

  def all_blocks_completed(self, include_failed_blocks: bool = False) -> bool:
- statuses = [BlockRun.BlockRunStatus.COMPLETED]
+ statuses = [
+ BlockRun.BlockRunStatus.COMPLETED,
+ BlockRun.BlockRunStatus.CONDITION_FAILED,
+ ]
  if include_failed_blocks:
  statuses.extend([
  BlockRun.BlockRunStatus.FAILED,
@@ -438,6 +441,7 @@ class BlockRun(BaseModel):
  FAILED = 'failed'
  CANCELLED = 'cancelled'
  UPSTREAM_FAILED = 'upstream_failed'
+ CONDITION_FAILED = 'condition_failed'

  pipeline_run_id = Column(Integer, ForeignKey('pipeline_run.id'), index=True)
  block_uuid = Column(String(255))
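CONDITION_FAILED joins the BlockRunStatus enum and is treated as a terminal, non-error outcome: all_blocks_completed now counts it alongside COMPLETED, so a run where some blocks were skipped by a failed condition can still finish. A minimal standalone sketch that mirrors the membership check, detached from the real ORM model and using made-up statuses:

    from enum import Enum


    class BlockRunStatus(str, Enum):
        COMPLETED = 'completed'
        FAILED = 'failed'
        CONDITION_FAILED = 'condition_failed'


    def all_blocks_completed(block_statuses) -> bool:
        # Mirrors the updated check: blocks skipped by a condition no longer hold a run open.
        terminal_ok = {BlockRunStatus.COMPLETED, BlockRunStatus.CONDITION_FAILED}
        return all(status in terminal_ok for status in block_statuses)


    print(all_blocks_completed([BlockRunStatus.COMPLETED, BlockRunStatus.CONDITION_FAILED]))  # True
    print(all_blocks_completed([BlockRunStatus.COMPLETED, BlockRunStatus.FAILED]))            # False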
mage_ai/orchestration/pipeline_scheduler.py CHANGED
@@ -393,6 +393,17 @@ class PipelineScheduler:
  BlockRun.BlockRunStatus.FAILED,
  ]
  )
+ condition_failed_block_uuids = set(
+ b.block_uuid for b in self.pipeline_run.block_runs
+ if b.status in [
+ BlockRun.BlockRunStatus.CONDITION_FAILED,
+ ]
+ )
+
+ statuses = {
+ BlockRun.BlockRunStatus.CONDITION_FAILED: condition_failed_block_uuids,
+ BlockRun.BlockRunStatus.UPSTREAM_FAILED: failed_block_uuids,
+ }
  not_updated_block_runs = []
  for block_run in block_runs:
  updated_status = False
@@ -400,23 +411,22 @@
  'dynamic_upstream_block_uuids',
  )

- if dynamic_upstream_block_uuids:
- if all(
- b in failed_block_uuids
- for b in dynamic_upstream_block_uuids
- ):
- block_run.update(
- status=BlockRun.BlockRunStatus.UPSTREAM_FAILED)
- updated_status = True
- else:
- block = self.pipeline.get_block(block_run.block_uuid)
- if any(
- b in failed_block_uuids
- for b in block.upstream_block_uuids
- ):
- block_run.update(
- status=BlockRun.BlockRunStatus.UPSTREAM_FAILED)
- updated_status = True
+ for status, block_uuids in statuses.items():
+ if dynamic_upstream_block_uuids:
+ if all(
+ b in block_uuids
+ for b in dynamic_upstream_block_uuids
+ ):
+ block_run.update(status=status)
+ updated_status = True
+ else:
+ block = self.pipeline.get_block(block_run.block_uuid)
+ if any(
+ b in block_uuids
+ for b in block.upstream_block_uuids
+ ):
+ block_run.update(status=status)
+ updated_status = True

  if not updated_status:
  not_updated_block_runs.append(block_run)
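The scheduler now propagates two terminal states to waiting block runs in one loop: CONDITION_FAILED when an upstream block was skipped by a conditional, and UPSTREAM_FAILED when an upstream block failed, with dynamic child blocks only marked once every dynamic upstream is in that state. A standalone sketch of that decision rule, detached from the ORM models; the function and block names are illustrative, not the package's API:

    def propagate_status(upstream_uuids, statuses_to_uuids, dynamic_upstream_uuids=None):
        """Return the status to assign to a waiting block run, or None to leave it queued.

        statuses_to_uuids maps a terminal status (e.g. 'condition_failed',
        'upstream_failed') to the set of block uuids already in that state.
        """
        for status, block_uuids in statuses_to_uuids.items():
            if dynamic_upstream_uuids:
                # Dynamic child blocks wait on all of their dynamic upstreams.
                if all(uuid in block_uuids for uuid in dynamic_upstream_uuids):
                    return status
            elif any(uuid in block_uuids for uuid in upstream_uuids):
                # Regular blocks are cut off as soon as any upstream hits this state.
                return status
        return None


    # Example: one upstream block was skipped by a conditional.
    print(propagate_status(
        upstream_uuids=['load_data'],
        statuses_to_uuids={'condition_failed': {'load_data'}, 'upstream_failed': set()},
    ))  # -> 'condition_failed'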
mage_ai/server/api/downloads.py ADDED
@@ -0,0 +1,64 @@
+ from tornado import gen, iostream
+
+ from mage_ai.api.utils import authenticate_client_and_token
+ from mage_ai.data_preparation.models.pipeline import Pipeline
+ from mage_ai.data_preparation.models.variable import VariableType
+ from mage_ai.orchestration.db.models.oauth import Oauth2Application
+ from mage_ai.orchestration.db.models.schedules import PipelineRun
+ from mage_ai.server.api.base import BaseHandler
+ from mage_ai.settings import REQUIRE_USER_AUTHENTICATION
+
+
+ class ApiDownloadHandler(BaseHandler):
+ async def get(self, pipeline_uuid, block_uuid, **kwargs):
+ self.set_header('Content-Type', 'text/csv')
+ self.set_header('Content-Disposition', 'attachment; filename=' + f'{block_uuid}.csv')
+
+ api_key = self.get_argument('api_key', None, True)
+ token = self.get_argument('token', None, True)
+ if REQUIRE_USER_AUTHENTICATION:
+ authenticated = False
+ if api_key and token:
+ oauth_client = Oauth2Application.query.filter(
+ Oauth2Application.client_id == api_key,
+ ).first()
+ if oauth_client:
+ oauth_token, valid = authenticate_client_and_token(oauth_client.id, token)
+ authenticated = valid and \
+ oauth_token and \
+ oauth_token.user
+ if not authenticated:
+ raise Exception('Unauthorized access to download block output.')
+
+ pipeline = Pipeline.get(pipeline_uuid)
+ block = pipeline.get_block(block_uuid)
+ pipeline_run_id = self.get_argument('pipeline_run_id', None)
+ execution_partition = None
+ if pipeline_run_id is not None:
+ pipeline_run = PipelineRun.query.get(pipeline_run_id)
+ execution_partition = pipeline_run.execution_partition
+
+ if block is None:
+ raise Exception(f'Block {block_uuid} does not exist in pipeline {pipeline_uuid}')
+
+ tables = block.get_outputs(
+ execution_partition=execution_partition,
+ include_print_outputs=False,
+ csv_lines_only=True,
+ sample=False,
+ variable_type=VariableType.DATAFRAME,
+ )
+ for data in tables:
+ table = data.get('table', [])
+ line_count = len(table)
+ for line in range(0, line_count):
+ is_last_line = line == line_count - 1
+ try:
+ csv_line = table[line] if is_last_line else table[line] + '\n'
+ self.write(csv_line.encode('UTF-8'))
+ except iostream.StreamClosedError:
+ break
+ if line % 5000 == 0 or is_last_line:
+ await self.flush()
+ # Sleep for a nanosecond so other handlers can run and avoid blocking
+ await gen.sleep(0.000000001)
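This new handler streams a block's DataFrame output as CSV, flushing every 5000 lines so large downloads don't block the event loop, and it requires an OAuth api_key/token pair whenever user authentication is enabled. The route itself is registered in mage_ai/server/server.py, which this diff lists but does not show, so the URL path below is only a placeholder and the credentials are hypothetical:

    import requests

    # Placeholder route and credentials; check server.py in this release for the real path.
    base_url = 'http://localhost:6789'
    url = f'{base_url}/api/downloads/<pipeline_uuid>/<block_uuid>'  # hypothetical path
    params = {
        'api_key': '<oauth_client_id>',
        'token': '<oauth_token>',
        'pipeline_run_id': 123,  # optional: pull output from a specific run's partition
    }

    with requests.get(url, params=params, stream=True) as resp:
        resp.raise_for_status()
        with open('block_output.csv', 'wb') as f:
            for chunk in resp.iter_content(chunk_size=8192):
                f.write(chunk)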
mage_ai/server/constants.py CHANGED
@@ -12,4 +12,4 @@ DATAFRAME_OUTPUT_SAMPLE_COUNT = 10
  # Dockerfile depends on it because it runs ./scripts/install_mage.sh and uses
  # the last line to determine the version to install.
  VERSION = \
- '0.8.91'
+ '0.8.93'
mage_ai/server/execution_manager.py CHANGED
@@ -1,4 +1,5 @@
  from distutils.file_util import copy_file
+ from mage_ai.data_preparation.models.constants import PIPELINE_CONFIG_FILE
  from mage_ai.data_preparation.models.pipeline import Pipeline
  from typing import Callable
  import asyncio
@@ -78,8 +79,8 @@ def cancel_pipeline_execution(
  config_path = pipeline_execution.previous_config_path
  if config_path is not None and os.path.isdir(config_path):
  copy_file(
- os.path.join(config_path, 'metadata.yaml'),
- os.path.join(pipeline.dir_path, 'metadata.yaml'),
+ os.path.join(config_path, PIPELINE_CONFIG_FILE),
+ os.path.join(pipeline.dir_path, PIPELINE_CONFIG_FILE),
  )
  delete_pipeline_copy_config(config_path)