mage-ai 0.8.11__py3-none-any.whl → 0.8.13__py3-none-any.whl
This diff shows the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release.
This version of mage-ai might be problematic.
- mage_ai/api/policies/BackfillPolicy.py +4 -1
- mage_ai/api/policies/BlockPolicy.py +13 -0
- mage_ai/api/policies/SyncPolicy.py +30 -0
- mage_ai/api/presenters/BlockPresenter.py +12 -1
- mage_ai/api/presenters/SyncPresenter.py +11 -0
- mage_ai/api/resources/BlockResource.py +8 -1
- mage_ai/api/resources/SyncResource.py +43 -0
- mage_ai/data_preparation/executors/pipeline_executor.py +9 -0
- mage_ai/data_preparation/executors/streaming_pipeline_executor.py +3 -1
- mage_ai/data_preparation/models/block/__init__.py +5 -3
- mage_ai/data_preparation/models/block/integration/__init__.py +59 -0
- mage_ai/data_preparation/models/constants.py +1 -0
- mage_ai/data_preparation/preferences.py +43 -0
- mage_ai/data_preparation/sync/__init__.py +17 -0
- mage_ai/data_preparation/sync/base_sync.py +7 -0
- mage_ai/data_preparation/sync/git_sync.py +26 -0
- mage_ai/data_preparation/templates/data_loaders/streaming/kafka.yaml +5 -0
- mage_ai/io/export_utils.py +1 -0
- mage_ai/io/postgres.py +46 -4
- mage_ai/orchestration/db/models.py +14 -0
- mage_ai/orchestration/pipeline_scheduler.py +12 -4
- mage_ai/server/constants.py +1 -1
- mage_ai/server/frontend_dist/404.html +2 -2
- mage_ai/server/frontend_dist/404.html.html +2 -2
- mage_ai/server/frontend_dist/_next/static/UuQKenirO2LDcepWvj6S9/_buildManifest.js +1 -0
- mage_ai/server/frontend_dist/_next/static/chunks/{1830-c3f709c7ca7f32ab.js → 1830-fbc03130dc8507d5.js} +1 -1
- mage_ai/server/frontend_dist/_next/static/chunks/2626-74904ade5b462127.js +1 -0
- mage_ai/server/frontend_dist/_next/static/chunks/3573-9b7b57411a327efa.js +1 -0
- mage_ai/server/frontend_dist/_next/static/chunks/434-b1cbc39e987f22cd.js +1 -0
- mage_ai/server/frontend_dist/_next/static/chunks/4846-688b5f0a015a543a.js +1 -0
- mage_ai/server/frontend_dist/_next/static/chunks/5477-4d9bab807c386355.js +1 -0
- mage_ai/server/frontend_dist/_next/static/chunks/7281-99f836e410857a68.js +1 -0
- mage_ai/server/frontend_dist/_next/static/chunks/7400-26ce25ec46728ef7.js +1 -0
- mage_ai/server/frontend_dist/_next/static/chunks/9767-30e239ed26aa848a.js +1 -0
- mage_ai/server/frontend_dist/_next/static/chunks/pages/{_app-44766f29c8c5b4ce.js → _app-169f9cc923ae39a5.js} +1 -1
- mage_ai/server/frontend_dist/_next/static/chunks/pages/{manage-14c743a0e2549486.js → manage-91c4fd203b4ac0e2.js} +1 -1
- mage_ai/server/frontend_dist/_next/static/chunks/pages/{pipeline-runs-21fe37061bdaaaea.js → pipeline-runs-10d54d54bfd9b39c.js} +1 -1
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/backfills/[...slug]-971ea5bb26d66155.js +1 -0
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/backfills-393cec015b843dd8.js +1 -0
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/edit-2963c971fdefd159.js +1 -0
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/logs-d1490dc3ea4f2e61.js +1 -0
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/monitors/{block-runs-8f23f7ca9efcb069.js → block-runs-f513df1d43ba48f0.js} +1 -1
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/monitors/block-runtime-393fa626a8873f74.js +1 -0
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/{monitors-8b08ec1aef4af4f2.js → monitors-d45f982e19978552.js} +1 -1
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/runs/[run]-463347405cafa133.js +1 -0
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/runs-a33f6fd5d91cd123.js +1 -0
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/syncs-d4df695d51e01232.js +1 -0
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/triggers/[...slug]-d83cf90749c8cd56.js +1 -0
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/triggers-f6ee40bebf427cfa.js +1 -0
- mage_ai/server/frontend_dist/_next/static/chunks/pages/{pipelines-1c0627627c21667f.js → pipelines-db2903bdeeeae791.js} +1 -1
- mage_ai/server/frontend_dist/_next/static/chunks/pages/settings/account/profile-19a5aa2de681b54a.js +1 -0
- mage_ai/server/frontend_dist/_next/static/chunks/pages/settings/workspace/preferences-e1b34b9a44fc4688.js +1 -0
- mage_ai/server/frontend_dist/_next/static/chunks/pages/settings/workspace/sync_data-4ae244876881cfe1.js +1 -0
- mage_ai/server/frontend_dist/_next/static/chunks/pages/settings/workspace/{users-c128672e053a4c30.js → users-b8aeca222449e041.js} +1 -1
- mage_ai/server/frontend_dist/_next/static/chunks/pages/{terminal-6feb9848233a6c6e.js → terminal-482bd79cb26b8f73.js} +1 -1
- mage_ai/server/frontend_dist/_next/static/chunks/pages/triggers-81b7327ed8e015a4.js +1 -0
- mage_ai/server/frontend_dist/index.html +2 -2
- mage_ai/server/frontend_dist/manage.html +4 -4
- mage_ai/server/frontend_dist/pipeline-runs.html +5 -5
- mage_ai/server/frontend_dist/pipelines/[pipeline]/backfills/[...slug].html +5 -5
- mage_ai/server/frontend_dist/pipelines/[pipeline]/backfills.html +5 -5
- mage_ai/server/frontend_dist/pipelines/[pipeline]/edit.html +2 -2
- mage_ai/server/frontend_dist/pipelines/[pipeline]/logs.html +5 -5
- mage_ai/server/frontend_dist/pipelines/[pipeline]/monitors/block-runs.html +5 -5
- mage_ai/server/frontend_dist/pipelines/[pipeline]/monitors/block-runtime.html +5 -5
- mage_ai/server/frontend_dist/pipelines/[pipeline]/monitors.html +5 -5
- mage_ai/server/frontend_dist/pipelines/[pipeline]/runs/[run].html +5 -5
- mage_ai/server/frontend_dist/pipelines/[pipeline]/runs.html +5 -5
- mage_ai/server/frontend_dist/pipelines/[pipeline]/syncs.html +5 -5
- mage_ai/server/frontend_dist/pipelines/[pipeline]/triggers/[...slug].html +5 -5
- mage_ai/server/frontend_dist/pipelines/[pipeline]/triggers.html +5 -5
- mage_ai/server/frontend_dist/pipelines/[pipeline].html +2 -2
- mage_ai/server/frontend_dist/pipelines.html +5 -5
- mage_ai/server/frontend_dist/settings/account/profile.html +5 -5
- mage_ai/server/frontend_dist/settings/workspace/preferences.html +5 -5
- mage_ai/server/frontend_dist/settings/workspace/sync_data.html +20 -0
- mage_ai/server/frontend_dist/settings/workspace/users.html +5 -5
- mage_ai/server/frontend_dist/settings.html +2 -2
- mage_ai/server/frontend_dist/sign-in.html +4 -4
- mage_ai/server/frontend_dist/terminal.html +5 -5
- mage_ai/server/frontend_dist/test.html +3 -3
- mage_ai/server/frontend_dist/triggers.html +5 -5
- mage_ai/server/server.py +1 -0
- mage_ai/server/websocket_server.py +1 -1
- mage_ai/shared/config.py +4 -1
- mage_ai/streaming/sources/kafka.py +49 -3
- mage_ai/tests/data_preparation/test_templates.py +5 -0
- mage_ai/tests/streaming/sources/test_kafka.py +17 -0
- {mage_ai-0.8.11.dist-info → mage_ai-0.8.13.dist-info}/METADATA +3 -1
- {mage_ai-0.8.11.dist-info → mage_ai-0.8.13.dist-info}/RECORD +96 -89
- mage_ai/server/frontend_dist/_next/static/Bmwp0WdXRDJpCFVPOVdc9/_buildManifest.js +0 -1
- mage_ai/server/frontend_dist/_next/static/chunks/2249-70929b8c547bbc18.js +0 -1
- mage_ai/server/frontend_dist/_next/static/chunks/2426-115f8871a2e28f8c.js +0 -1
- mage_ai/server/frontend_dist/_next/static/chunks/434-69ddfacd3e93f2db.js +0 -1
- mage_ai/server/frontend_dist/_next/static/chunks/4846-b4ced91dc0e9fba9.js +0 -1
- mage_ai/server/frontend_dist/_next/static/chunks/5944-757b7898608a65e1.js +0 -1
- mage_ai/server/frontend_dist/_next/static/chunks/6579-2b5d8d39472d85c0.js +0 -1
- mage_ai/server/frontend_dist/_next/static/chunks/7400-1430ec3874c1fdee.js +0 -1
- mage_ai/server/frontend_dist/_next/static/chunks/8961-697fe5d4db44d909.js +0 -1
- mage_ai/server/frontend_dist/_next/static/chunks/9140-6f67e0879394373d.js +0 -1
- mage_ai/server/frontend_dist/_next/static/chunks/9767-3f852fd90cf7857f.js +0 -1
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/backfills/[...slug]-678569cf24a2e10d.js +0 -1
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/backfills-0ed0d70bc659c236.js +0 -1
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/edit-f96da0174abb54b5.js +0 -1
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/logs-5ccc75887776efb0.js +0 -1
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/monitors/block-runtime-f83ab9de4094e1b0.js +0 -1
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/runs/[run]-73ced07cc98a4968.js +0 -1
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/runs-67d23509a0c9a1b8.js +0 -1
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/syncs-4084a44baf91f30e.js +0 -1
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/triggers/[...slug]-ae970171cfe98c51.js +0 -1
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/triggers-b0b91245d3299bdf.js +0 -1
- mage_ai/server/frontend_dist/_next/static/chunks/pages/settings/account/profile-acd7ee47219fee3d.js +0 -1
- mage_ai/server/frontend_dist/_next/static/chunks/pages/settings/workspace/preferences-07bda506f68974fb.js +0 -1
- mage_ai/server/frontend_dist/_next/static/chunks/pages/triggers-e0172c422c95eda9.js +0 -1
- /mage_ai/server/frontend_dist/_next/static/{Bmwp0WdXRDJpCFVPOVdc9 → UuQKenirO2LDcepWvj6S9}/_middlewareManifest.js +0 -0
- /mage_ai/server/frontend_dist/_next/static/{Bmwp0WdXRDJpCFVPOVdc9 → UuQKenirO2LDcepWvj6S9}/_ssgManifest.js +0 -0
- {mage_ai-0.8.11.dist-info → mage_ai-0.8.13.dist-info}/LICENSE +0 -0
- {mage_ai-0.8.11.dist-info → mage_ai-0.8.13.dist-info}/WHEEL +0 -0
- {mage_ai-0.8.11.dist-info → mage_ai-0.8.13.dist-info}/entry_points.txt +0 -0
- {mage_ai-0.8.11.dist-info → mage_ai-0.8.13.dist-info}/top_level.txt +0 -0
mage_ai/api/policies/BackfillPolicy.py CHANGED
@@ -22,7 +22,10 @@ BackfillPolicy.allow_actions([
     OauthScope.CLIENT_PRIVATE,
 ], condition=lambda policy: policy.has_at_least_editor_role())
 
-BackfillPolicy.allow_read([
+BackfillPolicy.allow_read([
+    'pipeline_run_dates',
+    'total_run_count',
+] + BackfillPresenter.default_attributes, scopes=[
     OauthScope.CLIENT_PRIVATE,
 ], on_action=[
     constants.DETAIL,
mage_ai/api/policies/BlockPolicy.py CHANGED
@@ -38,6 +38,7 @@ BlockPolicy.allow_read(BlockPresenter.default_attributes + [], scopes=[
 ], condition=lambda policy: policy.has_at_least_editor_role())
 
 BlockPolicy.allow_read([
+    'bookmarks',
     'content',
     'outputs',
 ] + BlockPresenter.default_attributes, scopes=[
@@ -68,9 +69,11 @@ BlockPolicy.allow_write([
 
 BlockPolicy.allow_write([
     'all_upstream_blocks_executed',
+    'bookmark_values',
     'color',
     'configuration',
     'content',
+    'destination_table',
     'downstream_blocks',
     'executor_config',
     'executor_type',
@@ -80,6 +83,7 @@ BlockPolicy.allow_write([
     'name',
     'outputs',
     'status',
+    'tap_stream_id',
     'type',
     'upstream_blocks',
     'uuid',
@@ -88,3 +92,12 @@ BlockPolicy.allow_write([
 ], on_action=[
     constants.UPDATE,
 ], condition=lambda policy: policy.has_at_least_editor_role())
+
+BlockPolicy.allow_query([
+    'destination_table',
+    'state_stream',
+], scopes=[
+    OauthScope.CLIENT_PRIVATE,
+], on_action=[
+    constants.DETAIL,
+], condition=lambda policy: policy.has_at_least_viewer_role())
mage_ai/api/policies/SyncPolicy.py ADDED
@@ -0,0 +1,30 @@
+from mage_ai.api.oauth_scope import OauthScope
+from mage_ai.api.operations import constants
+from mage_ai.api.policies.BasePolicy import BasePolicy
+from mage_ai.api.presenters.SyncPresenter import SyncPresenter
+
+
+class SyncPolicy(BasePolicy):
+    pass
+
+
+SyncPolicy.allow_read(SyncPresenter.default_attributes, scopes=[
+    OauthScope.CLIENT_PRIVATE,
+], on_action=[
+    constants.CREATE,
+    constants.UPDATE,
+], condition=lambda policy: policy.has_at_least_viewer_role())
+
+SyncPolicy.allow_write(SyncPresenter.default_attributes, scopes=[
+    OauthScope.CLIENT_PRIVATE,
+], on_action=[
+    constants.CREATE,
+], condition=lambda policy: policy.has_at_least_editor_role())
+
+SyncPolicy.allow_write([
+    'action_type',
+], scopes=[
+    OauthScope.CLIENT_PRIVATE,
+], on_action=[
+    constants.UPDATE,
+], condition=lambda policy: policy.has_at_least_editor_role())
mage_ai/api/presenters/BlockPresenter.py CHANGED
@@ -30,13 +30,24 @@ class BlockPresenter(BasePresenter):
         if display_format in [constants.CREATE, constants.UPDATE]:
             return self.model.to_dict(include_content=True)
         elif display_format in [constants.DETAIL, 'dbt']:
-
+            query = kwargs.get('query', {})
+
+            include_outputs = query.get('include_outputs', [True])
             if include_outputs:
                 include_outputs = include_outputs[0]
 
+            state_stream = query.get('state_stream', [None])
+            if state_stream:
+                state_stream = state_stream[0]
+            destination_table = query.get('destination_table', [None])
+            if destination_table:
+                destination_table = destination_table[0]
+
             data = self.model.to_dict(
+                destination_table=destination_table,
                 include_content=True,
                 include_outputs=include_outputs,
+                state_stream=state_stream,
             )
 
             if 'dbt' == display_format:
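A note on the pattern above: query parameters reach the presenter as lists of values, so each one is unwrapped to its first element, with a list default when the key is absent. A minimal sketch of that behavior, using a made-up query dict:

query = {'state_stream': ['users']}  # hypothetical query dict; 'users' is a made-up stream name

state_stream = query.get('state_stream', [None])
if state_stream:
    state_stream = state_stream[0]  # -> 'users'

destination_table = query.get('destination_table', [None])
if destination_table:
    destination_table = destination_table[0]  # the default [None] is truthy, so this yields None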
mage_ai/api/resources/BlockResource.py CHANGED
@@ -78,4 +78,11 @@ class BlockResource(GenericResource):
 
     @safe_db_query
     def update(self, payload, **kwargs):
-
+        query = kwargs.get('query', {})
+        update_state = query.get('update_state', [False])
+        if update_state:
+            update_state = update_state[0]
+        self.model.update(
+            payload,
+            update_state=update_state,
+        )
mage_ai/api/resources/SyncResource.py ADDED
@@ -0,0 +1,43 @@
+from mage_ai.api.resources.GenericResource import GenericResource
+from mage_ai.data_preparation.preferences import get_preferences
+from mage_ai.data_preparation.sync import SyncType, SyncConfig
+from mage_ai.data_preparation.sync.git_sync import GitSync
+
+
+class SyncResource(GenericResource):
+    @classmethod
+    def collection(self, query, meta, user, **kwargs):
+        preferences = get_preferences()
+        sync_config = preferences.sync_config
+        return self.build_result_set(
+            [sync_config],
+            user,
+            **kwargs,
+        )
+
+    @classmethod
+    def create(self, payload, user, **kwargs):
+        sync_config = SyncConfig.load(config=payload)
+        get_preferences().update_preferences(
+            dict(sync_config=sync_config.to_dict())
+        )
+
+        if sync_config.type == SyncType.GIT:
+            # set up Git repo
+            GitSync(sync_config)
+
+        return self(get_preferences().sync_config, user, **kwargs)
+
+    @classmethod
+    def member(self, pk, user, **kwargs):
+        return self(get_preferences().sync_config, user, **kwargs)
+
+    def update(self, payload, **kwargs):
+        sync_config = SyncConfig.load(config=self.model)
+        action_type = payload.get('action_type')
+        if action_type == 'sync_data':
+            if sync_config.type == SyncType.GIT:
+                sync = GitSync(sync_config)
+                sync.sync_data()
+
+        return self
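For context, SyncResource.create loads the request payload into a SyncConfig (defined later in this diff, in mage_ai/data_preparation/sync/__init__.py) and persists it through the preferences module. A hedged sketch of such a payload; the repository URL is a placeholder:

payload = dict(
    type='git',
    remote_repo_link='https://github.com/example/repo.git',  # placeholder URL
    branch='main',
    sync_on_pipeline_run=True,
)
# SyncResource.create then runs SyncConfig.load(config=payload), saves it via
# get_preferences().update_preferences(...), and initializes the repo with GitSync(sync_config).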
mage_ai/data_preparation/executors/pipeline_executor.py CHANGED
@@ -1,6 +1,7 @@
 from mage_ai.data_preparation.logging.logger import DictLogger
 from mage_ai.data_preparation.logging.logger_manager_factory import LoggerManagerFactory
 from mage_ai.data_preparation.models.pipeline import Pipeline
+from mage_ai.shared.hash import merge_dict
 from typing import Dict
 import asyncio
 
@@ -33,3 +34,11 @@ class PipelineExecutor:
             update_status=update_status,
         ))
         self.logger_manager.output_logs_to_destination()
+
+    def _build_tags(self, **kwargs):
+        default_tags = dict(
+            pipeline_uuid=self.pipeline.uuid,
+        )
+        if kwargs.get('pipeline_run_id'):
+            default_tags['pipeline_run_id'] = kwargs.get('pipeline_run_id')
+        return merge_dict(kwargs.get('tags', {}), default_tags)
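The new _build_tags helper merges caller-supplied tags with per-run defaults. A rough sketch of the intent, assuming merge_dict(a, b) behaves like {**a, **b} (so the default tags win on key collisions) and given a PipelineExecutor instance named executor:

tags = executor._build_tags(
    pipeline_run_id=123,           # example run id
    tags={'trigger': 'schedule'},  # example caller-supplied tags
)
# roughly: {'trigger': 'schedule', 'pipeline_uuid': executor.pipeline.uuid, 'pipeline_run_id': 123}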
mage_ai/data_preparation/executors/streaming_pipeline_executor.py CHANGED
@@ -69,10 +69,12 @@ class StreamingPipelineExecutor(PipelineExecutor):
         # TODOs:
         # 1. Support multiple sources and sinks
         # 2. Support flink pipeline
+
+        tags = self._build_tags(**kwargs)
         if build_block_output_stdout:
             stdout = build_block_output_stdout(self.pipeline.uuid)
         else:
-            stdout = StreamToLogger(self.logger)
+            stdout = StreamToLogger(self.logger, logging_tags=tags)
         try:
             with redirect_stdout(stdout):
                 with redirect_stderr(stdout):
mage_ai/data_preparation/models/block/__init__.py CHANGED
@@ -1263,6 +1263,7 @@ df = get_variable('{self.pipeline.uuid}', '{block_uuid}', 'df')
         include_outputs=False,
         sample_count=None,
         check_if_file_exists: bool = False,
+        **kwargs,
     ):
         data = self.to_dict_base()
         if include_content:
@@ -1280,6 +1281,7 @@ df = get_variable('{self.pipeline.uuid}', '{block_uuid}', 'df')
                     'Delete the current block to remove it from the pipeline or write code ' +
                     f'and save the pipeline to create a new file at {file_path}.',
                 )
+
         return data
 
     async def to_dict_async(
@@ -1314,7 +1316,7 @@ df = get_variable('{self.pipeline.uuid}', '{block_uuid}', 'df')
 
         return data
 
-    def update(self, data):
+    def update(self, data, **kwargs):
         if 'name' in data and data['name'] != self.name:
             self.__update_name(data['name'])
         if (
@@ -1338,6 +1340,7 @@ df = get_variable('{self.pipeline.uuid}', '{block_uuid}', 'df')
         if self.has_callback:
             CallbackBlock.create(self.uuid)
         self.__update_pipeline_block()
+
         return self
 
     def update_upstream_blocks(self, upstream_blocks: List[Any]) -> None:
@@ -1505,8 +1508,6 @@ df = get_variable('{self.pipeline.uuid}', '{block_uuid}', 'df')
             raise Exception(f'Failed to pass tests for block {self.uuid}')
 
     def analyze_outputs(self, variable_mapping, shape_only: bool = False):
-        from mage_ai.data_cleaner.data_cleaner import clean as clean_data
-
         if self.pipeline is None:
             return
         for uuid, data in variable_mapping.items():
@@ -1532,6 +1533,7 @@ df = get_variable('{self.pipeline.uuid}', '{block_uuid}', 'df')
             else:
                 data_for_analysis = data.reset_index(drop=True)
             try:
+                from mage_ai.data_cleaner.data_cleaner import clean as clean_data
                 analysis = clean_data(
                     data_for_analysis,
                     df_original=data,
mage_ai/data_preparation/models/block/integration/__init__.py CHANGED
@@ -314,6 +314,65 @@ class SourceBlock(IntegrationBlock):
 
 
 class DestinationBlock(IntegrationBlock):
+    def to_dict(
+        self,
+        include_content=False,
+        include_outputs=False,
+        sample_count=None,
+        check_if_file_exists: bool = False,
+        destination_table: str = None,
+        state_stream: str = None,
+    ):
+        data = {}
+        if state_stream and destination_table:
+            from mage_ai.data_preparation.models.pipelines.integration_pipeline \
+                import IntegrationPipeline
+            integration_pipeline = IntegrationPipeline(self.pipeline.uuid)
+            destination_state_file_path = integration_pipeline.destination_state_file_path(
+                destination_table=destination_table,
+                stream=state_stream,
+            )
+            if os.path.isfile(destination_state_file_path):
+                with open(destination_state_file_path, 'r') as f:
+                    text = f.read()
+                    d = json.loads(text) if text else {}
+                    bookmark_values = d.get('bookmarks', {}).get(state_stream)
+                    data['bookmarks'] = bookmark_values
+
+        return merge_dict(
+            super().to_dict(
+                include_content,
+                include_outputs,
+                sample_count,
+                check_if_file_exists,
+            ),
+            data,
+        )
+
+    def update(self, data, update_state=False):
+        if update_state:
+            from mage_ai.data_preparation.models.pipelines.integration_pipeline \
+                import IntegrationPipeline
+            from mage_integrations.destinations.utils \
+                import update_destination_state_bookmarks
+
+            integration_pipeline = IntegrationPipeline(self.pipeline.uuid)
+            tap_stream_id = data.get('tap_stream_id')
+            destination_table = data.get('destination_table')
+            bookmark_values = data.get('bookmark_values', {})
+            if tap_stream_id and destination_table:
+                destination_state_file_path = integration_pipeline.destination_state_file_path(
+                    destination_table=destination_table,
+                    stream=tap_stream_id,
+                )
+                update_destination_state_bookmarks(
+                    destination_state_file_path,
+                    tap_stream_id,
+                    bookmark_values=bookmark_values
+                )
+
+        return super().update(data)
+
     def output_variables(self, execution_partition: str = None) -> List[str]:
         return []
 
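The bookmark handling above reads a JSON state file and returns d['bookmarks'][state_stream]. Inferred from that access pattern, the destination state file looks roughly like the Python dict below; the stream name and bookmark key are illustrative only:

state = {
    'bookmarks': {
        'users': {'updated_at': '2023-01-01T00:00:00'},  # illustrative stream name and bookmark
    },
}
# DestinationBlock.to_dict(state_stream='users', destination_table=...) surfaces
# state['bookmarks']['users'] under the 'bookmarks' key of the returned dict, while
# DestinationBlock.update(..., update_state=True) writes new bookmark values back via
# update_destination_state_bookmarks.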
mage_ai/data_preparation/preferences.py ADDED
@@ -0,0 +1,43 @@
+from typing import Dict
+from mage_ai.data_preparation.models.constants import PREFERENCES_FILE
+from mage_ai.data_preparation.repo_manager import get_repo_path
+import os
+import traceback
+import yaml
+
+
+class Preferences:
+    def __init__(self, repo_path: str = None, config_dict: Dict = None):
+        self.repo_path = repo_path or get_repo_path()
+        self.preferences_file_path = \
+            os.path.join(self.repo_path, PREFERENCES_FILE)
+
+        try:
+            if not config_dict:
+                if os.path.exists(self.preferences_file_path):
+                    with open(self.preferences_file_path) as f:
+                        preferences = yaml.full_load(f.read()) or {}
+                else:
+                    preferences = dict()
+            else:
+                preferences = config_dict
+
+            self.sync_config = preferences.get('sync_config', dict())
+        except Exception:
+            traceback.print_exc()
+            pass
+
+    def update_preferences(self, updates: Dict):
+        preferences = self.to_dict()
+        preferences.update(updates)
+        with open(self.preferences_file_path, 'w') as f:
+            yaml.dump(preferences, f)
+
+    def to_dict(self) -> Dict:
+        return dict(
+            sync_config=self.sync_config
+        )
+
+
+def get_preferences(repo_path=None) -> Preferences:
+    return Preferences(repo_path=repo_path)
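A short usage sketch of the new preferences module, mirroring how SyncResource uses it; the sync_config values here are placeholders:

from mage_ai.data_preparation.preferences import get_preferences

preferences = get_preferences()  # reads <repo_path>/<PREFERENCES_FILE> (YAML) when it exists
print(preferences.sync_config)   # defaults to {} on a fresh project

preferences.update_preferences(dict(
    sync_config=dict(type='git', branch='main'),  # placeholder values
))  # merges the update and writes the preferences YAML back to disk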
mage_ai/data_preparation/sync/__init__.py ADDED
@@ -0,0 +1,17 @@
+from dataclasses import dataclass
+from enum import Enum
+from mage_ai.shared.config import BaseConfig
+import os
+
+
+class SyncType(str, Enum):
+    GIT = 'git'
+
+
+@dataclass
+class SyncConfig(BaseConfig):
+    type: SyncType
+    remote_repo_link: str
+    repo_path: str = os.getcwd()
+    branch: str = 'main'
+    sync_on_pipeline_run: bool = False
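SyncConfig is a BaseConfig dataclass, and the rest of this release builds it from a plain dict with SyncConfig.load(config=...). A hedged sketch of that call; the repository URL is a placeholder:

from mage_ai.data_preparation.sync import SyncConfig, SyncType

sync_config = SyncConfig.load(config=dict(
    type=SyncType.GIT,
    remote_repo_link='https://github.com/example/repo.git',  # placeholder
    branch='main',
    sync_on_pipeline_run=True,
))
# Because SyncType subclasses str, the comparison sync_config.type == SyncType.GIT also
# holds when the payload carries the raw string 'git'.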
mage_ai/data_preparation/sync/git_sync.py ADDED
@@ -0,0 +1,26 @@
+from mage_ai.data_preparation.sync import SyncConfig
+from mage_ai.data_preparation.sync.base_sync import BaseSync
+from mage_ai.shared.logger import VerboseFunctionExec
+import git
+
+
+class GitSync(BaseSync):
+    def __init__(self, sync_config: SyncConfig):
+        self.remote_repo_link = sync_config.remote_repo_link
+        self.repo_path = sync_config.repo_path
+        self.branch = sync_config.branch
+        try:
+            self.repo = git.Repo(self.repo_path)
+        except git.exc.InvalidGitRepositoryError:
+            self.repo = git.Repo.init(self.repo_path)
+            self.repo.create_remote('origin', self.remote_repo_link)
+
+        self.origin = self.repo.remotes.origin
+
+    def sync_data(self):
+        with VerboseFunctionExec(
+            f'Syncing data with remote repo {self.remote_repo_link}',
+            verbose=True,
+        ):
+            self.origin.fetch(kill_after_timeout=60)
+            self.repo.git.reset('--hard', f'origin/{self.branch}')
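Putting the pieces together, a sync can be triggered from code roughly as follows, mirroring what SyncResource.update and PipelineScheduler.start do in this release:

from mage_ai.data_preparation.preferences import get_preferences
from mage_ai.data_preparation.sync import SyncConfig
from mage_ai.data_preparation.sync.git_sync import GitSync

sync_config = SyncConfig.load(config=get_preferences().sync_config)
GitSync(sync_config).sync_data()  # fetch origin, then hard-reset to origin/<branch>

Note that sync_data performs a hard reset to origin/<branch>, so uncommitted local changes in repo_path are discarded.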
mage_ai/data_preparation/templates/data_loaders/streaming/kafka.yaml CHANGED
@@ -18,3 +18,8 @@ consumer_group: unique_consumer_group
 # mechanism: "PLAIN"
 # username: username
 # password: password
+
+# Uncomment the config below to use protobuf schema to deserialize message
+# serde_config:
+# serialization_method: PROTOBUF
+# schema_classpath: "path.to.schema.SchemaClass"
mage_ai/io/export_utils.py CHANGED

mage_ai/io/postgres.py CHANGED
@@ -4,8 +4,10 @@ from mage_ai.io.sql import BaseSQL
 from pandas import DataFrame, Series
 from psycopg2 import connect, _psycopg
 from sshtunnel import SSHTunnelForwarder
-import numpy as np
 from typing import Union, IO
+import json
+import numpy as np
+import pandas as pd
 
 
 class Postgres(BaseSQL):
@@ -138,6 +140,29 @@ class Postgres(BaseSQL):
             PandasTypes.UNKNOWN_ARRAY,
             PandasTypes.COMPLEX,
         ):
+            series = column[column.notnull()]
+            values = series.values
+
+            column_type = None
+
+            if len(values) >= 1:
+                value = values[0]
+                column_type = 'JSONB'
+
+                if type(value) is list:
+                    if len(value) >= 1:
+                        item = value[0]
+                        item_series = pd.Series(data=item)
+                        item_dtype = item_series.dtype
+                        if PandasTypes.OBJECT != item_dtype:
+                            item_type = self.get_type(item_series, item_dtype)
+                            column_type = f'{item_type}[]'
+                        else:
+                            column_type = 'text[]'
+
+            if column_type:
+                return column_type
+
             raise BadConversionError(
                 f'Cannot convert column \'{column.name}\' with data type \'{dtype}\' to '
                 'a PostgreSQL datatype.'
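In effect, object-typed columns that previously raised BadConversionError can now be exported: a non-empty column defaults to JSONB, and list-valued columns map to a typed Postgres array (for example bigint[] when the first element is an integer), falling back to text[]. A rough illustration of the kind of input this targets; whether a given column reaches this branch depends on mage-ai's internal PandasTypes classification:

import pandas as pd

df = pd.DataFrame({
    'attributes': [{'color': 'blue'}, {'color': 'red'}],  # dict values -> 'JSONB'
    'scores': [[1, 2, 3], [4, 5]],                        # int lists -> e.g. 'bigint[]'
    'labels': [['a', 'b'], ['c']],                        # str lists -> 'text[]'
})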
@@ -180,6 +205,8 @@ class Postgres(BaseSQL):
             return 'bigint'
         elif dtype == PandasTypes.EMPTY:
             return 'text'
+        elif PandasTypes.OBJECT == dtype:
+            return 'JSONB'
         else:
             print(f'Invalid datatype provided: {dtype}')
 
@@ -192,9 +219,24 @@ class Postgres(BaseSQL):
         full_table_name: str,
         buffer: Union[IO, None] = None
     ) -> None:
-
-
+        df_ = df.copy()
+        columns = df_.columns
+
+        for col in columns:
+            print(col, df_[col].dtype)
+            if PandasTypes.OBJECT == df_[col].dtype:
+                df_[col] = df_[col].apply(lambda x: json.dumps(x))
+
+        df_.to_csv(
+            buffer,
+            header=False,
+            index=False,
+            na_rep='',
+        )
+
         buffer.seek(0)
+
+        columns_names = ', '.join(columns)
         cursor.copy_expert(f"""
 COPY {full_table_name} FROM STDIN (
     FORMAT csv
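The export path complements the type mapping: before the CSV COPY, object-dtype columns are JSON-encoded so dict and list values arrive as valid JSONB literals. A minimal standalone reproduction of that serialization step (the dtype comparison below stands in for the PandasTypes.OBJECT check above):

import json
import pandas as pd

df = pd.DataFrame({'id': [1, 2], 'payload': [{'a': 1}, {'b': 2}]})
df_ = df.copy()
for col in df_.columns:
    if df_[col].dtype == object:
        df_[col] = df_[col].apply(json.dumps)  # {'a': 1} -> '{"a": 1}'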
@@ -202,4 +244,4 @@ COPY {full_table_name} FROM STDIN (
     , NULL \'\'
     , FORCE_NULL({columns_names})
 );
-""", buffer)
+""", buffer)
mage_ai/orchestration/db/models.py CHANGED
@@ -76,6 +76,11 @@ class BaseModel(Base):
     def full_clean(self, **kwargs) -> None:
         pass
 
+    @classmethod
+    @safe_db_query
+    def get(self, uuid):
+        return self.query.get(uuid)
+
     def save(self, commit=True) -> None:
         self.session.add(self)
         if commit:
@@ -667,6 +672,15 @@ class Backfill(BaseModel):
 
     pipeline_runs = relationship('PipelineRun', back_populates='backfill')
 
+    @classmethod
+    @safe_db_query
+    def filter(self, pipeline_schedule_ids: List = None):
+        if pipeline_schedule_ids is not None:
+            return Backfill.query.filter(
+                Backfill.pipeline_schedule_id.in_(pipeline_schedule_ids),
+            )
+        return []
+
 
 class Secret(BaseModel):
     name = Column(String(255), unique=True)
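A brief sketch of how the two new helpers are consumed later in this diff (pipeline_scheduler.py); the ids are placeholders and a configured database session is assumed:

from mage_ai.orchestration.db.models import Backfill, PipelineSchedule

schedule = PipelineSchedule.get(42)                          # query.get on the primary key
backfills = Backfill.filter(pipeline_schedule_ids=[42, 43])  # filtered query; returns [] when the argument is None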
mage_ai/orchestration/pipeline_scheduler.py CHANGED
@@ -14,7 +14,10 @@ from mage_ai.data_preparation.models.block.utils import (
 from mage_ai.data_preparation.models.constants import PipelineType
 from mage_ai.data_preparation.models.pipeline import Pipeline
 from mage_ai.data_preparation.models.pipelines.integration_pipeline import IntegrationPipeline
+from mage_ai.data_preparation.preferences import get_preferences
 from mage_ai.data_preparation.repo_manager import get_repo_config, get_repo_path
+from mage_ai.data_preparation.sync import SyncConfig
+from mage_ai.data_preparation.sync.git_sync import GitSync
 from mage_ai.data_preparation.variable_manager import get_global_variables
 from mage_ai.orchestration.db.models import (
     Backfill,
@@ -58,6 +61,12 @@ class PipelineScheduler:
         )
 
     def start(self, should_schedule: bool = True) -> None:
+        if get_preferences().sync_config:
+            sync_config = SyncConfig.load(config=get_preferences().sync_config)
+            if sync_config.sync_on_pipeline_run:
+                sync = GitSync(sync_config)
+                sync.sync_data()
+
         if self.pipeline_run.status == PipelineRun.PipelineRunStatus.RUNNING:
             return
         self.pipeline_run.update(status=PipelineRun.PipelineRunStatus.RUNNING)
@@ -126,7 +135,7 @@
             )
         self.logger_manager.output_logs_to_destination()
 
-        schedule = PipelineSchedule.
+        schedule = PipelineSchedule.get(
            self.pipeline_run.pipeline_schedule_id,
        )
 
@@ -735,9 +744,8 @@ def schedule_all():
     active_pipeline_schedules = \
         list(PipelineSchedule.active_schedules(pipeline_uuids=repo_pipelines))
 
-    backfills = Backfill.
-
-    )
+    backfills = Backfill.filter(pipeline_schedule_ids=[ps.id for ps in active_pipeline_schedules])
+
     backfills_by_pipeline_schedule_id = index_by(
         lambda backfill: backfill.pipeline_schedule_id,
         backfills,
mage_ai/server/constants.py CHANGED