mage-ai 0.8.4__py3-none-any.whl → 0.8.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of mage-ai might be problematic.
- mage_ai/api/policies/BlockPolicy.py +2 -0
- mage_ai/api/policies/PipelinePolicy.py +1 -0
- mage_ai/api/presenters/BlockPresenter.py +1 -0
- mage_ai/api/presenters/OutputPresenter.py +1 -0
- mage_ai/api/presenters/PipelinePresenter.py +6 -1
- mage_ai/data_preparation/models/block/__init__.py +18 -4
- mage_ai/data_preparation/models/block/dbt/__init__.py +50 -1
- mage_ai/data_preparation/models/block/dbt/utils/__init__.py +136 -60
- mage_ai/data_preparation/models/block/utils.py +2 -1
- mage_ai/data_preparation/models/pipeline.py +5 -3
- mage_ai/data_preparation/repo_manager.py +1 -1
- mage_ai/data_preparation/storage/local_storage.py +1 -1
- mage_ai/data_preparation/templates/custom/python/default.jinja +1 -1
- mage_ai/data_preparation/templates/data_exporters/default.jinja +2 -4
- mage_ai/data_preparation/templates/data_exporters/pyspark/default.jinja +2 -2
- mage_ai/data_preparation/templates/data_loaders/api.py +1 -1
- mage_ai/data_preparation/templates/data_loaders/default.jinja +1 -1
- mage_ai/data_preparation/templates/data_loaders/file.py +1 -0
- mage_ai/data_preparation/templates/data_loaders/pyspark/default.jinja +1 -1
- mage_ai/data_preparation/templates/testable.jinja +2 -2
- mage_ai/data_preparation/templates/transformers/data_warehouse_transformer.jinja +2 -0
- mage_ai/data_preparation/templates/transformers/default.jinja +4 -6
- mage_ai/data_preparation/templates/transformers/default_pyspark.jinja +4 -4
- mage_ai/data_preparation/templates/transformers/transformer_actions/action.jinja +2 -0
- mage_ai/io/base.py +11 -1
- mage_ai/io/postgres.py +10 -5
- mage_ai/server/constants.py +1 -1
- mage_ai/server/frontend_dist/404.html +2 -2
- mage_ai/server/frontend_dist/404.html.html +2 -2
- mage_ai/server/frontend_dist/_next/static/chunks/{2249-84de2142241f4925.js → 2249-70929b8c547bbc18.js} +1 -1
- mage_ai/server/frontend_dist/_next/static/chunks/4846-58b7e138009c98a2.js +1 -0
- mage_ai/server/frontend_dist/_next/static/chunks/{5944-9488f2ddf3543b08.js → 5944-757b7898608a65e1.js} +1 -1
- mage_ai/server/frontend_dist/_next/static/chunks/{6641-fb7a8be8444f2dd4.js → 6641-a0ed2bd8f5dc777b.js} +1 -1
- mage_ai/server/frontend_dist/_next/static/chunks/8961-7a2143c4424c9217.js +1 -0
- mage_ai/server/frontend_dist/_next/static/chunks/{9140-836abb2721055e82.js → 9140-6f67e0879394373d.js} +1 -1
- mage_ai/server/frontend_dist/_next/static/chunks/9898-51ca6a904b7a2382.js +1 -0
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/backfills-a1e8869ed201ce7e.js +1 -0
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/edit-0678cf63c79072a7.js +1 -0
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/logs-5ccc75887776efb0.js +1 -0
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/monitors/{block-runs-a6dbd67285ecc5a5.js → block-runs-8f23f7ca9efcb069.js} +1 -1
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/syncs-d2bbafbb5b2c09e7.js +1 -0
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/triggers-b0b91245d3299bdf.js +1 -0
- mage_ai/server/frontend_dist/_next/static/chunks/pages/terminal-5d7c45bb058a3f20.js +1 -0
- mage_ai/server/frontend_dist/_next/static/chunks/pages/{triggers-dbce4f85a95ea336.js → triggers-e0172c422c95eda9.js} +1 -1
- mage_ai/server/frontend_dist/_next/static/{0jln56azuIZflrR1CXt9U → okm8eXXn0kUptL5A1B7a6}/_buildManifest.js +1 -1
- mage_ai/server/frontend_dist/index.html +2 -2
- mage_ai/server/frontend_dist/manage.html +2 -2
- mage_ai/server/frontend_dist/pipeline-runs.html +2 -2
- mage_ai/server/frontend_dist/pipelines/[pipeline]/backfills/[...slug].html +2 -2
- mage_ai/server/frontend_dist/pipelines/[pipeline]/backfills.html +2 -2
- mage_ai/server/frontend_dist/pipelines/[pipeline]/edit.html +2 -2
- mage_ai/server/frontend_dist/pipelines/[pipeline]/logs.html +2 -2
- mage_ai/server/frontend_dist/pipelines/[pipeline]/monitors/block-runs.html +2 -2
- mage_ai/server/frontend_dist/pipelines/[pipeline]/monitors/block-runtime.html +2 -2
- mage_ai/server/frontend_dist/pipelines/[pipeline]/monitors.html +2 -2
- mage_ai/server/frontend_dist/pipelines/[pipeline]/runs/[run].html +2 -2
- mage_ai/server/frontend_dist/pipelines/[pipeline]/runs.html +2 -2
- mage_ai/server/frontend_dist/pipelines/[pipeline]/syncs.html +2 -2
- mage_ai/server/frontend_dist/pipelines/[pipeline]/triggers/[...slug].html +2 -2
- mage_ai/server/frontend_dist/pipelines/[pipeline]/triggers.html +2 -2
- mage_ai/server/frontend_dist/pipelines/[pipeline].html +2 -2
- mage_ai/server/frontend_dist/pipelines.html +2 -2
- mage_ai/server/frontend_dist/settings/account/profile.html +2 -2
- mage_ai/server/frontend_dist/settings/workspace/preferences.html +2 -2
- mage_ai/server/frontend_dist/settings/workspace/users.html +2 -2
- mage_ai/server/frontend_dist/settings.html +2 -2
- mage_ai/server/frontend_dist/sign-in.html +13 -13
- mage_ai/server/frontend_dist/terminal.html +2 -2
- mage_ai/server/frontend_dist/test.html +2 -2
- mage_ai/server/frontend_dist/triggers.html +2 -2
- mage_ai/server/server.py +1 -0
- mage_ai/server/utils/output_display.py +7 -0
- mage_ai/server/websocket_server.py +2 -2
- mage_ai/services/datadog/__init__.py +123 -0
- mage_ai/tests/data_preparation/test_templates.py +34 -86
- mage_ai/tests/services/datadog/__init__.py +0 -0
- mage_ai/tests/services/datadog/test_datadog.py +69 -0
- {mage_ai-0.8.4.dist-info → mage_ai-0.8.6.dist-info}/METADATA +2 -1
- {mage_ai-0.8.4.dist-info → mage_ai-0.8.6.dist-info}/RECORD +85 -82
- mage_ai/server/frontend_dist/_next/static/chunks/4846-64f9afc02d45293c.js +0 -1
- mage_ai/server/frontend_dist/_next/static/chunks/8961-e25997bc088e0d19.js +0 -1
- mage_ai/server/frontend_dist/_next/static/chunks/9898-91c6384c9bd33ca7.js +0 -1
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/backfills-688184bd8b4d4f5c.js +0 -1
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/edit-c6dfcc4f231cfa5a.js +0 -1
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/logs-abce05c25bee218d.js +0 -1
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/syncs-a056c0e384d39c9b.js +0 -1
- mage_ai/server/frontend_dist/_next/static/chunks/pages/pipelines/[pipeline]/triggers-6a8545f96cc7b8f2.js +0 -1
- mage_ai/server/frontend_dist/_next/static/chunks/pages/terminal-1734d248ec2b6c24.js +0 -1
- /mage_ai/server/frontend_dist/_next/static/{0jln56azuIZflrR1CXt9U → okm8eXXn0kUptL5A1B7a6}/_middlewareManifest.js +0 -0
- /mage_ai/server/frontend_dist/_next/static/{0jln56azuIZflrR1CXt9U → okm8eXXn0kUptL5A1B7a6}/_ssgManifest.js +0 -0
- {mage_ai-0.8.4.dist-info → mage_ai-0.8.6.dist-info}/LICENSE +0 -0
- {mage_ai-0.8.4.dist-info → mage_ai-0.8.6.dist-info}/WHEEL +0 -0
- {mage_ai-0.8.4.dist-info → mage_ai-0.8.6.dist-info}/entry_points.txt +0 -0
- {mage_ai-0.8.4.dist-info → mage_ai-0.8.6.dist-info}/top_level.txt +0 -0
```diff
@@ -53,6 +53,7 @@ BlockPolicy.allow_write([
     'content',
     'converted_from',
     'language',
+    'metadata',
     'name',
     'priority',
     'type',
```
```diff
@@ -73,6 +74,7 @@ BlockPolicy.allow_write([
     'executor_config',
     'executor_type',
     'language',
+    'metadata',
     'name',
     'outputs',
     'status',
```
```diff
@@ -21,13 +21,18 @@ class PipelinePresenter(BasePresenter):
         include_content = query.get('includes_content', [True])
         if include_content:
             include_content = include_content[0]
+
         include_outputs = query.get('includes_outputs', [True])
         if include_outputs:
             include_outputs = include_outputs[0]
 
+        include_block_metadata = query.get('includes_block_metadata', [True])
+        if include_block_metadata:
+            include_block_metadata = include_block_metadata[0]
+
         return await self.model.to_dict_async(
+            include_block_metadata=include_block_metadata,
             include_content=include_content,
-            include_outputs=include_outputs,
             sample_count=DATAFRAME_SAMPLE_COUNT_PREVIEW,
         )
 
```
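For context on the pattern above: query parameters arrive as lists (one value per occurrence in the URL), default to `[True]`, and are unwrapped to their first element. A minimal self-contained sketch of that unwrapping, using a plain dict in place of the real request object (the helper name is hypothetical):

```python
# Sketch of the list-valued query parameter pattern used by the presenter.
def unwrap_query_flag(query: dict, key: str, default=True):
    value = query.get(key, [default])
    if value:
        value = value[0]
    return value

query = {'includes_block_metadata': ['false']}
print(unwrap_query_flag(query, 'includes_block_metadata'))  # 'false'
print(unwrap_query_flag(query, 'includes_content'))         # True (default)
```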
```diff
@@ -267,6 +267,9 @@ class Block:
         self._content = await self.file.content_async()
         return self._content
 
+    async def metadata_async(self):
+        return {}
+
     @property
     def executable(self):
         return (
```
```diff
@@ -566,6 +569,7 @@ class Block:
         dynamic_block_index: int = None,
         dynamic_block_uuid: str = None,
         dynamic_upstream_block_uuids: List[str] = None,
+        run_settings: Dict = None,
     ) -> Dict:
         try:
             if not run_all_blocks:
```
```diff
@@ -595,6 +599,7 @@
                 runtime_arguments=runtime_arguments,
                 dynamic_block_index=dynamic_block_index,
                 dynamic_upstream_block_uuids=dynamic_upstream_block_uuids,
+                run_settings=run_settings,
             )
             block_output = output['output'] or []
             variable_mapping = dict()
```
```diff
@@ -773,6 +778,7 @@
         runtime_arguments: Dict = None,
         dynamic_block_index: int = None,
         dynamic_upstream_block_uuids: List[str] = None,
+        run_settings: Dict = None,
     ) -> Dict:
         # Add pipeline uuid and block uuid to global_vars
         global_vars = merge_dict(
```
```diff
@@ -821,7 +827,8 @@
             test_execution=test_execution,
             input_from_output=input_from_output,
             runtime_arguments=runtime_arguments,
-            upstream_block_uuids=upstream_block_uuids
+            upstream_block_uuids=upstream_block_uuids,
+            run_settings=run_settings,
         )
 
         output_message = dict(output=outputs)
```
```diff
@@ -841,6 +848,7 @@
         input_from_output: Dict = None,
         runtime_arguments: Dict = None,
         upstream_block_uuids: List[str] = None,
+        run_settings: Dict = None,
     ) -> List:
         decorated_functions = []
         test_functions = []
```
```diff
@@ -1219,15 +1227,17 @@ df = get_variable('{self.pipeline.uuid}', '{block_uuid}', 'df')
 
     async def to_dict_async(
         self,
-        include_content: bool = False,
-        include_outputs: bool = False,
-        sample_count: int = None,
+        include_block_metadata: bool = False,
+        include_content: bool = False,
+        include_outputs: bool = False,
+        sample_count: int = None,
         check_if_file_exists: bool = False,
     ):
         data = self.to_dict_base()
 
         if include_content:
             data['content'] = await self.content_async()
+
         if include_outputs:
             data['outputs'] = await self.outputs_async()
         if check_if_file_exists:
```
```diff
@@ -1239,6 +1249,10 @@ df = get_variable('{self.pipeline.uuid}', '{block_uuid}', 'df')
                 'Delete the current block to remove it from the pipeline or write code ' +
                 f'and save the pipeline to create a new file at {file_path}.',
             )
+
+        if include_block_metadata:
+            data['metadata'] = await self.metadata_async()
+
         return data
 
     def update(self, data):
```
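Taken together with the `metadata_async` stub above, serialization only awaits block metadata when the new flag is set. A simplified sketch of the gating with a stand-in class (not the real `Block` signature):

```python
import asyncio

class DemoBlock:  # simplified stand-in for mage-ai's Block
    async def metadata_async(self):
        return {'dbt': {'project': None, 'projects': {}}}

    async def to_dict_async(self, include_block_metadata: bool = False):
        data = {'uuid': 'demo_block'}
        if include_block_metadata:
            # Only pay the cost of metadata lookup when asked for it.
            data['metadata'] = await self.metadata_async()
        return data

print(asyncio.run(DemoBlock().to_dict_async()))
print(asyncio.run(DemoBlock().to_dict_async(include_block_metadata=True)))
```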
```diff
@@ -2,6 +2,8 @@ from mage_ai.data_preparation.models.block import Block
 from mage_ai.data_preparation.models.block.dbt.utils import (
     build_command_line_arguments,
     create_upstream_tables,
+    load_profiles_async,
+    parse_attributes,
     query_from_compiled_sql,
     run_dbt_tests,
     update_model_settings,
```
```diff
@@ -10,7 +12,6 @@ from mage_ai.data_preparation.models.constants import BlockLanguage
 from mage_ai.data_preparation.repo_manager import get_repo_path
 from mage_ai.shared.hash import merge_dict
 from typing import Any, Dict, List
-
 import json
 import os
 import subprocess
```
```diff
@@ -31,6 +32,49 @@ class DBTBlock(Block):
 
         return super().file_path
 
+    async def metadata_async(self) -> Dict:
+        project = None
+        projects = {}
+
+        if self.configuration.get('file_path'):
+            attributes_dict = parse_attributes(self)
+            profiles_full_path = attributes_dict['profiles_full_path']
+            project_name = attributes_dict['project_name']
+            project = project_name
+
+            targets = []
+            profiles = await load_profiles_async(project_name, profiles_full_path)
+            outputs = profiles.get('outputs')
+            if outputs:
+                targets += sorted(list(outputs.keys()))
+
+            projects[project_name] = dict(
+                target=profiles.get('target'),
+                targets=targets,
+            )
+        else:
+            dbt_dir = f'{get_repo_path()}/dbt'
+            project_names = [
+                name for name in os.listdir(dbt_dir) if os.path.isdir(f'{dbt_dir}/{name}')
+            ]
+            for project_name in project_names:
+                profiles_full_path = f'{dbt_dir}/{project_name}/profiles.yml'
+                targets = []
+                profiles = await load_profiles_async(project_name, profiles_full_path)
+                outputs = profiles.get('outputs')
+                if outputs:
+                    targets += sorted(list(outputs.keys()))
+
+                projects[project_name] = dict(
+                    target=profiles.get('target'),
+                    targets=targets,
+                )
+
+        return dict(dbt=dict(
+            project=project,
+            projects=projects,
+        ))
+
     def run_tests(
         self,
         build_block_output_stdout=None,
```
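The returned structure maps each dbt project to its default target plus the sorted list of targets found in `profiles.yml`. A hedged sketch of that transformation on an in-memory profiles dict (project and target names are hypothetical):

```python
# Hypothetical parsed profiles.yml content for one project ("demo"):
profiles = {
    'target': 'dev',
    'outputs': {
        'prod': {'type': 'postgres'},
        'dev': {'type': 'postgres'},
    },
}

targets = []
outputs = profiles.get('outputs')
if outputs:
    targets += sorted(list(outputs.keys()))

metadata = dict(dbt=dict(
    project='demo',
    projects={'demo': dict(target=profiles.get('target'), targets=targets)},
))
print(metadata)
# {'dbt': {'project': 'demo',
#          'projects': {'demo': {'target': 'dev', 'targets': ['dev', 'prod']}}}}
```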
```diff
@@ -56,10 +100,14 @@ class DBTBlock(Block):
         global_vars: Dict = None,
         test_execution: bool = False,
         runtime_arguments: Dict = None,
+        run_settings: Dict = None,
         **kwargs,
     ) -> List:
         variables = merge_dict(global_vars, runtime_arguments or {})
 
+        if run_settings and run_settings.get('run_model', False):
+            test_execution = False
+
         dbt_command, args, command_line_dict = build_command_line_arguments(
             self,
             variables,
```
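This lets a caller force a real `dbt run` out of what would otherwise be a test execution (compile and preview). A small sketch of the flag's effect, inlining just the branch added above (the helper name is hypothetical):

```python
def resolve_test_execution(test_execution: bool, run_settings: dict = None) -> bool:
    # Mirrors the branch above: run_settings={'run_model': True} forces a
    # real model run even inside an interactive/test execution.
    if run_settings and run_settings.get('run_model', False):
        return False
    return test_execution

print(resolve_test_execution(True, {'run_model': True}))  # False -> real dbt run
print(resolve_test_execution(True, None))                 # True  -> preview only
```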
```diff
@@ -95,6 +143,7 @@ class DBTBlock(Block):
             df = query_from_compiled_sql(
                 self,
                 dbt_profile_target,
+                limit=self.configuration.get('limit'),
             )
             self.store_variables(
                 dict(df=df),
```
```diff
@@ -24,6 +24,7 @@ from mage_ai.shared.parsers import encode_complex
 from mage_ai.shared.utils import clean_name, files_in_path
 from pandas import DataFrame
 from typing import Callable, Dict, List, Tuple
+import aiofiles
 import os
 import re
 import simplejson
```
```diff
@@ -252,11 +253,65 @@ def add_table_to_source(block: 'Block', settings: Dict, source_name: str, table_
     return settings
 
 
-def get_profile(block, profile_target: str = None) -> Dict:
-    attr = parse_attributes(block)
-    project_name = attr['project_name']
-    profiles_full_path = attr['profiles_full_path']
-    return load_profile(project_name, profiles_full_path, profile_target)
+def load_profiles_file(profiles_full_path: str) -> Dict:
+    try:
+        with open(profiles_full_path, 'r') as f:
+            try:
+                text = Template(f.read()).render(
+                    **get_template_vars(),
+                )
+                return yaml.safe_load(text)
+            except Exception as err:
+                print(
+                    f'Error loading file {profiles_full_path}, check file content syntax: {err}.',
+                )
+                return {}
+    except OSError as err:
+        print(
+            f'Error loading file {profiles_full_path}, check file content syntax: {err}.',
+        )
+        return {}
+
+
+async def load_profiles_file_async(profiles_full_path: str) -> Dict:
+    try:
+        async with aiofiles.open(profiles_full_path, mode='r') as fp:
+            try:
+                file_content = await fp.read()
+                text = Template(file_content).render(
+                    **get_template_vars(),
+                )
+                return yaml.safe_load(text)
+            except Exception as err:
+                print(
+                    f'Error loading file {profiles_full_path}, check file content syntax: {err}.',
+                )
+                return {}
+    except OSError as err:
+        print(
+            f'Error loading file {profiles_full_path}, check file content syntax: {err}.',
+        )
+        return {}
+
+
+def load_profiles(project_name: str, profiles_full_path: str) -> Dict:
+    profiles = load_profiles_file(profiles_full_path)
+
+    if not profiles or project_name not in profiles:
+        print(f'Project name {project_name} does not exist in profile file {profiles_full_path}.')
+        return {}
+
+    return profiles[project_name]
+
+
+async def load_profiles_async(project_name: str, profiles_full_path: str) -> Dict:
+    profiles = await load_profiles_file_async(profiles_full_path)
+
+    if not profiles or project_name not in profiles:
+        print(f'Project name {project_name} does not exist in profile file {profiles_full_path}.')
+        return {}
+
+    return profiles[project_name]
 
 
 def load_profile(
```
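The async variant reads `profiles.yml` without blocking the event loop. A self-contained sketch of the aiofiles-plus-YAML pattern, minus mage-ai's Jinja rendering of template variables (the helper name and path are hypothetical):

```python
import asyncio
import aiofiles
import yaml

async def read_yaml_async(path: str) -> dict:
    # Same shape as load_profiles_file_async, without the Jinja templating
    # of environment variables that mage-ai layers on top.
    try:
        async with aiofiles.open(path, mode='r') as fp:
            text = await fp.read()
        return yaml.safe_load(text) or {}
    except (OSError, yaml.YAMLError) as err:
        print(f'Error loading file {path}: {err}.')
        return {}

# Hypothetical path; prints an error and returns {} if the file is absent.
print(asyncio.run(read_yaml_async('/tmp/profiles.yml')))
```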
```diff
@@ -264,19 +319,19 @@ def load_profile(
     profiles_full_path: str,
     profile_target: str = None,
 ) -> Dict:
-    with open(profiles_full_path, 'r') as f:
-        try:
-            text = Template(f.read()).render(
-                **get_template_vars(),
-            )
-            profile = yaml.safe_load(text)[project_name]
-            outputs = profile['outputs']
-            target = profile.get('target')
 
-
-
-
-
+    profile = load_profiles(project_name, profiles_full_path)
+    outputs = profile.get('outputs', {})
+    target = profile.get('target', None)
+
+    return outputs.get(profile_target or target)
+
+
+def get_profile(block, profile_target: str = None) -> Dict:
+    attr = parse_attributes(block)
+    project_name = attr['project_name']
+    profiles_full_path = attr['profiles_full_path']
+    return load_profile(project_name, profiles_full_path, profile_target)
 
 
 def config_file_loader_and_configuration(block, profile_target: str) -> Dict:
```
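After the refactor, target resolution is a plain dict lookup: an explicit `profile_target` wins, otherwise the profile's default `target` is used. A sketch on a hypothetical profile:

```python
def resolve_output(profile: dict, profile_target: str = None) -> dict:
    # Mirrors the rewritten tail of load_profile above.
    outputs = profile.get('outputs', {})
    target = profile.get('target', None)
    return outputs.get(profile_target or target)

profile = {
    'target': 'dev',
    'outputs': {'dev': {'schema': 'public'}, 'prod': {'schema': 'analytics'}},
}
print(resolve_output(profile))          # {'schema': 'public'}   (default target)
print(resolve_output(profile, 'prod'))  # {'schema': 'analytics'} (explicit target)
```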
```diff
@@ -565,7 +620,50 @@ def interpolate_input(
     return query
 
 
-def query_from_compiled_sql(block, profile_target: str) -> DataFrame:
+def interpolate_refs_with_table_names(
+    query_string: str,
+    block: Block,
+    profile_target: str,
+    configuration: Dict,
+):
+    profile = get_profile(block, profile_target)
+
+    profile_type = profile.get('type')
+    quote_str = ''
+    if DataSource.POSTGRES == profile_type:
+        database = profile['dbname']
+        schema = profile['schema']
+        quote_str = '"'
+    elif DataSource.MYSQL == profile_type:
+        database = configuration['data_provider_database']
+        schema = None
+        quote_str = '`'
+    elif DataSource.BIGQUERY == profile_type:
+        database = profile['project']
+        schema = profile['dataset']
+        quote_str = '`'
+    elif DataSource.REDSHIFT == profile_type:
+        database = profile['dbname']
+        schema = profile['schema']
+        quote_str = '"'
+    elif DataSource.SNOWFLAKE == profile_type:
+        database = profile['database']
+        schema = profile['schema']
+    elif DataSource.TRINO == profile_type:
+        database = profile['catalog']
+        schema = profile['schema']
+
+    return interpolate_input(
+        block,
+        query_string,
+        configuration=configuration,
+        profile_database=database,
+        profile_schema=schema,
+        quote_str=quote_str,
+    )
+
+
+def query_from_compiled_sql(block, profile_target: str, limit: int = None) -> DataFrame:
     attr = parse_attributes(block)
 
     config_file_loader, configuration = config_file_loader_and_configuration(
```
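Each adapter branch above pairs a database and schema with the identifier quote character for that warehouse: double quotes for Postgres and Redshift, backticks for MySQL and BigQuery, and none here for Snowflake and Trino. A sketch of how such a quote character assembles a qualified table name (the helper is hypothetical, not mage-ai API):

```python
def qualify_table(database, schema, table, quote_str: str = '') -> str:
    # Hypothetical helper illustrating the quote_str values chosen above;
    # a None schema (the MySQL case) is simply skipped.
    parts = [p for p in (database, schema, table) if p]
    return '.'.join(f'{quote_str}{p}{quote_str}' for p in parts)

print(qualify_table('analytics', 'public', 'users', '"'))      # Postgres/Redshift style
print(qualify_table('my_project', 'my_dataset', 'users', '`')) # BigQuery style
print(qualify_table('my_db', None, 'users', '`'))              # MySQL style, no schema
```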
```diff
@@ -577,77 +675,55 @@ def query_from_compiled_sql(block, profile_target: str) -> DataFrame:
     project_full_path = attr['project_full_path']
     file_path = attr['file_path']
 
-    profile = get_profile(block, profile_target)
-
     file = f'{project_full_path}/target/compiled/{file_path}'
 
     with open(file, 'r') as f:
         query_string = f.read()
 
-    profile_type = profile.get('type')
-    quote_str = ''
-    if DataSource.POSTGRES == profile_type:
-        database = profile['dbname']
-        schema = profile['schema']
-        quote_str = '"'
-    elif DataSource.MYSQL == profile_type:
-        database = configuration['data_provider_database']
-        schema = None
-        quote_str = '`'
-    elif DataSource.BIGQUERY == profile_type:
-        database = profile['project']
-        schema = profile['dataset']
-        quote_str = '`'
-    elif DataSource.REDSHIFT == profile_type:
-        database = profile['dbname']
-        schema = profile['schema']
-        quote_str = '"'
-    elif DataSource.SNOWFLAKE == profile_type:
-        database = profile['database']
-        schema = profile['schema']
-    elif DataSource.TRINO == profile_type:
-        database = profile['catalog']
-        schema = profile['schema']
-
-    query_string = interpolate_input(
-        block,
-        query_string,
-        configuration=configuration,
-        profile_database=database,
-        profile_schema=schema,
-        quote_str=quote_str,
-    )
+    # TODO (tommy dang): this was needed because we didn’t want to create model tables and
+    # so we’d create a table to store the model results without creating the model.
+    # However, we’re requiring people to run the model and create the model table to use ref.
+    # query_string = interpolate_refs_with_table_names(
+    #     query_string,
+    #     block,
+    #     profile_target=profile_target,
+    #     configuration=configuration,
+    # )
+
+    shared_kwargs = {}
+    if limit is not None:
+        shared_kwargs['limit'] = limit
 
     if DataSource.POSTGRES == data_provider:
         from mage_ai.io.postgres import Postgres
 
         with Postgres.with_config(config_file_loader) as loader:
-            return loader.load(query_string)
+            return loader.load(query_string, **shared_kwargs)
     elif DataSource.MYSQL == data_provider:
         from mage_ai.io.mysql import MySQL
 
         with MySQL.with_config(config_file_loader) as loader:
-            return loader.load(query_string)
+            return loader.load(query_string, **shared_kwargs)
     elif DataSource.BIGQUERY == data_provider:
         from mage_ai.io.bigquery import BigQuery
 
         loader = BigQuery.with_config(config_file_loader)
-        return loader.load(query_string)
+        return loader.load(query_string, **shared_kwargs)
     elif DataSource.REDSHIFT == data_provider:
         from mage_ai.io.redshift import Redshift
 
         with Redshift.with_config(config_file_loader) as loader:
-            return loader.load(query_string)
+            return loader.load(query_string, **shared_kwargs)
     elif DataSource.SNOWFLAKE == data_provider:
         from mage_ai.io.snowflake import Snowflake
 
         with Snowflake.with_config(config_file_loader) as loader:
-            return loader.load(query_string)
+            return loader.load(query_string, **shared_kwargs)
     elif DataSource.TRINO == data_provider:
         from mage_ai.io.trino import Trino
 
         with Trino.with_config(config_file_loader) as loader:
-            return loader.load(query_string)
+            return loader.load(query_string, **shared_kwargs)
 
 
 def build_command_line_arguments(
```
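The `shared_kwargs` indirection keeps every loader call site identical while forwarding `limit` only when it is set. A sketch of the same pattern against a stub loader (the stub is hypothetical):

```python
class StubLoader:  # stand-in for the mage_ai.io loaders
    def load(self, query_string: str, limit: int = None, **kwargs):
        suffix = f' (limited to {limit} rows)' if limit is not None else ''
        return f'ran: {query_string}{suffix}'

def run(query_string: str, limit: int = None):
    # Only forward the keyword when it carries a value, so loaders that
    # have their own defaults are left untouched.
    shared_kwargs = {}
    if limit is not None:
        shared_kwargs['limit'] = limit
    return StubLoader().load(query_string, **shared_kwargs)

print(run('SELECT 1'))            # no limit forwarded
print(run('SELECT 1', limit=10))  # limit forwarded
```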
```diff
@@ -657,7 +733,7 @@ def build_command_line_arguments(
     test_execution: bool = False,
 ) -> Tuple[str, List[str], Dict]:
     variables = merge_dict(
-        variables,
+        variables or {},
         get_global_variables(block.pipeline.uuid) if block.pipeline else {},
     )
     dbt_command = 'test' if run_tests else 'run'
```
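The `or {}` guard protects `merge_dict` from a `None` first argument when no variables are passed in. A sketch with a simplified stand-in for `merge_dict`:

```python
def merge_dict(left: dict, right: dict) -> dict:
    # Simplified stand-in for mage_ai.shared.hash.merge_dict.
    return {**left, **right}

variables = None
# merge_dict(variables, {...}) would raise TypeError on the ** unpack;
# the `or {}` guard added above avoids that.
print(merge_dict(variables or {}, {'env': 'dev'}))  # {'env': 'dev'}
```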
```diff
@@ -45,7 +45,8 @@ def create_block_runs_from_dynamic_block(
 
     values = []
     block_metadata = []
-    for idx, output_name in enumerate(block.output_variables(execution_partition=execution_partition)):
+    output_vars = block.output_variables(execution_partition=execution_partition)
+    for idx, output_name in enumerate(output_vars):
         if idx == 0:
             values = block.pipeline.variable_manager.get_variable(
                 block.pipeline.uuid,
```
```diff
@@ -470,12 +470,14 @@ class Pipeline:
 
     async def to_dict_async(
         self,
-        include_content: bool = False,
-        include_outputs: bool = False,
-        sample_count: int = None,
+        include_block_metadata: bool = False,
+        include_content: bool = False,
+        include_outputs: bool = False,
+        sample_count: int = None,
     ):
         blocks_data = await asyncio.gather(
             *[b.to_dict_async(
+                include_block_metadata=include_block_metadata,
                 include_content=include_content,
                 include_outputs=include_outputs,
                 sample_count=sample_count,
```
```diff
@@ -37,7 +37,7 @@ class RepoConfig:
         # Priority:
         # 1. 'variables_dir' from config_dict
         # 1. os.getenv(MAGE_DATA_DIR_ENV_VAR)
-        # 2. 'variables_dir' from
+        # 2. 'variables_dir' from project's metadata.yaml file
         # 3. DEFAULT_MAGE_DATA_DIR
         if config_dict is not None and config_dict.get('variables_dir'):
             self.variables_dir = config_dict.get('variables_dir')
```
```diff
@@ -31,7 +31,7 @@ class LocalStorage(BaseStorage):
         os.remove(path)
 
     def remove_dir(self, path: str) -> None:
-        shutil.rmtree(path)
+        shutil.rmtree(path, ignore_errors=True)
 
     def read_json_file(self, file_path: str, default_value={}) -> Dict:
         if not self.path_exists(file_path):
```
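`ignore_errors=True` makes directory removal idempotent: deleting a path that is already gone no longer raises. A quick illustration with the standard library:

```python
import shutil

# With ignore_errors, removing a missing directory is a silent no-op.
shutil.rmtree('/tmp/definitely_missing_dir', ignore_errors=True)

# Without it, rmtree raises FileNotFoundError on a missing path.
try:
    shutil.rmtree('/tmp/definitely_missing_dir')
except FileNotFoundError as err:
    print(f'would have crashed: {err}')
```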
```diff
@@ -1,16 +1,14 @@
-from pandas import DataFrame
-
 if 'data_exporter' not in globals():
     from mage_ai.data_preparation.decorators import data_exporter
 
 
 @data_exporter
-def export_data(df: DataFrame, **kwargs) -> None:
+def export_data(*args, **kwargs):
     """
     Exports data to some source
 
     Args:
-        df: The output from the upstream parent block
+        args: The input variables from upstream blocks
 
     Output (optional):
         Optionally return any object and it'll be logged and
```
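Switching the generated signature to `*args, **kwargs` lets an exporter receive any number of upstream outputs rather than a single DataFrame. A sketch of what a block rendered from this template might look like, with the decorator stubbed out:

```python
def data_exporter(func):  # stub for mage_ai.data_preparation.decorators
    return func

@data_exporter
def export_data(*args, **kwargs):
    """
    Exports data to some source

    Args:
        args: The input variables from upstream blocks
    """
    for idx, upstream_output in enumerate(args):
        print(f'exporting output {idx}: {upstream_output!r}')

export_data([1, 2, 3], {'rows': 2})  # two upstream outputs, both accepted
```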
```diff
@@ -3,12 +3,12 @@ if 'data_exporter' not in globals():
 
 
 @data_exporter
-def export_data(
+def export_data(*args, **kwargs):
     """
     Exports data to some source
 
     Args:
-
+        args: The input variables from upstream blocks
 
     Output (optional):
         Optionally return any object and it'll be logged and
```
```diff
@@ -11,7 +11,7 @@ def load_data(**kwargs):
     Template code for loading data from any source.
 
     Returns:
-
+        Anything (e.g. data frame, dictionary, array, int, str, etc.)
     """
     # Specify your data loading logic here
     {{ code }}
```
```diff
@@ -9,8 +9,8 @@ if 'test' not in globals():
 
 
 @test
-def test_output(
+def test_output(output, *args) -> None:
     """
     Template code for testing the output of the block.
     """
-    assert
+    assert output is not None, 'The output is undefined'
```
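The rendered test now ships with a working body instead of a dangling `assert`. A sketch of the generated function with the `@test` decorator stubbed out:

```python
def test(func):  # stub for the @test decorator injected by mage-ai
    return func

@test
def test_output(output, *args) -> None:
    """
    Template code for testing the output of the block.
    """
    assert output is not None, 'The output is undefined'

test_output([1, 2, 3])  # passes
# test_output(None)     # would raise AssertionError: The output is undefined
```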
```diff
@@ -4,6 +4,8 @@ from mage_ai.data_preparation.repo_manager import get_repo_path
 from mage_ai.io.config import ConfigFileLoader
 from mage_ai.io.{{ data_source }} import {{ data_source_handler }}
 from os import path
+from pandas import DataFrame
+
 {{ super() -}}
 {% endblock %}
 
```