ddeutil-workflow 0.0.1__tar.gz → 0.0.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/PKG-INFO +26 -15
  2. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/README.md +23 -11
  3. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/pyproject.toml +8 -6
  4. ddeutil_workflow-0.0.2/src/ddeutil/workflow/__about__.py +1 -0
  5. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/src/ddeutil/workflow/conn.py +31 -29
  6. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/src/ddeutil/workflow/dataset.py +1 -5
  7. ddeutil_workflow-0.0.2/src/ddeutil/workflow/exceptions.py +32 -0
  8. ddeutil_workflow-0.0.2/src/ddeutil/workflow/loader.py +146 -0
  9. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/src/ddeutil/workflow/pipeline.py +201 -83
  10. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/src/ddeutil/workflow/schedule.py +3 -8
  11. ddeutil_workflow-0.0.2/src/ddeutil/workflow/tasks/__init__.py +6 -0
  12. ddeutil_workflow-0.0.2/src/ddeutil/workflow/tasks/_pandas.py +54 -0
  13. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/src/ddeutil/workflow/tasks/_polars.py +45 -2
  14. ddeutil_workflow-0.0.2/src/ddeutil/workflow/utils.py +65 -0
  15. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/src/ddeutil_workflow.egg-info/PKG-INFO +26 -15
  16. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/src/ddeutil_workflow.egg-info/SOURCES.txt +3 -5
  17. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/src/ddeutil_workflow.egg-info/requires.txt +2 -3
  18. ddeutil_workflow-0.0.2/tests/test_loader.py +6 -0
  19. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/tests/test_loader_simple.py +3 -8
  20. ddeutil_workflow-0.0.2/tests/test_pipeline_params.py +12 -0
  21. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/tests/test_pipeline_run.py +6 -6
  22. ddeutil_workflow-0.0.2/tests/test_pipeline_task.py +21 -0
  23. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/tests/test_schedule.py +2 -29
  24. ddeutil_workflow-0.0.1/src/ddeutil/workflow/__about__.py +0 -1
  25. ddeutil_workflow-0.0.1/src/ddeutil/workflow/exceptions.py +0 -82
  26. ddeutil_workflow-0.0.1/src/ddeutil/workflow/hooks/__init__.py +0 -9
  27. ddeutil_workflow-0.0.1/src/ddeutil/workflow/hooks/_postgres.py +0 -2
  28. ddeutil_workflow-0.0.1/src/ddeutil/workflow/loader.py +0 -310
  29. ddeutil_workflow-0.0.1/src/ddeutil/workflow/tasks/__init__.py +0 -10
  30. ddeutil_workflow-0.0.1/src/ddeutil/workflow/utils/receive.py +0 -33
  31. ddeutil_workflow-0.0.1/src/ddeutil/workflow/utils/selection.py +0 -2
  32. ddeutil_workflow-0.0.1/src/ddeutil/workflow/vendors/__init__.py +0 -0
  33. ddeutil_workflow-0.0.1/tests/test_loader.py +0 -51
  34. ddeutil_workflow-0.0.1/tests/test_pipeline_task.py +0 -11
  35. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/LICENSE +0 -0
  36. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/setup.cfg +0 -0
  37. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/src/ddeutil/workflow/__init__.py +0 -0
  38. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/src/ddeutil/workflow/__regex.py +0 -0
  39. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/src/ddeutil/workflow/__types.py +0 -0
  40. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/src/ddeutil/workflow/vendors/__dict.py +0 -0
  41. {ddeutil_workflow-0.0.1/src/ddeutil/workflow/utils → ddeutil_workflow-0.0.2/src/ddeutil/workflow/vendors}/__init__.py +0 -0
  42. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/src/ddeutil/workflow/vendors/__schedule.py +0 -0
  43. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/src/ddeutil/workflow/vendors/aws_warpped.py +0 -0
  44. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/src/ddeutil/workflow/vendors/minio_warpped.py +0 -0
  45. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/src/ddeutil/workflow/vendors/sftp_wrapped.py +0 -0
  46. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
  47. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
  48. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/tests/test_base_data.py +0 -0
  49. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/tests/test_base_local_and_global.py +0 -0
  50. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/tests/test_base_regex.py +0 -0
  51. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/tests/test_conn.py +0 -0
  52. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/tests/test_dataset.py +0 -0
  53. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/tests/test_pipeline.py +0 -0
{ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: ddeutil-workflow
- Version: 0.0.1
+ Version: 0.0.2
  Summary: Data Developer & Engineer Workflow Utility Objects
  Author-email: ddeutils <korawich.anu@gmail.com>
  License: MIT
@@ -23,15 +23,14 @@ Description-Content-Type: text/markdown
  License-File: LICENSE
  Requires-Dist: fmtutil
  Requires-Dist: ddeutil-io
- Requires-Dist: ddeutil-model
  Requires-Dist: python-dotenv
  Provides-Extra: test
  Requires-Dist: sqlalchemy==2.0.30; extra == "test"
  Requires-Dist: paramiko==3.4.0; extra == "test"
  Requires-Dist: sshtunnel==0.4.0; extra == "test"
- Requires-Dist: boto3==1.34.109; extra == "test"
+ Requires-Dist: boto3==1.34.117; extra == "test"
  Requires-Dist: fsspec==2024.5.0; extra == "test"
- Requires-Dist: polars==0.20.26; extra == "test"
+ Requires-Dist: polars==0.20.31; extra == "test"
  Requires-Dist: pyarrow==16.1.0; extra == "test"

  # Data Utility: _Workflow_
@@ -83,6 +82,9 @@ The first step, you should start create the connections and datasets for In and
  Out of you data that want to use in pipeline of workflow. Some of this component
  is similar component of the **Airflow** because I like it concepts.

+ The main feature of this project is the `Pipeline` object that can call any
+ registried function.
+
  ### Connection

  The connection for worker able to do any thing.
@@ -155,8 +157,10 @@ The state of doing lists that worker should to do. It be collection of the stage
  run_py_local:
  type: ddeutil.workflow.pipe.Pipeline
  params:
- author-run: utils.receive.string
- run-date: utils.receive.datetime
+ author-run:
+ type: str
+ run-date:
+ type: datetime
  jobs:
  first-job:
  stages:
@@ -203,13 +207,15 @@ pipe.execute(params={'author-run': 'Local Workflow', 'run-date': '2024-01-01'})
  pipe_el_pg_to_lake:
  type: ddeutil.workflow.pipe.Pipeline
  params:
- run-date: utils.receive.datetime
- author-email: utils.receive.string
+ run-date:
+ type: datetime
+ author-email:
+ type: str
  jobs:
  extract-load:
  stages:
  - name: "Extract Load from Postgres to Lake"
- id: extract
+ id: extract-load
  task: tasks/postgres-to-delta@polars
  with:
  source:
@@ -222,21 +228,26 @@ pipe_el_pg_to_lake:
  endpoint: "/${{ params.name }}"
  ```

- ### Hooks (Transform)
+ ### Tasks (Transform)

  ```yaml
  pipe_hook_mssql_proc:
  type: ddeutil.workflow.pipe.Pipeline
  params:
- run_date: utils.receive.datetime
- sp_name: utils.receive.string
- source_name: utils.receive.string
- target_name: utils.receive.string
+ run_date:
+ type: datetime
+ sp_name:
+ type: str
+ source_name:
+ type: str
+ target_name:
+ type: str
  jobs:
  transform:
  stages:
  - name: "Transform Data in MS SQL Server"
- hook: hooks/mssql-proc@odbc
+ id: transform
+ task: tasks/mssql-proc@odbc
  with:
  exec: ${{ params.sp_name }}
  params:
{ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/README.md

@@ -47,6 +47,9 @@ The first step, you should start create the connections and datasets for In and
  Out of you data that want to use in pipeline of workflow. Some of this component
  is similar component of the **Airflow** because I like it concepts.

+ The main feature of this project is the `Pipeline` object that can call any
+ registried function.
+
  ### Connection

  The connection for worker able to do any thing.
@@ -119,8 +122,10 @@ The state of doing lists that worker should to do. It be collection of the stage
  run_py_local:
  type: ddeutil.workflow.pipe.Pipeline
  params:
- author-run: utils.receive.string
- run-date: utils.receive.datetime
+ author-run:
+ type: str
+ run-date:
+ type: datetime
  jobs:
  first-job:
  stages:
@@ -167,13 +172,15 @@ pipe.execute(params={'author-run': 'Local Workflow', 'run-date': '2024-01-01'})
  pipe_el_pg_to_lake:
  type: ddeutil.workflow.pipe.Pipeline
  params:
- run-date: utils.receive.datetime
- author-email: utils.receive.string
+ run-date:
+ type: datetime
+ author-email:
+ type: str
  jobs:
  extract-load:
  stages:
  - name: "Extract Load from Postgres to Lake"
- id: extract
+ id: extract-load
  task: tasks/postgres-to-delta@polars
  with:
  source:
@@ -186,21 +193,26 @@ pipe_el_pg_to_lake:
  endpoint: "/${{ params.name }}"
  ```

- ### Hooks (Transform)
+ ### Tasks (Transform)

  ```yaml
  pipe_hook_mssql_proc:
  type: ddeutil.workflow.pipe.Pipeline
  params:
- run_date: utils.receive.datetime
- sp_name: utils.receive.string
- source_name: utils.receive.string
- target_name: utils.receive.string
+ run_date:
+ type: datetime
+ sp_name:
+ type: str
+ source_name:
+ type: str
+ target_name:
+ type: str
  jobs:
  transform:
  stages:
  - name: "Transform Data in MS SQL Server"
- hook: hooks/mssql-proc@odbc
+ id: transform
+ task: tasks/mssql-proc@odbc
  with:
  exec: ${{ params.sp_name }}
  params:
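The README hunks above replace the `utils.receive.*` caller strings in `params` with typed declarations (`type: str`, `type: datetime`) and rename the hook stage to a task stage. A minimal sketch of loading and running the `run_py_local` pipeline shown above, assuming a `./workflows-conf.yaml` exists for the new `Loader` (added in `src/ddeutil/workflow/loader.py` later in this diff) and that the `type:` key resolves to the package's `Pipeline` model with the `execute` method used in the README example; the param values are illustrative only:

```python
# A sketch only, not the package's documented entrypoint.
from ddeutil.workflow.loader import Loader

# Reads ./workflows-conf.yaml (or an explicit `path`) for the conf paths,
# then searches those paths for a YAML mapping keyed `run_py_local`.
loader = Loader("run_py_local", externals={})

# Resolves the `type:` key and validates the config data against that model.
pipe = loader.load()

# Execute with params matching the typed declarations from the YAML above.
pipe.execute(params={"author-run": "Local Workflow", "run-date": "2024-01-01"})
```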
{ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/pyproject.toml

@@ -28,7 +28,6 @@ requires-python = ">=3.9.13"
  dependencies = [
  "fmtutil",
  "ddeutil-io",
- "ddeutil-model",
  "python-dotenv",
  ]
  dynamic = ["version"]
@@ -40,15 +39,15 @@ Homepage = "https://github.com/ddeutils/ddeutil-workflow/"
  [project.optional-dependencies]
  test = [
  "sqlalchemy==2.0.30",
- # SFTP warpper
+ # SFTP
  "paramiko==3.4.0",
  "sshtunnel==0.4.0",
- # AWS
- "boto3==1.34.109",
+ # AWS Client
+ "boto3==1.34.117",
  # Open files
  "fsspec==2024.5.0",
  # Polars
- "polars==0.20.26",
+ "polars==0.20.31",
  "pyarrow==16.1.0",
  ]

@@ -133,4 +132,7 @@ ignore = [
  ]

  [tool.ruff.lint.per-file-ignores]
- "__init__.py" = ["F401"]
+ "__init__.py" = [
+ "F401",
+ "F403",
+ ]
ddeutil_workflow-0.0.2/src/ddeutil/workflow/__about__.py

@@ -0,0 +1 @@
+ __version__: str = "0.0.2"
{ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/src/ddeutil/workflow/conn.py

@@ -10,7 +10,7 @@ from collections.abc import Iterator
  from pathlib import Path
  from typing import Annotated, Any, Literal, Optional, TypeVar

- from ddeutil.model.conn import Conn as ConnModel
+ from ddeutil.io.models.conn import Conn as ConnModel
  from pydantic import BaseModel, ConfigDict, Field
  from pydantic.functional_validators import field_validator
  from pydantic.types import SecretStr
@@ -43,27 +43,15 @@ class BaseConn(BaseModel):
  ]

  @classmethod
- def from_loader(
- cls,
- name: str,
- externals: DictData,
- ) -> Self:
- """Construct Connection with Loader object with specific config name.
-
- :param name:
- :param externals:
- """
- loader: Loader = Loader(name, externals=externals)
- # NOTE: Validate the config type match with current connection model
- if loader.type != cls:
- raise ValueError(f"Type {loader.type} does not match with {cls}")
+ def from_dict(cls, values: DictData):
+ """Construct Connection with dict of data"""
  filter_data: DictData = {
- k: loader.data.pop(k)
- for k in loader.data.copy()
+ k: values.pop(k)
+ for k in values.copy()
  if k not in cls.model_fields and k not in EXCLUDED_EXTRAS
  }
- if "url" in loader.data:
- url: ConnModel = ConnModel.from_url(loader.data.pop("url"))
+ if "url" in values:
+ url: ConnModel = ConnModel.from_url(values.pop("url"))
  return cls(
  dialect=url.dialect,
  host=url.host,
@@ -73,24 +61,38 @@ class BaseConn(BaseModel):
  # NOTE:
  # I will replace None endpoint with memory value for SQLite
  # connection string.
- endpoint=cls.__prepare_slash_from_url(url.endpoint or "memory"),
+ endpoint=(url.endpoint or "memory"),
  # NOTE: This order will show that externals this the top level.
- extras=(url.options | filter_data | externals),
+ extras=(url.options | filter_data),
  )
  return cls.model_validate(
  obj={
- "extras": (
- loader.data.pop("extras", {}) | filter_data | externals
- ),
- **loader.data,
+ "extras": (values.pop("extras", {}) | filter_data),
+ **values,
  }
  )

  @classmethod
- def __prepare_slash_from_url(cls, value: str) -> str:
- if value.startswith("/"):
- return value[1:]
- return value
+ def from_loader(
+ cls,
+ name: str,
+ externals: DictData,
+ ) -> Self:
+ """Construct Connection with Loader object with specific config name.
+
+ :param name:
+ :param externals:
+ """
+ loader: Loader = Loader(name, externals=externals)
+ # NOTE: Validate the config type match with current connection model
+ if loader.type != cls:
+ raise ValueError(f"Type {loader.type} does not match with {cls}")
+ return cls.from_dict(
+ {
+ "extras": (loader.data.pop("extras", {}) | externals),
+ **loader.data,
+ }
+ )

  @field_validator("endpoint")
  def __prepare_slash(cls, value: str) -> str:
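The refactor above splits connection construction into a plain `from_dict` classmethod, with `from_loader` now delegating to it after merging `externals` into `extras`. A hedged sketch of calling `from_dict` directly on a URL-style mapping; the URL and the extra key are illustrative, and whether this exact mapping validates depends on `BaseConn` fields not shown in this hunk:

```python
# Illustrative only: exercises the from_dict path added in 0.0.2.
from ddeutil.workflow.conn import BaseConn

conn = BaseConn.from_dict(
    {
        # "url" is popped and parsed via ddeutil.io's ConnModel.from_url,
        # as shown in the hunk above.
        "url": "postgresql://user:secret@localhost:5432/warehouse",
        # Keys that are not model fields (e.g. this one) are collected into
        # `extras` alongside the parsed URL options.
        "connect_timeout": 30,
    }
)
print(conn.endpoint, conn.extras)
```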
{ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.2}/src/ddeutil/workflow/dataset.py

@@ -25,11 +25,7 @@ from .conn import SubclassConn
  from .loader import Loader

  EXCLUDED_EXTRAS: TupleStr = ("type",)
- OBJ_FMTS: FormatterGroupType = make_group(
- {
- "datetime": Datetime,
- }
- )
+ OBJ_FMTS: FormatterGroupType = make_group({"datetime": Datetime})


  class BaseDataset(BaseModel):
ddeutil_workflow-0.0.2/src/ddeutil/workflow/exceptions.py

@@ -0,0 +1,32 @@
+ # ------------------------------------------------------------------------------
+ # Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+ # Licensed under the MIT License. See LICENSE in the project root for
+ # license information.
+ # ------------------------------------------------------------------------------
+ """
+ Define Errors Object for Node package
+ """
+ from __future__ import annotations
+
+
+ class BaseError(Exception):
+ """Base Error Object that use for catch any errors statement of
+ all step in this src
+ """
+
+
+ class WorkflowBaseError(BaseError):
+ """Core Base Error object"""
+
+
+ class ConfigNotFound(WorkflowBaseError):
+ """Error raise for a method not found the config file or data."""
+
+
+ class PyException(Exception): ...
+
+
+ class ShellException(Exception): ...
+
+
+ class TaskException(Exception): ...
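The new module replaces the much larger 0.0.1 exceptions file (82 lines removed) with a small error hierarchy plus marker exceptions for Python, shell, and task stages. A brief sketch of separating task failures from config lookup failures; the `run_stage` helper here is hypothetical and exists only to show the exception types in use:

```python
from ddeutil.workflow.exceptions import ConfigNotFound, TaskException


def run_stage(stage: dict) -> None:
    """Hypothetical stage runner used only to illustrate the exception types."""
    if "task" not in stage:
        raise ConfigNotFound(f"Stage {stage.get('name')!r} has no task config")
    raise TaskException(f"task {stage['task']!r} failed")


try:
    run_stage(
        {
            "name": "Extract Load from Postgres to Lake",
            "task": "tasks/postgres-to-delta@polars",
        }
    )
except TaskException as err:
    print(f"Task error: {err}")
except ConfigNotFound as err:
    print(f"Config error: {err}")
```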
ddeutil_workflow-0.0.2/src/ddeutil/workflow/loader.py

@@ -0,0 +1,146 @@
+ # ------------------------------------------------------------------------------
+ # Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+ # Licensed under the MIT License. See LICENSE in the project root for
+ # license information.
+ # ------------------------------------------------------------------------------
+ from __future__ import annotations
+
+ from functools import cached_property
+ from typing import Any, TypeVar
+
+ from ddeutil.core import (
+ getdot,
+ hasdot,
+ import_string,
+ )
+ from ddeutil.io import (
+ ConfigNotFound,
+ Params,
+ PathSearch,
+ YamlEnvFl,
+ )
+ from pydantic import BaseModel
+
+ from .__regex import RegexConf
+ from .__types import DictData
+
+ T = TypeVar("T")
+ BaseModelType = type[BaseModel]
+ AnyModel = TypeVar("AnyModel", bound=BaseModel)
+
+
+ class SimLoad:
+ """Simple Load Object that will search config data by name.
+
+ :param name: A name of config data that will read by Yaml Loader object.
+ :param params: A Params model object.
+ :param externals: An external parameters
+
+ Note:
+ The config data should have ``type`` key for engine can know what is
+ config should to do next.
+ """
+
+ import_prefix: str = "ddeutil.workflow"
+
+ def __init__(
+ self,
+ name: str,
+ params: Params,
+ externals: DictData,
+ ) -> None:
+ self.data: DictData = {}
+ for file in PathSearch(params.engine.paths.conf).files:
+ if any(file.suffix.endswith(s) for s in ("yml", "yaml")) and (
+ data := YamlEnvFl(file).read().get(name, {})
+ ):
+ self.data = data
+ if not self.data:
+ raise ConfigNotFound(f"Config {name!r} does not found on conf path")
+ self.__conf_params: Params = params
+ self.externals: DictData = externals
+
+ @property
+ def conf_params(self) -> Params:
+ return self.__conf_params
+
+ @cached_property
+ def type(self) -> BaseModelType:
+ """Return object type which implement in `config_object` key."""
+ if not (_typ := self.data.get("type")):
+ raise ValueError(
+ f"the 'type' value: {_typ} does not exists in config data."
+ )
+ try:
+ # NOTE: Auto adding module prefix if it does not set
+ return import_string(f"ddeutil.workflow.{_typ}")
+ except ModuleNotFoundError:
+ return import_string(f"{_typ}")
+
+ def load(self) -> AnyModel:
+ return self.type.model_validate(self.data)
+
+
+ class Loader(SimLoad):
+ """Main Loader Object.
+
+ :param name: A name of config data that will read by Yaml Loader object.
+ :param externals: An external parameters
+ """
+
+ def __init__(
+ self,
+ name: str,
+ externals: DictData,
+ *,
+ path: str | None = None,
+ ) -> None:
+ self.data: DictData = {}
+
+ # NOTE: import params object from specific config file
+ params: Params = self.config(path)
+
+ super().__init__(name, params, externals)
+
+ @classmethod
+ def config(cls, path: str | None = None) -> Params:
+ """Load Config data from ``workflows-conf.yaml`` file."""
+ return Params.model_validate(
+ YamlEnvFl(path or "./workflows-conf.yaml").read()
+ )
+
+
+ def map_params(value: Any, params: dict[str, Any]) -> Any:
+ """Map caller value that found from ``RE_CALLER`` regex.
+
+ :rtype: Any
+ :returns: An any getter value from the params input.
+ """
+ if isinstance(value, dict):
+ return {k: map_params(value[k], params) for k in value}
+ elif isinstance(value, (list, tuple, set)):
+ return type(value)([map_params(i, params) for i in value])
+ elif not isinstance(value, str):
+ return value
+
+ if not (found := RegexConf.RE_CALLER.search(value)):
+ return value
+
+ # NOTE: get caller value that setting inside; ``${{ <caller-value> }}``
+ caller: str = found.group("caller")
+ if not hasdot(caller, params):
+ raise ValueError(f"params does not set caller: {caller!r}")
+ getter: Any = getdot(caller, params)
+
+ # NOTE: check type of vars
+ if isinstance(getter, (str, int)):
+ return value.replace(found.group(0), str(getter))
+
+ # NOTE:
+ # If type of getter caller does not formatting, it will return origin
+ # value.
+ if value.replace(found.group(0), "") != "":
+ raise ValueError(
+ "Callable variable should not pass other outside ${{ ... }}"
+ )
+ return getter
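The new `map_params` walks dicts, lists, and strings, and substitutes `${{ ... }}` callers with values looked up through `getdot`. A hedged usage sketch, assuming `RegexConf.RE_CALLER` matches the `${{ params.<name> }}` template style used in the README examples and that the `params` argument is the mapping those dotted paths resolve against:

```python
# Illustrative input shapes; the exact template grammar lives in
# ddeutil.workflow.__regex.RegexConf and is not shown in this diff.
from ddeutil.workflow.loader import map_params

stage_with = {
    "exec": "${{ params.sp_name }}",
    "target": {"endpoint": "/${{ params.name }}"},
}
values = {"params": {"sp_name": "proc_transform_sales", "name": "lakehouse"}}

resolved = map_params(stage_with, values)
# Expected shape (illustrative):
# {"exec": "proc_transform_sales", "target": {"endpoint": "/lakehouse"}}
print(resolved)
```

String values are rewritten in place when the resolved value is a `str` or `int`; any other type (for example a `datetime`) is only returned as-is when the template is the entire string, otherwise a `ValueError` is raised.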