motia 0.5.11-beta.119 → 0.5.11-beta.120-742949
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/cloud/build/builders/python/index.js +21 -2
- package/dist/cjs/cloud/build/builders/python/python-builder.py +120 -28
- package/dist/cjs/create/templates/basic-tutorial/01-api.step.ts.txt +1 -33
- package/dist/cjs/create/templates/basic-tutorial/02-process-food-order.step.ts.txt +6 -30
- package/dist/cjs/create/templates/basic-tutorial/04_new_order_notifications.step.py.txt +6 -1
- package/dist/cjs/create/templates/basic-tutorial/motia-workbench.json +13 -12
- package/dist/cjs/create/templates/generate.js +3 -1
- package/dist/cjs/create/templates/generate.ts +3 -1
- package/dist/esm/cloud/build/builders/python/index.js +21 -2
- package/dist/esm/cloud/build/builders/python/python-builder.py +120 -28
- package/dist/esm/create/templates/basic-tutorial/01-api.step.ts.txt +1 -33
- package/dist/esm/create/templates/basic-tutorial/02-process-food-order.step.ts.txt +6 -30
- package/dist/esm/create/templates/basic-tutorial/04_new_order_notifications.step.py.txt +6 -1
- package/dist/esm/create/templates/basic-tutorial/motia-workbench.json +13 -12
- package/dist/esm/create/templates/generate.js +3 -1
- package/dist/esm/create/templates/generate.ts +3 -1
- package/package.json +4 -4
- package/dist/cjs/create/templates/basic-tutorial/00-noop.step.ts.txt +0 -30
- package/dist/esm/create/templates/basic-tutorial/00-noop.step.ts.txt +0 -30
package/dist/cjs/cloud/build/builders/python/index.js

@@ -23,12 +23,21 @@ class PythonBuilder {
     const normalizedEntrypointPath = entrypointPath.replace(/[.]step.py$/, '_step.py');
     const sitePackagesDir = `${process.env.PYTHON_SITE_PACKAGES}-lambda`;
     // Get Python builder response
-    const { packages } = await this.getPythonBuilderData(step);
+    const { packages, local_files } = await this.getPythonBuilderData(step);
     // Add main file to archive
     if (!fs_1.default.existsSync(step.filePath)) {
       throw new Error(`Source file not found: ${step.filePath}`);
     }
     archive.append(fs_1.default.createReadStream(step.filePath), path_1.default.relative(this.builder.projectDir, normalizedEntrypointPath));
+    // Add local Python files to archive
+    if (local_files && local_files.length > 0) {
+      local_files.forEach((localFile) => {
+        const fullPath = path_1.default.join(this.builder.projectDir, localFile);
+        if (fs_1.default.existsSync(fullPath)) {
+          archive.append(fs_1.default.createReadStream(fullPath), localFile);
+        }
+      });
+    }
     await Promise.all(packages.map(async (packageName) => (0, add_package_to_archive_1.addPackageToArchive)(archive, sitePackagesDir, packageName)));
     return normalizedEntrypointPath;
   }

@@ -42,7 +51,7 @@ class PythonBuilder {
     fs_1.default.mkdirSync(path_1.default.dirname(outfile), { recursive: true });
     this.listener.onBuildStart(step);
     // Get Python builder response
-    const { packages } = await this.getPythonBuilderData(step);
+    const { packages, local_files } = await this.getPythonBuilderData(step);
     const stepArchiver = new archiver_1.Archiver(outfile);
     const stepPath = await this.buildStep(step, stepArchiver);
     // Add main file to archive

@@ -53,6 +62,16 @@ class PythonBuilder {
     // Add all imported files to archive
     this.listener.onBuildProgress(step, 'Adding imported files to archive...');
     const sitePackagesDir = `${process.env.PYTHON_SITE_PACKAGES}-lambda`;
+    // Add local Python files to archive
+    if (local_files && local_files.length > 0) {
+      local_files.forEach((localFile) => {
+        const fullPath = path_1.default.join(this.builder.projectDir, localFile);
+        if (fs_1.default.existsSync(fullPath)) {
+          stepArchiver.append(fs_1.default.createReadStream(fullPath), localFile);
+        }
+      });
+      this.listener.onBuildProgress(step, `Added ${local_files.length} local Python files to archive`);
+    }
     (0, include_static_files_1.includeStaticFiles)([step], this.builder, stepArchiver);
     if (packages.length > 0) {
       await Promise.all(packages.map(async (packageName) => (0, add_package_to_archive_1.addPackageToArchive)(stepArchiver, sitePackagesDir, packageName)));
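For orientation, the local_files field consumed above comes from the JSON that python-builder.py writes back to the bundler over the Node IPC file descriptor (see the python-builder.py diff below). A minimal sketch of such a payload, with purely illustrative values for a step that imports requests plus a local utils.example module:

# Hypothetical example of the message python-builder.py now emits.
# Field names match the diff; the concrete values are made up.
import json

output = {
    "packages": ["certifi", "charset_normalizer", "idna", "requests", "urllib3"],
    "local_files": ["utils/example.py"],
}
print(json.dumps(output))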
package/dist/cjs/cloud/build/builders/python/python-builder.py

@@ -82,6 +82,23 @@ def is_builtin_module(module_name: str) -> bool:
     """Check if a module is a Python built-in module."""
     if module_name in _builtin_modules_cache:
         return True
+
+    # First check if it's a known built-in module name
+    builtin_modules = {
+        'os', 'sys', 'json', 'math', 'random', 'datetime', 'time', 'urllib', 'http',
+        'pathlib', 're', 'collections', 'itertools', 'functools', 'operator', 'typing',
+        'io', 'csv', 'xml', 'html', 'email', 'base64', 'hashlib', 'hmac', 'uuid',
+        'pickle', 'sqlite3', 'logging', 'unittest', 'argparse', 'configparser',
+        'tempfile', 'shutil', 'glob', 'fnmatch', 'subprocess', 'threading', 'queue',
+        'multiprocessing', 'concurrent', 'asyncio', 'socket', 'ssl', 'gzip', 'zipfile',
+        'tarfile', 'zlib', 'bz2', 'lzma', 'struct', 'array', 'ctypes', 'mmap',
+        'weakref', 'gc', 'inspect', 'dis', 'ast', 'token', 'tokenize', 'keyword',
+        'builtins', '__main__', 'site', 'sysconfig', 'platform', 'warnings'
+    }
+
+    if module_name in builtin_modules:
+        _builtin_modules_cache.add(module_name)
+        return True

     try:
         module = importlib.import_module(module_name)

@@ -100,6 +117,8 @@ def is_builtin_module(module_name: str) -> bool:
         _builtin_modules_cache.add(module_name)
         return is_builtin
     except ImportError:
+        # If we can't import it, assume it's not a built-in module
+        # This handles local modules that aren't in the current Python path
         return False

 def get_direct_imports(file_path: str) -> Set[str]:

@@ -127,6 +146,101 @@ def get_direct_imports(file_path: str) -> Set[str]:

     return direct_imports

+def get_all_python_files(project_root: str) -> List[str]:
+    """Get all Python files in the project."""
+    python_files = []
+    for root, dirs, files in os.walk(project_root):
+        # Skip common directories
+        dirs[:] = [d for d in dirs if not d.startswith('.') and d not in
+                   {'__pycache__', 'node_modules', 'dist', 'build', 'venv'}]
+
+        for file in files:
+            if file.endswith('.py') and not file.startswith('.'):
+                full_path = os.path.join(root, file)
+                relative_path = os.path.relpath(full_path, project_root)
+                python_files.append(relative_path)
+
+    return python_files
+
+def get_imports_from_file(file_path: str) -> Set[str]:
+    """Get all import module names from a Python file."""
+    imports = set()
+
+    try:
+        with open(file_path, 'r') as f:
+            content = f.read()
+
+        tree = ast.parse(content)
+        for node in ast.walk(tree):
+            if isinstance(node, ast.Import):
+                for name in node.names:
+                    imports.add(name.name)
+            elif isinstance(node, ast.ImportFrom):
+                if node.module:
+                    imports.add(node.module)
+    except Exception as e:
+        print(f"Warning: Could not parse imports from {file_path}: {str(e)}")
+
+    return imports
+
+def get_local_files_for_entry(entry_file: str) -> List[str]:
+    """Get local Python files that are imported by the entry file."""
+    # Find project root
+    project_root = os.path.dirname(entry_file)
+    while project_root != os.path.dirname(project_root):
+        if any(os.path.exists(os.path.join(project_root, f))
+               for f in ['package.json', 'requirements.txt']):
+            break
+        project_root = os.path.dirname(project_root)
+
+    # Get all Python files in the project
+    all_python_files = get_all_python_files(project_root)
+
+    # Get imports from the entry file
+    imports = get_imports_from_file(entry_file)
+
+    # Check which imports match local Python files
+    local_files = []
+    for import_name in imports:
+        for py_file in all_python_files:
+            # Convert file path to module name (e.g., 'utils/example.py' -> 'utils.example')
+            module_name = py_file.replace(os.sep, '.').replace('.py', '')
+            if import_name == module_name:
+                local_files.append(py_file)
+
+    return sorted(local_files)
+
+def trace_imports(entry_file: str) -> List[str]:
+    """Find all imported Python packages from entry file and its local imports."""
+    entry_file = os.path.abspath(entry_file)
+
+    # Get local files that are imported
+    local_files = get_local_files_for_entry(entry_file)
+
+    # Get project root
+    project_root = os.path.dirname(entry_file)
+    while project_root != os.path.dirname(project_root):
+        if any(os.path.exists(os.path.join(project_root, f))
+               for f in ['package.json', 'requirements.txt']):
+            break
+        project_root = os.path.dirname(project_root)
+
+    # Get imports from entry file and local files
+    all_packages = set()
+    processed_packages = set()
+    files_to_process = [entry_file] + [os.path.join(project_root, f) for f in local_files]
+
+    for python_file in files_to_process:
+        if os.path.exists(python_file):
+            direct_imports = get_direct_imports(python_file)
+            for package_name in direct_imports:
+                if is_valid_package_name(package_name) and not is_builtin_module(package_name):
+                    all_packages.add(package_name)
+                    # Get all dependencies including sub-dependencies
+                    all_packages.update(get_package_dependencies(package_name, processed_packages))
+
+    return sorted(list(all_packages))
+
 @lru_cache(maxsize=1024)
 def is_optional_dependency(req: str) -> bool:
     """Check if a dependency is an optional dependency."""

@@ -175,33 +289,6 @@ def get_package_dependencies(package_name: str, processed: Set[str] = None) -> S

     return all_dependencies

-def trace_imports(entry_file: str) -> List[str]:
-    """Find all imported Python packages and files starting from an entry file."""
-    entry_file = os.path.abspath(entry_file)
-    module_dir = os.path.dirname(entry_file)
-
-    if module_dir not in sys.path:
-        sys.path.insert(0, module_dir)
-
-    # Get direct imports from the entry file
-    direct_imports = get_direct_imports(entry_file)
-
-    # Initialize sets to track packages
-    all_packages = set()
-    processed_packages = set()
-
-    # Process each direct import and its dependencies
-    for package_name in direct_imports:
-        if is_valid_package_name(package_name):
-            all_packages.add(package_name)
-            # Get all dependencies including sub-dependencies
-            all_packages.update(get_package_dependencies(package_name, processed_packages))
-
-    # Filter out built-in packages
-    non_builtin_packages = {pkg for pkg in all_packages if not is_builtin_module(pkg)}
-
-    return sorted(list(non_builtin_packages))
-
 def main() -> None:
     """Main entry point for the script."""
     if len(sys.argv) != 2:

@@ -211,8 +298,13 @@ def main() -> None:
     entry_file = sys.argv[1]
     try:
         packages = trace_imports(entry_file)
+        local_files = get_local_files_for_entry(entry_file)
+
+
+
         output = {
-            'packages': packages
+            'packages': packages,
+            'local_files': local_files
         }
         bytes_message = (json.dumps(output) + '\n').encode('utf-8')
         os.write(NODEIPCFD, bytes_message)
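Worth noting: get_local_files_for_entry decides that an import is local by comparing the imported module name against every project file path with its separators swapped for dots. A small standalone sketch of that rule (the file names here are hypothetical):

import os

def is_local_import(import_name: str, py_file: str) -> bool:
    # Mirrors the comparison used in get_local_files_for_entry:
    # 'utils/example.py' -> 'utils.example'
    return import_name == py_file.replace(os.sep, '.').replace('.py', '')

example = os.path.join('utils', 'example.py')
assert is_local_import('utils.example', example)
assert not is_local_import('utils', example)     # bare package imports do not match
assert not is_local_import('requests', example)  # third-party names fall through to packages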
package/dist/cjs/create/templates/basic-tutorial/01-api.step.ts.txt

@@ -5,18 +5,11 @@ import { petStoreService } from './services/pet-store'
 export const config: ApiRouteConfig = {
   type: 'api',
   name: 'ApiTrigger',
-  description:
-    'basic-tutorial api trigger, it uses the petstore public api to create a new pet and emits a topic to proces an order if an item is included.',
-  /**
-   * The flows this step belongs to, will be available in Workbench
-   */
+  description: 'basic-tutorial api trigger',
   flows: ['basic-tutorial'],

   method: 'POST',
   path: '/basic-tutorial',
-  /**
-   * Expected request body for type checking and documentation
-   */
   bodySchema: z.object({
     pet: z.object({
       name: z.string(),

@@ -29,44 +22,22 @@ export const config: ApiRouteConfig = {
     })
       .optional(),
   }),
-
-  /**
-   * Expected response body for type checking and documentation
-   */
   responseSchema: {
     200: z.object({
       message: z.string(),
       traceId: z.string(),
     }),
   },
-
-  /**
-   * This API Step emits events to topic `process-food-order`
-   */
   emits: ['process-food-order'],
-
-  /**
-   * We're using virtual subscribes to virtually connect noop step
-   * to this step.
-   *
-   * Noop step is defined in noop.step.ts
-   */
-  virtualSubscribes: ['/basic-tutorial'],
 }

 export const handler: Handlers['ApiTrigger'] = async (req, { logger, emit, traceId }) => {
-  /**
-   * Avoid usage of console.log, use logger instead
-   */
   logger.info('Step 01 – Processing API Step', { body: req.body })

   const { pet, foodOrder } = req.body

   const newPetRecord = await petStoreService.createPet(pet)

-  /**
-   * Emit events to the topics to process asynchronously
-   */
   if (foodOrder) {
     await emit({
       topic: 'process-food-order',

@@ -77,9 +48,6 @@ export const handler: Handlers['ApiTrigger'] = async (req, { logger, emit, trace
     })
   }

-  /**
-   * Return data back to the client
-   */
   return {
     status: 200,
     body: {
package/dist/cjs/create/templates/basic-tutorial/02-process-food-order.step.ts.txt

@@ -5,48 +5,24 @@ import { petStoreService } from './services/pet-store'
 export const config: EventConfig = {
   type: 'event',
   name: 'ProcessFoodOrder',
-  description:
-    'basic-tutorial event step, this example shows how to consume an event from a topic and persist data in state',
-  /**
-   * The flows this step belongs to, will be available in Workbench
-   */
+  description: 'basic-tutorial event step, demonstrates how to consume an event from a topic and persist data in state',
   flows: ['basic-tutorial'],
-
-  /**
-   * This step subscribes to the event `process-food-order` to
-   * be processed asynchronously.
-   */
   subscribes: ['process-food-order'],
-
-  /**
-   * It ultimately emits an event to `new-order-notification` topic.
-   */
   emits: ['new-order-notification'],
-
-
-
-
-
+  input: z.object({
+    id: z.string(),
+    quantity: z.number(),
+    petId: z.number(),
+  }),
 }

 export const handler: Handlers['ProcessFoodOrder'] = async (input, { traceId, logger, state, emit }) => {
-  /**
-   * Avoid usage of console.log, use logger instead
-   */
   logger.info('Step 02 – Process food order', { input, traceId })

   const order = await petStoreService.createOrder(input)

-  /**
-   * Persist content on state to be used by other steps
-   * or in other workflows later
-   */
   await state.set<string>('orders', order.id, order)

-  /**
-   * Emit events to the topics to process separately
-   * on another step
-   */
   await emit({
     topic: 'new-order-notification',
     data: { order_id: order.id },
package/dist/cjs/create/templates/basic-tutorial/04_new_order_notifications.step.py.txt

@@ -1,3 +1,8 @@
+from pydantic import BaseModel, Field
+
+class NewOrderNotificationInput(BaseModel):
+    order_id: str = Field(description="pet store order id")
+
 config = {
     "type": "event",
     "name": "NewOrderNotifications",

@@ -5,7 +10,7 @@ config = {
     "subscribes": ["new-order-notification"],
     "emits": [],
     "flows": ["basic-tutorial"],
-    "input":
+    "input": NewOrderNotificationInput.model_json_schema(),
 }

 async def handler(input, ctx):
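For context, the template's "input" field now holds the plain JSON-schema dict that pydantic generates for the model. A rough sketch of what the call returns (the exact dict depends on the installed pydantic v2 release):

from pydantic import BaseModel, Field

class NewOrderNotificationInput(BaseModel):
    order_id: str = Field(description="pet store order id")

# Approximately:
# {'properties': {'order_id': {'description': 'pet store order id',
#                              'title': 'Order Id', 'type': 'string'}},
#  'required': ['order_id'], 'title': 'NewOrderNotificationInput', 'type': 'object'}
print(NewOrderNotificationInput.model_json_schema())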
package/dist/cjs/create/templates/basic-tutorial/motia-workbench.json

@@ -3,24 +3,25 @@
     "id": "basic-tutorial",
     "config": {
       "steps/basic-tutorial/04_new_order_notifications.step.py": {
-        "x":
-        "y":
+        "x": 668,
+        "y": 266,
+        "targetHandlePosition": "left"
       },
       "steps/basic-tutorial/03-state-audit-cron.step.ts": {
-        "x":
-        "y":
+        "x": 224,
+        "y": 520
       },
       "steps/basic-tutorial/02-process-food-order.step.ts": {
-        "x":
-        "y":
+        "x": 220,
+        "y": 242,
+        "sourceHandlePosition": "right",
+        "targetHandlePosition": "left"
       },
       "steps/basic-tutorial/01-api.step.ts": {
-        "x": -
-        "y":
-
-
-        "x": -833,
-        "y": 492
+        "x": -243,
+        "y": 199,
+        "sourceHandlePosition": "right",
+        "targetHandlePosition": "left"
       }
     }
   }
package/dist/cjs/create/templates/generate.js

@@ -55,7 +55,9 @@ const generateTemplateSteps = (templateFolder) => {
         }
         const sanitizedFileName = fileName.replace('.txt', '');
         const isWorkbenchConfig = fileName.match('motia-workbench.json');
-        const generateFilePath = path.join(...(isWorkbenchConfig
+        const generateFilePath = path.join(...(isWorkbenchConfig
+            ? [rootDir.match(/steps/) ? path.join(rootDir, '..') : rootDir, sanitizedFileName]
+            : [rootDir, templateFolder, sanitizedFileName]));
         let content = await fs_1.promises.readFile(filePath, 'utf8');
         // Make sure statSync doesn't break the execution if the file doesn't exist
         try {
package/dist/cjs/create/templates/generate.ts

@@ -28,7 +28,9 @@ export const generateTemplateSteps = (templateFolder: string): Generator => {
     const sanitizedFileName = fileName.replace('.txt', '')
     const isWorkbenchConfig = fileName.match('motia-workbench.json')
     const generateFilePath = path.join(
-      ...(isWorkbenchConfig
+      ...(isWorkbenchConfig
+        ? [rootDir.match(/steps/) ? path.join(rootDir, '..') : rootDir, sanitizedFileName]
+        : [rootDir, templateFolder, sanitizedFileName]),
     )
     let content = await fs.readFile(filePath, 'utf8')

package/dist/esm/** (cloud/build/builders/python/index.js, cloud/build/builders/python/python-builder.py, create/templates/basic-tutorial/01-api.step.ts.txt, 02-process-food-order.step.ts.txt, 04_new_order_notifications.step.py.txt, motia-workbench.json, create/templates/generate.js, create/templates/generate.ts)

The esm copies of these files receive the same changes shown above for the cjs build. The hunks are identical apart from line offsets and the compiled index.js referring to plain fs, path, Archiver, addPackageToArchive, and includeStaticFiles instead of the fs_1.default-style CommonJS interop names.
package/package.json

@@ -1,7 +1,7 @@
 {
   "name": "motia",
   "description": "A Modern Unified Backend Framework for APIs, Events and Agents",
-  "version": "0.5.11-beta.119",
+  "version": "0.5.11-beta.120-742949",
   "license": "MIT",
   "repository": {
     "type": "git",

@@ -43,9 +43,9 @@
     "inquirer": "^8.2.5",
     "table": "^6.9.0",
     "ts-node": "^10.9.2",
-    "@motiadev/
-    "@motiadev/
-    "@motiadev/
+    "@motiadev/stream-client-node": "0.5.11-beta.120-742949",
+    "@motiadev/workbench": "0.5.11-beta.120-742949",
+    "@motiadev/core": "0.5.11-beta.120-742949"
   },
   "devDependencies": {
     "@amplitude/analytics-types": "^2.9.2",
package/dist/cjs/create/templates/basic-tutorial/00-noop.step.ts.txt (removed)

@@ -1,30 +0,0 @@
-import { NoopConfig } from 'motia'
-
-/**
- * NOOP Steps don't hold any logic in code, it's a
- * way to connect nodes in workflow to make it comprehensive
- * like representing a man in the loop or a manual operation that can
- * happen between one step and another.
- *
- * For more information, refer to the documentation: https://www.motia.dev/docs/workbench/noop-steps
- */
-export const config: NoopConfig = {
-  type: 'noop',
-  name: 'ExternalRequest',
-  description: 'basic-tutorial noop step example, representing an external http request',
-
-  /**
-   * Used mostly to connect nodes that emit to this
-   */
-  virtualSubscribes: [],
-
-  /**
-   * Used mostly to connect nodes that subscribes to this
-   */
-  virtualEmits: ['/basic-tutorial'],
-
-  /**
-   * The flows this step belongs to, will be available in Workbench
-   */
-  flows: ['basic-tutorial'],
-}
package/dist/esm/create/templates/basic-tutorial/00-noop.step.ts.txt (removed)

The esm copy of the noop step template is deleted as well; its 30 removed lines are identical to the cjs copy shown above.