setta 0.0.2.dev0__py3-none-any.whl → 0.0.3__py3-none-any.whl

Files changed (42)
  1. setta/__init__.py +1 -1
  2. setta/cli/__init__.py +1 -1
  3. setta/cli/logger.py +1 -3
  4. setta/code_gen/create_runnable_scripts.py +87 -34
  5. setta/code_gen/export_selected.py +1 -5
  6. setta/code_gen/find_placeholders.py +13 -2
  7. setta/database/backup.py +1 -2
  8. setta/database/db/artifacts/save.py +20 -4
  9. setta/database/db/projects/save.py +1 -1
  10. setta/database/db/sections/load.py +5 -2
  11. setta/database/db/sections/save.py +2 -2
  12. setta/database/db_objs.py +6 -0
  13. setta/database/export_db/export_raw.py +3 -3
  14. setta/database/export_db/utils.py +2 -3
  15. setta/database/settings_file.py +3 -3
  16. setta/lsp/file_watcher.py +24 -23
  17. setta/lsp/server.py +1 -1
  18. setta/lsp/writer.py +2 -2
  19. setta/routers/interactive.py +38 -20
  20. setta/server.py +3 -4
  21. setta/start.py +4 -3
  22. setta/static/constants/constants.json +4 -0
  23. setta/static/constants/db_init.sql +2 -1
  24. setta/static/constants/defaultValues.json +2 -1
  25. setta/static/constants/settingsProject.json +31 -31
  26. setta/static/frontend/assets/index-03be034e.css +32 -0
  27. setta/static/frontend/assets/{index-ee99dc72.js → index-59443547.js} +171 -171
  28. setta/static/frontend/index.html +2 -2
  29. setta/tasks/tasks.py +166 -98
  30. setta/tasks/utils.py +108 -21
  31. setta/terminals/terminals.py +7 -6
  32. setta/utils/constants.py +5 -2
  33. setta/utils/websocket_manager.py +8 -3
  34. setta-0.0.3.dist-info/METADATA +146 -0
  35. {setta-0.0.2.dev0.dist-info → setta-0.0.3.dist-info}/RECORD +39 -40
  36. setta/database/db_path.py +0 -8
  37. setta/static/frontend/assets/index-1d4b4ecf.css +0 -32
  38. setta-0.0.2.dev0.dist-info/METADATA +0 -24
  39. {setta-0.0.2.dev0.dist-info → setta-0.0.3.dist-info}/LICENSE +0 -0
  40. {setta-0.0.2.dev0.dist-info → setta-0.0.3.dist-info}/WHEEL +0 -0
  41. {setta-0.0.2.dev0.dist-info → setta-0.0.3.dist-info}/entry_points.txt +0 -0
  42. {setta-0.0.2.dev0.dist-info → setta-0.0.3.dist-info}/top_level.txt +0 -0
setta/static/frontend/index.html CHANGED
@@ -15,8 +15,8 @@
 
 
     <title>setta.dev</title>
-    <script type="module" crossorigin src="/static/assets/index-ee99dc72.js"></script>
-    <link rel="stylesheet" href="/static/assets/index-1d4b4ecf.css">
+    <script type="module" crossorigin src="/static/assets/index-59443547.js"></script>
+    <link rel="stylesheet" href="/static/assets/index-03be034e.css">
   </head>
   <body>
     <noscript>You need to enable JavaScript to run this app.</noscript>
setta/tasks/tasks.py CHANGED
@@ -1,10 +1,11 @@
 import asyncio
+import copy
 import logging
-import queue
-import threading
+import time
 from typing import Dict
 
 from setta.database.utils import create_new_id
+from setta.utils.constants import C
 
 from . import fns
 from .fns.utils import TaskDefinition, TaskMessage
@@ -20,75 +21,31 @@ class Tasks:
         self.task_runner = TaskRunner()
         self.cache = {}
         self.fns: Dict[str, TaskDefinition] = {}
-        self.in_memory_subprocess = SettaInMemoryFnSubprocess()
-        self.websockets = []  # Store the websocket connections
+        self.in_memory_subprocesses = {}
+        self.websockets = []
+        self.stop_event = asyncio.Event()
         add_fns_from_module(self.fns, fns)
 
-        # Start stdout listener thread
-        self._stop_event = asyncio.Event()
-        self.stdout_queue = queue.Queue()  # regular Queue
-        self._stdout_processor_task = None
-        self.stdout_thread = threading.Thread(target=self._stdout_listener, daemon=True)
-        self.stdout_thread.start()
-
-    # Backend Changes (Tasks class)
     async def connect(self, websocket):
         # Accept the new connection
         await websocket.accept()
         self.websockets.append(websocket)
-
-        # Start the processor task if it's not running
-        if self._stdout_processor_task is None or self._stdout_processor_task.done():
-            self._stdout_processor_task = asyncio.create_task(
-                self._process_stdout_queue()
-            )
+        for k, v in self.in_memory_subprocesses.items():
+            v["subprocess"].start_stdout_processor_task()
+            logger.debug(f"listening to subprocess {k}")
 
     async def disconnect(self, websocket):
         self.websockets.remove(websocket)
         if len(self.websockets) == 0:
-            # Cancel the processor task
-            if self._stdout_processor_task and not self._stdout_processor_task.done():
-                self._stdout_processor_task.cancel()
-                try:
-                    await self._stdout_processor_task
-                except asyncio.CancelledError:
-                    pass
-                self._stdout_processor_task = None
-
-    def _stdout_listener(self):
-        while not self._stop_event.is_set():
-            try:
-                stdout_data = self.in_memory_subprocess.stdout_parent_conn.recv()
-                self.stdout_queue.put(stdout_data)  # simple put, no async needed
-            except Exception as e:
-                if self._stop_event.is_set():
-                    break
-                logger.debug(f"Error in stdout listener: {e}")
-
-    async def _process_stdout_queue(self):
-        while not self._stop_event.is_set():
-            try:
-                if self._stop_event.is_set():
-                    break
-                if len(self.websockets) > 0:
-                    stdout_data = self.stdout_queue.get_nowait()
-                    stdout_data = stdout_data.replace("\n", "\r\n")
-                    for w in self.websockets:
-                        await w.send_text(stdout_data)
-                    self.stdout_queue.task_done()
-            except queue.Empty:
-                await asyncio.sleep(0.1)  # Check for connection every 100ms
-            except asyncio.CancelledError:
-                break
-            except Exception as e:
-                if self._stop_event.is_set():
-                    break
-                logger.debug(f"Error processing stdout: {e}")
-
-    async def __call__(self, fn_name, message: TaskMessage):
-        if fn_name in self.fns:
-            return await self.call_regular_fn(fn_name, message)
-        return await self.call_in_memory_subprocess_fn(fn_name, message)
+            for v in self.in_memory_subprocesses.values():
+                await v["subprocess"].stop_stdout_processor_task()
+
+    async def __call__(
+        self, message_type, message: TaskMessage, websocket_manager=None
+    ):
+        if message_type == "inMemoryFn":
+            return await self.call_in_memory_subprocess_fn(message, websocket_manager)
+        return await self.call_regular_fn(message_type, message)
 
     async def call_regular_fn(self, fn_name, message: TaskMessage):
         fn = self.fns[fn_name]
@@ -101,52 +58,163 @@
         result["messageType"] = fn.return_message_type
         return result
 
-    async def call_in_memory_subprocess_fn(self, fn_name, message: TaskMessage):
-        self.in_memory_subprocess.parent_conn.send(
-            {"type": "call", "fn_name": fn_name, "message": message}
-        )
-        result = await self.task_runner.run(
-            self.in_memory_subprocess.parent_conn.recv, [], RunType.THREAD
-        )
-        # if result["status"] == "success":
-        #     for x in result["content"]:
-        #         x.setdefault("sectionType", default_section_type(x["type"]))
-        if result["status"] != "success":
-            result["content"] = {}
-
-        return {"content": result["content"], "messageType": result["messageType"]}
-
-    async def add_custom_fns(self, code_list, to_cache):
-        error_msgs = {}
-        task_metadata = {}
-        initial_content = []
-        for c in code_list:
-            # Send import request to subprocess
-            self.in_memory_subprocess.parent_conn.send(
+    async def call_in_memory_subprocess_fn(
+        self,
+        message: TaskMessage,
+        websocket_manager=None,
+        call_all=False,
+        subprocess_key=None,
+    ):
+        # Create a list of tasks to run concurrently
+        tasks = []
+        results = []
+
+        for sp_key, sp_info in self.in_memory_subprocesses.items():
+            if subprocess_key and sp_key != subprocess_key:
+                continue
+            for fn_name, fnInfo in sp_info["fnInfo"].items():
+                if (
+                    call_all
+                    or None in fnInfo["dependencies"]
+                    or any(k in fnInfo["dependencies"] for k in message.content.keys())
+                ):
+                    # Send message to subprocess
+                    sp_info["subprocess"].parent_conn.send(
+                        {"type": "call", "fn_name": fn_name, "message": message}
+                    )
+
+                    # Create task for receiving response
+                    task = asyncio.create_task(
+                        self._handle_subprocess_response(
+                            sp_key,
+                            fn_name,
+                            message.id,
+                            sp_info["subprocess"].parent_conn.recv,
+                            websocket_manager,
+                            results,
+                        )
+                    )
+                    tasks.append(task)
+
+        # Wait for all tasks to complete concurrently
+        if tasks:
+            await asyncio.gather(*tasks)
+
+        if websocket_manager:
+            return {}
+
+        content = []
+        for r in results:
+            if r["content"]:
+                content.extend(r["content"])
+        return {"content": content, "messageType": C.WS_IN_MEMORY_FN_RETURN}
+
+    async def _handle_subprocess_response(
+        self, subprocess_key, fn_name, msg_id, recv_fn, websocket_manager, results
+    ):
+        # Run the receive function in a thread
+        start_time = time.perf_counter()
+        result = await self.task_runner.run(recv_fn, [], RunType.THREAD)
+        elapsed_time = time.perf_counter() - start_time
+        if result["status"] == "success":
+            self.update_average_subprocess_fn_time(
+                subprocess_key, fn_name, elapsed_time
+            )
+        if websocket_manager is not None:
+            if result["content"]:
+                await websocket_manager.send_message_to_requester(
+                    msg_id, result["content"], result["messageType"]
+                )
+            await self.maybe_send_latest_run_time_info(
+                subprocess_key, fn_name, msg_id, websocket_manager
+            )
+        else:
+            results.append(result)
+
+    async def add_custom_fns(self, code_graph, to_cache):
+        for c in code_graph:
+            subprocess_key = c["subprocess_key"]
+            module_name = c["module_name"]
+            sp = self.in_memory_subprocesses.get(subprocess_key, {}).get("subprocess")
+            if sp:
+                sp.close()
+            logger.debug(f"Creating new subprocess for {module_name}")
+            sp = SettaInMemoryFnSubprocess(self.stop_event, self.websockets)
+            self.in_memory_subprocesses[subprocess_key] = {
+                "subprocess": sp,
+                "fnInfo": {},
+            }
+
+            sp.parent_conn.send(
                 {
                     "type": "import",
                     "code": c["code"],
-                    "module_name": c["module_name"],
+                    "module_name": module_name,
                     "to_cache": to_cache,
                 }
             )
-            result = await self.task_runner.run(
-                self.in_memory_subprocess.parent_conn.recv, [], RunType.THREAD
-            )
+            result = await self.task_runner.run(sp.parent_conn.recv, [], RunType.THREAD)
+            fnInfo = self.in_memory_subprocesses[subprocess_key]["fnInfo"]
+
             if result["status"] == "success":
-                task_metadata.update(result["content"])
+                for k, v in result["content"].items():
+                    if k not in fnInfo:
+                        fnInfo[k] = {
+                            "dependencies": set(),
+                            "averageRunTime": None,
+                            "callCount": 0,
+                            "lastStatsUpdate": time.time(),
+                        }
+                    fnInfo[k]["dependencies"].update(v)
             else:
-                error_msgs[c["module_name"]] = result["error"]
+                # TODO: store error message and display on frontend?
+                pass
+
+            initial_result = await self.call_in_memory_subprocess_fn(
+                TaskMessage(id=create_new_id(), content={}),
+                call_all=True,
+                subprocess_key=subprocess_key,
+            )
 
-        for k in task_metadata.keys():
-            task_output = await self(k, TaskMessage(id=create_new_id(), content={}))
-            initial_content.extend(task_output["content"])
+        logger.debug(
+            f"self.in_memory_subprocesses keys: {self.in_memory_subprocesses.keys()}"
+        )
 
-        return task_metadata, error_msgs, initial_content
+        return initial_result["content"]
 
     def close(self):
-        self._stop_event.set()
-        self.in_memory_subprocess.close()
-        self.stdout_thread.join()
-        if self._stdout_processor_task:
-            self._stdout_processor_task.cancel()
+        self.stop_event.set()
+        for v in self.in_memory_subprocesses.values():
+            v["subprocess"].close()
+
+    def update_average_subprocess_fn_time(self, subprocess_key, fn_name, new_time):
+        fnInfo = self.in_memory_subprocesses[subprocess_key]["fnInfo"][fn_name]
+        current_avg = fnInfo["averageRunTime"]
+        new_avg = (
+            new_time
+            if current_avg is None
+            else ((0.9) * current_avg) + (0.1 * new_time)
+        )
+        fnInfo["averageRunTime"] = new_avg
+        fnInfo["callCount"] += 1
+        fnInfo["lastStatsUpdate"] = time.time()
+
+    async def maybe_send_latest_run_time_info(
+        self, subprocess_key, fn_name, msg_id, websocket_manager
+    ):
+        fnInfo = self.in_memory_subprocesses[subprocess_key]["fnInfo"][fn_name]
+        if fnInfo["callCount"] % 10 == 0 or (
+            fnInfo["lastStatsUpdate"] and (time.time() - fnInfo["lastStatsUpdate"]) > 10
+        ):
+            newInfo = self.getInMemorySubprocessInfo()
+            await websocket_manager.send_message_to_requester(
+                msg_id, newInfo, C.WS_IN_MEMORY_FN_AVG_RUN_TIME
+            )
+
+    def getInMemorySubprocessInfo(self):
+        output = {}
+        for sp_key, sp_info in self.in_memory_subprocesses.items():
+            output[sp_key] = {"fnInfo": copy.deepcopy(sp_info["fnInfo"])}
+            for fnInfo in output[sp_key]["fnInfo"].values():
+                fnInfo["dependencies"] = list(fnInfo["dependencies"])
+        return output
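Two of the additions above are easy to miss in the diff: a function is only dispatched when one of its declared dependencies changes (or when it declares no dependencies at all), and each successful call folds its elapsed time into an exponential moving average. A minimal standalone sketch of both rules, assuming a per-function info dict shaped like the one built in `add_custom_fns` (the helper names `should_call` and `update_average_run_time` are illustrative, not part of setta):

```python
# Illustrative sketch only; mirrors the dispatch test in call_in_memory_subprocess_fn
# and the weighted average in update_average_subprocess_fn_time.

def should_call(fn_info, changed_keys, call_all=False):
    deps = fn_info["dependencies"]
    # A None entry means "run on every change"; otherwise run only when a
    # changed key is one of the function's declared dependencies.
    return call_all or None in deps or any(k in deps for k in changed_keys)

def update_average_run_time(fn_info, elapsed):
    avg = fn_info["averageRunTime"]
    # The first observation seeds the average; later ones use a 0.9/0.1 blend.
    fn_info["averageRunTime"] = elapsed if avg is None else 0.9 * avg + 0.1 * elapsed
    fn_info["callCount"] += 1

fn_info = {"dependencies": {"lr", "batch_size"}, "averageRunTime": None, "callCount": 0}
assert should_call(fn_info, {"lr"})
assert not should_call(fn_info, {"momentum"})
update_average_run_time(fn_info, 0.25)  # averageRunTime is now 0.25
```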
setta/tasks/utils.py CHANGED
@@ -1,14 +1,20 @@
+import asyncio
 import importlib.util
 import json
+import logging
 import multiprocessing
-import os
+import queue
 import sys
+import threading
 import traceback
 import uuid
 
 from setta.tasks.fns.utils import TaskDefinition
+from setta.utils.constants import CWD
 from setta.utils.utils import nested_access
 
+logger = logging.getLogger(__name__)
+
 
 def import_code_from_string(code_string, module_name=None, add_to_sys_modules=True):
     # Generate a unique module name if one isn't provided
@@ -16,7 +22,7 @@ def import_code_from_string(code_string, module_name=None, add_to_sys_modules=Tr
         module_name = f"setta_dynamic_module_{uuid.uuid4().hex}"
 
     # Add current directory to sys.path if it's not already there
-    current_dir = os.getcwd()
+    current_dir = str(CWD)
     if current_dir not in sys.path:
         sys.path.insert(0, current_dir)
 
@@ -40,13 +46,24 @@ def import_code_from_string(code_string, module_name=None, add_to_sys_modules=Tr
 
 
 class SettaInMemoryFnSubprocess:
-    def __init__(self):
+    def __init__(self, stop_event, websockets):
         self.parent_conn, self.child_conn = multiprocessing.Pipe()
         self.process = multiprocessing.Process(target=self._subprocess_main)
         self.stdout_parent_conn, self.stdout_child_conn = multiprocessing.Pipe()
         self.process.daemon = True  # Ensure process dies with parent
         self.process.start()
 
+        self.stop_event = asyncio.Event()
+        self.tasks_stop_event = stop_event
+        self.websockets = websockets
+        self.stdout_queue = queue.Queue()
+        self.stdout_processor_task = None
+        self.stdout_thread = threading.Thread(target=self.stdout_listener, daemon=True)
+        self.stdout_thread.start()
+
+        if len(self.websockets) > 0:
+            self.start_stdout_processor_task()
+
     def _subprocess_main(self):
         """Main loop in subprocess that handles all requests"""
         # Initialize store for imported modules
@@ -83,15 +100,15 @@ class SettaInMemoryFnSubprocess:
                 # Import and store module
                 module = import_code_from_string(code, module_name)
                 added_fn_names = add_fns_from_module(fns_dict, module, module_name)
-                task_metadata = {}
+                dependencies = {}
                 for k in added_fn_names:
                     cache[k] = msg["to_cache"]
-                    task_metadata[k] = get_task_metadata(fns_dict[k], cache[k])
+                    dependencies[k] = get_task_metadata(fns_dict[k], cache[k])
 
                 self.child_conn.send(
                     {
                         "status": "success",
-                        "content": task_metadata,
+                        "content": dependencies,
                     }
                 )
 
@@ -122,20 +139,36 @@
 
     def close(self):
        try:
+            logger.debug("Initiating shutdown sequence")
             self.parent_conn.send({"type": "shutdown"})
-            self.process.join(timeout=1.0)
-        except:
-            pass
+            self.process.join(timeout=2)  # Add timeout to process join
+
+            if self.process.is_alive():
+                logger.debug("Process still alive after timeout, forcing termination")
+                self.process.terminate()
+                self.process.join(timeout=1)
+        except Exception as e:
+            logger.debug(f"Error during process shutdown: {e}")
 
-        if self.process.is_alive():
-            self.process.terminate()
-            self.process.join()
+        # Set stop event before closing pipes
+        self.stop_event.set()
 
-        # Close both sets of connections
-        self.parent_conn.close()
-        self.child_conn.close()
-        self.stdout_parent_conn.close()
-        self.stdout_child_conn.close()
+        # Close all connections
+        for conn in [
+            self.parent_conn,
+            self.child_conn,
+            self.stdout_parent_conn,
+            self.stdout_child_conn,
+        ]:
+            conn.close()
+
+        self.stdout_thread.join(timeout=2)  # Add timeout to thread join
+
+        if self.stdout_thread.is_alive():
+            logger.debug("Stdout thread failed to terminate within timeout")
+
+        if self.stdout_processor_task:
+            self.stdout_processor_task.cancel()
 
     def process_message(self, fn_name, message, cache):
         if fn_name in cache:
@@ -147,6 +180,60 @@ class SettaInMemoryFnSubprocess:
             message.content = exporter_obj.output
         return message.content
 
+    def start_stdout_processor_task(self):
+        if self.stdout_processor_task is None or self.stdout_processor_task.done():
+            self.stdout_processor_task = asyncio.create_task(
+                self.process_stdout_queue()
+            )
+
+    async def stop_stdout_processor_task(self):
+        if self.stdout_processor_task and not self.stdout_processor_task.done():
+            self.stdout_processor_task.cancel()
+            try:
+                await self.stdout_processor_task
+            except asyncio.CancelledError:
+                pass
+            self.stdout_processor_task = None
+
+    async def process_stdout_queue(self):
+        while not self.should_stop():
+            try:
+                if self.should_stop():
+                    break
+                if len(self.websockets) > 0:
+                    stdout_data = self.stdout_queue.get_nowait()
+                    stdout_data = stdout_data.replace("\n", "\r\n")
+                    for w in self.websockets:
+                        await w.send_text(stdout_data)
+                    self.stdout_queue.task_done()
+            except queue.Empty:
+                await asyncio.sleep(0.1)  # Check for connection every 100ms
+            except asyncio.CancelledError:
+                break
+            except Exception as e:
+                if self.should_stop():
+                    break
+                logger.debug(f"Error processing stdout: {e}")
+
+    def stdout_listener(self):
+        while not self.should_stop():
+            if self.stdout_parent_conn.poll(0.1):  # Check for data with timeout
+                try:
+                    stdout_data = self.stdout_parent_conn.recv()
+                    self.stdout_queue.put(stdout_data)
+                except EOFError:  # Pipe was closed
+                    break
+                except Exception as e:
+                    logger.debug(f"Error in stdout listener: {e}")
+                    if self.should_stop():
+                        break
+            else:  # No data available within timeout
+                if self.should_stop():
+                    break
+
+    def should_stop(self):
+        return self.stop_event.is_set() or self.tasks_stop_event.is_set()
+
 
 def add_fns_from_module(fns_dict, module, module_name=None):
     count = 1
@@ -168,11 +255,11 @@ def add_fns_from_module(fns_dict, module, module_name=None):
 def get_task_metadata(in_memory_fn, exporter_obj):
     # None means run the task on every change
     if in_memory_fn.dependencies is None:
-        dependencies = None
+        dependencies = set([None])
     # Empty array means only run when the task imported.
     # Non-empty array means run when specified dependencies update.
     else:
-        dependencies = [
+        dependencies = set(
             exporter_obj.var_name_reverse_mapping[d] for d in in_memory_fn.dependencies
-        ]
-    return {"dependencies": dependencies}
+        )
+    return dependencies
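The stdout plumbing that used to live in `Tasks` now sits on each `SettaInMemoryFnSubprocess`: a daemon thread drains the blocking pipe into a plain `queue.Queue`, and an asyncio task forwards the queued text to connected websockets. A simplified sketch of that thread-to-event-loop bridge, with the stop-event and EOF handling trimmed (the function names below are illustrative, not the class's methods):

```python
import asyncio
import queue

def pipe_listener(conn, out_queue, stop):
    # Runs in a daemon thread: poll the multiprocessing pipe with a short
    # timeout so the loop can notice the stop flag, then enqueue the data.
    while not stop.is_set():
        if conn.poll(0.1):
            out_queue.put(conn.recv())

async def stdout_forwarder(out_queue, websockets, stop):
    # Runs on the event loop: drain the queue without blocking and push the
    # text to every connected websocket, normalizing newlines for the
    # terminal display.
    while not stop.is_set():
        try:
            data = out_queue.get_nowait()
            for ws in websockets:
                await ws.send_text(data.replace("\n", "\r\n"))
        except queue.Empty:
            await asyncio.sleep(0.1)

# In the real class, __init__ starts the listener thread and
# start_stdout_processor_task creates the forwarder as an asyncio task.
```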
setta/terminals/terminals.py CHANGED
@@ -4,6 +4,7 @@ import json
 import logging
 import platform
 import select
+import shlex
 import time
 import traceback
 from asyncio import CancelledError
@@ -44,12 +45,14 @@ def is_command_running_in_pty(pid):
 
 def get_terminal_shell():
     if USER_SETTINGS["backend"]["defaultTerminalShell"]:
-        return USER_SETTINGS["backend"]["defaultTerminalShell"]
+        return shlex.split(
+            USER_SETTINGS["backend"]["defaultTerminalShell"], posix=not IS_WINDOWS
+        )
     if IS_WINDOWS:
-        return "bash.exe"
+        return ["bash.exe"]
     if IS_MACOS:
-        return "zsh"
-    return "bash"
+        return ["zsh"]
+    return ["bash"]
 
 
 class TerminalWebsockets:
@@ -61,8 +64,6 @@ class TerminalWebsockets:
     def new_terminal(self, projectConfigId, sectionId, isTemporary):
         if sectionId not in self.PTY_PIDS:
             terminal_shell = get_terminal_shell()
-            if not IS_WINDOWS:
-                terminal_shell = [terminal_shell]
             pty_process = PtyProcess.spawn(terminal_shell)
             self.PTY_PIDS[sectionId] = {
                 "pid": pty_process.pid,
setta/utils/constants.py CHANGED
@@ -10,8 +10,10 @@ CONSTANTS_FOLDER = (
     Path("../../../constants") if is_dev_mode() else Path("../static/constants")
 )
 SEED_FOLDER = Path("../../../seed") if is_dev_mode() else Path("../static/seed")
-CODE_FOLDER = "setta_code"
-DB_BACKUP_FOLDER = ".setta_backups"
+CWD = Path.cwd()
+SETTA_FILES_FOLDER = CWD / "setta_files"
+CODE_FOLDER = SETTA_FILES_FOLDER / "code"
+DB_BACKUP_FOLDER = SETTA_FILES_FOLDER / "backups"
 CODE_FOLDER_ENV_VARIABLE = "SETTA_CODE_FOLDER"
 HOST_ENV_VARIABLE = "SETTA_HOST"
 PORT_ENV_VARIABLE = "SETTA_PORT"
@@ -99,6 +101,7 @@ CODE_INFO_TABLE_DATA_JSON_FIELDS = set(
         "positionalOnly",
         "isPinned",
         "isFrozen",
+        "ignoreTypeErrors",
     )
 )
 
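The net effect of the constants change is that setta's generated code and database backups now live under a single `setta_files` directory in the current working directory, replacing the old top-level `setta_code` and `.setta_backups` locations. A small sketch of the resulting layout (the example working directory is arbitrary):

```python
from pathlib import Path

CWD = Path.cwd()                                   # e.g. /home/user/my_project
SETTA_FILES_FOLDER = CWD / "setta_files"
CODE_FOLDER = SETTA_FILES_FOLDER / "code"          # previously the relative "setta_code"
DB_BACKUP_FOLDER = SETTA_FILES_FOLDER / "backups"  # previously ".setta_backups"

# /home/user/my_project/setta_files/code
# /home/user/my_project/setta_files/backups
print(CODE_FOLDER, DB_BACKUP_FOLDER)
```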
setta/utils/websocket_manager.py CHANGED
@@ -43,7 +43,9 @@ class WebsocketManager:
         if wid == self.server_cli_id:
             # process task and send any results back to requester
             result = await tasks(
-                message["messageType"], TaskMessage.parse_obj(message)
+                message["messageType"],
+                TaskMessage.parse_obj(message),
+                websocket_manager=self,
             )
             if "content" in result:
                 websocket = self.sockets[fromWebsocketId]["websocket"]
@@ -64,10 +66,13 @@
         if "location" in message:
             self.sockets[fromWebsocketId]["location"] = message["location"]
 
-    async def send_message_to_requester(self, id, content):
+    async def send_message_to_requester(self, id, content, messageType=None):
         # just send data to target websocket
         websocket = self.sockets[self.message_id_to_sender_id[id]]["websocket"]
-        await websocket.send_text(json.dumps({"id": id, "content": content}))
+        return_val = {"id": id, "content": content}
+        if messageType:
+            return_val["messageType"] = messageType
+        await websocket.send_text(json.dumps(return_val))
 
     async def send_message_to_location(self, content, messageType, location):
         for w in self.sockets.values():
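With the optional `messageType` argument, responses routed back to the requesting websocket can now carry a type tag alongside the usual `id` and `content` fields. A sketch of the two payload shapes a client would see (the id and type strings below are made up for illustration):

```python
import json

def build_payload(msg_id, content, message_type=None):
    # Mirrors the return_val construction in send_message_to_requester.
    payload = {"id": msg_id, "content": content}
    if message_type:
        payload["messageType"] = message_type
    return json.dumps(payload)

print(build_payload("abc123", [{"value": 1}]))
# {"id": "abc123", "content": [{"value": 1}]}
print(build_payload("abc123", [{"value": 1}], "exampleMessageType"))
# {"id": "abc123", "content": [{"value": 1}], "messageType": "exampleMessageType"}
```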