stoobly-agent 1.10.2__py3-none-any.whl → 1.10.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (23)
  1. stoobly_agent/__init__.py +1 -1
  2. stoobly_agent/app/api/application_http_request_handler.py +1 -1
  3. stoobly_agent/app/cli/helpers/validations.py +1 -1
  4. stoobly_agent/app/cli/scaffold/app.py +1 -1
  5. stoobly_agent/app/cli/scaffold/app_create_command.py +2 -1
  6. stoobly_agent/app/cli/scaffold/docker/workflow/run_command.py +89 -26
  7. stoobly_agent/app/cli/scaffold/local/workflow/run_command.py +94 -42
  8. stoobly_agent/app/cli/scaffold/service_dependency.py +1 -1
  9. stoobly_agent/app/cli/scaffold/service_workflow_validate_command.py +6 -1
  10. stoobly_agent/app/cli/scaffold/templates/app/.docker-compose.base.yml +1 -1
  11. stoobly_agent/app/proxy/mitmproxy/request_facade.py +1 -1
  12. stoobly_agent/app/proxy/mock/eval_fixtures_service.py +34 -11
  13. stoobly_agent/app/proxy/mock/eval_request_service.py +1 -1
  14. stoobly_agent/app/proxy/utils/allowed_request_service.py +2 -2
  15. stoobly_agent/lib/cache.py +1 -1
  16. stoobly_agent/test/app/models/schemas/.stoobly/db/VERSION +1 -1
  17. stoobly_agent/test/app/proxy/mitmproxy/request_facade_test.py +48 -8
  18. stoobly_agent/test/app/proxy/mock/eval_fixtures_service_test.py +467 -21
  19. {stoobly_agent-1.10.2.dist-info → stoobly_agent-1.10.4.dist-info}/METADATA +1 -1
  20. {stoobly_agent-1.10.2.dist-info → stoobly_agent-1.10.4.dist-info}/RECORD +23 -23
  21. {stoobly_agent-1.10.2.dist-info → stoobly_agent-1.10.4.dist-info}/WHEEL +0 -0
  22. {stoobly_agent-1.10.2.dist-info → stoobly_agent-1.10.4.dist-info}/entry_points.txt +0 -0
  23. {stoobly_agent-1.10.2.dist-info → stoobly_agent-1.10.4.dist-info}/licenses/LICENSE +0 -0
stoobly_agent/__init__.py CHANGED
@@ -1,2 +1,2 @@
  COMMAND = 'stoobly-agent'
- VERSION = '1.10.2'
+ VERSION = '1.10.4'
stoobly_agent/app/api/application_http_request_handler.py CHANGED
@@ -134,7 +134,7 @@ class ApplicationHTTPRequestHandler(SimpleHTTPRequestHandler):
  for endpoint_handler in ROUTES[method]:
  path = endpoint_handler[0]

- matches = self.path == path if isinstance(path, str) else bool(re.match(path, self.path))
+ matches = self.path == path if isinstance(path, str) else bool(re.fullmatch(path, self.path))

  if matches:
  handler = endpoint_handler[1]
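Note: this release swaps `re.match` for `re.fullmatch` throughout the route, rewrite, filter, and cache-key matchers. The difference matters for unanchored patterns: `re.match` only anchors at the start of the string, while `re.fullmatch` requires the whole string to match. A minimal sketch of the behavioral change (the pattern and paths below are illustrative, not taken from the codebase):

```python
import re

pattern = r"/requests/\d+"

bool(re.match(pattern, "/requests/1/replay"))      # True: a prefix match is enough
bool(re.fullmatch(pattern, "/requests/1/replay"))  # False: trailing segment rejected
bool(re.fullmatch(pattern, "/requests/1"))         # True: the whole string matches
```

In practice this tightens matching: a pattern that previously matched anything sharing its prefix now has to describe the full path or URL (or end with `.*`).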
stoobly_agent/app/cli/helpers/validations.py CHANGED
@@ -126,7 +126,7 @@ def validate_aliases(validations, **kwargs) -> Union[Alias, None]:
  handle_missing_alias(parsed_validation, kwargs.get('format'))

  if parsed_validation['value'] != None:
- if not re.match(parsed_validation['value'], aliases_map[name]):
+ if not re.fullmatch(parsed_validation['value'], aliases_map[name]):
  handle_invalid_alias(parsed_validation, aliases_map[name], kwargs.get('format'))

  def filter_response(res, status: int):
stoobly_agent/app/cli/scaffold/app.py CHANGED
@@ -111,7 +111,7 @@ class App():

  # Skip files that match the ignore list pattern, use regex
  for ignore_pattern in ignore:
- if re.match(os.path.join(src, ignore_pattern), src_file_path):
+ if re.fullmatch(os.path.join(src, ignore_pattern), src_file_path):
  ignored = True
  break
stoobly_agent/app/cli/scaffold/app_create_command.py CHANGED
@@ -12,7 +12,7 @@ from .app import App
  from .app_command import AppCommand
  from .constants import PLUGIN_CYPRESS, PLUGIN_PLAYWRIGHT, RUN_ON_DOCKER, RUN_ON_LOCAL
  from .docker.template_files import plugin_docker_cypress, plugin_docker_playwright, plugin_local_cypress, plugin_local_playwright, remove_app_docker_files, remove_service_docker_files
- from .templates.constants import CORE_GATEWAY_SERVICE_NAME, CORE_MOCK_UI_SERVICE_NAME, MAINTAINED_RUN
+ from .templates.constants import CORE_GATEWAY_SERVICE_NAME, CORE_MOCK_UI_SERVICE_NAME, CUSTOM_RUN, MAINTAINED_RUN

  class AppCreateOptions(TypedDict):
  docker_socket_path: str
@@ -85,6 +85,7 @@ class AppCreateCommand(AppCommand):
  ignore.append(f"{CORE_MOCK_UI_SERVICE_NAME}/.*")

  if RUN_ON_DOCKER in self.app_run_on:
+ ignore.append(f".*/{CUSTOM_RUN}")
  ignore.append(f".*/{MAINTAINED_RUN}")

  # Copy all app templates
stoobly_agent/app/cli/scaffold/docker/workflow/run_command.py CHANGED
@@ -2,12 +2,14 @@ import os
  import pdb
  import subprocess
  import sys
+ import time

  from typing import List
+ from types import FunctionType

  from stoobly_agent.app.cli.scaffold.docker.constants import APP_EGRESS_NETWORK_TEMPLATE, APP_INGRESS_NETWORK_TEMPLATE, DOCKERFILE_CONTEXT
  from stoobly_agent.app.cli.scaffold.docker.service.configure_gateway import configure_gateway
- from stoobly_agent.app.cli.scaffold.templates.constants import CORE_ENTRYPOINT_SERVICE_NAME
+ from stoobly_agent.app.cli.scaffold.templates.constants import CORE_ENTRYPOINT_SERVICE_NAME, CORE_SERVICES
  from stoobly_agent.app.cli.scaffold.workflow import Workflow
  from stoobly_agent.app.cli.scaffold.workflow_run_command import WorkflowRunCommand
  from stoobly_agent.app.cli.types.workflow_run_command import BuildOptions, DownOptions, UpOptions, WorkflowDownOptions, WorkflowUpOptions, WorkflowLogsOptions
@@ -27,10 +29,14 @@ class DockerWorkflowRunCommand(WorkflowRunCommand):
  self.services = services or []
  self.script = script

+ @property
+ def timestamp_file_extension(self):
+ return '.timestamp'
+
  @property
  def timestamp_file_path(self):
  """Get the path to the timestamp file for this workflow."""
- return os.path.join(self.workflow_namespace.path, f"{self.workflow_name}.timestamp")
+ return os.path.join(self.workflow_namespace.path, self.timestamp_file_name(self.workflow_name))

  def exec_setup(self, containerized=False, user_id=None, verbose=False):
  """Setup Docker environment including gateway, images, and networks."""
@@ -47,25 +53,34 @@ class DockerWorkflowRunCommand(WorkflowRunCommand):

  for command in init_commands:
  self.exec(command, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
-
- def up(self, **options: WorkflowUpOptions):
- """Execute the complete Docker workflow up process."""
- # Define timestamp file path
- timestamp_file = self.timestamp_file_path
-
+
+ def timestamp_file_name(self, workflow_name: str):
+ return f"{workflow_name}{self.timestamp_file_extension}"
+
+ def __create_timestamp_file(self):
  # Create timestamp file to indicate workflow is starting
+ timestamp_file = self.timestamp_file_path
+
  try:
  with open(timestamp_file, 'w') as f:
- import time
  f.write(str(time.time()))
- Logger.instance(LOG_ID).info(f"Created timestamp file: {timestamp_file}")
+ Logger.instance(LOG_ID).debug(f"Created timestamp file: {timestamp_file}")
  except Exception as e:
  Logger.instance(LOG_ID).error(f"Failed to create timestamp file: {e}")
  sys.exit(1)

+ return timestamp_file
+
+ def up(self, **options: WorkflowUpOptions):
+ """Execute the complete Docker workflow up process."""
+
  no_publish = options.get('no_publish', False)
  print_service_header = options.get('print_service_header')
-
+
+ self.__iterate_active_workflows(handle_active=self.__handle_up_active)
+
+ timestamp_file = self.__create_timestamp_file()
+
  try:
  # Create individual service commands
  commands: List[DockerWorkflowRunCommand] = []
@@ -128,11 +143,7 @@ class DockerWorkflowRunCommand(WorkflowRunCommand):

  def down(self, **options: WorkflowDownOptions):
  """Execute the complete Docker workflow down process."""
- # Check if workflow is running (timestamp file exists)
- timestamp_file = self.timestamp_file_path
- if not os.path.exists(timestamp_file):
- Logger.instance(LOG_ID).info(f"Workflow '{self.workflow_name}' is not running. No timestamp file found: {timestamp_file}")
- return
+ timestamp_file = self.__find_and_verify_timestamp_file()

  print_service_header = options.get('print_service_header')
@@ -188,7 +199,6 @@ class DockerWorkflowRunCommand(WorkflowRunCommand):
  self.exec(remove_ingress_network_command, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

  # Clean up timestamp file
- timestamp_file = os.path.join(self.workflow_namespace.path, f"{self.workflow_name}.timestamp")
  if os.path.exists(timestamp_file):
  try:
  os.remove(timestamp_file)
@@ -197,14 +207,8 @@ class DockerWorkflowRunCommand(WorkflowRunCommand):

  def logs(self, **options: WorkflowLogsOptions):
  """Execute the complete Docker workflow logs process."""
- # Check if workflow is running (timestamp file exists)
- timestamp_file = self.timestamp_file_path
- if not os.path.exists(timestamp_file):
- Logger.instance(LOG_ID).info(f"Workflow '{self.workflow_name}' is not running. No timestamp file found: {timestamp_file}")
- return
-
- from ...templates.constants import CORE_SERVICES
-
+ timestamp_file = self.__find_and_verify_timestamp_file()
+
  print_service_header = options.get('print_service_header')

  # Filter services based on options
@@ -420,4 +424,63 @@ class DockerWorkflowRunCommand(WorkflowRunCommand):
  result = subprocess.run(command, shell=True, **options)
  if result.returncode != 0:
  Logger.instance(LOG_ID).error(command)
- sys.exit(1)
+ sys.exit(1)
+
+ def __find_and_verify_timestamp_file(self):
+ # Check if workflow is running (timestamp file exists)
+
+ timestamp_file = self.timestamp_file_path
+ if not os.path.exists(timestamp_file):
+ Logger.instance(LOG_ID).error(f"Workflow '{self.workflow_name}' is not running.")
+
+ if self.workflow_name != self.workflow_namespace.namespace:
+ Logger.instance(LOG_ID).error(f"Run `stoobly-agent scaffold workflow up {self.workflow_name} --namespace {self.workflow_namespace.namespace}` to start it first.")
+ else:
+ Logger.instance(LOG_ID).error(f"Run `stoobly-agent scaffold workflow up {self.workflow_name}` to start it first.")
+ sys.exit(2)
+
+ return timestamp_file
+
+ def __handle_up_active(self, folder: str, timestamp_file_path: str):
+ file_name = os.path.basename(timestamp_file_path)
+
+ # In the case of a namespace, the workflow name is the name of the file without the timestamp extension
+ workflow_name = self.workflow_name
+ if folder != self.workflow_name:
+ workflow_name = file_name.split(self.timestamp_file_extension)[0]
+
+ Logger.instance(LOG_ID).error(f"Workflow '{workflow_name}' is running, please stop it first.")
+
+ if folder != workflow_name:
+ Logger.instance(LOG_ID).error(f"Run `stoobly-agent scaffold workflow down {workflow_name} --namespace {folder}` to stop it first.")
+ else:
+ Logger.instance(LOG_ID).error(f"Run `stoobly-agent scaffold workflow down {workflow_name}` to stop it first.")
+
+ sys.exit(1)
+
+ def __iterate_active_workflows(self, **kwargs):
+ handle_active: FunctionType = kwargs.get('handle_active')
+ tmp_dir_path = self.app.data_dir.tmp_dir_path
+
+ # For each folder in self.app.data_dir.tmp_dir_path
+ for folder in os.listdir(tmp_dir_path):
+ folder_path = os.path.join(tmp_dir_path, folder)
+
+ # If the folder is not a directory, skip
+ if not os.path.isdir(folder_path):
+ continue
+
+ # For each file in folder_path that ends with .timestamp
+ for file in os.listdir(folder_path):
+ if not file.endswith(self.timestamp_file_extension):
+ continue
+
+ # If the folder contains a .timestamp file, then another workflow is running
+ timestamp_file_path = os.path.join(folder_path, file)
+
+ # Allow re-running the same workflow
+ if timestamp_file_path == self.timestamp_file_path:
+ continue
+
+ if handle_active:
+ handle_active(folder, timestamp_file_path)
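For reference, the timestamp bookkeeping composes the marker path from the workflow namespace directory and the new `.timestamp` extension. A small sketch of that naming, assuming the namespace directory lives under the tmp directory that `__iterate_active_workflows` scans (the literal paths below are hypothetical; the real values come from `workflow_namespace.path` and `app.data_dir.tmp_dir_path`):

```python
import os

# Hypothetical values for illustration only
tmp_dir_path = '/app/.stoobly/tmp'
workflow_name = 'record'
namespace = 'record'  # assumed to default to the workflow name unless --namespace is passed

timestamp_file_extension = '.timestamp'
timestamp_file_name = f"{workflow_name}{timestamp_file_extension}"
timestamp_file_path = os.path.join(tmp_dir_path, namespace, timestamp_file_name)
# -> /app/.stoobly/tmp/record/record.timestamp
```

During `up`, every other `.timestamp` file found under the tmp directory is treated as an active workflow and the command aborts with instructions to bring that workflow down first; the workflow's own timestamp file is skipped so the same workflow can be re-run.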
stoobly_agent/app/cli/scaffold/local/workflow/run_command.py CHANGED
@@ -4,6 +4,7 @@ import signal
  import subprocess
  import sys

+ from types import FunctionType
  from typing import Optional, List

  from stoobly_agent.app.cli.scaffold.constants import PLUGIN_CYPRESS, PLUGIN_PLAYWRIGHT
@@ -34,26 +35,34 @@ class LocalWorkflowRunCommand(WorkflowRunCommand):
  self._log_file_path = os.path.join(self.workflow_namespace.path, f"{self.workflow_name}.log")
  return self._log_file_path

+ @property
+ def pid_file_extension(self):
+ return '.pid'
+
  @property
  def pid_file_path(self):
  """Get the path to the PID file for this workflow."""
  if not self._pid_file_path:
- self._pid_file_path = os.path.join(self.workflow_namespace.path, f"{self.workflow_name}.pid")
+ self._pid_file_path = os.path.join(self.workflow_namespace.path, self.pid_file_name(self.workflow_name))
  return self._pid_file_path

+ def pid_file_name(self, workflow_name: str):
+ return f"{workflow_name}{self.pid_file_extension}"
+
  def _write_pid(self, pid: int):
  """Write the process PID to the PID file."""
  os.makedirs(os.path.dirname(self.pid_file_path), exist_ok=True)
  with open(self.pid_file_path, 'w') as f:
  f.write(str(pid))

- def _read_pid(self) -> Optional[int]:
+ def _read_pid(self, file_path = None) -> Optional[int]:
  """Read the process PID from the PID file."""
- if not os.path.exists(self.pid_file_path):
+ file_path = file_path or self.pid_file_path
+ if not os.path.exists(file_path):
  return None

  try:
- with open(self.pid_file_path, 'r') as f:
+ with open(file_path, 'r') as f:
  return int(f.read().strip())
  except (ValueError, IOError):
  return None
@@ -130,9 +139,11 @@ class LocalWorkflowRunCommand(WorkflowRunCommand):
  """Start the workflow using local stoobly-agent run."""
  detached = options.get('detached', False)

- commands = self.workflow_service_commands(**options)
+ self.__iterate_active_workflows(handle_active=self.__handle_up_active, handle_stale=self.__handle_up_stale)

  # iterate through each service in the workflow
+ commands = self.workflow_service_commands(**options)
+
  public_directory_paths = []
  response_fixtures_paths = []
  for command in commands:
@@ -145,18 +156,7 @@ class LocalWorkflowRunCommand(WorkflowRunCommand):
  if os.path.exists(command.response_fixtures_path):
  response_fixtures_paths.append('--response-fixtures-path')
  response_fixtures_paths.append(f"{command.response_fixtures_path}:{url}")
-
- # Check if PID file already exists
- if os.path.exists(self.pid_file_path):
- pid = self._read_pid()
- if pid and self._is_process_running(pid):
- Logger.instance(LOG_ID).error(f"Workflow {self.workflow_name} is already running with PID: {pid}")
- Logger.instance(LOG_ID).error(f"Run `stoobly-agent scaffold workflow down {self.workflow_name}` to stop it first")
- sys.exit(1)
- else:
- # PID file exists but process is not running, clean it up
- os.remove(self.pid_file_path)
-
+
  for command in commands:
  command.service_up(**options)
@@ -166,17 +166,9 @@ class LocalWorkflowRunCommand(WorkflowRunCommand):

  def down(self, **options: WorkflowDownOptions):
  """Stop the workflow by killing the local process."""
-
- pid = self._read_pid()
+
+ pid = self.__find_and_verify_workflow_pid()
  if not pid:
- Logger.instance(LOG_ID).warning(f"No PID file found for {self.workflow_name}")
- return
-
- if not self._is_process_running(pid):
- Logger.instance(LOG_ID).info(f"Process {pid} for {self.workflow_name} is not running")
- # Clean up PID file
- if os.path.exists(self.pid_file_path):
- os.remove(self.pid_file_path)
  return

  # Kill the process
@@ -222,10 +214,8 @@ class LocalWorkflowRunCommand(WorkflowRunCommand):
  """Show logs for the local workflow process."""
  follow = options.get('follow', False)

- pid = self._read_pid()
- if not pid:
- Logger.instance(LOG_ID).warning(f"No PID file found for {self.workflow_name}")
- return
+ # Find and verify the workflow PID
+ self.__find_and_verify_workflow_pid()

  # Build log command
  log_file = f"{self.log_file_path}"
@@ -243,17 +233,6 @@ class LocalWorkflowRunCommand(WorkflowRunCommand):
  except subprocess.CalledProcessError as e:
  Logger.instance(LOG_ID).error(f"Failed to show logs for {self.workflow_name}: {e}")

- def status(self):
- """Check the status of the local workflow process."""
- pid = self._read_pid()
- if not pid:
- return "not running"
-
- if self._is_process_running(pid):
- return f"running (PID: {pid})"
- else:
- return "not running (stale PID file)"
-
  def workflow_service_commands(self, **options: WorkflowUpOptions):
  commands = list(map(lambda service_name: LocalWorkflowRunCommand(self.app, service_name=service_name, **options), self.services))
  commands.sort(key=lambda command: command.service_config.priority)
@@ -273,6 +252,79 @@ class LocalWorkflowRunCommand(WorkflowRunCommand):
  else:
  print(f"cat {log_file}", file=output_file)

+ def __find_and_verify_workflow_pid(self):
+ pid = self._read_pid()
+ if not pid:
+ Logger.instance(LOG_ID).error(f"Workflow {self.workflow_name} is not running.")
+
+ # If the workflow name does not match the workflow namespace, then recommend with --namespace option
+ if self.workflow_name != self.workflow_namespace.namespace:
+ Logger.instance(LOG_ID).error(f"Run `stoobly-agent scaffold workflow up {self.workflow_name} --namespace {self.workflow_namespace.namespace}` to start it first.")
+ else:
+ Logger.instance(LOG_ID).error(f"Run `stoobly-agent scaffold workflow up {self.workflow_name}` to start it first.")
+
+ sys.exit(1)
+
+ if not self._is_process_running(pid):
+ Logger.instance(LOG_ID).info(f"Process {pid} for {self.workflow_name} is not running")
+ # Clean up PID file
+ if os.path.exists(self.pid_file_path):
+ os.remove(self.pid_file_path)
+ return
+
+ return pid
+
+ def __handle_up_active(self, folder: str, pid: str, pid_file_path: str):
+ # Allow re-running the same workflow, bring workflow down first
+ if pid_file_path == self.pid_file_path and os.path.exists(pid_file_path):
+ self.down()
+ else:
+ file_name = os.path.basename(pid_file_path)
+ workflow_name = self.workflow_name
+ if folder != self.workflow_name:
+ workflow_name = file_name.split(self.pid_file_extension)[0]
+
+ Logger.instance(LOG_ID).error(f"Workflow {workflow_name} is already running with PID {pid}")
+
+ if folder != workflow_name:
+ Logger.instance(LOG_ID).error(f"Run `stoobly-agent scaffold workflow down {workflow_name} --namespace {folder}` to stop it first.")
+ else:
+ Logger.instance(LOG_ID).error(f"Run `stoobly-agent scaffold workflow down {workflow_name}` to stop it first.")
+
+ sys.exit(1)
+
+ def __handle_up_stale(self, folder: str, pid: str, pid_file_path: str):
+ # PID file exists but process is not running, clean it up
+ os.remove(pid_file_path)
+
+ def __iterate_active_workflows(self, **kwargs):
+ handle_active: FunctionType = kwargs.get('handle_active')
+ handle_stale: FunctionType = kwargs.get('handle_stale')
+ tmp_dir_path = self.app.data_dir.tmp_dir_path
+
+ # For each folder in self.app.data_dir.tmp_dir_path
+ for folder in os.listdir(tmp_dir_path):
+ folder_path = os.path.join(tmp_dir_path, folder)
+
+ # If the folder is not a directory, skip
+ if not os.path.isdir(folder_path):
+ continue
+
+ # For each file in folder_path that ends with .pid
+ for file in os.listdir(folder_path):
+ if not file.endswith(self.pid_file_extension):
+ continue
+
+ # If the folder contains a .pid file, then another workflow is running
+ pid_file_path = os.path.join(folder_path, file)
+ pid = self._read_pid(pid_file_path)
+ if pid and self._is_process_running(pid):
+ if handle_active:
+ handle_active(folder, pid, pid_file_path)
+ else:
+ if handle_stale:
+ handle_stale(folder, pid, pid_file_path)
+
  def __up_command(self, public_directory_paths: List[str], response_fixtures_paths: List[str], **options: WorkflowUpOptions):
  # Build the stoobly-agent run command
  command = ['stoobly-agent', 'run']
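The local `__iterate_active_workflows` scan relies on `_is_process_running` to distinguish live workflows from stale PID files; that helper's body is not part of this diff. A common way to implement such a check, shown here for illustration only, is signal 0 via `os.kill`:

```python
import os

def is_process_running(pid: int) -> bool:
    """Hypothetical liveness check; the real _is_process_running is not shown in this diff."""
    try:
        os.kill(pid, 0)  # signal 0 performs error checking without sending a signal
    except ProcessLookupError:
        return False
    except PermissionError:
        return True  # the process exists but belongs to another user
    return True
```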
stoobly_agent/app/cli/scaffold/service_dependency.py CHANGED
@@ -37,7 +37,7 @@ class DockerImage:
  (?::(?P<tag>[^@]+))? # optional :tag
  (?:@(?P<digest>.+))? # optional @digest
  """
- match = re.match(pattern, ref, re.VERBOSE)
+ match = re.fullmatch(pattern, ref, re.VERBOSE)
  if not match:
  raise ValueError(f"Invalid Docker image reference: {ref}")
stoobly_agent/app/cli/scaffold/service_workflow_validate_command.py CHANGED
@@ -201,7 +201,12 @@ class ServiceWorkflowValidateCommand(ServiceCommand, ValidateCommand):
  # Test workflow won't expose services that are detached and have a hostname to the host such as assets.
  # Need to test connection from inside the Docker network
  if self.service_config.hostname and self.workflow_name == WORKFLOW_TEST_TYPE:
- self.validate_internal_hostname(url)
+ try:
+ self.validate_internal_hostname(url)
+ except ScaffoldValidateException:
+ time.sleep(1)
+ # Retry once
+ self.validate_internal_hostname(url)

  self.validate_init_containers(self.service_docker_compose.init_container_name, self.service_docker_compose.configure_container_name)
stoobly_agent/app/cli/scaffold/templates/app/.docker-compose.base.yml CHANGED
@@ -4,6 +4,6 @@ services:
  service: stoobly_base
  volumes:
  - ${CONTEXT_DIR}/.stoobly:/home/stoobly/.stoobly
- - ${APP_DIR}/.stoobly/docker:/home/stoobly/.stoobly/docker
+ - ${APP_DIR}/.stoobly/services:/home/stoobly/.stoobly/services
  stoobly_base:
  image: stoobly.${USER_ID}
stoobly_agent/app/proxy/mitmproxy/request_facade.py CHANGED
@@ -170,7 +170,7 @@ class MitmproxyRequestFacade(Request):
  pattern = rewrite_rule.pattern

  try:
- url_matches = re.match(pattern, self.url)
+ url_matches = re.fullmatch(pattern, self.url)
  except re.error as e:
  Logger.instance().error(f"RegExp error '{e}' for {pattern}")
  return False
stoobly_agent/app/proxy/mock/eval_fixtures_service.py CHANGED
@@ -58,12 +58,9 @@ def eval_fixtures(request: MitmproxyRequest, **options: MockOptions) -> Union[Re
  continue

  # Try to find the file in this directory
- _fixture_path = os.path.join(dir_path_config['path'], request_path.lstrip('/'))
+ fixture_path = os.path.join(dir_path_config['path'], request_path.lstrip('/'))
  if request.headers.get('accept'):
- fixture_path = __guess_file_path(_fixture_path, request.headers['accept'])
-
- if not fixture_path:
- fixture_path = _fixture_path
+ fixture_path = __guess_file_path(fixture_path, request.headers['accept'])

  if os.path.isfile(fixture_path):
  break
@@ -74,15 +71,31 @@ def eval_fixtures(request: MitmproxyRequest, **options: MockOptions) -> Union[Re
  return
  else:
  fixture_path = fixture.get('path')
- if not fixture_path or not os.path.isfile(fixture_path):
+ if not fixture_path:
  return

+ if os.path.isdir(fixture_path):
+ request_path = request.path
+ match = re.match(fixture.get('path_pattern', request_path), request_path)
+
+ if not match or match.end() == len(request_path):
+ sub_path = 'index'
+ else:
+ sub_path = request_path[match.end():]
+
+ fixture_path = os.path.join(fixture_path, sub_path.lstrip('/'))
+ if request.headers.get('accept'):
+ fixture_path = __guess_file_path(fixture_path, request.headers['accept'])
+
+ if not os.path.isfile(fixture_path):
+ return
+
  _headers = fixture.get('headers')
  headers = CaseInsensitiveDict(_headers if isinstance(_headers, dict) else {})

  if fixture.get('status_code'):
  status_code = fixture.get('status_code')
-
+
  with open(fixture_path, 'rb') as fp:
  response = Response()
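When a fixture entry points at a directory, the matched portion of its `path_pattern` is stripped from the request path and the remainder is looked up inside that directory, falling back to an `index` file. A minimal sketch of that resolution step, using an illustrative pattern and paths rather than values from the codebase:

```python
import os
import re

# Illustrative values only
path_pattern = r"/static"
fixture_dir = "/fixtures/static"

def resolve(request_path: str) -> str:
    match = re.match(path_pattern, request_path)
    # No match, or the pattern consumed the whole path: fall back to an index file
    if not match or match.end() == len(request_path):
        sub_path = 'index'
    else:
        sub_path = request_path[match.end():]
    return os.path.join(fixture_dir, sub_path.lstrip('/'))

resolve("/static/css/site.css")  # -> /fixtures/static/css/site.css
resolve("/static")               # -> /fixtures/static/index
```

When an `Accept` header is present, `__guess_file_path` then tries candidate paths derived from the header's content types before checking that the resolved file exists.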
 
@@ -158,13 +171,13 @@ def __guess_content_type(file_path):
  return
  return mimetypes.types_map.get(file_extension)

- def __guess_file_path(file_path, content_type):
+ def __guess_file_path(file_path: str, content_type):
  file_extension = os.path.splitext(file_path)[1]
  if file_extension:
  return file_path

  if not content_type:
- return
+ return file_path

  content_types = __parse_accept_header(content_type)
@@ -178,6 +191,8 @@ def __guess_file_path(file_path, content_type):
  if os.path.isfile(_file_path):
  return _file_path

+ return file_path
+
  def __find_fixture_for_request(request: MitmproxyRequest, fixtures: dict, method: str):
  """Find a fixture for the given request in the provided fixtures."""
  if not fixtures:
@@ -193,7 +208,7 @@ def __find_fixture_in_routes(fixtures: dict, method: str, request_path: str):
  return None

  for path_pattern in routes:
- if not re.match(path_pattern, request_path):
+ if not re.fullmatch(path_pattern, request_path):
  continue

  fixture = routes[path_pattern]
@@ -203,6 +218,7 @@ def __find_fixture_in_routes(fixtures: dict, method: str, request_path: str):
  path = fixture.get('path')

  if path:
+ fixture['path_pattern'] = path_pattern
  return fixture

  return None
@@ -211,6 +227,9 @@ def __choose_highest_priority_content_type(accept_header: str) -> Optional[str]:
  if not accept_header:
  return None

+ if accept_header == '*/*':
+ return 'text/plain'
+
  types = []
  for part in accept_header.split(","):
  media_range = part.strip()
@@ -234,9 +253,13 @@ def __choose_highest_priority_content_type(accept_header: str) -> Optional[str]:
  return types[0][0] if types else None

  def __origin_matches(pattern: str, request_origin: str) -> bool:
- return bool(re.match(pattern, request_origin))
+ return bool(re.fullmatch(pattern, request_origin))

  def __parse_accept_header(accept_header):
+ # In the case accept_header is */*, default to html and json file types
+ if accept_header == '*/*':
+ return ['text/html', 'application/json']
+
  types = []
  for item in accept_header.split(","):
  parts = item.split(";")
stoobly_agent/app/proxy/mock/eval_request_service.py CHANGED
@@ -202,7 +202,7 @@ def __filter_by_match_rules(request: MitmproxyRequest, match_rules: List[MatchRu

  def __matches(url: str, pattern: str):
  try:
- return re.match(pattern, url)
+ return re.fullmatch(pattern, url)
  except re.error as e:
  Logger.instance().error(f"RegExp error '{e}' for {pattern}")
  return False
stoobly_agent/app/proxy/utils/allowed_request_service.py CHANGED
@@ -80,7 +80,7 @@ def __include(request: MitmproxyRequest, patterns: List[str]) -> bool:

  for pattern in patterns:
  try:
- if re.match(pattern, request.url):
+ if re.fullmatch(pattern, request.url):
  return True
  except re.error as e:
  Logger.instance(LOG_ID).error(f"RegExp error '{e}' for {pattern}")
@@ -94,7 +94,7 @@ def __exclude(request: MitmproxyRequest, patterns: List[str]) -> bool:

  for pattern in patterns:
  try:
- if re.match(pattern, request.url):
+ if re.fullmatch(pattern, request.url):
  return True
  except re.error as e:
  Logger.instance(LOG_ID).error(f"RegExp error '{e}' for {pattern}")
stoobly_agent/lib/cache.py CHANGED
@@ -32,7 +32,7 @@ class Cache:
  else:
  delete_list = []
  for key in self.data.keys():
- if re.match(pattern, key):
+ if re.fullmatch(pattern, key):
  delete_list.append(key)

  for key in delete_list:
stoobly_agent/test/app/models/schemas/.stoobly/db/VERSION CHANGED
@@ -1 +1 @@
- 1.10.1
+ 1.10.2