pipeline-eds 0.2.10__tar.gz → 0.2.13__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/PKG-INFO +2 -2
  2. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/pyproject.toml +4 -3
  3. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/src/pipeline/api/eds.py +2 -7
  4. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/src/pipeline/cli.py +47 -34
  5. pipeline_eds-0.2.13/src/pipeline/install_appdata.py +84 -0
  6. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/src/pipeline/workspace_manager.py +25 -0
  7. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/LICENSE +0 -0
  8. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/README.md +0 -0
  9. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/src/pipeline/__init__.py +0 -0
  10. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/src/pipeline/__main__.py +0 -0
  11. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/src/pipeline/api/__init__.py +0 -0
  12. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/src/pipeline/api/rjn.py +0 -0
  13. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/src/pipeline/api/status_api.py +0 -0
  14. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/src/pipeline/calls.py +0 -0
  15. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/src/pipeline/configrationmanager.py +0 -0
  16. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/src/pipeline/decorators.py +0 -0
  17. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/src/pipeline/env.py +0 -0
  18. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/src/pipeline/environment.py +0 -0
  19. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/src/pipeline/gui_fastapi_plotly_live.py +0 -0
  20. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/src/pipeline/gui_mpl_live.py +0 -0
  21. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/src/pipeline/helpers.py +0 -0
  22. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/src/pipeline/logging_setup.py +0 -0
  23. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/src/pipeline/pastehelpers.py +0 -0
  24. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/src/pipeline/philosophy.py +0 -0
  25. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/src/pipeline/plotbuffer.py +0 -0
  26. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/src/pipeline/points_loader.py +0 -0
  27. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/src/pipeline/queriesmanager.py +0 -0
  28. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/src/pipeline/time_manager.py +0 -0
  29. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/default-workspace.toml +0 -0
  30. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/__init__.py +0 -0
  31. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/code/__init__.py +0 -0
  32. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/code/aggregator.py +0 -0
  33. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/code/collector.py +0 -0
  34. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/code/sanitizer.py +0 -0
  35. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/code/storage.py +0 -0
  36. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/configurations/config_time.toml +0 -0
  37. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/configurations/configuration.toml +0 -0
  38. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/exports/README.md +0 -0
  39. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/exports/aggregate/README.md +0 -0
  40. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/exports/aggregate/live_data - Copy.csv +0 -0
  41. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/exports/aggregate/live_data_EFF.csv +0 -0
  42. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/exports/aggregate/live_data_INF.csv +0 -0
  43. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/exports/export_eds_points_neo.txt +0 -0
  44. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/exports/manual_data_load_to_postman_wetwell.csv +0 -0
  45. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/exports/manual_data_load_to_postman_wetwell.xlsx +0 -0
  46. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/exports/manual_effluent.csv +0 -0
  47. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/exports/manual_influent.csv +0 -0
  48. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/exports/manual_wetwell.csv +0 -0
  49. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/history/time_sample.txt +0 -0
  50. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/imports/zdMaxson_idcsD321E_sid11003.toml +0 -0
  51. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/imports/zdMaxson_idcsFI8001_sid8528.toml +0 -0
  52. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/imports/zdMaxson_idcsM100FI_sid2308.toml +0 -0
  53. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/imports/zdMaxson_idcsM310LI_sid2382.toml +0 -0
  54. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/queries/default-queries.toml +0 -0
  55. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/queries/points-maxson.csv +0 -0
  56. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/queries/points-stiles.csv +0 -0
  57. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/queries/timestamps_success.json +0 -0
  58. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/scripts/__init__.py +0 -0
  59. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/scripts/daemon_runner.py +0 -0
  60. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/secrets/README.md +0 -0
  61. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_rjn/secrets/secrets-example.yaml +0 -0
  62. {pipeline_eds-0.2.10 → pipeline_eds-0.2.13}/workspaces/eds_to_termux/..txt +0 -0
--- pipeline_eds-0.2.10/PKG-INFO
+++ pipeline_eds-0.2.13/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: pipeline-eds
-Version: 0.2.10
+Version: 0.2.13
 Summary: The official API pipeline library for mulch-based projects. Key target: Emerson Ovation EDS REST API.
 License: BSD-3
 Author: George Clayton Bennett
@@ -13,7 +13,7 @@ Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Provides-Extra: windb
 Requires-Dist: certifi (>=2025.1.31,<2026.0.0)
-Requires-Dist: fastapi (==0.100.0)
+Requires-Dist: fastapi (==0.103.0)
 Requires-Dist: mulch (>=0.2.8,<0.3.0)
 Requires-Dist: mysql-connector-python (>=9.3.0,<10.0.0)
 Requires-Dist: pendulum (>=3.1.0,<4.0.0)
--- pipeline_eds-0.2.10/pyproject.toml
+++ pipeline_eds-0.2.13/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "pipeline-eds"
-version = "0.2.10"
+version = "0.2.13"
 description = "The official API pipeline library for mulch-based projects. Key target: Emerson Ovation EDS REST API."
 authors = ["George Clayton Bennett <george.bennett@memphistn.gov>"]
 license = "BSD-3"
@@ -18,7 +18,8 @@ repository = "https://github.com/city-of-memphis-wastewater/pipeline"
 #collector = "pipeline.collector:main"
 #eds-client = "pipeline.api.eds:main"
 #rjn-client = "pipeline.api.rjn:main"
-pipeline = "pipeline.cli:app" # for installation of pipeline as a cli. Ex: pipeline trend M100FI -s June3 -f June17
+pipeline = "pipeline.cli:app" # for installation of pipeline as a CLI. Ex: pipeline trend M100FI -s June3 -f June17
+eds = "pipeline.cli:app" # alias eds for pipeline CLI
 
 
 [tool.pytest.ini_options]
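
Both console scripts now resolve to the same Typer application, so the new eds alias accepts exactly the same arguments as pipeline. A quick way to confirm this from Python, as a minimal sketch that assumes pipeline-eds and its Typer dependency are installed in the current environment:

    from typer.testing import CliRunner
    from pipeline.cli import app   # the single Typer app behind both the pipeline and eds scripts

    runner = CliRunner()
    result = runner.invoke(app, ["--help"])
    print(result.output)           # same help text regardless of which entry point launched it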
@@ -38,7 +39,7 @@ pyyaml = ">=6.0.2,<7.0.0"
 certifi = ">=2025.1.31,<2026.0.0"
 schedule = ">=1.2.2,<2.0.0"
 #fastapi = ">=0.115.12,<0.116.0"
-fastapi = "0.100.0"
+fastapi = "0.103.0"
 pydantic = "1.10.13"
 urllib3 = "^2.4.0"
 plotly = "^6.2.0"
--- pipeline_eds-0.2.10/src/pipeline/api/eds.py
+++ pipeline_eds-0.2.13/src/pipeline/api/eds.py
@@ -726,13 +726,8 @@ def demo_eds_webplot_point_live():
     gui_fastapi_plotly_live.run_gui(data_buffer)
 
 @log_function_call(level=logging.DEBUG)
-def load_historic_data(queries_manager, workspace_manager, session, iess_list, starttime=None, endtime=None):
-    if starttime is None:
-        # back_to_last_success = True
-        starttime = queries_manager.get_most_recent_successful_timestamp(api_id=zd)
-
-    if endtime is None:
-        endtime = helpers.get_now_time_rounded(workspace_manager)
+def load_historic_data(session, iess_list, starttime, endtime):
+
 
     starttime = TimeManager(starttime).as_unix()
     endtime = TimeManager(endtime).as_unix()
--- pipeline_eds-0.2.10/src/pipeline/cli.py
+++ pipeline_eds-0.2.13/src/pipeline/cli.py
@@ -1,9 +1,4 @@
 '''
-import typer
-from pathlib import Path
-
-app = typer.Typer()
-
 @app.command()
 def list_workspaces(workspaces_dir: Path = Path("workspaces")):
     """List valid mulch workspaces in the given directory."""
@@ -14,37 +9,51 @@ def list_workspaces(workspaces_dir: Path = Path("workspaces")):
         if path.is_dir() and (path / ".mulch").is_dir():
             typer.echo(f"🪴 {path.name}")
 
-@app.command()
-def list_mulch_folders(start: Path = Path(".")):
-    """Recursively find folders containing a .mulch/ directory."""
-    for path in start.rglob(".mulch"):
-        typer.echo(f"📁 {path.parent}")
-
-@app.command()
-def inspect(workspace: Path):
-    """Show scaffold or metadata info from a workspace."""
-    metadata = workspace / ".mulch" / "mulch-scaffold.json"
-    if metadata.exists():
-        typer.echo(f"🔍 {workspace.name}: {metadata}")
-        typer.echo(metadata.read_text())
-    else:
-        typer.echo(f"No scaffold found in {workspace}")
 '''
-# src/pipeline/cli.py
-
 import typer
 import importlib
 from pathlib import Path
+from importlib.metadata import version, PackageNotFoundError
 
 from pipeline.env import SecretConfig
 #from pipeline.helpers import setup_logging
 from pipeline.workspace_manager import WorkspaceManager
 
+### Versioning
+def print_version(value: bool):
+    if value:
+        try:
+            typer.secho(f"mulch {MULCH_VERSION}",fg=typer.colors.GREEN, bold=True)
+        except PackageNotFoundError:
+            typer.echo("Version info not found")
+        raise typer.Exit()
+try:
+    MULCH_VERSION = version("mulch")
+    __version__ = version("mulch")
+except PackageNotFoundError:
+    MULCH_VERSION = "unknown"
+
+try:
+    from importlib.metadata import version
+    __version__ = version("mulch")
+except PackageNotFoundError:
+    # fallback if running from source
+    try:
+        with open(Path(__file__).parent / "VERSION") as f:
+            __version__ = f.read().strip()
+    except FileNotFoundError:
+        __version__ = "dev"
+
+### Pipeline CLI
+
 app = typer.Typer(help="CLI for running pipeline workspaces.")
 
 
 @app.callback(invoke_without_command=True)
-def main(ctx: typer.Context):
+def main(
+    ctx: typer.Context,
+    version: bool = typer.Option(None, "--version", callback=lambda v: print_version(v), is_eager=True, help="Show the version and exit.")
+):
     """
     Pipeline CLI – run workspaces built on the pipeline framework.
     """
@@ -52,6 +61,7 @@ def main(ctx: typer.Context):
         typer.echo(ctx.get_help())
         raise typer.Exit()
 
+
 @app.command()
 def run(
     workspace: str = typer.Option(None, help="Workspace to run"),
@@ -92,7 +102,7 @@ def trend(
     starttime: str = typer.Option(None, "--start", "-s", help="Index from 'mulch order' to choose scaffold source."),
     endtime: str = typer.Option(None, "--end", "-end", help="Reference a known template for workspace organization."),
     zd: str = typer.Option('Maxson', "--zd", "-z", help = "Define the EDS ZD from your secrets file. This must correlate with your idcs point selection(s)."),
-    workspace: str = typer.Option(WorkspaceManager.identify_default_workspace_name(),"--workspace","-w", help = "Provide the name of the workspace you want to use, for the secrets.yaml credentials and for the timezone config. If a start time is not provided, the workspace queries can checked for the most recent successful timestamp. ")
+    workspace: str = typer.Option(None,"--workspace","-w", help = "Provide the name of the workspace you want to use, for the secrets.yaml credentials and for the timezone config. If a start time is not provided, the workspace queries can checked for the most recent successful timestamp. ")
 ):
     """
     Show a curve for a sensor over time.
@@ -101,10 +111,17 @@ def trend(
     import pendulum
     from pipeline.api.eds import EdsClient, load_historic_data
     from pipeline import helpers
-    from pipeline.queriesmanager import QueriesManager
     from pipeline.plotbuffer import PlotBuffer
     from pipeline import gui_fastapi_plotly_live
     from pipeline import environment
+    from pipeline.workspace_manager import WorkspaceManager
+    ws_dir = WorkspaceManager.ensure_workspace()
+
+    # must set up %appdata for pip/x installation. Use mulch or yeoman for this. And have a secrets filler.
+    if workspace is None:
+        WorkspaceManager.identify_default_workspace_name()
+    wm = WorkspaceManager(workspace)
+    secrets_dict = SecretConfig.load_config(secrets_file_path = wm.get_secrets_file_path())
 
     if zd.lower() == "stiles":
         zd = "WWTF"
@@ -118,20 +135,18 @@ def trend(
     idcs_to_iess_suffix = ".UNIT0@NET0"
     iess_list = [x+idcs_to_iess_suffix for x in idcs]
 
-
-    wm = WorkspaceManager(workspace)
-    secrets_dict = SecretConfig.load_config(secrets_file_path = wm.get_secrets_file_path())
-
     base_url = secrets_dict.get("eds_apis", {}).get(zd, {}).get("url").rstrip("/")
     session = EdsClient.login_to_session(api_url = base_url,
                                          username = secrets_dict.get("eds_apis", {}).get(zd, {}).get("username"),
                                          password = secrets_dict.get("eds_apis", {}).get(zd, {}).get("password"))
     session.base_url = base_url
     session.zd = secrets_dict.get("eds_apis", {}).get(zd, {}).get("zd")
-    queries_manager = QueriesManager(wm)
+
 
     if starttime is None:
         # back_to_last_success = True
+        from pipeline.queriesmanager import QueriesManager
+        queries_manager = QueriesManager(wm)
         dt_start = queries_manager.get_most_recent_successful_timestamp(api_id=zd)
     else:
         dt_start = pendulum.parse(starttime, strict=False)
@@ -142,7 +157,7 @@ def trend(
 
     # Should automatically choose time step granularity based on time length; map
 
-    results = load_historic_data(queries_manager, wm, session, iess_list, dt_start, dt_finish)
+    results = load_historic_data(session, iess_list, dt_start, dt_finish)
 
     data_buffer = PlotBuffer()
     for idx, rows in enumerate(results):
@@ -150,7 +165,7 @@ def trend(
             label = f"{row.get('rjn_entityid')} ({row.get('units')})"
             ts = helpers.iso(row.get("ts"))
             av = row.get("value")
-            data_buffer.append(label, ts, av)
+            data_buffer.append(label, ts, av) # needs to be adapted for multiple iess sensor results
 
     if not environment.matplotlib_enabled():
         gui_fastapi_plotly_live.run_gui(data_buffer)
@@ -158,8 +173,6 @@ def trend(
         from pipeline import gui_mpl_live
         gui_mpl_live.run_gui(data_buffer)
 
-
-
 @app.command()
 def list_workspaces():
     """
--- /dev/null
+++ pipeline_eds-0.2.13/src/pipeline/install_appdata.py
@@ -0,0 +1,84 @@
+# install_appdata.py
+import typer
+from pathlib import Path
+import sys
+import os
+import shutil
+
+app = typer.Typer(help="Manage mulch-like pipeline workspace installation")
+
+def setup():
+    platform = sys.platform
+    if platform.startswith("win"):
+        from mulch import reg_winreg
+        # Always build LocalAppData mulch folder first
+
+        # Copy files
+        source_dir = Path(__file__).parent # this is src/mulch/scripts/install
+        target_dir = Path(os.environ['LOCALAPPDATA']) / "pipeline"
+        target_dir.mkdir(parents=True, exist_ok=True)
+
+        copy_mulch_installation_files(source_dir, target_dir)
+
+        # Registry
+        reg_winreg.call()
+        reg_winreg.verify_registry() # deterministic check
+
+        print("Mulch context menu installed successfully.")
+
+    elif platform.startswith("linux"):
+        thunar_action_dir = Path.home() / ".local/share/file-manager/actions"
+        thunar_action_dir.mkdir(parents=True, exist_ok=True)
+
+        menu_items = [
+            ("mulch-workspace.desktop", "mulch workspace"),
+            ("mulch-seed.desktop", "mulch seed"),
+        ]
+
+        for filename, label in menu_items:
+            src = Path(__file__).parent / filename
+            dest = thunar_action_dir / filename
+            if src.exists():
+                # Use copy2 to preserve metadata
+                shutil.copy2(src, dest)
+                os.chmod(dest, 0o755)
+                print(f"Installed `{label}` context menu item to {dest}")
+            else:
+                print(f"Skipping `{label}` context menu installation (no .desktop file found).")
+
+    elif platform == "darwin":
+        print("macOS detected: please implement context menu setup via Automator or Finder Service")
+        # You can extend this with AppleScript or Automator commands here
+    else:
+        raise RuntimeError(f"Unsupported platform for setup: {platform}")
+
+def copy_mulch_installation_files(source_dir, target_dir):
+    required_files = [
+        "call-mulch-workspace.ps1",
+        "mulch-workspace.ps1",
+        "call-mulch-seed.ps1",
+        "mulch-seed.ps1",
+        "mulch-icon.ico",
+        ".mulchvision"
+    ]
+    missing_files = []
+    for f in required_files:
+        src = source_dir / f
+        if src.exists():
+            shutil.copy2(src, target_dir)
+            print(f"Copied {f} to {target_dir}")
+        else:
+            missing_files.append(f)
+
+    if missing_files:
+        raise FileNotFoundError(
+            f"Missing required files in {source_dir}: {', '.join(missing_files)}"
+        )
+
+@app.command()
+def install_appdata():
+    """Install the mulch workspace and mulch seed right-click context menu items."""
+    setup()
+
+if __name__ == "__main__":
+    app()
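
The new install_appdata module carries its own Typer app, and the simplest way to drive it from Python is to call setup() directly. A minimal sketch; it assumes the module ships with the installed wheel and that, on Windows, mulch with its reg_winreg helper plus the listed PowerShell and icon files sit next to the module:

    from pipeline.install_appdata import setup

    # Dispatches on sys.platform: on Windows it copies the helper files into
    # %LOCALAPPDATA%\pipeline and registers the context menu via mulch.reg_winreg;
    # on Linux it installs Thunar .desktop actions. Raises FileNotFoundError if
    # the required helper files are missing next to the module.
    setup()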
--- pipeline_eds-0.2.10/src/pipeline/workspace_manager.py
+++ pipeline_eds-0.2.13/src/pipeline/workspace_manager.py
@@ -26,6 +26,8 @@ class WorkspaceManager:
     SECRETS_YAML_FILE_NAME ='secrets.yaml'
     SECRETS_EXAMPLE_YAML_FILE_NAME ='secrets-example.yaml'
     DEFAULT_WORKSPACE_TOML_FILE_NAME = 'default-workspace.toml'
+    APP_NAME = "pipeline"
+
     TIMESTAMPS_JSON_FILE_NAME = 'timestamps_success.json'
     ROOT_DIR = Path(__file__).resolve().parents[2] # root directory
 
@@ -230,6 +232,29 @@ class WorkspaceManager:
     def name(self):
         return self.workspace_name
 
+    @classmethod
+    def get_appdata_dir(cls) -> Path:
+        """Return platform-appropriate appdata folder."""
+        if os.name == "nt": # Windows
+            base = Path(os.getenv("APPDATA", Path.home() / "AppData" / "Roaming"))
+        elif os.name == "posix" and "ANDROID_ROOT" in os.environ: # Termux
+            base = Path.home() / ".local" / "share"
+        else: # macOS/Linux
+            base = Path(os.getenv("XDG_DATA_HOME", Path.home() / ".local" / "share"))
+        return base / cls.APP_NAME
+
+    @classmethod
+    def ensure_workspace(cls) -> Path:
+        """Create workspace folder and default toml if missing."""
+        workspaces_dir = cls.get_appdata_dir() / "workspaces"
+        workspaces_dir.mkdir(parents=True, exist_ok=True)
+
+        default_file = workspaces_dir / cls.DEFAULT_WORKSPACE_TOML_FILE_NAME
+        if not default_file.exists():
+            default_file.write_text("# Default workspace config\n")
+        return workspaces_dir
+
+
 def establish_default_workspace():
     workspace_name = WorkspaceManager.identify_default_workspace_name()
     logging.info(f"workspace_name = {workspace_name}")
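
The two new classmethods give the installed package a writable data directory outside the source tree. A quick look at what they resolve to; the printed paths are illustrative only, since they depend on the platform and on the APPDATA or XDG_DATA_HOME environment variables:

    from pipeline.workspace_manager import WorkspaceManager

    print(WorkspaceManager.get_appdata_dir())
    # e.g. C:\Users\<user>\AppData\Roaming\pipeline on Windows,
    #      ~/.local/share/pipeline on Linux, Termux, or macOS without XDG_DATA_HOME set

    workspaces_dir = WorkspaceManager.ensure_workspace()
    print(workspaces_dir)   # .../pipeline/workspaces, seeded with a default-workspace.toml stub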