machineconfig-5.17-py3-none-any.whl → machineconfig-5.19-py3-none-any.whl

This diff shows the content changes between two publicly released versions of the package, as they appear in their public registry, and is provided for informational purposes only.

Potentially problematic release: this version of machineconfig might be problematic.

Files changed (53)
  1. machineconfig/cluster/sessions_managers/wt_local.py +6 -1
  2. machineconfig/cluster/sessions_managers/wt_local_manager.py +4 -2
  3. machineconfig/cluster/sessions_managers/wt_remote_manager.py +4 -2
  4. machineconfig/cluster/sessions_managers/wt_utils/status_reporter.py +4 -2
  5. machineconfig/cluster/sessions_managers/zellij_local_manager.py +1 -1
  6. machineconfig/cluster/sessions_managers/zellij_utils/status_reporter.py +3 -1
  7. machineconfig/profile/create.py +108 -140
  8. machineconfig/profile/create_frontend.py +58 -0
  9. machineconfig/profile/shell.py +45 -9
  10. machineconfig/scripts/python/ai/solutions/_shared.py +9 -1
  11. machineconfig/scripts/python/ai/solutions/copilot/instructions/python/dev.instructions.md +1 -1
  12. machineconfig/scripts/python/ai/solutions/generic.py +12 -2
  13. machineconfig/scripts/python/count_lines_frontend.py +1 -1
  14. machineconfig/scripts/python/devops.py +90 -54
  15. machineconfig/scripts/python/dotfile.py +14 -8
  16. machineconfig/scripts/python/interactive.py +3 -21
  17. machineconfig/scripts/python/share_terminal.py +1 -1
  18. machineconfig/setup_linux/__init__.py +11 -0
  19. machineconfig/setup_linux/{openssh_all.sh → ssh/openssh_all.sh} +1 -0
  20. machineconfig/setup_linux/web_shortcuts/interactive.sh +1 -1
  21. machineconfig/setup_windows/__init__.py +12 -0
  22. machineconfig/setup_windows/apps.ps1 +1 -0
  23. machineconfig/setup_windows/{openssh_all.ps1 → ssh/openssh_all.ps1} +5 -5
  24. machineconfig/setup_windows/web_shortcuts/interactive.ps1 +1 -1
  25. machineconfig/utils/code.py +7 -4
  26. machineconfig/utils/files/dbms.py +355 -0
  27. machineconfig/utils/files/read.py +2 -2
  28. machineconfig/utils/installer.py +5 -5
  29. machineconfig/utils/links.py +128 -104
  30. machineconfig/utils/procs.py +4 -4
  31. machineconfig/utils/scheduler.py +10 -14
  32. {machineconfig-5.17.dist-info → machineconfig-5.19.dist-info}/METADATA +1 -1
  33. {machineconfig-5.17.dist-info → machineconfig-5.19.dist-info}/RECORD +41 -51
  34. machineconfig/scripts/windows/dotfile.ps1 +0 -1
  35. machineconfig/setup_linux/others/openssh-server_add_pub_key.sh +0 -57
  36. machineconfig/setup_linux/web_shortcuts/croshell.sh +0 -11
  37. machineconfig/setup_linux/web_shortcuts/ssh.sh +0 -52
  38. machineconfig/setup_windows/symlinks.ps1 +0 -5
  39. machineconfig/setup_windows/symlinks2linux.ps1 +0 -1
  40. machineconfig/setup_windows/web_shortcuts/all.ps1 +0 -18
  41. machineconfig/setup_windows/web_shortcuts/ascii_art.ps1 +0 -36
  42. machineconfig/setup_windows/web_shortcuts/croshell.ps1 +0 -16
  43. machineconfig/setup_windows/web_shortcuts/ssh.ps1 +0 -11
  44. machineconfig/setup_windows/wsl_refresh.ps1 +0 -8
  45. machineconfig/setup_windows/wt_and_pwsh.ps1 +0 -9
  46. /machineconfig/setup_linux/{openssh_wsl.sh → ssh/openssh_wsl.sh} +0 -0
  47. /machineconfig/setup_windows/{quirks.ps1 → others/power_options.ps1} +0 -0
  48. /machineconfig/setup_windows/{openssh-server.ps1 → ssh/openssh-server.ps1} +0 -0
  49. /machineconfig/setup_windows/{openssh-server_add-sshkey.ps1 → ssh/openssh-server_add-sshkey.ps1} +0 -0
  50. /machineconfig/setup_windows/{openssh-server_add_identity.ps1 → ssh/openssh-server_add_identity.ps1} +0 -0
  51. {machineconfig-5.17.dist-info → machineconfig-5.19.dist-info}/WHEEL +0 -0
  52. {machineconfig-5.17.dist-info → machineconfig-5.19.dist-info}/entry_points.txt +0 -0
  53. {machineconfig-5.17.dist-info → machineconfig-5.19.dist-info}/top_level.txt +0 -0
machineconfig/utils/files/dbms.py (new file)
@@ -0,0 +1,355 @@
+import time
+from typing import Optional, Any, Callable
+
+import polars as pl
+
+from sqlalchemy.orm import sessionmaker, Session
+from sqlalchemy import text, inspect as inspect__
+from sqlalchemy.engine import Engine, Connection, create_engine
+from sqlalchemy.ext.asyncio import create_async_engine
+from sqlalchemy.engine import Inspector
+from sqlalchemy.sql.schema import MetaData
+from pathlib import Path as P
+
+OPLike = Optional[P] | str | None
+
+
+class DBMS:
+    def __init__(self, engine: Engine, sch: Optional[str] = None, vws: bool = False):
+        self.eng: Engine = engine
+        self.con: Optional[Connection] = None
+        self.ses: Optional[Session] = None
+        self.insp: Optional[Inspector] = None
+        self.meta: Optional[MetaData] = None
+        db_path = P(self.eng.url.database) if self.eng.url.database else None
+        if db_path and db_path.exists():
+            self.path: Optional[P] = db_path
+        else: self.path = None
+
+        # self.db = db
+        self.sch = sch
+        self.vws: bool = vws
+        self.schema: list[str] = []
+
+        self.sch_tab: dict[str, list[str]]
+        self.sch_vws: dict[str, list[str]]
+        self.description: Optional[pl.DataFrame] = None
+        # self.tables = None
+        # self.views = None
+        # self.sch_tab: Optional[Struct] = None
+        # self.sch_vws: Optional[Struct] = None
+        # if inspect: self.refresh()
+        # self.ip_formatter: Optional[Any] = None
+        # self.db_specs: Optional[Any] = None
+        if self.path is not None:
+            if self.path.is_file():
+                path_repr = self.path.as_uri()
+            else:
+                path_repr = self.path
+            print(f"Database at {path_repr} is ready.")
+
+    def refresh(self, sch: Optional[str] = None) -> 'DBMS':  # fails if multiple schemas are there and None is specified
+        self.con = self.eng.connect()
+        self.ses = sessionmaker()(bind=self.eng)  # ORM style
+        self.meta = MetaData()
+        self.meta.reflect(bind=self.eng, schema=sch or self.sch)
+        insp = inspect__(subject=self.eng)
+        self.insp = insp
+        assert self.insp is not None
+        self.schema = self.insp.get_schema_names()
+        print(f"Inspecting tables of schema `{self.schema}` {self.eng}")
+        self.sch_tab = {k: v for k, v in zip(self.schema, [insp.get_table_names(schema=x) for x in self.schema])}  # dict(zip(self.schema, self.schema.apply(lambda x: self.insp.get_table_names(schema=x)))) #
+        print(f"Inspecting views of schema `{self.schema}` {self.eng}")
+        self.sch_vws = {k: v for k, v in zip(self.schema, [insp.get_view_names(schema=x) for x in self.schema])}
+        return self
+
+    @classmethod
+    def from_local_db(cls, path: OPLike = None, echo: bool = False, share_across_threads: bool = False, pool_size: int = 5, **kwargs: Any):
+        return cls(engine=cls.make_sql_engine(path=path, echo=echo, share_across_threads=share_across_threads, pool_size=pool_size, **kwargs))
+
+    def __repr__(self): return f"DataBase @ {self.eng}"
+    def get_columns(self, table: str, sch: Optional[str] = None):
+        assert self.meta is not None
+        return self.meta.tables[self._get_table_identifier(table=table, sch=sch)].exported_columns.keys()
+    def close(self, sleep: int = 2):
+        if self.path:
+            print(f"Terminating database `{self.path.as_uri() if self.path.is_file() and 'memory' not in str(self.path) else self.path}`")
+        if self.con: self.con.close()
+        if self.ses: self.ses.close()
+        self.eng.pool.dispose()
+        self.eng.dispose()
+        time.sleep(sleep)
+    def _get_table_identifier(self, table: str, sch: Optional[str]):
+        if sch is None: sch = self.sch
+        if sch is not None:
+            # Handle DuckDB schema names that contain dots (e.g., "klines.main")
+            if self.eng.url.drivername == 'duckdb' and '.' in sch and sch.endswith('.main'):
+                # For DuckDB schemas like "klines.main", just use the table name without schema
+                return f'"{table}"'
+            else:
+                return f'"{sch}"."{table}"'
+        else:
+            return f'"{table}"'
+
+    @staticmethod
+    def make_sql_engine(path: OPLike = None, echo: bool = False, dialect: str = "sqlite", driver: str = ["pysqlite", "DBAPI"][0], pool_size: int = 5, share_across_threads: bool = True, **kwargs: Any):
+        """Establish lazy initialization with database"""
+        from sqlalchemy.pool import StaticPool, NullPool
+        _ = NullPool
+        _ = driver
+        if str(path) == "memory":
+            print("Linking to in-memory database.")
+            if share_across_threads:
+                # see: https://docs.sqlalchemy.org/en/14/dialects/sqlite.html#using-a-memory-database-in-multiple-threads
+                return create_engine(url=f"{dialect}+{driver}:///:memory:", echo=echo, future=True, poolclass=StaticPool, connect_args={"check_same_thread": False})
+            else:
+                return create_engine(url=f"{dialect}+{driver}:///:memory:", echo=echo, future=True, pool_size=pool_size, **kwargs)
+        if path is None:
+            tmp_dir = P.home().joinpath(".tmp").joinpath("tmp_dbs")
+            tmp_dir.mkdir(parents=True, exist_ok=True)
+            import tempfile
+            with tempfile.NamedTemporaryFile(suffix=".sqlite", dir=str(tmp_dir), delete=False) as tmp_file:
+                path = P(tmp_file.name)
+        else:
+            path = P(path).expanduser().resolve()
+            path.parent.mkdir(parents=True, exist_ok=True)
+        path_repr = path.as_uri() if path.is_file() else path
+        dialect = path.suffix.removeprefix('.')
+        print(f"Linking to database at {path_repr}")
+        connect_args = kwargs.pop("connect_args", {}) or {}
+        try:
+            if path.suffix == ".duckdb":  # only apply for duckdb files
+                # don't overwrite user's explicit setting if already provided
+                connect_args.setdefault("read_only", True)
+                print(" - Opening DuckDB in read-only mode.")
+        except Exception:
+            pass
+        if pool_size == 0:
+            res = create_engine(url=f"{dialect}:///{path}", echo=echo, future=True, poolclass=NullPool, connect_args=connect_args, **kwargs)  # echo flag is just a short for the more formal way of logging sql commands.
+        else:
+            res = create_engine(url=f"{dialect}:///{path}", echo=echo, future=True, pool_size=pool_size, connect_args=connect_args, **kwargs)  # echo flag is just a short for the more formal way of logging sql commands.
+        return res
+    @staticmethod
+    def make_sql_async_engine(path: OPLike = None, echo: bool = False, dialect: str = "sqlite", driver: str = "aiosqlite", pool_size: int = 5, share_across_threads: bool = True, **kwargs: Any):
+        """Establish lazy initialization with database"""
+        from sqlalchemy.pool import StaticPool, NullPool
+        _ = NullPool
+        if str(path) == "memory":
+            print("Linking to in-memory database.")
+            if share_across_threads:
+                # see: https://docs.sqlalchemy.org/en/14/dialects/sqlite.html#using-a-memory-database-in-multiple-threads
+                return create_async_engine(url=f"{dialect}+{driver}://", echo=echo, future=True, poolclass=StaticPool, connect_args={"mode": "memory", "cache": "shared"})
+            else:
+                return create_async_engine(url=f"{dialect}+{driver}:///:memory:", echo=echo, future=True, pool_size=pool_size, **kwargs)
+        if path is None:
+            tmp_dir = P.home().joinpath(".tmp").joinpath("tmp_dbs")
+            tmp_dir.mkdir(parents=True, exist_ok=True)
+            import tempfile
+            with tempfile.NamedTemporaryFile(suffix=".sqlite", dir=str(tmp_dir), delete=False) as tmp_file:
+                path = P(tmp_file.name)
+        else:
+            path = P(path).expanduser().resolve()
+            path.parent.mkdir(parents=True, exist_ok=True)
+        path_repr = path.as_uri() if path.is_file() else path
+        dialect = path.suffix.removeprefix('.')
+        print(f"Linking to database at {path_repr}")
+        # Add DuckDB-specific read-only flag automatically when pointing to an existing .duckdb file
+        connect_args = kwargs.pop("connect_args", {}) or {}
+        try:
+            if path.suffix == ".duckdb":  # only apply for duckdb files
+                # don't overwrite user's explicit setting if already provided
+                connect_args.setdefault("read_only", True)
+                print(" - Opening DuckDB in read-only mode.")
+        except Exception:
+            pass
+        if pool_size == 0:
+            res = create_async_engine(url=f"{dialect}+{driver}:///{path}", echo=echo, future=True, poolclass=NullPool, connect_args=connect_args, **kwargs)  # echo flag is just a short for the more formal way of logging sql commands.
+        else:
+            res = create_async_engine(url=f"{dialect}+{driver}:///{path}", echo=echo, future=True, pool_size=pool_size, connect_args=connect_args, **kwargs)  # echo flag is just a short for the more formal way of logging sql commands.
+        return res
+
+    # ==================== QUERIES =====================================
+    def execute_as_you_go(self, *commands: str, res_func: Callable[[Any], Any] = lambda x: x.all(), df: bool = False):
+        with self.eng.begin() as conn:
+            result = None
+            for command in commands:
+                result = conn.execute(text(command))
+            # conn.commit()  # if driver is sqlite3, the connection is autocommitting. # this commit is only needed in case of DBAPI driver.
+            return res_func(result) if not df else pl.DataFrame(res_func(result))
+
+    def execute_begin_once(self, command: str, res_func: Callable[[Any], Any] = lambda x: x.all(), df: bool = False):
+        with self.eng.begin() as conn:
+            result = conn.execute(text(command))  # no need for commit regardless of driver
+            result = res_func(result)
+        return result if not df else pl.DataFrame(result)
+
+    def execute(self, command: str):
+        with self.eng.begin() as conn:
+            result = conn.execute(text(command))
+            # conn.commit()
+        return result
+
+    # def execute_script(self, command: str, df: bool = False):
+    #     with self.eng.begin() as conn: result = conn.executescript(text(command))
+    #     return result if not df else pl.DataFrame(result)
+
+    # ========================== TABLES =====================================
+    def read_table(self, table: Optional[str] = None, sch: Optional[str] = None, size: int = 5):
+        if sch is None:
+            # First try to find schemas that have tables (excluding system schemas)
+            schemas_with_tables = []
+            for schema_name in self.schema:
+                if schema_name not in ["information_schema", "pg_catalog", "system"]:
+                    if schema_name in self.sch_tab and len(self.sch_tab[schema_name]) > 0:
+                        schemas_with_tables.append(schema_name)
+
+            if len(schemas_with_tables) == 0:
+                raise ValueError(f"No schemas with tables found. Available schemas: {self.schema}")
+
+            # Prefer non-"main" schemas if available, otherwise use main
+            if len(schemas_with_tables) > 1 and "main" in schemas_with_tables:
+                sch = [s for s in schemas_with_tables if s != "main"][0]
+            else:
+                sch = schemas_with_tables[0]
+            print(f"Auto-selected schema: `{sch}` from available schemas: {schemas_with_tables}")
+
+        if table is None:
+            if sch not in self.sch_tab:
+                raise ValueError(f"Schema `{sch}` not found. Available schemas: {list(self.sch_tab.keys())}")
+            tables = self.sch_tab[sch]
+            assert len(tables) > 0, f"No tables found in schema `{sch}`"
+            import random
+            table = random.choice(tables)
+            print(f"Reading table `{table}` from schema `{sch}`")
+        if self.con:
+            try:
+                res = self.con.execute(text(f'''SELECT * FROM {self._get_table_identifier(table, sch)} '''))
+                return pl.DataFrame(res.fetchmany(size))
+            except Exception:
+                print(f"Error executing query for table `{table}` in schema `{sch}`")
+                print(f"Available schemas and tables: {self.sch_tab}")
+                raise
+
+    def insert_dicts(self, table: str, *mydicts: dict[str, Any]) -> None:
+        cmd = f"""INSERT INTO {table} VALUES """
+        for mydict in mydicts: cmd += f"""({tuple(mydict)}), """
+        self.execute_begin_once(cmd)
+
+    def describe_db(self):
+        self.refresh()
+        assert self.meta is not None
+        res_all = []
+        assert self.ses is not None
+        from rich.progress import Progress
+        with Progress() as progress:
+            task = progress.add_task("Inspecting tables", total=len(self.meta.sorted_tables))
+            for tbl in self.meta.sorted_tables:
+                table = tbl.name
+                if self.sch is not None:
+                    table = f"{self.sch}.{table}"
+                count = self.ses.query(tbl).count()
+                res = dict(table=table, count=count, size_mb=count * len(tbl.exported_columns) * 10 / 1e6,
+                           columns=len(tbl.exported_columns), schema=self.sch)
+                res_all.append(res)
+                progress.update(task, advance=1)
+        self.description = pl.DataFrame(res_all)
+        return self.description
+
+    def describe_table(self, table: str, sch: Optional[str] = None, dtype: bool = True) -> None:
+        print(table.center(100, "="))
+        self.refresh()
+        assert self.meta is not None
+        tbl = self.meta.tables[table]
+        assert self.ses is not None
+        count = self.ses.query(tbl).count()
+        res = dict(name=table, count=count, size_mb=count * len(tbl.exported_columns) * 10 / 1e6)
+        from machineconfig.utils.accessories import pprint
+        pprint(res, title="TABLE DETAILS")
+        dat = self.read_table(table=table, sch=sch, size=2)
+        df = dat  # dat is already a polars DataFrame
+        print("SAMPLE:\n", df)
+        assert self.insp is not None
+        if dtype: print("\nDETAILED COLUMNS:\n", pl.DataFrame(self.insp.get_columns(table)))
+        print("\n" * 3)
+
+
+DB_TMP_PATH = P.home().joinpath(".tmp").joinpath("tmp_dbs").joinpath("results").joinpath("data.sqlite")
+
+
+def to_db(table: str, idx: int, idx_max: int, data: Any):
+    import pickle
+    DB_TMP_PATH.parent.mkdir(parents=True, exist_ok=True)
+    db = DBMS.from_local_db(DB_TMP_PATH)
+    time_now = time.time_ns()
+    data_blob = pickle.dumps(data)
+    create_table = f"""CREATE TABLE IF NOT EXISTS "{table}" (time INT PRIMARY KEY, idx INT, idx_max INT, data BLOB)"""
+    insert_row = f"""INSERT INTO "{table}" (time, idx, idx_max, data) VALUES (:time, :idx, :idx_max, :data)"""
+    with db.eng.begin() as conn:
+        conn.execute(text(create_table))
+        conn.execute(
+            text(insert_row),
+            {'time': time_now, 'idx': idx, 'idx_max': idx_max, 'data': data_blob}
+        )
+        # conn.commit()
+    db.close()
+
+
+def from_db(table: str):
+    import pickle
+    DB_TMP_PATH.parent.mkdir(parents=True, exist_ok=True)
+    db = DBMS.from_local_db(DB_TMP_PATH)
+    with db.eng.connect() as conn:
+        res = conn.execute(text(f"""SELECT * FROM "{table}" """))
+        records = res.fetchall()
+    df = pl.DataFrame(records, schema=['time', 'idx', 'idx_max', 'data'])
+    df = df.with_columns(pl.col('data').map_elements(pickle.loads))
+    return df
+
+
+def get_table_specs(engine: Engine, table_name: str) -> pl.DataFrame:
+    inspector = inspect__(engine)
+    # Collect table information
+    columns_info = [{
+        'name': col['name'],
+        'type': str(col['type']),
+        'nullable': col['nullable'],
+        'default': col['default'],
+        'autoincrement': col.get('autoincrement'),
+        'category': 'column'
+    } for col in inspector.get_columns(table_name)]
+    # Primary keys
+    pk_info = [{
+        'name': pk,
+        'type': None,
+        'nullable': False,
+        'default': None,
+        'autoincrement': None,
+        'category': 'primary_key'
+    } for pk in inspector.get_pk_constraint(table_name)['constrained_columns']]
+    # Foreign keys
+    fk_info = [{
+        'name': fk['constrained_columns'][0],
+        'type': f"FK -> {fk['referred_table']}.{fk['referred_columns'][0]}",
+        'nullable': None,
+        'default': None,
+        'autoincrement': None,
+        'category': 'foreign_key'
+    } for fk in inspector.get_foreign_keys(table_name)]
+    # Indexes
+    index_info = [{
+        'name': idx['name'],
+        'type': f"Index on {', '.join(col for col in idx['column_names'] if col)}",
+        'nullable': None,
+        'default': None,
+        'autoincrement': None,
+        'category': 'index',
+        'unique': idx['unique']
+    } for idx in inspector.get_indexes(table_name)]
+    # Combine all information
+    all_info = columns_info + pk_info + fk_info + index_info
+    # Convert to DataFrame
+    df = pl.DataFrame(all_info)
+    return df
+
+if __name__ == '__main__':
+    pass
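
The bulk of this release is the new `DBMS` wrapper above, which pairs a SQLAlchemy engine with polars DataFrames for schema inspection and sampling. A minimal usage sketch, not part of the diff, assuming a local SQLite file at a hypothetical path:

from machineconfig.utils.files.dbms import DBMS

# Hypothetical file; make_sql_engine derives the dialect from the suffix (".sqlite" -> "sqlite").
db = DBMS.from_local_db(path="~/data/example.sqlite")
db.refresh()                    # connect, reflect metadata, populate sch_tab / sch_vws
print(db.sch_tab)               # {schema name: [table names]}
sample = db.read_table(size=5)  # picks a table at random, returns 5 rows as a polars DataFrame
print(sample)
db.close()                      # closes connection/session and disposes the engine pool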
machineconfig/utils/files/read.py
@@ -63,8 +63,8 @@ class Read:
         return res
     @staticmethod
     def toml(path: 'Path'):
-        import toml
-        return toml.loads(Path(path).read_text(encoding='utf-8'))
+        import tomllib
+        return tomllib.loads(Path(path).read_text(encoding='utf-8'))
     @staticmethod
     def npy(path: 'Path', **kwargs: Any):
         import numpy as np
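
The `Read.toml` change swaps the third-party `toml` package for the standard library's `tomllib` (available since Python 3.11), dropping a dependency. `tomllib` is parse-only, which suffices here since `Read.toml` never writes. A small sketch of the API, assuming Python 3.11+:

import tomllib  # stdlib since 3.11; parsing only, no dumps() counterpart

doc = tomllib.loads('title = "example"\n[owner]\nname = "someone"')
assert doc["owner"]["name"] == "someone"
# tomllib.load(fp) also exists but requires a binary file handle ("rb"),
# which is presumably why the method feeds read_text() output to loads().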
machineconfig/utils/installer.py
@@ -65,12 +65,12 @@ def check_latest():
 
     # Print each group
     for status, items in grouped_data.items():
-        print(f"\n{status.upper()}:")
-        print("-" * 60)
+        console.print(f"\n[bold]{status.upper()}:[/bold]")
+        console.rule(style="dim")
         for item in items:
-            print(f" {item['Tool']:<20} | Current: {item['Current Version']:<15} | New: {item['New Version']}")
-        print("-" * 60)
-    print(f"{'═' * 80}")
+            console.print(f" {item['Tool']:<20} | Current: {item['Current Version']:<15} | New: {item['New Version']}")
+        console.rule(style="dim")
+    console.rule(style="bold blue")
 
 
 def get_installed_cli_apps():
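
The `check_latest` hunk replaces hand-drawn separator lines with rich's `Console`: markup like `[bold]...[/bold]` styles text, and `console.rule()` draws a full-width horizontal rule. A standalone sketch of the same pattern (the tool names and versions are made up, and the surrounding file presumably defines `console` at module level):

from rich.console import Console

console = Console()
console.print("[bold]UPDATE AVAILABLE:[/bold]")  # styled heading instead of a bare print
console.rule(style="dim")                        # replaces print("-" * 60)
console.print(f" {'lazygit':<20} | Current: {'0.41.0':<15} | New: 0.42.0")
console.rule(style="bold blue")                  # replaces print('═' * 80)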