lkr-dev-cli 0.0.24__py3-none-any.whl → 0.0.26__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
lkr/__init__.py CHANGED
@@ -0,0 +1,3 @@
1
+ from lkr.tools.classes import UserAttributeUpdater
2
+
3
+ __all__ = ["UserAttributeUpdater"]
lkr/logger.py CHANGED
@@ -8,6 +8,13 @@ from rich.theme import Theme
8
8
 
9
9
  from lkr.custom_types import LogLevel
10
10
 
11
+ structlog.configure(
12
+ processors=[
13
+ structlog.processors.TimeStamper(fmt="iso"),
14
+ structlog.processors.JSONRenderer(),
15
+ ]
16
+ )
17
+
11
18
  # Define a custom theme for our logging
12
19
  theme = Theme(
13
20
  {
@@ -50,6 +57,7 @@ logging.basicConfig(
50
57
  logger = logging.getLogger("lkr")
51
58
  structured_logger = structlog.get_logger("lkr.structured")
52
59
 
60
+
53
61
  # Configure the requests_transport logger to only show debug messages when LOG_LEVEL is DEBUG
54
62
  requests_logger = logging.getLogger("looker_sdk.rtl.requests_transport")
55
63
  if log_level != "DEBUG":
lkr/main.py CHANGED
@@ -9,6 +9,7 @@ from lkr.custom_types import LogLevel
9
9
  from lkr.logger import logger
10
10
  from lkr.mcp.main import group as mcp_group
11
11
  from lkr.observability.main import group as observability_group
12
+ from lkr.tools.main import group as tools_group
12
13
 
13
14
  app = typer.Typer(
14
15
  name="lkr", help="LookML Repository CLI", add_completion=True, no_args_is_help=True
@@ -17,6 +18,7 @@ app = typer.Typer(
17
18
  app.add_typer(auth_group, name="auth")
18
19
  app.add_typer(mcp_group, name="mcp")
19
20
  app.add_typer(observability_group, name="observability")
21
+ app.add_typer(tools_group, name="tools")
20
22
 
21
23
 
22
24
  @app.callback()
lkr/mcp/classes.py ADDED
@@ -0,0 +1,99 @@
1
+ # server.py
2
+ from datetime import datetime
3
+ from typing import Annotated, Any, List
4
+
5
+ import duckdb
6
+ from pydantic import BaseModel, Field, computed_field
7
+
8
+ from lkr.mcp.utils import get_database_search_file
9
+ from lkr.observability.classes import now
10
+
11
+
12
+ class SpectaclesResponse(BaseModel):
13
+ success: bool
14
+ result: Any | None = None
15
+ error: str | None = None
16
+ sql: str | None = None
17
+ share_url: str | None = None
18
+
19
+
20
+ class SpectaclesRequest(BaseModel):
21
+ model: Annotated[
22
+ str,
23
+ Field(
24
+ description="the model to run a test query against, you can find this by the filenames in the repository, they will end with .model.lkml. You should not pass in the .model.lkml extension.",
25
+ default="",
26
+ ),
27
+ ]
28
+ explore: Annotated[
29
+ str,
30
+ Field(
31
+ description="the explore to run a test query against, you can find this by finding explore: <name> {} in any file in the repository",
32
+ default="",
33
+ ),
34
+ ]
35
+ fields: Annotated[
36
+ List[str],
37
+ Field(
38
+ description="this should be the list of fields you want to return from the test query. If the user does not provide them, use all that have changed in your current context",
39
+ default=[],
40
+ ),
41
+ ]
42
+
43
+
44
+ class Connection(BaseModel):
45
+ connection: str
46
+ updated_at: datetime = Field(default_factory=now)
47
+
48
+ @computed_field(return_type=str)
49
+ @property
50
+ def fully_qualified_name(self) -> str:
51
+ return self.connection
52
+
53
+
54
+ class Database(Connection):
55
+ database: str
56
+
57
+ @computed_field(return_type=str)
58
+ @property
59
+ def fully_qualified_name(self) -> str:
60
+ return f"{self.connection}.{self.database}"
61
+
62
+
63
+ class Schema(Database):
64
+ database_schema_name: str
65
+
66
+ @computed_field(return_type=str)
67
+ @property
68
+ def fully_qualified_name(self) -> str:
69
+ return f"{self.connection}.{self.database}.{self.database_schema_name}"
70
+
71
+
72
+ class Table(Schema):
73
+ database_table_name: str
74
+
75
+ @computed_field(return_type=str)
76
+ @property
77
+ def fully_qualified_name(self) -> str:
78
+ return f"{self.connection}.{self.database}.{self.database_schema_name}.{self.database_table_name}"
79
+
80
+
81
+ class Row(Table):
82
+ database_column_name: str
83
+ data_type_database: str
84
+ data_type_looker: str
85
+
86
+ @computed_field(return_type=str)
87
+ @property
88
+ def fully_qualified_name(self) -> str:
89
+ return f"{self.connection}.{self.database}.{self.database_schema_name}.{self.database_table_name}.{self.database_column_name}"
90
+
91
+ def append(self, base_url: str) -> None:
92
+ with open(get_database_search_file(base_url), "a") as f:
93
+ f.write(self.model_dump_json() + "\n")
94
+
95
+ def exists(self, conn: duckdb.DuckDBPyConnection, *, base_url: str) -> bool:
96
+ columns = conn.execute(
97
+ f"SELECT * FROM read_json_auto('{get_database_search_file(base_url)}') WHERE fully_qualified_name = '{self.fully_qualified_name}'"
98
+ ).fetchall()
99
+ return len(columns) > 0
lkr/mcp/main.py CHANGED
@@ -1,23 +1,37 @@
1
1
  # server.py
2
- import os
3
2
  import threading
4
- from datetime import datetime, timezone
3
+ from datetime import datetime
5
4
  from pathlib import Path
6
- from typing import Annotated, Any, Callable, List, Literal, Self, Set
5
+ from typing import Annotated, List, Literal, Self, Set
7
6
 
8
7
  import duckdb
9
8
  import typer
10
- from fastmcp import FastMCP
11
9
  from looker_sdk.sdk.api40.models import (
12
10
  SqlQueryCreate,
13
11
  WriteQuery,
14
12
  )
15
- from pydantic import BaseModel, Field, computed_field
13
+ from mcp.server.fastmcp import FastMCP
14
+ from pydantic import BaseModel, Field
16
15
  from pydash import get
17
16
 
18
17
  from lkr.auth_service import get_auth
19
18
  from lkr.classes import LkrCtxObj
20
19
  from lkr.logger import logger
20
+ from lkr.mcp.classes import (
21
+ Connection,
22
+ Database,
23
+ Row,
24
+ Schema,
25
+ SpectaclesRequest,
26
+ SpectaclesResponse,
27
+ Table,
28
+ )
29
+ from lkr.mcp.utils import (
30
+ conn_registry_path,
31
+ get_connection_registry_file,
32
+ get_database_search_file,
33
+ ok,
34
+ )
21
35
 
22
36
  __all__ = ["group"]
23
37
 
@@ -28,34 +42,7 @@ ctx_lkr: LkrCtxObj | None = None
28
42
  # Create an MCP server
29
43
  mcp = FastMCP("lkr:mcp")
30
44
 
31
- group = typer.Typer(name="spectacles")
32
-
33
- db_loc: Path | None = None
34
-
35
-
36
- def get_db_loc() -> Path:
37
- global db_loc
38
- if db_loc is None:
39
- db_path = os.path.expanduser("~/.lkr")
40
- db_loc = Path(db_path) / "mcp_search_db"
41
- db_loc.mkdir(exist_ok=True, parents=True)
42
- return db_loc
43
-
44
-
45
- def get_database_search_file(prefix: str = "") -> Path:
46
- p = get_db_loc() / f"{prefix + '.' if prefix else ''}looker_connection_search.jsonl"
47
- if not p.exists():
48
- p.touch()
49
- return p
50
-
51
-
52
- def get_connection_registry_file(
53
- type: Literal["connection", "database", "schema", "table"], prefix: str = ""
54
- ) -> Path:
55
- return (
56
- get_db_loc()
57
- / f"{prefix + '.' if prefix else ''}looker_connection_registry.{type}.jsonl"
58
- )
45
+ group = typer.Typer()
59
46
 
60
47
 
61
48
  # Initialize DuckDB connection
@@ -68,38 +55,6 @@ conn.execute("INSTALL 'json'")
68
55
  conn.execute("LOAD 'json'")
69
56
 
70
57
 
71
- class SpectaclesResponse(BaseModel):
72
- success: bool
73
- result: Any | None = None
74
- error: str | None = None
75
- sql: str | None = None
76
- share_url: str | None = None
77
-
78
-
79
- class SpectaclesRequest(BaseModel):
80
- model: Annotated[
81
- str,
82
- Field(
83
- description="the model to run a test query against, you can find this by the filenames in the repository, they will end with .model.lkml. You should not pass in the .model.lkml extension.",
84
- default="",
85
- ),
86
- ]
87
- explore: Annotated[
88
- str,
89
- Field(
90
- description="the explore to run a test query against, you can find this by finding explore: <name> {} in any file in the repository",
91
- default="",
92
- ),
93
- ]
94
- fields: Annotated[
95
- List[str],
96
- Field(
97
- description="this should be the list of fields you want to return from the test query. If the user does not provide them, use all that have changed in your current context",
98
- default=[],
99
- ),
100
- ]
101
-
102
-
103
58
  def get_mcp_sdk(ctx: LkrCtxObj | typer.Context):
104
59
  sdk = get_auth(ctx).get_current_sdk(prompt_refresh_invalid_token=False)
105
60
  sdk.auth.settings.agent_tag += "-mcp"
@@ -179,85 +134,6 @@ SELECT * FROM (
179
134
  )
180
135
 
181
136
 
182
- def now() -> datetime:
183
- return datetime.now(timezone.utc)
184
-
185
-
186
- class Connection(BaseModel):
187
- connection: str
188
- updated_at: datetime = Field(default_factory=now)
189
-
190
- @computed_field(return_type=str)
191
- @property
192
- def fully_qualified_name(self) -> str:
193
- return self.connection
194
-
195
-
196
- class Database(Connection):
197
- database: str
198
-
199
- @computed_field(return_type=str)
200
- @property
201
- def fully_qualified_name(self) -> str:
202
- return f"{self.connection}.{self.database}"
203
-
204
-
205
- class Schema(Database):
206
- database_schema_name: str
207
-
208
- @computed_field(return_type=str)
209
- @property
210
- def fully_qualified_name(self) -> str:
211
- return f"{self.connection}.{self.database}.{self.database_schema_name}"
212
-
213
-
214
- class Table(Schema):
215
- database_table_name: str
216
-
217
- @computed_field(return_type=str)
218
- @property
219
- def fully_qualified_name(self) -> str:
220
- return f"{self.connection}.{self.database}.{self.database_schema_name}.{self.database_table_name}"
221
-
222
-
223
- class Row(Table):
224
- database_column_name: str
225
- data_type_database: str
226
- data_type_looker: str
227
-
228
- @computed_field(return_type=str)
229
- @property
230
- def fully_qualified_name(self) -> str:
231
- return f"{self.connection}.{self.database}.{self.database_schema_name}.{self.database_table_name}.{self.database_column_name}"
232
-
233
- def append(self, base_url: str) -> None:
234
- with open(get_database_search_file(base_url), "a") as f:
235
- f.write(self.model_dump_json() + "\n")
236
-
237
- def exists(self, base_url: str) -> bool:
238
- columns = conn.execute(
239
- f"SELECT * FROM read_json_auto('{get_database_search_file(base_url)}') WHERE fully_qualified_name = '{self.fully_qualified_name}'"
240
- ).fetchall()
241
- return len(columns) > 0
242
-
243
-
244
- def ok[T](func: Callable[[], T], default: T) -> T:
245
- try:
246
- return func()
247
- except Exception:
248
- # logger.error(f"Error in {func.__name__}: {str(e)}")
249
- return default
250
-
251
-
252
- def conn_registry_path(
253
- type: Literal["connection", "database", "schema", "table"], prefix: str = ""
254
- ) -> Path:
255
- file_loc = get_connection_registry_file(type, prefix)
256
- if not file_loc.exists():
257
- file_loc.touch()
258
- return file_loc
259
-
260
-
261
137
  class ConnectionRegistry(BaseModel):
262
138
  connections: Set[str]
263
139
  databases: Set[str]
@@ -301,7 +177,7 @@ class ConnectionRegistry(BaseModel):
301
177
 
302
178
  def load_connections(self, dt_filter: datetime | None = None) -> None:
303
179
  file = conn_registry_path("connection", self.prefix)
304
- # logger.debug(f"Loading connections from {file}")
180
+ logger.debug(f"Loading connections from {file}")
305
181
  sql = f"SELECT connection FROM read_json_auto('{file}')"
306
182
  if dt_filter:
307
183
  sql += f" WHERE updated_at > '{dt_filter.isoformat()}'"
@@ -310,8 +186,8 @@ class ConnectionRegistry(BaseModel):
310
186
  for row in results:
311
187
  connection = Connection(connection=row[0])
312
188
  self.connections.add(connection.fully_qualified_name)
313
- except Exception:
314
- # logger.error(f"Error loading connections from {file}: {str(e)}")
189
+ except Exception as e:
190
+ logger.error(f"Error loading connections from {file}: {str(e)}")
315
191
  return
316
192
 
317
193
  def load_databases(self, dt_filter: datetime | None = None) -> None:
@@ -324,8 +200,8 @@ class ConnectionRegistry(BaseModel):
324
200
  for row in results:
325
201
  database = Database(connection=row[0], database=row[1])
326
202
  self.databases.add(database.fully_qualified_name)
327
- except Exception:
328
- # logger.error(f"Error loading databases from {file}: {str(e)}")
203
+ except Exception as e:
204
+ logger.error(f"Error loading databases from {file}: {str(e)}")
329
205
  return
330
206
 
331
207
  def load_schemas(self, dt_filter: datetime | None = None) -> None:
@@ -340,8 +216,8 @@ class ConnectionRegistry(BaseModel):
340
216
  connection=row[0], database=row[1], database_schema_name=row[2]
341
217
  )
342
218
  self.schemas.add(schema.fully_qualified_name)
343
- except Exception:
344
- # logger.error(f"Error loading schemas from {file}: {str(e)}")
219
+ except Exception as e:
220
+ logger.error(f"Error loading schemas from {file}: {str(e)}")
345
221
  return
346
222
 
347
223
  def load_tables(self, dt_filter: datetime | None = None) -> None:
@@ -359,8 +235,8 @@ class ConnectionRegistry(BaseModel):
359
235
  database_table_name=row[3],
360
236
  )
361
237
  self.tables.add(table.fully_qualified_name)
362
- except Exception:
363
- # logger.error(f"Error loading tables from {file}: {str(e)}")
238
+ except Exception as e:
239
+ logger.error(f"Error loading tables from {file}: {str(e)}")
364
240
  return
365
241
 
366
242
  @classmethod
@@ -387,27 +263,31 @@ def populate_looker_connection_search_on_startup(ctx: typer.Context) -> None:
387
263
  # logger.debug("Populating looker connection search")
388
264
  sdk = get_mcp_sdk(ctx)
389
265
  if not current_instance:
390
- # logger.error("No current instance found")
391
- raise typer.Abort()
392
- registry = ConnectionRegistry.initialize(prefix=current_instance)
266
+ logger.error("No current instance found")
267
+ return
268
+ url_from_instance = sdk.auth.settings.base_url
269
+ logger.debug(
270
+ f"Populating looker connection search for {url_from_instance} from {current_instance}"
271
+ )
272
+ registry = ConnectionRegistry.initialize(prefix=url_from_instance)
393
273
  connections = ok(lambda: sdk.all_connections(), [])
394
274
  for connection in connections:
395
275
  if not connection.name:
396
276
  continue
397
277
  elif registry.check("connection", connection.name):
398
- # logger.debug(
399
- # f"Skipping {connection.name} because it already exists in the registry"
400
- # )
278
+ logger.debug(
279
+ f"Skipping {connection.name} because it already exists in the registry"
280
+ )
401
281
  continue
402
- # logger.debug(f"Populating looker connection search for {connection.name}")
282
+ logger.debug(f"Populating looker connection search for {connection.name}")
403
283
  databases = ok(lambda: sdk.connection_databases(connection.name or ""), [])
404
284
  for database in databases:
405
285
  if registry.check("database", database):
406
- # logger.debug(
407
- # f"Skipping {database} because it already exists in the registry"
408
- # )
286
+ logger.debug(
287
+ f"Skipping {database} because it already exists in the registry"
288
+ )
409
289
  continue
410
- # logger.debug(f"Populating looker connection search for {database}")
290
+ logger.debug(f"Populating looker connection search for {database}")
411
291
  schemas = ok(
412
292
  lambda: sdk.connection_schemas(
413
293
  connection.name or "", database, cache=True, fields="name"
@@ -559,22 +439,48 @@ def load_database_search_file(file_loc: Path) -> None:
559
439
  )
560
440
 
561
441
 
562
- class SearchFullyQualifiedNamesRequest(BaseModel):
563
- search_term: str = Field(
564
- description="The search term to search for within the fully qualified column name. It will be converted to lowercase before searching. The fully quallified column name incluses database, schema, table, and column names."
565
- )
566
-
567
-
568
442
  # Add a dynamic greeting resource
569
443
  @mcp.tool()
570
- def search_fully_qualified_names(req: SearchFullyQualifiedNamesRequest) -> List[dict]:
444
+ def search_fully_qualified_names(
445
+ search_term: Annotated[
446
+ str,
447
+ Field(
448
+ description="The search term to search for within the fully qualified column name. It will be converted to lowercase before searching. The fully qualified column name includes database, schema, table, and column names.",
449
+ min_length=1,
450
+ ),
451
+ ],
452
+ database_filter: Annotated[
453
+ str | None,
454
+ Field(
455
+ description="The database to search for within the fully qualified column name. It will be converted to lowercase before searching. The fully qualified column name includes database, schema, table, and column names. If not provided, all databases will be searched. This is synonymous with BigQuery's projects.",
456
+ ),
457
+ ],
458
+ schema_filter: Annotated[
459
+ str | None,
460
+ Field(
461
+ description="The schema to search for within the fully qualified column name. It will be converted to lowercase before searching. The fully qualified column name includes database, schema, table, and column names. If not provided, all schemas will be searched. This is synonymous with BigQuery's datasets",
462
+ ),
463
+ ],
464
+ table_filter: Annotated[
465
+ str | None,
466
+ Field(
467
+ description="The table to search for within the fully qualified column name. It will be converted to lowercase before searching. The fully qualified column name includes database, schema, table, and column names. If not provided, all tables will be searched.",
468
+ ),
469
+ ],
470
+ limit: Annotated[
471
+ int,
472
+ Field(
473
+ description="The number of results to return. If not provided, the default is 100.",
474
+ default=100,
475
+ ),
476
+ ],
477
+ ) -> List[Row]:
571
478
  """
572
479
  Use lkr to search fully qualified columns which include connection, database, schema, table, column names, and data types
573
- Returns a list of matching rows with their BM25 scores
480
+ Returns a list of matching rows with their BM25 scores. If no database, schema, or table is provided, all will be searched. When specified together, database, schema, and table are filtered together using an AND.
574
481
  """
575
- result = conn.execute(
576
- """
577
- SELECT
482
+ sql = """
483
+ SELECT
578
484
  connection,
579
485
  database,
580
486
  database_schema_name,
@@ -584,15 +490,30 @@ def search_fully_qualified_names(req: SearchFullyQualifiedNamesRequest) -> List[
584
490
  data_type_looker,
585
491
  fts_main_looker_connection_search.match_bm25(
586
492
  fully_qualified_name,
587
- ?
493
+ $search_term
588
494
  ) AS score
589
495
  FROM looker_connection_search
590
496
  WHERE score IS NOT NULL
591
- ORDER BY score DESC
592
- LIMIT 10000
593
- """,
594
- [req.search_term.lower()],
595
- # [search_term, connections, databases, schemas, tables],
497
+ """
498
+ params = dict(
499
+ search_term=search_term.lower(),
500
+ limit=limit,
501
+ )
502
+ if database_filter:
503
+ sql += " AND database = $database"
504
+ params["database"] = database_filter
505
+ if schema_filter:
506
+ sql += " AND database_schema_name = $schema"
507
+ params["schema"] = schema_filter
508
+ if table_filter:
509
+ sql += " AND database_table_name = $table"
510
+ params["table"] = table_filter
511
+ sql += " ORDER BY score DESC LIMIT $limit"
512
+ logger.debug(f"Executing SQL: {sql}")
513
+ logger.debug(f"Params: {params}")
514
+ result = conn.execute(
515
+ sql,
516
+ params,
596
517
  ).fetchall()
597
518
  return [
598
519
  Row(
@@ -603,20 +524,31 @@ def search_fully_qualified_names(req: SearchFullyQualifiedNamesRequest) -> List[
603
524
  database_column_name=row[4],
604
525
  data_type_database=row[5],
605
526
  data_type_looker=row[6],
606
- ).model_dump()
527
+ )
607
528
  for row in result
608
529
  ]
609
530
 
610
531
 
611
- @group.callback()
612
- def main(ctx: typer.Context):
532
+ @group.command(name="run")
533
+ def run(
534
+ ctx: typer.Context,
535
+ debug: bool = typer.Option(False, help="Debug mode"),
536
+ ):
537
+ from lkr.logger import LogLevel, set_log_level
538
+
613
539
  global ctx_lkr
614
- ctx_lkr = LkrCtxObj(force_oauth=False)
615
- validate_current_instance_database_search_file(ctx)
616
540
 
541
+ ctx_lkr = LkrCtxObj(force_oauth=False)
542
+ validate_current_instance_database_search_file(ctx, debug)
543
+ sdk = get_mcp_sdk(ctx_lkr)
544
+ if not sdk.auth.settings.base_url:
545
+ logger.error("No current instance found")
546
+ raise typer.Exit(1)
617
547
 
618
- @group.command(name="run")
619
- def run():
548
+ if debug:
549
+ set_log_level(LogLevel.DEBUG)
550
+ else:
551
+ set_log_level(LogLevel.ERROR)
620
552
  mcp.run()
621
553
 
622
554
 
@@ -631,18 +563,25 @@ def check_for_database_search_file(ctx: typer.Context) -> None:
631
563
  raise typer.Abort()
632
564
 
633
565
 
634
- def validate_current_instance_database_search_file(ctx: typer.Context) -> None:
566
+ def validate_current_instance_database_search_file(
567
+ ctx: typer.Context, debug: bool
568
+ ) -> None:
635
569
  global current_instance
636
570
  check = get_auth(ctx).get_current_instance()
571
+ if not check:
572
+ logger.error("No current instance found")
637
573
  if not current_instance:
638
574
  current_instance = check
639
- thread = threading.Thread(target=check_for_database_search_file, args=(ctx,))
640
- # thread.daemon = True
575
+ thread = threading.Thread(
576
+ target=check_for_database_search_file, args=(ctx,), daemon=not debug
577
+ )
641
578
  thread.start()
642
579
  elif current_instance != check:
643
580
  current_instance = check
644
- thread = threading.Thread(target=check_for_database_search_file, args=(ctx,))
645
- # thread.daemon = True
581
+ thread = threading.Thread(
582
+ target=check_for_database_search_file, args=(ctx,), daemon=not debug
583
+ )
584
+ thread.daemon = True if not debug else False
646
585
  thread.start()
647
586
  else:
648
587
  pass
lkr/mcp/utils.py ADDED
@@ -0,0 +1,55 @@
1
+ # server.py
2
+ import os
3
+ from datetime import datetime, timezone
4
+ from pathlib import Path
5
+ from typing import Callable, Literal
6
+
7
+ from lkr.logger import logger
8
+
9
+ db_loc: Path | None = None
10
+
11
+
12
+ def get_db_loc() -> Path:
13
+ global db_loc
14
+ if db_loc is None:
15
+ db_path = os.path.expanduser("~/.lkr")
16
+ db_loc = Path(db_path) / "mcp_search_db"
17
+ db_loc.mkdir(exist_ok=True, parents=True)
18
+ return db_loc
19
+
20
+
21
+ def get_database_search_file(prefix: str = "") -> Path:
22
+ p = get_db_loc() / f"{prefix + '.' if prefix else ''}looker_connection_search.jsonl"
23
+ if not p.exists():
24
+ p.touch()
25
+ return p
26
+
27
+
28
+ def get_connection_registry_file(
29
+ type: Literal["connection", "database", "schema", "table"], prefix: str = ""
30
+ ) -> Path:
31
+ return (
32
+ get_db_loc()
33
+ / f"{prefix + '.' if prefix else ''}looker_connection_registry.{type}.jsonl"
34
+ )
35
+
36
+
37
+ def now() -> datetime:
38
+ return datetime.now(timezone.utc)
39
+
40
+
41
+ def ok[T](func: Callable[[], T], default: T) -> T:
42
+ try:
43
+ return func()
44
+ except Exception as e:
45
+ logger.error(f"Error in {func.__name__}: {str(e)}")
46
+ return default
47
+
48
+
49
+ def conn_registry_path(
50
+ type: Literal["connection", "database", "schema", "table"], prefix: str = ""
51
+ ) -> Path:
52
+ file_loc = get_connection_registry_file(type, prefix)
53
+ if not file_loc.exists():
54
+ file_loc.touch()
55
+ return file_loc
lkr/tools/classes.py ADDED
@@ -0,0 +1,206 @@
1
+ from typing import Literal, Optional, Self, cast
2
+
3
+ from fastapi import Request
4
+ from looker_sdk.sdk.api40.methods import Looker40SDK
5
+ from looker_sdk.sdk.api40.models import (
6
+ UserAttributeGroupValue,
7
+ WriteUserAttribute,
8
+ WriteUserAttributeWithValue,
9
+ )
10
+ from pydantic import BaseModel, model_validator
11
+
12
+ from lkr.auth_service import init_api_key_sdk
13
+ from lkr.classes import LookerApiKey
14
+ from lkr.logger import logger
15
+
16
+
17
+ class UserAttributeUpdater(BaseModel):
18
+ client_id: Optional[str] = None
19
+ client_secret: Optional[str] = None
20
+ base_url: Optional[str] = None
21
+ value: Optional[str] = None
22
+ user_attribute: Optional[str]
23
+ user_attribute_id: Optional[str] = None
24
+ update_type: Literal["group", "default", "user"]
25
+ group_name: Optional[str] = None
26
+ group_id: Optional[str] = None
27
+ looker_user_id: Optional[str] = None
28
+ external_user_id: Optional[str] = None
29
+ email: Optional[str] = None
30
+
31
+ @model_validator(mode="after")
32
+ def check_variables(self) -> Self:
33
+ if not (bool(self.user_attribute) or bool(self.user_attribute_id)):
34
+ raise ValueError("Either user_attribute or user_attribute_id must be set")
35
+ if self.update_type == "group":
36
+ if not (bool(self.group_name) or bool(self.group_id)):
37
+ raise ValueError("Either group_name or group_id must be set")
38
+ if self.update_type == "user":
39
+ if not (
40
+ bool(self.looker_user_id)
41
+ or bool(self.external_user_id)
42
+ or bool(self.email)
43
+ ):
44
+ raise ValueError(
45
+ "Either looker_user_id, external_user_id, or email must be set"
46
+ )
47
+ return self
48
+
49
+ def get_request_authorization_for_value(self, request: Request):
50
+ authorization_token = request.headers.get("Authorization")
51
+ if authorization_token:
52
+ self.value = authorization_token
53
+ else:
54
+ logger.error("No authorization token found")
55
+
56
+ def _get_sdk(self):
57
+ api_key: LookerApiKey | None = None
58
+ if self.client_id and self.client_secret and self.base_url:
59
+ api_key = LookerApiKey(
60
+ client_id=self.client_id,
61
+ client_secret=self.client_secret,
62
+ base_url=self.base_url,
63
+ )
64
+ else:
65
+ api_key = LookerApiKey.from_env()
66
+ if not api_key:
67
+ logger.error("No API key found")
68
+ return None
69
+ return init_api_key_sdk(api_key, True)
70
+
71
+ def _get_group_id(self, sdk: Looker40SDK) -> str | None:
72
+ if self.group_id:
73
+ return self.group_id
74
+ elif self.group_name:
75
+ groups = sdk.search_groups(name=self.group_name, fields="id")
76
+ if groups:
77
+ return groups[0].id
78
+ return None
79
+
80
+ def _get_user_attribute_id(self, sdk: Looker40SDK) -> str | None:
81
+ if self.user_attribute_id:
82
+ return self.user_attribute_id
83
+ elif self.user_attribute:
84
+ user_attributes = cast(
85
+ list[dict], sdk.get("/user_attributes", structure=list[dict])
86
+ )
87
+ for user_attribute in user_attributes or []:
88
+ if user_attribute["name"] == self.user_attribute:
89
+ return user_attribute["id"]
90
+ return None
91
+
92
+ def delete_user_attribute_value(self):
93
+ sdk = self._get_sdk()
94
+ if not sdk:
95
+ raise ValueError("No SDK found")
96
+ user_attribute_id = self._get_user_attribute_id(sdk)
97
+ if not user_attribute_id:
98
+ raise ValueError("User attribute not found")
99
+
100
+ if self.update_type == "group":
101
+ group_id = self._get_group_id(sdk)
102
+ if group_id:
103
+ sdk.delete_user_attribute_group_value(
104
+ group_id=group_id,
105
+ user_attribute_id=user_attribute_id,
106
+ )
107
+ else:
108
+ raise ValueError("Group not found")
109
+ elif self.update_type == "default":
110
+ sdk.delete_user_attribute(user_attribute_id)
111
+ elif self.update_type == "user":
112
+ if self.looker_user_id:
113
+ sdk.delete_user_attribute_user_value(
114
+ user_id=self.looker_user_id,
115
+ user_attribute_id=user_attribute_id,
116
+ )
117
+ elif self.external_user_id:
118
+ sdk.delete_user_attribute_user_value(
119
+ user_id=self.external_user_id,
120
+ user_attribute_id=user_attribute_id,
121
+ )
122
+ elif self.email:
123
+ sdk.delete_user_attribute_user_value(
124
+ user_id=self.email,
125
+ user_attribute_id=user_attribute_id,
126
+ )
127
+ else:
128
+ raise ValueError("User not found")
129
+
130
+ def update_user_attribute_value(self):
131
+ if not self.value:
132
+ raise ValueError("Value is required to update user attribute")
133
+
134
+ sdk = self._get_sdk()
135
+ if not sdk:
136
+ raise ValueError("No SDK found")
137
+ user_attribute_id = self._get_user_attribute_id(sdk)
138
+
139
+ if not user_attribute_id:
140
+ raise ValueError("User attribute not found")
141
+ user_attribute = sdk.user_attribute(user_attribute_id)
142
+ if not user_attribute:
143
+ raise ValueError("User attribute not found")
144
+
145
+ if user_attribute.type != "string":
146
+ raise ValueError("User attribute is not a string")
147
+
148
+ if self.update_type == "group":
149
+ group_id = self._get_group_id(sdk)
150
+ if group_id:
151
+ sdk.update_user_attribute_group_value(
152
+ group_id=group_id,
153
+ user_attribute_id=user_attribute_id,
154
+ body=UserAttributeGroupValue(
155
+ group_id=group_id,
156
+ user_attribute_id=user_attribute_id,
157
+ value=self.value,
158
+ ),
159
+ )
160
+ else:
161
+ raise ValueError("Group not found")
162
+ elif self.update_type == "default":
163
+ sdk.update_user_attribute(
164
+ user_attribute_id,
165
+ WriteUserAttribute(
166
+ name=user_attribute.name,
167
+ label=user_attribute.label,
168
+ type=user_attribute.type,
169
+ default_value=self.value,
170
+ ),
171
+ )
172
+ elif self.update_type == "user":
173
+
174
+ def set_user_attribute_user_value(user_id: str):
175
+ sdk.set_user_attribute_user_value(
176
+ user_id=user_id,
177
+ user_attribute_id=user_attribute_id,
178
+ body=WriteUserAttributeWithValue(
179
+ value=self.value,
180
+ ),
181
+ )
182
+
183
+ if self.looker_user_id:
184
+ set_user_attribute_user_value(self.looker_user_id)
185
+
186
+ elif self.external_user_id:
187
+ user = sdk.user_for_credential("embed", self.external_user_id)
188
+ if not (user and user.id):
189
+ raise ValueError("User not found")
190
+ set_user_attribute_user_value(user.id)
191
+
192
+ elif self.email:
193
+ user = sdk.user_for_credential("email", self.email)
194
+ if not (user and user.id):
195
+ raise ValueError("User not found")
196
+ set_user_attribute_user_value(user.id)
197
+
198
+ else:
199
+ raise ValueError("User not found")
200
+ else:
201
+ raise ValueError("Invalid update_type")
202
+
203
+
204
+ class AttributeUpdaterResponse(BaseModel):
205
+ success: bool = False
206
+ message: str
lkr/tools/main.py ADDED
@@ -0,0 +1,55 @@
1
+ import typer
2
+ import uvicorn
3
+ from fastapi import FastAPI, Request
4
+
5
+ from lkr.tools.classes import AttributeUpdaterResponse, UserAttributeUpdater
6
+
7
+ __all__ = ["group"]
8
+
9
+ group = typer.Typer()
10
+
11
+
12
+ @group.command()
13
+ def user_attribute_updater(
14
+ ctx: typer.Context,
15
+ host: str = typer.Option(default="127.0.0.1", envvar="HOST"),
16
+ port: int = typer.Option(default=8080, envvar="PORT"),
17
+ ):
18
+ api = FastAPI()
19
+
20
+ @api.post("/identity_token")
21
+ def identity_token(request: Request, body: UserAttributeUpdater):
22
+ try:
23
+ body.get_request_authorization_for_value(request)
24
+ body.update_user_attribute_value()
25
+ return AttributeUpdaterResponse(
26
+ success=True, message="User attribute updated"
27
+ )
28
+ except Exception as e:
29
+ return AttributeUpdaterResponse(success=False, message=str(e))
30
+
31
+ @api.delete("/value")
32
+ def delete_user_attribute_value(request: Request, body: UserAttributeUpdater):
33
+ try:
34
+ body.delete_user_attribute_value()
35
+ return AttributeUpdaterResponse(
36
+ success=True, message="User attribute value deleted"
37
+ )
38
+ except Exception as e:
39
+ return AttributeUpdaterResponse(success=False, message=str(e))
40
+
41
+ @api.post("/value")
42
+ def update_user_attribute_value(request: Request, body: UserAttributeUpdater):
43
+ try:
44
+ body.update_user_attribute_value()
45
+ return AttributeUpdaterResponse(
46
+ success=True, message="User attribute value updated"
47
+ )
48
+ except Exception as e:
49
+ return AttributeUpdaterResponse(success=False, message=str(e))
50
+
51
+ uvicorn.run(api, host=host, port=port)
52
+
53
+
54
+ if __name__ == "__main__":
55
+ group()
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: lkr-dev-cli
3
- Version: 0.0.24
3
+ Version: 0.0.26
4
4
  Summary: lkr: a command line interface for looker
5
5
  Author: bwebs
6
6
  License-Expression: MIT
@@ -9,8 +9,8 @@ Requires-Python: >=3.12
9
9
  Requires-Dist: cryptography>=42.0.0
10
10
  Requires-Dist: duckdb>=1.2.2
11
11
  Requires-Dist: fastapi>=0.115.12
12
- Requires-Dist: fastmcp>=2.3.5
13
12
  Requires-Dist: looker-sdk>=25.4.0
13
+ Requires-Dist: mcp[cli]>=1.9.2
14
14
  Requires-Dist: pydantic>=2.11.4
15
15
  Requires-Dist: pydash>=8.0.5
16
16
  Requires-Dist: questionary>=2.1.0
@@ -178,26 +178,33 @@ For example:
178
178
 
179
179
  ### Cloud Run + GCP Health Check example
180
180
 
181
- One of the simplest ways to launch the health check is the `lkr-cli` public docker image, Cloud Run, and the GCP health check service. Here's an example; make sure to change your region, models, user_attributes, and external_user_id.
181
+ One of the simplest ways to launch the health check is to use the `lkr-cli` public Docker image with Cloud Run and the GCP health check service. Here's an example; make sure to change your region and project. HEALTH_URL is an example of how to structure the query parameters for the health check.
182
182
 
183
183
  ```bash
184
184
  export REGION=<your region>
185
185
  export PROJECT=<your project id>
186
186
 
187
- export HEALTH_URL="/health?dashboard_id=1&external_user_id=embed-user-abc&models=thelook&user_attributes={\"store_id\":\"1\"}"
187
+ export HEALTH_URL="/health?dashboard_id=1&external_user_id=observability-embed-user&models=thelook&user_attributes={\"store_id\":\"1\"}"
188
188
 
189
189
  gcloud run deploy lkr-observability \
190
190
  --image us-central1-docker.pkg.dev/lkr-dev-production/lkr-cli/cli:latest \
191
191
  --command lkr \
192
192
  --args observability,embed \
193
193
  --platform managed \
194
- --region $REGION \
194
+ --region $REGION \
195
195
  --project $PROJECT \
196
196
  --cpu 2 \
197
197
  --memory 4Gi \
198
- --liveness-probe httpGet.path=$HEALTH_URL,timeoutSeconds=20,periodSeconds=20 \
199
198
  --set-env-vars LOOKERSDK_CLIENT_ID=<your client id>,LOOKERSDK_CLIENT_SECRET=<your client secret>,LOOKERSDK_BASE_URL=<your instance url>
200
199
 
200
+ gcloud monitoring uptime create lkr-observability-health-check \
201
+ --protocol https \
202
+ --project $PROJECT \
203
+ --resource-type="cloud-run-revision" \
204
+ --resource-labels="project_id=${PROJECT},service_name=lkr-observability,location=${REGION}" \
205
+ --path="${HEALTH_URL}" \
206
+ --period="15" \
207
+ --timeout="60"
201
208
 
202
209
  ```
203
210
 
@@ -1,21 +1,25 @@
1
- lkr/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
1
+ lkr/__init__.py,sha256=IooxWuZJ0XeK6dTc0PTRy3pVhofSoepOVJHBMuBWByI,87
2
2
  lkr/auth_service.py,sha256=NIGlSVtzS3ajpWYx2gTJVDknuF_KxwYsJEj88Hc05dQ,19887
3
3
  lkr/classes.py,sha256=f2TJOXFta0s8LJLEXOqPdWPLg-EIIntUSDS6gDOon7M,1163
4
4
  lkr/constants.py,sha256=DdCfsV6q8wgs2iHpIQeb6oDP_2XejusEHyPvCbaM3yY,108
5
5
  lkr/custom_types.py,sha256=feJ-W2U61PJTiotMLuZJqxrotA53er95kO1O30mooy4,323
6
6
  lkr/exceptions.py,sha256=M_aR4YaCZtY4wyxhcoqJCVkxVu9z3Wwo5KgSDyOoEnI,210
7
- lkr/logger.py,sha256=wLEVluF-2lVMPJ228D7O4-shHaydEGXmn47j567nUgw,1887
8
- lkr/main.py,sha256=wpnSiCpIScEi-jmrLuwcSJrMX3tMkuaYNwlvNkVf5N8,2375
7
+ lkr/logger.py,sha256=vKlJZqiMzJbYBzmiiD0HzJp-J-rHd4nWX-7P4ZKgh78,2033
8
+ lkr/main.py,sha256=pTVibYNb7Wh-dKVbqAozUGWPbeofqIo0gQceZSXoySQ,2464
9
9
  lkr/auth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
10
10
  lkr/auth/main.py,sha256=7tWGPWokzbBnrX1enZ9YP4rdDJqYBlGfuYe0Wg-fXT4,7532
11
11
  lkr/auth/oauth.py,sha256=n2yAcccdBZaloGVtFRTwCPBfh1cvVYNbXLsFCxmWc5M,7207
12
- lkr/mcp/main.py,sha256=fz7Bvbkj1CTD185lGwNdMYP7bOTDadWL1xdD4JZW8qQ,22886
12
+ lkr/mcp/classes.py,sha256=dqtZxivftufQpQNXmlzzhx1lw3nN7DwTvFc6pZUF_bA,2992
13
+ lkr/mcp/main.py,sha256=JRMfdNk5_tqxWz8caqKBmcxP2ieNXAE4-1HjEgC4jl4,21693
14
+ lkr/mcp/utils.py,sha256=lXkXmoD-a7WkJYI4qqzh-n1wE4T5YajOkALZtjJxYWg,1378
13
15
  lkr/observability/classes.py,sha256=LgGuUnY-J1csPrlAKnw4PPOqOfbvaOx2cxENlQgJYcE,5816
14
16
  lkr/observability/embed_container.html,sha256=IcDG-QVsYYNGQGrkDrx9OMZ2Pmo4C8oAjRHddFQ7Tlw,2939
15
17
  lkr/observability/main.py,sha256=XbejIdqhNNUMqHVezb5EnLaJ32dO9-Bt0o5d8lc0kyw,9544
16
18
  lkr/observability/utils.py,sha256=UpaBrp_ufaXLoz4p3xG3K6lHKBpP9wBhvP8rDmeGoWg,2148
17
- lkr_dev_cli-0.0.24.dist-info/METADATA,sha256=PKOjKBiQqRlg4nfYzcFjbjYHSzaRNCslIlBiBGaaZPw,10663
18
- lkr_dev_cli-0.0.24.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
19
- lkr_dev_cli-0.0.24.dist-info/entry_points.txt,sha256=nn2sFMGDpwUVE61ZUpbDPnQZkW7Gc08nV-tyLGo8q34,37
20
- lkr_dev_cli-0.0.24.dist-info/licenses/LICENSE,sha256=hKnCOORW1JRE_M2vStz8dblS5u1iR-2VpqS9xagKNa0,1063
21
- lkr_dev_cli-0.0.24.dist-info/RECORD,,
19
+ lkr/tools/classes.py,sha256=tXOFLdahjEHlpr0dgR3miwDeb98Qw1X7bUA-26Hzhkg,7635
20
+ lkr/tools/main.py,sha256=zHBZWjCp_pYw_dPKQg_rLZTHLbnKYAoPPFBuHSqtEc0,1751
21
+ lkr_dev_cli-0.0.26.dist-info/METADATA,sha256=fTj56Vl0lcKtV9gruaG0haOjqJYmvXzBeCsN2JpRLaY,10953
22
+ lkr_dev_cli-0.0.26.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
23
+ lkr_dev_cli-0.0.26.dist-info/entry_points.txt,sha256=nn2sFMGDpwUVE61ZUpbDPnQZkW7Gc08nV-tyLGo8q34,37
24
+ lkr_dev_cli-0.0.26.dist-info/licenses/LICENSE,sha256=hKnCOORW1JRE_M2vStz8dblS5u1iR-2VpqS9xagKNa0,1063
25
+ lkr_dev_cli-0.0.26.dist-info/RECORD,,