kobai-sdk 0.3.3rc1__tar.gz → 0.3.4rc1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of kobai-sdk has been flagged as potentially problematic by the registry scanner; consult the registry's advisory page for details.

Files changed (21):
  1. {kobai_sdk-0.3.3rc1/kobai_sdk.egg-info → kobai_sdk-0.3.4rc1}/PKG-INFO +1 -3
  2. {kobai_sdk-0.3.3rc1 → kobai_sdk-0.3.4rc1}/kobai/genie.py +0 -32
  3. {kobai_sdk-0.3.3rc1 → kobai_sdk-0.3.4rc1}/kobai/tenant_client.py +2 -7
  4. {kobai_sdk-0.3.3rc1 → kobai_sdk-0.3.4rc1/kobai_sdk.egg-info}/PKG-INFO +1 -3
  5. {kobai_sdk-0.3.3rc1 → kobai_sdk-0.3.4rc1}/kobai_sdk.egg-info/requires.txt +0 -2
  6. {kobai_sdk-0.3.3rc1 → kobai_sdk-0.3.4rc1}/pyproject.toml +2 -4
  7. {kobai_sdk-0.3.3rc1 → kobai_sdk-0.3.4rc1}/LICENSE +0 -0
  8. {kobai_sdk-0.3.3rc1 → kobai_sdk-0.3.4rc1}/MANIFEST.in +0 -0
  9. {kobai_sdk-0.3.3rc1 → kobai_sdk-0.3.4rc1}/README.md +0 -0
  10. {kobai_sdk-0.3.3rc1 → kobai_sdk-0.3.4rc1}/kobai/__init__.py +0 -0
  11. {kobai_sdk-0.3.3rc1 → kobai_sdk-0.3.4rc1}/kobai/ai_query.py +0 -0
  12. {kobai_sdk-0.3.3rc1 → kobai_sdk-0.3.4rc1}/kobai/ai_rag.py +0 -0
  13. {kobai_sdk-0.3.3rc1 → kobai_sdk-0.3.4rc1}/kobai/databricks_client.py +0 -0
  14. {kobai_sdk-0.3.3rc1 → kobai_sdk-0.3.4rc1}/kobai/demo_tenant_client.py +0 -0
  15. {kobai_sdk-0.3.3rc1 → kobai_sdk-0.3.4rc1}/kobai/ms_authenticate.py +0 -0
  16. {kobai_sdk-0.3.3rc1 → kobai_sdk-0.3.4rc1}/kobai/spark_client.py +0 -0
  17. {kobai_sdk-0.3.3rc1 → kobai_sdk-0.3.4rc1}/kobai/tenant_api.py +0 -0
  18. {kobai_sdk-0.3.3rc1 → kobai_sdk-0.3.4rc1}/kobai_sdk.egg-info/SOURCES.txt +0 -0
  19. {kobai_sdk-0.3.3rc1 → kobai_sdk-0.3.4rc1}/kobai_sdk.egg-info/dependency_links.txt +0 -0
  20. {kobai_sdk-0.3.3rc1 → kobai_sdk-0.3.4rc1}/kobai_sdk.egg-info/top_level.txt +0 -0
  21. {kobai_sdk-0.3.3rc1 → kobai_sdk-0.3.4rc1}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: kobai-sdk
3
- Version: 0.3.3rc1
3
+ Version: 0.3.4rc1
4
4
  Summary: A package that enables interaction with a Kobai tenant.
5
5
  Author-email: Ryan Oattes <ryan@kobai.io>
6
6
  License: Apache License
@@ -221,8 +221,6 @@ Requires-Dist: azure-identity
221
221
  Requires-Dist: azure-storage-blob
222
222
  Requires-Dist: langchain-core
223
223
  Requires-Dist: langchain-community
224
- Requires-Dist: langchain_openai
225
- Requires-Dist: databricks_langchain
226
224
  Provides-Extra: dev
227
225
  Requires-Dist: black; extra == "dev"
228
226
  Requires-Dist: bumpver; extra == "dev"
@@ -1,4 +1,3 @@
1
- import json
2
1
 
3
2
  def get_genie_descriptions(solution_id, structure, schema):
4
3
  for di, dom in enumerate(structure["domains"]):
@@ -82,7 +81,6 @@ def recurse_parent_props(uri, structure, props, rels, visited=None):
82
81
  recurse_parent_props(icon, structure, props, rels, visited)
83
82
 
84
83
  def add_map_count(structure):
85
- #mapping_defs = get_tenant_mapping_config(structure["solution_id"])
86
84
 
87
85
  for dom in structure["domains"]:
88
86
  for con in dom["concepts"]:
@@ -93,41 +91,11 @@ def add_map_count(structure):
93
91
  map_count = map_count + 1
94
92
  con["map_count"] = map_count
95
93
 
96
- #def get_tenant_mapping_config(solution_id):
97
- # mapping_sql = f"""
98
- # select s.id, mapd.concept_type_uri
99
- # from studio.solutions s
100
- # inner join studio.mapping_defs mapd
101
- # on s.id = mapd.solution_id
102
- # where s.id = {solution_id}
103
- # """
104
-
105
- # mapping_rows = app_db.run_query(mapping_sql)
106
-
107
- # mapping_defs = []
108
- # for row in mapping_rows:
109
- # mapping_def = {"solution_id": row[0], "concept_type_uri": row[1]}
110
- # mapping_defs.append(mapping_def)
111
- # return mapping_defs
112
-
113
94
  ############################
114
95
  # Question Config
115
96
  ############################
116
97
 
117
98
  def get_tenant_question_config(solution_id, structure):
118
- #question_sql = f"""
119
- # select s.id solution_id, q.id, q.description, q.definition, a.id api_id
120
- # from studio.models m
121
- # inner join studio.queries q
122
- # on m.id = q.model_id
123
- # inner join studio.solutions s
124
- # on m.id = s.model_id
125
- # left join studio.api a
126
- # on q.id = a.query_id
127
- # where s.id = {solution_id}
128
- #"""
129
-
130
- #question_rows = app_db.run_query(question_sql)
131
99
 
132
100
  question_defs = []
133
101
  for row in structure["queries"]:
@@ -5,8 +5,6 @@ import urllib.parse
5
5
 
6
6
  from pyspark.sql import SparkSession
7
7
 
8
- from langchain_community.chat_models import ChatDatabricks
9
- from databricks_langchain import DatabricksEmbeddings
10
8
  from langchain_core.language_models.chat_models import BaseChatModel
11
9
  from langchain_core.embeddings import Embeddings
12
10
 
@@ -281,9 +279,10 @@ class TenantClient:
281
279
 
282
280
  for t in self.__get_view_sql(domains=domains, concepts=concepts, not_concepts=not_concepts, enforce_map=enforce_map):
283
281
  payload["table_identifiers"].append(t["table"])
282
+ print(t["table"])
284
283
  response = self.databricks_client._DatabricksClient__api_patch("/api/2.0/data-rooms/" + room_id, payload)
285
284
 
286
- payload = {"title":"Notes","content":"When filtering for a named entity, use a like comparison instead of equality. All tables are denormalized, so columns may have repeated rows for the same primary identifier. You should handle this by putting each table in a subquery and using the DISTINCT keyword.","instruction_type":"TEXT_INSTRUCTION"}
285
+ payload = {"title":"Notes","content":"When filtering for a named entity, use a like comparison instead of equality. All tables are denormalized, so columns may have repeated rows for the same primary identifier. You should handle this by putting each table in a subquery and using the DISTINCT keyword. The first column in each view is a unique identifier that should only be used for joins, and never shown to a user. Find another column to identify the subject of the table.","instruction_type":"TEXT_INSTRUCTION"}
287
286
  instructions = self.databricks_client._DatabricksClient__api_get("/api/2.0/data-rooms/" + room_id + "/instructions")
288
287
  inst_id = "-1"
289
288
 
@@ -325,10 +324,6 @@ class TenantClient:
325
324
  ########################################
326
325
 
327
326
  def __get_descriptions(self):
328
-
329
- #params={"schema": self.schema, "tenant_id": self.model_id}
330
- #response = self.api_client._TenantAPI__run_get("/episteme-svcs/api/descriptions", params=params)
331
- #return response.json()
332
327
 
333
328
  tenant_config = self.get_tenant_config()
334
329
  descriptions = get_genie_descriptions(self.model_id, tenant_config, self.schema)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: kobai-sdk
3
- Version: 0.3.3rc1
3
+ Version: 0.3.4rc1
4
4
  Summary: A package that enables interaction with a Kobai tenant.
5
5
  Author-email: Ryan Oattes <ryan@kobai.io>
6
6
  License: Apache License
@@ -221,8 +221,6 @@ Requires-Dist: azure-identity
221
221
  Requires-Dist: azure-storage-blob
222
222
  Requires-Dist: langchain-core
223
223
  Requires-Dist: langchain-community
224
- Requires-Dist: langchain_openai
225
- Requires-Dist: databricks_langchain
226
224
  Provides-Extra: dev
227
225
  Requires-Dist: black; extra == "dev"
228
226
  Requires-Dist: bumpver; extra == "dev"
@@ -5,8 +5,6 @@ azure-identity
5
5
  azure-storage-blob
6
6
  langchain-core
7
7
  langchain-community
8
- langchain_openai
9
- databricks_langchain
10
8
 
11
9
  [dev]
12
10
  black
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
4
4
 
5
5
  [project]
6
6
  name = "kobai-sdk"
7
- version = "0.3.3rc1"
7
+ version = "0.3.4rc1"
8
8
  description = "A package that enables interaction with a Kobai tenant."
9
9
  readme = "README.md"
10
10
  authors = [{ name = "Ryan Oattes", email = "ryan@kobai.io" }]
@@ -24,9 +24,7 @@ dependencies = [
24
24
  "azure-identity",
25
25
  "azure-storage-blob",
26
26
  "langchain-core",
27
- "langchain-community",
28
- "langchain_openai",
29
- "databricks_langchain"
27
+ "langchain-community"
30
28
  ]
31
29
  requires-python = ">=3.11"
32
30
 
The remaining files listed above with "+0 -0" (LICENSE, MANIFEST.in, README.md, and the unchanged kobai/ and kobai_sdk.egg-info/ modules) contain no content changes; only their containing directory was renamed for the new version.