quillsql 2.1.6__py3-none-any.whl → 2.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,11 +1,10 @@
  import psycopg2
- from psycopg2.extras import RealDictCursor
  import json
  import redis
  from quillsql.error import PgQueryError
  from quillsql.db.db_helper import connect_to_db, run_query_by_db

- ## The TTL for new cache entries (default: 1h)
+ # The TTL for new cache entries (default: 1h)
  DEFAULT_CACHE_TTL = 24 * 60 * 60

@@ -33,7 +32,9 @@ class CachedConnection:
      def exec_with_reconnect(self, sql):
          reconnect_count = 0
          while True:
-             self.connection = connect_to_db(self.database_type, self.config, self.using_connection_string)
+             self.connection = connect_to_db(
+                 self.database_type, self.config, self.using_connection_string
+             )
              try:
                  return run_query_by_db(self.database_type, sql, self.connection)
              except psycopg2.Error as err:
@@ -57,9 +58,9 @@ class CachedConnection:
          cached_result = self.cache.get(key)
          if cached_result:
              cached = json.loads(cached_result)
-             return {"rows": cached, "fields": cached['fields']}
+             return {"rows": cached, "fields": cached["fields"]}
          else:
              new_result = self.exec(sql)
              new_result_string = json.dumps(new_result)
              self.cache.set(key, new_result_string, "EX", DEFAULT_CACHE_TTL)
-             return {"rows": new_result, "fields": new_result['fields']}
+             return {"rows": new_result, "fields": new_result["fields"]}
quillsql/db/db_helper.py CHANGED
@@ -1,42 +1,61 @@
- from quillsql.db.postgres import format_postgres, connect_to_postgres, get_schema_column_info_postgres, get_tables_by_schema_postgres, run_query_postgres, disconnect_from_postgres
- from quillsql.db.bigquery import format_bigquery_config, connect_to_bigquery, get_schema_column_info_big_query, get_tables_by_schema_big_query, run_query_big_query
+ from quillsql.db.postgres import (
+     format_postgres,
+     connect_to_postgres,
+     get_schema_column_info_postgres,
+     get_tables_by_schema_postgres,
+     run_query_postgres,
+     disconnect_from_postgres,
+ )
+ from quillsql.db.bigquery import (
+     format_bigquery_config,
+     connect_to_bigquery,
+     get_schema_column_info_big_query,
+     get_tables_by_schema_big_query,
+     run_query_big_query,
+ )
+

  def get_db_credentials(database_type, connection_string):
-     if(database_type.lower() == 'postgresql'):
-         return format_postgres(connection_string)
-     elif(database_type.lower() == 'bigquery'):
-         return format_bigquery_config(connection_string)
-     return {}
+     if database_type.lower() == "postgresql":
+         return format_postgres(connection_string)
+     elif database_type.lower() == "bigquery":
+         return format_bigquery_config(connection_string)
+     return {}
+

  def connect_to_db(database_type, config, using_connection_string):
-     if(database_type.lower() == 'postgresql'):
+     if database_type.lower() == "postgresql":
          return connect_to_postgres(config, using_connection_string)
-     elif(database_type.lower() == 'bigquery'):
+     elif database_type.lower() == "bigquery":
          return connect_to_bigquery(config, using_connection_string)
      return None

+
  def run_query_by_db(database_type, query, connection):
-     if (database_type.lower() == 'postgresql'):
+     if database_type.lower() == "postgresql":
          return run_query_postgres(query, connection)
-     elif (database_type.lower() == 'bigquery'):
+     elif database_type.lower() == "bigquery":
          return run_query_big_query(query, connection)
      return None

+
  def disconnect_from_db(database_type, connection):
-     if (database_type.lower() == 'postgresql'):
+     if database_type.lower() == "postgresql":
          return disconnect_from_postgres(connection)
      return None

+
  def get_schema_tables_by_db(database_type, connection, schema_name):
-     if (database_type.lower() == 'postgresql'):
+     if database_type.lower() == "postgresql":
          return get_tables_by_schema_postgres(connection, schema_name)
-     elif (database_type.lower() == 'bigquery'):
+     elif database_type.lower() == "bigquery":
          return get_tables_by_schema_big_query(connection, schema_name)
      return None

+
  def get_schema_column_info_by_db(database_type, connection, schema_name, table_names):
-     if (database_type.lower() == 'postgresql'):
+     if database_type.lower() == "postgresql":
          return get_schema_column_info_postgres(connection, schema_name, table_names)
-     elif (database_type.lower() == 'bigquery'):
+     elif database_type.lower() == "bigquery":
          return get_schema_column_info_big_query(connection, schema_name, table_names)
-     return None
+     return None
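All of these helpers dispatch on a lowercased `database_type` string, so callers stay backend-agnostic. A hypothetical end-to-end usage sketch (the connection string is a placeholder):

```python
from quillsql.db.db_helper import (
    get_db_credentials,
    connect_to_db,
    run_query_by_db,
    disconnect_from_db,
)

# Placeholder DSN; any postgres:// connection string works the same way.
creds = get_db_credentials("postgresql", "postgres://user:pass@localhost:5432/app")
conn = connect_to_db("postgresql", creds, using_connection_string=True)
try:
    result = run_query_by_db("postgresql", "SELECT 1 AS one", conn)
    print(result["rows"])  # [{'one': 1}]
finally:
    disconnect_from_db("postgresql", conn)
```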
quillsql/db/postgres.py CHANGED
@@ -1,72 +1,127 @@
+ import os
  import psycopg2
  from psycopg2.extensions import make_dsn
  from quillsql.assets.pgtypes import PG_TYPES

- def format_postgres(connection_string ):
-     to_dsn = lambda conn: make_dsn(conn) if "://" in conn else conn
+
+ def format_postgres(connection_string):
+     def to_dsn(conn):
+         return make_dsn(conn) if "://" in conn else conn
+
      return to_dsn(connection_string)

+
  def connect_to_postgres(config, usingConnectionString):
+     os.environ['PGGSSENCMODE'] = 'disable'  # https://github.com/psycopg/psycopg2/issues/1084
      if usingConnectionString:
-         return psycopg2.connect(config)
+         return psycopg2.connect(config)
      else:
-         return psycopg2.connect(
-             database=config['dbname'],
-             user=config['user'],
-             password=config['password'],
-             host=config['host'],
-             port=config['port']
-         )
+         return psycopg2.connect(
+             database=config["dbname"],
+             user=config["user"],
+             password=config["password"],
+             host=config["host"],
+             port=config["port"],
+         )
+

  def run_query_postgres(query, connection):
      cursor = connection.cursor()
      cursor.execute(query)
      result = cursor.fetchall()
-     fields = [
-         {"name": desc[0], "dataTypeID": desc[1]} for desc in cursor.description
-     ]
+     fields = [{"name": desc[0], "dataTypeID": desc[1]} for desc in cursor.description]
      cursor.close()
-     rows_dict = [dict(zip([field['name'] for field in fields], row)) for row in result]
+     rows_dict = [dict(zip([field["name"] for field in fields], row)) for row in result]
      return {"rows": rows_dict, "fields": fields}

+
  def disconnect_from_postgres(connection):
      connection.close()
      return

+
  # getTablesBySchemaPostgres
+
+
  def get_tables_by_schema_postgres(connection, schema_names):
      all_tables = []
      for schema_name in schema_names:
-         query = f"SELECT table_name, table_schema FROM information_schema.tables WHERE table_schema = '{schema_name}'"
-         results = run_query_postgres(query, connection)
-         for row in results['rows']:
-             cur_table = {}
-             cur_table['table_name'] = row['table_name']
-             cur_table['schema_name'] = row['table_schema']
-             all_tables.append(cur_table)
+         query = f"""
+         SELECT table_name, table_schema
+         FROM information_schema.tables
+         WHERE table_schema = '{schema_name}'
+
+         UNION
+
+         SELECT c.relname as table_name, n.nspname as table_schema
+         FROM pg_class c
+         JOIN pg_namespace n ON c.relnamespace = n.oid
+         WHERE n.nspname = '{schema_name}'
+         AND c.relkind = 'm';
+         """
+         results = run_query_postgres(query, connection)
+         for row in results["rows"]:
+             cur_table = {}
+             cur_table["table_name"] = row["table_name"]
+             cur_table["schema_name"] = row["table_schema"]
+             all_tables.append(cur_table)
      return all_tables

- # getSchemaColumnInfoPostgress
+
+ # getSchemaColumnInfoPostgres
+
+
  def get_schema_column_info_postgres(connection, schema_name, table_names):
      all_columns = []
      for table_name in table_names:
-         query = f"SELECT column_name, udt_name FROM information_schema.columns WHERE table_schema = '{table_name['schema_name']}' AND table_name = '{table_name['table_name']}' ORDER BY ordinal_position"
+         query = f"""
+         SELECT column_name as "column_name", udt_name as "field_type", ordinal_position as "sort_number"
+         FROM information_schema.columns
+         WHERE table_schema = '{table_name['schema_name']}'
+         AND table_name = '{table_name['table_name']}'
+
+         UNION
+
+         SELECT a.attname as "column_name", t.typname as "field_type", a.attnum as "sort_number"
+         FROM pg_attribute a
+         JOIN pg_class c ON a.attrelid = c.oid
+         JOIN pg_namespace n ON c.relnamespace = n.oid
+         JOIN pg_type t ON a.atttypid = t.oid
+         WHERE n.nspname = '{table_name['schema_name']}'
+         AND c.relname = '{table_name['table_name']}'
+         AND c.relkind = 'm'
+         AND a.attnum > 0
+         AND NOT a.attisdropped
+         ORDER BY "sort_number"
+         """
          results = run_query_postgres(query, connection)
          columns = []
-         for row in results['rows']:
-             # Convert row['udt_name'] to postgresql oid
-             pg_type = next((pg_type for pg_type in PG_TYPES if pg_type['typname'] == row['udt_name']), None)
-             if pg_type == None:
+         for row in results["rows"]:
+             pg_type = next(
+                 (
+                     pg_type
+                     for pg_type in PG_TYPES
+                     if pg_type["typname"] == row["field_type"]
+                 ),
+                 None,
+             )
+             if pg_type is None:
                  pg_type = 1043
-             columns.append({
-                 'columnName': row['column_name'],
-                 'displayName': row['column_name'],
-                 'dataTypeID': pg_type['oid'],
-                 'fieldType': row['udt_name'],
-             })
-         all_columns.append({
-             'tableName': table_name['schema_name']+'.'+table_name['table_name'],
-             'displayName': table_name['schema_name']+'.'+table_name['table_name'],
-             'columns': columns
-         })
-     return all_columns
+             columns.append(
+                 {
+                     "columnName": row["column_name"],
+                     "displayName": row["column_name"],
+                     "dataTypeID": pg_type["oid"],
+                     "fieldType": row["field_type"],
+                 }
+             )
+         all_columns.append(
+             {
+                 "tableName": table_name["schema_name"] + "." + table_name["table_name"],
+                 "displayName": table_name["schema_name"]
+                 + "."
+                 + table_name["table_name"],
+                 "columns": columns,
+             }
+         )
+     return all_columns
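The rewritten introspection queries UNION `information_schema` with the `pg_class`/`pg_namespace` catalogs because materialized views (`relkind = 'm'`) are not listed in `information_schema.tables`. The schema and table names are still interpolated via f-strings; a parameterized sketch of the same table lookup (not the library's code) would look like:

```python
def list_tables_and_matviews(connection, schema_name):
    """Same lookup as get_tables_by_schema_postgres for one schema,
    but with psycopg2 query parameters instead of f-string interpolation."""
    query = """
        SELECT table_name, table_schema
        FROM information_schema.tables
        WHERE table_schema = %(schema)s
        UNION
        SELECT c.relname AS table_name, n.nspname AS table_schema
        FROM pg_class c
        JOIN pg_namespace n ON c.relnamespace = n.oid
        WHERE n.nspname = %(schema)s
          AND c.relkind = 'm'
    """
    with connection.cursor() as cur:
        cur.execute(query, {"schema": schema_name})
        return [{"table_name": t, "schema_name": s} for t, s in cur.fetchall()]
```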
quillsql/error.py CHANGED
@@ -1,5 +1,5 @@
  class PgQueryError(Exception):
-     def __init__(self, message, query, position):
-         super().__init__(message)
-         self.query = query
-         self.position = position
+     def __init__(self, message, query, position):
+         super().__init__(message)
+         self.query = query
+         self.position = position
@@ -1,3 +1,4 @@
  # __init__.py

- from .run_query_processes import remove_fields, array_to_map
+ from .run_query_processes import remove_fields, array_to_map
+ from .filters import Filter, FilterType, FieldType, StringOperator, NumberOperator, NullOperator, DateOperator, convert_custom_filter
@@ -0,0 +1,180 @@
+ from enum import Enum
+ from typing import Union, Any, Optional
+ from dataclasses import dataclass, asdict
+
+ # Constants
+ IS_EXACTLY = 'is exactly'
+ IS_NOT_EXACTLY = 'is not exactly'
+ CONTAINS = 'contains'
+ IS = 'is'
+ IS_NOT = 'is not'
+ IS_NOT_NULL = 'is not null'
+ IS_NULL = 'is null'
+
+ IN_THE_LAST = 'in the last'
+ IN_THE_PREVIOUS = 'in the previous'
+ IN_THE_CURRENT = 'in the current'
+
+ EQUAL_TO = 'equal to'
+ NOT_EQUAL_TO = 'not equal to'
+ GREATER_THAN = 'greater than'
+ LESS_THAN = 'less than'
+ GREATER_THAN_OR_EQUAL_TO = 'greater than or equal to'
+ LESS_THAN_OR_EQUAL_TO = 'less than or equal to'
+
+ YEAR = 'year'
+ QUARTER = 'quarter'
+ MONTH = 'month'
+ WEEK = 'week'
+ DAY = 'day'
+ HOUR = 'hour'
+
+ NUMBER = 'number'
+ STRING = 'string'
+ DATE = 'date'
+ NULL = 'null'
+ CUSTOM = 'custom'
+ BOOLEAN = 'boolean'
+
+ # Enums
+ class StringOperator(Enum):
+     IS_EXACTLY = IS_EXACTLY
+     IS_NOT_EXACTLY = IS_NOT_EXACTLY
+     CONTAINS = CONTAINS
+     IS = IS
+     IS_NOT = IS_NOT
+
+ class DateOperator(Enum):
+     CUSTOM = CUSTOM
+     IN_THE_LAST = IN_THE_LAST
+     IN_THE_PREVIOUS = IN_THE_PREVIOUS
+     IN_THE_CURRENT = IN_THE_CURRENT
+     EQUAL_TO = EQUAL_TO
+     NOT_EQUAL_TO = NOT_EQUAL_TO
+     GREATER_THAN = GREATER_THAN
+     LESS_THAN = LESS_THAN
+     GREATER_THAN_OR_EQUAL_TO = GREATER_THAN_OR_EQUAL_TO
+     LESS_THAN_OR_EQUAL_TO = LESS_THAN_OR_EQUAL_TO
+
+ class NumberOperator(Enum):
+     EQUAL_TO = EQUAL_TO
+     NOT_EQUAL_TO = NOT_EQUAL_TO
+     GREATER_THAN = GREATER_THAN
+     LESS_THAN = LESS_THAN
+     GREATER_THAN_OR_EQUAL_TO = GREATER_THAN_OR_EQUAL_TO
+     LESS_THAN_OR_EQUAL_TO = LESS_THAN_OR_EQUAL_TO
+
+ class NullOperator(Enum):
+     IS_NOT_NULL = IS_NOT_NULL
+     IS_NULL = IS_NULL
+
+ class BoolOperator(Enum):
+     EQUAL_TO = EQUAL_TO
+     NOT_EQUAL_TO = NOT_EQUAL_TO
+
+ class TimeUnit(Enum):
+     YEAR = YEAR
+     QUARTER = QUARTER
+     MONTH = MONTH
+     WEEK = WEEK
+     DAY = DAY
+     HOUR = HOUR
+
+ class FieldType(Enum):
+     STRING = STRING
+     NUMBER = NUMBER
+     DATE = DATE
+     NULL = NULL
+     BOOLEAN = BOOLEAN
+
+ class FilterType(Enum):
+     STRING_FILTER = 'string-filter'
+     DATE_FILTER = 'date-filter'
+     DATE_CUSTOM_FILTER = 'date-custom-filter'
+     DATE_COMPARISON_FILTER = 'date-comparison-filter'
+     NUMERIC_FILTER = 'numeric-filter'
+     NULL_FILTER = 'null-filter'
+     STRING_IN_FILTER = 'string-in-filter'
+     BOOLEAN_FILTER = 'boolean-filter'
+
+ # Types
+ Operator = Union[StringOperator, DateOperator, NumberOperator, NullOperator, BoolOperator]
+
+ # Base Filter Interface
+ @dataclass
+ class DateRange:
+     startDate: str
+     endDate: str
+
+ @dataclass
+ class DateValue:
+     value: int
+     unit: TimeUnit
+ @dataclass
+ class BaseFilter:
+     filterType: FilterType
+     fieldType: FieldType
+     operator: Operator
+     field: str
+     value: Union[bool, int, str, list[str], DateRange, DateValue, None]
+     table: Optional[str] = None
+
+ @dataclass
+ class Filter:
+     filter_type: FilterType
+     operator: Operator
+     value: Union[bool, int, str, list[str], DateRange, DateValue, None]
+     field: str
+     table: str
+
+ def convert_custom_filter(filter: Filter) -> dict:
+     if filter.filter_type == FilterType.STRING_FILTER:
+         if not isinstance(filter.value, str):
+             raise ValueError('Invalid value for StringFilter, expected string')
+         if filter.operator not in StringOperator:
+             raise ValueError('Invalid operator for StringFilter, expected StringOperator')
+         return asdict(BaseFilter(filter.filter_type, FieldType.STRING, filter.operator, filter.field, filter.value, filter.table))
+     elif filter.filter_type == FilterType.STRING_IN_FILTER:
+         if not isinstance(filter.value, list):
+             raise ValueError('Invalid value for StringInFilter, expected list')
+         if filter.operator not in StringOperator:
+             raise ValueError('Invalid operator for StringInFilter, expected StringOperator')
+         return asdict(BaseFilter(filter.filter_type, FieldType.STRING, filter.operator, filter.field, filter.value, filter.table))
+     elif filter.filter_type == FilterType.NUMERIC_FILTER:
+         if not isinstance(filter.value, int):
+             raise ValueError('Invalid value for NumericFilter, expected int')
+         if filter.operator not in NumberOperator:
+             raise ValueError('Invalid operator for NumericFilter, expected NumberOperator')
+         return asdict(BaseFilter(filter.filter_type, FieldType.NUMBER, filter.operator, filter.field, filter.value, filter.table))
+     elif filter.filter_type == FilterType.DATE_FILTER:
+         if not isinstance(filter.value, DateValue) or filter.value is None:
+             raise ValueError('Invalid value for DateFilter, expected DateValue')
+         if filter.operator not in DateOperator:
+             raise ValueError('Invalid operator for DateFilter, expected DateOperator')
+         return asdict(BaseFilter(filter.filter_type, FieldType.DATE, filter.operator, filter.field, filter.value, filter.table))
+     elif filter.filter_type == FilterType.DATE_CUSTOM_FILTER:
+         if not isinstance(filter.value, DateRange) or filter.value is None:
+             raise ValueError('Invalid value for DateCustomFilter, expected DateRange')
+         if filter.operator not in DateOperator:
+             raise ValueError('Invalid operator for DateCustomFilter, expected DateOperator')
+         return asdict(BaseFilter(filter.filter_type, FieldType.DATE, filter.operator, filter.field, filter.value, filter.table))
+     elif filter.filter_type == FilterType.DATE_COMPARISON_FILTER:
+         if not isinstance(filter.value, str):
+             raise ValueError('Invalid value for DateComparisonFilter, expected str')
+         if filter.operator not in DateOperator:
+             raise ValueError('Invalid operator for DateComparisonFilter, expected DateOperator')
+         return asdict(BaseFilter(filter.filter_type, FieldType.DATE, filter.operator, filter.field, filter.value, filter.table))
+     elif filter.filter_type == FilterType.NULL_FILTER:
+         if filter.value is not None:
+             raise ValueError('Invalid value for NullFilter, expected None')
+         if filter.operator not in NullOperator:
+             raise ValueError('Invalid operator for NullFilter, expected NullOperator')
+         return asdict(BaseFilter(filter.filter_type, FieldType.NULL, filter.operator, filter.field, filter.value, filter.table))
+     elif filter.filter_type == FilterType.BOOLEAN_FILTER:
+         if not isinstance(filter.value, bool):
+             raise ValueError('Invalid value for BooleanFilter, expected bool')
+         if filter.operator not in BoolOperator:
+             raise ValueError('Invalid operator for BooleanFilter, expected BoolOperator')
+         return asdict(BaseFilter(filter.filter_type, FieldType.BOOLEAN, filter.operator, filter.field, filter.value, filter.table))
+     else:
+         raise ValueError(f'Unknown filter type: {filter.filter_type}')
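`convert_custom_filter` validates each `value`/`operator` pair against its `FilterType` and flattens the result to a plain dict via `asdict`. A usage sketch based on the definitions above (assuming these names are re-exported from the package root, as the updated `__init__.py` suggests):

```python
from quillsql import Filter, FilterType, StringOperator, convert_custom_filter

f = Filter(
    filter_type=FilterType.STRING_FILTER,
    operator=StringOperator.IS_EXACTLY,
    value="active",
    field="status",
    table="customers",
)
payload = convert_custom_filter(f)
# payload is a plain dict: the STRING fieldType is filled in, and a
# non-string value or non-StringOperator operator would raise ValueError.
```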
@@ -1,19 +1,20 @@
- import json
-
  def remove_fields(query_result, fields_to_remove):
-     fields = [
-         {"name": field['name'], "dataTypeID": field['dataTypeID']}
-         for field in query_result['fields']
-         if field['name'] not in fields_to_remove
-     ]
-     rows = [row for row in query_result['rows']]
-     for row in rows:
-         for field in fields_to_remove:
-             if field in row:
-                 del row[field]
-     return {"fields": fields, "rows": rows}
+     fields = [
+         {"name": field["name"], "dataTypeID": field["dataTypeID"]}
+         for field in query_result["fields"]
+         if field["name"] not in fields_to_remove
+     ]
+     rows = [row for row in query_result["rows"]]
+     for row in rows:
+         for field in fields_to_remove:
+             if field in row:
+                 del row[field]
+     return {"fields": fields, "rows": rows}
+

  def array_to_map(queries, array_to_map, metadata, target_pool):
-     for i in range(len(queries)):
-         query_result = target_pool.query(queries[i])
-         metadata[array_to_map.get("arrayName")][i][array_to_map.get("field")] = query_result.get("rows")
+     mapped_array = []
+     for i in range(len(queries)):
+         query_result = target_pool.query(queries[i])
+         mapped_array.append(query_result.get("rows"))
+     return mapped_array
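Note the behavioral change in `array_to_map`: it no longer writes each result into `metadata[arrayName][i][field]` in place, but collects the rows and returns them as a list, leaving `metadata` untouched. A small sketch of the new contract, with a hypothetical stub pool:

```python
class StubPool:
    """Hypothetical stand-in for target_pool; query() returns a result
    dict shaped like run_query_postgres output."""
    def query(self, sql):
        return {"rows": [{"sql": sql}], "fields": []}

queries = ["SELECT 1", "SELECT 2"]
# The second and third arguments are unused by the new implementation.
mapped = array_to_map(queries, {}, {}, StubPool())
assert mapped == [[{"sql": "SELECT 1"}], [{"sql": "SELECT 2"}]]
```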
@@ -1,6 +1,9 @@
  from quillsql.assets.pgtypes import PG_TYPES

+
  def convert_type_to_postgres(data_type_id):
-     # find the object in PG_TYPES that matches the type
-     pg_type = next((pg_type for pg_type in PG_TYPES if pg_type['oid'] == data_type_id), None)
-     return pg_type['typname'] if pg_type else data_type_id
+     # find the object in PG_TYPES that matches the type
+     pg_type = next(
+         (pg_type for pg_type in PG_TYPES if pg_type["oid"] == data_type_id), None
+     )
+     return pg_type["typname"] if pg_type else data_type_id
@@ -0,0 +1,60 @@
+ from typing import Union, List, Dict
+
+ # Type aliases for clarity
+ TenantId = Union[str, int]
+ TenantInfo = Dict[str, Union[str, List[TenantId]]]
+ Tenants = Union[List[TenantId], List[TenantInfo]]
+
+ def extract_tenant_ids(tenants: Tenants) -> List[TenantId]:
+     """
+     Extract tenant IDs from the tenants parameter, which can be either a list of IDs
+     or a list of tenant info dictionaries.
+
+     Args:
+         tenants: Either a list of tenant IDs (strings/integers) or a list of tenant info dictionaries
+
+     Returns:
+         List of tenant IDs
+
+     Raises:
+         ValueError: If the tenants parameter format is invalid
+     """
+     if not tenants:
+         raise ValueError("Invalid format for tenants: empty list")
+
+     first_tenant = tenants[0]
+
+     if isinstance(first_tenant, (str, int)):
+         return tenants  # type: ignore
+     elif isinstance(first_tenant, dict) and "tenantIds" in first_tenant:
+         # TODO: support multiple tenants in future
+         return first_tenant["tenantIds"]
+     else:
+         raise ValueError("Invalid format for tenants")
+
+ def extract_tenant_field(tenants: Tenants, dashboard_owner: str) -> str:
+     """
+     Extract tenant field from the tenants parameter, falling back to dashboard_owner
+     if tenants is a simple list of IDs.
+
+     Args:
+         tenants: Either a list of tenant IDs (strings/integers) or a list of tenant info dictionaries
+         dashboard_owner: The default tenant field to use if tenants is a simple list
+
+     Returns:
+         The tenant field string
+
+     Raises:
+         ValueError: If the tenants parameter format is invalid
+     """
+     if not tenants:
+         raise ValueError("Invalid format for tenants: empty list")
+
+     first_tenant = tenants[0]
+
+     if isinstance(first_tenant, (str, int)):
+         return dashboard_owner
+     elif isinstance(first_tenant, dict) and "tenantField" in first_tenant:
+         return first_tenant["tenantField"]
+     else:
+         raise ValueError("Invalid format for tenants")
@@ -0,0 +1,69 @@
+ Metadata-Version: 2.4
+ Name: quillsql
+ Version: 2.2.0
+ Summary: Quill SDK for Python.
+ Home-page: https://github.com/quill-sql/quill-python
+ Author: Quill
+ Author-email: shawn@quill.co
+ Description-Content-Type: text/markdown
+ Requires-Dist: psycopg2-binary
+ Requires-Dist: requests
+ Requires-Dist: redis
+ Requires-Dist: python-dotenv
+ Requires-Dist: pytest
+ Requires-Dist: google-cloud-bigquery
+ Requires-Dist: google-auth
+ Dynamic: author
+ Dynamic: author-email
+ Dynamic: description
+ Dynamic: description-content-type
+ Dynamic: home-page
+ Dynamic: requires-dist
+ Dynamic: summary
+
+ # Quill Python SDK
+
+ ## Quickstart
+
+ First, install the quillsql package by running:
+
+ ```bash
+ $ pip install quillsql
+ ```
+
+ Then, add a `/quill` endpoint to your existing Python server. For example, if
+ you were running a FastAPI app, you would add the endpoint like this:
+
+ ```python
+ from quillsql import Quill
+
+ quill = Quill(
+     private_key=os.getenv("QULL_PRIVATE_KEY"),
+     database_connection_string=os.getenv("POSTGRES_READ"),
+     database_type="postgresql"
+ )
+
+ security = HTTPBearer()
+
+ async def authenticate_jwt(token: str = Depends(security)):
+     # Your JWT validation logic here
+     # Return user object or raise HTTPException
+     user = validate_jwt_token(token.credentials)
+     return user
+
+ @app.post("/quill")
+ async def quill_post(data: Request, user: dict = Depends(authenticate_jwt)):
+     # assuming the user fetched via auth middleware has a user_id
+     user_id = user["user_id"]
+     body = await data.json()
+     metadata = body.get("metadata")
+
+     result = quill.query(
+         tenants=[{"tenantField": "user_id", "tenantIds": [user_id]}],
+         metadata=metadata
+     )
+     return result
+ ```
+
+ Then you can run your app as usual. Pass this route to our React library
+ on the frontend and you're all set!