singlestoredb-1.15.1-cp38-abi3-win_amd64.whl → singlestoredb-1.15.3-cp38-abi3-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of singlestoredb might be problematic.
- _singlestoredb_accel.pyd +0 -0
- singlestoredb/__init__.py +1 -1
- singlestoredb/apps/_python_udfs.py +18 -3
- singlestoredb/apps/_stdout_supress.py +1 -1
- singlestoredb/apps/_uvicorn_util.py +4 -0
- singlestoredb/config.py +18 -0
- singlestoredb/converters.py +1 -1
- singlestoredb/functions/ext/asgi.py +209 -23
- singlestoredb/functions/ext/timer.py +2 -11
- singlestoredb/functions/ext/utils.py +55 -6
- singlestoredb/tests/test_ext_func_data.py +18 -18
- {singlestoredb-1.15.1.dist-info → singlestoredb-1.15.3.dist-info}/METADATA +1 -1
- {singlestoredb-1.15.1.dist-info → singlestoredb-1.15.3.dist-info}/RECORD +17 -17
- {singlestoredb-1.15.1.dist-info → singlestoredb-1.15.3.dist-info}/LICENSE +0 -0
- {singlestoredb-1.15.1.dist-info → singlestoredb-1.15.3.dist-info}/WHEEL +0 -0
- {singlestoredb-1.15.1.dist-info → singlestoredb-1.15.3.dist-info}/entry_points.txt +0 -0
- {singlestoredb-1.15.1.dist-info → singlestoredb-1.15.3.dist-info}/top_level.txt +0 -0
_singlestoredb_accel.pyd
CHANGED
Binary file
singlestoredb/apps/_python_udfs.py
CHANGED
@@ -13,6 +13,9 @@ if typing.TYPE_CHECKING:
 # Keep track of currently running server
 _running_server: 'typing.Optional[AwaitableUvicornServer]' = None
 
+# Maximum number of UDFs allowed
+MAX_UDFS_LIMIT = 10
+
 
 async def run_udf_app(
     log_level: str = 'error',
@@ -44,20 +47,32 @@ async def run_udf_app(
     udf_suffix = ''
     if app_config.running_interactively:
         udf_suffix = '_test'
-    app = Application(
+    app = Application(
+        url=base_url,
+        app_mode='managed',
+        name_suffix=udf_suffix,
+        log_level=log_level,
+    )
+
+    if not app.endpoints:
+        raise ValueError('You must define at least one function.')
+    if len(app.endpoints) > MAX_UDFS_LIMIT:
+        raise ValueError(
+            f'You can only define a maximum of {MAX_UDFS_LIMIT} functions.',
+        )
 
     config = uvicorn.Config(
         app,
         host='0.0.0.0',
         port=app_config.listen_port,
-
+        log_config=app.get_uvicorn_log_config(),
     )
-    _running_server = AwaitableUvicornServer(config)
 
     # Register the functions only if the app is running interactively.
     if app_config.running_interactively:
         app.register_functions(replace=True)
 
+    _running_server = AwaitableUvicornServer(config)
     asyncio.create_task(_running_server.serve())
     await _running_server.wait_for_startup()
 

singlestoredb/apps/_uvicorn_util.py
CHANGED
@@ -30,3 +30,7 @@ class AwaitableUvicornServer(uvicorn.Server):
 
     async def wait_for_startup(self) -> None:
         await self._startup_future
+
+    async def shutdown(self, sockets: Optional[list[socket.socket]] = None) -> None:
+        if self.started:
+            await super().shutdown(sockets)
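The new endpoint guard is easy to exercise in isolation. A minimal sketch of the same validation logic, assuming nothing beyond the constant and error messages shown in the diff (validate_endpoints is a hypothetical stand-in for the check run_udf_app performs on Application.endpoints):

# Sketch of the endpoint-count validation added to run_udf_app.
# MAX_UDFS_LIMIT and the error messages are taken from the diff above;
# validate_endpoints is a hypothetical stand-in, not package API.
MAX_UDFS_LIMIT = 10


def validate_endpoints(endpoints: dict) -> None:
    if not endpoints:
        raise ValueError('You must define at least one function.')
    if len(endpoints) > MAX_UDFS_LIMIT:
        raise ValueError(
            f'You can only define a maximum of {MAX_UDFS_LIMIT} functions.',
        )


validate_endpoints({f'f{i}': object() for i in range(5)})     # passes
try:
    validate_endpoints({f'f{i}': object() for i in range(11)})
except ValueError as exc:
    print(exc)  # You can only define a maximum of 10 functions.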
singlestoredb/config.py
CHANGED
@@ -407,6 +407,12 @@ register_option(
     environ=['SINGLESTOREDB_EXT_FUNC_LOG_LEVEL'],
 )
 
+register_option(
+    'external_function.log_file', 'string', check_str, None,
+    'File path to write logs to instead of console.',
+    environ=['SINGLESTOREDB_EXT_FUNC_LOG_FILE'],
+)
+
 register_option(
     'external_function.name_prefix', 'string', check_str, '',
     'Prefix to add to external function names.',
@@ -450,6 +456,18 @@ register_option(
     environ=['SINGLESTOREDB_EXT_FUNC_TIMEOUT'],
 )
 
+register_option(
+    'external_function.disable_metrics', 'bool', check_bool, False,
+    'Disable logging of function call metrics.',
+    environ=['SINGLESTOREDB_EXT_FUNC_DISABLE_METRICS'],
+)
+
+register_option(
+    'external_function.app_name', 'string', check_str, None,
+    'Name for the external function application instance.',
+    environ=['SINGLESTOREDB_EXT_FUNC_APP_NAME'],
+)
+
 #
 # Debugging options
 #
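Each new option registers an environment variable, so the settings can be supplied without touching code. A minimal sketch, assuming only the option keys and environment variable names shown above (the values are illustrative):

# Sketch: setting the three new external_function options through their
# registered environment variables, before the package reads its config.
import os

os.environ['SINGLESTOREDB_EXT_FUNC_LOG_FILE'] = '/tmp/udf_app.log'  # illustrative path
os.environ['SINGLESTOREDB_EXT_FUNC_DISABLE_METRICS'] = 'true'
os.environ['SINGLESTOREDB_EXT_FUNC_APP_NAME'] = 'my_udf_app'        # illustrative name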
singlestoredb/functions/ext/asgi.py
CHANGED
@@ -91,7 +91,6 @@ except ImportError:
 
 logger = utils.get_logger('singlestoredb.functions.ext.asgi')
 
-
 # If a number of processes is specified, create a pool of workers
 num_processes = max(0, int(os.environ.get('SINGLESTOREDB_EXT_NUM_PROCESSES', 0)))
 if num_processes > 1:
@@ -678,8 +677,24 @@ class Application(object):
     link_credentials : Dict[str, Any], optional
         The CREDENTIALS section of a LINK definition. This dictionary gets
         converted to JSON for the CREATE LINK call.
+    name_prefix : str, optional
+        Prefix to add to function names when registering with the database
+    name_suffix : str, optional
+        Suffix to add to function names when registering with the database
     function_database : str, optional
         The database to use for external function definitions.
+    log_file : str, optional
+        File path to write logs to instead of console. If None, logs are
+        written to console. When specified, application logger handlers
+        are replaced with a file handler.
+    log_level : str, optional
+        Logging level for the application logger. Valid values are 'info',
+        'debug', 'warning', 'error'. Defaults to 'info'.
+    disable_metrics : bool, optional
+        Disable logging of function call metrics. Defaults to False.
+    app_name : str, optional
+        Name for the application instance. Used to create a logger-specific
+        name. If not provided, a random name will be generated.
 
     """
 
@@ -846,6 +861,10 @@ class Application(object):
         name_prefix: str = get_option('external_function.name_prefix'),
         name_suffix: str = get_option('external_function.name_suffix'),
         function_database: Optional[str] = None,
+        log_file: Optional[str] = get_option('external_function.log_file'),
+        log_level: str = get_option('external_function.log_level'),
+        disable_metrics: bool = get_option('external_function.disable_metrics'),
+        app_name: Optional[str] = get_option('external_function.app_name'),
     ) -> None:
         if link_name and (link_config or link_credentials):
             raise ValueError(
@@ -862,6 +881,15 @@ class Application(object):
                 get_option('external_function.link_credentials') or '{}',
             ) or None
 
+        # Generate application name if not provided
+        if app_name is None:
+            app_name = f'udf_app_{secrets.token_hex(4)}'
+
+        self.name = app_name
+
+        # Create logger instance specific to this application
+        self.logger = utils.get_logger(f'singlestoredb.functions.ext.asgi.{self.name}')
+
         # List of functions specs
         specs: List[Union[str, Callable[..., Any], ModuleType]] = []
 
@@ -953,6 +981,97 @@ class Application(object):
         self.endpoints = endpoints
         self.external_functions = external_functions
         self.function_database = function_database
+        self.log_file = log_file
+        self.log_level = log_level
+        self.disable_metrics = disable_metrics
+
+        # Configure logging
+        self._configure_logging()
+
+    def _configure_logging(self) -> None:
+        """Configure logging based on the log_file settings."""
+        # Set logger level
+        self.logger.setLevel(getattr(logging, self.log_level.upper()))
+
+        # Remove all existing handlers to ensure clean configuration
+        self.logger.handlers.clear()
+
+        # Configure log file if specified
+        if self.log_file:
+            # Create file handler
+            file_handler = logging.FileHandler(self.log_file)
+            file_handler.setLevel(getattr(logging, self.log_level.upper()))
+
+            # Use JSON formatter for file logging
+            formatter = utils.JSONFormatter()
+            file_handler.setFormatter(formatter)
+
+            # Add the handler to the logger
+            self.logger.addHandler(file_handler)
+        else:
+            # For console logging, create a new stream handler with JSON formatter
+            console_handler = logging.StreamHandler()
+            console_handler.setLevel(getattr(logging, self.log_level.upper()))
+            console_handler.setFormatter(utils.JSONFormatter())
+            self.logger.addHandler(console_handler)
+
+        # Prevent propagation to avoid duplicate or differently formatted messages
+        self.logger.propagate = False
+
+    def get_uvicorn_log_config(self) -> Dict[str, Any]:
+        """
+        Create uvicorn log config that matches the Application's logging format.
+
+        This method returns the log configuration used by uvicorn, allowing external
+        users to match the logging format of the Application class.
+
+        Returns
+        -------
+        Dict[str, Any]
+            Log configuration dictionary compatible with uvicorn's log_config parameter
+
+        """
+        log_config = {
+            'version': 1,
+            'disable_existing_loggers': False,
+            'formatters': {
+                'json': {
+                    '()': 'singlestoredb.functions.ext.utils.JSONFormatter',
+                },
+            },
+            'handlers': {
+                'default': {
+                    'class': (
+                        'logging.FileHandler' if self.log_file
                        else 'logging.StreamHandler'
+                    ),
+                    'formatter': 'json',
+                },
+            },
+            'loggers': {
+                'uvicorn': {
+                    'handlers': ['default'],
+                    'level': self.log_level.upper(),
+                    'propagate': False,
+                },
+                'uvicorn.error': {
+                    'handlers': ['default'],
+                    'level': self.log_level.upper(),
+                    'propagate': False,
+                },
+                'uvicorn.access': {
+                    'handlers': ['default'],
+                    'level': self.log_level.upper(),
+                    'propagate': False,
+                },
+            },
+        }
+
+        # Add filename to file handler if log file is specified
+        if self.log_file:
+            log_config['handlers']['default']['filename'] = self.log_file  # type: ignore
+
+        return log_config
 
     async def __call__(
         self,
@@ -976,19 +1095,22 @@ class Application(object):
         request_id = str(uuid.uuid4())
 
         timer = Timer(
+            app_name=self.name,
             id=request_id,
             timestamp=datetime.datetime.now(
                 datetime.timezone.utc,
             ).strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
         )
         call_timer = Timer(
+            app_name=self.name,
             id=request_id,
             timestamp=datetime.datetime.now(
                 datetime.timezone.utc,
             ).strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
         )
 
-
+        if scope['type'] != 'http':
+            raise ValueError(f"Expected HTTP scope, got {scope['type']}")
 
         method = scope['method']
         path = tuple(x for x in scope['path'].split('/') if x)
@@ -1014,14 +1136,15 @@ class Application(object):
         # Call the endpoint
         if method == 'POST' and func is not None and path == self.invoke_path:
 
-            logger.info(
-
-
-                '
-                '
+            self.logger.info(
+                'Function call initiated',
+                extra={
+                    'app_name': self.name,
+                    'request_id': request_id,
+                    'function_name': func_name.decode('utf-8'),
                     'content_type': content_type.decode('utf-8'),
                     'accepts': accepts.decode('utf-8'),
-                }
+                },
             )
 
             args_data_format = func_info['args_data_format']
@@ -1101,8 +1224,14 @@ class Application(object):
                     await send(output_handler['response'])
 
         except asyncio.TimeoutError:
-
-            '
+            self.logger.exception(
+                'Function call timeout',
+                extra={
+                    'app_name': self.name,
+                    'request_id': request_id,
+                    'function_name': func_name.decode('utf-8'),
+                    'timeout': func_info['timeout'],
+                },
             )
             body = (
                 '[TimeoutError] Function call timed out after ' +
@@ -1112,15 +1241,26 @@ class Application(object):
             await send(self.error_response_dict)
 
         except asyncio.CancelledError:
-
-            'Function call cancelled
+            self.logger.exception(
+                'Function call cancelled',
+                extra={
+                    'app_name': self.name,
+                    'request_id': request_id,
+                    'function_name': func_name.decode('utf-8'),
+                },
             )
             body = b'[CancelledError] Function call was cancelled'
             await send(self.error_response_dict)
 
         except Exception as e:
-
-            '
+            self.logger.exception(
+                'Function call error',
+                extra={
+                    'app_name': self.name,
+                    'request_id': request_id,
+                    'function_name': func_name.decode('utf-8'),
+                    'exception_type': type(e).__name__,
+                },
             )
             body = f'[{type(e).__name__}] {str(e).strip()}'.encode('utf-8')
             await send(self.error_response_dict)
@@ -1173,7 +1313,17 @@ class Application(object):
             for k, v in call_timer.metrics.items():
                 timer.metrics[k] = v
 
-
+            if not self.disable_metrics:
+                metrics = timer.finish()
+                self.logger.info(
+                    'Function call metrics',
+                    extra={
+                        'app_name': self.name,
+                        'request_id': request_id,
+                        'function_name': timer.metadata.get('function', ''),
+                        'metrics': metrics,
+                    },
+                )
 
     def _create_link(
         self,
@@ -1230,9 +1380,11 @@ class Application(object):
     ) -> Dict[str, Any]:
        """
         Return the functions and function signature information.
+
         Returns
         -------
         Dict[str, Any]
+
         """
         functions = {}
         no_default = object()
@@ -1284,8 +1436,13 @@ class Application(object):
                     doc_examples.append(ex_dict)
 
             except Exception as e:
-                logger.warning(
-
+                self.logger.warning(
+                    'Could not parse docstring for function',
+                    extra={
+                        'app_name': self.name,
+                        'function_name': key.decode('utf-8'),
+                        'error': str(e),
+                    },
                 )
 
         if not func_name or key == func_name:
@@ -1740,6 +1897,22 @@ def main(argv: Optional[List[str]] = None) -> None:
         ),
         help='logging level',
     )
+    parser.add_argument(
+        '--log-file', metavar='filepath',
+        default=defaults.get(
+            'log_file',
+            get_option('external_function.log_file'),
+        ),
+        help='File path to write logs to instead of console',
+    )
+    parser.add_argument(
+        '--disable-metrics', action='store_true',
+        default=defaults.get(
+            'disable_metrics',
+            get_option('external_function.disable_metrics'),
+        ),
+        help='Disable logging of function call metrics',
+    )
     parser.add_argument(
         '--name-prefix', metavar='name_prefix',
         default=defaults.get(
@@ -1764,6 +1937,14 @@ def main(argv: Optional[List[str]] = None) -> None:
         ),
         help='Database to use for the function definition',
     )
+    parser.add_argument(
+        '--app-name', metavar='app_name',
+        default=defaults.get(
+            'app_name',
+            get_option('external_function.app_name'),
+        ),
+        help='Name for the application instance',
+    )
     parser.add_argument(
         'functions', metavar='module.or.func.path', nargs='*',
         help='functions or modules to export in UDF server',
@@ -1771,8 +1952,6 @@ def main(argv: Optional[List[str]] = None) -> None:
 
     args = parser.parse_args(argv)
 
-    logger.setLevel(getattr(logging, args.log_level.upper()))
-
         if i > 0:
             break
 
@@ -1864,6 +2043,10 @@ def main(argv: Optional[List[str]] = None) -> None:
         name_prefix=args.name_prefix,
         name_suffix=args.name_suffix,
         function_database=args.function_database or None,
+        log_file=args.log_file,
+        log_level=args.log_level,
+        disable_metrics=args.disable_metrics,
+        app_name=args.app_name,
     )
 
     funcs = app.get_create_functions(replace=args.replace_existing)
@@ -1871,11 +2054,11 @@ def main(argv: Optional[List[str]] = None) -> None:
         raise RuntimeError('no functions specified')
 
     for f in funcs:
-        logger.info(f)
+        app.logger.info(f)
 
     try:
         if args.db:
-            logger.info('
+            app.logger.info('Registering functions with database')
             app.register_functions(
                 args.db,
                 replace=args.replace_existing,
@@ -1890,6 +2073,9 @@ def main(argv: Optional[List[str]] = None) -> None:
             ).items() if v is not None
         }
 
+        # Configure uvicorn logging to use JSON format matching Application's format
+        app_args['log_config'] = app.get_uvicorn_log_config()
+
         if use_async:
             asyncio.create_task(_run_uvicorn(uvicorn, app, app_args, db=args.db))
         else:
@@ -1897,7 +2083,7 @@ def main(argv: Optional[List[str]] = None) -> None:
 
     finally:
         if not use_async and args.db:
-            logger.info('
+            app.logger.info('Dropping functions from database')
            app.drop_functions(args.db)
 
 
@@ -1910,7 +2096,7 @@ async def _run_uvicorn(
     """Run uvicorn server and clean up functions after shutdown."""
     await uvicorn.Server(uvicorn.Config(app, **app_args)).serve()
     if db:
-        logger.info('
+        app.logger.info('Dropping functions from database')
         app.drop_functions(db)
 
 
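For a console-logging application (log_file=None), the dictConfig that get_uvicorn_log_config() assembles reduces to the literal below. A minimal sketch, reconstructed from the method body above and assuming only the standard-library logging.config API; applying it routes the uvicorn, uvicorn.error, and uvicorn.access loggers through the package's JSONFormatter:

# Sketch: the uvicorn log config produced for log_file=None, log_level='info'.
# Requires singlestoredb >= 1.15.3 so the formatter class path resolves.
import logging.config

log_config = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'json': {'()': 'singlestoredb.functions.ext.utils.JSONFormatter'},
    },
    'handlers': {
        'default': {'class': 'logging.StreamHandler', 'formatter': 'json'},
    },
    'loggers': {
        name: {'handlers': ['default'], 'level': 'INFO', 'propagate': False}
        for name in ('uvicorn', 'uvicorn.error', 'uvicorn.access')
    },
}
logging.config.dictConfig(log_config)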
singlestoredb/functions/ext/timer.py
CHANGED
@@ -4,10 +4,6 @@ from typing import Any
 from typing import Dict
 from typing import Optional
 
-from . import utils
-
-logger = utils.get_logger('singlestoredb.functions.ext.metrics')
-
 
 class RoundedFloatEncoder(json.JSONEncoder):
 
@@ -87,12 +83,7 @@ class Timer:
         self.entries.clear()
         self._current_key = None
 
-    def finish(self) ->
+    def finish(self) -> Dict[str, Any]:
         """Finish the current timing context and store the elapsed time."""
         self.metrics['total'] = time.perf_counter() - self.start_time
-        self.
-
-    def log_metrics(self) -> None:
-        if self.metadata.get('function'):
-            result = dict(type='function_metrics', **self.metadata, **self.metrics)
-            logger.info(json.dumps(result, cls=RoundedFloatEncoder))
+        return dict(type='function_metrics', **self.metadata, **self.metrics)
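The net effect is that Timer.finish() now returns the metrics payload instead of logging it itself, leaving the logging decision to the caller (see the disable_metrics handling in asgi.py above). A minimal sketch of the new contract, using the constructor keywords seen at the call sites in asgi.py and assuming singlestoredb >= 1.15.3:

# Sketch: finish() returns the metrics dict rather than writing a log line.
import datetime
from singlestoredb.functions.ext.timer import Timer

timer = Timer(
    app_name='demo_app',      # illustrative values
    id='request-1234',
    timestamp=datetime.datetime.now(
        datetime.timezone.utc,
    ).strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
)
metrics = timer.finish()
print(metrics['type'])        # function_metrics
print(metrics['total'])       # elapsed seconds since the timer started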
singlestoredb/functions/ext/utils.py
CHANGED
@@ -1,4 +1,5 @@
 #!/usr/bin/env python
+import datetime
 import json
 import logging
 import re
@@ -30,14 +31,62 @@ except ImportError:
         return super().formatMessage(recordcopy)
 
 
+class JSONFormatter(logging.Formatter):
+    """Custom JSON formatter for structured logging."""
+
+    def format(self, record: logging.LogRecord) -> str:
+        # Create proper ISO timestamp with microseconds
+        timestamp = datetime.datetime.fromtimestamp(
+            record.created, tz=datetime.timezone.utc,
+        )
+        # Keep only 3 digits for milliseconds
+        iso_timestamp = timestamp.strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3] + 'Z'
+
+        log_entry = {
+            'timestamp': iso_timestamp,
+            'level': record.levelname,
+            'logger': record.name,
+            'message': record.getMessage(),
+        }
+
+        # Add extra fields if present
+        allowed_fields = [
+            'app_name', 'request_id', 'function_name',
+            'content_type', 'accepts', 'metrics',
+        ]
+        for field in allowed_fields:
+            if hasattr(record, field):
+                log_entry[field] = getattr(record, field)
+
+        # Add exception info if present
+        if record.exc_info:
+            log_entry['exception'] = self.formatException(record.exc_info)
+
+        return json.dumps(log_entry)
+
+
 def get_logger(name: str) -> logging.Logger:
-    """Return a
+    """Return a logger with JSON formatting."""
     logger = logging.getLogger(name)
-
-
-
-
-
+
+    # Only configure if not already configured with JSON formatter
+    has_json_formatter = any(
+        isinstance(getattr(handler, 'formatter', None), JSONFormatter)
+        for handler in logger.handlers
+    )
+
+    if not logger.handlers or not has_json_formatter:
+        # Clear handlers only if we need to reconfigure
+        logger.handlers.clear()
+        handler = logging.StreamHandler()
+        formatter = JSONFormatter()
+        handler.setFormatter(formatter)
+        logger.addHandler(handler)
+        logger.setLevel(logging.INFO)
+
+    # Prevent propagation to avoid duplicate messages or different formatting
+    logger.propagate = False
+
     return logger
 
 
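The formatter only passes through a whitelist of extra fields, which is what keeps the structured log lines emitted from asgi.py stable. A minimal sketch of the output, assuming singlestoredb >= 1.15.3 is installed:

# Sketch: one JSON log line produced via the new get_logger/JSONFormatter.
from singlestoredb.functions.ext.utils import get_logger

logger = get_logger('demo')
logger.info(
    'Function call initiated',
    extra={
        'app_name': 'demo_app',       # illustrative values
        'request_id': 'abc-123',
        'function_name': 'my_udf',
    },
)
# Emits one line like:
# {"timestamp": "2025-01-01T00:00:00.000Z", "level": "INFO", "logger": "demo",
#  "message": "Function call initiated", "app_name": "demo_app",
#  "request_id": "abc-123", "function_name": "my_udf"}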
singlestoredb/tests/test_ext_func_data.py
CHANGED
@@ -269,7 +269,7 @@ class TestRowdat1(unittest.TestCase):
     def test_numpy_accel(self):
         dump_res = rowdat_1._dump_numpy_accel(
             col_types, numpy_row_ids, numpy_data,
-            )
+        )
         load_res = rowdat_1._load_numpy_accel(col_spec, dump_res)
 
         ids = load_res[0]
@@ -294,7 +294,7 @@ class TestRowdat1(unittest.TestCase):
     def test_numpy(self):
         dump_res = rowdat_1._dump_numpy(
             col_types, numpy_row_ids, numpy_data,
-            )
+        )
         load_res = rowdat_1._load_numpy(col_spec, dump_res)
 
         ids = load_res[0]
@@ -387,7 +387,7 @@ class TestRowdat1(unittest.TestCase):
         with self.assertRaises(res, msg=f'Expected {res} for {data} in {dtype}'):
             rowdat_1._dump_numpy_accel(
                 [dtype], numpy_row_ids, [(arr, None)],
-                )
+            )
 
         # Pure Python
         if 'mediumint exceeds' in name:
@@ -396,13 +396,13 @@ class TestRowdat1(unittest.TestCase):
             with self.assertRaises(res, msg=f'Expected {res} for {data} in {dtype}'):
                 rowdat_1._dump_numpy(
                     [dtype], numpy_row_ids, [(arr, None)],
-                    )
+                )
 
         else:
             # Accelerated
             dump_res = rowdat_1._dump_numpy_accel(
                 [dtype], numpy_row_ids, [(arr, None)],
-                )
+            )
             load_res = rowdat_1._load_numpy_accel([('x', dtype)], dump_res)
             assert load_res[1][0][0] == res, \
                 f'Expected {res} for {data}, but got {load_res[1][0][0]} in {dtype}'
@@ -410,7 +410,7 @@ class TestRowdat1(unittest.TestCase):
             # Pure Python
             dump_res = rowdat_1._dump_numpy(
                 [dtype], numpy_row_ids, [(arr, None)],
-                )
+            )
             load_res = rowdat_1._load_numpy([('x', dtype)], dump_res)
             assert load_res[1][0][0] == res, \
                 f'Expected {res} for {data}, but got {load_res[1][0][0]} in {dtype}'
@@ -788,7 +788,7 @@ class TestRowdat1(unittest.TestCase):
         # Accelerated
         dump_res = rowdat_1._dump_numpy_accel(
             [dtype], numpy_row_ids, [(data, None)],
-            )
+        )
         load_res = rowdat_1._load_numpy_accel([('x', dtype)], dump_res)
 
         if name == 'double from float32':
@@ -800,7 +800,7 @@ class TestRowdat1(unittest.TestCase):
         # Pure Python
         dump_res = rowdat_1._dump_numpy(
             [dtype], numpy_row_ids, [(data, None)],
-            )
+        )
         load_res = rowdat_1._load_numpy([('x', dtype)], dump_res)
 
         if name == 'double from float32':
@@ -812,7 +812,7 @@ class TestRowdat1(unittest.TestCase):
     def test_python(self):
         dump_res = rowdat_1._dump(
             col_types, py_row_ids, py_col_data,
-            )
+        )
         load_res = rowdat_1._load(col_spec, dump_res)
 
         ids = load_res[0]
@@ -824,7 +824,7 @@ class TestRowdat1(unittest.TestCase):
     def test_python_accel(self):
         dump_res = rowdat_1._dump_accel(
             col_types, py_row_ids, py_col_data,
-            )
+        )
         load_res = rowdat_1._load_accel(col_spec, dump_res)
 
         ids = load_res[0]
@@ -836,7 +836,7 @@ class TestRowdat1(unittest.TestCase):
     def test_polars(self):
         dump_res = rowdat_1._dump_polars(
             col_types, polars_row_ids, polars_data,
-            )
+        )
         load_res = rowdat_1._load_polars(col_spec, dump_res)
 
         ids = load_res[0]
@@ -861,7 +861,7 @@ class TestRowdat1(unittest.TestCase):
     def test_polars_accel(self):
         dump_res = rowdat_1._dump_polars_accel(
             col_types, polars_row_ids, polars_data,
-            )
+        )
         load_res = rowdat_1._load_polars_accel(col_spec, dump_res)
 
         ids = load_res[0]
@@ -886,7 +886,7 @@ class TestRowdat1(unittest.TestCase):
     def test_pandas(self):
         dump_res = rowdat_1._dump_pandas(
             col_types, pandas_row_ids, pandas_data,
-            )
+        )
         load_res = rowdat_1._load_pandas(col_spec, dump_res)
 
         ids = load_res[0]
@@ -911,7 +911,7 @@ class TestRowdat1(unittest.TestCase):
     def test_pandas_accel(self):
         dump_res = rowdat_1._dump_pandas_accel(
             col_types, pandas_row_ids, pandas_data,
-            )
+        )
         load_res = rowdat_1._load_pandas_accel(col_spec, dump_res)
 
         ids = load_res[0]
@@ -936,7 +936,7 @@ class TestRowdat1(unittest.TestCase):
     def test_pyarrow(self):
         dump_res = rowdat_1._dump_arrow(
             col_types, pyarrow_row_ids, pyarrow_data,
-            )
+        )
         load_res = rowdat_1._load_arrow(col_spec, dump_res)
 
         ids = load_res[0]
@@ -961,7 +961,7 @@ class TestRowdat1(unittest.TestCase):
     def test_pyarrow_accel(self):
         dump_res = rowdat_1._dump_arrow_accel(
             col_types, pyarrow_row_ids, pyarrow_data,
-            )
+        )
         load_res = rowdat_1._load_arrow_accel(col_spec, dump_res)
 
         ids = load_res[0]
@@ -1053,7 +1053,7 @@ class TestJSON(unittest.TestCase):
     def test_pandas(self):
         dump_res = rowdat_1._dump_pandas(
             col_types, pandas_row_ids, pandas_data,
-            )
+        )
         load_res = rowdat_1._load_pandas(col_spec, dump_res)
 
         ids = load_res[0]
@@ -1078,7 +1078,7 @@ class TestJSON(unittest.TestCase):
     def test_pyarrow(self):
         dump_res = rowdat_1._dump_arrow(
             col_types, pyarrow_row_ids, pyarrow_data,
-            )
+        )
         load_res = rowdat_1._load_arrow(col_spec, dump_res)
 
         ids = load_res[0]
{singlestoredb-1.15.1.dist-info → singlestoredb-1.15.3.dist-info}/RECORD
CHANGED
@@ -1,9 +1,9 @@
-_singlestoredb_accel.pyd,sha256=
-singlestoredb/__init__.py,sha256=
+_singlestoredb_accel.pyd,sha256=r7uriN9QvsnrC_WNK0jkgC-ck_rWnjcPDAxyu62IcXQ,63488
+singlestoredb/__init__.py,sha256=Wmeo26b80RO-r0WDJSLxad_YrAJzt3Y0-UkHRu_q6tU,2347
 singlestoredb/auth.py,sha256=RmYiH0Wlc2RXc4pTlRMysxtBI445ggCIwojWKC_eDLE,7844
-singlestoredb/config.py,sha256=
+singlestoredb/config.py,sha256=rS8OmWMaHfMJQTkmSw_qwXR2R0HP80eP4gjzVmXkL2E,14419
 singlestoredb/connection.py,sha256=I2AP_0l7hNARfXiSuVW953CsGYn_rKbTg_NyWEiGHbY,47542
-singlestoredb/converters.py,sha256=
+singlestoredb/converters.py,sha256=ax1wpwv04CpDA039UDjDSTw0ojjIY7T9KMz2oYQxKjc,21654
 singlestoredb/exceptions.py,sha256=WCCJrNSsU-hD-621Jpd6bwmvGftQ7byXkk-XKXlaxpg,3354
 singlestoredb/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 singlestoredb/pytest.py,sha256=TH364xRCN7_QaN0oRQDHixrEcDx_ZBgu3bmY0tvKrYU,9357
@@ -19,9 +19,9 @@ singlestoredb/apps/_config.py,sha256=b_Op6KjSJdPwym-AlHcy0dXLHiks1cL8Q2ea1W8r3NA
 singlestoredb/apps/_connection_info.py,sha256=P9rW4t2k3QFk3A34dIg9DbCWBqrcHDcZPi2Q9r2-o3A,321
 singlestoredb/apps/_dashboards.py,sha256=qEdDivjwS68Uukay0Qw-3awHZFpkcqapzd3vLaVUzWo,1521
 singlestoredb/apps/_process.py,sha256=eMiBO4piaRX1S6zdnMx0X0E4J7E1XrXndnVW0GRYq1Y,976
-singlestoredb/apps/_python_udfs.py,sha256=
-singlestoredb/apps/_stdout_supress.py,sha256=
-singlestoredb/apps/_uvicorn_util.py,sha256=
+singlestoredb/apps/_python_udfs.py,sha256=dJS7wc4F2WL2IyjuJkQNlr3505OmBredJXI_H07HaRk,3258
+singlestoredb/apps/_stdout_supress.py,sha256=5p7xJSuhsjZo8d_vkXDs5aVHjW5KGm8W_R6NnFapPMg,695
+singlestoredb/apps/_uvicorn_util.py,sha256=g_iQ4gAJ_98cE2_sDhO74b1vy7aEjIhICs6hPb6AGMk,1153
 singlestoredb/docstring/__init__.py,sha256=NQ5uUsz_CwWALDXQxS_ecHFoqe-2efJLH81zlX8hU2E,876
 singlestoredb/docstring/attrdoc.py,sha256=fOc_lU7ax4xCUaKG8inCcZbcj18QmTc7aSWOi5F36wc,4347
 singlestoredb/docstring/common.py,sha256=NDwwk2uizIcxQE1bjYG-PcE4-fKifkEXW2CYfBHffb4,6637
@@ -48,12 +48,12 @@ singlestoredb/functions/signature.py,sha256=1aSFezUgWSRsGcrBjOVVZyZgw0q356y7IWgM
 singlestoredb/functions/utils.py,sha256=lZPxdYfHxrSfxGWCoF0YZyakVy2iYlozJ1lPSaPKRlo,11190
 singlestoredb/functions/ext/__init__.py,sha256=5ppI8IZN_zOwoJFdu_Oq9ipxtyHw9n6OMVAa_s9T_yY,24
 singlestoredb/functions/ext/arrow.py,sha256=mQhwaMpvCH_dP92WIhP_j-stu272n4UAHsFUOBTgnq0,9436
-singlestoredb/functions/ext/asgi.py,sha256=
+singlestoredb/functions/ext/asgi.py,sha256=trGgDFrOr3p3GeU12Nehoma5zVlGwMnncHzbaqXI2Q0,73763
 singlestoredb/functions/ext/json.py,sha256=j9133xOpyuSqb8smBmi_bPvv6OYCbNfpbLbEicyGqmQ,10522
 singlestoredb/functions/ext/mmap.py,sha256=0BN9OyEONZ174qdZWe2m3Xykt3-QcxyLYBt2iCG772Q,14123
 singlestoredb/functions/ext/rowdat_1.py,sha256=UNMMUA8mb6iIRfJV2FsdA20Sw6s-LEdHQ_tC4K4g70Q,21836
-singlestoredb/functions/ext/timer.py,sha256=
-singlestoredb/functions/ext/utils.py,sha256=
+singlestoredb/functions/ext/timer.py,sha256=fVo0YIwV8T6Fbl6kBbgnwMTQeLVXCVDmuzkXLC5MpVg,2795
+singlestoredb/functions/ext/utils.py,sha256=KE0g1s4jUEoJK44CChuCgPB6Ko3KpRbVSYI_aHbzams,7156
 singlestoredb/functions/typing/__init__.py,sha256=5AJG4nx-HKCeemNxL0qc1VunYPJ5lHRzpYAK_qMybNw,1380
 singlestoredb/functions/typing/numpy.py,sha256=WJt0bWwyEA8Mofpn_-0Q82u7Q8XAtzBuhbaXSqE1E34,681
 singlestoredb/functions/typing/pandas.py,sha256=-abvGDup-WwTbaAyQuNo4Fq7ATe8gYx_5b2yUPJlX7o,85
@@ -149,7 +149,7 @@ singlestoredb/tests/test_connection.py,sha256=OXOk6qCJci62orlptwl8S4BkETVPbFP3uM
 singlestoredb/tests/test_dbapi.py,sha256=cNJoTEZvYG7ckcwT7xqlkJX-2TDEYGTDDU1Igucp48k,679
 singlestoredb/tests/test_exceptions.py,sha256=vscMYmdOJr0JmkTAJrNI2w0Q96Nfugjkrt5_lYnw8i0,1176
 singlestoredb/tests/test_ext_func.py,sha256=YidPnlO7HWsVIbPwdCa33Oo8SyGkP2_Pcuj_pu39r4s,47743
-singlestoredb/tests/test_ext_func_data.py,sha256=
+singlestoredb/tests/test_ext_func_data.py,sha256=LeQoV5QQkSJ7WVOKZw_F5zzIFXXrGputh3k_lDDKlG4,48616
 singlestoredb/tests/test_fusion.py,sha256=XT5rhYx32mndcZGaW2Xc7DTLMLEcf_vO3w1Dxss9nMM,52120
 singlestoredb/tests/test_http.py,sha256=7hwXe61hlUes3nji0MTTZweo94tJAlJ-vA5ct9geXFQ,8868
 singlestoredb/tests/test_management.py,sha256=Cn0n-RhzZPgVqcgDDNrGvwCDJMZo34KVRqYsy_KaKOE,53537
@@ -173,9 +173,9 @@ singlestoredb/utils/results.py,sha256=wR70LhCqlobniZf52r67zYLBOKjWHQm68NAskdRQND
 singlestoredb/utils/xdict.py,sha256=-wi1lSPTnY99fhVMBhPKJ8cCsQhNG4GMUfkEBDKYgCw,13321
 sqlx/__init__.py,sha256=4Sdn8HN-Hf8v0_wCt60DCckCg8BvgM3-9r4YVfZycRE,89
 sqlx/magic.py,sha256=6VBlotgjautjev599tHaTYOfcfOA9m6gV_-P1_Qc4lI,3622
-singlestoredb-1.15.
-singlestoredb-1.15.
-singlestoredb-1.15.
-singlestoredb-1.15.
-singlestoredb-1.15.
-singlestoredb-1.15.
+singlestoredb-1.15.3.dist-info/LICENSE,sha256=Bojenzui8aPNjlF3w4ojguDP7sTf8vFV_9Gc2UAG1sg,11542
+singlestoredb-1.15.3.dist-info/METADATA,sha256=mw7wyD-0WbCOe3hN6n6M6K1hFBD6YT8Kmt_NJx3B8MQ,5949
+singlestoredb-1.15.3.dist-info/WHEEL,sha256=UyMHzmWA0xVqVPKfTiLs2eN3OWWZUl-kQemNbpIqlKo,100
+singlestoredb-1.15.3.dist-info/entry_points.txt,sha256=bSLaTWB5zGjpVYPAaI46MkkDup0su-eb3uAhCNYuRV0,48
+singlestoredb-1.15.3.dist-info/top_level.txt,sha256=lA65Vf4qAMfg_s1oG3LEO90h4t1Z-SPDbRqkevI3bSY,40
+singlestoredb-1.15.3.dist-info/RECORD,,
{singlestoredb-1.15.1.dist-info → singlestoredb-1.15.3.dist-info}/LICENSE
File without changes

{singlestoredb-1.15.1.dist-info → singlestoredb-1.15.3.dist-info}/WHEEL
File without changes

{singlestoredb-1.15.1.dist-info → singlestoredb-1.15.3.dist-info}/entry_points.txt
File without changes

{singlestoredb-1.15.1.dist-info → singlestoredb-1.15.3.dist-info}/top_level.txt
File without changes