singlestoredb-1.4.3-cp38-abi3-win32.whl → singlestoredb-1.6.0-cp38-abi3-win32.whl

This diff shows the contents of publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.

Potentially problematic release.

@@ -7,6 +7,7 @@ from typing import Optional
 from typing import Union
 
 from ..exceptions import ManagementError
+from .job import JobsManager
 from .manager import Manager
 from .utils import vars_to_str
 
@@ -190,3 +191,19 @@ class Organization(object):
         )
         out._manager = manager
         return out
+
+    @property
+    def jobs(self) -> JobsManager:
+        """
+        Retrieve a SingleStoreDB scheduled job manager.
+
+        Parameters
+        ----------
+        manager : WorkspaceManager, optional
+            The WorkspaceManager the JobsManager belongs to
+
+        Returns
+        -------
+        :class:`JobsManager`
+        """
+        return JobsManager(self._manager)
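
Based on the test code later in this diff, a minimal sketch of how the new jobs property would be used; the notebook path, runtime name, and parameters below are illustrative, and a management API token is assumed to be configured:

import singlestoredb as s2

manager = s2.manage_workspaces()
jobs = manager.organizations.current.jobs     # JobsManager introduced in this release

# Run a notebook once on a runtime, wait for it, then clean up.
job = jobs.run('Example.ipynb', 'notebooks-cpu-small', {'intParam': 1})
job.wait()
jobs.delete(job.job_id)
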
@@ -215,6 +215,26 @@ def get_token() -> Optional[str]:
     return None
 
 
+def get_cluster_id() -> Optional[str]:
+    """Return the cluster id for the current token or environment."""
+    return os.environ.get('SINGLESTOREDB_CLUSTER') or None
+
+
+def get_workspace_id() -> Optional[str]:
+    """Return the workspace id for the current token or environment."""
+    return os.environ.get('SINGLESTOREDB_WORKSPACE') or None
+
+
+def get_virtual_workspace_id() -> Optional[str]:
+    """Return the virtual workspace id for the current token or environment."""
+    return os.environ.get('SINGLESTOREDB_VIRTUAL_WORKSPACE') or None
+
+
+def get_database_name() -> Optional[str]:
+    """Return the default database name for the current token or environment."""
+    return os.environ.get('SINGLESTOREDB_DEFAULT_DATABASE') or None
+
+
 def enable_http_tracing() -> None:
     """Enable tracing of HTTP requests."""
     import logging
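
These helpers surface the environment variables that the scheduled-job machinery (and the tests later in this diff) rely on. A minimal sketch of the intended behavior; the defining module's path is not shown in this diff, so no import is given:

import os

os.environ['SINGLESTOREDB_WORKSPACE'] = 'ws-12345'                   # example value
os.environ['SINGLESTOREDB_DEFAULT_DATABASE'] = 'information_schema'  # example value

# With the variables above set:
#   get_workspace_id()         -> 'ws-12345'
#   get_database_name()        -> 'information_schema'
#   get_cluster_id()           -> None (SINGLESTOREDB_CLUSTER is unset)
#   get_virtual_workspace_id() -> None (SINGLESTOREDB_VIRTUAL_WORKSPACE is unset)
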
@@ -246,7 +266,33 @@ def to_datetime(
     out = converters.datetime_fromisoformat(obj)
     if isinstance(out, str):
         return None
-    if isinstance(out, datetime.date):
+    if isinstance(out, datetime.date) and not isinstance(out, datetime.datetime):
+        return datetime.datetime(out.year, out.month, out.day)
+    return out
+
+
+def to_datetime_strict(
+    obj: Optional[Union[str, datetime.datetime]],
+) -> datetime.datetime:
+    """Convert string to datetime."""
+    if not obj:
+        raise TypeError('not possible to convert None to datetime')
+    if isinstance(obj, datetime.datetime):
+        return obj
+    if obj == '0001-01-01T00:00:00Z':
+        raise ValueError('not possible to convert 0001-01-01T00:00:00Z to datetime')
+    obj = obj.replace('Z', '')
+    # Fix datetimes with truncated zeros
+    if '.' in obj:
+        obj, micros = obj.split('.', 1)
+        micros = micros + '0' * (6 - len(micros))
+        obj = obj + '.' + micros
+    out = converters.datetime_fromisoformat(obj)
+    if not out:
+        raise TypeError('not possible to convert None to datetime')
+    if isinstance(out, str):
+        raise ValueError('value cannot be str')
+    if isinstance(out, datetime.date) and not isinstance(out, datetime.datetime):
         return datetime.datetime(out.year, out.month, out.day)
     return out
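
The zero-padding step above exists because API timestamps can carry a truncated fractional-seconds field (e.g. ".5" rather than ".500000"), which stricter ISO parsers reject. A standalone sketch of the same padding applied to the standard-library parser (illustrative only; the package itself goes through its converters module):

import datetime

def pad_fraction(ts: str) -> str:
    """Pad a truncated fractional-seconds field out to 6 digits."""
    ts = ts.replace('Z', '')
    if '.' in ts:
        head, frac = ts.split('.', 1)
        ts = head + '.' + frac + '0' * (6 - len(frac))
    return ts

# datetime.fromisoformat() on Python < 3.11 accepts only 3 or 6 fractional digits.
print(datetime.datetime.fromisoformat(pad_fraction('2024-05-01T12:30:45.5Z')))
# -> 2024-05-01 12:30:45.500000
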
 
@@ -280,6 +280,8 @@ class Connection(BaseConnection):
         Should the connection track the SINGLESTOREDB_URL environment variable?
     enable_extended_data_types : bool, optional
         Should extended data types (BSON, vector) be enabled?
+    vector_data_format : str, optional
+        Specify the data type of vector values: json or binary
 
     See `Connection <https://www.python.org/dev/peps/pep-0249/#connection-objects>`_
     in the specification.
@@ -350,6 +352,7 @@ class Connection(BaseConnection):
         encoding_errors='strict',
         track_env=False,
         enable_extended_data_types=True,
+        vector_data_format='binary',
     ):
         BaseConnection.__init__(**dict(locals()))
 
@@ -634,6 +637,13 @@ class Connection(BaseConnection):
         self._in_sync = False
         self._track_env = bool(track_env) or self.host == 'singlestore.com'
         self._enable_extended_data_types = enable_extended_data_types
+        if vector_data_format.lower() in ['json', 'binary']:
+            self._vector_data_format = vector_data_format
+        else:
+            raise ValueError(
+                'unknown value for vector_data_format, '
+                f'expecting "json" or "binary": {vector_data_format}',
+            )
         self._connection_info = {}
         events.subscribe(self._handle_event)
 
@@ -1117,6 +1127,15 @@ class Connection(BaseConnection):
                 pass
             c.close()
 
+        if self._vector_data_format:
+            c = self.cursor()
+            try:
+                val = self._vector_data_format
+                c.execute(f'SET @@SESSION.vector_type_project_format={val}')
+            except self.OperationalError:
+                pass
+            c.close()
+
         if self.init_command is not None:
             c = self.cursor()
             c.execute(self.init_command)
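
Taken together, these changes add a vector_data_format connection parameter (default 'binary'), validate it at connect time, and apply it to the session through vector_type_project_format. A minimal usage sketch with placeholder connection details:

import singlestoredb as s2

conn = s2.connect(
    'user:password@host:3306/dbname',   # placeholder DSN
    vector_data_format='json',          # or 'binary', the default in this release
)
# Any value other than 'json' or 'binary' raises ValueError before a connection is made.
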
@@ -9,6 +9,7 @@ from typing import Callable
 from typing import Dict
 from typing import List
 from typing import Optional
+from typing import Tuple
 
 from . import _objects as obj
 from ..management import workspace as mgr
@@ -187,6 +188,37 @@ class Portal(object):
             timeout_message='timeout waiting for workspace update',
         )
 
+    deployment = workspace
+
+    @property
+    def connection(self) -> Tuple[obj.Workspace, Optional[str]]:
+        """Workspace and default database name."""
+        return self.workspace, self.default_database
+
+    @connection.setter
+    def connection(self, workspace_and_default_database: Tuple[str, str]) -> None:
+        """Set workspace and default database name."""
+        name_or_id, default_database = workspace_and_default_database
+        if re.match(
+            r'[a-z0-9]{8}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{12}',
+            name_or_id, flags=re.I,
+        ):
+            w = mgr.get_workspace(name_or_id)
+        else:
+            w = mgr.get_workspace_group(self.workspace_group_id).workspaces[name_or_id]
+
+        if w.state and w.state.lower() not in ['active', 'resumed']:
+            raise RuntimeError('workspace is not active')
+
+        id = w.id
+
+        self._call_javascript(
+            'changeConnection', [id, default_database],
+            wait_on_condition=lambda: self.workspace_id == id and
            self.default_database == default_database,  # type: ignore
+            timeout_message='timeout waiting for workspace update',
+        )
+
     @property
     def cluster_id(self) -> Optional[str]:
         """Cluster ID."""
@@ -91,7 +91,7 @@ class TestFusion(unittest.TestCase):
 
 
 @pytest.mark.management
-class TestManagementAPIFusion(unittest.TestCase):
+class TestWorkspaceFusion(unittest.TestCase):
 
     id: str = secrets.token_hex(8)
     dbname: str = ''
@@ -463,3 +463,252 @@ class TestManagementAPIFusion(unittest.TestCase):
             mgr.workspace_groups[wg_name].terminate(force=True)
         except Exception:
             pass
+
+
+@pytest.mark.management
+class TestJobsFusion(unittest.TestCase):
+
+    id: str = secrets.token_hex(8)
+    notebook_name: str = 'Scheduling Test.ipynb'
+    dbname: str = ''
+    dbexisted: bool = False
+    manager: None
+    workspace_group: None
+    workspace: None
+    job_ids = []
+
+    @classmethod
+    def setUpClass(cls):
+        sql_file = os.path.join(os.path.dirname(__file__), 'test.sql')
+        cls.dbname, cls.dbexisted = utils.load_sql(sql_file)
+        cls.manager = s2.manage_workspaces()
+        us_regions = [x for x in cls.manager.regions if x.name.startswith('US')]
+        cls.workspace_group = cls.manager.create_workspace_group(
+            f'Jobs Fusion Testing {cls.id}',
+            region=random.choice(us_regions),
+            firewall_ranges=[],
+        )
+        cls.workspace = cls.workspace_group.create_workspace(
+            f'jobs-test-{cls.id}',
+            wait_on_active=True,
+        )
+        os.environ['SINGLESTOREDB_DEFAULT_DATABASE'] = cls.dbname
+        os.environ['SINGLESTOREDB_WORKSPACE'] = cls.workspace.id
+
+    @classmethod
+    def tearDownClass(cls):
+        for job_id in cls.job_ids:
+            cls.manager.organizations.current.jobs.delete(job_id)
+        if cls.workspace_group is not None:
+            cls.workspace_group.terminate(force=True)
+        cls.manager = None
+        cls.workspace_group = None
+        cls.workspace = None
+        if os.environ.get('SINGLESTOREDB_WORKSPACE', None) is not None:
+            del os.environ['SINGLESTOREDB_WORKSPACE']
+        if os.environ.get('SINGLESTOREDB_DEFAULT_DATABASE', None) is not None:
+            del os.environ['SINGLESTOREDB_DEFAULT_DATABASE']
+
+    def setUp(self):
+        self.enabled = os.environ.get('SINGLESTOREDB_FUSION_ENABLED')
+        os.environ['SINGLESTOREDB_FUSION_ENABLED'] = '1'
+        self.conn = s2.connect(database=type(self).dbname, local_infile=True)
+        self.cur = self.conn.cursor()
+
+    def tearDown(self):
+        if self.enabled:
+            os.environ['SINGLESTOREDB_FUSION_ENABLED'] = self.enabled
+        else:
+            del os.environ['SINGLESTOREDB_FUSION_ENABLED']
+
+        try:
+            if self.cur is not None:
+                self.cur.close()
+        except Exception:
+            # traceback.print_exc()
+            pass
+
+        try:
+            if self.conn is not None:
+                self.conn.close()
+        except Exception:
+            # traceback.print_exc()
+            pass
+
+    def test_schedule_drop_job(self):
+        # schedule recurring job
+        self.cur.execute(
+            f'schedule job using notebook "{self.notebook_name}" '
+            'with mode "recurring" '
+            'execute every 5 minutes '
+            'with name "recurring-job" '
+            'create snapshot '
+            'resume target '
+            'with runtime "notebooks-cpu-small" '
+            'with parameters '
+            '{"strParam": "string", "intParam": 1, '
+            '"floatParam": 1.0, "boolParam": true}',
+        )
+        out = list(self.cur)
+        job_id = out[0][0]
+        self.job_ids.append(job_id)
+        desc = self.cur.description
+        assert len(desc) == 1
+        assert desc[0][0] == 'JobID'
+        assert len(out) == 1
+        assert out[0][0] == job_id
+
+        # drop job
+        self.cur.execute(f'drop jobs {job_id}')
+        out = list(self.cur)
+        desc = self.cur.description
+        assert len(desc) == 2
+        assert [x[0] for x in desc] == [
+            'JobID', 'Success',
+        ]
+        assert len(out) == 1
+        res = out[0]
+        assert res[0] == job_id
+        assert res[1] == 1
+
+    def test_run_wait_drop_job(self):
+        # run job
+        self.cur.execute(
+            f'run job using notebook "{self.notebook_name}" '
+            'with runtime "notebooks-cpu-small" '
+            'with parameters '
+            '{"strParam": "string", "intParam": 1, '
+            '"floatParam": 1.0, "boolParam": true}',
+        )
+        out = list(self.cur)
+        job_id = out[0][0]
+        self.job_ids.append(job_id)
+        desc = self.cur.description
+        assert len(desc) == 1
+        assert desc[0][0] == 'JobID'
+        assert len(out) == 1
+        assert out[0][0] == job_id
+
+        # wait on job
+        self.cur.execute(f'wait on jobs {job_id}')
+        out = list(self.cur)
+        desc = self.cur.description
+        assert len(desc) == 1
+        assert desc[0][0] == 'Success'
+        assert out[0][0] == 1
+
+        # drop job
+        self.cur.execute(f'drop jobs {job_id}')
+        out = list(self.cur)
+        desc = self.cur.description
+        assert len(desc) == 2
+        assert [x[0] for x in desc] == [
+            'JobID', 'Success',
+        ]
+        assert len(out) == 1
+        res = out[0]
+        assert res[0] == job_id
+        assert res[1] == 1
+
+    def test_show_jobs_and_executions(self):
+        # schedule recurring job
+        self.cur.execute(
+            f'schedule job using notebook "{self.notebook_name}" '
+            'with mode "recurring" '
+            'execute every 5 minutes '
+            'with name "show-job" '
+            'with runtime "notebooks-cpu-small" '
+            'with parameters '
+            '{"strParam": "string", "intParam": 1, '
+            '"floatParam": 1.0, "boolParam": true}',
+        )
+        out = list(self.cur)
+        job_id = out[0][0]
+        self.job_ids.append(job_id)
+        desc = self.cur.description
+        assert len(desc) == 1
+        assert desc[0][0] == 'JobID'
+        assert len(out) == 1
+        assert out[0][0] == job_id
+
+        # show jobs with name like "show-job"
+        self.cur.execute(f'show jobs {job_id} like "show-job"')
+        out = list(self.cur)
+        desc = self.cur.description
+        assert len(desc) == 9
+        assert [x[0] for x in desc] == [
+            'JobID', 'Name', 'CreatedAt', 'EnqueuedBy',
+            'CompletedExecutions', 'NotebookPath', 'DatabaseName', 'TargetID',
+            'TargetType',
+        ]
+        assert len(out) == 1
+        job = out[0]
+        assert job[0] == job_id
+        assert job[1] == 'show-job'
+        assert job[5] == self.notebook_name
+        assert job[6] == self.dbname
+        assert job[7] == self.workspace.id
+        assert job[8] == 'Workspace'
+
+        # show jobs with name like "show-job" extended
+        self.cur.execute(f'show jobs {job_id} like "show-job" extended')
+        out = list(self.cur)
+        desc = self.cur.description
+        assert len(desc) == 17
+        assert [x[0] for x in desc] == [
+            'JobID', 'Name', 'CreatedAt', 'EnqueuedBy',
+            'CompletedExecutions', 'NotebookPath', 'DatabaseName', 'TargetID',
+            'TargetType', 'Description', 'TerminatedAt', 'CreateSnapshot',
+            'MaxDurationInMins', 'ExecutionIntervalInMins', 'Mode', 'StartAt',
+            'ResumeTarget',
+        ]
+        assert len(out) == 1
+        job = out[0]
+        assert job[0] == job_id
+        assert job[1] == 'show-job'
+        assert job[5] == self.notebook_name
+        assert job[6] == self.dbname
+        assert job[7] == self.workspace.id
+        assert job[8] == 'Workspace'
+        assert not job[11]
+        assert job[13] == 5
+        assert job[14] == 'Recurring'
+        assert not job[16]
+
+        # show executions for job with id job_id from 1 to 5
+        self.cur.execute(f'show job executions for {job_id} from 1 to 5')
+        out = list(self.cur)
+        desc = self.cur.description
+        assert len(desc) == 7
+        assert [x[0] for x in desc] == [
+            'ExecutionID', 'ExecutionNumber', 'JobID',
+            'Status', 'ScheduledStartTime', 'StartedAt', 'FinishedAt',
+        ]
+        exec_job_ids = [x[2] for x in out]
+        assert [x for x in exec_job_ids] == [job_id]
+
+        # show executions for job with id job_id from 1 to 5 extended
+        self.cur.execute(f'show job executions for {job_id} from 1 to 5 extended')
+        out = list(self.cur)
+        desc = self.cur.description
+        assert len(desc) == 8
+        assert [x[0] for x in desc] == [
+            'ExecutionID', 'ExecutionNumber', 'JobID',
+            'Status', 'ScheduledStartTime', 'StartedAt', 'FinishedAt',
+            'SnapshotNotebookPath',
+        ]
+        exec_job_ids = [x[2] for x in out]
+        assert [x for x in exec_job_ids] == [job_id]
+
+        # drop job
+        self.cur.execute(f'drop jobs {job_id}')
+        out = list(self.cur)
+        desc = self.cur.description
+        assert len(desc) == 2
+        assert [x[0] for x in desc] == [
+            'JobID', 'Success',
+        ]
+        assert len(out) == 1
+        res = out[0]
+        assert res[0] == job_id
+        assert res[1] == 1
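
The new test class exercises the scheduled-job Fusion SQL statements (SCHEDULE JOB, RUN JOB, WAIT ON JOBS, SHOW JOBS, SHOW JOB EXECUTIONS, DROP JOBS) end to end. Distilled from the statements above, a minimal sketch of the flow a client would issue; the notebook name and runtime are examples, and Fusion SQL is assumed to be enabled (the tests set SINGLESTOREDB_FUSION_ENABLED=1):

import singlestoredb as s2

with s2.connect() as conn:                 # connection parameters elided
    with conn.cursor() as cur:
        # Run a notebook once and capture the returned JobID.
        cur.execute(
            'run job using notebook "Example.ipynb" '
            'with runtime "notebooks-cpu-small"',
        )
        job_id = list(cur)[0][0]

        cur.execute(f'wait on jobs {job_id}')   # block until the run finishes
        cur.execute(f'drop jobs {job_id}')      # then remove the job
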
@@ -11,6 +11,8 @@ import unittest
 import pytest
 
 import singlestoredb as s2
+from singlestoredb.management.job import Status
+from singlestoredb.management.job import TargetType
 
 
 TEST_DIR = pathlib.Path(os.path.dirname(__file__))
@@ -876,3 +878,153 @@ class TestSecrets(unittest.TestCase):
 
         assert secret.name == 'secret_name'
         assert secret.value == 'secret_value'
+
+
+@pytest.mark.management
+class TestJob(unittest.TestCase):
+
+    manager = None
+    workspace_group = None
+    workspace = None
+    password = None
+    job_ids = []
+
+    @classmethod
+    def setUpClass(cls):
+        cls.manager = s2.manage_workspaces()
+
+        us_regions = [x for x in cls.manager.regions if 'US' in x.name]
+        cls.password = secrets.token_urlsafe(20)
+
+        name = clean_name(secrets.token_urlsafe(20)[:20])
+
+        cls.workspace_group = cls.manager.create_workspace_group(
+            f'wg-test-{name}',
+            region=random.choice(us_regions).id,
+            admin_password=cls.password,
+            firewall_ranges=['0.0.0.0/0'],
+        )
+
+        try:
+            cls.workspace = cls.workspace_group.create_workspace(
+                f'ws-test-{name}-x',
+                wait_on_active=True,
+            )
+        except Exception:
+            cls.workspace_group.terminate(force=True)
+            raise
+
+    @classmethod
+    def tearDownClass(cls):
+        for job_id in cls.job_ids:
+            cls.manager.organizations.current.jobs.delete(job_id)
+        if cls.workspace_group is not None:
+            cls.workspace_group.terminate(force=True)
+        cls.workspace_group = None
+        cls.workspace = None
+        cls.manager = None
+        cls.password = None
+        if os.environ.get('SINGLESTOREDB_WORKSPACE', None) is not None:
+            del os.environ['SINGLESTOREDB_WORKSPACE']
+        if os.environ.get('SINGLESTOREDB_DEFAULT_DATABASE', None) is not None:
+            del os.environ['SINGLESTOREDB_DEFAULT_DATABASE']
+
+    def test_job_without_database_target(self):
+        """
+        Creates job without target database on a specific runtime
+        Waits for job to finish
+        Gets the job
+        Deletes the job
+        """
+        if os.environ.get('SINGLESTOREDB_WORKSPACE', None) is not None:
+            del os.environ['SINGLESTOREDB_WORKSPACE']
+        if os.environ.get('SINGLESTOREDB_DEFAULT_DATABASE', None) is not None:
+            del os.environ['SINGLESTOREDB_DEFAULT_DATABASE']
+
+        job_manager = self.manager.organizations.current.jobs
+        job = job_manager.run(
+            'Scheduling Test.ipynb',
+            'notebooks-cpu-small',
+            {'strParam': 'string', 'intParam': 1, 'floatParam': 1.0, 'boolParam': True},
+        )
+        self.job_ids.append(job.job_id)
+        assert job.execution_config.notebook_path == 'Scheduling Test.ipynb'
+        assert job.schedule.mode == job_manager.modes().ONCE
+        assert not job.execution_config.create_snapshot
+        assert job.completed_executions_count == 0
+        assert job.name is None
+        assert job.description is None
+        assert job.job_metadata == []
+        assert job.terminated_at is None
+        assert job.target_config is None
+        job.wait()
+        job = job_manager.get(job.job_id)
+        assert job.execution_config.notebook_path == 'Scheduling Test.ipynb'
+        assert job.schedule.mode == job_manager.modes().ONCE
+        assert not job.execution_config.create_snapshot
+        assert job.completed_executions_count == 1
+        assert job.name is None
+        assert job.description is None
+        assert job.job_metadata != []
+        assert len(job.job_metadata) == 1
+        assert job.job_metadata[0].count == 1
+        assert job.job_metadata[0].status == Status.COMPLETED
+        assert job.terminated_at is None
+        assert job.target_config is None
+        deleted = job.delete()
+        assert deleted
+        job = job_manager.get(job.job_id)
+        assert job.terminated_at is not None
+
+    def test_job_with_database_target(self):
+        """
+        Creates job with target database on a specific runtime
+        Waits for job to finish
+        Gets the job
+        Deletes the job
+        """
+        os.environ['SINGLESTOREDB_DEFAULT_DATABASE'] = 'information_schema'
+        os.environ['SINGLESTOREDB_WORKSPACE'] = self.workspace.id
+
+        job_manager = self.manager.organizations.current.jobs
+        job = job_manager.run(
+            'Scheduling Test.ipynb',
+            'notebooks-cpu-small',
+            {'strParam': 'string', 'intParam': 1, 'floatParam': 1.0, 'boolParam': True},
+        )
+        self.job_ids.append(job.job_id)
+        assert job.execution_config.notebook_path == 'Scheduling Test.ipynb'
+        assert job.schedule.mode == job_manager.modes().ONCE
+        assert not job.execution_config.create_snapshot
+        assert job.completed_executions_count == 0
+        assert job.name is None
+        assert job.description is None
+        assert job.job_metadata == []
+        assert job.terminated_at is None
+        assert job.target_config is not None
+        assert job.target_config.database_name == 'information_schema'
+        assert job.target_config.target_id == self.workspace.id
+        assert job.target_config.target_type == TargetType.WORKSPACE
+        assert not job.target_config.resume_target
+        job.wait()
+        job = job_manager.get(job.job_id)
+        assert job.execution_config.notebook_path == 'Scheduling Test.ipynb'
+        assert job.schedule.mode == job_manager.modes().ONCE
+        assert not job.execution_config.create_snapshot
+        assert job.completed_executions_count == 1
+        assert job.name is None
+        assert job.description is None
+        assert job.job_metadata != []
+        assert len(job.job_metadata) == 1
+        assert job.job_metadata[0].count == 1
+        assert job.job_metadata[0].status == Status.COMPLETED
+        assert job.terminated_at is None
+        assert job.target_config is not None
+        assert job.target_config.database_name == 'information_schema'
+        assert job.target_config.target_id == self.workspace.id
+        assert job.target_config.target_type == TargetType.WORKSPACE
+        assert not job.target_config.resume_target
+        deleted = job.delete()
+        assert deleted
+        job = job_manager.get(job.job_id)
+        assert job.terminated_at is not None
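
For comparison with the Fusion SQL tests earlier in the diff, the same flow through the Python management API, distilled from the assertions above; notebook, runtime, and parameters are examples, and a management API token is assumed to be configured:

import singlestoredb as s2
from singlestoredb.management.job import Status

manager = s2.manage_workspaces()
jobs = manager.organizations.current.jobs

job = jobs.run(
    'Example.ipynb',                 # notebook path
    'notebooks-cpu-small',           # runtime
    {'intParam': 1},                 # notebook parameters
)
job.wait()

job = jobs.get(job.job_id)           # refresh state after the run
assert job.job_metadata[0].status == Status.COMPLETED

job.delete()                         # terminate/remove the job
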
@@ -73,6 +73,13 @@ def load_sql(sql_file: str) -> str:
     with open(sql_file, 'r') as infile:
         with s2.connect(**args) as conn:
             with conn.cursor() as cur:
+                try:
+                    cur.execute('SET GLOBAL default_partitions_per_leaf=2')
+                    cur.execute('SET GLOBAL log_file_size_partitions=1048576')
+                    cur.execute('SET GLOBAL log_file_size_ref_dbs=1048576')
+                except s2.OperationalError:
+                    pass
+
                 if not dbname:
                     dbname = 'TEST_{}'.format(uuid.uuid4()).replace('-', '_')
                     cur.execute(f'CREATE DATABASE {dbname};')
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: singlestoredb
-Version: 1.4.3
+Version: 1.6.0
 Summary: Interface to the SingleStoreDB database and workspace management APIs
 Home-page: https://github.com/singlestore-labs/singlestoredb-python
 Author: SingleStore