frogml-core 0.0.110__py3-none-any.whl → 0.0.112__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
Files changed (26)
  1. frogml_core/__init__.py +1 -1
  2. frogml_core/automations/automations.py +6 -3
  3. frogml_core/feature_store/data_sources/batch/_jdbc.py +1 -0
  4. frogml_core/feature_store/data_sources/batch/athena.py +4 -2
  5. frogml_core/feature_store/data_sources/batch/big_query.py +4 -2
  6. frogml_core/feature_store/data_sources/batch/clickhouse.py +4 -2
  7. frogml_core/feature_store/data_sources/batch/csv.py +4 -2
  8. frogml_core/feature_store/data_sources/batch/elastic_search.py +4 -2
  9. frogml_core/feature_store/data_sources/batch/mongodb.py +4 -2
  10. frogml_core/feature_store/data_sources/batch/mysql.py +3 -2
  11. frogml_core/feature_store/data_sources/batch/parquet.py +4 -2
  12. frogml_core/feature_store/data_sources/batch/postgres.py +3 -2
  13. frogml_core/feature_store/data_sources/batch/redshift.py +5 -2
  14. frogml_core/feature_store/data_sources/batch/snowflake.py +4 -2
  15. frogml_core/feature_store/data_sources/batch/vertica.py +4 -2
  16. frogml_core/feature_store/data_sources/streaming/kafka/kafka.py +4 -1
  17. frogml_core/feature_store/feature_sets/base_feature_set.py +1 -0
  18. frogml_core/feature_store/feature_sets/batch.py +4 -0
  19. frogml_core/feature_store/feature_sets/streaming.py +9 -3
  20. frogml_core/feature_store/online/client.py +2 -1
  21. frogml_core/model_loggers/artifact_logger.py +34 -6
  22. {frogml_core-0.0.110.dist-info → frogml_core-0.0.112.dist-info}/METADATA +1 -1
  23. {frogml_core-0.0.110.dist-info → frogml_core-0.0.112.dist-info}/RECORD +26 -26
  24. frogml_proto/qwak/models/models_query_pb2.pyi +4 -4
  25. frogml_storage/__init__.py +1 -1
  26. {frogml_core-0.0.110.dist-info → frogml_core-0.0.112.dist-info}/WHEEL +0 -0
frogml_core/__init__.py CHANGED
@@ -1,7 +1,7 @@
  """Top-level package for frogml."""

  __author__ = "jfrog"
- __version__ = "0.0.110"
+ __version__ = "0.0.112"

  from frogml_core.inner.di_configuration import wire_dependencies
  from frogml_core.model.model_version_tracking import ( # noqa: F401,E501
frogml_core/automations/automations.py CHANGED
@@ -293,6 +293,7 @@ class Automation:
  create_audit: AutomationAudit = field(default_factory=AutomationAudit)
  on_error: Notification = field(default=None)
  on_success: Notification = field(default=None)
+ jfrog_token_id: str = field(default="")

  def to_proto(self):
  return AutomationProto(
@@ -315,10 +316,11 @@ class Automation:
  qwak_environment_id=self.environment,
  create_audit=self.create_audit.to_proto(),
  is_deleted=self.is_deleted,
+ jfrog_token_id=self.jfrog_token_id,
  )

- @staticmethod
- def from_proto(message: AutomationProto):
+ @classmethod
+ def from_proto(cls, message: AutomationProto):
  action = map_action_name_to_class(
  message.automation_spec.action.WhichOneof("action")
  )
@@ -332,7 +334,7 @@ class Automation:
  message.automation_spec.on_success.WhichOneof("notification")
  )

- return Automation(
+ return cls(
  id=message.automation_id,
  name=message.automation_spec.automation_name,
  description=message.automation_spec.automation_description,
@@ -359,6 +361,7 @@ class Automation:
  if on_success_notification
  else None
  ),
+ jfrog_token_id=message.jfrog_token_id,
  )

  def __str__(self):
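
The move from `@staticmethod` to `@classmethod` makes `from_proto` polymorphic: `cls(...)` constructs whatever class the method was called on, so a subclass of `Automation` now deserializes to itself instead of to the base class. A minimal sketch of the pattern (the classes here are illustrative, not part of frogml):

```python
from dataclasses import dataclass


@dataclass
class Base:
    name: str

    @classmethod
    def from_proto(cls, message: dict) -> "Base":
        # `cls` is whichever class the caller invoked this on
        return cls(name=message["name"])


@dataclass
class Derived(Base):
    pass


msg = {"name": "nightly-retrain"}
print(type(Base.from_proto(msg)).__name__)     # Base
print(type(Derived.from_proto(msg)).__name__)  # Derived; a staticmethod returning Base(...) would give Base
```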
frogml_core/feature_store/data_sources/batch/_jdbc.py CHANGED
@@ -13,6 +13,7 @@ class JdbcSource(BaseBatchSource, ABC):
  url: Optional[str] = None
  db_table: Optional[str] = None
  query: Optional[str] = None
+ repository: Optional[str] = None

  def __post_init__(self):
  self._validate()
frogml_core/feature_store/data_sources/batch/athena.py CHANGED
@@ -52,6 +52,7 @@ class AthenaSource(JdbcSource):
  aws_authentication: Optional[AwsAuthentication] = None
  time_partition_columns: Optional[TimePartitionColumns] = None
  workgroup: Optional[str] = None
+ repository: Optional[str] = None

  def _validate(self):
  super()._validate()
@@ -67,7 +68,7 @@ class AthenaSource(JdbcSource):
  "At least one of: `s3_output_location`, `workgroup` must be set"
  )

- def _to_proto(self, artifact_url: Optional[str] = None):
+ def _to_proto(self, artifact_url: Optional[str] = None) -> ProtoDataSourceSpec:
  proto_date_partition_columns = None
  proto_time_fragment_partition_columns = None

@@ -97,6 +98,7 @@ class AthenaSource(JdbcSource):
  )

  return ProtoDataSourceSpec(
+ data_source_repository_name=self.repository,
  batch_source=ProtoBatchSource(
  name=self.name,
  description=self.description,
@@ -116,7 +118,7 @@ class AthenaSource(JdbcSource):
  workgroup=self.workgroup,
  ),
  ),
- )
+ ),
  )

  @classmethod
frogml_core/feature_store/data_sources/batch/big_query.py CHANGED
@@ -25,6 +25,7 @@ class BigQuerySource(BaseBatchSource):
  views_enabled: Optional[bool] = False
  materialization_dataset: Optional[str] = None
  materialization_project: Optional[str] = None
+ repository: Optional[str] = None

  def __post_init__(self):
  if not (self.sql or (self.table and self.dataset)):
@@ -59,8 +60,9 @@ class BigQuerySource(BaseBatchSource):
  materialization_dataset=bigquery.materialization_dataset,
  )

- def _to_proto(self, artifact_url: Optional[str] = None):
+ def _to_proto(self, artifact_url: Optional[str] = None) -> ProtoDataSourceSpec:
  return ProtoDataSourceSpec(
+ data_source_repository_name=self.repository,
  batch_source=ProtoBatchSource(
  name=self.name,
  description=self.description,
@@ -76,5 +78,5 @@ class BigQuerySource(BaseBatchSource):
  materialization_dataset=self.materialization_dataset,
  materialization_project=self.materialization_project,
  ),
- )
+ ),
  )
frogml_core/feature_store/data_sources/batch/clickhouse.py CHANGED
@@ -21,6 +21,7 @@ class ClickhouseSource(BaseBatchSource):
  url: Optional[str] = None
  db_table: Optional[str] = None
  query: Optional[str] = None
+ repository: Optional[str] = None

  def __post_init__(self):
  self._validate()
@@ -29,8 +30,9 @@ class ClickhouseSource(BaseBatchSource):
  if not (bool(self.db_table) ^ bool(self.query)):
  raise FrogmlException("Only one of query and db_table must be set")

- def _to_proto(self, artifact_url: Optional[str] = None):
+ def _to_proto(self, artifact_url: Optional[str] = None) -> ProtoDataSourceSpec:
  return ProtoDataSourceSpec(
+ data_source_repository_name=self.repository,
  batch_source=ProtoBatchSource(
  name=self.name,
  description=self.description,
@@ -42,7 +44,7 @@ class ClickhouseSource(BaseBatchSource):
  table=self.db_table,
  sql=self.query,
  ),
- )
+ ),
  )

  @classmethod
frogml_core/feature_store/data_sources/batch/csv.py CHANGED
@@ -25,6 +25,7 @@ class CsvSource(BaseBatchSource):
  quote_character: str = '"'
  escape_character: str = '"'
  filesystem_configuration: Optional[FileSystemConfiguration] = None
+ repository: Optional[str] = None

  @classmethod
  def _from_proto(cls, proto):
@@ -42,12 +43,13 @@ class CsvSource(BaseBatchSource):
  filesystem_configuration=fs_conf,
  )

- def _to_proto(self, artifact_url: Optional[str] = None):
+ def _to_proto(self, artifact_url: Optional[str] = None) -> ProtoDataSourceSpec:
  fs_conf = None
  if self.filesystem_configuration:
  fs_conf = self.filesystem_configuration._to_proto()

  return ProtoDataSourceSpec(
+ featureset_repository_name=self.repository,
  batch_source=ProtoBatchSource(
  name=self.name,
  description=self.description,
@@ -58,5 +60,5 @@ class CsvSource(BaseBatchSource):
  escape_character=self.escape_character,
  filesystem_configuration=fs_conf,
  ),
- )
+ ),
  )
frogml_core/feature_store/data_sources/batch/elastic_search.py CHANGED
@@ -23,6 +23,7 @@ class ElasticSearchSource(BaseBatchSource):
  password_secret_name: Optional[str] = None
  exclude_fields: Optional[str] = None
  parse_dates: Optional[bool] = True
+ repository: Optional[str] = None

  @classmethod
  def _from_proto(cls, proto):
@@ -41,8 +42,9 @@ class ElasticSearchSource(BaseBatchSource):
  parse_dates=elastic.parse_dates,
  )

- def _to_proto(self, artifact_url: Optional[str] = None):
+ def _to_proto(self, artifact_url: Optional[str] = None) -> ProtoDataSourceSpec:
  return ProtoDataSourceSpec(
+ data_source_repository_name=self.repository,
  batch_source=ProtoBatchSource(
  name=self.name,
  description=self.description,
@@ -57,5 +59,5 @@ class ElasticSearchSource(BaseBatchSource):
  exclude_fields=self.exclude_fields,
  parse_dates=self.parse_dates,
  ),
- )
+ ),
  )
frogml_core/feature_store/data_sources/batch/mongodb.py CHANGED
@@ -22,6 +22,7 @@ class MongoDbSource(BaseBatchSource):
  collection: str
  connection_params: str
  protocol: str = "mongodb"
+ repository: Optional[str] = None

  @classmethod
  def _from_proto(cls, proto):
@@ -39,8 +40,9 @@ class MongoDbSource(BaseBatchSource):
  protocol=mongo.protocol,
  )

- def _to_proto(self, artifact_url: Optional[str] = None):
+ def _to_proto(self, artifact_url: Optional[str] = None) -> ProtoBatchSource:
  return ProtoDataSourceSpec(
+ data_source_repository_name=self.repository,
  batch_source=ProtoBatchSource(
  name=self.name,
  description=self.description,
@@ -54,5 +56,5 @@ class MongoDbSource(BaseBatchSource):
  connection_params=self.connection_params,
  protocol=self.protocol,
  ),
- )
+ ),
  )
frogml_core/feature_store/data_sources/batch/mysql.py CHANGED
@@ -32,8 +32,9 @@ class MysqlSource(JdbcSource):
  query=mysql.query,
  )

- def _to_proto(self, artifact_url: Optional[str] = None):
+ def _to_proto(self, artifact_url: Optional[str] = None) -> ProtoJdbcSource:
  return ProtoDataSourceSpec(
+ data_source_repository_name=self.repository,
  batch_source=ProtoBatchSource(
  name=self.name,
  description=self.description,
@@ -46,5 +47,5 @@ class MysqlSource(JdbcSource):
  query=self.query,
  mysqlSource=ProtoMysqlSource(),
  ),
- )
+ ),
  )
frogml_core/feature_store/data_sources/batch/parquet.py CHANGED
@@ -26,6 +26,7 @@ class ParquetSource(BaseBatchSource):
  filesystem_configuration: FileSystemConfiguration = field(
  default_factory=lambda: AnonymousS3Configuration()
  )
+ repository: Optional[str] = None

  @classmethod
  def _from_proto(cls, proto):
@@ -40,12 +41,13 @@ class ParquetSource(BaseBatchSource):
  filesystem_configuration=fs_conf,
  )

- def _to_proto(self, artifact_url: Optional[str] = None):
+ def _to_proto(self, artifact_url: Optional[str] = None) -> ProtoDataSourceSpec:
  fs_conf = None
  if self.filesystem_configuration:
  fs_conf = self.filesystem_configuration._to_proto()

  return ProtoDataSourceSpec(
+ data_source_repository_name=self.repository,
  batch_source=ProtoBatchSource(
  name=self.name,
  description=self.description,
@@ -53,5 +55,5 @@ class ParquetSource(BaseBatchSource):
  parquetSource=ProtoParquetSource(
  path=self.path, filesystem_configuration=fs_conf
  ),
- )
+ ),
  )
frogml_core/feature_store/data_sources/batch/postgres.py CHANGED
@@ -18,8 +18,9 @@ from frogml_core.feature_store.data_sources.batch._jdbc import JdbcSource

  @dataclass
  class PostgresSource(JdbcSource):
- def _to_proto(self, artifact_url: Optional[str] = None):
+ def _to_proto(self, artifact_url: Optional[str] = None) -> ProtoDataSourceSpec:
  return ProtoDataSourceSpec(
+ data_source_repository_name=self.repository,
  batch_source=ProtoBatchSource(
  name=self.name,
  description=self.description,
@@ -32,7 +33,7 @@ class PostgresSource(JdbcSource):
  query=self.query,
  postgresqlSource=ProtoPostgresqlSource(),
  ),
- )
+ ),
  )

  @classmethod
frogml_core/feature_store/data_sources/batch/redshift.py CHANGED
@@ -27,6 +27,8 @@ class RedshiftSource(JdbcSource):
  secret_access_key: Optional[str] = None
  query_group: Optional[str] = "_qwak_featurestore"

+ repository: Optional[str] = None
+
  def _validate(self):
  authentication_methods = sum(
  [
@@ -46,8 +48,9 @@ class RedshiftSource(JdbcSource):
  "or user id secret name and password secret name"
  )

- def _to_proto(self, artifact_url: Optional[str] = None):
+ def _to_proto(self, artifact_url: Optional[str] = None) -> ProtoDataSourceSpec:
  return ProtoDataSourceSpec(
+ data_source_repository_name=self.repository,
  batch_source=ProtoBatchSource(
  name=self.name,
  description=self.description,
@@ -65,7 +68,7 @@ class RedshiftSource(JdbcSource):
  secret_access_key=self.secret_access_key,
  ),
  ),
- )
+ ),
  )

  @classmethod
frogml_core/feature_store/data_sources/batch/snowflake.py CHANGED
@@ -24,6 +24,7 @@ class SnowflakeSource(BaseBatchSource):
  warehouse: str
  table: Optional[str] = None
  query: Optional[str] = None
+ repository: Optional[str] = None

  def __post_init__(self):
  self._validate()
@@ -40,8 +41,9 @@ class SnowflakeSource(BaseBatchSource):
  if not self.password_secret_name:
  raise FrogmlException("password_secret_name must be set!")

- def _to_proto(self, artifact_url: Optional[str] = None):
+ def _to_proto(self, artifact_url: Optional[str] = None) -> ProtoDataSourceSpec:
  return ProtoDataSourceSpec(
+ data_source_repository_name=self.repository,
  batch_source=ProtoBatchSource(
  name=self.name,
  description=self.description,
@@ -56,7 +58,7 @@ class SnowflakeSource(BaseBatchSource):
  table=self.table,
  query=self.query,
  ),
- )
+ ),
  )

  @classmethod
frogml_core/feature_store/data_sources/batch/vertica.py CHANGED
@@ -22,6 +22,7 @@ class VerticaSource(BaseBatchSource):
  table: str
  username_secret_name: str
  password_secret_name: str
+ repository: Optional[str] = None

  @classmethod
  def _from_proto(cls, proto):
@@ -39,8 +40,9 @@ class VerticaSource(BaseBatchSource):
  table=vertica.table,
  )

- def _to_proto(self, artifact_url: Optional[str] = None):
+ def _to_proto(self, artifact_url: Optional[str] = None) -> ProtoDataSourceSpec:
  return ProtoDataSourceSpec(
+ data_source_repository_name=self.repository,
  batch_source=ProtoBatchSource(
  name=self.name,
  description=self.description,
@@ -54,5 +56,5 @@ class VerticaSource(BaseBatchSource):
  port=self.port,
  table=self.table,
  ),
- )
+ ),
  )
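
Every batch source in this release gains the same optional `repository` field, serialized onto the wrapping spec as `data_source_repository_name` (with the exception of `csv.py`, which writes it to `featureset_repository_name`). A usage sketch under stated assumptions: the class, import path, `url`, `db_table`, and `repository` fields come from the diff above, while the concrete values are hypothetical:

```python
from frogml_core.feature_store.data_sources.batch.postgres import PostgresSource

# Hypothetical connection details; `repository` is the field added in 0.0.112.
users_source = PostgresSource(
    name="users",
    description="Users table registered under a JFrog repository",
    url="jdbc:postgresql://db.internal:5432/analytics",
    db_table="public.users",
    repository="ml-feature-store-repo",
)

# _to_proto() now stamps the repository onto the wrapping spec, roughly:
# ProtoDataSourceSpec(data_source_repository_name="ml-feature-store-repo",
#                     batch_source=ProtoBatchSource(...))
```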
frogml_core/feature_store/data_sources/streaming/kafka/kafka.py CHANGED
@@ -52,6 +52,8 @@ class KafkaSource(BaseStreamingSource):
  default_factory=lambda: SslAuthentication()
  )

+ repository: Optional[str] = None
+
  def __post_init__(self):
  self._validate()

@@ -84,6 +86,7 @@ class KafkaSource(BaseStreamingSource):

  def _to_proto(self, artifact_url: Optional[str] = None) -> ProtoDataSourceSpec:
  return ProtoDataSourceSpec(
+ data_source_repository_name=self.repository,
  stream_source=ProtoStreamingSource(
  name=self.name,
  description=self.description,
@@ -99,7 +102,7 @@ class KafkaSource(BaseStreamingSource):
  artifact_path=artifact_url
  ),
  ),
- )
+ ),
  )

  @classmethod
frogml_core/feature_store/feature_sets/base_feature_set.py CHANGED
@@ -33,6 +33,7 @@ class BaseFeatureSet(ABC):
  data_sources: List[str]
  entity: str = str()
  key: str = str()
+ repository: Optional[str] = None
  __instance_module_path__: Optional[str] = None

  def _validate(self):
frogml_core/feature_store/feature_sets/batch.py CHANGED
@@ -72,6 +72,7 @@ def feature_set(
  name: str = None,
  entity: Optional[str] = None,
  key: Optional[str] = None,
+ repository: Optional[str] = None,
  ):
  """
  Define a batch scheduled feature set. Default scheduling policy is every 4 hours.
@@ -116,6 +117,7 @@ def feature_set(
  name=fs_name,
  entity=entity if entity else None,
  key=key if key else None,
+ repository=repository if repository else None,
  data_sources=data_sources,
  timestamp_column_name=timestamp_column_name,
  transformation=user_transformation,
@@ -323,6 +325,7 @@ class BatchFeatureSet(BaseFeatureSet):
  data_sources=[
  ds.data_source.name for ds in batch_v1_def.feature_set_batch_sources
  ],
+ repository=proto.featureset_repository_name,
  timestamp_column_name=batch_v1_def.timestamp_column_name,
  scheduling_policy=batch_v1_def.scheduling_policy,
  transformation=BaseTransformation._from_proto(batch_v1_def.transformation),
@@ -404,6 +407,7 @@ class BatchFeatureSet(BaseFeatureSet):
  git_commit=git_commit,
  features=features,
  entity=self._get_entity_definition(feature_registry),
+ featureset_repository_name=self.repository,
  feature_set_type=ProtoFeatureSetType(
  batch_feature_set_v1=ProtoBatchFeatureSetV1(
  online_sink=True,
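
The batch decorator and `BatchFeatureSet` round-trip the new field: decorator argument → `BatchFeatureSet.repository` → `featureset_repository_name` on the registration proto, and back out in `_from_proto`. A usage sketch; apart from `name`, `entity`, `key`, and `repository`, the decorator arguments and the SQL transformation follow qwak-style SDK conventions and are assumptions here:

```python
from frogml_core.feature_store.feature_sets import batch
from frogml_core.feature_store.feature_sets.transformations import SparkSqlTransformation  # assumed import path


@batch.feature_set(
    name="user-purchases",
    entity="user",
    data_sources=["snowflake_users"],    # assumed decorator argument
    repository="ml-feature-store-repo",  # new in 0.0.112
)
def user_purchases():
    # Illustrative transformation body
    return SparkSqlTransformation(
        "SELECT user_id, COUNT(*) AS purchases FROM snowflake_users GROUP BY user_id"
    )
```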
frogml_core/feature_store/feature_sets/streaming.py CHANGED
@@ -86,6 +86,7 @@ def feature_set(
  entity: Optional[str] = None,
  key: Optional[str] = None,
  auxiliary_sinks: List[BaseSink] = [],
+ repository: Optional[str] = None,
  ):
  """
  Creates a streaming feature set for the specified entity using the given streaming data sources.
@@ -140,6 +141,7 @@ def feature_set(
  name=fs_name,
  entity=entity if entity else None,
  key=key if key else None,
+ repository=repository,
  data_sources=data_sources,
  timestamp_column_name=timestamp_column_name,
  transformation=user_transformation,
@@ -340,6 +342,7 @@ class StreamingFeatureSet(BaseFeatureSet):

  return cls(
  name=proto.name,
+ repository=proto.featureset_repository_name,
  entity=Entity._from_proto(proto.entity).name,
  data_sources=[ds.name for ds in streaming_def.data_sources],
  timestamp_column_name=streaming_def.timestamp_column_name,
@@ -420,6 +423,7 @@ class StreamingFeatureSet(BaseFeatureSet):
  features=features,
  entity=self._get_entity_definition(feature_registry),
  feature_set_type=proto_featureset_type,
+ featureset_repository_name=self.repository,
  ),
  artifact_url,
  )
@@ -429,10 +433,12 @@ class StreamingFeatureSet(BaseFeatureSet):

  super()._validate()

+ # verify offline_scheduling_policy was set
+ if not self.offline_scheduling_policy:
+ raise FrogmlException("'offline_scheduling_policy' field must be set")
+
  # verify the cron expression is valid
- if not self.offline_scheduling_policy or not croniter.croniter.is_valid(
- self.offline_scheduling_policy
- ):
+ if not croniter.croniter.is_valid(self.offline_scheduling_policy):
  raise FrogmlException(
  f"offline scheduling policy "
  f"'{self.offline_scheduling_policy}'"
frogml_core/feature_store/online/client.py CHANGED
@@ -1,5 +1,6 @@
  import logging
  import math
+ from io import StringIO
  from typing import TYPE_CHECKING, List, Optional, Sequence, Tuple

  from frogml_proto.qwak.ecosystem.v0.ecosystem_pb2 import (
@@ -221,7 +222,7 @@ class OnlineClient:
  request, metadata=self._metadata
  )
  features_df: pd.DataFrame = pd.read_json(
- response_df_json.pandas_df_as_json, orient="split"
+ StringIO(response_df_json.pandas_df_as_json), orient="split"
  )
  results.append(
  pd.concat(
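
Wrapping the payload in `StringIO` tracks a pandas deprecation: since pandas 2.1, passing a literal JSON string to `read_json` emits a `FutureWarning`, and the supported inputs are paths or file-like objects. A standalone reproduction of the fixed call:

```python
from io import StringIO

import pandas as pd

# orient="split" payloads carry columns/index/data keys,
# matching what the online store returns
payload = '{"columns":["user_id","score"],"index":[0,1],"data":[[1,0.9],[2,0.4]]}'

features_df = pd.read_json(StringIO(payload), orient="split")
print(features_df)
```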
frogml_core/model_loggers/artifact_logger.py CHANGED
@@ -1,5 +1,8 @@
- import urllib.request
+ import logging
+ from datetime import timedelta
  from typing import Optional
+ import requests
+ from requests import Response

  from frogml_proto.qwak.builds.build_url_pb2 import BuildVersioningTagsType
  from frogml_core.clients.build_orchestrator.client import BuildOrchestratorClient
@@ -13,8 +16,14 @@ from frogml_core.inner.model_loggers_utils import (
  )
  from frogml_proto.qwak.builds.builds_orchestrator_service_pb2 import (
  GetBuildVersioningUploadURLResponse,
+ GetBuildVersioningDownloadURLResponse,
  )

+ logging.basicConfig(level=logging.INFO)
+ logger = logging.getLogger(__name__)
+
+ MAX_CHUNK_SIZE = 8 * 1_024
+

  def log_file(
  from_path: str,
@@ -80,21 +89,40 @@ def load_file(
  Returns:
  the path to the newly created data file
  """
+ logger.info(f"Loading file to {to_path}")
  if not validate_tag(tag):
  raise FrogmlException(
  "Tag should contain only letters, numbers, underscore or hyphen"
  )

  model_id = validate_model(model_id)
- download_url_response = BuildOrchestratorClient().get_build_versioning_download_url(
- build_id=build_id, model_id=model_id, tag=tag
+ download_url_response: (
+ GetBuildVersioningDownloadURLResponse
+ ) = BuildOrchestratorClient().get_build_versioning_download_url(
+ build_id=build_id,
+ model_id=model_id,
+ tag=tag,
+ tag_type=BuildVersioningTagsType.FILE_TAG_TYPE,
  )

  try:
- filename, headers = urllib.request.urlretrieve( # nosec B310
- url=download_url_response.download_url, filename=to_path
+ response: Response = requests.get(
+ download_url_response.download_url,
+ headers=download_url_response.headers,
+ stream=True,
+ timeout=(
+ timedelta(seconds=10).total_seconds(), # timeout to connect
+ timedelta(minutes=20).total_seconds(), # timeout to read
+ ),
  )
+ logger.info(f"Downloading file finished with status {response.status_code}")
+ response.raise_for_status()
+
+ with open(to_path, "wb") as f:
+ for chunk in response.iter_content(chunk_size=MAX_CHUNK_SIZE):
+ if chunk:
+ f.write(chunk)

- return filename
+ return to_path
  except Exception as error:
  raise FrogmlException(f"Unable to load save artifact locally: {str(error)}")
{frogml_core-0.0.110.dist-info → frogml_core-0.0.112.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: frogml-core
- Version: 0.0.110
+ Version: 0.0.112
  Summary: frogml Core contains the necessary objects and communication tools for using the Jfrog ml Platform
  License: Apache-2.0
  Keywords: mlops,ml,deployment,serving,model
{frogml_core-0.0.110.dist-info → frogml_core-0.0.112.dist-info}/RECORD CHANGED
@@ -1,7 +1,7 @@
- frogml_core/__init__.py,sha256=Q10let7zj7bZvhVQ91wAHfK90sOgGYDqArvf7wpigpk,778
+ frogml_core/__init__.py,sha256=SNMYZzIxL9iYw_75tt9Zjp8e606xQbK4AT4rRIpY-BU,778
  frogml_core/automations/__init__.py,sha256=j2gD15MN-xVWhI5rAFsDwhL0CIyICLNT0scXsKvNBkU,1547
  frogml_core/automations/automation_executions.py,sha256=xpOb9Dq8gPPGNQDJTvBBZbNz4woZDRZY0HqnLSu7pwU,3230
- frogml_core/automations/automations.py,sha256=GKEQyQMi8sxX5oZn62PaxPi0zD8IaJRjBkhczRJxHNs,13070
+ frogml_core/automations/automations.py,sha256=srG8S5Z20hUsk6uehz75dmCs8mEgNCCwuCXfj3CG8hY,13210
  frogml_core/automations/batch_execution_action.py,sha256=tFGQWI16CGNfo-DZJ0n86SBkKWbNt1Q8712wF0_OgUI,13327
  frogml_core/automations/build_and_deploy_action.py,sha256=WK9CJ0oYr6u6Sy794OMjTvLohA_aHaofx0ectWN-K3o,31962
  frogml_core/automations/common.py,sha256=98YNX-jCrmQ97k1IxRqzdsnet-rohcXp4yIdVNwpyh8,2752
@@ -124,31 +124,31 @@ frogml_core/feature_store/data_sources/attributes.py,sha256=SsiPoJkqSONhK8mnn6ft
  frogml_core/feature_store/data_sources/base.py,sha256=sCJ1CzbhRX-fgsw_Y3ucUu-tKC3i93upasJfGw3EmtE,4287
  frogml_core/feature_store/data_sources/batch/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  frogml_core/feature_store/data_sources/batch/_batch.py,sha256=xnrC1SZ4OH4b7gYLRajEBFQ2U00cA3JxKT8bKNTNHLg,204
- frogml_core/feature_store/data_sources/batch/_jdbc.py,sha256=lC-m3ZypER5vLtYo4h6ghYWeXvdfGSOdTadqS-sTb58,676
- frogml_core/feature_store/data_sources/batch/athena.py,sha256=fKKdKE1B59dbCiOQQ5amJECoyETwxswkBhDLQ-Pctsg,11292
- frogml_core/feature_store/data_sources/batch/big_query.py,sha256=h9iscw9I-WyCj1BeyesASNoIiNwPty9acQ37KGD0Ulo,3112
- frogml_core/feature_store/data_sources/batch/clickhouse.py,sha256=mUSE139-ACnInctQNZqETbnWgFyZidvffVJpwQu0T3Q,2149
- frogml_core/feature_store/data_sources/batch/csv.py,sha256=saxFeP--CL4h5j2toyxSuOl4kN_WtEEg8pumaYOjCHg,2070
- frogml_core/feature_store/data_sources/batch/elastic_search.py,sha256=GJKEsOlaxHbBYSor47bofNtV9kJnVVYtq8BSies7hjM,2205
+ frogml_core/feature_store/data_sources/batch/_jdbc.py,sha256=u-o7BF2W66ZVLbOhYsL91f1LWThNy8hGog1I-bwyHsU,713
+ frogml_core/feature_store/data_sources/batch/athena.py,sha256=VNy6gbIV87vn6TIWDwTBkfcsX0awLCJ5dFZMv448-F0,11410
+ frogml_core/feature_store/data_sources/batch/big_query.py,sha256=V5zKuuESbM_0bR_z8FqT-EMWu10GxR3JMdXCZAER8mc,3230
+ frogml_core/feature_store/data_sources/batch/clickhouse.py,sha256=HNhTl21BDJ--Bs_5texT_lmTQufBpowJP4ONi-H6Xbw,2267
+ frogml_core/feature_store/data_sources/batch/csv.py,sha256=FMeCEVaBWDZKSEmsgF3nk3TRnWp82QzE9DXCYUAEFJs,2187
+ frogml_core/feature_store/data_sources/batch/elastic_search.py,sha256=5P9hGg63iV2DWvNWTq-wCXGGJbYzXRs3zU9fPDSfCaY,2323
  frogml_core/feature_store/data_sources/batch/filesystem/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  frogml_core/feature_store/data_sources/batch/filesystem/aws.py,sha256=4mj34C16mH1JeU2dEKFIiV-ubCGHwILlgFNyDuEHHQY,3051
  frogml_core/feature_store/data_sources/batch/filesystem/base_config.py,sha256=fqvqkM_0QY4rcZNa1qB-KLl72o8PhrQQnak8-Bk4tHM,245
  frogml_core/feature_store/data_sources/batch/filesystem/gcp.py,sha256=_UU1a0u1DeTh7okstWiM1e2nD8TJLrEigPIFLQnSkuE,1723
  frogml_core/feature_store/data_sources/batch/filesystem/utils.py,sha256=u2FyAh91JY8j1EzX5fJSgVJqwiwpMvt43mfMeyabVdE,2171
- frogml_core/feature_store/data_sources/batch/mongodb.py,sha256=98Y67LdRgIzfcPNw3CEPV08BOa-HMgrox3Pw-rW883A,1996
- frogml_core/feature_store/data_sources/batch/mysql.py,sha256=MOYgsosXIUf3nap83X4lAn-afFE_tydK5_GU0SW8La8,1736
- frogml_core/feature_store/data_sources/batch/parquet.py,sha256=uOAmTSWn9plN96UuhSqFs_7R9nkrb6DPnJfc7qzcFfY,1963
- frogml_core/feature_store/data_sources/batch/postgres.py,sha256=bpvYg7bgZKG4Mhdh7pp5aN23i9d1GffX5yBSxkQ4Eho,1789
- frogml_core/feature_store/data_sources/batch/redshift.py,sha256=f0xH1Nzg3MMjj-WUFR7DzDLNgawv1KmJQzGSw_-eetE,3268
- frogml_core/feature_store/data_sources/batch/snowflake.py,sha256=T5b9_mRrq_7en-70KgELmgkiKnS3CI9IYw1sBcmSJlw,2671
- frogml_core/feature_store/data_sources/batch/vertica.py,sha256=J9-1KvzWL5JlCqxRYo7MGfF3yXt4wEQ2GzB5RvuS8OM,1905
+ frogml_core/feature_store/data_sources/batch/mongodb.py,sha256=sVR38ZBZ0Y8cLng5RVm5ahag0rfABRZ_mngMzJk9YiQ,2111
+ frogml_core/feature_store/data_sources/batch/mysql.py,sha256=Hr6urfu6tAa54L-oxTCYugwmuXi2gOtOIIWh-3vBBHU,1813
+ frogml_core/feature_store/data_sources/batch/parquet.py,sha256=_KfsDbwJheCtqg1BebmAj6MJTTKlGo9Kd_xpG2upeCk,2081
+ frogml_core/feature_store/data_sources/batch/postgres.py,sha256=PoDl3B_gg1wQvLB0NI1vS5hen7rOfd7ShwduWKE93Lk,1870
+ frogml_core/feature_store/data_sources/batch/redshift.py,sha256=c5M0L7gmMnmxAvd39D4Eawyx9ZrHfx5YI4p5eMGTbSU,3387
+ frogml_core/feature_store/data_sources/batch/snowflake.py,sha256=HWiJoTSe6osG_fITggMjgjttlIOOZxTSRlQ-NGGlbOg,2789
+ frogml_core/feature_store/data_sources/batch/vertica.py,sha256=KZo9oIn8vVAYhoE1OWvU_tvcS7vkOr8I2Yj9iDXVd3k,2023
  frogml_core/feature_store/data_sources/source_authentication.py,sha256=AhMxuCYqjmYeaPc5OF_K2YnHq44GcN46AtT3_4nAAIA,962
  frogml_core/feature_store/data_sources/streaming/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  frogml_core/feature_store/data_sources/streaming/_streaming.py,sha256=LM7vcBeuwlpj9JZjAppuy04mtRoaoQtGixv1oaFcJxI,188
  frogml_core/feature_store/data_sources/streaming/kafka/__init__.py,sha256=ISoTTLW8e8hNWJHVekWT0yWhjHEwaNvc3Pyzkd23UAE,670
  frogml_core/feature_store/data_sources/streaming/kafka/authentication.py,sha256=z1VuiHRvALbSFcC2Oa4of2Emy1FHtd6df_-QE6xCR7Q,4305
  frogml_core/feature_store/data_sources/streaming/kafka/deserialization.py,sha256=uKdYTcVOkq8r9rFlimtVqbfCAROQv66Zs1ZQh1yx9jI,3736
- frogml_core/feature_store/data_sources/streaming/kafka/kafka.py,sha256=CcfzXgY6lcxTIEJ7bb3Z8rQM693LtBhd_FSHb-l0xvA,4527
+ frogml_core/feature_store/data_sources/streaming/kafka/kafka.py,sha256=UJos6wt_F-DbkvmXAoxZSP8kMlXvF03e9YbyLBR7IkI,4623
  frogml_core/feature_store/data_sources/time_partition_columns.py,sha256=Glt5aMLKY6fCRiNupgxyliKOev_y9fg6c3DYRNDb5hA,6381
  frogml_core/feature_store/entities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  frogml_core/feature_store/entities/entity.py,sha256=ktMQUgd4Wmfcko9g9MB0KXk7UkLXlJFZpBFD5NRZE_Q,2325
@@ -159,13 +159,13 @@ frogml_core/feature_store/execution/execution_query.py,sha256=xWTVcgfXAxxTHuQyrH
  frogml_core/feature_store/feature_sets/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  frogml_core/feature_store/feature_sets/_utils/_featureset_utils.py,sha256=mnznpQ0RAk5ONRdbp9m4Y95Z2zWUj0bl4ztamtczO1U,1658
  frogml_core/feature_store/feature_sets/backfill.py,sha256=oZqGb8PA1BQxVOxf9-MwzeZyjJqgwulbA-flmBmXAj4,1981
- frogml_core/feature_store/feature_sets/base_feature_set.py,sha256=1niCItXApA6JZB7tviXJJVaw91sFwSBwiA1f2xOQnwM,5378
- frogml_core/feature_store/feature_sets/batch.py,sha256=VADm8KVYhyxV7Ibt2luOlnrbrqd774_ICG0iKPs82cU,17422
+ frogml_core/feature_store/feature_sets/base_feature_set.py,sha256=zYJsn0iDBPSS4BmkDyWBKmW-LWJPTWePq9McEu7pNxQ,5415
+ frogml_core/feature_store/feature_sets/batch.py,sha256=xnVvONl2sDnOGOiJMYk_uNWACm8oKHl6-CPCZckn-Wc,17636
  frogml_core/feature_store/feature_sets/context.py,sha256=zV6r0O70cfM4pmxlfC6xxAtro-wBhenXWwYwF3KwfTY,263
  frogml_core/feature_store/feature_sets/execution_spec.py,sha256=zKQd7U-PdYkZMqBpA9eIRhhWff-8xxKB_Qo4IDolwGI,2348
  frogml_core/feature_store/feature_sets/metadata.py,sha256=Vv2pyBbwaJZRFhWKRhxdFyN3AsV-DvTQzLs9nyRMWK0,1888
  frogml_core/feature_store/feature_sets/read_policies.py,sha256=BQu6B6IZuKJt8Ff5RYeADdqpHmSkec790RIYeSl6Ulo,6844
- frogml_core/feature_store/feature_sets/streaming.py,sha256=78ItVSojMfLoTY3k0SH6UD40XS7dnVZFn9cKfYUcS3Y,25295
+ frogml_core/feature_store/feature_sets/streaming.py,sha256=CDdhFALqWbrU8qAl1iFf0OPF2vWTp53t_hhaWkWMah0,25607
  frogml_core/feature_store/feature_sets/streaming_backfill.py,sha256=u-tjq86AaXAusLUwPYtdCKhTuySQFAtRrUvbPrY3CtI,9834
  frogml_core/feature_store/feature_sets/transformations/__init__.py,sha256=ozc50AI9RBY71nhNiJDu1-vSWJL2Bdgstyh7GGUW2ig,902
  frogml_core/feature_store/feature_sets/transformations/aggregations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -181,7 +181,7 @@ frogml_core/feature_store/offline/_offline_serving_validations.py,sha256=yfkV8UV
  frogml_core/feature_store/offline/client_v2.py,sha256=kTFyHAYIsKBe3wcuE1S_LyD9CLQ_yCorOLcHVR3Emms,14966
  frogml_core/feature_store/offline/feature_set_features.py,sha256=MjrQrXNhzk7QBdCojdpLfy1fuGdP3GcpOgcc7n7H0G8,740
  frogml_core/feature_store/online/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- frogml_core/feature_store/online/client.py,sha256=_NCVJngFgh8D3IVBW8u8estPqZ7mSwg5IxmuTndFY0A,13572
+ frogml_core/feature_store/online/client.py,sha256=U1lpnNE_UGYC2Tvi2y6CayQShYNP0YR8roNI1gxmAWA,13606
  frogml_core/feature_store/online/endpoint_utils.py,sha256=lGssZR-r8kJpcSozVxQAk1_JpVXgRLqOVrK6fw8flPg,2242
  frogml_core/feature_store/sinks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  frogml_core/feature_store/sinks/base.py,sha256=QdIutDlO_8IBRr-zKfHBRHJ1-DjDmFfR_Yuad193kg0,361
@@ -405,7 +405,7 @@ frogml_core/model/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG
  frogml_core/model/utils/extract_wrapped_function.py,sha256=uIle1zL8vbmeS3PGAuaNFLIUQAsvpuzk3LlH-Teba94,320
  frogml_core/model/utils/feature_utils.py,sha256=ObIU4jLKOMEa3DWEyrXm1m3wC2wseAVwr-zxlSiojYk,2525
  frogml_core/model_loggers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- frogml_core/model_loggers/artifact_logger.py,sha256=7T06QL1keo6BTpIh76JtXRcNlZuIbqECz343kEw0Hfc,3128
+ frogml_core/model_loggers/artifact_logger.py,sha256=F3Xhw9O82b0_cx24DHoc13SaJ06rqO5-c4Tw4yIMR74,4033
  frogml_core/model_loggers/data_logger.py,sha256=I3o2TkAGAnKuVWccPMlGL5-rg1e2UDHRc9cRSS8kqmc,5678
  frogml_core/model_loggers/model_logger.py,sha256=xDUIwRzeFAorzzR47ovyag5ohwuYZWp9fRWpLpMzcg4,863
  frogml_core/testing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -957,7 +957,7 @@ frogml_proto/qwak/models/models_pb2.py,sha256=CTvYhYuPcZcPgxm2W_ZjcU1w3NSKwO195e
  frogml_proto/qwak/models/models_pb2.pyi,sha256=277mGQradMyhof3J54ilaW_SKN0d-dan7DAy_6Zkmo8,39261
  frogml_proto/qwak/models/models_pb2_grpc.py,sha256=dE-D9iLNznyqNcn3nWMrNd1kAYd8hqXbezOqDP41as4,21681
  frogml_proto/qwak/models/models_query_pb2.py,sha256=1QDjgF0hd_sN3O501f7qfudVz2Qz7EzA80IPcuzOcnM,2822
- frogml_proto/qwak/models/models_query_pb2.pyi,sha256=cZO7EA_JetDIQbC-9hg42G07DJMBC6Adgn6IMhnhhNQ,7456
+ frogml_proto/qwak/models/models_query_pb2.pyi,sha256=MYheg8TQtQuDv4BqHOWinzemOKhj35ZIVMHQy3YrAQA,7631
  frogml_proto/qwak/models/models_query_pb2_grpc.py,sha256=1oboBPFxaTEXt9Aw7EAj8gXHDCNMhZD2VXqocC9l_gk,159
  frogml_proto/qwak/monitoring/v0/alerting_channel_management_service_pb2.py,sha256=ZbLDlHJ-2GboL9D_kojialc7YaX8z7LaZNtilpJWRiI,5061
  frogml_proto/qwak/monitoring/v0/alerting_channel_management_service_pb2.pyi,sha256=enqAQBr8RW_FLVL1BkZ_MGjeklQkhIZZ2OU2jczePdk,6907
@@ -1114,7 +1114,7 @@ frogml_services_mock/mocks/workspace_manager_service_mock.py,sha256=WbOiWgOyr-xT
  frogml_services_mock/services_mock.py,sha256=sgKgwhu2W0YOHtzil8x7f1znK_sZr_i27XSeiF4xqVE,21200
  frogml_services_mock/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  frogml_services_mock/utils/service_utils.py,sha256=ZlB0CnB1J6oBn6_m7fQO2U8tKoboHdUa6ljjkRMYNXU,265
- frogml_storage/__init__.py,sha256=00B0bSjQW8FClEUTZrcxxlOoilJS1Cz7oACh4nBKyM4,24
+ frogml_storage/__init__.py,sha256=oUJHw8__aJg8lCcQGVNPbGho_Gk1e9FvvH2adAponBo,24
  frogml_storage/_environment.py,sha256=zuzOJBtBwFaguwn_JkKjfhXStZoustgP30KzOP3mYv8,707
  frogml_storage/artifactory/__init__.py,sha256=C02rcm7kqsZBVA6c6Gztxamj96hn8Aj6BuzYWFRmWbQ,71
  frogml_storage/artifactory/_artifactory_api.py,sha256=Oz0HOpQPSNwWIVAy94UJUyPhLetc7sdZjoTfSXtrFug,11200
@@ -1153,6 +1153,6 @@ frogml_storage/utils/__init__.py,sha256=HQUWfuGUIPZY7kfS795TRW8BQ4WmNqrNjS7lUrbx
  frogml_storage/utils/_input_checks_utility.py,sha256=CFiJOdTBS9piJMtR3lemEz27wZcQ6_-7XESu8iy-mrw,3221
  frogml_storage/utils/_storage_utils.py,sha256=HB2g7uY5A3b33yIcAUM1OjHb5jWsnpESsiDrEviQwrI,366
  frogml_storage/utils/_url_utils.py,sha256=NUEfz9Fp1iE8b676-A5wrMlSTsJVRKrUhcUItOFAJD8,821
- frogml_core-0.0.110.dist-info/METADATA,sha256=kfGIqB-6PU6_my0GsK_S8TBn5K63qCE6xnfknO4DN6Q,14898
- frogml_core-0.0.110.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
- frogml_core-0.0.110.dist-info/RECORD,,
+ frogml_core-0.0.112.dist-info/METADATA,sha256=q_FfTqj6cc_ch7srz-2ga5qcX-aB2qFJexyE8qtAKts,14898
+ frogml_core-0.0.112.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+ frogml_core-0.0.112.dist-info/RECORD,,
frogml_proto/qwak/models/models_query_pb2.pyi CHANGED
@@ -112,9 +112,9 @@ class Filter(google.protobuf.message.Message):
  model_name_substring: builtins.str
  @property
  def model_group_names(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
- """Filter models by one or more model group names.
- Return models that belong to one of the specified model groups.
+ """Filter models by one or more model group names. Returns models that belong to any of the specified model groups.
  Optional field. If not specified, models from all groups are included.
+ The maximum number of model group names allowed is 100, with each model group name having a maximum length of 100 characters.
  """
  deployment_model_status: global___DeploymentModelStatusFilter.ValueType
  def __init__(
@@ -141,10 +141,10 @@ class PaginationMetadata(google.protobuf.message.Message):
  PAGE_SIZE_FIELD_NUMBER: builtins.int
  NEXT_CURSOR_FIELD_NUMBER: builtins.int
  page_size: builtins.int
- """Number of items per page."""
+ """Number of models returned in the current page."""
  next_cursor: builtins.str
  """Cursor for the next page.
- If there are no more pages, this field will be empty.
+ If there are no more pages, an empty string will be returned as the next cursor.
  """
  def __init__(
  self,
frogml_storage/__init__.py CHANGED
@@ -1 +1 @@
- __version__ = "0.0.110"
+ __version__ = "0.0.112"