chalkpy-2.94.8-py3-none-any.whl → chalkpy-2.95.0-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.
@@ -140,17 +140,26 @@ class DeployKubeComponentsResponse(_message.Message):
140
140
  def __init__(self, nonfatal_errors: _Optional[_Iterable[str]] = ...) -> None: ...
141
141
 
142
142
  class RebuildDeploymentRequest(_message.Message):
143
- __slots__ = ("existing_deployment_id", "new_image_tag", "base_image_override", "enable_profiling", "build_profile")
143
+ __slots__ = (
144
+ "existing_deployment_id",
145
+ "new_image_tag",
146
+ "base_image_override",
147
+ "enable_profiling",
148
+ "build_profile",
149
+ "force_rebuild_dockerfile",
150
+ )
144
151
  EXISTING_DEPLOYMENT_ID_FIELD_NUMBER: _ClassVar[int]
145
152
  NEW_IMAGE_TAG_FIELD_NUMBER: _ClassVar[int]
146
153
  BASE_IMAGE_OVERRIDE_FIELD_NUMBER: _ClassVar[int]
147
154
  ENABLE_PROFILING_FIELD_NUMBER: _ClassVar[int]
148
155
  BUILD_PROFILE_FIELD_NUMBER: _ClassVar[int]
156
+ FORCE_REBUILD_DOCKERFILE_FIELD_NUMBER: _ClassVar[int]
149
157
  existing_deployment_id: str
150
158
  new_image_tag: str
151
159
  base_image_override: str
152
160
  enable_profiling: bool
153
161
  build_profile: _environment_pb2.DeploymentBuildProfile
162
+ force_rebuild_dockerfile: bool
154
163
  def __init__(
155
164
  self,
156
165
  existing_deployment_id: _Optional[str] = ...,
@@ -158,6 +167,7 @@ class RebuildDeploymentRequest(_message.Message):
158
167
  base_image_override: _Optional[str] = ...,
159
168
  enable_profiling: bool = ...,
160
169
  build_profile: _Optional[_Union[_environment_pb2.DeploymentBuildProfile, str]] = ...,
170
+ force_rebuild_dockerfile: bool = ...,
161
171
  ) -> None: ...
162
172
 
163
173
  class RebuildDeploymentResponse(_message.Message):
@@ -235,6 +245,7 @@ class UploadSourceRequest(_message.Message):
235
245
  "use_grpc",
236
246
  "enable_profiling",
237
247
  "build_profile",
248
+ "force_rebuild_dockerfile",
238
249
  )
239
250
  DEPLOYMENT_ID_FIELD_NUMBER: _ClassVar[int]
240
251
  ARCHIVE_FIELD_NUMBER: _ClassVar[int]
@@ -244,6 +255,7 @@ class UploadSourceRequest(_message.Message):
244
255
  USE_GRPC_FIELD_NUMBER: _ClassVar[int]
245
256
  ENABLE_PROFILING_FIELD_NUMBER: _ClassVar[int]
246
257
  BUILD_PROFILE_FIELD_NUMBER: _ClassVar[int]
258
+ FORCE_REBUILD_DOCKERFILE_FIELD_NUMBER: _ClassVar[int]
247
259
  deployment_id: str
248
260
  archive: bytes
249
261
  no_promote: bool
@@ -252,6 +264,7 @@ class UploadSourceRequest(_message.Message):
252
264
  use_grpc: bool
253
265
  enable_profiling: bool
254
266
  build_profile: _environment_pb2.DeploymentBuildProfile
267
+ force_rebuild_dockerfile: bool
255
268
  def __init__(
256
269
  self,
257
270
  deployment_id: _Optional[str] = ...,
@@ -262,6 +275,7 @@ class UploadSourceRequest(_message.Message):
262
275
  use_grpc: bool = ...,
263
276
  enable_profiling: bool = ...,
264
277
  build_profile: _Optional[_Union[_environment_pb2.DeploymentBuildProfile, str]] = ...,
278
+ force_rebuild_dockerfile: bool = ...,
265
279
  ) -> None: ...
266
280
 
267
281
  class UploadSourceResponse(_message.Message):
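The hunks above add an optional force_rebuild_dockerfile flag to both RebuildDeploymentRequest and UploadSourceRequest. A minimal sketch of setting it when constructing the messages; the generated module that defines these classes is not named in this diff, so `builder_pb2` below is a hypothetical alias for it, and the id/archive values are placeholders:

    # Hedged sketch; `builder_pb2` stands in for whichever chalk._gen module
    # actually defines these request messages (not shown in this hunk).
    rebuild_req = builder_pb2.RebuildDeploymentRequest(
        existing_deployment_id="...",   # placeholder deployment id
        force_rebuild_dockerfile=True,  # new in 2.95.0
    )
    upload_req = builder_pb2.UploadSourceRequest(
        deployment_id="...",            # placeholder deployment id
        archive=b"...",                 # placeholder source archive bytes
        force_rebuild_dockerfile=True,  # new in 2.95.0
    )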
@@ -0,0 +1,42 @@
1
+ # -*- coding: utf-8 -*-
2
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
3
+ # source: chalk/server/v1/clickhouse.proto
4
+ # Protobuf Python Version: 4.25.3
5
+ """Generated protocol buffer code."""
6
+
7
+ from google.protobuf import descriptor as _descriptor
8
+ from google.protobuf import descriptor_pool as _descriptor_pool
9
+ from google.protobuf import symbol_database as _symbol_database
10
+ from google.protobuf.internal import builder as _builder
11
+ # @@protoc_insertion_point(imports)
12
+
13
+ _sym_db = _symbol_database.Default()
14
+
15
+
16
+ from chalk._gen.chalk.auth.v1 import permissions_pb2 as chalk_dot_auth_dot_v1_dot_permissions__pb2
17
+ from chalk._gen.chalk.utils.v1 import sensitive_pb2 as chalk_dot_utils_dot_v1_dot_sensitive__pb2
18
+
19
+
20
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
21
+ b'\n chalk/server/v1/clickhouse.proto\x12\x0f\x63halk.server.v1\x1a\x1f\x63halk/auth/v1/permissions.proto\x1a\x1e\x63halk/utils/v1/sensitive.proto"\x19\n\x17GetClickhouseUriRequest"2\n\x18GetClickhouseUriResponse\x12\x16\n\x03uri\x18\x01 \x01(\tB\x04\xd8\xa1\'\x01R\x03uri2\x84\x01\n\x11\x43lickhouseService\x12o\n\x10GetClickhouseUri\x12(.chalk.server.v1.GetClickhouseUriRequest\x1a).chalk.server.v1.GetClickhouseUriResponse"\x06\x90\x02\x01\x80}\x0b\x42\x98\x01\n\x13\x63om.chalk.server.v1B\x0f\x43lickhouseProtoP\x01Z\x12server/v1;serverv1\xa2\x02\x03\x43SX\xaa\x02\x0f\x43halk.Server.V1\xca\x02\x0f\x43halk\\Server\\V1\xe2\x02\x1b\x43halk\\Server\\V1\\GPBMetadata\xea\x02\x11\x43halk::Server::V1b\x06proto3'
22
+ )
23
+
24
+ _globals = globals()
25
+ _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
26
+ _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "chalk.server.v1.clickhouse_pb2", _globals)
27
+ if _descriptor._USE_C_DESCRIPTORS == False:
28
+ _globals["DESCRIPTOR"]._options = None
29
+ _globals[
30
+ "DESCRIPTOR"
31
+ ]._serialized_options = b"\n\023com.chalk.server.v1B\017ClickhouseProtoP\001Z\022server/v1;serverv1\242\002\003CSX\252\002\017Chalk.Server.V1\312\002\017Chalk\\Server\\V1\342\002\033Chalk\\Server\\V1\\GPBMetadata\352\002\021Chalk::Server::V1"
32
+ _globals["_GETCLICKHOUSEURIRESPONSE"].fields_by_name["uri"]._options = None
33
+ _globals["_GETCLICKHOUSEURIRESPONSE"].fields_by_name["uri"]._serialized_options = b"\330\241'\001"
34
+ _globals["_CLICKHOUSESERVICE"].methods_by_name["GetClickhouseUri"]._options = None
35
+ _globals["_CLICKHOUSESERVICE"].methods_by_name["GetClickhouseUri"]._serialized_options = b"\220\002\001\200}\013"
36
+ _globals["_GETCLICKHOUSEURIREQUEST"]._serialized_start = 118
37
+ _globals["_GETCLICKHOUSEURIREQUEST"]._serialized_end = 143
38
+ _globals["_GETCLICKHOUSEURIRESPONSE"]._serialized_start = 145
39
+ _globals["_GETCLICKHOUSEURIRESPONSE"]._serialized_end = 195
40
+ _globals["_CLICKHOUSESERVICE"]._serialized_start = 198
41
+ _globals["_CLICKHOUSESERVICE"]._serialized_end = 330
42
+ # @@protoc_insertion_point(module_scope)
@@ -0,0 +1,17 @@
1
+ from chalk._gen.chalk.auth.v1 import permissions_pb2 as _permissions_pb2
2
+ from chalk._gen.chalk.utils.v1 import sensitive_pb2 as _sensitive_pb2
3
+ from google.protobuf import descriptor as _descriptor
4
+ from google.protobuf import message as _message
5
+ from typing import ClassVar as _ClassVar, Optional as _Optional
6
+
7
+ DESCRIPTOR: _descriptor.FileDescriptor
8
+
9
+ class GetClickhouseUriRequest(_message.Message):
10
+ __slots__ = ()
11
+ def __init__(self) -> None: ...
12
+
13
+ class GetClickhouseUriResponse(_message.Message):
14
+ __slots__ = ("uri",)
15
+ URI_FIELD_NUMBER: _ClassVar[int]
16
+ uri: str
17
+ def __init__(self, uri: _Optional[str] = ...) -> None: ...
@@ -0,0 +1,78 @@
1
+ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
2
+ """Client and server classes corresponding to protobuf-defined services."""
3
+
4
+ import grpc
5
+
6
+ from chalk._gen.chalk.server.v1 import clickhouse_pb2 as chalk_dot_server_dot_v1_dot_clickhouse__pb2
7
+
8
+
9
+ class ClickhouseServiceStub(object):
10
+ """Missing associated documentation comment in .proto file."""
11
+
12
+ def __init__(self, channel):
13
+ """Constructor.
14
+
15
+ Args:
16
+ channel: A grpc.Channel.
17
+ """
18
+ self.GetClickhouseUri = channel.unary_unary(
19
+ "/chalk.server.v1.ClickhouseService/GetClickhouseUri",
20
+ request_serializer=chalk_dot_server_dot_v1_dot_clickhouse__pb2.GetClickhouseUriRequest.SerializeToString,
21
+ response_deserializer=chalk_dot_server_dot_v1_dot_clickhouse__pb2.GetClickhouseUriResponse.FromString,
22
+ )
23
+
24
+
25
+ class ClickhouseServiceServicer(object):
26
+ """Missing associated documentation comment in .proto file."""
27
+
28
+ def GetClickhouseUri(self, request, context):
29
+ """Get the Clickhouse connection URI for an environment"""
30
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
31
+ context.set_details("Method not implemented!")
32
+ raise NotImplementedError("Method not implemented!")
33
+
34
+
35
+ def add_ClickhouseServiceServicer_to_server(servicer, server):
36
+ rpc_method_handlers = {
37
+ "GetClickhouseUri": grpc.unary_unary_rpc_method_handler(
38
+ servicer.GetClickhouseUri,
39
+ request_deserializer=chalk_dot_server_dot_v1_dot_clickhouse__pb2.GetClickhouseUriRequest.FromString,
40
+ response_serializer=chalk_dot_server_dot_v1_dot_clickhouse__pb2.GetClickhouseUriResponse.SerializeToString,
41
+ ),
42
+ }
43
+ generic_handler = grpc.method_handlers_generic_handler("chalk.server.v1.ClickhouseService", rpc_method_handlers)
44
+ server.add_generic_rpc_handlers((generic_handler,))
45
+
46
+
47
+ # This class is part of an EXPERIMENTAL API.
48
+ class ClickhouseService(object):
49
+ """Missing associated documentation comment in .proto file."""
50
+
51
+ @staticmethod
52
+ def GetClickhouseUri(
53
+ request,
54
+ target,
55
+ options=(),
56
+ channel_credentials=None,
57
+ call_credentials=None,
58
+ insecure=False,
59
+ compression=None,
60
+ wait_for_ready=None,
61
+ timeout=None,
62
+ metadata=None,
63
+ ):
64
+ return grpc.experimental.unary_unary(
65
+ request,
66
+ target,
67
+ "/chalk.server.v1.ClickhouseService/GetClickhouseUri",
68
+ chalk_dot_server_dot_v1_dot_clickhouse__pb2.GetClickhouseUriRequest.SerializeToString,
69
+ chalk_dot_server_dot_v1_dot_clickhouse__pb2.GetClickhouseUriResponse.FromString,
70
+ options,
71
+ channel_credentials,
72
+ insecure,
73
+ call_credentials,
74
+ compression,
75
+ wait_for_ready,
76
+ timeout,
77
+ metadata,
78
+ )
@@ -0,0 +1,38 @@
1
+ """
2
+ @generated by mypy-protobuf. Do not edit manually!
3
+ isort:skip_file
4
+ """
5
+
6
+ from abc import (
7
+ ABCMeta,
8
+ abstractmethod,
9
+ )
10
+ from chalk._gen.chalk.server.v1.clickhouse_pb2 import (
11
+ GetClickhouseUriRequest,
12
+ GetClickhouseUriResponse,
13
+ )
14
+ from grpc import (
15
+ Channel,
16
+ Server,
17
+ ServicerContext,
18
+ UnaryUnaryMultiCallable,
19
+ )
20
+
21
+ class ClickhouseServiceStub:
22
+ def __init__(self, channel: Channel) -> None: ...
23
+ GetClickhouseUri: UnaryUnaryMultiCallable[
24
+ GetClickhouseUriRequest,
25
+ GetClickhouseUriResponse,
26
+ ]
27
+ """Get the Clickhouse connection URI for an environment"""
28
+
29
+ class ClickhouseServiceServicer(metaclass=ABCMeta):
30
+ @abstractmethod
31
+ def GetClickhouseUri(
32
+ self,
33
+ request: GetClickhouseUriRequest,
34
+ context: ServicerContext,
35
+ ) -> GetClickhouseUriResponse:
36
+ """Get the Clickhouse connection URI for an environment"""
37
+
38
+ def add_ClickhouseServiceServicer_to_server(servicer: ClickhouseServiceServicer, server: Server) -> None: ...
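The four new files above introduce a ClickhouseService with a single GetClickhouseUri RPC that returns the ClickHouse connection URI for an environment. A minimal calling sketch against the generated stub, assuming the conventional clickhouse_pb2 / clickhouse_pb2_grpc module names; the channel target is a placeholder, and real calls also need Chalk auth credentials/metadata, which are omitted here:

    import grpc

    from chalk._gen.chalk.server.v1 import clickhouse_pb2, clickhouse_pb2_grpc

    # Placeholder target; authentication metadata is omitted in this sketch.
    channel = grpc.secure_channel("api.chalk.example:443", grpc.ssl_channel_credentials())
    stub = clickhouse_pb2_grpc.ClickhouseServiceStub(channel)
    response = stub.GetClickhouseUri(clickhouse_pb2.GetClickhouseUriRequest())
    print(response.uri)  # the uri field is marked sensitive in the proto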
@@ -24,7 +24,7 @@ from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mas
24
24
 
25
25
 
26
26
  DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
27
- b'\n\x1c\x63halk/server/v1/deploy.proto\x12\x0f\x63halk.server.v1\x1a\x1f\x63halk/artifacts/v1/export.proto\x1a\x19\x63halk/auth/v1/audit.proto\x1a\x1f\x63halk/auth/v1/permissions.proto\x1a!chalk/common/v1/chalk_error.proto\x1a\x1a\x63halk/graph/v1/graph.proto\x1a chalk/server/v1/deployment.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"\x97\x01\n\x13\x44\x65ployBranchRequest\x12\x1f\n\x0b\x62ranch_name\x18\x01 \x01(\tR\nbranchName\x12!\n\x0creset_branch\x18\x02 \x01(\x08R\x0bresetBranch\x12\x18\n\x07\x61rchive\x18\x03 \x01(\x0cR\x07\x61rchive\x12"\n\ris_hot_deploy\x18\x04 \x01(\x08R\x0bisHotDeploy"\x89\x02\n\x14\x44\x65ployBranchResponse\x12#\n\rdeployment_id\x18\x01 \x01(\tR\x0c\x64\x65ploymentId\x12\x34\n\x05graph\x18\x02 \x01(\x0b\x32\x15.chalk.graph.v1.GraphB\x02\x18\x01H\x00R\x05graph\x88\x01\x01\x12H\n\x11\x64\x65ployment_errors\x18\x03 \x03(\x0b\x32\x1b.chalk.common.v1.ChalkErrorR\x10\x64\x65ploymentErrors\x12\x37\n\x06\x65xport\x18\x04 \x01(\x0b\x32\x1a.chalk.artifacts.v1.ExportH\x01R\x06\x65xport\x88\x01\x01\x42\x08\n\x06_graphB\t\n\x07_export"\x92\x02\n\'CreateBranchFromSourceDeploymentRequest\x12\x1f\n\x0b\x62ranch_name\x18\x01 \x01(\tR\nbranchName\x12.\n\x12source_branch_name\x18\x02 \x01(\tH\x00R\x10sourceBranchName\x12\x32\n\x14source_deployment_id\x18\x03 \x01(\tH\x00R\x12sourceDeploymentId\x12X\n\x1b\x63urrent_mainline_deployment\x18\x04 \x01(\x0b\x32\x16.google.protobuf.EmptyH\x00R\x19\x63urrentMainlineDeploymentB\x08\n\x06source"\x91\x02\n(CreateBranchFromSourceDeploymentResponse\x12#\n\rdeployment_id\x18\x01 \x01(\tR\x0c\x64\x65ploymentId\x12H\n\x11\x64\x65ployment_errors\x18\x02 \x03(\x0b\x32\x1b.chalk.common.v1.ChalkErrorR\x10\x64\x65ploymentErrors\x12\x37\n\x06\x65xport\x18\x03 \x01(\x0b\x32\x1a.chalk.artifacts.v1.ExportH\x00R\x06\x65xport\x88\x01\x01\x12\x32\n\x15\x62ranch_already_exists\x18\x04 \x01(\x08R\x13\x62ranchAlreadyExistsB\t\n\x07_export"t\n\x14GetDeploymentRequest\x12#\n\rdeployment_id\x18\x01 \x01(\tR\x0c\x64\x65ploymentId\x12\x37\n\tread_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskR\x08readMask"\x98\x01\n\x15GetDeploymentResponse\x12;\n\ndeployment\x18\x01 \x01(\x0b\x32\x1b.chalk.server.v1.DeploymentR\ndeployment\x12\x37\n\x06\x65xport\x18\x02 \x01(\x0b\x32\x1a.chalk.artifacts.v1.ExportH\x00R\x06\x65xport\x88\x01\x01\x42\t\n\x07_export"\xda\x01\n\x16ListDeploymentsRequest\x12\x1b\n\x06\x63ursor\x18\x01 \x01(\tH\x00R\x06\x63ursor\x88\x01\x01\x12\x19\n\x05limit\x18\x02 \x01(\x05H\x01R\x05limit\x88\x01\x01\x12*\n\x0einclude_branch\x18\x03 \x01(\x08H\x02R\rincludeBranch\x88\x01\x01\x12$\n\x0b\x62ranch_name\x18\x04 \x01(\tH\x03R\nbranchName\x88\x01\x01\x42\t\n\x07_cursorB\x08\n\x06_limitB\x11\n\x0f_include_branchB\x0e\n\x0c_branch_name"\x80\x01\n\x17ListDeploymentsResponse\x12=\n\x0b\x64\x65ployments\x18\x01 \x03(\x0b\x32\x1b.chalk.server.v1.DeploymentR\x0b\x64\x65ployments\x12\x1b\n\x06\x63ursor\x18\x02 \x01(\tH\x00R\x06\x63ursor\x88\x01\x01\x42\t\n\x07_cursor"?\n\x18SuspendDeploymentRequest\x12#\n\rdeployment_id\x18\x01 \x01(\tR\x0c\x64\x65ploymentId"X\n\x19SuspendDeploymentResponse\x12;\n\ndeployment\x18\x01 \x01(\x0b\x32\x1b.chalk.server.v1.DeploymentR\ndeployment"v\n\x16ScaleDeploymentRequest\x12#\n\rdeployment_id\x18\x01 \x01(\tR\x0c\x64\x65ploymentId\x12\x37\n\x06sizing\x18\x02 \x01(\x0b\x32\x1f.chalk.server.v1.InstanceSizingR\x06sizing"V\n\x17ScaleDeploymentResponse\x12;\n\ndeployment\x18\x01 
\x01(\x0b\x32\x1b.chalk.server.v1.DeploymentR\ndeployment"\x89\x01\n\x14TagDeploymentRequest\x12#\n\rdeployment_id\x18\x01 \x01(\tR\x0c\x64\x65ploymentId\x12\x10\n\x03tag\x18\x02 \x01(\tR\x03tag\x12(\n\rmirror_weight\x18\x03 \x01(\x05H\x00R\x0cmirrorWeight\x88\x01\x01\x42\x10\n\x0e_mirror_weight"\xaa\x01\n\x15TagDeploymentResponse\x12;\n\ndeployment\x18\x01 \x01(\x0b\x32\x1b.chalk.server.v1.DeploymentR\ndeployment\x12\x39\n\x16untagged_deployment_id\x18\x02 \x01(\tH\x00R\x14untaggedDeploymentId\x88\x01\x01\x42\x19\n\x17_untagged_deployment_id"\x1d\n\x1bGetActiveDeploymentsRequest"]\n\x1cGetActiveDeploymentsResponse\x12=\n\x0b\x64\x65ployments\x18\x01 \x03(\x0b\x32\x1b.chalk.server.v1.DeploymentR\x0b\x64\x65ployments2\xad\x07\n\rDeployService\x12`\n\x0c\x44\x65ployBranch\x12$.chalk.server.v1.DeployBranchRequest\x1a%.chalk.server.v1.DeployBranchResponse"\x03\x80}\r\x12\x9c\x01\n CreateBranchFromSourceDeployment\x12\x38.chalk.server.v1.CreateBranchFromSourceDeploymentRequest\x1a\x39.chalk.server.v1.CreateBranchFromSourceDeploymentResponse"\x03\x80}\r\x12\x63\n\rGetDeployment\x12%.chalk.server.v1.GetDeploymentRequest\x1a&.chalk.server.v1.GetDeploymentResponse"\x03\x80}\x0b\x12i\n\x0fListDeployments\x12\'.chalk.server.v1.ListDeploymentsRequest\x1a(.chalk.server.v1.ListDeploymentsResponse"\x03\x80}\x0b\x12x\n\x14GetActiveDeployments\x12,.chalk.server.v1.GetActiveDeploymentsRequest\x1a-.chalk.server.v1.GetActiveDeploymentsResponse"\x03\x80}\x02\x12u\n\x11SuspendDeployment\x12).chalk.server.v1.SuspendDeploymentRequest\x1a*.chalk.server.v1.SuspendDeploymentResponse"\t\x80}\x0c\x8a\xd3\x0e\x02\x08\x02\x12o\n\x0fScaleDeployment\x12\'.chalk.server.v1.ScaleDeploymentRequest\x1a(.chalk.server.v1.ScaleDeploymentResponse"\t\x80}\x0c\x8a\xd3\x0e\x02\x08\x02\x12i\n\rTagDeployment\x12%.chalk.server.v1.TagDeploymentRequest\x1a&.chalk.server.v1.TagDeploymentResponse"\t\x80}\x0c\x8a\xd3\x0e\x02\x08\x02\x42\x94\x01\n\x13\x63om.chalk.server.v1B\x0b\x44\x65ployProtoP\x01Z\x12server/v1;serverv1\xa2\x02\x03\x43SX\xaa\x02\x0f\x43halk.Server.V1\xca\x02\x0f\x43halk\\Server\\V1\xe2\x02\x1b\x43halk\\Server\\V1\\GPBMetadata\xea\x02\x11\x43halk::Server::V1b\x06proto3'
27
+ b'\n\x1c\x63halk/server/v1/deploy.proto\x12\x0f\x63halk.server.v1\x1a\x1f\x63halk/artifacts/v1/export.proto\x1a\x19\x63halk/auth/v1/audit.proto\x1a\x1f\x63halk/auth/v1/permissions.proto\x1a!chalk/common/v1/chalk_error.proto\x1a\x1a\x63halk/graph/v1/graph.proto\x1a chalk/server/v1/deployment.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"\x97\x01\n\x13\x44\x65ployBranchRequest\x12\x1f\n\x0b\x62ranch_name\x18\x01 \x01(\tR\nbranchName\x12!\n\x0creset_branch\x18\x02 \x01(\x08R\x0bresetBranch\x12\x18\n\x07\x61rchive\x18\x03 \x01(\x0cR\x07\x61rchive\x12"\n\ris_hot_deploy\x18\x04 \x01(\x08R\x0bisHotDeploy"\x89\x02\n\x14\x44\x65ployBranchResponse\x12#\n\rdeployment_id\x18\x01 \x01(\tR\x0c\x64\x65ploymentId\x12\x34\n\x05graph\x18\x02 \x01(\x0b\x32\x15.chalk.graph.v1.GraphB\x02\x18\x01H\x00R\x05graph\x88\x01\x01\x12H\n\x11\x64\x65ployment_errors\x18\x03 \x03(\x0b\x32\x1b.chalk.common.v1.ChalkErrorR\x10\x64\x65ploymentErrors\x12\x37\n\x06\x65xport\x18\x04 \x01(\x0b\x32\x1a.chalk.artifacts.v1.ExportH\x01R\x06\x65xport\x88\x01\x01\x42\x08\n\x06_graphB\t\n\x07_export"\x92\x02\n\'CreateBranchFromSourceDeploymentRequest\x12\x1f\n\x0b\x62ranch_name\x18\x01 \x01(\tR\nbranchName\x12.\n\x12source_branch_name\x18\x02 \x01(\tH\x00R\x10sourceBranchName\x12\x32\n\x14source_deployment_id\x18\x03 \x01(\tH\x00R\x12sourceDeploymentId\x12X\n\x1b\x63urrent_mainline_deployment\x18\x04 \x01(\x0b\x32\x16.google.protobuf.EmptyH\x00R\x19\x63urrentMainlineDeploymentB\x08\n\x06source"\x91\x02\n(CreateBranchFromSourceDeploymentResponse\x12#\n\rdeployment_id\x18\x01 \x01(\tR\x0c\x64\x65ploymentId\x12H\n\x11\x64\x65ployment_errors\x18\x02 \x03(\x0b\x32\x1b.chalk.common.v1.ChalkErrorR\x10\x64\x65ploymentErrors\x12\x37\n\x06\x65xport\x18\x03 \x01(\x0b\x32\x1a.chalk.artifacts.v1.ExportH\x00R\x06\x65xport\x88\x01\x01\x12\x32\n\x15\x62ranch_already_exists\x18\x04 \x01(\x08R\x13\x62ranchAlreadyExistsB\t\n\x07_export"t\n\x14GetDeploymentRequest\x12#\n\rdeployment_id\x18\x01 \x01(\tR\x0c\x64\x65ploymentId\x12\x37\n\tread_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskR\x08readMask"\x98\x01\n\x15GetDeploymentResponse\x12;\n\ndeployment\x18\x01 \x01(\x0b\x32\x1b.chalk.server.v1.DeploymentR\ndeployment\x12\x37\n\x06\x65xport\x18\x02 \x01(\x0b\x32\x1a.chalk.artifacts.v1.ExportH\x00R\x06\x65xport\x88\x01\x01\x42\t\n\x07_export"\xda\x01\n\x16ListDeploymentsRequest\x12\x1b\n\x06\x63ursor\x18\x01 \x01(\tH\x00R\x06\x63ursor\x88\x01\x01\x12\x19\n\x05limit\x18\x02 \x01(\x05H\x01R\x05limit\x88\x01\x01\x12*\n\x0einclude_branch\x18\x03 \x01(\x08H\x02R\rincludeBranch\x88\x01\x01\x12$\n\x0b\x62ranch_name\x18\x04 \x01(\tH\x03R\nbranchName\x88\x01\x01\x42\t\n\x07_cursorB\x08\n\x06_limitB\x11\n\x0f_include_branchB\x0e\n\x0c_branch_name"\x80\x01\n\x17ListDeploymentsResponse\x12=\n\x0b\x64\x65ployments\x18\x01 \x03(\x0b\x32\x1b.chalk.server.v1.DeploymentR\x0b\x64\x65ployments\x12\x1b\n\x06\x63ursor\x18\x02 \x01(\tH\x00R\x06\x63ursor\x88\x01\x01\x42\t\n\x07_cursor"?\n\x18SuspendDeploymentRequest\x12#\n\rdeployment_id\x18\x01 \x01(\tR\x0c\x64\x65ploymentId"X\n\x19SuspendDeploymentResponse\x12;\n\ndeployment\x18\x01 \x01(\x0b\x32\x1b.chalk.server.v1.DeploymentR\ndeployment"v\n\x16ScaleDeploymentRequest\x12#\n\rdeployment_id\x18\x01 \x01(\tR\x0c\x64\x65ploymentId\x12\x37\n\x06sizing\x18\x02 \x01(\x0b\x32\x1f.chalk.server.v1.InstanceSizingR\x06sizing"V\n\x17ScaleDeploymentResponse\x12;\n\ndeployment\x18\x01 
\x01(\x0b\x32\x1b.chalk.server.v1.DeploymentR\ndeployment"\x89\x01\n\x14TagDeploymentRequest\x12#\n\rdeployment_id\x18\x01 \x01(\tR\x0c\x64\x65ploymentId\x12\x10\n\x03tag\x18\x02 \x01(\tR\x03tag\x12(\n\rmirror_weight\x18\x03 \x01(\x05H\x00R\x0cmirrorWeight\x88\x01\x01\x42\x10\n\x0e_mirror_weight"\xaa\x01\n\x15TagDeploymentResponse\x12;\n\ndeployment\x18\x01 \x01(\x0b\x32\x1b.chalk.server.v1.DeploymentR\ndeployment\x12\x39\n\x16untagged_deployment_id\x18\x02 \x01(\tH\x00R\x14untaggedDeploymentId\x88\x01\x01\x42\x19\n\x17_untagged_deployment_id"\x1d\n\x1bGetActiveDeploymentsRequest"]\n\x1cGetActiveDeploymentsResponse\x12=\n\x0b\x64\x65ployments\x18\x01 \x03(\x0b\x32\x1b.chalk.server.v1.DeploymentR\x0b\x64\x65ployments"A\n\x1aGetDeploymentSourceRequest\x12#\n\rdeployment_id\x18\x01 \x01(\tR\x0c\x64\x65ploymentId"<\n\x1bGetDeploymentSourceResponse\x12\x1d\n\nsigned_url\x18\x01 \x01(\tR\tsignedUrl2\xa4\x08\n\rDeployService\x12`\n\x0c\x44\x65ployBranch\x12$.chalk.server.v1.DeployBranchRequest\x1a%.chalk.server.v1.DeployBranchResponse"\x03\x80}\r\x12\x9c\x01\n CreateBranchFromSourceDeployment\x12\x38.chalk.server.v1.CreateBranchFromSourceDeploymentRequest\x1a\x39.chalk.server.v1.CreateBranchFromSourceDeploymentResponse"\x03\x80}\r\x12\x63\n\rGetDeployment\x12%.chalk.server.v1.GetDeploymentRequest\x1a&.chalk.server.v1.GetDeploymentResponse"\x03\x80}\x0b\x12i\n\x0fListDeployments\x12\'.chalk.server.v1.ListDeploymentsRequest\x1a(.chalk.server.v1.ListDeploymentsResponse"\x03\x80}\x0b\x12x\n\x14GetActiveDeployments\x12,.chalk.server.v1.GetActiveDeploymentsRequest\x1a-.chalk.server.v1.GetActiveDeploymentsResponse"\x03\x80}\x02\x12u\n\x11SuspendDeployment\x12).chalk.server.v1.SuspendDeploymentRequest\x1a*.chalk.server.v1.SuspendDeploymentResponse"\t\x80}\x0c\x8a\xd3\x0e\x02\x08\x02\x12o\n\x0fScaleDeployment\x12\'.chalk.server.v1.ScaleDeploymentRequest\x1a(.chalk.server.v1.ScaleDeploymentResponse"\t\x80}\x0c\x8a\xd3\x0e\x02\x08\x02\x12i\n\rTagDeployment\x12%.chalk.server.v1.TagDeploymentRequest\x1a&.chalk.server.v1.TagDeploymentResponse"\t\x80}\x0c\x8a\xd3\x0e\x02\x08\x02\x12u\n\x13GetDeploymentSource\x12+.chalk.server.v1.GetDeploymentSourceRequest\x1a,.chalk.server.v1.GetDeploymentSourceResponse"\x03\x80}\x0b\x42\x94\x01\n\x13\x63om.chalk.server.v1B\x0b\x44\x65ployProtoP\x01Z\x12server/v1;serverv1\xa2\x02\x03\x43SX\xaa\x02\x0f\x43halk.Server.V1\xca\x02\x0f\x43halk\\Server\\V1\xe2\x02\x1b\x43halk\\Server\\V1\\GPBMetadata\xea\x02\x11\x43halk::Server::V1b\x06proto3'
28
28
  )
29
29
 
30
30
  _globals = globals()
@@ -59,6 +59,8 @@ if _descriptor._USE_C_DESCRIPTORS == False:
59
59
  _globals["_DEPLOYSERVICE"].methods_by_name[
60
60
  "TagDeployment"
61
61
  ]._serialized_options = b"\200}\014\212\323\016\002\010\002"
62
+ _globals["_DEPLOYSERVICE"].methods_by_name["GetDeploymentSource"]._options = None
63
+ _globals["_DEPLOYSERVICE"].methods_by_name["GetDeploymentSource"]._serialized_options = b"\200}\013"
62
64
  _globals["_DEPLOYBRANCHREQUEST"]._serialized_start = 303
63
65
  _globals["_DEPLOYBRANCHREQUEST"]._serialized_end = 454
64
66
  _globals["_DEPLOYBRANCHRESPONSE"]._serialized_start = 457
@@ -91,6 +93,10 @@ if _descriptor._USE_C_DESCRIPTORS == False:
91
93
  _globals["_GETACTIVEDEPLOYMENTSREQUEST"]._serialized_end = 2607
92
94
  _globals["_GETACTIVEDEPLOYMENTSRESPONSE"]._serialized_start = 2609
93
95
  _globals["_GETACTIVEDEPLOYMENTSRESPONSE"]._serialized_end = 2702
94
- _globals["_DEPLOYSERVICE"]._serialized_start = 2705
95
- _globals["_DEPLOYSERVICE"]._serialized_end = 3646
96
+ _globals["_GETDEPLOYMENTSOURCEREQUEST"]._serialized_start = 2704
97
+ _globals["_GETDEPLOYMENTSOURCEREQUEST"]._serialized_end = 2769
98
+ _globals["_GETDEPLOYMENTSOURCERESPONSE"]._serialized_start = 2771
99
+ _globals["_GETDEPLOYMENTSOURCERESPONSE"]._serialized_end = 2831
100
+ _globals["_DEPLOYSERVICE"]._serialized_start = 2834
101
+ _globals["_DEPLOYSERVICE"]._serialized_end = 3894
96
102
  # @@protoc_insertion_point(module_scope)
@@ -210,3 +210,15 @@ class GetActiveDeploymentsResponse(_message.Message):
210
210
  def __init__(
211
211
  self, deployments: _Optional[_Iterable[_Union[_deployment_pb2.Deployment, _Mapping]]] = ...
212
212
  ) -> None: ...
213
+
214
+ class GetDeploymentSourceRequest(_message.Message):
215
+ __slots__ = ("deployment_id",)
216
+ DEPLOYMENT_ID_FIELD_NUMBER: _ClassVar[int]
217
+ deployment_id: str
218
+ def __init__(self, deployment_id: _Optional[str] = ...) -> None: ...
219
+
220
+ class GetDeploymentSourceResponse(_message.Message):
221
+ __slots__ = ("signed_url",)
222
+ SIGNED_URL_FIELD_NUMBER: _ClassVar[int]
223
+ signed_url: str
224
+ def __init__(self, signed_url: _Optional[str] = ...) -> None: ...
@@ -55,6 +55,11 @@ class DeployServiceStub(object):
55
55
  request_serializer=chalk_dot_server_dot_v1_dot_deploy__pb2.TagDeploymentRequest.SerializeToString,
56
56
  response_deserializer=chalk_dot_server_dot_v1_dot_deploy__pb2.TagDeploymentResponse.FromString,
57
57
  )
58
+ self.GetDeploymentSource = channel.unary_unary(
59
+ "/chalk.server.v1.DeployService/GetDeploymentSource",
60
+ request_serializer=chalk_dot_server_dot_v1_dot_deploy__pb2.GetDeploymentSourceRequest.SerializeToString,
61
+ response_deserializer=chalk_dot_server_dot_v1_dot_deploy__pb2.GetDeploymentSourceResponse.FromString,
62
+ )
58
63
 
59
64
 
60
65
  class DeployServiceServicer(object):
@@ -108,6 +113,12 @@ class DeployServiceServicer(object):
108
113
  context.set_details("Method not implemented!")
109
114
  raise NotImplementedError("Method not implemented!")
110
115
 
116
+ def GetDeploymentSource(self, request, context):
117
+ """Missing associated documentation comment in .proto file."""
118
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
119
+ context.set_details("Method not implemented!")
120
+ raise NotImplementedError("Method not implemented!")
121
+
111
122
 
112
123
  def add_DeployServiceServicer_to_server(servicer, server):
113
124
  rpc_method_handlers = {
@@ -151,6 +162,11 @@ def add_DeployServiceServicer_to_server(servicer, server):
151
162
  request_deserializer=chalk_dot_server_dot_v1_dot_deploy__pb2.TagDeploymentRequest.FromString,
152
163
  response_serializer=chalk_dot_server_dot_v1_dot_deploy__pb2.TagDeploymentResponse.SerializeToString,
153
164
  ),
165
+ "GetDeploymentSource": grpc.unary_unary_rpc_method_handler(
166
+ servicer.GetDeploymentSource,
167
+ request_deserializer=chalk_dot_server_dot_v1_dot_deploy__pb2.GetDeploymentSourceRequest.FromString,
168
+ response_serializer=chalk_dot_server_dot_v1_dot_deploy__pb2.GetDeploymentSourceResponse.SerializeToString,
169
+ ),
154
170
  }
155
171
  generic_handler = grpc.method_handlers_generic_handler("chalk.server.v1.DeployService", rpc_method_handlers)
156
172
  server.add_generic_rpc_handlers((generic_handler,))
@@ -391,3 +407,32 @@ class DeployService(object):
391
407
  timeout,
392
408
  metadata,
393
409
  )
410
+
411
+ @staticmethod
412
+ def GetDeploymentSource(
413
+ request,
414
+ target,
415
+ options=(),
416
+ channel_credentials=None,
417
+ call_credentials=None,
418
+ insecure=False,
419
+ compression=None,
420
+ wait_for_ready=None,
421
+ timeout=None,
422
+ metadata=None,
423
+ ):
424
+ return grpc.experimental.unary_unary(
425
+ request,
426
+ target,
427
+ "/chalk.server.v1.DeployService/GetDeploymentSource",
428
+ chalk_dot_server_dot_v1_dot_deploy__pb2.GetDeploymentSourceRequest.SerializeToString,
429
+ chalk_dot_server_dot_v1_dot_deploy__pb2.GetDeploymentSourceResponse.FromString,
430
+ options,
431
+ channel_credentials,
432
+ insecure,
433
+ call_credentials,
434
+ compression,
435
+ wait_for_ready,
436
+ timeout,
437
+ metadata,
438
+ )
@@ -16,6 +16,8 @@ from chalk._gen.chalk.server.v1.deploy_pb2 import (
16
16
  GetActiveDeploymentsResponse,
17
17
  GetDeploymentRequest,
18
18
  GetDeploymentResponse,
19
+ GetDeploymentSourceRequest,
20
+ GetDeploymentSourceResponse,
19
21
  ListDeploymentsRequest,
20
22
  ListDeploymentsResponse,
21
23
  ScaleDeploymentRequest,
@@ -66,6 +68,10 @@ class DeployServiceStub:
66
68
  TagDeploymentRequest,
67
69
  TagDeploymentResponse,
68
70
  ]
71
+ GetDeploymentSource: UnaryUnaryMultiCallable[
72
+ GetDeploymentSourceRequest,
73
+ GetDeploymentSourceResponse,
74
+ ]
69
75
 
70
76
  class DeployServiceServicer(metaclass=ABCMeta):
71
77
  @abstractmethod
@@ -116,5 +122,11 @@ class DeployServiceServicer(metaclass=ABCMeta):
116
122
  request: TagDeploymentRequest,
117
123
  context: ServicerContext,
118
124
  ) -> TagDeploymentResponse: ...
125
+ @abstractmethod
126
+ def GetDeploymentSource(
127
+ self,
128
+ request: GetDeploymentSourceRequest,
129
+ context: ServicerContext,
130
+ ) -> GetDeploymentSourceResponse: ...
119
131
 
120
132
  def add_DeployServiceServicer_to_server(servicer: DeployServiceServicer, server: Server) -> None: ...
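Together, these deploy_pb2 / deploy_pb2_grpc hunks add a GetDeploymentSource RPC to DeployService, which takes a deployment id and returns a signed URL for that deployment's source. A minimal sketch against the generated stub, reusing an authenticated `channel` like the one in the ClickhouseService sketch above; the deployment id is a placeholder:

    from chalk._gen.chalk.server.v1 import deploy_pb2, deploy_pb2_grpc

    stub = deploy_pb2_grpc.DeployServiceStub(channel)  # `channel`: an authenticated grpc.Channel
    response = stub.GetDeploymentSource(
        deploy_pb2.GetDeploymentSourceRequest(deployment_id="...")  # placeholder deployment id
    )
    print(response.signed_url)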
@@ -45,6 +45,7 @@ class CronResolverRun(_message.Message):
45
45
  "lower_bound",
46
46
  "upper_bound",
47
47
  "max_samples",
48
+ "used_job_queue",
48
49
  )
49
50
  ID_FIELD_NUMBER: _ClassVar[int]
50
51
  ENVIRONMENT_ID_FIELD_NUMBER: _ClassVar[int]
@@ -60,6 +61,7 @@ class CronResolverRun(_message.Message):
60
61
  LOWER_BOUND_FIELD_NUMBER: _ClassVar[int]
61
62
  UPPER_BOUND_FIELD_NUMBER: _ClassVar[int]
62
63
  MAX_SAMPLES_FIELD_NUMBER: _ClassVar[int]
64
+ USED_JOB_QUEUE_FIELD_NUMBER: _ClassVar[int]
63
65
  id: str
64
66
  environment_id: str
65
67
  resolver_fqn: str
@@ -74,6 +76,7 @@ class CronResolverRun(_message.Message):
74
76
  lower_bound: _timestamp_pb2.Timestamp
75
77
  upper_bound: _timestamp_pb2.Timestamp
76
78
  max_samples: int
79
+ used_job_queue: bool
77
80
  def __init__(
78
81
  self,
79
82
  id: _Optional[str] = ...,
@@ -90,6 +93,7 @@ class CronResolverRun(_message.Message):
90
93
  lower_bound: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ...,
91
94
  upper_bound: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ...,
92
95
  max_samples: _Optional[int] = ...,
96
+ used_job_queue: bool = ...,
93
97
  ) -> None: ...
94
98
 
95
99
  class ManualTriggerCronResolverRequest(_message.Message):
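The hunk above adds a used_job_queue boolean to CronResolverRun. A trivial sketch of reading it; `cron_pb2` is a hypothetical alias for the generated module that defines CronResolverRun, which is not named in this diff:

    run = cron_pb2.CronResolverRun(id="...", used_job_queue=True)  # placeholder values
    if run.used_job_queue:
        print("this cron resolver run went through the job queue")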
chalk/_version.py CHANGED
@@ -1 +1 @@
1
- __version__ = "2.94.8"
1
+ __version__ = "2.95.0"
chalk/client/client.py CHANGED
@@ -2157,21 +2157,20 @@ class ChalkClient:
2157
2157
  name: str,
2158
2158
  version: Optional[int] = None,
2159
2159
  ) -> Union[GetRegisteredModelResponse, GetRegisteredModelVersionResponse]:
2160
- """
2161
- Retrieve a registered model from the Chalk model registry.
2160
+ """Retrieve a registered model from the Chalk model registry.
2162
2161
 
2163
2162
  Parameters
2164
2163
  ----------
2165
- name : str
2166
- Name of the model to retrieve
2167
- version : int, optional
2168
- Specific version number to retrieve. If not provided, returns
2169
- information about all versions of the model
2164
+ name
2165
+ Name of the model to retrieve.
2166
+ version
2167
+ Specific version number to retrieve. If not provided, returns
2168
+ information about all versions of the model.
2170
2169
 
2171
2170
  Returns
2172
2171
  -------
2173
- GetRegisteredModelResponse or GetRegisteredModelVersionResponse
2174
- Model information including metadata, versions, and configuration details
2172
+ Union[GetRegisteredModelResponse, GetRegisteredModelVersionResponse]
2173
+ Model information including metadata, versions, and configuration details.
2175
2174
 
2176
2175
  Examples
2177
2176
  --------
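The call shape of get_model is unchanged by the docstring rewrite; a short usage sketch, where the model name and version are placeholders and ChalkClient() is assumed to pick up credentials from the environment:

    from chalk.client import ChalkClient

    client = ChalkClient()
    all_versions = client.get_model(name="my_model")            # GetRegisteredModelResponse
    one_version = client.get_model(name="my_model", version=2)  # GetRegisteredModelVersionResponse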
@@ -2248,8 +2247,7 @@ class ChalkClient:
2248
2247
  source_config: Optional[SourceConfig] = None,
2249
2248
  dependencies: Optional[List[str]] = None,
2250
2249
  ) -> RegisterModelVersionResponse:
2251
- """
2252
- Register a model in the Chalk model registry.
2250
+ """Register a model in the Chalk model registry.
2253
2251
 
2254
2252
  Parameters
2255
2253
  ----------
@@ -2291,6 +2289,7 @@ class ChalkClient:
2291
2289
  dependencies : List[str], optional
2292
2290
  List of package dependencies needed to run this model.
2293
2291
  e.g. ["torch==2.7.1", "numpy==1.26.4"]
2292
+
2294
2293
  Returns
2295
2294
  -------
2296
2295
  ModelVersion
@@ -1647,49 +1647,49 @@ class ChalkGRPCClient:
1647
1647
  source_config: Optional[SourceConfig] = None,
1648
1648
  dependencies: Optional[List[str]] = None,
1649
1649
  ) -> RegisterModelVersionResponse:
1650
- """
1651
- Register a model in the Chalk model registry.
1650
+ """Register a model in the Chalk model registry.
1652
1651
 
1653
1652
  Parameters
1654
1653
  ----------
1655
- name : str
1656
- Unique name for the model
1657
- aliases : list of str, optional
1658
- List of version aliases (e.g., ["v1.0", "latest"])
1659
- model : object, optional
1660
- Python model object (for object-based registration)
1661
- model_paths : list of str, optional
1662
- Paths to model files (for file-based registration)
1663
- additional_files : List[str], optional
1664
- Additional files needed for inference (tokenizers, configs, etc.)
1665
- model_type : ModelType, optional
1666
- Type of model framework
1667
- model_encoding : ModelEncoding, optional
1668
- Serialization format
1669
- input_schema : dict, list, or Any
1670
- Definition of the input schema. Can be:
1671
- - dict: Dictionary mapping column names to dtypes for tabular data
1672
- - list: List of (shape, dtype) tuples for tensor data
1673
- output_schema : dict, list, or Any
1674
- Definition of the output schema. Can be:
1675
- - dict: Dictionary mapping column names to dtypes for tabular data
1676
- - list: List of (shape, dtype) tuples for tensor data
1677
- metadata : dict, optional
1678
- Additional metadata dictionary containing framework info,
1679
- training details, performance metrics, etc.
1680
- input_features : FeatureReference, str, optional
1654
+ name
1655
+ Unique name for the model
1656
+ aliases
1657
+ List of version aliases (e.g., `["v1.0", "latest"]`)
1658
+ model
1659
+ Python model object (for object-based registration)
1660
+ model_paths
1661
+ Paths to model files (for file-based registration)
1662
+ additional_files
1663
+ Additional files needed for inference (tokenizers, configs, etc.)
1664
+ model_type
1665
+ Type of model framework
1666
+ model_encoding
1667
+ Serialization format
1668
+ input_schema
1669
+ Definition of the input schema. Can be:
1670
+ - dict: Dictionary mapping column names to dtypes for tabular data
1671
+ - list: List of `(shape, dtype)` tuples for tensor data
1672
+ output_schema
1673
+ Definition of the output schema. Can be:
1674
+ - dict: Dictionary mapping column names to dtypes for tabular data
1675
+ - list: List of `(shape, dtype)` tuples for tensor data
1676
+ metadata
1677
+ Additional metadata dictionary containing framework info,
1678
+ training details, performance metrics, etc.
1679
+ input_features
1681
1680
  The features to be used as inputs to the model.
1682
1681
  For example, `[User.message]`. Features can also be expressed as snakecased strings,
1683
1682
  e.g. `["user.message"]`
1684
- output_features : FeatureReference, str, optional
1683
+ output_features
1685
1684
  The features to be used as outputs to the model.
1686
1685
  For example, `[User.is_spam]`. Features can also be expressed as snakecased strings,
1687
1686
  e.g. `["user.is_spam"]`
1688
- source_config : LocalSourceConfig, S3SourceConfig, HFSourceConfig, optional
1687
+ source_config
1689
1688
  Config to pass credentials to access files from a remote source.
1690
- dependencies : List[str], optional
1689
+ dependencies
1691
1690
  List of package dependencies needed to run this model.
1692
- e.g. ["torch==2.7.1", "numpy==1.26.4"]
1691
+ e.g. `["torch==2.7.1", "numpy==1.26.4"]`
1692
+
1693
1693
  Returns
1694
1694
  -------
1695
1695
  ModelVersion
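A hedged sketch of a ChalkGRPCClient.register_model call using only the keyword parameters documented above; the model object, feature names, and dtype strings are illustrative placeholders, credentials are assumed to come from the environment, and the chalk.client import path is an assumption:

    from chalk.client import ChalkGRPCClient

    trained_model = ...  # placeholder: your in-memory model object
    client = ChalkGRPCClient()
    client.register_model(
        name="spam_classifier",                   # placeholder model name
        aliases=["latest"],
        model=trained_model,
        input_schema={"user.message": "string"},  # placeholder column -> dtype mapping
        output_schema={"user.is_spam": "bool"},
        input_features=["user.message"],          # snakecased feature references, per the docstring
        output_features=["user.is_spam"],
        dependencies=["torch==2.7.1", "numpy==1.26.4"],
    )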
@@ -2199,6 +2199,7 @@ def has_many(
2199
2199
  The maximum number of items to cache for the joined feature. The
2200
2200
  items in the joined feature aggregate, storing the latest values
2201
2201
  of the joined feature for each primary key in the joined feature.
2202
+
2202
2203
  Examples
2203
2204
  --------
2204
2205
  >>> from chalk.features import DataFrame, features, has_many
@@ -141,6 +141,7 @@ def features(
141
141
  The `cache_nulls` and `cache_defaults` options can be used together on the same feature with the
142
142
  following exceptions: if `cache_nulls=False`, then `cache_defaults` cannot be `"evict_defaults"`, and if
143
143
  `cache_nulls="evict_defaults"`, then `cache_defaults` cannot be `False`.
144
+
144
145
  Other Parameters
145
146
  ----------------
146
147
  cls
@@ -2829,6 +2829,8 @@ class StreamResolver(Resolver[P, T]):
2829
2829
  sql_settings: SQLResolverSettings | None,
2830
2830
  feature_expressions: dict[Feature, Underscore] | None,
2831
2831
  message_producer_parsed: StreamResolverMessageProducerParsed | None,
2832
+ skip_online: bool = False,
2833
+ skip_offline: bool = False,
2832
2834
  ):
2833
2835
  super().__init__(
2834
2836
  function_definition=function_definition,
@@ -2897,6 +2899,8 @@ class StreamResolver(Resolver[P, T]):
2897
2899
 
2898
2900
  self.feature_expressions: dict[Feature, Underscore] | None = feature_expressions
2899
2901
  self.message_producer_parsed: StreamResolverMessageProducerParsed | None = message_producer_parsed
2902
+ self.skip_online = skip_online
2903
+ self.skip_offline = skip_offline
2900
2904
 
2901
2905
  @property
2902
2906
  def output_features(self) -> Sequence[Feature]:
@@ -3808,6 +3812,8 @@ def make_stream_resolver(
3808
3812
  owner: Optional[str] = None,
3809
3813
  doc: str | None = None,
3810
3814
  sink: Sink | None = None,
3815
+ skip_online: bool = False,
3816
+ skip_offline: bool = False,
3811
3817
  ) -> StreamResolver:
3812
3818
  """Constructs a streaming resolver that, instead of a Python function,
3813
3819
  defines its output features as column projections on an input message.
@@ -3840,6 +3846,14 @@ def make_stream_resolver(
3840
3846
  sink
3841
3847
  An optional message producer configuration that specifies where to send messages.
3842
3848
  Read more at https://docs.chalk.ai/api-docs#Sink
3849
+ skip_online
3850
+ If True, skip online persistence (no writes to Redis/DynamoDB/etc).
3851
+ Results will still be processed but not stored in online stores.
3852
+ Note: Only applies to native streaming. Default: False
3853
+ skip_offline
3854
+ If True, skip offline persistence (no result bus publishing for offline storage).
3855
+ Results will still be processed but not stored in offline stores (S3/BigQuery/etc).
3856
+ Note: Only applies to native streaming. Default: False
3843
3857
 
3844
3858
  Returns
3845
3859
  -------
@@ -4017,6 +4031,8 @@ def make_stream_resolver(
4017
4031
  sql_settings=None,
4018
4032
  feature_expressions={unwrap_feature(x): u for x, u in output_features.items()},
4019
4033
  message_producer_parsed=message_producer_parsed,
4034
+ skip_online=skip_online,
4035
+ skip_offline=skip_offline,
4020
4036
  )
4021
4037
  resolver.add_to_registry(override=False)
4022
4038
  return resolver