wandb 0.15.4__py3-none-any.whl → 0.15.5__py3-none-any.whl
- wandb/__init__.py +1 -1
- wandb/analytics/sentry.py +1 -0
- wandb/apis/internal.py +3 -0
- wandb/apis/public.py +18 -20
- wandb/beta/workflows.py +5 -6
- wandb/cli/cli.py +27 -27
- wandb/data_types.py +2 -0
- wandb/integration/langchain/wandb_tracer.py +16 -179
- wandb/integration/sagemaker/config.py +2 -2
- wandb/integration/tensorboard/log.py +4 -4
- wandb/proto/v3/wandb_telemetry_pb2.py +10 -10
- wandb/proto/v4/wandb_telemetry_pb2.py +10 -10
- wandb/proto/wandb_deprecated.py +3 -1
- wandb/sdk/__init__.py +1 -4
- wandb/sdk/artifacts/__init__.py +0 -14
- wandb/sdk/artifacts/artifact.py +1757 -277
- wandb/sdk/artifacts/artifact_manifest_entry.py +26 -6
- wandb/sdk/artifacts/artifact_state.py +10 -0
- wandb/sdk/artifacts/artifacts_cache.py +7 -8
- wandb/sdk/artifacts/exceptions.py +4 -4
- wandb/sdk/artifacts/storage_handler.py +2 -2
- wandb/sdk/artifacts/storage_handlers/azure_handler.py +16 -6
- wandb/sdk/artifacts/storage_handlers/gcs_handler.py +2 -2
- wandb/sdk/artifacts/storage_handlers/http_handler.py +2 -2
- wandb/sdk/artifacts/storage_handlers/local_file_handler.py +2 -2
- wandb/sdk/artifacts/storage_handlers/multi_handler.py +2 -2
- wandb/sdk/artifacts/storage_handlers/s3_handler.py +35 -32
- wandb/sdk/artifacts/storage_handlers/tracking_handler.py +2 -2
- wandb/sdk/artifacts/storage_handlers/wb_artifact_handler.py +5 -9
- wandb/sdk/artifacts/storage_handlers/wb_local_artifact_handler.py +2 -2
- wandb/sdk/artifacts/storage_policies/s3_bucket_policy.py +2 -2
- wandb/sdk/artifacts/storage_policies/wandb_storage_policy.py +24 -16
- wandb/sdk/artifacts/storage_policy.py +3 -3
- wandb/sdk/data_types/_dtypes.py +7 -12
- wandb/sdk/data_types/base_types/json_metadata.py +2 -2
- wandb/sdk/data_types/base_types/media.py +5 -6
- wandb/sdk/data_types/base_types/wb_value.py +12 -13
- wandb/sdk/data_types/helper_types/bounding_boxes_2d.py +4 -5
- wandb/sdk/data_types/helper_types/classes.py +5 -8
- wandb/sdk/data_types/helper_types/image_mask.py +4 -5
- wandb/sdk/data_types/histogram.py +3 -3
- wandb/sdk/data_types/html.py +3 -4
- wandb/sdk/data_types/image.py +4 -5
- wandb/sdk/data_types/molecule.py +2 -2
- wandb/sdk/data_types/object_3d.py +3 -3
- wandb/sdk/data_types/plotly.py +2 -2
- wandb/sdk/data_types/saved_model.py +7 -8
- wandb/sdk/data_types/trace_tree.py +4 -4
- wandb/sdk/data_types/video.py +4 -4
- wandb/sdk/interface/interface.py +8 -10
- wandb/sdk/internal/file_stream.py +2 -3
- wandb/sdk/internal/internal_api.py +99 -4
- wandb/sdk/internal/job_builder.py +15 -7
- wandb/sdk/internal/sender.py +4 -0
- wandb/sdk/internal/settings_static.py +1 -0
- wandb/sdk/launch/_project_spec.py +9 -7
- wandb/sdk/launch/agent/agent.py +115 -58
- wandb/sdk/launch/agent/job_status_tracker.py +34 -0
- wandb/sdk/launch/agent/run_queue_item_file_saver.py +45 -0
- wandb/sdk/launch/builder/abstract.py +5 -1
- wandb/sdk/launch/builder/build.py +16 -10
- wandb/sdk/launch/builder/docker_builder.py +9 -2
- wandb/sdk/launch/builder/kaniko_builder.py +108 -22
- wandb/sdk/launch/builder/noop.py +3 -1
- wandb/sdk/launch/environment/aws_environment.py +2 -1
- wandb/sdk/launch/environment/azure_environment.py +124 -0
- wandb/sdk/launch/github_reference.py +30 -18
- wandb/sdk/launch/launch.py +1 -1
- wandb/sdk/launch/loader.py +15 -0
- wandb/sdk/launch/registry/azure_container_registry.py +132 -0
- wandb/sdk/launch/registry/elastic_container_registry.py +38 -4
- wandb/sdk/launch/registry/google_artifact_registry.py +46 -7
- wandb/sdk/launch/runner/abstract.py +19 -3
- wandb/sdk/launch/runner/kubernetes_runner.py +111 -47
- wandb/sdk/launch/runner/local_container.py +101 -48
- wandb/sdk/launch/runner/sagemaker_runner.py +59 -9
- wandb/sdk/launch/runner/vertex_runner.py +8 -4
- wandb/sdk/launch/sweeps/scheduler.py +102 -27
- wandb/sdk/launch/sweeps/utils.py +21 -0
- wandb/sdk/launch/utils.py +19 -7
- wandb/sdk/lib/_settings_toposort_generated.py +3 -0
- wandb/sdk/service/server.py +22 -9
- wandb/sdk/service/service.py +27 -8
- wandb/sdk/verify/verify.py +6 -9
- wandb/sdk/wandb_config.py +2 -4
- wandb/sdk/wandb_init.py +2 -0
- wandb/sdk/wandb_require.py +7 -0
- wandb/sdk/wandb_run.py +32 -35
- wandb/sdk/wandb_settings.py +10 -3
- wandb/testing/relay.py +15 -2
- wandb/util.py +55 -23
- {wandb-0.15.4.dist-info → wandb-0.15.5.dist-info}/METADATA +11 -8
- {wandb-0.15.4.dist-info → wandb-0.15.5.dist-info}/RECORD +97 -97
- wandb/integration/langchain/util.py +0 -191
- wandb/sdk/artifacts/invalid_artifact.py +0 -23
- wandb/sdk/artifacts/lazy_artifact.py +0 -162
- wandb/sdk/artifacts/local_artifact.py +0 -719
- wandb/sdk/artifacts/public_artifact.py +0 -1188
- {wandb-0.15.4.dist-info → wandb-0.15.5.dist-info}/LICENSE +0 -0
- {wandb-0.15.4.dist-info → wandb-0.15.5.dist-info}/WHEEL +0 -0
- {wandb-0.15.4.dist-info → wandb-0.15.5.dist-info}/entry_points.txt +0 -0
- {wandb-0.15.4.dist-info → wandb-0.15.5.dist-info}/top_level.txt +0 -0
wandb/__init__.py
CHANGED
@@ -11,7 +11,7 @@ For scripts and interactive notebooks, see https://github.com/wandb/examples.
 
 For reference documentation, see https://docs.wandb.com/ref/python.
 """
-__version__ = "0.15.4"
+__version__ = "0.15.5"
 
 # Used with pypi checks and other messages related to pip
 _wandb_module = "wandb"

wandb/analytics/sentry.py
CHANGED
wandb/apis/internal.py
CHANGED
@@ -209,6 +209,9 @@ class Api:
     def fail_run_queue_item(self, *args, **kwargs):
         return self.api.fail_run_queue_item(*args, **kwargs)
 
+    def update_run_queue_item_warning(self, *args, **kwargs):
+        return self.api.update_run_queue_item_warning(*args, **kwargs)
+
     def get_launch_agent(self, *args, **kwargs):
         return self.api.get_launch_agent(*args, **kwargs)

wandb/apis/public.py
CHANGED
@@ -41,7 +41,6 @@ from wandb import __version__, env, util
 from wandb.apis.internal import Api as InternalApi
 from wandb.apis.normalize import normalize_exceptions
 from wandb.errors import CommError
-from wandb.sdk import artifacts
 from wandb.sdk.data_types._dtypes import InvalidType, Type, TypeRegistry
 from wandb.sdk.internal.thread_local_settings import _thread_local_api_settings
 from wandb.sdk.launch.errors import LaunchError
@@ -909,14 +908,13 @@ class Api:
 
     @normalize_exceptions
     def artifact(self, name, type=None):
-        """Return a single artifact by parsing path in the form `entity/project/
+        """Return a single artifact by parsing path in the form `entity/project/name`.
 
         Arguments:
             name: (str) An artifact name. May be prefixed with entity/project. Valid names
                 can be in the following forms:
                     name:version
                     name:alias
-                    digest
             type: (str, optional) The type of artifact to fetch.
 
         Returns:
@@ -925,7 +923,9 @@
         if name is None:
             raise ValueError("You must specify name= to fetch an artifact.")
         entity, project, artifact_name = self._parse_artifact_path(name)
-        artifact =
+        artifact = wandb.Artifact._from_name(
+            entity, project, artifact_name, self.client
+        )
         if type is not None and artifact.type != type:
             raise ValueError(
                 f"type {type} specified but this artifact is of type {artifact.type}"
@@ -2140,10 +2140,10 @@ class Run(Attrs):
         )
         api.set_current_run_id(self.id)
 
-        if isinstance(artifact,
+        if isinstance(artifact, wandb.Artifact) and not artifact.is_draft():
             api.use_artifact(artifact.id, use_as=use_as or artifact.name)
             return artifact
-        elif isinstance(artifact, wandb.Artifact):
+        elif isinstance(artifact, wandb.Artifact) and artifact.is_draft():
             raise ValueError(
                 "Only existing artifacts are accepted by this api. "
                 "Manually create one with `wandb artifacts put`"
@@ -2168,7 +2168,7 @@
         )
         api.set_current_run_id(self.id)
 
-        if isinstance(artifact,
+        if isinstance(artifact, wandb.Artifact) and not artifact.is_draft():
             artifact_collection_name = artifact.name.split(":")[0]
             api.create_artifact(
                 artifact.type,
@@ -2177,7 +2177,7 @@
                 aliases=aliases,
             )
             return artifact
-        elif isinstance(artifact, wandb.Artifact):
+        elif isinstance(artifact, wandb.Artifact) and artifact.is_draft():
             raise ValueError(
                 "Only existing artifacts are accepted by this api. "
                 "Manually create one with `wandb artifacts put`"
@@ -3814,7 +3814,7 @@ class RunArtifacts(Paginator):
            }
            %s
            """
-            %
+            % wandb.Artifact._GQL_FRAGMENT
        )
 
        input_query = gql(
@@ -3842,7 +3842,7 @@
            }
            %s
            """
-            %
+            % wandb.Artifact._GQL_FRAGMENT
        )
 
        self.run = run
@@ -3890,14 +3890,14 @@
 
    def convert_objects(self):
        return [
-
-                self.client,
+            wandb.Artifact._from_attrs(
                self.run.entity,
                self.run.project,
                "{}:v{}".format(
                    r["node"]["artifactSequence"]["name"], r["node"]["versionIndex"]
                ),
                r["node"],
+                self.client,
            )
            for r in self.last_response["project"]["run"][self.run_key]["edges"]
        ]
@@ -4144,7 +4144,7 @@
                artifact_collection_edge_name(
                    server_supports_artifact_collections_gql_edges(client)
                ),
-
+                wandb.Artifact._GQL_FRAGMENT,
            )
        )
        super().__init__(client, variables, per_page)
@@ -4180,12 +4180,12 @@
        if self.last_response["project"]["artifactType"]["artifactCollection"] is None:
            return []
        return [
-
-                self.client,
+            wandb.Artifact._from_attrs(
                self.entity,
                self.project,
                self.collection_name + ":" + a["version"],
                a["node"],
+                self.client,
            )
            for a in self.last_response["project"]["artifactType"][
                "artifactCollection"
@@ -4221,7 +4221,7 @@ class ArtifactFiles(Paginator):
    def __init__(
        self,
        client: Client,
-        artifact: "
+        artifact: "wandb.Artifact",
        names: Optional[Sequence[str]] = None,
        per_page: int = 50,
    ):
@@ -4335,9 +4335,7 @@ class Job:
    def _get_code_artifact(self, artifact_string):
        artifact_string, base_url, is_id = util.parse_artifact_string(artifact_string)
        if is_id:
-            code_artifact =
-                artifact_string, self._api._client
-            )
+            code_artifact = wandb.Artifact._from_id(artifact_string, self._api._client)
        else:
            code_artifact = self._api.artifact(name=artifact_string, type="code")
        if code_artifact is None:
@@ -4412,7 +4410,7 @@
        run_config = {}
        for key, item in config.items():
            if util._is_artifact_object(item):
-                if isinstance(item, wandb.Artifact) and item.
+                if isinstance(item, wandb.Artifact) and item.is_draft():
                    raise ValueError("Cannot queue jobs with unlogged artifacts")
                run_config[key] = util.artifact_to_json(item)
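
The public-API changes above consolidate artifact handling onto the unified wandb.Artifact class: logged artifacts are loaded via Artifact._from_name/_from_attrs/_from_id, and unlogged drafts are detected with is_draft(). A minimal sketch of the user-facing call path, assuming a hypothetical my-entity/my-project/dataset:v3 artifact already exists on the server:

    import wandb

    api = wandb.Api()

    # Fetch a logged artifact by "entity/project/name:version" (or ":alias").
    artifact = api.artifact("my-entity/my-project/dataset:v3", type="dataset")

    # Artifacts fetched through the public API are not drafts, so
    # Run.use_artifact()/Run.log_artifact() accept them instead of raising the
    # "Only existing artifacts are accepted by this api" error shown above.
    print(artifact.type, artifact.is_draft())
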
wandb/beta/workflows.py
CHANGED
@@ -5,12 +5,12 @@ from typing import Any, Dict, List, Optional, Union
 import wandb
 import wandb.data_types as data_types
 from wandb.data_types import _SavedModel
-from wandb.sdk.artifacts.artifact import Artifact
+from wandb.sdk.artifacts.artifact import Artifact
 from wandb.sdk.artifacts.artifact_manifest_entry import ArtifactManifestEntry
 
 
 def _add_any(
-    artifact:
+    artifact: Artifact,
     path_or_obj: Union[
         str, ArtifactManifestEntry, data_types.WBValue
     ],  # todo: add dataframe
@@ -23,8 +23,7 @@ def _add_any(
         be moved to the Artifact class in the future.
 
     Args:
-        artifact: `
-            `wandb.Artifact(...)`
+        artifact: `Artifact` - artifact created with `wandb.Artifact(...)`
         path_or_obj: `Union[str, ArtifactManifestEntry, data_types.WBValue]` - either a
             str or valid object which indicates what to add to an artifact.
 
@@ -62,7 +61,7 @@ def _log_artifact_version(
     project: Optional[str] = None,
     scope_project: Optional[bool] = None,
     job_type: str = "auto",
-) ->
+) -> Artifact:
     """Create an artifact, populate it, and log it with a run.
 
     If a run is not present, we create one.
@@ -83,7 +82,7 @@
         Used to identify runs of a certain job type, i.e "evaluation".
 
     Returns:
-
+        Artifact
 
     """
     if wandb.run is None:

wandb/cli/cli.py
CHANGED
@@ -949,6 +949,9 @@ def launch_sweep(
         wandb.termerror("A project must be configured when using launch")
         return
 
+    # get personal username, not team name or service account, default to entity
+    author = api.viewer().get("username") or entity
+
     # if not sweep_config XOR resume_id
     if not (config or resume_id):
         wandb.termerror("'config' and/or 'resume_id' required")
@@ -993,40 +996,37 @@
         if not found:
             wandb.termerror(f"Could not find sweep {entity}/{project}/{resume_id}")
             return
+
+        if found.get("state") == "RUNNING":
+            wandb.termerror(
+                f"Cannot resume sweep {entity}/{project}/{resume_id}, it is already running"
+            )
+            return
+
         sweep_obj_id = found["id"]
         sweep_config = yaml.safe_load(found["config"])
         wandb.termlog(f"Resuming from existing sweep {entity}/{project}/{resume_id}")
         if len(parsed_user_config.keys()) > 0:
             wandb.termwarn(
-                "Sweep
+                "Sweep parameters loaded from resumed sweep, ignoring provided config"
             )
 
-
-
-
-
-
-
-
-
-
-
-
-
-        if scheduler_job and scheduler_job != prev_sweep_run_spec["job"]:
-            wandb.termerror(
-                f"Resuming a launch sweep with a different scheduler job is not supported. Loaded from sweep: {prev_sweep_run_spec['job']}, Provided in config: {scheduler_job}"
-            )
-            return False
-
-        # grab the queue from previously run scheduler if not specified
-        if not queue and prev_sweep_run_spec.get("overrides", {}).get(
-            "run_config", {}
-        ).get("sweep_args", {}).get("queue"):
-            queue = prev_sweep_run_spec["overrides"]["run_config"]["sweep_args"][
-                "queue"
-            ]
+        prev_scheduler = json.loads(found.get("scheduler") or "{}")
+        run_spec = json.loads(prev_scheduler.get("run_spec", "{}"))
+        if (
+            scheduler_job
+            and run_spec.get("job")
+            and run_spec.get("job") != scheduler_job
+        ):
+            wandb.termerror(
+                f"Resuming a launch sweep with a different scheduler job is not supported. Job loaded from sweep: {run_spec.get('job')}, job in config: {scheduler_job}"
+            )
+            return
 
+        prev_scheduler_args, prev_settings = sweep_utils.get_previous_args(run_spec)
+        # Passed in scheduler_args and settings override previous
+        scheduler_args.update(prev_scheduler_args)
+        settings.update(prev_settings)
     if not queue:
         wandb.termerror(
             "Launch-sweeps require setting a 'queue', use --queue option or a 'queue' key in the 'launch' section in the config"
@@ -1039,7 +1039,7 @@
         sweep_config=sweep_config,
         queue=queue,
         project=project,
-        author=
+        author=author,
     )
     if not args:
         return
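
Resuming a launch sweep now also restores state from the previous scheduler run: the sweep record carries a scheduler field that is a JSON string whose run_spec is itself JSON-encoded, and the CLI refuses to resume when a different scheduler job is supplied. A minimal sketch of that parsing against a hypothetical payload (the field layout mirrors the code above; the job names are made up):

    import json

    # Hypothetical sweep record as returned by the backend.
    found = {
        "scheduler": json.dumps(
            {"run_spec": json.dumps({"job": "entity/project/sweep-scheduler:v1"})}
        )
    }

    prev_scheduler = json.loads(found.get("scheduler") or "{}")
    run_spec = json.loads(prev_scheduler.get("run_spec", "{}"))

    scheduler_job = "entity/project/other-scheduler:v2"
    if scheduler_job and run_spec.get("job") and run_spec.get("job") != scheduler_job:
        print("Resuming a launch sweep with a different scheduler job is not supported")
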
wandb/data_types.py
CHANGED
@@ -47,6 +47,7 @@ from .sdk.data_types.molecule import Molecule
 from .sdk.data_types.object_3d import Object3D
 from .sdk.data_types.plotly import Plotly
 from .sdk.data_types.saved_model import _SavedModel
+from .sdk.data_types.trace_tree import WBTraceTree
 from .sdk.data_types.video import Video
 from .sdk.lib import runid
 
@@ -67,6 +68,7 @@ __all__ = [
     "Object3D",
     "Plotly",
     "Video",
+    "WBTraceTree",
    "_SavedModel",
    # Typed Legacy Exports (I'd like to remove these)
    "ImageMask",
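
With WBTraceTree re-exported from wandb.data_types, trace trees can be logged directly from user code. A minimal sketch; the Span/SpanKind helpers and their keyword arguments are assumed from wandb.sdk.data_types.trace_tree and should be checked against the installed version, and the project name is hypothetical:

    import wandb
    from wandb.data_types import WBTraceTree
    from wandb.sdk.data_types.trace_tree import Span, SpanKind  # assumed helpers

    run = wandb.init(project="trace-demo")  # hypothetical project

    # Describe one LLM call as a single root span (field names assumed).
    root_span = Span(
        name="llm_call",
        span_kind=SpanKind.LLM,
        start_time_ms=0,
        end_time_ms=1200,
    )

    # Log the trace the same way the removed tracer did: as a run metric.
    run.log({"langchain_trace": WBTraceTree(root_span=root_span)})
    run.finish()
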
wandb/integration/langchain/wandb_tracer.py
CHANGED
@@ -14,22 +14,10 @@ integration will not break user code. The one exception to the rule is at import
 LangChain is not installed, or the symbols are not in the same place, the appropriate error
 will be raised when importing this module.
 """
-import sys
-
-if sys.version_info >= (3, 8):
-    from typing import TypedDict
-else:
-    from typing_extensions import TypedDict
-
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Union
-
 from packaging import version
 
-import wandb
 import wandb.util
-from wandb.sdk.
-from wandb.sdk.lib import telemetry as wb_telemetry
-from wandb.sdk.lib.paths import StrPath
+from wandb.sdk.lib import deprecate
 
 langchain = wandb.util.get_module(
     name="langchain",
@@ -37,174 +25,23 @@ langchain = wandb.util.get_module(
     "package installed. Please install it with `pip install langchain`.",
 )
 
-if version.parse(langchain.__version__) < version.parse("0.0.
+if version.parse(langchain.__version__) < version.parse("0.0.188"):
     raise ValueError(
-        "The Weights & Biases Langchain integration does not support versions 0.0.
-        "To ensure proper functionality, please use version 0.0.
+        "The Weights & Biases Langchain integration does not support versions 0.0.187 and lower. "
+        "To ensure proper functionality, please use version 0.0.188 or higher."
     )
 
-# We want these imports after the import_langchain() call, so that we can
-# catch the ImportError if langchain is not installed.
-
 # isort: off
-from langchain.callbacks.tracers
-
-
-
-
-
-
-
-
-
-
-
-
-from wandb import Settings as WBSettings
-from wandb.wandb_run import Run as WBRun
-
-
-class WandbRunArgs(TypedDict):
-    job_type: Optional[str]
-    dir: Optional[StrPath]
-    config: Union[Dict, str, None]
-    project: Optional[str]
-    entity: Optional[str]
-    reinit: Optional[bool]
-    tags: Optional[Sequence]
-    group: Optional[str]
-    name: Optional[str]
-    notes: Optional[str]
-    magic: Optional[Union[dict, str, bool]]
-    config_exclude_keys: Optional[List[str]]
-    config_include_keys: Optional[List[str]]
-    anonymous: Optional[str]
-    mode: Optional[str]
-    allow_val_change: Optional[bool]
-    resume: Optional[Union[bool, str]]
-    force: Optional[bool]
-    tensorboard: Optional[bool]
-    sync_tensorboard: Optional[bool]
-    monitor_gym: Optional[bool]
-    save_code: Optional[bool]
-    id: Optional[str]
-    settings: Union["WBSettings", Dict[str, Any], None]
-
-
-class WandbTracer(BaseTracer):
-    """Callback Handler that logs to Weights and Biases.
-
-    This handler will log the model architecture and run traces to Weights and Biases. This will
-    ensure that all LangChain activity is logged to W&B.
-    """
-
-    _run: Optional["WBRun"] = None
-    _run_args: Optional[WandbRunArgs] = None
-
-    @classmethod
-    def init(
-        cls,
-        run_args: Optional[WandbRunArgs] = None,
-        include_stdout: bool = True,
-        additional_handlers: Optional[List["BaseCallbackHandler"]] = None,
-    ) -> None:
-        """Method provided for backwards compatibility. Please directly construct `WandbTracer` instead."""
-        message = """Global autologging is not currently supported for the LangChain integration.
-Please directly construct a `WandbTracer` and add it to the list of callbacks. For example:
-
-LLMChain(llm, callbacks=[WandbTracer()])
-# end of notebook / script:
-WandbTracer.finish()"""
-        wandb.termlog(message)
-
-    def __init__(self, run_args: Optional[WandbRunArgs] = None, **kwargs: Any) -> None:
-        """Initializes the WandbTracer.
-
-        Parameters:
-            run_args: (dict, optional) Arguments to pass to `wandb.init()`. If not provided, `wandb.init()` will be
-                called with no arguments. Please refer to the `wandb.init` for more details.
-
-        To use W&B to monitor all LangChain activity, add this tracer like any other langchain callback
-        ```
-        from wandb.integration.langchain import WandbTracer
-        LLMChain(llm, callbacks=[WandbTracer()])
-        # end of notebook / script:
-        WandbTracer.finish()
-        ```.
-        """
-        super().__init__(**kwargs)
-        self._run_args = run_args
-        self._ensure_run(should_print_url=(wandb.run is None))
-
-    @staticmethod
-    def finish() -> None:
-        """Waits for all asynchronous processes to finish and data to upload.
-
-        Proxy for `wandb.finish()`.
-        """
-        wandb.finish()
-
-    def _log_trace_from_run(self, run: "Run") -> None:
-        """Logs a LangChain Run to W*B as a W&B Trace."""
-        self._ensure_run()
-
-        root_span = safely_convert_lc_run_to_wb_span(run)
-        if root_span is None:
-            return
-
-        model_dict = None
-
-        # TODO: Uncomment this once we have a way to get the model from a run
-        # model = safely_get_span_producing_model(run)
-        # if model is not None:
-        #     model_dict = safely_convert_model_to_dict(model)
-
-        model_trace = trace_tree.WBTraceTree(
-            root_span=root_span,
-            model_dict=model_dict,
+from langchain.callbacks.tracers import WandbTracer  # noqa: E402, I001
+
+
+class WandbTracer(WandbTracer):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        deprecate.deprecate(
+            field_name=deprecate.Deprecated.langchain_tracer,
+            warning_message="This feature is deprecated and has been moved to `langchain`. Enable tracing by setting "
+            "LANGCHAIN_WANDB_TRACING=true in your environment. See the documentation at "
+            "https://python.langchain.com/docs/ecosystem/integrations/agent_with_wandb_tracing for guidance. "
+            "Replace your current import with `from langchain.callbacks.tracers import WandbTracer`.",
         )
-        wandb.run.log({"langchain_trace": model_trace})
-
-    def _ensure_run(self, should_print_url=False) -> None:
-        """Ensures an active W&B run exists.
-
-        If not, will start a new run with the provided run_args.
-        """
-        if wandb.run is None:
-            # Make a shallow copy of the run args, so we don't modify the original
-            run_args = self._run_args or {}  # type: ignore
-            run_args: dict = {**run_args}  # type: ignore
-
-            # Prefer to run in silent mode since W&B has a lot of output
-            # which can be undesirable when dealing with text-based models.
-            if "settings" not in run_args:  # type: ignore
-                run_args["settings"] = {"silent": True}  # type: ignore
-
-            # Start the run and add the stream table
-            wandb.init(**run_args)
-
-            if should_print_url:
-                print_wandb_init_message(wandb.run.settings.run_url)
-
-        with wb_telemetry.context(wandb.run) as tel:
-            tel.feature.langchain_tracer = True
-
-    # Start of required methods (these methods are required by the BaseCallbackHandler interface)
-    @property
-    def always_verbose(self) -> bool:
-        """Whether to call verbose callbacks even if verbose is False."""
-        return True
-
-    def _generate_id(self) -> Optional[Union[int, str]]:
-        """Generate an id for a run."""
-        return None
-
-    def _persist_run(self, run: "Run") -> None:
-        """Persist a run."""
-        try:
-            self._log_trace_from_run(run)
-        except Exception:
-            # Silently ignore errors to not break user code
-            pass
-
-    # End of required methods
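
The deprecation warning above points at LangChain's own W&B tracing. A minimal sketch of the replacement setup it describes (the chain call is illustrative and commented out; only the environment variable and the import come from the warning message):

    import os

    # Enable W&B tracing inside LangChain, as suggested by the deprecation message.
    os.environ["LANGCHAIN_WANDB_TRACING"] = "true"

    # The tracer now lives in langchain, not wandb.integration.langchain.
    from langchain.callbacks.tracers import WandbTracer  # noqa: E402

    # Hypothetical usage: pass the tracer as a callback on a chain or LLM call.
    # result = llm_chain.run("prompt", callbacks=[WandbTracer()])
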
wandb/integration/sagemaker/config.py
CHANGED
@@ -20,9 +20,9 @@ def parse_sm_config() -> Dict[str, Any]:
     # Hyperparameter searches quote configs...
     for k, v in json.load(open(sm_files.SM_PARAM_CONFIG)).items():
         cast = v.strip('"')
-        if re.match(r"
+        if re.match(r"^-?[\d]+$", cast):
             cast = int(cast)
-        elif re.match(r"
+        elif re.match(r"^-?[.\d]+$", cast):
             cast = float(cast)
         conf[k] = cast
     return conf
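
The anchored patterns above only cast a SageMaker hyperparameter when the whole quoted value is numeric; anything else is left as a string. A small sketch of the same casting rule applied to standalone values (sample inputs are illustrative):

    import re

    def cast_param(value: str):
        # Mirror parse_sm_config: whole-string int, then whole-string float,
        # otherwise keep the original string.
        value = value.strip('"')
        if re.match(r"^-?[\d]+$", value):
            return int(value)
        elif re.match(r"^-?[.\d]+$", value):
            return float(value)
        return value

    print(cast_param('"42"'))    # 42 (int)
    print(cast_param('"-0.5"'))  # -0.5 (float)
    print(cast_param('"3x"'))    # '3x' is left as a string
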
wandb/integration/tensorboard/log.py
CHANGED
@@ -9,7 +9,7 @@ from wandb.sdk.lib import telemetry
 from wandb.viz import custom_chart
 
 if TYPE_CHECKING:
-    import numpy as np
+    import numpy as np
 
     from wandb.sdk.internal.tb_watcher import TBHistory
 
@@ -42,7 +42,7 @@ def make_ndarray(tensor: Any) -> Optional["np.ndarray"]:
         if res.dtype == "object":
             return None
         else:
-            return res
+            return res  # type: ignore
     else:
         wandb.termwarn(
             "Can't convert tensor summary, upgrade tensorboard with `pip"
@@ -167,7 +167,7 @@ def tf_summary_to_dict(  # noqa: C901
             try:
                 # TODO: we should just re-bin if there are too many buckets
                 values[namespaced_tag(value.tag, namespace)] = wandb.Histogram(
-                    np_histogram=(counts, bins)
+                    np_histogram=(counts, bins)  # type: ignore
                 )
             except ValueError:
                 wandb.termwarn(
@@ -238,7 +238,7 @@ def tf_summary_to_dict(  # noqa: C901
             )
             try:
                 # TODO: we should just re-bin if there are too many buckets
-                values[tag] = wandb.Histogram(np_histogram=np_histogram)
+                values[tag] = wandb.Histogram(np_histogram=np_histogram)  # type: ignore
             except ValueError:
                 wandb.termwarn(
                     f"Not logging key {tag!r}. "
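
For reference, wandb.Histogram accepts a precomputed (counts, bin_edges) pair through np_histogram, which is exactly what the TensorBoard converter passes through above. A minimal sketch with synthetic data (project name is hypothetical):

    import numpy as np
    import wandb

    run = wandb.init(project="hist-demo")

    # np.histogram returns (counts, bin_edges); pass that tuple straight through.
    data = np.random.normal(size=1000)
    counts, bins = np.histogram(data, bins=64)
    run.log({"activations": wandb.Histogram(np_histogram=(counts, bins))})
    run.finish()
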
wandb/proto/v3/wandb_telemetry_pb2.py
CHANGED
@@ -15,7 +15,7 @@ _sym_db = _symbol_database.Default()
 from wandb.proto import wandb_base_pb2 as wandb_dot_proto_dot_wandb__base__pb2
 
 
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n!wandb/proto/wandb_telemetry.proto\x12\x0ewandb_internal\x1a\x1cwandb/proto/wandb_base.proto ...')
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n!wandb/proto/wandb_telemetry.proto\x12\x0ewandb_internal\x1a\x1cwandb/proto/wandb_base.proto ...')
(regenerated serialized FileDescriptor; the full byte strings are abbreviated here)
@@ -93,13 +93,13 @@ if _descriptor._USE_C_DESCRIPTORS == False:
   _IMPORTS._serialized_start=541
   _IMPORTS._serialized_end=2211
   _FEATURE._serialized_start=2214
-  _FEATURE._serialized_end=
-  _ENV._serialized_start=
-  _ENV._serialized_end=
-  _LABELS._serialized_start=
-  _LABELS._serialized_end=
-  _DEPRECATED._serialized_start=
-  _DEPRECATED._serialized_end=
-  _ISSUES._serialized_start=
-  _ISSUES._serialized_end=
+  _FEATURE._serialized_end=3524
+  _ENV._serialized_start=3527
+  _ENV._serialized_end=3805
+  _LABELS._serialized_start=3807
+  _LABELS._serialized_end=3879
+  _DEPRECATED._serialized_start=3882
+  _DEPRECATED._serialized_end=4164
+  _ISSUES._serialized_start=4166
+  _ISSUES._serialized_end=4290
 # @@protoc_insertion_point(module_scope)