datachain 0.22.0__py3-none-any.whl → 0.23.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only, and reflects the changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of datachain might be problematic. See the package's registry page for more details.

@@ -1059,6 +1059,39 @@ class Catalog:
1059
1059
 
1060
1060
  return self.get_dataset(name, project)
1061
1061
 
1062
+ def get_full_dataset_name(
1063
+ self,
1064
+ name: str,
1065
+ project_name: Optional[str] = None,
1066
+ namespace_name: Optional[str] = None,
1067
+ ) -> tuple[str, str, str]:
1068
+ """
1069
+ Returns dataset name together with separated namespace and project name.
1070
+ It takes into account all the ways namespace and project can be added.
1071
+ """
1072
+ parsed_namespace_name, parsed_project_name, name = parse_dataset_name(name)
1073
+
1074
+ namespace_env = os.environ.get("DATACHAIN_NAMESPACE")
1075
+ project_env = os.environ.get("DATACHAIN_PROJECT")
1076
+ if project_env and len(project_env.split(".")) == 2:
1077
+ # we allow setting both namespace and project in DATACHAIN_PROJECT
1078
+ namespace_env, project_env = project_env.split(".")
1079
+
1080
+ namespace_name = (
1081
+ parsed_namespace_name
1082
+ or namespace_name
1083
+ or namespace_env
1084
+ or self.metastore.default_namespace_name
1085
+ )
1086
+ project_name = (
1087
+ parsed_project_name
1088
+ or project_name
1089
+ or project_env
1090
+ or self.metastore.default_project_name
1091
+ )
1092
+
1093
+ return namespace_name, project_name, name
1094
+
1062
1095
  def get_dataset(
1063
1096
  self, name: str, project: Optional[Project] = None
1064
1097
  ) -> DatasetRecord:
@@ -8,7 +8,6 @@ if TYPE_CHECKING:
8
8
 
9
9
  from datachain.cli.utils import determine_flavors
10
10
  from datachain.config import Config
11
- from datachain.dataset import parse_dataset_name
12
11
  from datachain.error import DataChainError, DatasetNotFoundError
13
12
  from datachain.studio import list_datasets as list_datasets_studio
14
13
 
@@ -106,9 +105,8 @@ def list_datasets_local(catalog: "Catalog", name: Optional[str] = None):
106
105
 
107
106
 
108
107
  def list_datasets_local_versions(catalog: "Catalog", name: str):
109
- namespace_name, project_name, name = parse_dataset_name(name)
110
- namespace_name = namespace_name or catalog.metastore.default_namespace_name
111
- project_name = project_name or catalog.metastore.default_project_name
108
+ namespace_name, project_name, name = catalog.get_full_dataset_name(name)
109
+
112
110
  project = catalog.metastore.get_project(project_name, namespace_name)
113
111
  ds = catalog.get_dataset(name, project)
114
112
  for v in ds.versions:
@@ -137,9 +135,7 @@ def rm_dataset(
137
135
  studio: Optional[bool] = False,
138
136
  team: Optional[str] = None,
139
137
  ):
140
- namespace_name, project_name, name = parse_dataset_name(name)
141
- namespace_name = namespace_name or catalog.metastore.default_namespace_name
142
- project_name = project_name or catalog.metastore.default_project_name
138
+ namespace_name, project_name, name = catalog.get_full_dataset_name(name)
143
139
 
144
140
  if not catalog.metastore.is_local_dataset(namespace_name) and studio:
145
141
  from datachain.studio import remove_studio_dataset
@@ -166,9 +162,7 @@ def edit_dataset(
166
162
  attrs: Optional[list[str]] = None,
167
163
  team: Optional[str] = None,
168
164
  ):
169
- namespace_name, project_name, name = parse_dataset_name(name)
170
- namespace_name = namespace_name or catalog.metastore.default_namespace_name
171
- project_name = project_name or catalog.metastore.default_project_name
165
+ namespace_name, project_name, name = catalog.get_full_dataset_name(name)
172
166
 
173
167
  if catalog.metastore.is_local_dataset(namespace_name):
174
168
  try:
@@ -132,6 +132,7 @@ class AbstractMetastore(ABC, Serializable):
132
132
  description: Optional[str] = None,
133
133
  uuid: Optional[str] = None,
134
134
  ignore_if_exists: bool = True,
135
+ validate: bool = True,
135
136
  **kwargs,
136
137
  ) -> Namespace:
137
138
  """Creates new namespace"""
@@ -192,6 +193,7 @@ class AbstractMetastore(ABC, Serializable):
192
193
  description: Optional[str] = None,
193
194
  uuid: Optional[str] = None,
194
195
  ignore_if_exists: bool = True,
196
+ validate: bool = True,
195
197
  **kwargs,
196
198
  ) -> Project:
197
199
  """Creates new project in specific namespace"""
@@ -725,8 +727,11 @@ class AbstractDBMetastore(AbstractMetastore):
725
727
  description: Optional[str] = None,
726
728
  uuid: Optional[str] = None,
727
729
  ignore_if_exists: bool = True,
730
+ validate: bool = True,
728
731
  **kwargs,
729
732
  ) -> Namespace:
733
+ if validate:
734
+ Namespace.validate_name(name)
730
735
  query = self._namespaces_insert().values(
731
736
  name=name,
732
737
  uuid=uuid or str(uuid4()),
@@ -775,12 +780,15 @@ class AbstractDBMetastore(AbstractMetastore):
775
780
  description: Optional[str] = None,
776
781
  uuid: Optional[str] = None,
777
782
  ignore_if_exists: bool = True,
783
+ validate: bool = True,
778
784
  **kwargs,
779
785
  ) -> Project:
786
+ if validate:
787
+ Project.validate_name(name)
780
788
  try:
781
789
  namespace = self.get_namespace(namespace_name)
782
790
  except NamespaceNotFoundError:
783
- namespace = self.create_namespace(namespace_name)
791
+ namespace = self.create_namespace(namespace_name, validate=validate)
784
792
 
785
793
  query = self._projects_insert().values(
786
794
  namespace_id=namespace.id,
@@ -817,11 +825,14 @@ class AbstractDBMetastore(AbstractMetastore):
817
825
  """Gets a single project inside some namespace by name"""
818
826
  n = self._namespaces
819
827
  p = self._projects
828
+ validate = True
829
+
820
830
  if self._is_listing_project(name, namespace_name) or self._is_default_project(
821
831
  name, namespace_name
822
832
  ):
823
833
  # we are always creating default and listing projects if they don't exist
824
834
  create = True
835
+ validate = False
825
836
 
826
837
  query = self._projects_select(
827
838
  *(getattr(n.c, f) for f in self._namespaces_fields),
@@ -834,7 +845,7 @@ class AbstractDBMetastore(AbstractMetastore):
834
845
  rows = list(self.db.execute(query, conn=conn))
835
846
  if not rows:
836
847
  if create:
837
- return self.create_project(namespace_name, name)
848
+ return self.create_project(namespace_name, name, validate=validate)
838
849
  raise ProjectNotFoundError(
839
850
  f"Project {name} in namespace {namespace_name} not found."
840
851
  )
@@ -468,8 +468,12 @@ class SQLiteMetastore(AbstractDBMetastore):
468
468
  be created implicitly though, to keep the same fully qualified name with
469
469
  Studio dataset.
470
470
  """
471
- system_namespace = self.create_namespace(Namespace.system(), "System namespace")
472
- self.create_project(system_namespace.name, Project.listing(), "Listing project")
471
+ system_namespace = self.create_namespace(
472
+ Namespace.system(), "System namespace", validate=False
473
+ )
474
+ self.create_project(
475
+ system_namespace.name, Project.listing(), "Listing project", validate=False
476
+ )
473
477
 
474
478
  def _check_schema_version(self) -> None:
475
479
  """
datachain/dataset.py CHANGED
@@ -81,8 +81,10 @@ def create_dataset_uri(
81
81
  def parse_dataset_name(name: str) -> tuple[Optional[str], Optional[str], str]:
82
82
  """Parses dataset name and returns namespace, project and name"""
83
83
  if not name:
84
- raise ValueError("Name must be defined to parse it")
84
+ raise InvalidDatasetNameError("Name must be defined to parse it")
85
85
  split = name.split(".")
86
+ if len(split) > 3:
87
+ raise InvalidDatasetNameError(f"Invalid dataset name {name}")
86
88
  name = split[-1]
87
89
  project_name = split[-2] if len(split) > 1 else None
88
90
  namespace_name = split[-3] if len(split) > 2 else None
@@ -24,7 +24,7 @@ from pydantic import BaseModel
24
24
  from tqdm import tqdm
25
25
 
26
26
  from datachain import semver
27
- from datachain.dataset import DatasetRecord, parse_dataset_name
27
+ from datachain.dataset import DatasetRecord
28
28
  from datachain.delta import delta_disabled
29
29
  from datachain.error import ProjectCreateNotAllowedError, ProjectNotFoundError
30
30
  from datachain.func import literal
@@ -557,6 +557,7 @@ class DataChain:
557
557
  update_version: which part of the dataset version to automatically increase.
558
558
  Available values: `major`, `minor` or `patch`. Default is `patch`.
559
559
  """
560
+ catalog = self.session.catalog
560
561
  if version is not None:
561
562
  semver.validate(version)
562
563
 
@@ -570,17 +571,10 @@ class DataChain:
570
571
  " patch"
571
572
  )
572
573
 
573
- namespace_name, project_name, name = parse_dataset_name(name)
574
-
575
- namespace_name = (
576
- namespace_name
577
- or self._settings.namespace
578
- or self.session.catalog.metastore.default_namespace_name
579
- )
580
- project_name = (
581
- project_name
582
- or self._settings.project
583
- or self.session.catalog.metastore.default_project_name
574
+ namespace_name, project_name, name = catalog.get_full_dataset_name(
575
+ name,
576
+ namespace_name=self._settings.namespace,
577
+ project_name=self._settings.project,
584
578
  )
585
579
 
586
580
  try:
@@ -1,7 +1,6 @@
1
1
  from collections.abc import Sequence
2
2
  from typing import TYPE_CHECKING, Optional, Union, get_origin, get_type_hints
3
3
 
4
- from datachain.dataset import parse_dataset_name
5
4
  from datachain.error import (
6
5
  DatasetNotFoundError,
7
6
  DatasetVersionNotFoundError,
@@ -125,11 +124,11 @@ def read_dataset(
125
124
  session = Session.get(session)
126
125
  catalog = session.catalog
127
126
 
128
- namespace_name, project_name, name = parse_dataset_name(name)
129
- namespace_name = (
130
- namespace_name or namespace or catalog.metastore.default_namespace_name
127
+ namespace_name, project_name, name = catalog.get_full_dataset_name(
128
+ name,
129
+ project_name=project,
130
+ namespace_name=namespace,
131
131
  )
132
- project_name = project_name or project or catalog.metastore.default_project_name
133
132
 
134
133
  if version is not None:
135
134
  try:
@@ -320,11 +319,11 @@ def delete_dataset(
320
319
  session = Session.get(session, in_memory=in_memory)
321
320
  catalog = session.catalog
322
321
 
323
- namespace_name, project_name, name = parse_dataset_name(name)
324
- namespace_name = (
325
- namespace_name or namespace or catalog.metastore.default_namespace_name
322
+ namespace_name, project_name, name = catalog.get_full_dataset_name(
323
+ name,
324
+ project_name=project,
325
+ namespace_name=namespace,
326
326
  )
327
- project_name = project_name or project or catalog.metastore.default_project_name
328
327
 
329
328
  if not catalog.metastore.is_local_dataset(namespace_name) and studio:
330
329
  return remove_studio_dataset(
@@ -97,4 +97,4 @@ def read_records(
97
97
  for chunk in batched(records, INSERT_BATCH_SIZE):
98
98
  warehouse.insert_rows(table, chunk)
99
99
  warehouse.insert_rows_done(table)
100
- return read_dataset(name=dsr.name, session=session, settings=settings)
100
+ return read_dataset(name=dsr.full_name, session=session, settings=settings)
@@ -25,6 +25,7 @@ from pydantic import BaseModel, Field, create_model
25
25
  from sqlalchemy import ColumnElement
26
26
  from typing_extensions import Literal as LiteralEx
27
27
 
28
+ from datachain.func import literal
28
29
  from datachain.func.func import Func
29
30
  from datachain.lib.convert.python_to_sql import python_to_sql
30
31
  from datachain.lib.convert.sql_to_python import sql_to_python
@@ -659,6 +660,7 @@ class SignalSchema:
659
660
 
660
661
  def mutate(self, args_map: dict) -> "SignalSchema":
661
662
  new_values = self.values.copy()
663
+ primitives = (bool, str, int, float)
662
664
 
663
665
  for name, value in args_map.items():
664
666
  if isinstance(value, Column) and value.name in self.values:
@@ -679,6 +681,12 @@ class SignalSchema:
679
681
  # adding new signal with function
680
682
  new_values[name] = value.get_result_type(self)
681
683
  continue
684
+ if isinstance(value, primitives):
685
+ # For primitives, store the type, not the value
686
+ val = literal(value)
687
+ val.type = python_to_sql(type(value))()
688
+ new_values[name] = sql_to_python(val)
689
+ continue
682
690
  if isinstance(value, ColumnElement):
683
691
  # adding new signal
684
692
  new_values[name] = sql_to_python(value)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: datachain
3
- Version: 0.22.0
3
+ Version: 0.23.0
4
4
  Summary: Wrangle unstructured AI data at scale
5
5
  Author-email: Dmitry Petrov <support@dvc.org>
6
6
  License-Expression: Apache-2.0
@@ -3,7 +3,7 @@ datachain/__main__.py,sha256=hG3Y4ARGEqe1AWwNMd259rBlqtphx1Wk39YbueQ0yV8,91
3
3
  datachain/asyn.py,sha256=RH_jFwJcTXxhEFomaI9yL6S3Onau6NZ6FSKfKFGtrJE,9689
4
4
  datachain/cache.py,sha256=ESVRaCJXEThMIfGEFVHx6wJPOZA7FYk9V6WxjyuqUBY,3626
5
5
  datachain/config.py,sha256=g8qbNV0vW2VEKpX-dGZ9pAn0DAz6G2ZFcr7SAV3PoSM,4272
6
- datachain/dataset.py,sha256=--7UI8lZ2lVhk2mNCsHACGigQe96-jBUcbnDMebj-cE,24089
6
+ datachain/dataset.py,sha256=Dn8zx_4zuiW28dF-cNtAXFLS7Her0015YYdLlFGwMas,24195
7
7
  datachain/delta.py,sha256=4RqLLc9dJLF8x9GG9IDgi86DwuPerZQ4HAUnNBeACw8,8446
8
8
  datachain/error.py,sha256=OWwWMkzZYJrkcoEDGhJHMf7SfKvxcsOLRF94mjPf29I,1609
9
9
  datachain/job.py,sha256=x5PB6d5sqx00hePNNkirESlOVAvnmkEM5ygUgQmAhsk,1262
@@ -21,13 +21,13 @@ datachain/studio.py,sha256=bLok-eJNFRHQScEyAyA_Fas52dmijd5r-73KudWxV4k,13337
21
21
  datachain/telemetry.py,sha256=0A4IOPPp9VlP5pyW9eBfaTK3YhHGzHl7dQudQjUAx9A,994
22
22
  datachain/utils.py,sha256=DNqOi-Ydb7InyWvD9m7_yailxz6-YGpZzh00biQaHNo,15305
23
23
  datachain/catalog/__init__.py,sha256=cMZzSz3VoUi-6qXSVaHYN-agxQuAcz2XSqnEPZ55crE,353
24
- datachain/catalog/catalog.py,sha256=43Yp1xQnwrozl2_VhVWQDFHxYPunXP95v7n1PoGc6mw,63546
24
+ datachain/catalog/catalog.py,sha256=j8aH0zAHJRCJ_-uOYapK0spp9Go_Ehwxz_1FI3p-Q5I,64733
25
25
  datachain/catalog/datasource.py,sha256=IkGMh0Ttg6Q-9DWfU_H05WUnZepbGa28HYleECi6K7I,1353
26
26
  datachain/catalog/loader.py,sha256=UXjYD6BNRoupPvkiz3-b04jepXhtLHCA4gzKFnXxOtQ,5987
27
27
  datachain/cli/__init__.py,sha256=WvBqnwjG8Wp9xGCn-4eqfoZ3n7Sj1HJemCi4MayJh_c,8221
28
28
  datachain/cli/utils.py,sha256=wrLnAh7Wx8O_ojZE8AE4Lxn5WoxHbOj7as8NWlLAA74,3036
29
29
  datachain/cli/commands/__init__.py,sha256=zp3bYIioO60x_X04A4-IpZqSYVnpwOa1AdERQaRlIhI,493
30
- datachain/cli/commands/datasets.py,sha256=Bva9gTi1HMvvCQPFUPxLYrHQduDlJDWV8EN6IcJcC3Y,6949
30
+ datachain/cli/commands/datasets.py,sha256=LfOe22O9OCEDI8K2cy05Gp4_Q-GFHOHRv4bXQ-USM4s,6472
31
31
  datachain/cli/commands/du.py,sha256=9edEzDEs98K2VYk8Wf-ZMpUzALcgm9uD6YtoqbvtUGU,391
32
32
  datachain/cli/commands/index.py,sha256=eglNaIe1yyIadUHHumjtNbgIjht6kme7SS7xE3YHR88,198
33
33
  datachain/cli/commands/ls.py,sha256=CBmk838Q-EQp04lE2Qdnpsc1GXAkC4-I-b-a_828n1E,5272
@@ -49,10 +49,10 @@ datachain/client/s3.py,sha256=6DNVGLg-woPS1DVlYVX2rIlunNblsuxyOnI1rSzhW3k,7515
49
49
  datachain/data_storage/__init__.py,sha256=9Wit-oe5P46V7CJQTD0BJ5MhOa2Y9h3ddJ4VWTe-Lec,273
50
50
  datachain/data_storage/db_engine.py,sha256=n8ojCbvVMPY2e3SG8fUaaD0b9GkVfpl_Naa_6EiHfWg,3788
51
51
  datachain/data_storage/job.py,sha256=9r0OGwh22bHNIvLHqg8_-eJSP1YYB-BN5HOla5TdCxw,402
52
- datachain/data_storage/metastore.py,sha256=YhkHEHvE--jKoOpCS5LkcLDMekfCX76VwubbXPoAiic,52317
52
+ datachain/data_storage/metastore.py,sha256=9mWYOKK3AoHeKPGFm-WBfPrmnYHhwYeXx5MOueKTe7I,52657
53
53
  datachain/data_storage/schema.py,sha256=o3JbURKXRg3IJyIVA4QjHHkn6byRuz7avbydU2FlvNY,9897
54
54
  datachain/data_storage/serializer.py,sha256=6G2YtOFqqDzJf1KbvZraKGXl2XHZyVml2krunWUum5o,927
55
- datachain/data_storage/sqlite.py,sha256=bGb4_kEFvnGf3ZWekiv8z3VMZBzQyO0bSaNB5RrpUUs,29991
55
+ datachain/data_storage/sqlite.py,sha256=tT_soVi6l_pFSKaDktA1t4qW_vmPvXnvYSf4TZTKZYk,30067
56
56
  datachain/data_storage/warehouse.py,sha256=_7btARw-kd-Nx19S0qW6JqdF3VYyypQXFzsXq68SWKI,32327
57
57
  datachain/diff/__init__.py,sha256=-OFZzgOplqO84iWgGY7kfe60NXaWR9JRIh9T-uJboAM,9668
58
58
  datachain/fs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -85,7 +85,7 @@ datachain/lib/namespaces.py,sha256=it52UbbwB8dzhesO2pMs_nThXiPQ1Ph9sD9I3GQkg5s,2
85
85
  datachain/lib/projects.py,sha256=C-HTzTLUbIB735_iBSV6MjWnntV6gaKCEIkMSR1YEQw,2596
86
86
  datachain/lib/pytorch.py,sha256=oBBd6cxYrcwaFz7IQajKqhGqDdNnwUZWs0wJPRizrjk,7712
87
87
  datachain/lib/settings.py,sha256=9wi0FoHxRxNiyn99pR28IYsMkoo47jQxeXuObQr2Ar0,2929
88
- datachain/lib/signal_schema.py,sha256=Zhg8qThFDf9eoNWFH6KGeYB-sIGys7A_ybq2CUBG7Dg,36127
88
+ datachain/lib/signal_schema.py,sha256=dVEqqrQQ_BS3yzU_49-Gari7IjVyMl1UT8h1WIsZabs,36489
89
89
  datachain/lib/tar.py,sha256=MLcVjzIgBqRuJacCNpZ6kwSZNq1i2tLyROc8PVprHsA,999
90
90
  datachain/lib/text.py,sha256=UNHm8fhidk7wdrWqacEWaA6I9ykfYqarQ2URby7jc7M,1261
91
91
  datachain/lib/udf.py,sha256=3uITkhO8IZnX49aePheObzd5ORYi2DIDYZVMQlBAJ-s,16687
@@ -103,14 +103,14 @@ datachain/lib/convert/values_to_tuples.py,sha256=j5yZMrVUH6W7b-7yUvdCTGI7JCUAYUO
103
103
  datachain/lib/dc/__init__.py,sha256=HD0NYrdy44u6kkpvgGjJcvGz-UGTHui2azghcT8ZUg0,838
104
104
  datachain/lib/dc/csv.py,sha256=q6a9BpapGwP6nwy6c5cklxQumep2fUp9l2LAjtTJr6s,4411
105
105
  datachain/lib/dc/database.py,sha256=g5M6NjYR1T0vKte-abV-3Ejnm-HqxTIMir5cRi_SziE,6051
106
- datachain/lib/dc/datachain.py,sha256=B6z8e33ZAUKbJ-cqQko-VJEtmia2bfUnuqH7BQQVt_A,85998
107
- datachain/lib/dc/datasets.py,sha256=xiVNe7PosuIsyACFhly9qNxGmRQy1J2TQw3AD6uj9UM,12747
106
+ datachain/lib/dc/datachain.py,sha256=dFI7JX5-41HLgA-TUR99dtR1lvk2vokaMC3mbIW1XT4,85814
107
+ datachain/lib/dc/datasets.py,sha256=H7s_4B68MMpRvLrK874QS8xSq1FeT1f6j7qPhnP2i64,12490
108
108
  datachain/lib/dc/hf.py,sha256=PJl2wiLjdRsMz0SYbLT-6H8b-D5i2WjeH7li8HHOk_0,2145
109
109
  datachain/lib/dc/json.py,sha256=dNijfJ-H92vU3soyR7X1IiDrWhm6yZIGG3bSnZkPdAE,2733
110
110
  datachain/lib/dc/listings.py,sha256=eVBUP25W81dv46DLqkv8K0X7N3nxhoZm77gFrByeT_E,4660
111
111
  datachain/lib/dc/pandas.py,sha256=ObueUXDUFKJGu380GmazdG02ARpKAHPhSaymfmOH13E,1489
112
112
  datachain/lib/dc/parquet.py,sha256=zYcSgrWwyEDW9UxGUSVdIVsCu15IGEf0xL8KfWQqK94,1782
113
- datachain/lib/dc/records.py,sha256=AMtfWc7K6mtbW2OiaeIm3SjHTxDGnSgCEQW5u984Qh0,3111
113
+ datachain/lib/dc/records.py,sha256=FpPbApWopUri1gIaSMsfXN4fevja4mjmfb6Q5eiaGxI,3116
114
114
  datachain/lib/dc/storage.py,sha256=8xiV3c6k-sG14RGwNJCp0AbV6L0mNDsTVZ-Est-ccnw,7672
115
115
  datachain/lib/dc/utils.py,sha256=VawOAlJSvAtZbsMg33s5tJe21TRx1Km3QggI1nN6tnw,3984
116
116
  datachain/lib/dc/values.py,sha256=7l1n352xWrEdql2NhBcZ3hj8xyPglWiY4qHjFPjn6iw,1428
@@ -157,9 +157,9 @@ datachain/sql/sqlite/vector.py,sha256=ncW4eu2FlJhrP_CIpsvtkUabZlQdl2D5Lgwy_cbfqR
157
157
  datachain/toolkit/__init__.py,sha256=eQ58Q5Yf_Fgv1ZG0IO5dpB4jmP90rk8YxUWmPc1M2Bo,68
158
158
  datachain/toolkit/split.py,sha256=ktGWzY4kyzjWyR86dhvzw-Zhl0lVk_LOX3NciTac6qo,2914
159
159
  datachain/torch/__init__.py,sha256=gIS74PoEPy4TB3X6vx9nLO0Y3sLJzsA8ckn8pRWihJM,579
160
- datachain-0.22.0.dist-info/licenses/LICENSE,sha256=8DnqK5yoPI_E50bEg_zsHKZHY2HqPy4rYN338BHQaRA,11344
161
- datachain-0.22.0.dist-info/METADATA,sha256=xfdXuYjS-y5_IokpYEC7ZlmB6Wx1ouF7bh6K-TAacJI,13281
162
- datachain-0.22.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
163
- datachain-0.22.0.dist-info/entry_points.txt,sha256=0GMJS6B_KWq0m3VT98vQI2YZodAMkn4uReZ_okga9R4,49
164
- datachain-0.22.0.dist-info/top_level.txt,sha256=lZPpdU_2jJABLNIg2kvEOBi8PtsYikbN1OdMLHk8bTg,10
165
- datachain-0.22.0.dist-info/RECORD,,
160
+ datachain-0.23.0.dist-info/licenses/LICENSE,sha256=8DnqK5yoPI_E50bEg_zsHKZHY2HqPy4rYN338BHQaRA,11344
161
+ datachain-0.23.0.dist-info/METADATA,sha256=cS34xnVaaqHRes2oZ1uJnJVtDZ8L_jIBq6bXyTno8aI,13281
162
+ datachain-0.23.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
163
+ datachain-0.23.0.dist-info/entry_points.txt,sha256=0GMJS6B_KWq0m3VT98vQI2YZodAMkn4uReZ_okga9R4,49
164
+ datachain-0.23.0.dist-info/top_level.txt,sha256=lZPpdU_2jJABLNIg2kvEOBi8PtsYikbN1OdMLHk8bTg,10
165
+ datachain-0.23.0.dist-info/RECORD,,