maxframe-1.0.0rc3-cp38-cp38-win32.whl → maxframe-1.1.0-cp38-cp38-win32.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of maxframe might be problematic.

Files changed (112)
  1. maxframe/_utils.cp38-win32.pyd +0 -0
  2. maxframe/codegen.py +1 -0
  3. maxframe/config/config.py +16 -1
  4. maxframe/conftest.py +52 -14
  5. maxframe/core/entity/executable.py +1 -1
  6. maxframe/core/graph/core.cp38-win32.pyd +0 -0
  7. maxframe/core/operator/base.py +2 -0
  8. maxframe/dataframe/arithmetic/docstring.py +26 -2
  9. maxframe/dataframe/arithmetic/equal.py +4 -2
  10. maxframe/dataframe/arithmetic/greater.py +4 -2
  11. maxframe/dataframe/arithmetic/greater_equal.py +4 -2
  12. maxframe/dataframe/arithmetic/less.py +2 -2
  13. maxframe/dataframe/arithmetic/less_equal.py +4 -2
  14. maxframe/dataframe/arithmetic/not_equal.py +4 -2
  15. maxframe/dataframe/arithmetic/tests/test_arithmetic.py +17 -16
  16. maxframe/dataframe/core.py +26 -2
  17. maxframe/dataframe/datasource/read_odps_query.py +116 -28
  18. maxframe/dataframe/datasource/read_odps_table.py +3 -1
  19. maxframe/dataframe/datasource/tests/test_datasource.py +93 -12
  20. maxframe/dataframe/datastore/to_odps.py +7 -0
  21. maxframe/dataframe/extensions/__init__.py +8 -0
  22. maxframe/dataframe/extensions/apply_chunk.py +649 -0
  23. maxframe/dataframe/extensions/flatjson.py +131 -0
  24. maxframe/dataframe/extensions/flatmap.py +314 -0
  25. maxframe/dataframe/extensions/reshuffle.py +1 -1
  26. maxframe/dataframe/extensions/tests/test_apply_chunk.py +186 -0
  27. maxframe/dataframe/extensions/tests/test_extensions.py +108 -3
  28. maxframe/dataframe/groupby/__init__.py +1 -0
  29. maxframe/dataframe/groupby/aggregation.py +1 -0
  30. maxframe/dataframe/groupby/apply.py +9 -1
  31. maxframe/dataframe/groupby/core.py +1 -1
  32. maxframe/dataframe/groupby/fill.py +4 -1
  33. maxframe/dataframe/groupby/getitem.py +6 -0
  34. maxframe/dataframe/groupby/tests/test_groupby.py +1 -1
  35. maxframe/dataframe/groupby/transform.py +8 -2
  36. maxframe/dataframe/indexing/add_prefix_suffix.py +1 -1
  37. maxframe/dataframe/indexing/loc.py +6 -4
  38. maxframe/dataframe/indexing/rename.py +11 -0
  39. maxframe/dataframe/initializer.py +11 -1
  40. maxframe/dataframe/merge/__init__.py +9 -1
  41. maxframe/dataframe/merge/concat.py +41 -31
  42. maxframe/dataframe/merge/merge.py +1 -1
  43. maxframe/dataframe/merge/tests/test_merge.py +3 -1
  44. maxframe/dataframe/misc/apply.py +3 -0
  45. maxframe/dataframe/misc/drop_duplicates.py +23 -2
  46. maxframe/dataframe/misc/map.py +3 -1
  47. maxframe/dataframe/misc/tests/test_misc.py +24 -2
  48. maxframe/dataframe/misc/transform.py +22 -13
  49. maxframe/dataframe/reduction/__init__.py +3 -0
  50. maxframe/dataframe/reduction/aggregation.py +1 -0
  51. maxframe/dataframe/reduction/median.py +56 -0
  52. maxframe/dataframe/reduction/tests/test_reduction.py +17 -7
  53. maxframe/dataframe/statistics/quantile.py +8 -2
  54. maxframe/dataframe/statistics/tests/test_statistics.py +4 -4
  55. maxframe/dataframe/tests/test_initializer.py +33 -2
  56. maxframe/dataframe/tests/test_utils.py +60 -0
  57. maxframe/dataframe/utils.py +110 -7
  58. maxframe/dataframe/window/expanding.py +5 -3
  59. maxframe/dataframe/window/tests/test_expanding.py +2 -2
  60. maxframe/io/objects/tests/test_object_io.py +39 -12
  61. maxframe/io/odpsio/arrow.py +30 -2
  62. maxframe/io/odpsio/schema.py +28 -8
  63. maxframe/io/odpsio/tableio.py +55 -133
  64. maxframe/io/odpsio/tests/test_schema.py +40 -4
  65. maxframe/io/odpsio/tests/test_tableio.py +5 -5
  66. maxframe/io/odpsio/tests/test_volumeio.py +35 -11
  67. maxframe/io/odpsio/volumeio.py +36 -6
  68. maxframe/learn/contrib/__init__.py +3 -1
  69. maxframe/learn/contrib/graph/__init__.py +15 -0
  70. maxframe/learn/contrib/graph/connected_components.py +215 -0
  71. maxframe/learn/contrib/graph/tests/__init__.py +13 -0
  72. maxframe/learn/contrib/graph/tests/test_connected_components.py +53 -0
  73. maxframe/learn/contrib/llm/__init__.py +16 -0
  74. maxframe/learn/contrib/llm/core.py +54 -0
  75. maxframe/learn/contrib/llm/models/__init__.py +14 -0
  76. maxframe/learn/contrib/llm/models/dashscope.py +73 -0
  77. maxframe/learn/contrib/llm/multi_modal.py +42 -0
  78. maxframe/learn/contrib/llm/text.py +42 -0
  79. maxframe/learn/contrib/xgboost/classifier.py +3 -3
  80. maxframe/learn/contrib/xgboost/predict.py +8 -39
  81. maxframe/learn/contrib/xgboost/train.py +4 -3
  82. maxframe/lib/mmh3.cp38-win32.pyd +0 -0
  83. maxframe/lib/sparse/tests/test_sparse.py +15 -15
  84. maxframe/opcodes.py +10 -1
  85. maxframe/protocol.py +6 -1
  86. maxframe/serialization/core.cp38-win32.pyd +0 -0
  87. maxframe/serialization/core.pyx +13 -1
  88. maxframe/serialization/pandas.py +50 -20
  89. maxframe/serialization/serializables/core.py +24 -5
  90. maxframe/serialization/serializables/field_type.py +4 -1
  91. maxframe/serialization/serializables/tests/test_serializable.py +8 -1
  92. maxframe/serialization/tests/test_serial.py +2 -1
  93. maxframe/session.py +9 -2
  94. maxframe/tensor/__init__.py +19 -7
  95. maxframe/tensor/indexing/getitem.py +2 -0
  96. maxframe/tensor/merge/concatenate.py +23 -20
  97. maxframe/tensor/merge/vstack.py +5 -1
  98. maxframe/tensor/misc/transpose.py +1 -1
  99. maxframe/tests/utils.py +16 -0
  100. maxframe/udf.py +27 -0
  101. maxframe/utils.py +64 -14
  102. {maxframe-1.0.0rc3.dist-info → maxframe-1.1.0.dist-info}/METADATA +2 -2
  103. {maxframe-1.0.0rc3.dist-info → maxframe-1.1.0.dist-info}/RECORD +112 -96
  104. {maxframe-1.0.0rc3.dist-info → maxframe-1.1.0.dist-info}/WHEEL +1 -1
  105. maxframe_client/clients/framedriver.py +4 -1
  106. maxframe_client/fetcher.py +28 -10
  107. maxframe_client/session/consts.py +3 -0
  108. maxframe_client/session/odps.py +104 -20
  109. maxframe_client/session/task.py +42 -26
  110. maxframe_client/session/tests/test_task.py +0 -4
  111. maxframe_client/tests/test_session.py +44 -12
  112. {maxframe-1.0.0rc3.dist-info → maxframe-1.1.0.dist-info}/top_level.txt +0 -0
--- a/maxframe/io/odpsio/tests/test_tableio.py
+++ b/maxframe/io/odpsio/tests/test_tableio.py
@@ -31,7 +31,7 @@ def switch_table_io(request):
     old_use_common_table = options.use_common_table
     try:
         options.use_common_table = request.param
-        yield
+        yield request.param
     finally:
         options.use_common_table = old_use_common_table
 
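Note: the fixture now yields its parameter so the table-IO tests below can embed it in table names, keeping the two parametrized runs from clobbering each other's tables. A minimal, self-contained sketch of the same pytest pattern (names here are illustrative, not from maxframe):

    import pytest

    @pytest.fixture(params=[False, True])
    def use_common_table(request):
        # Yield the parameter so dependent tests can read the active value.
        yield request.param

    def test_unique_table_name(use_common_table):
        # Each parametrization derives a distinct resource name, so the
        # two runs cannot collide on a shared backend table.
        table_name = "test_table_" + str(use_common_table).lower()
        assert table_name in ("test_table_false", "test_table_true")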
@@ -45,7 +45,7 @@ def test_empty_table_io(switch_table_io):
     table_io = ODPSTableIO(o)
 
     # test read from empty table
-    empty_table_name = tn("test_empty_table_halo_read")
+    empty_table_name = tn("test_empty_table_halo_read_" + str(switch_table_io).lower())
     o.delete_table(empty_table_name, if_exists=True)
     tb = o.create_table(empty_table_name, "col1 string", lifecycle=1)
 
@@ -65,7 +65,7 @@ def test_table_io_without_parts(switch_table_io):
     table_io = ODPSTableIO(o)
 
     # test read and write tables without partition
-    no_part_table_name = tn("test_no_part_halo_write")
+    no_part_table_name = tn("test_no_part_halo_write_" + str(switch_table_io).lower())
     o.delete_table(no_part_table_name, if_exists=True)
     col_desc = ",".join(f"{c} double" for c in "abcde") + ", f datetime"
     tb = o.create_table(no_part_table_name, col_desc, lifecycle=1)
@@ -99,7 +99,7 @@ def test_table_io_with_range_reader(switch_table_io):
     table_io = ODPSTableIO(o)
 
     # test read and write tables without partition
-    no_part_table_name = tn("test_no_part_halo_write")
+    no_part_table_name = tn("test_halo_write_range_" + str(switch_table_io).lower())
     o.delete_table(no_part_table_name, if_exists=True)
     tb = o.create_table(
         no_part_table_name, ",".join(f"{c} double" for c in "abcde"), lifecycle=1
@@ -139,7 +139,7 @@ def test_table_io_with_parts(switch_table_io):
     table_io = ODPSTableIO(o)
 
     # test read and write tables with partition
-    parted_table_name = tn("test_parted_halo_write")
+    parted_table_name = tn("test_parted_halo_write_" + str(switch_table_io).lower())
     o.delete_table(parted_table_name, if_exists=True)
     tb = o.create_table(
         parted_table_name,
--- a/maxframe/io/odpsio/tests/test_volumeio.py
+++ b/maxframe/io/odpsio/tests/test_volumeio.py
@@ -42,15 +42,33 @@ def create_volume(request, oss_config):
         oss_bucket_name,
         oss_endpoint,
     ) = oss_config.oss_config
-    test_location = "oss://%s:%s@%s/%s/%s" % (
-        oss_access_id,
-        oss_secret_access_key,
-        oss_endpoint,
-        oss_bucket_name,
-        oss_test_dir_name,
-    )
+
+    if "test" in oss_endpoint:
+        # offline config
+        test_location = "oss://%s:%s@%s/%s/%s" % (
+            oss_access_id,
+            oss_secret_access_key,
+            oss_endpoint,
+            oss_bucket_name,
+            oss_test_dir_name,
+        )
+        rolearn = None
+    else:
+        # online config
+        endpoint_parts = oss_endpoint.split(".", 1)
+        if "-internal" not in endpoint_parts[0]:
+            endpoint_parts[0] += "-internal"
+        test_location = "oss://%s/%s/%s" % (
+            ".".join(endpoint_parts),
+            oss_bucket_name,
+            oss_test_dir_name,
+        )
+        rolearn = oss_config.oss_rolearn
+
     oss_config.oss_bucket.put_object(oss_test_dir_name + "/", b"")
-    odps_entry.create_external_volume(test_vol_name, location=test_location)
+    odps_entry.create_external_volume(
+        test_vol_name, location=test_location, rolearn=rolearn
+    )
     try:
         yield test_vol_name
     finally:
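Note: the new branch rewrites the OSS endpoint to its VPC-internal form before building the external volume location. The rewrite logic in isolation, as a hypothetical helper (the function name is ours, not part of maxframe):

    def to_internal_endpoint(oss_endpoint: str) -> str:
        # Route OSS traffic through the VPC-internal endpoint unless the
        # host segment is already the internal one.
        host, _, domain = oss_endpoint.partition(".")
        if "-internal" not in host:
            host += "-internal"
        return f"{host}.{domain}" if domain else host

    assert (
        to_internal_endpoint("oss-cn-shanghai.aliyuncs.com")
        == "oss-cn-shanghai-internal.aliyuncs.com"
    )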
@@ -75,13 +93,19 @@ def test_read_write_volume(create_volume):
 
     odps_entry = ODPS.from_environments()
 
-    writer = ODPSVolumeWriter(odps_entry, create_volume, test_vol_dir)
+    writer = ODPSVolumeWriter(
+        odps_entry, create_volume, test_vol_dir, replace_internal_host=True
+    )
 
-    writer = ODPSVolumeWriter(odps_entry, create_volume, test_vol_dir)
+    writer = ODPSVolumeWriter(
+        odps_entry, create_volume, test_vol_dir, replace_internal_host=True
+    )
     writer.write_file("file1", b"content1")
     writer.write_file("file2", b"content2")
 
-    reader = ODPSVolumeReader(odps_entry, create_volume, test_vol_dir)
+    reader = ODPSVolumeReader(
+        odps_entry, create_volume, test_vol_dir, replace_internal_host=True
+    )
     assert reader.read_file("file1") == b"content1"
     assert reader.read_file("file2") == b"content2"
 
--- a/maxframe/io/odpsio/volumeio.py
+++ b/maxframe/io/odpsio/volumeio.py
@@ -13,16 +13,28 @@
 # limitations under the License.
 
 import inspect
-from typing import Iterator, List, Union
+from typing import Iterator, List, Optional, Union
 
 from odps import ODPS
+from odps import __version__ as pyodps_version
+
+from ...lib.version import Version
+
+_has_replace_internal_host = Version(pyodps_version) >= Version("0.12.0")
 
 
 class ODPSVolumeReader:
-    def __init__(self, odps_entry: ODPS, volume_name: str, volume_dir: str):
+    def __init__(
+        self,
+        odps_entry: ODPS,
+        volume_name: str,
+        volume_dir: str,
+        replace_internal_host: bool = False,
+    ):
         self._odps_entry = odps_entry
         self._volume = odps_entry.get_volume(volume_name)
         self._volume_dir = volume_dir
+        self._replace_internal_host = replace_internal_host
 
     def list_files(self) -> List[str]:
         def _get_file_name(vol_file):
@@ -38,18 +50,36 @@ class ODPSVolumeReader:
         ]
 
     def read_file(self, file_name: str) -> bytes:
-        with self._volume.open_reader(self._volume_dir + "/" + file_name) as reader:
+        kw = {}
+        if _has_replace_internal_host and self._replace_internal_host:
+            kw = {"replace_internal_host": self._replace_internal_host}
+        with self._volume.open_reader(
+            self._volume_dir + "/" + file_name, **kw
+        ) as reader:
             return reader.read()
 
 
 class ODPSVolumeWriter:
-    def __init__(self, odps_entry: ODPS, volume_name: str, volume_dir: str):
+    def __init__(
+        self,
+        odps_entry: ODPS,
+        volume_name: str,
+        volume_dir: str,
+        schema_name: Optional[str] = None,
+        replace_internal_host: bool = False,
+    ):
         self._odps_entry = odps_entry
-        self._volume = odps_entry.get_volume(volume_name)
+        self._volume = odps_entry.get_volume(volume_name, schema=schema_name)
         self._volume_dir = volume_dir
+        self._replace_internal_host = replace_internal_host
 
     def write_file(self, file_name: str, data: Union[bytes, Iterator[bytes]]):
-        with self._volume.open_writer(self._volume_dir + "/" + file_name) as writer:
+        kw = {}
+        if _has_replace_internal_host and self._replace_internal_host:
+            kw = {"replace_internal_host": self._replace_internal_host}
+        with self._volume.open_writer(
+            self._volume_dir + "/" + file_name, **kw
+        ) as writer:
             if not inspect.isgenerator(data):
                 writer.write(data)
             else:
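Note: `_has_replace_internal_host` gates the keyword argument on the installed PyODPS version, so older clients never receive an argument they would reject. A self-contained sketch of the pattern (the stand-in `open_reader` below is illustrative, not the PyODPS API):

    from packaging.version import Version

    INSTALLED_VERSION = "0.11.4"  # pretend this came from the dependency
    _supports_kwarg = Version(INSTALLED_VERSION) >= Version("0.12.0")

    def open_reader(path, **kw):
        # Stand-in for a dependency API: old versions reject unknown kwargs.
        if not _supports_kwarg and kw:
            raise TypeError(f"unexpected keyword arguments: {sorted(kw)}")
        return f"reader({path})"

    def read_file(path, replace_internal_host=False):
        kw = {}
        if _supports_kwarg and replace_internal_host:
            kw["replace_internal_host"] = True  # forwarded only when supported
        return open_reader(path, **kw)

    # On the old version the flag is silently dropped instead of raising.
    assert read_file("dir/file1", replace_internal_host=True) == "reader(dir/file1)"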
--- a/maxframe/learn/contrib/__init__.py
+++ b/maxframe/learn/contrib/__init__.py
@@ -12,6 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from . import pytorch
+from . import graph, llm, pytorch
 
+del graph
+del llm
 del pytorch
--- /dev/null
+++ b/maxframe/learn/contrib/graph/__init__.py
@@ -0,0 +1,15 @@
+# Copyright 1999-2024 Alibaba Group Holding Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .connected_components import connected_components
--- /dev/null
+++ b/maxframe/learn/contrib/graph/connected_components.py
@@ -0,0 +1,215 @@
+# Copyright 1999-2024 Alibaba Group Holding Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import numpy as np
+import pandas as pd
+
+from maxframe import opcodes
+
+from ....core import OutputType
+from ....dataframe.operators import DataFrameOperator, DataFrameOperatorMixin
+from ....dataframe.utils import make_dtypes, parse_index
+from ....serialization.serializables import Int32Field, StringField
+
+
+class DataFrameConnectedComponentsOperator(DataFrameOperator, DataFrameOperatorMixin):
+    _op_type_ = opcodes.CONNECTED_COMPONENTS
+
+    vertex_col1 = StringField("vertex_col1", default=None)
+    vertex_col2 = StringField("vertex_col2", default=None)
+    max_iter = Int32Field("max_iter", default=6)
+
+    def __call__(self, df):
+        node_id_dtype = df.dtypes[self.vertex_col1]
+        dtypes = make_dtypes({"id": node_id_dtype, "component": node_id_dtype})
+        # this will return a dataframe and a bool flag
+        new_dataframe_tileable_kw = {
+            "shape": (np.nan, 2),
+            "index_value": parse_index(pd.RangeIndex(0)),
+            "columns_value": parse_index(dtypes.index, store_data=True),
+            "dtypes": dtypes,
+        }
+        new_scalar_tileable_kw = {"dtype": np.dtype(np.bool_), "shape": ()}
+        return self.new_tileables(
+            [df],
+            kws=[new_dataframe_tileable_kw, new_scalar_tileable_kw],
+        )
+
+    @property
+    def output_limit(self):
+        return 2
+
+
+def connected_components(
+    dataframe, vertex_col1: str, vertex_col2: str, max_iter: int = 6
+):
+    """
+    The connected components algorithm labels each node as belonging to a specific
+    connected component with the ID of its lowest-numbered vertex.
+
+    Parameters
+    ----------
+    dataframe : DataFrame
+        A DataFrame containing the edges of the graph.
+
+    vertex_col1 : str
+        The name of the column in `dataframe` that contains one of the edge
+        vertices. The column values must be integers.
+
+    vertex_col2 : str
+        The name of the column in `dataframe` that contains the other edge
+        vertex. The column values must be integers.
+
+    max_iter : int
+        The algorithm uses large-star and small-star transformations to find all
+        connected components; `max_iter` controls the maximum number of iteration
+        rounds before all edges are found. Default is 6.
+
+    Returns
+    -------
+    DataFrame
+        Returns a DataFrame containing all connected-component edges in the two
+        columns `id` and `component`, where `component` is the lowest-numbered
+        vertex in the connected component.
+
+    Notes
+    -----
+    After `execute()`, the DataFrame has a bool member `flag` indicating whether
+    `connected_components` converged within `max_iter` rounds. `True` means the
+    DataFrame already contains all edges of the connected components; if `False`,
+    you can run `connected_components` again to reach the converged state.
+
+    Examples
+    --------
+    >>> import numpy as np
+    >>> import maxframe.dataframe as md
+    >>> from maxframe.learn.contrib.graph import connected_components
+    >>> df = md.DataFrame({'x': [4, 1], 'y': [0, 4]})
+    >>> df.execute()
+       x  y
+    0  4  0
+    1  1  4
+
+    Get connected components with one round of iteration.
+
+    >>> components, converged = connected_components(df, "x", "y", 1)
+    >>> session.execute(components, converged)
+    >>> components
+       id  component
+    0   1          0
+    1   4          0
+
+    >>> converged
+    True
+
+    Sometimes a single iteration is not sufficient to propagate the connectivity
+    of all edges. By default, `connected_components` performs 6 iterations.
+    If you are unsure whether the connected components have converged, check the
+    convergence flag in the output after calling `execute()`.
+
+    >>> df = md.DataFrame({'x': [4, 1, 7, 5, 8, 11, 11], 'y': [0, 4, 4, 7, 7, 9, 13]})
+    >>> df.execute()
+        x   y
+    0   4   0
+    1   1   4
+    2   7   4
+    3   5   7
+    4   8   7
+    5  11   9
+    6  11  13
+
+    >>> components, converged = connected_components(df, "x", "y", 1)
+    >>> session.execute(components, converged)
+    >>> components
+       id  component
+    0   4          0
+    1   7          0
+    2   8          4
+    3  13          9
+    4   1          0
+    5   5          0
+    6  11          9
+
+    If the flag were `True`, convergence would have been achieved; here it is
+    still `False`.
+
+    >>> converged
+    False
+
+    You can then decide whether to continue iterating or to rerun with a larger
+    number of iterations (though not too large, which would waste computation).
+
+    >>> components, converged = connected_components(components, "id", "component", 1)
+    >>> session.execute(components, converged)
+    >>> components
+       id  component
+    0   4          0
+    1   7          0
+    2  13          9
+    3   1          0
+    4   5          0
+    5  11          9
+    6   8          0
+
+    >>> components, converged = connected_components(df, "x", "y")
+    >>> session.execute(components, converged)
+    >>> components
+       id  component
+    0   4          0
+    1   7          0
+    2  13          9
+    3   1          0
+    4   5          0
+    5  11          9
+    6   8          0
+    """
+
+    # Check if vertex columns are provided
+    if not vertex_col1 or not vertex_col2:
+        raise ValueError("Both vertex_col1 and vertex_col2 must be provided.")
+
+    # Check if max_iter is provided and within the valid range
+    if max_iter is None:
+        raise ValueError("max_iter must be provided.")
+    if not (1 <= max_iter <= 50):
+        raise ValueError("max_iter must be an integer between 1 and 50.")
+
+    # Verify that the vertex columns exist in the dataframe
+    missing_cols = [
+        col for col in (vertex_col1, vertex_col2) if col not in dataframe.dtypes
+    ]
+    if missing_cols:
+        raise ValueError(
+            f"The following required columns {missing_cols} are not in {list(dataframe.dtypes.index)}"
+        )
+
+    # Ensure that the vertex columns are of integer type
+    # TODO support string dtype
+    incorrect_dtypes = [
+        col
+        for col in (vertex_col1, vertex_col2)
+        if dataframe[col].dtype != np.dtype("int")
+    ]
+    if incorrect_dtypes:
+        dtypes_str = ", ".join(str(dataframe[col].dtype) for col in incorrect_dtypes)
+        raise ValueError(
+            f"Columns {incorrect_dtypes} should be of integer type, but found {dtypes_str}."
+        )
+
+    op = DataFrameConnectedComponentsOperator(
+        vertex_col1=vertex_col1,
+        vertex_col2=vertex_col2,
+        _output_types=[OutputType.dataframe, OutputType.scalar],
+        max_iter=max_iter,
+    )
+    return op(dataframe)
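Note: to make the docstring's expected output concrete, here is a hedged local cross-check using a plain union-find in pandas. This is not the distributed large-star/small-star implementation; it only reproduces the converged result on small data:

    import pandas as pd

    def local_connected_components(df, col1, col2):
        # Union-find with path halving; labels every vertex with the
        # lowest-numbered vertex of its component.
        parent = {}

        def find(x):
            parent.setdefault(x, x)
            while parent[x] != x:
                parent[x] = parent[parent[x]]
                x = parent[x]
            return x

        for a, b in zip(df[col1], df[col2]):
            ra, rb = find(a), find(b)
            if ra != rb:
                parent[max(ra, rb)] = min(ra, rb)

        pairs = sorted((v, find(v)) for v in list(parent))
        return pd.DataFrame(
            [(v, r) for v, r in pairs if v != r], columns=["id", "component"]
        )

    df = pd.DataFrame({"x": [4, 1, 7, 5, 8, 11, 11], "y": [0, 4, 4, 7, 7, 9, 13]})
    print(local_connected_components(df, "x", "y"))
    # id/component pairs: (1, 0), (4, 0), (5, 0), (7, 0), (8, 0), (11, 9), (13, 9)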
--- /dev/null
+++ b/maxframe/learn/contrib/graph/tests/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 1999-2024 Alibaba Group Holding Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
--- /dev/null
+++ b/maxframe/learn/contrib/graph/tests/test_connected_components.py
@@ -0,0 +1,53 @@
+# Copyright 1999-2024 Alibaba Group Holding Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import numpy as np
+import pytest
+
+from ..... import dataframe as md
+from .....dataframe.core import DataFrameData
+from .....tensor.core import TensorData
+from .. import connected_components
+
+
+@pytest.fixture
+def df1():
+    return md.DataFrame({"a": [1, 2, 3], "b": [1, 2, 3], "c": [1, 2, 3]})
+
+
+@pytest.fixture
+def df2():
+    return md.DataFrame(
+        [[1, "2"], [1, "2"]],
+        columns=["a", "b"],
+    )
+
+
+def test_connected_components(df1, df2):
+    edges, flag = connected_components(df1, "a", "b")
+    assert edges.op.max_iter == 6
+    assert edges.shape == (np.nan, 2)
+    assert isinstance(edges.data, DataFrameData)
+    assert isinstance(flag.data, TensorData)
+    assert flag.shape == ()
+    assert "id" in edges.dtypes and "component" in edges.dtypes
+
+    with pytest.raises(ValueError):
+        connected_components(df1, "a", "x")
+
+    with pytest.raises(ValueError):
+        connected_components(df1, "a", "b", 0)
+
+    with pytest.raises(ValueError):
+        connected_components(df2, "a", "b")
--- /dev/null
+++ b/maxframe/learn/contrib/llm/__init__.py
@@ -0,0 +1,16 @@
+# Copyright 1999-2024 Alibaba Group Holding Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from . import models, multi_modal, text
+
+del models
--- /dev/null
+++ b/maxframe/learn/contrib/llm/core.py
@@ -0,0 +1,54 @@
+# Copyright 1999-2024 Alibaba Group Holding Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from typing import Any, Dict
+
+import numpy as np
+import pandas as pd
+
+from ....core.entity.output_types import OutputType
+from ....core.operator.base import Operator
+from ....core.operator.core import TileableOperatorMixin
+from ....dataframe.utils import parse_index
+from ....serialization.serializables.core import Serializable
+from ....serialization.serializables.field import AnyField, DictField, StringField
+
+
+class LLM(Serializable):
+    name = StringField("name", default=None)
+
+    def validate_params(self, params: Dict[str, Any]):
+        pass
+
+
+class LLMOperator(Operator, TileableOperatorMixin):
+    model = AnyField("model", default=None)
+    prompt_template = AnyField("prompt_template", default=None)
+    params = DictField("params", default=None)
+
+    def __init__(self, output_types=None, **kw):
+        if output_types is None:
+            output_types = [OutputType.dataframe]
+        super().__init__(_output_types=output_types, **kw)
+
+    def __call__(self, data):
+        col_names = ["response", "success"]
+        columns = parse_index(pd.Index(col_names), store_data=True)
+        out_dtypes = pd.Series([np.dtype("O"), np.dtype("bool")], index=col_names)
+        return self.new_tileable(
+            inputs=[data],
+            dtypes=out_dtypes,
+            shape=(data.shape[0], len(col_names)),
+            index_value=data.index_value,
+            columns_value=columns,
+        )
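Note: `LLMOperator.__call__` pins the output schema to two columns regardless of the model. A tiny local check of that schema with plain pandas (illustrative only):

    import numpy as np
    import pandas as pd

    col_names = ["response", "success"]
    out_dtypes = pd.Series([np.dtype("O"), np.dtype("bool")], index=col_names)
    print(out_dtypes)
    # response    object
    # success       bool
    # dtype: object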
--- /dev/null
+++ b/maxframe/learn/contrib/llm/models/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 1999-2024 Alibaba Group Holding Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from .dashscope import DashScopeMultiModalLLM, DashScopeTextLLM
--- /dev/null
+++ b/maxframe/learn/contrib/llm/models/dashscope.py
@@ -0,0 +1,73 @@
+# Copyright 1999-2024 Alibaba Group Holding Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from typing import Any, Dict
+
+from ..... import opcodes
+from .....serialization.serializables.core import Serializable
+from .....serialization.serializables.field import StringField
+from ..core import LLMOperator
+from ..multi_modal import MultiModalLLM
+from ..text import TextLLM
+
+
+class DashScopeLLMMixin(Serializable):
+    __slots__ = ()
+
+    _not_supported_params = {"stream", "incremental_output"}
+
+    def validate_params(self, params: Dict[str, Any]):
+        for k in params.keys():
+            if k in self._not_supported_params:
+                raise ValueError(f"{k} is not supported")
+
+
+class DashScopeTextLLM(TextLLM, DashScopeLLMMixin):
+    api_key_resource = StringField("api_key_resource", default=None)
+
+    def generate(
+        self,
+        data,
+        prompt_template: Dict[str, Any],
+        params: Dict[str, Any] = None,
+    ):
+        return DashScopeTextGenerationOperator(
+            model=self,
+            prompt_template=prompt_template,
+            params=params,
+        )(data)
+
+
+class DashScopeMultiModalLLM(MultiModalLLM, DashScopeLLMMixin):
+    api_key_resource = StringField("api_key_resource", default=None)
+
+    def generate(
+        self,
+        data,
+        prompt_template: Dict[str, Any],
+        params: Dict[str, Any] = None,
+    ):
+        # TODO add precheck here
+        return DashScopeMultiModalGenerationOperator(
+            model=self,
+            prompt_template=prompt_template,
+            params=params,
+        )(data)
+
+
+class DashScopeTextGenerationOperator(LLMOperator):
+    _op_type_ = opcodes.DASHSCOPE_TEXT_GENERATION
+
+
+class DashScopeMultiModalGenerationOperator(LLMOperator):
+    _op_type_ = opcodes.DASHSCOPE_MULTI_MODAL_GENERATION
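Note: a hedged usage sketch of the new generation API. The model name, resource name, and prompt-template shape below are placeholders, not values confirmed by this diff:

    import maxframe.dataframe as md
    from maxframe.learn.contrib.llm.models import DashScopeTextLLM

    # Placeholder model and API-key resource names.
    llm = DashScopeTextLLM(name="qwen-turbo", api_key_resource="my_api_key_res")

    df = md.DataFrame({"query": ["What is MaxCompute?"]})
    result = llm.generate(
        df,
        prompt_template={"user": "{query}"},  # placeholder template shape
        params={"temperature": 0.1},
    )
    # Per LLMOperator.__call__, `result` is a DataFrame with two columns,
    # "response" (object) and "success" (bool); execute it in a MaxFrame
    # session to materialize the generations.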