maxframe 2.2.0__cp38-cp38-win_amd64.whl → 2.3.0rc1__cp38-cp38-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of maxframe has been flagged as a potentially problematic release.

Files changed (114)
  1. maxframe/_utils.cp38-win_amd64.pyd +0 -0
  2. maxframe/codegen/core.py +3 -2
  3. maxframe/codegen/spe/dataframe/merge.py +4 -0
  4. maxframe/codegen/spe/dataframe/misc.py +2 -0
  5. maxframe/codegen/spe/dataframe/reduction.py +18 -0
  6. maxframe/codegen/spe/dataframe/sort.py +9 -1
  7. maxframe/codegen/spe/dataframe/tests/test_reduction.py +13 -0
  8. maxframe/codegen/spe/dataframe/tseries.py +9 -0
  9. maxframe/codegen/spe/learn/contrib/lightgbm.py +4 -3
  10. maxframe/codegen/spe/tensor/datasource.py +1 -0
  11. maxframe/config/config.py +3 -0
  12. maxframe/conftest.py +10 -0
  13. maxframe/core/base.py +2 -1
  14. maxframe/core/entity/tileables.py +2 -0
  15. maxframe/core/graph/core.cp38-win_amd64.pyd +0 -0
  16. maxframe/core/graph/entity.py +7 -1
  17. maxframe/core/mode.py +6 -1
  18. maxframe/dataframe/__init__.py +2 -2
  19. maxframe/dataframe/arithmetic/__init__.py +4 -0
  20. maxframe/dataframe/arithmetic/maximum.py +33 -0
  21. maxframe/dataframe/arithmetic/minimum.py +33 -0
  22. maxframe/dataframe/core.py +98 -106
  23. maxframe/dataframe/datasource/core.py +6 -0
  24. maxframe/dataframe/datasource/direct.py +57 -0
  25. maxframe/dataframe/datasource/read_csv.py +19 -11
  26. maxframe/dataframe/datasource/read_odps_query.py +29 -6
  27. maxframe/dataframe/datasource/read_odps_table.py +32 -10
  28. maxframe/dataframe/datasource/read_parquet.py +38 -39
  29. maxframe/dataframe/datastore/__init__.py +6 -0
  30. maxframe/dataframe/datastore/direct.py +268 -0
  31. maxframe/dataframe/datastore/to_odps.py +6 -0
  32. maxframe/dataframe/extensions/flatjson.py +2 -1
  33. maxframe/dataframe/groupby/__init__.py +5 -1
  34. maxframe/dataframe/groupby/aggregation.py +10 -6
  35. maxframe/dataframe/groupby/apply_chunk.py +1 -3
  36. maxframe/dataframe/groupby/core.py +20 -4
  37. maxframe/dataframe/indexing/__init__.py +2 -1
  38. maxframe/dataframe/indexing/insert.py +45 -17
  39. maxframe/dataframe/merge/__init__.py +3 -0
  40. maxframe/dataframe/merge/combine.py +244 -0
  41. maxframe/dataframe/misc/__init__.py +14 -3
  42. maxframe/dataframe/misc/check_unique.py +41 -10
  43. maxframe/dataframe/misc/drop.py +31 -0
  44. maxframe/dataframe/misc/infer_dtypes.py +251 -0
  45. maxframe/dataframe/misc/map.py +31 -18
  46. maxframe/dataframe/misc/repeat.py +159 -0
  47. maxframe/dataframe/misc/tests/test_misc.py +35 -1
  48. maxframe/dataframe/missing/checkna.py +3 -2
  49. maxframe/dataframe/reduction/__init__.py +10 -5
  50. maxframe/dataframe/reduction/aggregation.py +6 -6
  51. maxframe/dataframe/reduction/argmax.py +7 -4
  52. maxframe/dataframe/reduction/argmin.py +7 -4
  53. maxframe/dataframe/reduction/core.py +18 -9
  54. maxframe/dataframe/reduction/mode.py +144 -0
  55. maxframe/dataframe/reduction/nunique.py +10 -3
  56. maxframe/dataframe/reduction/tests/test_reduction.py +12 -0
  57. maxframe/dataframe/sort/__init__.py +9 -2
  58. maxframe/dataframe/sort/argsort.py +7 -1
  59. maxframe/dataframe/sort/core.py +1 -1
  60. maxframe/dataframe/sort/rank.py +147 -0
  61. maxframe/dataframe/tseries/__init__.py +19 -0
  62. maxframe/dataframe/tseries/at_time.py +61 -0
  63. maxframe/dataframe/tseries/between_time.py +122 -0
  64. maxframe/dataframe/utils.py +30 -26
  65. maxframe/learn/contrib/llm/core.py +16 -7
  66. maxframe/learn/contrib/llm/deploy/__init__.py +13 -0
  67. maxframe/learn/contrib/llm/deploy/config.py +221 -0
  68. maxframe/learn/contrib/llm/deploy/core.py +247 -0
  69. maxframe/learn/contrib/llm/deploy/framework.py +35 -0
  70. maxframe/learn/contrib/llm/deploy/loader.py +360 -0
  71. maxframe/learn/contrib/llm/deploy/tests/__init__.py +13 -0
  72. maxframe/learn/contrib/llm/deploy/tests/test_register_models.py +359 -0
  73. maxframe/learn/contrib/llm/models/__init__.py +1 -0
  74. maxframe/learn/contrib/llm/models/dashscope.py +12 -6
  75. maxframe/learn/contrib/llm/models/managed.py +76 -11
  76. maxframe/learn/contrib/llm/models/openai.py +72 -0
  77. maxframe/learn/contrib/llm/tests/__init__.py +13 -0
  78. maxframe/learn/contrib/llm/tests/test_core.py +34 -0
  79. maxframe/learn/contrib/llm/tests/test_openai.py +187 -0
  80. maxframe/learn/contrib/llm/tests/test_text_gen.py +155 -0
  81. maxframe/learn/contrib/llm/text.py +348 -42
  82. maxframe/learn/contrib/models.py +4 -1
  83. maxframe/learn/contrib/xgboost/classifier.py +2 -0
  84. maxframe/learn/contrib/xgboost/core.py +31 -7
  85. maxframe/learn/contrib/xgboost/predict.py +4 -2
  86. maxframe/learn/contrib/xgboost/regressor.py +5 -0
  87. maxframe/learn/contrib/xgboost/train.py +2 -0
  88. maxframe/learn/preprocessing/_data/min_max_scaler.py +34 -23
  89. maxframe/learn/preprocessing/_data/standard_scaler.py +34 -25
  90. maxframe/learn/utils/__init__.py +1 -0
  91. maxframe/learn/utils/extmath.py +42 -9
  92. maxframe/learn/utils/odpsio.py +80 -11
  93. maxframe/lib/filesystem/_oss_lib/common.py +2 -0
  94. maxframe/lib/mmh3.cp38-win_amd64.pyd +0 -0
  95. maxframe/opcodes.py +9 -1
  96. maxframe/remote/core.py +4 -0
  97. maxframe/serialization/core.cp38-win_amd64.pyd +0 -0
  98. maxframe/serialization/tests/test_serial.py +2 -2
  99. maxframe/tensor/arithmetic/__init__.py +1 -1
  100. maxframe/tensor/arithmetic/core.py +2 -2
  101. maxframe/tensor/arithmetic/tests/test_arithmetic.py +0 -9
  102. maxframe/tensor/core.py +3 -0
  103. maxframe/tensor/misc/copyto.py +1 -1
  104. maxframe/tests/test_udf.py +61 -0
  105. maxframe/tests/test_utils.py +8 -5
  106. maxframe/udf.py +103 -7
  107. maxframe/utils.py +61 -8
  108. {maxframe-2.2.0.dist-info → maxframe-2.3.0rc1.dist-info}/METADATA +1 -2
  109. {maxframe-2.2.0.dist-info → maxframe-2.3.0rc1.dist-info}/RECORD +113 -90
  110. maxframe_client/session/task.py +8 -1
  111. maxframe_client/tests/test_session.py +24 -0
  112. maxframe/dataframe/arrays.py +0 -864
  113. {maxframe-2.2.0.dist-info → maxframe-2.3.0rc1.dist-info}/WHEEL +0 -0
  114. {maxframe-2.2.0.dist-info → maxframe-2.3.0rc1.dist-info}/top_level.txt +0 -0
maxframe/_utils.cp38-win_amd64.pyd CHANGED
Binary file
maxframe/codegen/core.py CHANGED
@@ -23,7 +23,7 @@ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union
 from odps.types import OdpsSchema
 from odps.utils import camel_to_underline
 
-from ..core import OperatorType, Tileable, TileableGraph
+from ..core import OperatorType, Tileable, TileableGraph, enter_mode
 from ..core.operator import Fetch, Operator
 from ..extension import iter_extensions
 from ..io.odpsio import build_dataframe_table_meta
@@ -465,6 +465,7 @@ class DAGCodeGenerator(metaclass=abc.ABCMeta):
     def _generate_delete_code(self, var_name: str) -> List[str]:
         return []
 
+    @enter_mode(build=True)
     def generate_code(self, dag: TileableGraph) -> List[str]:
         """
         Generate the code of the input dag.
@@ -494,7 +495,7 @@ class DAGCodeGenerator(metaclass=abc.ABCMeta):
             code_lines.extend(adapter.generate_pre_op_code(op, self._context))
             if self._generate_comments_enabled:
                 code_lines.extend(adapter.generate_comment(op, self._context))
-            code_lines.extend(adapter.generate_code(op, self._context))
+            code_lines.extend(adapter.generate_code(op, self._context) or [])
             code_lines.extend(adapter.generate_post_op_code(op, self._context))
             code_lines.append("")  # Append an empty line to separate operators
 
maxframe/codegen/spe/dataframe/merge.py CHANGED
@@ -15,6 +15,7 @@
 from typing import List
 
 from ....dataframe.merge import (
+    DataFrameCombine,
     DataFrameCompare,
     DataFrameConcat,
     DataFrameMerge,
@@ -23,6 +24,9 @@ from ....dataframe.merge import (
 from ..core import SPECodeContext, SPEOperatorAdapter, register_op_adapter
 from ..utils import build_method_call_adapter
 
+DataFrameCombineAdapter = build_method_call_adapter(
+    DataFrameCombine, "combine", 1, "func", kw_keys=["fill_value"]
+)
 DataFrameCompareAdapter = build_method_call_adapter(
     DataFrameCompare,
     "compare",
maxframe/codegen/spe/dataframe/misc.py CHANGED
@@ -32,6 +32,7 @@ from ....dataframe.misc.get_dummies import DataFrameGetDummies
 from ....dataframe.misc.isin import DataFrameIsin
 from ....dataframe.misc.map import DataFrameMap
 from ....dataframe.misc.memory_usage import DataFrameMemoryUsage
+from ....dataframe.misc.repeat import DataFrameRepeat
 from ....dataframe.misc.shift import DataFrameShift
 from ....dataframe.misc.to_numeric import DataFrameToNumeric
 from ....dataframe.misc.transform import DataFrameTransform
@@ -127,6 +128,7 @@ class DataFrameIsinAdapter(SPEOperatorAdapter):
 DataFrameMapAdapter = build_method_call_adapter(
     DataFrameMap, "map", "arg", na_action=None
 )
+DataFrameRepeatAdapter = build_method_call_adapter(DataFrameRepeat, "repeat", "repeats")
 DataFrameShiftAdapter = build_method_call_adapter(
     DataFrameShift, "shift", kw_keys=["periods", "freq", "axis", "fill_value"]
 )
maxframe/codegen/spe/dataframe/reduction.py CHANGED
@@ -28,6 +28,7 @@ from ....dataframe.reduction import (
     DataFrameMean,
     DataFrameMedian,
     DataFrameMin,
+    DataFrameMode,
     DataFrameNunique,
     DataFrameProd,
     DataFrameSem,
@@ -163,3 +164,20 @@ class DataFrameUniqueAdapter(SPEOperatorAdapter):
         input_var_name = context.get_input_tileable_variable(op.inputs[0])
         res_var_name = context.get_output_tileable_variable(op.outputs[0])
         return [f"{res_var_name} = pd.unique({input_var_name})"]
+
+
+@register_op_adapter(DataFrameMode)
+class DataFrameModeAdapter(SPEOperatorAdapter):
+    def generate_code(self, op: DataFrameMode, context: SPECodeContext) -> List[str]:
+        input_var_name = context.get_input_tileable_variable(op.inputs[0])
+        args = []
+        if op.inputs[0].ndim == 2:
+            if op.axis is not None:
+                args.append(f"axis={op.axis!r}")
+        if op.numeric_only is not None:
+            args.append(f"numeric_only={op.numeric_only!r}")
+        if op.dropna is not None:
+            args.append(f"dropna={op.dropna!r}")
+        args_str = ", ".join(args)
+        res_var_name = context.get_output_tileable_variable(op.outputs[0])
+        return [f"{res_var_name} = {input_var_name}.mode({args_str})"]
maxframe/codegen/spe/dataframe/sort.py CHANGED
@@ -16,9 +16,17 @@ from abc import abstractmethod
 from typing import Any, Dict, List
 
 from ....dataframe.core import DATAFRAME_TYPE
-from ....dataframe.sort import DataFrameSortIndex, DataFrameSortValues
+from ....dataframe.sort import DataFrameRank, DataFrameSortIndex, DataFrameSortValues
 from ....dataframe.sort.core import DataFrameSortOperator
 from ..core import SPECodeContext, SPEOperatorAdapter, register_op_adapter
+from ..utils import build_method_call_adapter
+
+DataFrameRankAdapter = build_method_call_adapter(
+    DataFrameRank,
+    "rank",
+    kw_keys=["axis", "method", "numeric_only", "na_option", "ascending", "pct"],
+    skip_none=True,
+)
 
 
 class _DataFrameSortAdapter(SPEOperatorAdapter):
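
Not part of the diff: a hedged sketch of how the new DataFrameRankAdapter could be exercised, mirroring the pattern of the test_mode test added below. The md.DataFrame construction and the exact keyword arguments emitted are assumptions; kwargs stored as None on DataFrameRank are skipped because the adapter is built with skip_none=True.

import pandas as pd
import maxframe.dataframe as md
from maxframe.codegen.spe.core import SPECodeContext
from maxframe.codegen.spe.dataframe.sort import DataFrameRankAdapter

# build a deferred rank() operation on a small maxframe DataFrame
df = md.DataFrame(pd.DataFrame({"a": [3.0, 1.0, 2.0]}))
ranked = df.rank(method="min")

# generate the pandas call the SPE backend would emit for this operator
code = DataFrameRankAdapter().generate_code(ranked.op, SPECodeContext())
# indicative output shape:
# ["var_1 = var_0.rank(axis=0, method='min', ascending=True, pct=False)"]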
maxframe/codegen/spe/dataframe/tests/test_reduction.py CHANGED
@@ -19,6 +19,7 @@ from ...core import SPECodeContext
 from ..reduction import (
     DataFrameAggregateAdapter,
     DataFrameKurtosisAdapter,
+    DataFrameModeAdapter,
     DataFrameNuniqueAdapter,
     DataFrameReductionAdapter,
     DataFrameUniqueAdapter,
@@ -96,6 +97,18 @@ def test_median(df1):
     assert results == expected_results
 
 
+def test_mode(df1, s1):
+    df = df1.mode(dropna=False, numeric_only=True)
+    results = DataFrameModeAdapter().generate_code(df.op, SPECodeContext())
+    expected_results = ["var_1 = var_0.mode(axis=0, numeric_only=True, dropna=False)"]
+    assert results == expected_results
+
+    s = s1.mode()
+    results = DataFrameModeAdapter().generate_code(s.op, SPECodeContext())
+    expected_results = ["var_1 = var_0.mode(dropna=True)"]
+    assert results == expected_results
+
+
 def test_aggregate(df1):
     df = df1.agg(["sum", "mean"])
     context = SPECodeContext()
maxframe/codegen/spe/dataframe/tseries.py CHANGED
@@ -14,9 +14,18 @@
 
 from typing import List
 
+from ....dataframe.tseries.between_time import DataFrameBetweenTime
 from ....dataframe.tseries.to_datetime import DataFrameToDatetime
 from ....utils import no_default
 from ..core import SPECodeContext, SPEOperatorAdapter, register_op_adapter
+from ..utils import build_method_call_adapter
+
+DataFrameBetweenTimeAdapter = build_method_call_adapter(
+    DataFrameBetweenTime,
+    "between_time",
+    kw_keys=["start_time", "end_time", "inclusive", "axis"],
+    skip_none=True,
+)
 
 
 @register_op_adapter(DataFrameToDatetime)
maxframe/codegen/spe/learn/contrib/lightgbm.py CHANGED
@@ -13,7 +13,7 @@
 # limitations under the License.
 
 import inspect
-from typing import List
+from typing import List, Optional
 
 from .....learn.contrib.lightgbm._predict import LGBMPredict
 from .....learn.contrib.lightgbm._train import LGBMTrain
@@ -23,11 +23,12 @@ from ...core import SPECodeContext, SPEOperatorAdapter, register_op_adapter
 
 
 class _LightGBMAdapter(SPEOperatorAdapter):
-    def generate_code(self, op: OperatorType, context: SPECodeContext) -> List[str]:
+    def generate_code(
+        self, op: OperatorType, context: SPECodeContext
+    ) -> Optional[List[str]]:
         context.register_import("lightgbm")
         context.register_import("pandas", "pd")
         context.register_import("numpy", "np")
-        return []
 
 
 @register_op_adapter(ToLGBMDataset)
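
Not from the package: a minimal sketch of the contract behind two related changes in this diff, the "generate_code(op, self._context) or []" normalization in DAGCodeGenerator above and the removal of "return []" from _LightGBMAdapter here. An adapter hook may now return None when it only registers imports, and the caller treats that as no lines to emit. The hook below is hypothetical.

def hypothetical_generate_code(op, context):
    # side effects only (e.g. registering imports); nothing to emit
    return None

code_lines = []
code_lines.extend(hypothetical_generate_code(None, None) or [])  # extends by nothing
assert code_lines == []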
maxframe/codegen/spe/tensor/datasource.py CHANGED
@@ -142,6 +142,7 @@ class TensorOnesZerosLikeAdapter(SPEOperatorAdapter):
         ]
 
 
+@register_op_adapter(Scalar)
 class TensorScalarAdapter(SPEOperatorAdapter):
     def accepts(self, op: Scalar) -> EngineAcceptance:
         return EngineAcceptance.SUCCESSOR
maxframe/config/config.py CHANGED
@@ -435,6 +435,9 @@ default_options.register_option(
 default_options.register_option(
     "session.quota_name", None, validator=is_null | is_string, remote=True
 )
+default_options.register_option(
+    "session.gu_quota_name", None, validator=is_null | is_string, remote=True
+)
 default_options.register_option(
     "session.region_id", None, validator=is_null | is_string, remote=True
 )
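
Not from the diff: a minimal sketch of how the new option would presumably be set on the client side, assuming the same maxframe options interface used for the existing session.quota_name setting; the quota name is a placeholder.

from maxframe import options

options.session.gu_quota_name = "my_gu_quota"  # placeholder value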
maxframe/conftest.py CHANGED
@@ -61,6 +61,10 @@ def _get_account_env(test_config: ConfigParser, section_name: str) -> ODPS:
         tunnel_endpoint = test_config.get("odps", "tunnel_endpoint")
     except NoOptionError:
         tunnel_endpoint = None
+    try:
+        namespace = test_config.get("odps", "namespace")
+    except NoOptionError:
+        namespace = None
     return ODPS(
         access_id,
         secret_access_key,
@@ -68,6 +72,7 @@ def _get_account_env(test_config: ConfigParser, section_name: str) -> ODPS:
         endpoint,
         tunnel_endpoint=tunnel_endpoint,
         overwrite_global=False,
+        namespace=namespace,
     )
 
 
@@ -85,6 +90,7 @@ def _get_bearer_token_env(test_config: ConfigParser, section_name: str) -> ODPS:
         project=entry.project,
         endpoint=entry.endpoint,
         tunnel_endpoint=entry.tunnel_endpoint,
+        namespace=entry.namespace,
     )
 
 
@@ -97,6 +103,7 @@ def _enter_odps_envs(entry, drop_temp_tables=True):
         "ODPS_ENDPOINT",
         "RAY_ISOLATION_UT_ENV",
         "ODPS_TUNNEL_ENDPOINT",
+        "ODPS_NAMESPACE",
     ):
         if env_name in os.environ:
             stored_envs[env_name] = os.environ[env_name]
@@ -105,6 +112,8 @@ def _enter_odps_envs(entry, drop_temp_tables=True):
     os.environ["ODPS_BEARER_TOKEN"] = entry.account.token
     os.environ["ODPS_PROJECT_NAME"] = entry.project
     os.environ["ODPS_ENDPOINT"] = entry.endpoint
+    if entry.namespace:
+        os.environ["ODPS_NAMESPACE"] = entry.namespace
     os.environ["RAY_ISOLATION_UT_ENV"] = "UT"
     if entry.tunnel_endpoint:
         os.environ["ODPS_TUNNEL_ENDPOINT"] = entry.tunnel_endpoint
@@ -115,6 +124,7 @@ def _enter_odps_envs(entry, drop_temp_tables=True):
     os.environ.pop("ODPS_BEARER_TOKEN", None)
     os.environ.pop("ODPS_PROJECT_NAME", None)
     os.environ.pop("ODPS_ENDPOINT", None)
+    os.environ.pop("ODPS_NAMESPACE", None)
     os.environ.pop("ODPS_TUNNEL_ENDPOINT", None)
     os.environ.pop("RAY_ISOLATION_UT_ENV", None)
 
maxframe/core/base.py CHANGED
@@ -126,12 +126,13 @@ class Base(Serializable):
 
     def to_kv(
         self,
-        exclude_fields: Tuple[str],
+        exclude_fields: Tuple[str] = None,
        accept_value_types: Optional[Tuple[Type]] = None,
     ):
         fields = self._FIELDS
         kv = {}
         no_value = object()
+        exclude_fields = exclude_fields or ()
         for name, field in fields.items():
             if name not in exclude_fields:
                 value = getattr(self, name, no_value)
maxframe/core/entity/tileables.py CHANGED
@@ -318,6 +318,8 @@ class HasShapeTileableData(TileableData):
         try:
             return int(self.shape[0])
         except (IndexError, ValueError):  # pragma: no cover
+            if len(self.shape) == 0:
+                raise TypeError("len() of unsized object") from None
             return 0
 
     @property
maxframe/core/graph/core.cp38-win_amd64.pyd CHANGED
Binary file
maxframe/core/graph/entity.py CHANGED
@@ -117,10 +117,11 @@ class SerializableGraph(Serializable):
     _successors = DictField("successors")
     _results = ListField("results")
     _graph_cls = StringField("graph_cls")
+    _extra_params = DictField("extra_params", default=None)
 
     @classmethod
     def from_graph(cls, graph: EntityGraph) -> "SerializableGraph":
-        return SerializableGraph(
+        kw = dict(
             _is_chunk=False,
             _fetch_nodes=[chunk for chunk in graph if chunk.is_fetch()],
             _nodes=graph._nodes,
@@ -129,6 +130,9 @@
             _results=graph.results,
             _graph_cls=extract_class_name(type(graph)),
         )
+        if hasattr(graph, "extra_params"):
+            kw["_extra_params"] = graph.extra_params
+        return SerializableGraph(**kw)
 
     def to_graph(self) -> EntityGraph:
         graph_cls = (
@@ -140,6 +144,8 @@
         graph._nodes.update(self._nodes)
         graph._predecessors.update(self._predecessors)
         graph._successors.update(self._successors)
+        if self._extra_params:
+            graph.extra_params = self._extra_params
         return graph
 
 
maxframe/core/mode.py CHANGED
@@ -42,6 +42,10 @@ def is_build_mode():
     return bool(getattr(_internal_mode, "build", False))
 
 
+def is_mock_mode():
+    return bool(getattr(_internal_mode, "mock", False))
+
+
 class _EnterModeFuncWrapper:
     def __init__(self, mode_name_to_value):
         self.mode_name_to_value = mode_name_to_value
@@ -86,10 +90,11 @@ class _EnterModeFuncWrapper:
         return _inner
 
 
-def enter_mode(kernel=None, build=None):
+def enter_mode(kernel=None, build=None, mock=None):
     mode_name_to_value = {
         "kernel": kernel,
         "build": build,
+        "mock": mock,
     }
     mode_name_to_value = {k: v for k, v in mode_name_to_value.items() if v is not None}
 
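
A minimal sketch of the new mock flag, assuming enter_mode keeps its decorator usage as shown by the @enter_mode(build=True) change to DAGCodeGenerator earlier in this diff.

from maxframe.core.mode import enter_mode, is_mock_mode

@enter_mode(mock=True)
def run_with_mock():
    # the thread-local mock flag is set while the wrapped function runs
    assert is_mock_mode()

run_with_mock()
assert not is_mock_mode()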
maxframe/dataframe/__init__.py CHANGED
@@ -32,6 +32,7 @@ from . import (
     window,
 )
 from .datasource.date_range import date_range
+from .datasource.direct import read_clipboard
 from .datasource.from_index import series_from_index
 from .datasource.from_records import from_records
 from .datasource.from_tensor import dataframe_from_tensor, series_from_tensor
@@ -40,7 +41,6 @@ from .datasource.read_odps_query import read_odps_query
 from .datasource.read_odps_table import read_odps_table
 from .datasource.read_parquet import read_parquet
 from .datastore.to_odps import to_odps_table
-from .groupby import NamedAgg
 from .initializer import DataFrame, Index, Series, read_pandas
 from .merge import concat, merge
 from .misc.cut import cut
@@ -49,7 +49,7 @@ from .misc.get_dummies import get_dummies
 from .misc.qcut import qcut
 from .misc.to_numeric import to_numeric
 from .missing import isna, isnull, notna, notnull
-from .reduction import CustomReduction, unique
+from .reduction import CustomReduction, NamedAgg, unique
 from .reshape.melt import melt
 from .reshape.pivot import pivot
 from .reshape.pivot_table import pivot_table
maxframe/dataframe/arithmetic/__init__.py CHANGED
@@ -53,6 +53,8 @@ from .less_equal import DataFrameLessEqual, le
 from .log import DataFrameLog
 from .log2 import DataFrameLog2
 from .log10 import DataFrameLog10
+from .maximum import DataFrameMaximum
+from .minimum import DataFrameMinimum
 from .mod import DataFrameMod, mod, rmod
 from .multiply import DataFrameMul, mul, rmul
 from .negative import DataFrameNegative, negative
@@ -250,6 +252,8 @@ def _install():
         DataFramePower,
         DataFrameSubtract,
         DataFrameTrueDiv,
+        DataFrameMaximum,
+        DataFrameMinimum,
     ]
     for ufunc_op in ufunc_ops:
         register_tensor_ufunc(ufunc_op)
maxframe/dataframe/arithmetic/maximum.py ADDED
@@ -0,0 +1,33 @@
+# Copyright 1999-2025 Alibaba Group Holding Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import numpy as np
+
+from ... import opcodes
+from ...utils import classproperty
+from .core import DataFrameBinopUfunc
+
+
+class DataFrameMaximum(DataFrameBinopUfunc):
+    _op_type_ = opcodes.MAXIMUM
+
+    @classproperty
+    def _operator(self):
+        return np.maximum
+
+    @classproperty
+    def tensor_op_type(self):
+        from ...tensor.arithmetic import TensorMaximum
+
+        return TensorMaximum
1
+ # Copyright 1999-2025 Alibaba Group Holding Ltd.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ import numpy as np
16
+
17
+ from ... import opcodes
18
+ from ...utils import classproperty
19
+ from .core import DataFrameBinopUfunc
20
+
21
+
22
+ class DataFrameMinimum(DataFrameBinopUfunc):
23
+ _op_type_ = opcodes.MINIMUM
24
+
25
+ @classproperty
26
+ def _operator(self):
27
+ return np.minimum
28
+
29
+ @classproperty
30
+ def tensor_op_type(self):
31
+ from ...tensor.arithmetic import TensorMinimum
32
+
33
+ return TensorMinimum
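
Not part of the diff: a hedged sketch of what registering DataFrameMaximum and DataFrameMinimum as tensor ufuncs is meant to enable, namely np.maximum / np.minimum on maxframe DataFrames building deferred maxframe operators instead of evaluating eagerly. The md.DataFrame construction assumes the usual pandas-backed initializer.

import numpy as np
import pandas as pd
import maxframe.dataframe as md

left = md.DataFrame(pd.DataFrame({"a": [1, 5, 3]}))
right = md.DataFrame(pd.DataFrame({"a": [2, 4, 6]}))

highs = np.maximum(left, right)  # expected to build a DataFrameMaximum op
lows = np.minimum(left, right)   # expected to build a DataFrameMinimum op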