kumoai 2.12.0.dev202511071730__cp310-cp310-win_amd64.whl → 2.13.0.dev202512021731__cp310-cp310-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. kumoai/__init__.py +6 -9
  2. kumoai/_version.py +1 -1
  3. kumoai/client/client.py +9 -13
  4. kumoai/client/rfm.py +15 -7
  5. kumoai/connector/utils.py +23 -2
  6. kumoai/experimental/rfm/__init__.py +164 -46
  7. kumoai/experimental/rfm/backend/__init__.py +0 -0
  8. kumoai/experimental/rfm/backend/local/__init__.py +38 -0
  9. kumoai/experimental/rfm/backend/local/table.py +244 -0
  10. kumoai/experimental/rfm/backend/snow/__init__.py +32 -0
  11. kumoai/experimental/rfm/backend/sqlite/__init__.py +30 -0
  12. kumoai/experimental/rfm/backend/sqlite/table.py +124 -0
  13. kumoai/experimental/rfm/base/__init__.py +7 -0
  14. kumoai/experimental/rfm/base/column.py +66 -0
  15. kumoai/experimental/rfm/{local_table.py → base/table.py} +71 -139
  16. kumoai/experimental/rfm/{local_graph.py → graph.py} +144 -57
  17. kumoai/experimental/rfm/infer/__init__.py +2 -0
  18. kumoai/experimental/rfm/infer/stype.py +35 -0
  19. kumoai/experimental/rfm/local_graph_sampler.py +0 -2
  20. kumoai/experimental/rfm/local_graph_store.py +12 -11
  21. kumoai/experimental/rfm/local_pquery_driver.py +2 -2
  22. kumoai/experimental/rfm/rfm.py +83 -28
  23. kumoai/experimental/rfm/sagemaker.py +138 -0
  24. kumoai/experimental/rfm/utils.py +1 -120
  25. kumoai/kumolib.cp310-win_amd64.pyd +0 -0
  26. kumoai/spcs.py +1 -3
  27. kumoai/testing/decorators.py +1 -1
  28. kumoai/utils/progress_logger.py +10 -4
  29. {kumoai-2.12.0.dev202511071730.dist-info → kumoai-2.13.0.dev202512021731.dist-info}/METADATA +11 -2
  30. {kumoai-2.12.0.dev202511071730.dist-info → kumoai-2.13.0.dev202512021731.dist-info}/RECORD +33 -23
  31. {kumoai-2.12.0.dev202511071730.dist-info → kumoai-2.13.0.dev202512021731.dist-info}/WHEEL +0 -0
  32. {kumoai-2.12.0.dev202511071730.dist-info → kumoai-2.13.0.dev202512021731.dist-info}/licenses/LICENSE +0 -0
  33. {kumoai-2.12.0.dev202511071730.dist-info → kumoai-2.13.0.dev202512021731.dist-info}/top_level.txt +0 -0
kumoai/experimental/rfm/backend/snow/__init__.py
@@ -0,0 +1,32 @@
+ from typing import Any, TypeAlias
+
+ try:
+     import snowflake.connector
+ except ImportError:
+     raise ImportError("No module named 'snowflake'. Please install Kumo SDK "
+                       "with the 'snowflake' extension via "
+                       "`pip install kumoai[snowflake]`.")
+
+ Connection: TypeAlias = snowflake.connector.SnowflakeConnection
+
+
+ def connect(**kwargs: Any) -> Connection:
+     r"""Opens a connection to a :class:`snowflake` database.
+
+     If available, will return a connection to the active session.
+
+     kwargs: Connection arguments, following the :class:`snowflake` protocol.
+     """
+     try:
+         from snowflake.snowpark.context import get_active_session
+         return get_active_session().connection
+     except Exception:
+         pass
+
+     return snowflake.connector.connect(**kwargs)
+
+
+ __all__ = [
+     'connect',
+     'Connection',
+ ]
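A minimal usage sketch of the new Snowflake backend entry point (module path taken from the file listing above; all connection parameters below are illustrative placeholders, not values from this diff):

```python
from kumoai.experimental.rfm.backend.snow import connect

# Inside a Snowpark session, `connect()` reuses the active session's
# connection; otherwise the kwargs are forwarded to
# `snowflake.connector.connect(**kwargs)`.
conn = connect(
    account="my_account",      # placeholder
    user="my_user",            # placeholder
    password="...",            # placeholder
    warehouse="my_warehouse",  # placeholder
)
```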
kumoai/experimental/rfm/backend/sqlite/__init__.py
@@ -0,0 +1,30 @@
+ from pathlib import Path
+ from typing import Any, TypeAlias, Union
+
+ try:
+     import adbc_driver_sqlite.dbapi as adbc
+ except ImportError:
+     raise ImportError("No module named 'adbc_driver_sqlite'. Please install "
+                       "Kumo SDK with the 'sqlite' extension via "
+                       "`pip install kumoai[sqlite]`.")
+
+ Connection: TypeAlias = adbc.AdbcSqliteConnection
+
+
+ def connect(uri: Union[str, Path, None] = None, **kwargs: Any) -> Connection:
+     r"""Opens a connection to a :class:`sqlite` database.
+
+     uri: The path to the database file to be opened.
+     kwargs: Additional connection arguments, following the
+         :class:`adbc_driver_sqlite` protocol.
+     """
+     return adbc.connect(uri, **kwargs)
+
+
+ from .table import SQLiteTable  # noqa: E402
+
+ __all__ = [
+     'connect',
+     'Connection',
+     'SQLiteTable',
+ ]
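A matching usage sketch for the SQLite backend (the database file name is a placeholder):

```python
from kumoai.experimental.rfm.backend.sqlite import connect

conn = connect("data.db")  # forwards to adbc_driver_sqlite.dbapi.connect()
with conn.cursor() as cursor:
    cursor.execute("SELECT 1")
    print(cursor.fetchone())
```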
kumoai/experimental/rfm/backend/sqlite/table.py
@@ -0,0 +1,124 @@
+ import re
+ from typing import Dict, List, Optional, Sequence, Tuple
+
+ import pyarrow as pa
+ from kumoapi.typing import Dtype, Stype
+ from typing_extensions import Self
+
+ from kumoai.experimental.rfm.backend.sqlite import Connection
+ from kumoai.experimental.rfm.base import Table
+ from kumoai.experimental.rfm.infer import infer_stype
+
+
+ class SQLiteTable(Table):
+     r"""A table backed by a :class:`sqlite` database.
+
+     Args:
+         connection: The connection to a :class:`sqlite` database.
+         name: The name of this table.
+         columns: The selected columns of this table.
+         primary_key: The name of the primary key of this table, if it exists.
+         time_column: The name of the time column of this table, if it exists.
+         end_time_column: The name of the end time column of this table, if it
+             exists.
+     """
+     def __init__(
+         self,
+         connection: Connection,
+         name: str,
+         columns: Optional[Sequence[str]] = None,
+         primary_key: Optional[str] = None,
+         time_column: Optional[str] = None,
+         end_time_column: Optional[str] = None,
+     ) -> None:
+
+         self._connection = connection
+         self._dtype_dict: Dict[str, Dtype] = {}
+
+         with connection.cursor() as cursor:
+             cursor.execute(f"PRAGMA table_info({name})")
+             for _, column, dtype, _, _, is_pkey in cursor.fetchall():
+                 if bool(is_pkey):
+                     if primary_key is not None and primary_key != column:
+                         raise ValueError(f"Found duplicate primary key "
+                                          f"definition '{primary_key}' and "
+                                          f"'{column}' in table '{name}'")
+                     primary_key = column
+
+                 # Determine colun affinity:
+                 dtype = dtype.strip().upper()
+                 if re.search('INT', dtype):
+                     self._dtype_dict[column] = Dtype.int
+                 elif re.search('TEXT|CHAR|CLOB', dtype):
+                     self._dtype_dict[column] = Dtype.string
+                 elif re.search('REAL|FLOA|DOUB', dtype):
+                     self._dtype_dict[column] = Dtype.float
+                 else:  # NUMERIC affinity.
+                     self._dtype_dict[column] = Dtype.unsupported
+
+             if len(self._dtype_dict) > 0:
+                 column_names = ', '.join(self._dtype_dict.keys())
+                 cursor.execute(f"SELECT {column_names} FROM {name} "
+                                f"ORDER BY rowid LIMIT 1000")
+                 self._sample = cursor.fetch_arrow_table()
+
+                 for column_name in list(self._dtype_dict.keys()):
+                     if self._dtype_dict[column_name] == Dtype.unsupported:
+                         dtype = self._sample[column_name].type
+                         if pa.types.is_integer(dtype):
+                             self._dtype_dict[column_name] = Dtype.int
+                         elif pa.types.is_floating(dtype):
+                             self._dtype_dict[column_name] = Dtype.float
+                         elif pa.types.is_decimal(dtype):
+                             self._dtype_dict[column_name] = Dtype.float
+                         elif pa.types.is_string(dtype):
+                             self._dtype_dict[column_name] = Dtype.string
+                         else:
+                             del self._dtype_dict[column_name]
+
+         if len(self._dtype_dict) == 0:
+             raise RuntimeError(f"Table '{name}' does not exist or does not "
+                                f"hold any column with a supported data type")
+
+         super().__init__(
+             name=name,
+             columns=columns or list(self._dtype_dict.keys()),
+             primary_key=primary_key,
+             time_column=time_column,
+             end_time_column=end_time_column,
+         )
+
+     def infer_metadata(self, verbose: bool = True) -> Self:
+         r"""Infers metadata, *i.e.*, primary keys and time columns, in the
+         table.
+
+         Args:
+             verbose: Whether to print verbose output.
+         """
+         return self
+
+     def _has_source_column(self, name: str) -> bool:
+         return name in self._dtype_dict
+
+     def _get_source_dtype(self, name: str) -> Dtype:
+         return self._dtype_dict[name]
+
+     def _get_source_stype(self, name: str, dtype: Dtype) -> Stype:
+         return infer_stype(self._sample[name].to_pandas(), name, dtype)
+
+     def _get_source_foreign_keys(self) -> List[Tuple[str, str, str]]:
+         edges: List[Tuple[str, str, str]] = []
+         with self._connection.cursor() as cursor:
+             cursor.execute(f"PRAGMA foreign_key_list({self.name})")
+             for _, _, dst_table, fkey, pkey, _, _, _ in cursor.fetchall():
+                 edges.append((fkey, dst_table, pkey))
+         return edges
+
+     def _infer_primary_key(self, candidates: List[str]) -> Optional[str]:
+         return None  # TODO
+
+     def _infer_time_column(self, candidates: List[str]) -> Optional[str]:
+         return None  # TODO
+
+     def _num_rows(self) -> Optional[int]:
+         return None
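For orientation, a hedged usage sketch of the new class (file, table, and column names below are placeholders; the constructor reads column affinities via `PRAGMA table_info` and samples up to 1,000 rows to resolve `NUMERIC` affinities, as shown above):

```python
from kumoai.experimental.rfm.backend.sqlite import SQLiteTable, connect

conn = connect("data.db")          # placeholder path
users = SQLiteTable(
    connection=conn,
    name="users",                  # placeholder table name
    primary_key="user_id",         # optional; also detected via PRAGMA table_info
    time_column="created_at",      # optional placeholder
)
users.print_metadata()             # inherited from the base Table class
```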
kumoai/experimental/rfm/base/__init__.py
@@ -0,0 +1,7 @@
+ from .column import Column
+ from .table import Table
+
+ __all__ = [
+     'Column',
+     'Table',
+ ]
kumoai/experimental/rfm/base/column.py
@@ -0,0 +1,66 @@
+ from dataclasses import dataclass
+ from typing import Any
+
+ from kumoapi.typing import Dtype, Stype
+
+
+ @dataclass(init=False, repr=False, eq=False)
+ class Column:
+     stype: Stype
+
+     def __init__(
+         self,
+         name: str,
+         dtype: Dtype,
+         stype: Stype,
+         is_primary_key: bool = False,
+         is_time_column: bool = False,
+         is_end_time_column: bool = False,
+     ) -> None:
+         self._name = name
+         self._dtype = Dtype(dtype)
+         self._is_primary_key = is_primary_key
+         self._is_time_column = is_time_column
+         self._is_end_time_column = is_end_time_column
+         self.stype = Stype(stype)
+
+     @property
+     def name(self) -> str:
+         return self._name
+
+     @property
+     def dtype(self) -> Dtype:
+         return self._dtype
+
+     def __setattr__(self, key: str, val: Any) -> None:
+         if key == 'stype':
+             if isinstance(val, str):
+                 val = Stype(val)
+             assert isinstance(val, Stype)
+             if not val.supports_dtype(self.dtype):
+                 raise ValueError(f"Column '{self.name}' received an "
+                                  f"incompatible semantic type (got "
+                                  f"dtype='{self.dtype}' and stype='{val}')")
+             if self._is_primary_key and val != Stype.ID:
+                 raise ValueError(f"Primary key '{self.name}' must have 'ID' "
+                                  f"semantic type (got '{val}')")
+             if self._is_time_column and val != Stype.timestamp:
+                 raise ValueError(f"Time column '{self.name}' must have "
+                                  f"'timestamp' semantic type (got '{val}')")
+             if self._is_end_time_column and val != Stype.timestamp:
+                 raise ValueError(f"End time column '{self.name}' must have "
+                                  f"'timestamp' semantic type (got '{val}')")
+
+         super().__setattr__(key, val)
+
+     def __hash__(self) -> int:
+         return hash((self.name, self.stype, self.dtype))
+
+     def __eq__(self, other: Any) -> bool:
+         if not isinstance(other, Column):
+             return False
+         return hash(self) == hash(other)
+
+     def __repr__(self) -> str:
+         return (f'{self.__class__.__name__}(name={self.name}, '
+                 f'stype={self.stype}, dtype={self.dtype})')
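A short, hedged illustration of the validation that `Column.__setattr__` enforces (`Dtype.int` and `Stype.ID` appear in the diff itself; `Stype.numerical` is assumed to be an available member of `kumoapi.typing.Stype`):

```python
from kumoapi.typing import Dtype, Stype

from kumoai.experimental.rfm.base import Column

pk = Column(name="user_id", dtype=Dtype.int, stype=Stype.ID,
            is_primary_key=True)

# Re-assigning the semantic type of a primary key to anything other than
# `ID` should raise a ValueError per the checks above:
pk.stype = Stype.numerical
```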
kumoai/experimental/rfm/{local_table.py → base/table.py}
@@ -1,5 +1,5 @@
- from dataclasses import dataclass
- from typing import Any, Dict, List, Optional
+ from abc import ABC, abstractmethod
+ from typing import Dict, List, Optional, Sequence, Tuple

  import pandas as pd
  from kumoapi.source_table import UnavailableSourceTable
@@ -9,107 +9,17 @@ from kumoapi.typing import Dtype, Stype
  from typing_extensions import Self

  from kumoai import in_notebook
- from kumoai.experimental.rfm import utils
+ from kumoai.experimental.rfm.base import Column


- @dataclass(init=False, repr=False, eq=False)
- class Column:
-     stype: Stype
-
-     def __init__(
-         self,
-         name: str,
-         dtype: Dtype,
-         stype: Stype,
-         is_primary_key: bool = False,
-         is_time_column: bool = False,
-         is_end_time_column: bool = False,
-     ) -> None:
-         self._name = name
-         self._dtype = Dtype(dtype)
-         self._is_primary_key = is_primary_key
-         self._is_time_column = is_time_column
-         self._is_end_time_column = is_end_time_column
-         self.stype = Stype(stype)
-
-     @property
-     def name(self) -> str:
-         return self._name
-
-     @property
-     def dtype(self) -> Dtype:
-         return self._dtype
-
-     def __setattr__(self, key: str, val: Any) -> None:
-         if key == 'stype':
-             if isinstance(val, str):
-                 val = Stype(val)
-             assert isinstance(val, Stype)
-             if not val.supports_dtype(self.dtype):
-                 raise ValueError(f"Column '{self.name}' received an "
-                                  f"incompatible semantic type (got "
-                                  f"dtype='{self.dtype}' and stype='{val}')")
-             if self._is_primary_key and val != Stype.ID:
-                 raise ValueError(f"Primary key '{self.name}' must have 'ID' "
-                                  f"semantic type (got '{val}')")
-             if self._is_time_column and val != Stype.timestamp:
-                 raise ValueError(f"Time column '{self.name}' must have "
-                                  f"'timestamp' semantic type (got '{val}')")
-             if self._is_end_time_column and val != Stype.timestamp:
-                 raise ValueError(f"End time column '{self.name}' must have "
-                                  f"'timestamp' semantic type (got '{val}')")
-
-         super().__setattr__(key, val)
-
-     def __hash__(self) -> int:
-         return hash((self.name, self.stype, self.dtype))
-
-     def __eq__(self, other: Any) -> bool:
-         if not isinstance(other, Column):
-             return False
-         return hash(self) == hash(other)
-
-     def __repr__(self) -> str:
-         return (f'{self.__class__.__name__}(name={self.name}, '
-                 f'stype={self.stype}, dtype={self.dtype})')
-
-
- class LocalTable:
-     r"""A table backed by a :class:`pandas.DataFrame`.
-
-     A :class:`LocalTable` fully specifies the relevant metadata, *i.e.*
-     selected columns, column semantic types, primary keys and time columns.
-     :class:`LocalTable` is used to create a :class:`LocalGraph`.
-
-     .. code-block:: python
-
-         import pandas as pd
-         import kumoai.experimental.rfm as rfm
-
-         # Load data from a CSV file:
-         df = pd.read_csv("data.csv")
-
-         # Create a table from a `pandas.DataFrame` and infer its metadata ...
-         table = rfm.LocalTable(df, name="my_table").infer_metadata()
-
-         # ... or create a table explicitly:
-         table = rfm.LocalTable(
-             df=df,
-             name="my_table",
-             primary_key="id",
-             time_column="time",
-             end_time_column=None,
-         )
-
-         # Verify metadata:
-         table.print_metadata()
-
-         # Change the semantic type of a column:
-         table[column].stype = "text"
+ class Table(ABC):
+     r"""A :class:`Table` fully specifies the relevant metadata of a single
+     table, *i.e.* its selected columns, data types, semantic types, primary
+     keys and time columns.

      Args:
-         df: The data frame to create the table from.
-         name: The name of the table.
+         name: The name of this table.
+         columns: The selected columns of this table.
          primary_key: The name of the primary key of this table, if it exists.
          time_column: The name of the time column of this table, if it exists.
          end_time_column: The name of the end time column of this table, if it
@@ -117,46 +27,40 @@ class LocalTable:
      """
      def __init__(
          self,
-         df: pd.DataFrame,
          name: str,
+         columns: Optional[Sequence[str]] = None,
          primary_key: Optional[str] = None,
          time_column: Optional[str] = None,
          end_time_column: Optional[str] = None,
      ) -> None:

-         if df.empty:
-             raise ValueError("Data frame must have at least one row")
-         if isinstance(df.columns, pd.MultiIndex):
-             raise ValueError("Data frame must not have a multi-index")
-         if not df.columns.is_unique:
-             raise ValueError("Data frame must have unique column names")
-         if any(col == '' for col in df.columns):
-             raise ValueError("Data frame must have non-empty column names")
-
-         df = df.copy(deep=False)
-
-         self._data = df
          self._name = name
          self._primary_key: Optional[str] = None
          self._time_column: Optional[str] = None
          self._end_time_column: Optional[str] = None

          self._columns: Dict[str, Column] = {}
-         for column_name in df.columns:
+         for column_name in columns or []:
              self.add_column(column_name)

          if primary_key is not None:
+             if primary_key not in self:
+                 self.add_column(primary_key)
              self.primary_key = primary_key

          if time_column is not None:
+             if time_column not in self:
+                 self.add_column(time_column)
              self.time_column = time_column

          if end_time_column is not None:
+             if end_time_column not in self:
+                 self.add_column(end_time_column)
              self.end_time_column = end_time_column

      @property
      def name(self) -> str:
-         r"""The name of the table."""
+         r"""The name of this table."""
          return self._name

      # Data column #############################################################
@@ -200,24 +104,25 @@ class LocalTable:
              raise KeyError(f"Column '{name}' already exists in table "
                             f"'{self.name}'")

-         if name not in self._data.columns:
-             raise KeyError(f"Column '{name}' does not exist in the underyling "
-                            f"data frame")
+         if not self._has_source_column(name):
+             raise KeyError(f"Column '{name}' does not exist in the underlying "
+                            f"source table")

          try:
-             dtype = utils.to_dtype(self._data[name])
+             dtype = self._get_source_dtype(name)
          except Exception as e:
-             raise RuntimeError(f"Data type inference for column '{name}' in "
-                                f"table '{self.name}' failed. Consider "
-                                f"changing the data type of the column or "
-                                f"removing it from the table.") from e
+             raise RuntimeError(f"Could not obtain data type for column "
+                                f"'{name}' in table '{self.name}'. Change "
+                                f"the data type of the column in the source "
+                                f"table or remove it from the table.") from e
+
          try:
-             stype = utils.infer_stype(self._data[name], name, dtype)
+             stype = self._get_source_stype(name, dtype)
          except Exception as e:
-             raise RuntimeError(f"Semantic type inference for column '{name}' "
-                                f"in table '{self.name}' failed. Consider "
-                                f"changing the data type of the column or "
-                                f"removing it from the table.") from e
+             raise RuntimeError(f"Could not obtain semantic type for column "
+                                f"'{name}' in table '{self.name}'. Change "
+                                f"the data type of the column in the source "
+                                f"table or remove it from the table.") from e

          self._columns[name] = Column(
              name=name,
@@ -432,12 +337,14 @@ class LocalTable:
          })

      def print_metadata(self) -> None:
-         r"""Prints the :meth:`~LocalTable.metadata` of the table."""
+         r"""Prints the :meth:`~metadata` of this table."""
+         num_rows = self._num_rows()
+         num_rows_repr = ' ({num_rows:,} rows)' if num_rows is not None else ''
+
          if in_notebook():
              from IPython.display import Markdown, display
-             display(
-                 Markdown(f"### 🏷️ Metadata of Table `{self.name}` "
-                          f"({len(self._data):,} rows)"))
+             md_repr = f"### 🏷️ Metadata of Table `{self.name}`{num_rows_repr}"
+             display(Markdown(md_repr))
              df = self.metadata
              try:
                  if hasattr(df.style, 'hide'):
@@ -447,8 +354,7 @@ class LocalTable:
              except ImportError:
                  print(df.to_string(index=False))  # missing jinja2
          else:
-             print(f"🏷️ Metadata of Table '{self.name}' "
-                   f"({len(self._data):,} rows):")
+             print(f"🏷️ Metadata of Table '{self.name}'{num_rows_repr}")
              print(self.metadata.to_string(index=False))

      def infer_metadata(self, verbose: bool = True) -> Self:
@@ -478,11 +384,7 @@ class LocalTable:
              column.name for column in self.columns if is_candidate(column)
          ]

-         if primary_key := utils.detect_primary_key(
-                 table_name=self.name,
-                 df=self._data,
-                 candidates=candidates,
-         ):
+         if primary_key := self._infer_primary_key(candidates):
              self.primary_key = primary_key
              logs.append(f"primary key '{primary_key}'")

@@ -493,7 +395,7 @@ class LocalTable:
              if column.stype == Stype.timestamp
              and column.name != self._end_time_column
          ]
-         if time_column := utils.detect_time_column(self._data, candidates):
+         if time_column := self._infer_time_column(candidates):
              self.time_column = time_column
              logs.append(f"time column '{time_column}'")

@@ -543,3 +445,33 @@ class LocalTable:
                  f' time_column={self._time_column},\n'
                  f' end_time_column={self._end_time_column},\n'
                  f')')
+
+     # Abstract method #########################################################
+
+     @abstractmethod
+     def _has_source_column(self, name: str) -> bool:
+         pass
+
+     @abstractmethod
+     def _get_source_dtype(self, name: str) -> Dtype:
+         pass
+
+     @abstractmethod
+     def _get_source_stype(self, name: str, dtype: Dtype) -> Stype:
+         pass
+
+     @abstractmethod
+     def _get_source_foreign_keys(self) -> List[Tuple[str, str, str]]:
+         pass
+
+     @abstractmethod
+     def _infer_primary_key(self, candidates: List[str]) -> Optional[str]:
+         pass
+
+     @abstractmethod
+     def _infer_time_column(self, candidates: List[str]) -> Optional[str]:
+         pass
+
+     @abstractmethod
+     def _num_rows(self) -> Optional[int]:
+         pass
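To make the new contract concrete, here is a hedged sketch of a minimal backend that subclasses the abstract `Table` above. The class name `DataFrameTable` and its simplified dtype mapping are hypothetical and not part of this diff; the shipped backends (`backend/local/table.py`, `backend/sqlite/table.py`) implement these hooks more thoroughly. The `infer_stype(series, name, dtype)` call follows the signature used by `SQLiteTable` above.

```python
from typing import List, Optional, Tuple

import pandas as pd
from kumoapi.typing import Dtype, Stype

from kumoai.experimental.rfm.base import Table
from kumoai.experimental.rfm.infer import infer_stype


class DataFrameTable(Table):  # hypothetical example, not in the package
    def __init__(self, df: pd.DataFrame, name: str, **kwargs) -> None:
        self._df = df
        super().__init__(name=name, columns=list(df.columns), **kwargs)

    def _has_source_column(self, name: str) -> bool:
        return name in self._df.columns

    def _get_source_dtype(self, name: str) -> Dtype:
        # Simplified mapping for illustration only:
        if pd.api.types.is_integer_dtype(self._df[name]):
            return Dtype.int
        if pd.api.types.is_float_dtype(self._df[name]):
            return Dtype.float
        return Dtype.string

    def _get_source_stype(self, name: str, dtype: Dtype) -> Stype:
        return infer_stype(self._df[name], name, dtype)

    def _get_source_foreign_keys(self) -> List[Tuple[str, str, str]]:
        return []  # plain data frames carry no foreign-key metadata

    def _infer_primary_key(self, candidates: List[str]) -> Optional[str]:
        return candidates[0] if candidates else None

    def _infer_time_column(self, candidates: List[str]) -> Optional[str]:
        return candidates[0] if candidates else None

    def _num_rows(self) -> Optional[int]:
        return len(self._df)
```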