FlowerPower 0.9.13.1__py3-none-any.whl → 1.0.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (85) hide show
  1. flowerpower/__init__.py +17 -2
  2. flowerpower/cfg/__init__.py +201 -149
  3. flowerpower/cfg/base.py +122 -24
  4. flowerpower/cfg/pipeline/__init__.py +254 -0
  5. flowerpower/cfg/pipeline/adapter.py +66 -0
  6. flowerpower/cfg/pipeline/run.py +40 -11
  7. flowerpower/cfg/pipeline/schedule.py +69 -79
  8. flowerpower/cfg/project/__init__.py +149 -0
  9. flowerpower/cfg/project/adapter.py +57 -0
  10. flowerpower/cfg/project/job_queue.py +165 -0
  11. flowerpower/cli/__init__.py +92 -37
  12. flowerpower/cli/job_queue.py +878 -0
  13. flowerpower/cli/mqtt.py +32 -1
  14. flowerpower/cli/pipeline.py +559 -406
  15. flowerpower/cli/utils.py +29 -18
  16. flowerpower/flowerpower.py +12 -8
  17. flowerpower/fs/__init__.py +20 -2
  18. flowerpower/fs/base.py +350 -26
  19. flowerpower/fs/ext.py +797 -216
  20. flowerpower/fs/storage_options.py +1097 -55
  21. flowerpower/io/base.py +13 -18
  22. flowerpower/io/loader/__init__.py +28 -0
  23. flowerpower/io/loader/deltatable.py +7 -10
  24. flowerpower/io/metadata.py +1 -0
  25. flowerpower/io/saver/__init__.py +28 -0
  26. flowerpower/io/saver/deltatable.py +4 -3
  27. flowerpower/job_queue/__init__.py +252 -0
  28. flowerpower/job_queue/apscheduler/__init__.py +11 -0
  29. flowerpower/job_queue/apscheduler/_setup/datastore.py +110 -0
  30. flowerpower/job_queue/apscheduler/_setup/eventbroker.py +93 -0
  31. flowerpower/job_queue/apscheduler/manager.py +1063 -0
  32. flowerpower/job_queue/apscheduler/setup.py +524 -0
  33. flowerpower/job_queue/apscheduler/trigger.py +169 -0
  34. flowerpower/job_queue/apscheduler/utils.py +309 -0
  35. flowerpower/job_queue/base.py +382 -0
  36. flowerpower/job_queue/rq/__init__.py +10 -0
  37. flowerpower/job_queue/rq/_trigger.py +37 -0
  38. flowerpower/job_queue/rq/concurrent_workers/gevent_worker.py +226 -0
  39. flowerpower/job_queue/rq/concurrent_workers/thread_worker.py +231 -0
  40. flowerpower/job_queue/rq/manager.py +1449 -0
  41. flowerpower/job_queue/rq/setup.py +150 -0
  42. flowerpower/job_queue/rq/utils.py +69 -0
  43. flowerpower/pipeline/__init__.py +5 -0
  44. flowerpower/pipeline/base.py +118 -0
  45. flowerpower/pipeline/io.py +407 -0
  46. flowerpower/pipeline/job_queue.py +505 -0
  47. flowerpower/pipeline/manager.py +1586 -0
  48. flowerpower/pipeline/registry.py +560 -0
  49. flowerpower/pipeline/runner.py +560 -0
  50. flowerpower/pipeline/visualizer.py +142 -0
  51. flowerpower/plugins/mqtt/__init__.py +12 -0
  52. flowerpower/plugins/mqtt/cfg.py +16 -0
  53. flowerpower/plugins/mqtt/manager.py +789 -0
  54. flowerpower/settings.py +110 -0
  55. flowerpower/utils/logging.py +21 -0
  56. flowerpower/utils/misc.py +57 -9
  57. flowerpower/utils/sql.py +122 -24
  58. flowerpower/utils/templates.py +2 -142
  59. flowerpower-1.0.0b1.dist-info/METADATA +324 -0
  60. flowerpower-1.0.0b1.dist-info/RECORD +94 -0
  61. flowerpower/_web/__init__.py +0 -61
  62. flowerpower/_web/routes/config.py +0 -103
  63. flowerpower/_web/routes/pipelines.py +0 -173
  64. flowerpower/_web/routes/scheduler.py +0 -136
  65. flowerpower/cfg/pipeline/tracker.py +0 -14
  66. flowerpower/cfg/project/open_telemetry.py +0 -8
  67. flowerpower/cfg/project/tracker.py +0 -11
  68. flowerpower/cfg/project/worker.py +0 -19
  69. flowerpower/cli/scheduler.py +0 -309
  70. flowerpower/cli/web.py +0 -44
  71. flowerpower/event_handler.py +0 -23
  72. flowerpower/mqtt.py +0 -609
  73. flowerpower/pipeline.py +0 -2499
  74. flowerpower/scheduler.py +0 -680
  75. flowerpower/tui.py +0 -79
  76. flowerpower/utils/datastore.py +0 -186
  77. flowerpower/utils/eventbroker.py +0 -127
  78. flowerpower/utils/executor.py +0 -58
  79. flowerpower/utils/trigger.py +0 -140
  80. flowerpower-0.9.13.1.dist-info/METADATA +0 -586
  81. flowerpower-0.9.13.1.dist-info/RECORD +0 -76
  82. /flowerpower/{cfg/pipeline/params.py → cli/worker.py} +0 -0
  83. {flowerpower-0.9.13.1.dist-info → flowerpower-1.0.0b1.dist-info}/WHEEL +0 -0
  84. {flowerpower-0.9.13.1.dist-info → flowerpower-1.0.0b1.dist-info}/entry_points.txt +0 -0
  85. {flowerpower-0.9.13.1.dist-info → flowerpower-1.0.0b1.dist-info}/top_level.txt +0 -0
flowerpower/tui.py DELETED
@@ -1,79 +0,0 @@
1
- from textual.app import App
2
- from textual.layouts.grid import GridLayout
3
- from textual.reactive import Reactive
4
- from textual.scroll_view import ScrollView
5
- from textual.widgets import Footer, Header, Static
6
-
7
- from flowerpower.pipeline import PipelineManager
8
-
9
-
10
class PipelineList(Static):
    """Static widget that renders the known pipelines as a bullet list."""

    def __init__(self, pipelines):
        super().__init__()
        # Items to display; each is expected to expose a `name` attribute
        # (assumption from usage in render() — TODO confirm against caller).
        self.pipelines = pipelines

    def render(self):
        """Return one `- <name>` line per pipeline, newline-joined."""
        lines = [f"- {pipeline.name}" for pipeline in self.pipelines]
        return "\n".join(lines)
17
-
18
-
19
class FlowerPowerTUI(App):
    """Textual TUI listing FlowerPower pipelines with a details pane."""

    # Reactive list of pipelines currently known to the manager.
    pipelines = Reactive([])

    async def on_load(self):
        # Key bindings: quit / add pipeline / delete pipeline.
        await self.bind("q", "quit")
        await self.bind("a", "add_pipeline")
        await self.bind("d", "delete_pipeline")

    async def on_mount(self):
        self.pipeline_manager = PipelineManager()
        self.refresh_pipelines()

        # Two-column layout: pipeline list (left), details view (right).
        grid = await self.view.dock_grid(edge="top")
        grid.add_column(fraction=1, name="left", min_size=30)
        grid.add_column(fraction=3, name="right")
        grid.add_row(fraction=1, name="row")
        grid.add_areas(
            area_left="left,row",
            area_right="right,row",
        )

        header = Header()
        footer = Footer()
        self.pipeline_list = PipelineList(self.pipelines)
        self.details_view = ScrollView()

        # Dock chrome first, then place the widgets into the grid areas.
        await self.view.dock(header, edge="top")
        await self.view.dock(footer, edge="bottom")
        grid.place(area_left=self.pipeline_list, area_right=self.details_view)
        await self.view.dock(grid)

    def refresh_pipelines(self):
        # Re-query the manager for the current set of pipelines.
        self.pipelines = self.pipeline_manager.find_pipelines()

    async def action_add_pipeline(self):
        # TODO: logic for adding a new pipeline.
        pass

    async def action_delete_pipeline(self):
        # TODO: logic for deleting a pipeline.
        pass

    async def on_key(self, event):
        if event.key == "up":
            # TODO: navigate up in the pipeline list.
            pass
        elif event.key == "down":
            # TODO: navigate down in the pipeline list.
            pass
        elif event.key == "enter":
            # TODO: show details for the selected pipeline.
            pass
76
-
77
-
78
if __name__ == "__main__":
    # Launch the TUI when the module is executed directly.
    app = FlowerPowerTUI()
    app.run()
@@ -1,186 +0,0 @@
1
- ALL_DATA_STORES = [
2
- "sqlalchemy",
3
- "asyncpg",
4
- "psycopg2",
5
- "postgresql",
6
- "sqlite",
7
- "sqlite3",
8
- "mysql",
9
- "mongodb",
10
- "local",
11
- "memory",
12
- ]
13
-
14
-
15
- class DataStore:
16
- def __init__(
17
- self,
18
- type: str | None = None,
19
- engine_or_uri: str | None = None,
20
- schema: str | None = "flowerpower",
21
- username: str | None = None,
22
- password: str | None = None,
23
- ssl: bool = False,
24
- **kwargs,
25
- ):
26
- self.type = type or "memory"
27
- self.engine_or_uri = engine_or_uri
28
- self.sqla_engine = None
29
- self.schema = schema
30
- self.username = username
31
- self.password = password
32
- self.ssl = ssl
33
- self._kwargs = kwargs
34
-
35
- if self.type not in ALL_DATA_STORES:
36
- raise ValueError(
37
- f"Invalid data store type: {type}. Valid data store types are: {ALL_DATA_STORES}"
38
- )
39
- if (
40
- type
41
- in [
42
- "sqlalchemy",
43
- "postgresql",
44
- "asyncpg",
45
- "psycopg2",
46
- "sqlite",
47
- "sqlite3",
48
- "mysql",
49
- "mongodb",
50
- ]
51
- and not engine_or_uri
52
- ):
53
- raise ValueError(f"Data store type {type} requires an engine or uri")
54
-
55
- def _gen_uri(self):
56
- import urllib.parse
57
-
58
- if self.password and "@" not in self.engine_or_uri:
59
- if not self.username:
60
- raise ValueError(
61
- "Data store type `sqlalchemy` requires a username when a password is provided"
62
- )
63
- password = urllib.parse.quote(self.password)
64
- self.engine_or_uri = self.engine_or_uri.replace(
65
- "://", f"://{self.username}:{password}@"
66
- )
67
- if self.ssl and "?ssl" not in self.engine_or_uri:
68
- self.engine_or_uri = self.engine_or_uri + "?ssl=allow"
69
-
70
- # asyncio.run(self.setup_db())
71
- # self.setup_db()
72
-
73
- def _setup_sqlalchemy(self):
74
- from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
75
- from sqlalchemy.ext.asyncio import create_async_engine
76
-
77
- self._gen_uri()
78
- self.setup_db()
79
- self.sqla_engine = create_async_engine(self.engine_or_uri)
80
- self._data_store = SQLAlchemyDataStore(self.sqla_engine, schema=self.schema)
81
-
82
- def _setup_mongodb(self, uri: str):
83
- from apscheduler.datastores.mongodb import MongoDBDataStore
84
-
85
- self._data_store = MongoDBDataStore(self.engine_or_uri, database=self.schema)
86
-
87
- def _setup_memory(self):
88
- from apscheduler.datastores.memory import MemoryDataStore
89
-
90
- self._data_store = MemoryDataStore()
91
-
92
- def setup(
93
- self,
94
- ):
95
- if self.type in [
96
- "sqlalchemy",
97
- "sqlite",
98
- "postgresql",
99
- "asyncpg",
100
- "psychopg2",
101
- "sqlite3",
102
- "mysql",
103
- ]:
104
- self._setup_sqlalchemy()
105
- elif self.type == "mongodb":
106
- self._setup_mongodb()
107
- else:
108
- self._setup_memory()
109
-
110
- def get(self) -> tuple:
111
- return self._data_store, self.sqla_engine
112
-
113
- async def _setup_db(self):
114
- from sqlalchemy import text
115
- from sqlalchemy.ext.asyncio import create_async_engine
116
-
117
- self._gen_uri()
118
- sqla_engine = create_async_engine(self.engine_or_uri)
119
- try:
120
- async with sqla_engine.begin() as conn:
121
- if self.schema:
122
- await conn.execute(
123
- text(f"CREATE SCHEMA IF NOT EXISTS {self.schema}")
124
- )
125
- await conn.commit()
126
- except Exception as e:
127
- _ = e
128
- database_name = self.engine_or_uri.split("/")[-1].split("?")[0]
129
- temp_engine = create_async_engine(
130
- self.engine_or_uri.replace(f"/{database_name}", "/template1")
131
- )
132
- async with temp_engine.begin() as conn:
133
- await conn.execute(text("COMMIT"))
134
- try:
135
- await conn.execute(text(f"CREATE DATABASE {database_name}"))
136
- # await conn.commit()
137
- except Exception as e:
138
- _ = e
139
- pass
140
- finally:
141
- await conn.execute(text("COMMIT"))
142
- if self.schema:
143
- async with sqla_engine.begin() as conn:
144
- await conn.execute(
145
- text(f"CREATE SCHEMA IF NOT EXISTS {self.schema}")
146
- )
147
- await conn.commit()
148
-
149
- def setup_db(self):
150
- from anyio.from_thread import start_blocking_portal
151
-
152
- with start_blocking_portal() as portal:
153
- portal.call(self._setup_db)
154
-
155
-
156
def setup_data_store(
    type: str,
    engine_or_uri: str,
    schema: str | None = "flowerpower",
    username: str | None = None,
    password: str | None = None,
    ssl: bool = False,
    **kwargs,
) -> tuple:
    """Build a configured DataStore and return ``(data_store, sqla_engine)``."""
    store = DataStore(
        type=type,
        engine_or_uri=engine_or_uri,
        schema=schema,
        username=username,
        password=password,
        ssl=ssl,
        **kwargs,
    )
    store.setup()
    return store.get()
@@ -1,127 +0,0 @@
1
- from sqlalchemy.engine import Engine
2
-
3
- ALL_EVENT_BROKERS = [
4
- "sqlalchemy",
5
- "asyncpg",
6
- "psycopg2",
7
- "postgresql",
8
- "mqtt",
9
- "redis",
10
- "local",
11
- "memory",
12
- ]
13
-
14
-
15
- class EventBroker:
16
- def __init__(
17
- self,
18
- type: str | None = None,
19
- uri: str | None = None,
20
- sqla_engine: Engine | None = None,
21
- host: str | None = None,
22
- port: int = 0,
23
- username: str | None = None,
24
- password: str | None = None,
25
- ):
26
- self.type = type or "memory"
27
- self.uri = uri
28
- self.host = host
29
- self.port = port
30
- self.username = username
31
- self.password = password
32
- self._sqla_engine = sqla_engine
33
-
34
- if self.type not in ALL_EVENT_BROKERS:
35
- raise ValueError(
36
- f"Invalid event broker type: {type}. Valid event broker types are: {ALL_EVENT_BROKERS}"
37
- )
38
- if type in ["sqlalchemy", "asyncpg", "psycopg3", "postgresql"] and not (
39
- sqla_engine or uri
40
- ):
41
- raise ValueError(
42
- f"Event broker type `{type} requires an `sqla_engine` or `uri`"
43
- )
44
- if type == "mqtt" and not ((host and port) or uri):
45
- raise ValueError(
46
- "Event broker type `mqtt` requires a `host` and `port` or `uri`"
47
- )
48
- if type == "redis" and not (uri or (host and port)):
49
- raise ValueError(
50
- "Event broker type `redis` requires a `uri` or `host` and `port`"
51
- )
52
-
53
- def _setup_asyncpg_event_broker(self):
54
- from apscheduler.eventbrokers.asyncpg import AsyncpgEventBroker
55
-
56
- if self._sqla_engine is None:
57
- self._event_broker = AsyncpgEventBroker.from_dsn(dsn=self.uri)
58
- else:
59
- self._event_broker = AsyncpgEventBroker.from_async_sqla_engine(
60
- engine=self._sqla_engine
61
- )
62
-
63
- def _setup_mqtt_event_broker(self):
64
- from apscheduler.eventbrokers.mqtt import MQTTEventBroker
65
-
66
- if self.uri is not None:
67
- if ":" in self.uri:
68
- self.host, self.port = self.uri.split(":")
69
- self.port = int(self.port)
70
- else:
71
- self.host = self.uri
72
-
73
- self._event_broker = MQTTEventBroker(
74
- self.host, self.port, topic="flowerpower/scheduler"
75
- )
76
- if (self.username is not None) and (self.password is not None):
77
- self._event_broker._client.username_pw_set(
78
- self.username,
79
- self.password,
80
- )
81
-
82
- def _setup_redis_event_broker(self):
83
- from apscheduler.eventbrokers.redis import RedisEventBroker
84
-
85
- if self.uri is None:
86
- self.uri = f"redis://{self.host}:{self.port}"
87
- self._event_broker = RedisEventBroker(self.uri)
88
-
89
- def _setup_local_event_broker(self):
90
- from apscheduler.eventbrokers.local import LocalEventBroker
91
-
92
- self._event_broker = LocalEventBroker()
93
-
94
- def setup(self):
95
- if self.type in ["sqlalchemy", "asyncpg", "psycopg3", "postgresql"]:
96
- self._setup_asyncpg_event_broker()
97
- elif self.type == "mqtt":
98
- self._setup_mqtt_event_broker()
99
- elif self.type == "redis":
100
- self._setup_redis_event_broker()
101
- else:
102
- self._setup_local_event_broker()
103
-
104
- def get(self):
105
- return self._event_broker
106
-
107
-
108
def setup_event_broker(
    type: str,
    uri: str | None = None,
    sqla_engine: Engine | None = None,
    host: str | None = None,
    port: int = 0,
    username: str | None = None,
    password: str | None = None,
):
    """Build a configured EventBroker and return the underlying broker object."""
    broker = EventBroker(
        type=type,
        uri=uri,
        sqla_engine=sqla_engine,
        host=host,
        port=port,
        username=username,
        password=password,
    )
    broker.setup()
    return broker.get()
@@ -1,58 +0,0 @@
1
- import importlib
2
- import importlib.util
3
-
4
- from hamilton.execution import executors
5
- from loguru import logger
6
-
7
# Optional third-party executors: bind the module if installed, else None so
# get_executor() can test for availability cheaply.
if importlib.util.find_spec("distributed") is not None:
    from dask import distributed
else:
    distributed = None

if importlib.util.find_spec("ray") is not None:
    import ray
else:
    ray = None
17
-
18
-
19
def get_executor(mode: str, max_tasks: int = 10, num_cpus: int = 4):
    """Resolve a Hamilton remote task executor for the given execution mode.

    Args:
        mode: Execution mode. Process aliases ("processpool", "process",
            "multiprocessing"), thread aliases ("threadpool", "future_adapter",
            "threading"), "dask", "ray", or anything else for synchronous
            local execution.
        max_tasks: Max concurrent tasks for the process/thread executors.
        num_cpus: CPU count handed to the Ray executor.

    Returns:
        tuple: ``(remote_executor, shutdown)`` where ``remote_executor`` is the
        executor instance or None (caller falls back to synchronous local
        execution) and ``shutdown`` is a cleanup callable or None.
    """
    shutdown = None
    remote_executor = None

    if mode in ("processpool", "process", "multiprocessing"):
        remote_executor = executors.MultiProcessingExecutor(max_tasks=max_tasks)
    elif mode in ("threadpool", "future_adapter", "threading"):
        remote_executor = executors.MultiThreadingExecutor(max_tasks=max_tasks)
    elif mode == "dask":
        if distributed:
            from hamilton.plugins import h_dask

            cluster = distributed.LocalCluster()
            client = distributed.Client(cluster)
            remote_executor = h_dask.DaskExecutor(client=client)
            # Closing the cluster tears down the client/scheduler as well.
            shutdown = cluster.close
        else:
            # fixed: message literals were concatenated without separators,
            # producing one garbled run of backticked commands.
            logger.info(
                "Dask is not installed. If you want to use Dask for distributed "
                "execution, install it via `pip install dask`, "
                "`conda install dask` or `pip install flowerpower[dask]`"
            )
    elif mode == "ray":
        if ray:
            from hamilton.plugins import h_ray

            remote_executor = h_ray.RayTaskExecutor(num_cpus=num_cpus)
            shutdown = ray.shutdown
        else:
            logger.info(
                "Ray is not installed. If you want to use Ray for distributed "
                "execution, install it via `pip install ray`, "
                "`conda install ray` or `pip install flowerpower[ray]`"
            )
    return remote_executor, shutdown
@@ -1,140 +0,0 @@
1
- import datetime as dt
2
-
3
- # from tzlocal import get_localzone
4
-
5
ALL_TRIGGER_TYPES = [
    "cron",
    "interval",
    "calendarinterval",
    "date",
]

# Keyword arguments each trigger type accepts; used both for filtering and
# for validation of caller-supplied kwargs.
ALL_TRIGGER_KWARGS = {
    "cron": [
        "crontab",
        "year",
        "month",
        "week",
        "day",
        "day_of_week",
        "hour",
        "minute",
        "second",
        "start_time",
        "end_time",
        "timezone",
    ],
    "interval": [
        "weeks",
        "days",
        "hours",
        "minutes",
        "seconds",
        "microseconds",
        "start_time",
        "end_time",
    ],
    "calendarinterval": [
        "years",
        "months",
        "weeks",
        "days",
        "hour",
        "minute",
        "second",
        "start_date",
        "end_date",
        "timezone",
    ],
    "date": [
        "run_time",
    ],
}


class Trigger:
    """Factory for APScheduler trigger objects of a fixed trigger type."""

    def __init__(
        self,
        type_: str,
    ):
        """Store the trigger type.

        Args:
            type_: One of ``ALL_TRIGGER_TYPES``.

        Raises:
            ValueError: If ``type_`` is not a known trigger type.
        """
        if type_ not in ALL_TRIGGER_TYPES:
            raise ValueError(
                f"Invalid trigger type: {type_}. Valid trigger types are: {ALL_TRIGGER_TYPES}"
            )
        self.trigger_type = type_

    def _check_kwargs(self, **kwargs):
        """Raise ValueError for any kwarg not valid for this trigger type."""
        for k, v in kwargs.items():
            if k not in ALL_TRIGGER_KWARGS[self.trigger_type]:
                raise ValueError(
                    f"Invalid argument: {k}. Valid arguments are: {ALL_TRIGGER_KWARGS[self.trigger_type]}"
                )

    def _filter_kwargs(self, **kwargs):
        """Drop kwargs that are None or not valid for this trigger type."""
        return {
            k: v
            for k, v in kwargs.items()
            if k in ALL_TRIGGER_KWARGS[self.trigger_type] and v is not None
        }

    def get(self, **kwargs):
        """Build and return the APScheduler trigger for ``self.trigger_type``.

        Invalid or None kwargs are silently filtered out before construction.
        NOTE(review): because filtering runs first, ``_check_kwargs`` can no
        longer fail; kept as defence in depth rather than reordered, since
        callers may rely on the silent-drop behavior.
        """
        kwargs = self._filter_kwargs(**kwargs)
        self._check_kwargs(**kwargs)

        if self.trigger_type == "cron":
            return self._get_cron_trigger(**kwargs)
        elif self.trigger_type == "interval":
            return self._get_interval_trigger(**kwargs)
        elif self.trigger_type == "calendarinterval":
            return self._get_calendar_trigger(**kwargs)
        elif self.trigger_type == "date":
            return self._get_date_trigger(**kwargs)

    def _get_cron_trigger(
        self,
        **kwargs,
    ):
        """Build a CronTrigger; a `crontab` expression overrides field kwargs."""
        from apscheduler.triggers.cron import CronTrigger

        # fixed: removed stray debug `print(crontab)`
        crontab = kwargs.pop("crontab", None)

        if crontab is not None:
            return CronTrigger.from_crontab(crontab)
        else:
            return CronTrigger(
                **kwargs,
            )

    def _get_interval_trigger(
        self,
        **kwargs,
    ):
        """Build an IntervalTrigger from the filtered kwargs."""
        from apscheduler.triggers.interval import IntervalTrigger

        return IntervalTrigger(
            **kwargs,
        )

    def _get_calendar_trigger(
        self,
        **kwargs,
    ):
        """Build a CalendarIntervalTrigger from the filtered kwargs."""
        from apscheduler.triggers.calendarinterval import CalendarIntervalTrigger

        return CalendarIntervalTrigger(
            **kwargs,
        )

    def _get_date_trigger(self, **kwargs):
        """Build a DateTrigger; defaults `run_time` to now.

        NOTE(review): `dt.datetime.now()` is naive local time — confirm the
        scheduler's timezone expectations.
        """
        from apscheduler.triggers.date import DateTrigger

        if "run_time" not in kwargs:
            kwargs["run_time"] = dt.datetime.now()
        return DateTrigger(**kwargs)
136
-
137
-
138
def get_trigger(type_: str, **kwargs):
    """Convenience wrapper: build a trigger of `type_` from keyword args."""
    return Trigger(type_).get(**kwargs)