cloudquery-plugin-sdk 0.1.8.tar.gz → 0.1.9.tar.gz

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Files changed (60)
  1. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/PKG-INFO +9 -9
  2. cloudquery-plugin-sdk-0.1.9/cloudquery/sdk/internal/memdb/memdb.py +99 -0
  3. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/plugin/plugin.py +35 -1
  4. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/schema/table.py +2 -0
  5. cloudquery-plugin-sdk-0.1.9/cloudquery/sdk/serve/plugin.py +400 -0
  6. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/types/json.py +3 -0
  7. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/types/uuid.py +3 -0
  8. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery_plugin_sdk.egg-info/PKG-INFO +9 -9
  9. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery_plugin_sdk.egg-info/requires.txt +8 -8
  10. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/setup.py +9 -9
  11. cloudquery-plugin-sdk-0.1.8/cloudquery/sdk/internal/memdb/memdb.py +0 -40
  12. cloudquery-plugin-sdk-0.1.8/cloudquery/sdk/serve/plugin.py +0 -160
  13. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/README.md +0 -0
  14. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/__init__.py +0 -0
  15. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/internal/__init__.py +0 -0
  16. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/internal/memdb/__init__.py +0 -0
  17. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/internal/servers/__init__.py +0 -0
  18. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/internal/servers/discovery_v1/__init__.py +0 -0
  19. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/internal/servers/discovery_v1/discovery.py +0 -0
  20. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/internal/servers/plugin_v3/__init__.py +0 -0
  21. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/internal/servers/plugin_v3/plugin.py +0 -0
  22. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/message/__init__.py +0 -0
  23. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/message/sync.py +0 -0
  24. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/message/write.py +0 -0
  25. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/plugin/__init__.py +0 -0
  26. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/py.typed +0 -0
  27. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/scalar/__init__.py +0 -0
  28. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/scalar/binary.py +0 -0
  29. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/scalar/bool.py +0 -0
  30. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/scalar/date32.py +0 -0
  31. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/scalar/date64.py +0 -0
  32. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/scalar/float.py +0 -0
  33. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/scalar/int.py +0 -0
  34. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/scalar/json.py +0 -0
  35. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/scalar/list.py +0 -0
  36. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/scalar/scalar.py +0 -0
  37. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/scalar/scalar_factory.py +0 -0
  38. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/scalar/string.py +0 -0
  39. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/scalar/timestamp.py +0 -0
  40. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/scalar/uint.py +0 -0
  41. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/scalar/uuid.py +0 -0
  42. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/scalar/vector.py +0 -0
  43. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/scheduler/__init__.py +0 -0
  44. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/scheduler/scheduler.py +0 -0
  45. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/scheduler/table_resolver.py +0 -0
  46. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/schema/__init__.py +0 -0
  47. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/schema/arrow.py +0 -0
  48. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/schema/column.py +0 -0
  49. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/schema/resource.py +0 -0
  50. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/serve/__init__.py +0 -0
  51. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/transformers/__init__.py +0 -0
  52. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/transformers/openapi.py +0 -0
  53. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/transformers/transformers.py +0 -0
  54. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/types/__init__.py +0 -0
  55. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery_plugin_sdk.egg-info/SOURCES.txt +0 -0
  56. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery_plugin_sdk.egg-info/dependency_links.txt +0 -0
  57. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery_plugin_sdk.egg-info/namespace_packages.txt +0 -0
  58. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery_plugin_sdk.egg-info/not-zip-safe +0 -0
  59. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery_plugin_sdk.egg-info/top_level.txt +0 -0
  60. {cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/setup.cfg +0 -0
{cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: cloudquery-plugin-sdk
- Version: 0.1.8
+ Version: 0.1.9
  Summary: CloudQuery Plugin SDK for Python
  Home-page: https://github.com/cloudquery/plugin-sdk-python
  Author: CloudQuery LTD
@@ -20,24 +20,24 @@ Classifier: Operating System :: OS Independent
  Classifier: Topic :: Internet
  Requires-Python: >=3.7
  Requires-Dist: cloudquery-plugin-pb==0.0.20
- Requires-Dist: exceptiongroup==1.1.3
- Requires-Dist: black==23.10.1
- Requires-Dist: grpcio==1.59.2
- Requires-Dist: grpcio-tools==1.59.2
+ Requires-Dist: exceptiongroup==1.2.0
+ Requires-Dist: black==23.12.0
+ Requires-Dist: grpcio==1.60.0
+ Requires-Dist: grpcio-tools==1.60.0
  Requires-Dist: iniconfig==2.0.0
  Requires-Dist: Jinja2==3.1.2
  Requires-Dist: MarkupSafe==2.1.3
- Requires-Dist: numpy==1.26.1
+ Requires-Dist: numpy==1.26.2
  Requires-Dist: packaging==23.2
- Requires-Dist: pandas==2.1.2
+ Requires-Dist: pandas==2.1.4
  Requires-Dist: pluggy==1.3.0
- Requires-Dist: protobuf==4.24.4
+ Requires-Dist: protobuf==4.25.1
  Requires-Dist: pyarrow==14.0.1
  Requires-Dist: pytest==7.4.3
  Requires-Dist: python-dateutil==2.8.2
  Requires-Dist: pytz==2023.3.post1
  Requires-Dist: six==1.16.0
- Requires-Dist: structlog==23.1.0
+ Requires-Dist: structlog==23.2.0
  Requires-Dist: tomli==2.0.1
  Requires-Dist: tzdata==2023.3
 
cloudquery-plugin-sdk-0.1.9/cloudquery/sdk/internal/memdb/memdb.py (new file)
@@ -0,0 +1,99 @@
+ from cloudquery.sdk import plugin
+ from cloudquery.sdk import message
+ from cloudquery.sdk import schema
+ from typing import List, Generator, Dict
+ import pyarrow as pa
+ from cloudquery.sdk.types import JSONType
+
+ NAME = "memdb"
+ VERSION = "development"
+
+
+ class MemDB(plugin.Plugin):
+     def __init__(self) -> None:
+         super().__init__(
+             NAME, VERSION, opts=plugin.plugin.Options(team="cloudquery", kind="source")
+         )
+         self._db: Dict[str, pa.RecordBatch] = {}
+         self._tables: Dict[str, schema.Table] = {
+             "table_1": schema.Table(
+                 name="table_1",
+                 columns=[
+                     schema.Column(
+                         name="name",
+                         type=pa.string(),
+                         primary_key=True,
+                         not_null=True,
+                         unique=True,
+                     ),
+                     schema.Column(
+                         name="id",
+                         type=pa.string(),
+                         primary_key=True,
+                         not_null=True,
+                         unique=True,
+                         incremental_key=True,
+                     ),
+                 ],
+                 title="Table 1",
+                 description="Test Table 1",
+                 is_incremental=True,
+                 relations=[
+                     schema.Table(
+                         name="table_1_relation_1",
+                         columns=[
+                             schema.Column(
+                                 name="name",
+                                 type=pa.string(),
+                                 primary_key=True,
+                                 not_null=True,
+                                 unique=True,
+                             ),
+                             schema.Column(name="data", type=JSONType()),
+                         ],
+                         title="Table 1 Relation 1",
+                         description="Test Table 1 Relation 1",
+                     )
+                 ],
+             ),
+             "table_2": schema.Table(
+                 name="table_2",
+                 columns=[
+                     schema.Column(
+                         name="name",
+                         type=pa.string(),
+                         primary_key=True,
+                         not_null=True,
+                         unique=True,
+                     ),
+                     schema.Column(name="id", type=pa.string()),
+                 ],
+                 title="Table 2",
+                 description="Test Table 2",
+             ),
+         }
+
+     def get_tables(self, options: plugin.TableOptions = None) -> List[plugin.Table]:
+         tables = list(self._tables.values())
+         return schema.filter_dfs(tables, options.tables, options.skip_tables)
+
+     def sync(
+         self, options: plugin.SyncOptions
+     ) -> Generator[message.SyncMessage, None, None]:
+         for table, record in self._db.items():
+             yield message.SyncInsertMessage(record)
+
+     def write(self, writer: Generator[message.WriteMessage, None, None]) -> None:
+         for msg in writer:
+             if isinstance(msg, message.WriteMigrateTableMessage):
+                 if msg.table.name not in self._db:
+                     self._db[msg.table.name] = msg.table
+                     self._tables[msg.table.name] = msg.table
+             elif isinstance(msg, message.WriteInsertMessage):
+                 table = schema.Table.from_arrow_schema(msg.record.schema)
+                 self._db[table.name] = msg.record
+             else:
+                 raise NotImplementedError(f"Unknown message type {type(msg)}")
+
+     def close(self) -> None:
+         self._db = {}
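
The rewritten memdb doubles as a reference for the 0.1.9 API: it passes plugin.plugin.Options(team=..., kind=...) to the base constructor and declares incremental tables and relations inline. A minimal sketch of wiring such a plugin into the new CLI (the main.py wrapper is hypothetical; PluginCommand comes from the new cloudquery/sdk/serve/plugin.py shown further below):

# Hypothetical main.py for a plugin built on SDK 0.1.9. MemDB is an
# internal test plugin; a real plugin would subclass plugin.Plugin the
# same way rather than import it.
import sys

from cloudquery.sdk.internal.memdb.memdb import MemDB
from cloudquery.sdk.serve.plugin import PluginCommand


def main() -> None:
    p = MemDB()
    PluginCommand(p).run(sys.argv[1:])  # dispatches to "serve" or "package"


if __name__ == "__main__":
    main()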
{cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/plugin/plugin.py
@@ -29,10 +29,32 @@ class SyncOptions:
      backend_options: BackendOptions = None
 
 
+ @dataclass
+ class BuildTarget:
+     os: str = None
+     arch: str = None
+
+
+ @dataclass
+ class Options:
+     dockerfile: str = None
+     build_targets: List[BuildTarget] = None
+     team: str = None
+     kind: str = None
+
+
  class Plugin:
-     def __init__(self, name: str, version: str) -> None:
+     def __init__(self, name: str, version: str, opts: Options = None) -> None:
          self._name = name
          self._version = version
+         self._opts = Options() if opts is None else opts
+         if self._opts.dockerfile is None:
+             self._opts.dockerfile = "Dockerfile"
+         if self._opts.build_targets is None:
+             self._opts.build_targets = [
+                 BuildTarget("linux", "amd64"),
+                 BuildTarget("linux", "arm64"),
+             ]
 
      def init(self, spec: bytes, no_connection: bool = False) -> None:
          pass
@@ -46,6 +68,18 @@ class Plugin:
      def version(self) -> str:
          return self._version
 
+     def team(self) -> str:
+         return self._opts.team
+
+     def kind(self) -> str:
+         return self._opts.kind
+
+     def dockerfile(self) -> str:
+         return self._opts.dockerfile
+
+     def build_targets(self) -> List[BuildTarget]:
+         return self._opts.build_targets
+
      def get_tables(self, options: TableOptions) -> List[Table]:
          raise NotImplementedError()
 
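BuildTarget and Options exist to drive the new package command (see cloudquery/sdk/serve/plugin.py below); when opts is omitted, the constructor falls back to a Dockerfile named "Dockerfile" and to linux/amd64 plus linux/arm64 build targets. A sketch of overriding those defaults, where MyPlugin and its values are hypothetical:

# Hypothetical subclass showing non-default packaging options.
from cloudquery.sdk.plugin.plugin import BuildTarget, Options, Plugin


class MyPlugin(Plugin):
    def __init__(self) -> None:
        super().__init__(
            "my-plugin",
            "v0.0.1",
            opts=Options(
                team="my-team",
                kind="source",
                dockerfile="build/Dockerfile",  # default: "Dockerfile"
                build_targets=[BuildTarget("linux", "amd64")],  # default also adds linux/arm64
            ),
        )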
{cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/schema/table.py
@@ -151,6 +151,8 @@ def filter_dfs_func(tt: List[Table], include, exclude, skip_dependent_tables: bo
      filtered_tables = []
      for t in tt:
          filtered_table = copy.deepcopy(t)
+         for r in filtered_table.relations:
+             r.parent = filtered_table
          filtered_table = _filter_dfs_impl(
              filtered_table, False, include, exclude, skip_dependent_tables
          )
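
The two added lines re-point each relation's parent at the deep-copied parent table before filtering. Tables are typically declared inline without a parent back-reference (as in the new memdb above), and the package command's tables.json writer reads table.parent.name, so the pointer has to be populated here. A rough sketch of the resulting guarantee, assuming Table accepts these minimal constructor arguments and that filter_dfs wraps filter_dfs_func as used in memdb:

from cloudquery.sdk import schema

# Hypothetical tables; a real Table may require more arguments.
child = schema.Table(name="t_child", columns=[])
parent = schema.Table(name="t", columns=[], relations=[child])

filtered = schema.filter_dfs([parent], ["*"], [])
# After the fix, the copied child points back at the copied parent:
assert filtered[0].relations[0].parent.name == "t"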
cloudquery-plugin-sdk-0.1.9/cloudquery/sdk/serve/plugin.py (new file)
@@ -0,0 +1,400 @@
+ import argparse
+ import hashlib
+ import json
+ import logging
+ import os
+ import shutil
+ import subprocess
+ import tarfile
+ from concurrent import futures
+ from pathlib import Path
+
+ import grpc
+ import structlog
+ import sys
+ from cloudquery.discovery_v1 import discovery_pb2_grpc
+ from cloudquery.plugin_v3 import plugin_pb2_grpc
+ from structlog import wrap_logger
+ from cloudquery.sdk import plugin
+
+
+ from cloudquery.sdk.internal.servers.discovery_v1.discovery import DiscoveryServicer
+ from cloudquery.sdk.internal.servers.plugin_v3 import PluginServicer
+ from cloudquery.sdk.plugin.plugin import Plugin
+ from cloudquery.sdk.schema import table
+
+ _IS_WINDOWS = sys.platform == "win32"
+
+ try:
+     import colorama
+ except ImportError:
+     colorama = None
+
+ if _IS_WINDOWS:  # pragma: no cover
+     # On Windows, use colors by default only if Colorama is installed.
+     _has_colors = colorama is not None
+ else:
+     # On other OSes, use colors by default.
+     _has_colors = True
+
+
+ def get_logger(args):
+     log_level_map = {
+         "debug": logging.DEBUG,
+         "info": logging.INFO,
+         "warning": logging.WARNING,
+         "error": logging.ERROR,
+         "critical": logging.CRITICAL,
+     }
+
+     logging.basicConfig(
+         format="%(message)s",
+         stream=sys.stdout,
+         level=log_level_map.get(args.log_level.lower(), logging.INFO),
+     )
+
+     processors = [
+         structlog.contextvars.merge_contextvars,
+         structlog.processors.add_log_level,
+         structlog.processors.StackInfoRenderer(),
+         structlog.dev.set_exc_info,
+         structlog.stdlib.filter_by_level,
+         structlog.processors.TimeStamper(fmt="%Y-%m-%dT%H:%M:%SZ", utc=True),
+     ]
+     if args.log_format == "text":
+         processors.append(
+             structlog.dev.ConsoleRenderer(
+                 colors=os.environ.get("NO_COLOR", "") == ""
+                 and (
+                     os.environ.get("FORCE_COLOR", "") != ""
+                     or (
+                         _has_colors
+                         and sys.stdout is not None
+                         and hasattr(sys.stdout, "isatty")
+                         and sys.stdout.isatty()
+                     )
+                 )
+             )
+         )
+     else:
+         processors.append(structlog.processors.JSONRenderer())
+
+     log = wrap_logger(logging.getLogger(), processors=processors)
+     return log
+
+
+ def calc_sha256_checksum(filename: str):
+     with open(filename, "rb") as f:
+         file_hash = hashlib.sha256()
+         while chunk := f.read(32768):
+             file_hash.update(chunk)
+         return file_hash.hexdigest()
+
+
+ class PluginCommand:
+     def __init__(self, plugin: Plugin):
+         self._plugin = plugin
+
+     def run(self, args):
+         parser = argparse.ArgumentParser()
+         subparsers = parser.add_subparsers(dest="command", required=True)
+
+         self._register_serve_command(subparsers)
+         self._register_package_command(subparsers)
+
+         parsed_args = parser.parse_args(args)
+
+         if parsed_args.command == "serve":
+             self._serve(parsed_args)
+         elif parsed_args.command == "package":
+             self._package(parsed_args)
+         else:
+             parser.print_help()
+             sys.exit(1)
+
+     def _register_serve_command(self, subparsers):
+         serve_parser = subparsers.add_parser("serve", help="Start plugin server")
+         serve_parser.add_argument(
+             "--log-format",
+             type=str,
+             default="text",
+             choices=["text", "json"],
+             help="logging format",
+         )
+         serve_parser.add_argument(
+             "--log-level",
+             type=str,
+             default="info",
+             choices=["trace", "debug", "info", "warn", "error"],
+             help="log level",
+         )
+         # ignored for now
+         serve_parser.add_argument(
+             "--no-sentry",
+             action="store_true",
+             help="disable sentry (placeholder for future use)",
+         )
+         # ignored for now
+         serve_parser.add_argument(
+             "--otel-endpoint",
+             type=str,
+             default="",
+             help="Open Telemetry HTTP collector endpoint (placeholder for future use)",
+         )
+         # ignored for now
+         serve_parser.add_argument(
+             "--otel-endpoint-insecure",
+             type=str,
+             default="",
+             help="Open Telemetry HTTP collector endpoint (for development only) (placeholder for future use)",
+         )
+         serve_parser.add_argument(
+             "--address",
+             type=str,
+             default="localhost:7777",
+             help="address to serve on. can be tcp: 'localhost:7777' or unix socket: '/tmp/plugin.rpc.sock'",
+         )
+         serve_parser.add_argument(
+             "--network",
+             type=str,
+             default="tcp",
+             choices=["tcp", "unix"],
+             help="network to serve on. can be tcp or unix",
+         )
+
+     def _register_package_command(self, subparsers):
+         package_parser = subparsers.add_parser(
+             "package", help="Package the plugin as a Docker image"
+         )
+         package_parser.add_argument(
+             "version", help="version to tag the Docker image with"
+         )
+         package_parser.add_argument("plugin-directory")
+         package_parser.add_argument(
+             "--log-format",
+             type=str,
+             default="text",
+             choices=["text", "json"],
+             help="logging format",
+         )
+         package_parser.add_argument(
+             "--log-level",
+             type=str,
+             default="info",
+             choices=["trace", "debug", "info", "warn", "error"],
+             help="log level",
+         )
+         package_parser.add_argument(
+             "-D",
+             "--dist-dir",
+             type=str,
+             help="dist directory to output the built plugin. (default: <plugin_directory>/dist)",
+         )
+         package_parser.add_argument(
+             "--docs-dir",
+             type=str,
+             help="docs directory containing markdown files to copy to the dist directory. (default: <plugin_directory>/docs)",
+         )
+         package_parser.add_argument(
+             "-m",
+             "--message",
+             type=str,
+             required=True,
+             help="message that summarizes what is new or changed in this version. Use @<file> to read from file. Supports markdown.",
+         )
+
+     def _package(self, args):
+         logger = get_logger(args)
+         self._plugin.set_logger(logger)
+
+         def _is_empty(val):
+             return val == None or len(val) == 0
+
+         if _is_empty(self._plugin.name()):
+             raise Exception("plugin name is required")
+         if _is_empty(self._plugin.team()):
+             raise Exception("plugin team is required")
+         if _is_empty(self._plugin.kind()):
+             raise Exception("plugin kind is required")
+         if _is_empty(self._plugin.dockerfile()):
+             raise Exception("plugin dockerfile is required")
+         if _is_empty(self._plugin.build_targets()):
+             raise Exception("at least one build target is required")
+
+         plugin_directory, version, message = (
+             getattr(args, "plugin-directory"),
+             getattr(args, "version"),
+             getattr(args, "message"),
+         )
+         dist_dir = (
+             "%s/dist" % plugin_directory if args.dist_dir == None else args.dist_dir
+         )
+         docs_dir = (
+             "%s/docs" % plugin_directory if args.docs_dir == None else args.docs_dir
+         )
+         Path(dist_dir).mkdir(0o755, exist_ok=True, parents=True)
+
+         self._copy_docs(logger, docs_dir, dist_dir)
+         self._write_tables_json(logger, dist_dir)
+         supported_targets = self._build_dockerfile(
+             logger, plugin_directory, dist_dir, version
+         )
+         self._write_package_json(logger, dist_dir, message, version, supported_targets)
+         logger.info("Done packaging plugin to '%s'" % dist_dir)
+
+     def _write_package_json(self, logger, dist_dir, message, version, supportedTargets):
+         package_json_path = "%s/package.json" % dist_dir
+         logger.info("Writing package.json to '%s'" % package_json_path)
+         content = {
+             "schema_version": 1,
+             "name": self._plugin.name(),
+             "team": self._plugin.team(),
+             "kind": self._plugin.kind(),
+             "version": version,
+             "message": message,
+             "protocols": [3],
+             "supported_targets": supportedTargets,
+             "package_type": "docker",
+         }
+         with open("%s/package.json" % dist_dir, "w") as f:
+             f.write(json.dumps(content, indent=2))
+
+     def _copy_docs(self, logger, docs_dir, dist_dir):
+         # check is docs_dir exists
+         if not os.path.isdir(docs_dir):
+             raise Exception("docs directory '%s' does not exist" % docs_dir)
+
+         output_docs_dir = "%s/docs" % dist_dir
+         logger.info("Copying docs from '%s' to '%s'" % (docs_dir, output_docs_dir))
+         shutil.copytree(docs_dir, output_docs_dir, dirs_exist_ok=True)
+
+     def _write_tables_json(self, logger, dist_dir):
+         if self._plugin.kind() != "source":
+             return
+
+         tables_json_output_path = "%s/tables.json" % dist_dir
+         logger.info("Writing tables to '%s'" % tables_json_output_path)
+         self._plugin.init(spec=b"", no_connection=True)
+         tables = self._plugin.get_tables(
+             options=plugin.plugin.TableOptions(
+                 tables=["*"], skip_tables=[], skip_dependent_tables=False
+             )
+         )
+         flattened_tables = table.flatten_tables(tables)
+
+         def column_to_json(column: table.Column):
+             return {
+                 "name": column.name,
+                 "type": str(column.type),
+                 "description": column.description,
+                 "incremental_key": column.incremental_key,
+                 "primary_key": column.primary_key,
+                 "not_null": column.not_null,
+                 "unique": column.unique,
+             }
+
+         def table_to_json(table: table.Table):
+             return {
+                 "name": table.name,
+                 "title": table.title,
+                 "description": table.description,
+                 "is_incremental": table.is_incremental,
+                 "parent": table.parent.name if table.parent else "",
+                 "relations": list(map(lambda r: r.name, table.relations)),
+                 "columns": list(map(column_to_json, table.columns)),
+             }
+
+         tables_json = list(map(table_to_json, flattened_tables))
+         with open(tables_json_output_path, "w") as f:
+             f.write(json.dumps(tables_json))
+         logger.info(
+             "Wrote %d tables to '%s'" % (len(tables_json), tables_json_output_path)
+         )
+
+     def _build_dockerfile(self, logger, plugin_dir, dist_dir, version):
+         dockerfile_path = "%s/%s" % (plugin_dir, self._plugin.dockerfile())
+         if not os.path.isfile(dockerfile_path):
+             raise Exception("Dockerfile '%s' does not exist" % dockerfile_path)
+
+         def run_docker_cmd(cmd, plugin_dir):
+             result = subprocess.run(cmd, capture_output=True, cwd=plugin_dir)
+             if result.returncode != 0:
+                 err = (
+                     ""
+                     if result.stderr is None
+                     else result.stderr.decode("ascii").strip()
+                 )
+                 raise ChildProcessError("Unable to run Docker command: %s" % err)
+
+         def build_target(target: plugin.plugin.BuildTarget):
+             image_repository = "registry.cloudquery.io/%s/%s-%s" % (
+                 self._plugin.team(),
+                 self._plugin.kind(),
+                 self._plugin.name(),
+             )
+             image_tag = "%s:%s-%s-%s" % (
+                 image_repository,
+                 version,
+                 target.os,
+                 target.arch,
+             )
+             image_tar = "plugin-%s-%s-%s-%s.tar" % (
+                 self._plugin.name(),
+                 version,
+                 target.os,
+                 target.arch,
+             )
+             image_path = "%s/%s" % (dist_dir, image_tar)
+             logger.info("Building docker image %s" % image_tag)
+             docker_build_arguments = [
+                 "docker",
+                 "buildx",
+                 "build",
+                 "-t",
+                 image_tag,
+                 "--platform",
+                 "%s/%s" % (target.os, target.arch),
+                 "-f",
+                 dockerfile_path,
+                 ".",
+                 "--progress",
+                 "plain",
+                 "--load",
+             ]
+             logger.debug(
+                 "Running command 'docker %s'" % " ".join(docker_build_arguments)
+             )
+             run_docker_cmd(docker_build_arguments, plugin_dir)
+             logger.debug("Saving docker image '%s' to '%s'" % (image_tag, image_path))
+             docker_save_arguments = ["docker", "save", "-o", image_path, image_tag]
+             logger.debug("Running command 'docker %s'", " ".join(docker_save_arguments))
+             run_docker_cmd(docker_save_arguments, plugin_dir)
+             return {
+                 "os": target.os,
+                 "arch": target.arch,
+                 "path": image_tar,
+                 "checksum": calc_sha256_checksum(image_path),
+                 "docker_image_tag": image_tag,
+             }
+
+         logger.info("Building %d targets" % len(self._plugin.build_targets()))
+         supported_targets = list(map(build_target, self._plugin.build_targets()))
+         return supported_targets
+
+     def _serve(self, args):
+         logger = get_logger(args)
+         self._plugin.set_logger(logger)
+         self._server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
+         discovery_pb2_grpc.add_DiscoveryServicer_to_server(
+             DiscoveryServicer([3]), self._server
+         )
+         plugin_pb2_grpc.add_PluginServicer_to_server(
+             PluginServicer(self._plugin, logger), self._server
+         )
+         self._server.add_insecure_port(args.address)
+         logger.info("Starting server", address=args.address)
+         self._server.start()
+         self._server.wait_for_termination()
+
+     def stop(self):
+         self._server.stop(5)
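
With both subcommands registered, one entrypoint now serves and packages the plugin. A hedged sketch of driving it programmatically, reusing the SDK's own MemDB; note that the package path shells out to docker buildx, so it needs Docker and a Dockerfile in the plugin directory:

from cloudquery.sdk.internal.memdb.memdb import MemDB
from cloudquery.sdk.serve.plugin import PluginCommand

cmd = PluginCommand(MemDB())

# Equivalent of `python main.py serve ...`: starts the gRPC server and
# blocks until terminated (default address tcp localhost:7777).
# cmd.run(["serve", "--log-level", "debug", "--log-format", "json"])

# Equivalent of `python main.py package ...`: the version is positional,
# the plugin directory follows it, and -m (release message) is required.
# Writes package.json, tables.json (source plugins only), docs/, and one
# Docker image tarball per BuildTarget into <plugin-directory>/dist.
cmd.run(["package", "-m", "Initial release", "v1.0.0", "."])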
{cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/types/json.py
@@ -13,6 +13,9 @@ class JSONType(pa.ExtensionType):
          # metadata to be deserialized
          return b"json-serialized"
 
+     def __str__(self):
+         return "json"
+
      @classmethod
      def __arrow_ext_deserialize__(self, storage_type, serialized):
          # return an instance of this subclass given the serialized
{cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery/sdk/types/uuid.py
@@ -15,6 +15,9 @@ class UUIDType(pa.ExtensionType):
          # metadata to be deserialized
          return b"uuid-serialized"
 
+     def __str__(self):
+         return "uuid"
+
      @classmethod
      def __arrow_ext_deserialize__(self, storage_type, serialized):
          # return an instance of this subclass given the serialized
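
Both __str__ overrides serve the same purpose: _write_tables_json above serializes each column's type as str(column.type), and the overrides make the two extension types render as stable short names instead of pyarrow's default representation. A quick sketch, assuming UUIDType is re-exported from cloudquery.sdk.types the same way JSONType is imported in memdb:

# These are the values that now appear in tables.json "type" fields.
from cloudquery.sdk.types import JSONType, UUIDType  # UUIDType export assumed

assert str(JSONType()) == "json"
assert str(UUIDType()) == "uuid"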
{cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery_plugin_sdk.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: cloudquery-plugin-sdk
- Version: 0.1.8
+ Version: 0.1.9
  Summary: CloudQuery Plugin SDK for Python
  Home-page: https://github.com/cloudquery/plugin-sdk-python
  Author: CloudQuery LTD
@@ -20,24 +20,24 @@ Classifier: Operating System :: OS Independent
  Classifier: Topic :: Internet
  Requires-Python: >=3.7
  Requires-Dist: cloudquery-plugin-pb==0.0.20
- Requires-Dist: exceptiongroup==1.1.3
- Requires-Dist: black==23.10.1
- Requires-Dist: grpcio==1.59.2
- Requires-Dist: grpcio-tools==1.59.2
+ Requires-Dist: exceptiongroup==1.2.0
+ Requires-Dist: black==23.12.0
+ Requires-Dist: grpcio==1.60.0
+ Requires-Dist: grpcio-tools==1.60.0
  Requires-Dist: iniconfig==2.0.0
  Requires-Dist: Jinja2==3.1.2
  Requires-Dist: MarkupSafe==2.1.3
- Requires-Dist: numpy==1.26.1
+ Requires-Dist: numpy==1.26.2
  Requires-Dist: packaging==23.2
- Requires-Dist: pandas==2.1.2
+ Requires-Dist: pandas==2.1.4
  Requires-Dist: pluggy==1.3.0
- Requires-Dist: protobuf==4.24.4
+ Requires-Dist: protobuf==4.25.1
  Requires-Dist: pyarrow==14.0.1
  Requires-Dist: pytest==7.4.3
  Requires-Dist: python-dateutil==2.8.2
  Requires-Dist: pytz==2023.3.post1
  Requires-Dist: six==1.16.0
- Requires-Dist: structlog==23.1.0
+ Requires-Dist: structlog==23.2.0
  Requires-Dist: tomli==2.0.1
  Requires-Dist: tzdata==2023.3
 
{cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/cloudquery_plugin_sdk.egg-info/requires.txt
@@ -1,21 +1,21 @@
  cloudquery-plugin-pb==0.0.20
- exceptiongroup==1.1.3
- black==23.10.1
- grpcio==1.59.2
- grpcio-tools==1.59.2
+ exceptiongroup==1.2.0
+ black==23.12.0
+ grpcio==1.60.0
+ grpcio-tools==1.60.0
  iniconfig==2.0.0
  Jinja2==3.1.2
  MarkupSafe==2.1.3
- numpy==1.26.1
+ numpy==1.26.2
  packaging==23.2
- pandas==2.1.2
+ pandas==2.1.4
  pluggy==1.3.0
- protobuf==4.24.4
+ protobuf==4.25.1
  pyarrow==14.0.1
  pytest==7.4.3
  python-dateutil==2.8.2
  pytz==2023.3.post1
  six==1.16.0
- structlog==23.1.0
+ structlog==23.2.0
  tomli==2.0.1
  tzdata==2023.3
{cloudquery-plugin-sdk-0.1.8 → cloudquery-plugin-sdk-0.1.9}/setup.py
@@ -11,24 +11,24 @@ description = "CloudQuery Plugin SDK for Python"
 
  dependencies = [
      "cloudquery-plugin-pb==0.0.20",
-     "exceptiongroup==1.1.3",
-     "black==23.10.1",
-     "grpcio==1.59.2",
-     "grpcio-tools==1.59.2",
+     "exceptiongroup==1.2.0",
+     "black==23.12.0",
+     "grpcio==1.60.0",
+     "grpcio-tools==1.60.0",
      "iniconfig==2.0.0",
      "Jinja2==3.1.2",
      "MarkupSafe==2.1.3",
-     "numpy==1.26.1",
+     "numpy==1.26.2",
      "packaging==23.2",
-     "pandas==2.1.2",
+     "pandas==2.1.4",
      "pluggy==1.3.0",
-     "protobuf==4.24.4",
+     "protobuf==4.25.1",
      "pyarrow==14.0.1",
      "pytest==7.4.3",
      "python-dateutil==2.8.2",
      "pytz==2023.3.post1",
      "six==1.16.0",
-     "structlog==23.1.0",
+     "structlog==23.2.0",
      "tomli==2.0.1",
      "tzdata==2023.3",
  ]
@@ -53,7 +53,7 @@ packages = [
  ]
  setuptools.setup(
      name=name,
-     version="0.1.8",
+     version="0.1.9",
      description=description,
      long_description=long_description,
      author="CloudQuery LTD",
cloudquery-plugin-sdk-0.1.8/cloudquery/sdk/internal/memdb/memdb.py (deleted)
@@ -1,40 +0,0 @@
- from cloudquery.sdk import plugin
- from cloudquery.sdk import message
- from cloudquery.sdk import schema
- from typing import List, Generator, Dict
- import pyarrow as pa
-
- NAME = "memdb"
- VERSION = "development"
-
-
- class MemDB(plugin.Plugin):
-     def __init__(self) -> None:
-         super().__init__(NAME, VERSION)
-         self._db: Dict[str, pa.RecordBatch] = {}
-         self._tables: Dict[str, schema.Table] = {}
-
-     def get_tables(self, options: plugin.TableOptions = None) -> List[plugin.Table]:
-         tables = list(self._tables.values())
-         return schema.filter_dfs(tables, options.tables, options.skip_tables)
-
-     def sync(
-         self, options: plugin.SyncOptions
-     ) -> Generator[message.SyncMessage, None, None]:
-         for table, record in self._db.items():
-             yield message.SyncInsertMessage(record)
-
-     def write(self, writer: Generator[message.WriteMessage, None, None]) -> None:
-         for msg in writer:
-             if isinstance(msg, message.WriteMigrateTableMessage):
-                 if msg.table.name not in self._db:
-                     self._db[msg.table.name] = msg.table
-                     self._tables[msg.table.name] = msg.table
-             elif isinstance(msg, message.WriteInsertMessage):
-                 table = schema.Table.from_arrow_schema(msg.record.schema)
-                 self._db[table.name] = msg.record
-             else:
-                 raise NotImplementedError(f"Unknown message type {type(msg)}")
-
-     def close(self) -> None:
-         self._db = {}
cloudquery-plugin-sdk-0.1.8/cloudquery/sdk/serve/plugin.py (deleted)
@@ -1,160 +0,0 @@
- import argparse
- import logging
- import os
- from concurrent import futures
-
- import grpc
- import structlog
- import sys
- from cloudquery.discovery_v1 import discovery_pb2_grpc
- from cloudquery.plugin_v3 import plugin_pb2_grpc
- from structlog import wrap_logger
-
- from cloudquery.sdk.internal.servers.discovery_v1.discovery import DiscoveryServicer
- from cloudquery.sdk.internal.servers.plugin_v3 import PluginServicer
- from cloudquery.sdk.plugin.plugin import Plugin
-
- _IS_WINDOWS = sys.platform == "win32"
-
- try:
-     import colorama
- except ImportError:
-     colorama = None
-
- if _IS_WINDOWS:  # pragma: no cover
-     # On Windows, use colors by default only if Colorama is installed.
-     _has_colors = colorama is not None
- else:
-     # On other OSes, use colors by default.
-     _has_colors = True
-
-
- def get_logger(args):
-     log_level_map = {
-         "debug": logging.DEBUG,
-         "info": logging.INFO,
-         "warning": logging.WARNING,
-         "error": logging.ERROR,
-         "critical": logging.CRITICAL,
-     }
-
-     logging.basicConfig(
-         format="%(message)s",
-         stream=sys.stdout,
-         level=log_level_map.get(args.log_level.lower(), logging.INFO),
-     )
-
-     processors = [
-         structlog.contextvars.merge_contextvars,
-         structlog.processors.add_log_level,
-         structlog.processors.StackInfoRenderer(),
-         structlog.dev.set_exc_info,
-         structlog.stdlib.filter_by_level,
-         structlog.processors.TimeStamper(fmt="%Y-%m-%dT%H:%M:%SZ", utc=True),
-     ]
-     if args.log_format == "text":
-         processors.append(
-             structlog.dev.ConsoleRenderer(
-                 colors=os.environ.get("NO_COLOR", "") == ""
-                 and (
-                     os.environ.get("FORCE_COLOR", "") != ""
-                     or (
-                         _has_colors
-                         and sys.stdout is not None
-                         and hasattr(sys.stdout, "isatty")
-                         and sys.stdout.isatty()
-                     )
-                 )
-             )
-         )
-     else:
-         processors.append(structlog.processors.JSONRenderer())
-
-     log = wrap_logger(logging.getLogger(), processors=processors)
-     return log
-
-
- class PluginCommand:
-     def __init__(self, plugin: Plugin):
-         self._plugin = plugin
-
-     def run(self, args):
-         parser = argparse.ArgumentParser()
-         subparsers = parser.add_subparsers(dest="command", required=True)
-
-         serve_parser = subparsers.add_parser("serve", help="Start plugin server")
-         serve_parser.add_argument(
-             "--log-format",
-             type=str,
-             default="text",
-             choices=["text", "json"],
-             help="logging format",
-         )
-         serve_parser.add_argument(
-             "--log-level",
-             type=str,
-             default="info",
-             choices=["trace", "debug", "info", "warn", "error"],
-             help="log level",
-         )
-
-         # ignored for now
-         serve_parser.add_argument(
-             "--no-sentry",
-             action="store_true",
-             help="disable sentry (placeholder for future use)",
-         )
-         # ignored for now
-         serve_parser.add_argument(
-             "--otel-endpoint",
-             type=str,
-             default="",
-             help="Open Telemetry HTTP collector endpoint (placeholder for future use)",
-         )
-         # ignored for now
-         serve_parser.add_argument(
-             "--otel-endpoint-insecure",
-             type=str,
-             default="",
-             help="Open Telemetry HTTP collector endpoint (for development only) (placeholder for future use)",
-         )
-
-         serve_parser.add_argument(
-             "--address",
-             type=str,
-             default="localhost:7777",
-             help="address to serve on. can be tcp: 'localhost:7777' or unix socket: '/tmp/plugin.rpc.sock'",
-         )
-         serve_parser.add_argument(
-             "--network",
-             type=str,
-             default="tcp",
-             choices=["tcp", "unix"],
-             help="network to serve on. can be tcp or unix",
-         )
-
-         parsed_args = parser.parse_args(args)
-
-         if parsed_args.command == "serve":
-             self._serve(parsed_args)
-         else:
-             parser.print_help()
-             sys.exit(1)
-
-     def _serve(self, args):
-         logger = get_logger(args)
-         self._plugin.set_logger(logger)
-         self._server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
-         discovery_pb2_grpc.add_DiscoveryServicer_to_server(
-             DiscoveryServicer([3]), self._server
-         )
-         plugin_pb2_grpc.add_PluginServicer_to_server(
-             PluginServicer(self._plugin, logger), self._server
-         )
-         self._server.add_insecure_port(args.address)
-         logger.info("Starting server", address=args.address)
-         self._server.start()
-         self._server.wait_for_termination()
-
-     def stop(self):
-         self._server.stop(5)