cloudquery-plugin-sdk 0.1.7-py2.py3-none-any.whl → 0.1.9-py2.py3-none-any.whl

This diff compares the contents of publicly available package versions released to a supported public registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
cloudquery/sdk/internal/memdb/memdb.py

@@ -3,6 +3,7 @@ from cloudquery.sdk import message
 from cloudquery.sdk import schema
 from typing import List, Generator, Dict
 import pyarrow as pa
+from cloudquery.sdk.types import JSONType
 
 NAME = "memdb"
 VERSION = "development"
@@ -10,9 +11,67 @@ VERSION = "development"
 
 class MemDB(plugin.Plugin):
     def __init__(self) -> None:
-        super().__init__(NAME, VERSION)
+        super().__init__(
+            NAME, VERSION, opts=plugin.plugin.Options(team="cloudquery", kind="source")
+        )
         self._db: Dict[str, pa.RecordBatch] = {}
-        self._tables: Dict[str, schema.Table] = {}
+        self._tables: Dict[str, schema.Table] = {
+            "table_1": schema.Table(
+                name="table_1",
+                columns=[
+                    schema.Column(
+                        name="name",
+                        type=pa.string(),
+                        primary_key=True,
+                        not_null=True,
+                        unique=True,
+                    ),
+                    schema.Column(
+                        name="id",
+                        type=pa.string(),
+                        primary_key=True,
+                        not_null=True,
+                        unique=True,
+                        incremental_key=True,
+                    ),
+                ],
+                title="Table 1",
+                description="Test Table 1",
+                is_incremental=True,
+                relations=[
+                    schema.Table(
+                        name="table_1_relation_1",
+                        columns=[
+                            schema.Column(
+                                name="name",
+                                type=pa.string(),
+                                primary_key=True,
+                                not_null=True,
+                                unique=True,
+                            ),
+                            schema.Column(name="data", type=JSONType()),
+                        ],
+                        title="Table 1 Relation 1",
+                        description="Test Table 1 Relation 1",
+                    )
+                ],
+            ),
+            "table_2": schema.Table(
+                name="table_2",
+                columns=[
+                    schema.Column(
+                        name="name",
+                        type=pa.string(),
+                        primary_key=True,
+                        not_null=True,
+                        unique=True,
+                    ),
+                    schema.Column(name="id", type=pa.string()),
+                ],
+                title="Table 2",
+                description="Test Table 2",
+            ),
+        }
 
     def get_tables(self, options: plugin.TableOptions = None) -> List[plugin.Table]:
         tables = list(self._tables.values())
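
Note: the memdb test plugin's tables move from an empty dict to a static schema, which exercises the new Options metadata and gives the package command's tables.json step (below) real content to describe. A minimal sketch of poking at it (assuming MemDB is re-exported from cloudquery.sdk.internal.memdb, as its small __init__.py suggests):

    from cloudquery.sdk import plugin
    from cloudquery.sdk.internal.memdb import MemDB

    mem = MemDB()
    opts = plugin.TableOptions(tables=["*"], skip_tables=[], skip_dependent_tables=False)
    for t in mem.get_tables(opts):
        # expect table_1 (incremental, with a JSON-typed relation) and table_2
        print(t.name, [c.name for c in t.columns])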
cloudquery/sdk/plugin/plugin.py

@@ -29,10 +29,32 @@ class SyncOptions:
     backend_options: BackendOptions = None
 
 
+@dataclass
+class BuildTarget:
+    os: str = None
+    arch: str = None
+
+
+@dataclass
+class Options:
+    dockerfile: str = None
+    build_targets: List[BuildTarget] = None
+    team: str = None
+    kind: str = None
+
+
 class Plugin:
-    def __init__(self, name: str, version: str) -> None:
+    def __init__(self, name: str, version: str, opts: Options = None) -> None:
         self._name = name
         self._version = version
+        self._opts = Options() if opts is None else opts
+        if self._opts.dockerfile is None:
+            self._opts.dockerfile = "Dockerfile"
+        if self._opts.build_targets is None:
+            self._opts.build_targets = [
+                BuildTarget("linux", "amd64"),
+                BuildTarget("linux", "arm64"),
+            ]
 
     def init(self, spec: bytes, no_connection: bool = False) -> None:
         pass
@@ -46,6 +68,18 @@ class Plugin:
     def version(self) -> str:
         return self._version
 
+    def team(self) -> str:
+        return self._opts.team
+
+    def kind(self) -> str:
+        return self._opts.kind
+
+    def dockerfile(self) -> str:
+        return self._opts.dockerfile
+
+    def build_targets(self) -> List[BuildTarget]:
+        return self._opts.build_targets
+
     def get_tables(self, options: TableOptions) -> List[Table]:
         raise NotImplementedError()
 
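Note: subclasses opt in to the new packaging metadata through the opts parameter; dockerfile defaults to "Dockerfile" and build_targets to linux/amd64 plus linux/arm64 when left unset. A minimal sketch, mirroring how memdb wires it up above:

    from cloudquery.sdk import plugin

    class MyPlugin(plugin.Plugin):
        def __init__(self) -> None:
            super().__init__(
                "my_plugin",
                "v0.0.1",
                opts=plugin.plugin.Options(team="my-team", kind="source"),
            )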
cloudquery/sdk/schema/table.py

@@ -151,6 +151,8 @@ def filter_dfs_func(tt: List[Table], include, exclude, skip_dependent_tables: bo
     filtered_tables = []
     for t in tt:
         filtered_table = copy.deepcopy(t)
+        for r in filtered_table.relations:
+            r.parent = filtered_table
         filtered_table = _filter_dfs_impl(
             filtered_table, False, include, exclude, skip_dependent_tables
         )
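
Note: the two added lines re-point each copied relation's parent at the copied table, since copy.deepcopy alone leaves the clones' parent references unset or stale. The pattern in isolation (hypothetical Node class, not part of the SDK):

    import copy

    class Node:
        def __init__(self, name, children=()):
            self.name = name
            self.parent = None
            self.children = list(children)

    root = Node("root", [Node("child")])
    clone = copy.deepcopy(root)
    for child in clone.children:
        child.parent = clone  # the same re-linking the diff applies to relations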
cloudquery/sdk/serve/plugin.py

@@ -1,7 +1,13 @@
 import argparse
+import hashlib
+import json
 import logging
 import os
+import shutil
+import subprocess
+import tarfile
 from concurrent import futures
+from pathlib import Path
 
 import grpc
 import structlog
@@ -9,10 +15,13 @@ import sys
 from cloudquery.discovery_v1 import discovery_pb2_grpc
 from cloudquery.plugin_v3 import plugin_pb2_grpc
 from structlog import wrap_logger
+from cloudquery.sdk import plugin
+
 
 from cloudquery.sdk.internal.servers.discovery_v1.discovery import DiscoveryServicer
 from cloudquery.sdk.internal.servers.plugin_v3 import PluginServicer
 from cloudquery.sdk.plugin.plugin import Plugin
+from cloudquery.sdk.schema import table
 
 _IS_WINDOWS = sys.platform == "win32"
 
@@ -74,6 +83,14 @@ def get_logger(args):
     return log
 
 
+def calc_sha256_checksum(filename: str):
+    with open(filename, "rb") as f:
+        file_hash = hashlib.sha256()
+        while chunk := f.read(32768):
+            file_hash.update(chunk)
+        return file_hash.hexdigest()
+
+
 class PluginCommand:
     def __init__(self, plugin: Plugin):
         self._plugin = plugin
@@ -82,6 +99,20 @@ class PluginCommand:
         parser = argparse.ArgumentParser()
         subparsers = parser.add_subparsers(dest="command", required=True)
 
+        self._register_serve_command(subparsers)
+        self._register_package_command(subparsers)
+
+        parsed_args = parser.parse_args(args)
+
+        if parsed_args.command == "serve":
+            self._serve(parsed_args)
+        elif parsed_args.command == "package":
+            self._package(parsed_args)
+        else:
+            parser.print_help()
+            sys.exit(1)
+
+    def _register_serve_command(self, subparsers):
         serve_parser = subparsers.add_parser("serve", help="Start plugin server")
         serve_parser.add_argument(
             "--log-format",
@@ -97,7 +128,6 @@
             choices=["trace", "debug", "info", "warn", "error"],
             help="log level",
         )
-
         # ignored for now
         serve_parser.add_argument(
             "--no-sentry",
@@ -118,7 +148,6 @@
             default="",
             help="Open Telemetry HTTP collector endpoint (for development only) (placeholder for future use)",
         )
-
         serve_parser.add_argument(
             "--address",
             type=str,
@@ -133,13 +162,224 @@
             help="network to serve on. can be tcp or unix",
         )
 
-        parsed_args = parser.parse_args(args)
+    def _register_package_command(self, subparsers):
+        package_parser = subparsers.add_parser(
+            "package", help="Package the plugin as a Docker image"
+        )
+        package_parser.add_argument(
+            "version", help="version to tag the Docker image with"
+        )
+        package_parser.add_argument("plugin-directory")
+        package_parser.add_argument(
+            "--log-format",
+            type=str,
+            default="text",
+            choices=["text", "json"],
+            help="logging format",
+        )
+        package_parser.add_argument(
+            "--log-level",
+            type=str,
+            default="info",
+            choices=["trace", "debug", "info", "warn", "error"],
+            help="log level",
+        )
+        package_parser.add_argument(
+            "-D",
+            "--dist-dir",
+            type=str,
+            help="dist directory to output the built plugin. (default: <plugin_directory>/dist)",
+        )
+        package_parser.add_argument(
+            "--docs-dir",
+            type=str,
+            help="docs directory containing markdown files to copy to the dist directory. (default: <plugin_directory>/docs)",
+        )
+        package_parser.add_argument(
+            "-m",
+            "--message",
+            type=str,
+            required=True,
+            help="message that summarizes what is new or changed in this version. Use @<file> to read from file. Supports markdown.",
+        )
 
-        if parsed_args.command == "serve":
-            self._serve(parsed_args)
-        else:
-            parser.print_help()
-            sys.exit(1)
+    def _package(self, args):
+        logger = get_logger(args)
+        self._plugin.set_logger(logger)
+
+        def _is_empty(val):
+            return val == None or len(val) == 0
+
+        if _is_empty(self._plugin.name()):
+            raise Exception("plugin name is required")
+        if _is_empty(self._plugin.team()):
+            raise Exception("plugin team is required")
+        if _is_empty(self._plugin.kind()):
+            raise Exception("plugin kind is required")
+        if _is_empty(self._plugin.dockerfile()):
+            raise Exception("plugin dockerfile is required")
+        if _is_empty(self._plugin.build_targets()):
+            raise Exception("at least one build target is required")
+
+        plugin_directory, version, message = (
+            getattr(args, "plugin-directory"),
+            getattr(args, "version"),
+            getattr(args, "message"),
+        )
+        dist_dir = (
+            "%s/dist" % plugin_directory if args.dist_dir == None else args.dist_dir
+        )
+        docs_dir = (
+            "%s/docs" % plugin_directory if args.docs_dir == None else args.docs_dir
+        )
+        Path(dist_dir).mkdir(0o755, exist_ok=True, parents=True)
+
+        self._copy_docs(logger, docs_dir, dist_dir)
+        self._write_tables_json(logger, dist_dir)
+        supported_targets = self._build_dockerfile(
+            logger, plugin_directory, dist_dir, version
+        )
+        self._write_package_json(logger, dist_dir, message, version, supported_targets)
+        logger.info("Done packaging plugin to '%s'" % dist_dir)
+
+    def _write_package_json(self, logger, dist_dir, message, version, supportedTargets):
+        package_json_path = "%s/package.json" % dist_dir
+        logger.info("Writing package.json to '%s'" % package_json_path)
+        content = {
+            "schema_version": 1,
+            "name": self._plugin.name(),
+            "team": self._plugin.team(),
+            "kind": self._plugin.kind(),
+            "version": version,
+            "message": message,
+            "protocols": [3],
+            "supported_targets": supportedTargets,
+            "package_type": "docker",
+        }
+        with open("%s/package.json" % dist_dir, "w") as f:
+            f.write(json.dumps(content, indent=2))
+
+    def _copy_docs(self, logger, docs_dir, dist_dir):
+        # check is docs_dir exists
+        if not os.path.isdir(docs_dir):
+            raise Exception("docs directory '%s' does not exist" % docs_dir)
+
+        output_docs_dir = "%s/docs" % dist_dir
+        logger.info("Copying docs from '%s' to '%s'" % (docs_dir, output_docs_dir))
+        shutil.copytree(docs_dir, output_docs_dir, dirs_exist_ok=True)
+
+    def _write_tables_json(self, logger, dist_dir):
+        if self._plugin.kind() != "source":
+            return
+
+        tables_json_output_path = "%s/tables.json" % dist_dir
+        logger.info("Writing tables to '%s'" % tables_json_output_path)
+        self._plugin.init(spec=b"", no_connection=True)
+        tables = self._plugin.get_tables(
+            options=plugin.plugin.TableOptions(
+                tables=["*"], skip_tables=[], skip_dependent_tables=False
+            )
+        )
+        flattened_tables = table.flatten_tables(tables)
+
+        def column_to_json(column: table.Column):
+            return {
+                "name": column.name,
+                "type": str(column.type),
+                "description": column.description,
+                "incremental_key": column.incremental_key,
+                "primary_key": column.primary_key,
+                "not_null": column.not_null,
+                "unique": column.unique,
+            }
+
+        def table_to_json(table: table.Table):
+            return {
+                "name": table.name,
+                "title": table.title,
+                "description": table.description,
+                "is_incremental": table.is_incremental,
+                "parent": table.parent.name if table.parent else "",
+                "relations": list(map(lambda r: r.name, table.relations)),
+                "columns": list(map(column_to_json, table.columns)),
+            }
+
+        tables_json = list(map(table_to_json, flattened_tables))
+        with open(tables_json_output_path, "w") as f:
+            f.write(json.dumps(tables_json))
+        logger.info(
+            "Wrote %d tables to '%s'" % (len(tables_json), tables_json_output_path)
+        )
+
+    def _build_dockerfile(self, logger, plugin_dir, dist_dir, version):
+        dockerfile_path = "%s/%s" % (plugin_dir, self._plugin.dockerfile())
+        if not os.path.isfile(dockerfile_path):
+            raise Exception("Dockerfile '%s' does not exist" % dockerfile_path)
+
+        def run_docker_cmd(cmd, plugin_dir):
+            result = subprocess.run(cmd, capture_output=True, cwd=plugin_dir)
+            if result.returncode != 0:
+                err = (
+                    ""
+                    if result.stderr is None
+                    else result.stderr.decode("ascii").strip()
+                )
+                raise ChildProcessError("Unable to run Docker command: %s" % err)
+
+        def build_target(target: plugin.plugin.BuildTarget):
+            image_repository = "registry.cloudquery.io/%s/%s-%s" % (
+                self._plugin.team(),
+                self._plugin.kind(),
+                self._plugin.name(),
+            )
+            image_tag = "%s:%s-%s-%s" % (
+                image_repository,
+                version,
+                target.os,
+                target.arch,
+            )
+            image_tar = "plugin-%s-%s-%s-%s.tar" % (
+                self._plugin.name(),
+                version,
+                target.os,
+                target.arch,
+            )
+            image_path = "%s/%s" % (dist_dir, image_tar)
+            logger.info("Building docker image %s" % image_tag)
+            docker_build_arguments = [
+                "docker",
+                "buildx",
+                "build",
+                "-t",
+                image_tag,
+                "--platform",
+                "%s/%s" % (target.os, target.arch),
+                "-f",
+                dockerfile_path,
+                ".",
+                "--progress",
+                "plain",
+                "--load",
+            ]
+            logger.debug(
+                "Running command 'docker %s'" % " ".join(docker_build_arguments)
+            )
+            run_docker_cmd(docker_build_arguments, plugin_dir)
+            logger.debug("Saving docker image '%s' to '%s'" % (image_tag, image_path))
+            docker_save_arguments = ["docker", "save", "-o", image_path, image_tag]
+            logger.debug("Running command 'docker %s'", " ".join(docker_save_arguments))
+            run_docker_cmd(docker_save_arguments, plugin_dir)
+            return {
+                "os": target.os,
+                "arch": target.arch,
+                "path": image_tar,
+                "checksum": calc_sha256_checksum(image_path),
+                "docker_image_tag": image_tag,
+            }
+
+        logger.info("Building %d targets" % len(self._plugin.build_targets()))
+        supported_targets = list(map(build_target, self._plugin.build_targets()))
+        return supported_targets
 
     def _serve(self, args):
         logger = get_logger(args)
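
Note: together these changes add a package subcommand next to serve. A hedged example invocation (assuming a main.py entry point that forwards sys.argv[1:] to PluginCommand, as plugins already do for serve):

    python3 main.py package v1.0.0 . --message "Initial release"

    # expected layout under ./dist: package.json, tables.json (source plugins
    # only), docs/, and one plugin-<name>-<version>-<os>-<arch>.tar per build
    # target, each listed in package.json with its sha256 checksum.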
cloudquery/sdk/types/json.py

@@ -13,6 +13,9 @@ class JSONType(pa.ExtensionType):
         # metadata to be deserialized
         return b"json-serialized"
 
+    def __str__(self):
+        return "json"
+
     @classmethod
     def __arrow_ext_deserialize__(self, storage_type, serialized):
         # return an instance of this subclass given the serialized
cloudquery/sdk/types/uuid.py

@@ -15,6 +15,9 @@ class UUIDType(pa.ExtensionType):
         # metadata to be deserialized
         return b"uuid-serialized"
 
+    def __str__(self):
+        return "uuid"
+
     @classmethod
     def __arrow_ext_deserialize__(self, storage_type, serialized):
         # return an instance of this subclass given the serialized
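
Note: the __str__ overrides give the Arrow extension types stable short names, which is what the package command records for column types via str(column.type) in tables.json. A quick sketch of the effect (assuming UUIDType is exported alongside JSONType):

    import pyarrow as pa
    from cloudquery.sdk.types import JSONType, UUIDType

    print(str(pa.string()))  # "string" (pyarrow built-in)
    print(str(JSONType()))   # "json" after this change
    print(str(UUIDType()))   # "uuid" after this change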
cloudquery_plugin_sdk-0.1.7.dist-info/METADATA → cloudquery_plugin_sdk-0.1.9.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: cloudquery-plugin-sdk
-Version: 0.1.7
+Version: 0.1.9
 Summary: CloudQuery Plugin SDK for Python
 Home-page: https://github.com/cloudquery/plugin-sdk-python
 Author: CloudQuery LTD
@@ -19,25 +19,25 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Operating System :: OS Independent
 Classifier: Topic :: Internet
 Requires-Python: >=3.7
-Requires-Dist: cloudquery-plugin-pb ==0.0.18
-Requires-Dist: exceptiongroup ==1.1.3
-Requires-Dist: black ==23.9.1
-Requires-Dist: grpcio ==1.59.0
-Requires-Dist: grpcio-tools ==1.59.0
+Requires-Dist: cloudquery-plugin-pb ==0.0.20
+Requires-Dist: exceptiongroup ==1.2.0
+Requires-Dist: black ==23.12.0
+Requires-Dist: grpcio ==1.60.0
+Requires-Dist: grpcio-tools ==1.60.0
 Requires-Dist: iniconfig ==2.0.0
 Requires-Dist: Jinja2 ==3.1.2
 Requires-Dist: MarkupSafe ==2.1.3
-Requires-Dist: numpy ==1.26.0
-Requires-Dist: packaging ==23.1
-Requires-Dist: pandas ==2.1.1
+Requires-Dist: numpy ==1.26.2
+Requires-Dist: packaging ==23.2
+Requires-Dist: pandas ==2.1.4
 Requires-Dist: pluggy ==1.3.0
-Requires-Dist: protobuf ==4.24.3
-Requires-Dist: pyarrow ==13.0.0
-Requires-Dist: pytest ==7.4.2
+Requires-Dist: protobuf ==4.25.1
+Requires-Dist: pyarrow ==14.0.1
+Requires-Dist: pytest ==7.4.3
 Requires-Dist: python-dateutil ==2.8.2
 Requires-Dist: pytz ==2023.3.post1
 Requires-Dist: six ==1.16.0
-Requires-Dist: structlog ==23.1.0
+Requires-Dist: structlog ==23.2.0
 Requires-Dist: tomli ==2.0.1
 Requires-Dist: tzdata ==2023.3
 
cloudquery_plugin_sdk-0.1.7.dist-info/RECORD → cloudquery_plugin_sdk-0.1.9.dist-info/RECORD

@@ -1,9 +1,9 @@
-cloudquery_plugin_sdk-0.1.7-py3.11-nspkg.pth,sha256=81VobgPokBFlVYv95nk97soxyPM43712TaNhfD4PUIw,559
+cloudquery_plugin_sdk-0.1.9-py3.11-nspkg.pth,sha256=81VobgPokBFlVYv95nk97soxyPM43712TaNhfD4PUIw,559
 cloudquery/sdk/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cloudquery/sdk/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cloudquery/sdk/internal/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cloudquery/sdk/internal/memdb/__init__.py,sha256=fzRjFUy47sNj4GzDcxPQPg9MwgSlRJK-XXoOxb40qM8,25
-cloudquery/sdk/internal/memdb/memdb.py,sha256=8Lfqq25SIfjMsiSASrSXRqCWkXsFUJp2NvqaudEUCP8,1505
+cloudquery/sdk/internal/memdb/memdb.py,sha256=Ad0U3fm3a4PNs9PuvhG0nmQIPK0joQ6wsqdJEAB-vgU,3676
 cloudquery/sdk/internal/servers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cloudquery/sdk/internal/servers/discovery_v1/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cloudquery/sdk/internal/servers/discovery_v1/discovery.py,sha256=iYQpSw1aSXgKK4zcHxIZSvaA9j-IQfDkeHnVur4sogw,360
@@ -13,7 +13,7 @@ cloudquery/sdk/message/__init__.py,sha256=KfRu1ZmLz4r-9qgOsXqgAZYetj2_MKjKAJ2HRg
 cloudquery/sdk/message/sync.py,sha256=O2deyqCbZ84s5DvYgEozN1JMpWmlhXVPUOatM4fjq3w,283
 cloudquery/sdk/message/write.py,sha256=i30d2nC_YGcGlei0qBd3yHatbJ2C72ZF6QYWURdysYM,545
 cloudquery/sdk/plugin/__init__.py,sha256=xksQLxBZNbWE0dhPLSttDDW-ns6oXrEjd03g_mH0Pkg,61
-cloudquery/sdk/plugin/plugin.py,sha256=txZ-nIybLKfGFhMjhVNq4nD9JdJxC9cVMclvkwVvqr4,1424
+cloudquery/sdk/plugin/plugin.py,sha256=SkPICIV0Y7PZvdr9d54zW49zJxNc1rZPVohptitEIKU,2281
 cloudquery/sdk/scalar/__init__.py,sha256=EEp4c0kNhYZpD_vt84OLnYcFW82HoyrYgKOik53uM0Y,412
 cloudquery/sdk/scalar/binary.py,sha256=8MxVoqjmZCkHyKtjUzsHQKmnpsZxX8UfL6Rd4sOVgBg,980
 cloudquery/sdk/scalar/bool.py,sha256=mtOqB8na2zRKga7OGDYg24XcbIZ6uo24cAe5HdwZfIg,1344
@@ -37,17 +37,17 @@ cloudquery/sdk/schema/__init__.py,sha256=6y65eQdg7OhBevUAlLzVmuhOXe0dd5Gr28hzygM
 cloudquery/sdk/schema/arrow.py,sha256=Voj9HeEuU4dTB-blsG4yKc-8UFdW_Z5qRcYNeI7wHFg,437
 cloudquery/sdk/schema/column.py,sha256=7kK2S6mmh4RBaCcVVlgG0v8dsIumPixcK00FLM_BFy8,2751
 cloudquery/sdk/schema/resource.py,sha256=R75VuFAGb1GBsRZdZgh5gvHbxhQkyYa_4_EVuFmbFIY,866
-cloudquery/sdk/schema/table.py,sha256=QSIVcOEqeJPG4pZFrXILTuNyeiDVLHC3z9-5LVQ9ptM,5944
+cloudquery/sdk/schema/table.py,sha256=DxXVsXDXMAuwnQ1ROBpOF8HxhYLtUqbAr4_FP8vncsY,6025
 cloudquery/sdk/serve/__init__.py,sha256=zNN32S3GdnVbJ9ouWwnvlxj0TpS5G_woty8vURoRk6s,34
-cloudquery/sdk/serve/plugin.py,sha256=JU5e-rgey2ajF1Hj92WRvQ5NOLd8Fnyq_BfqhsgNTqQ,4918
+cloudquery/sdk/serve/plugin.py,sha256=3dHphqmrJb4c-zG8qoj0tIY1_nLvlk9SxoklvS91R08,14209
 cloudquery/sdk/transformers/__init__.py,sha256=hGnQNiUbOE2qWrWZZLIIM6KqXvFrz_6WhWGMPdch8jY,97
 cloudquery/sdk/transformers/openapi.py,sha256=5qHM62OtybWvzVqGh1cqh2ZbN9KtKx-zmdSK2JH3pVg,1517
 cloudquery/sdk/transformers/transformers.py,sha256=9USd5VDEMkyk-RfQboMhr9BwC05935MTuM4DZIxhPv4,330
 cloudquery/sdk/types/__init__.py,sha256=VRRTI0mHh_TxkNht9qyr3eouNV71jv1v6z9UDFvLPU8,54
-cloudquery/sdk/types/json.py,sha256=1nxthqF_KjuumWlA-R9XfnupL-xTXMqa1puUwF87OFc,607
-cloudquery/sdk/types/uuid.py,sha256=_Wx-LWAlOJX04r8wx-PU4jKZFs7QEiUx4xejtAIUecw,631
-cloudquery_plugin_sdk-0.1.7.dist-info/METADATA,sha256=Joq6yS7CbR9cXTF8NjFJfeBn-A1vGgsVZyFTJakQG0I,1830
-cloudquery_plugin_sdk-0.1.7.dist-info/WHEEL,sha256=iYlv5fX357PQyRT2o6tw1bN-YcKFFHKqB_LwHO5wP-g,110
-cloudquery_plugin_sdk-0.1.7.dist-info/namespace_packages.txt,sha256=D13SSF-LACnBbtgkIVGVOAVT5hqwn2E-v0NGcuyprk4,11
-cloudquery_plugin_sdk-0.1.7.dist-info/top_level.txt,sha256=D13SSF-LACnBbtgkIVGVOAVT5hqwn2E-v0NGcuyprk4,11
-cloudquery_plugin_sdk-0.1.7.dist-info/RECORD,,
+cloudquery/sdk/types/json.py,sha256=d1EYb8-C74U71IUajHgbetD4wTS7fIsh04qY84mq2-U,653
+cloudquery/sdk/types/uuid.py,sha256=Xhax4Pyfwd-rxnBSBZLt9k2Mlx2FVCyUOcKWcRiHA0U,677
+cloudquery_plugin_sdk-0.1.9.dist-info/METADATA,sha256=XTT8DUl8hlIfWgfkkGHqXnL-_TMAAa3jbzbjamyoOtU,1831
+cloudquery_plugin_sdk-0.1.9.dist-info/WHEEL,sha256=-G_t0oGuE7UD0DrSpVZnq1hHMBV9DD2XkS5v7XpmTnk,110
+cloudquery_plugin_sdk-0.1.9.dist-info/namespace_packages.txt,sha256=D13SSF-LACnBbtgkIVGVOAVT5hqwn2E-v0NGcuyprk4,11
+cloudquery_plugin_sdk-0.1.9.dist-info/top_level.txt,sha256=D13SSF-LACnBbtgkIVGVOAVT5hqwn2E-v0NGcuyprk4,11
+cloudquery_plugin_sdk-0.1.9.dist-info/RECORD,,
cloudquery_plugin_sdk-0.1.7.dist-info/WHEEL → cloudquery_plugin_sdk-0.1.9.dist-info/WHEEL

@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.41.2)
+Generator: bdist_wheel (0.42.0)
 Root-Is-Purelib: true
 Tag: py2-none-any
 Tag: py3-none-any