quollio-core 0.5.3__py3-none-any.whl → 0.6.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
quollio_core/__init__.py CHANGED
@@ -1,4 +1,4 @@
 """Quollio Core"""
 
-__version__ = "0.5.3"
+__version__ = "0.6.1"
 __author__ = "Quollio Technologies, Inc"
quollio_core/dbt_projects/snowflake/profiles/profiles_template.yml CHANGED
@@ -8,6 +8,11 @@ quollio_intelligence_snowflake:
       schema: {{ account_schema }}
       type: snowflake
       user: {{ account_user }}
-      password: {{ account_password }}
       warehouse: {{ account_warehouse }}
       threads: {{ threads }}
+      {% if private_key is defined %}
+      private_key: |
+        {{ private_key | indent(8) }}
+      {% else %}
+      password: {{ account_password }}
+      {% endif %}
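For orientation, a minimal Jinja2 rendering sketch of the branch added above, reduced to just the auth keys (values and the two-space indent width are placeholders; the real template carries the full dbt profile around it):

    from jinja2 import Template

    # Renders the key-pair branch when private_key is defined, else the password branch.
    snippet = Template(
        "{% if private_key is defined %}"
        "private_key: |\n  {{ private_key | indent(2) }}\n"
        "{% else %}"
        "password: {{ account_password }}\n"
        "{% endif %}"
    )
    print(snippet.render(private_key="-----BEGIN PRIVATE KEY-----\nMII...\n-----END PRIVATE KEY-----"))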
quollio_core/helper/core.py CHANGED
@@ -8,6 +8,7 @@ from jinja2 import Environment, FileSystemLoader
 def new_global_id(tenant_id: str, cluster_id: str, data_id: str, data_type: str) -> str:
     prefix = ""
     data_types = {
+        "data_source": "dsrc-",
         "schema": "schm-",
         "table": "tbl-",
        "column": "clmn-",
quollio_core/models/avroasset.py ADDED
@@ -0,0 +1,23 @@
+from dataclasses import dataclass
+from typing import List, Optional
+
+from dataclasses_avroschema import AvroModel
+
+
+@dataclass
+class AvroAsset(AvroModel):
+    "AvroAsset"
+
+    id: str
+    object_type: str
+    parents: List[str]
+    name: str
+    stats_max: Optional[str] = None
+    stats_min: Optional[str] = None
+    stats_mean: Optional[str] = None
+    stats_median: Optional[str] = None
+    stats_mode: Optional[str] = None
+    stats_stddev: Optional[str] = None
+    stats_number_of_null: Optional[str] = None
+    stats_number_of_unique: Optional[str] = None
+    upstream: Optional[List[str]] = None
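A minimal round-trip sketch (illustrative values) showing how AvroAsset records get written and read back the way the profilers in this release do it, via avro_schema_to_python() and fastavro:

    import io

    import fastavro

    from quollio_core.models.avroasset import AvroAsset

    asset = AvroAsset(
        id="tbl-abc123",
        object_type="table",
        parents=["MY_DB", "MY_SCHEMA"],
        name="MY_TABLE",
        upstream=["tbl-def456"],
    )
    schema = AvroAsset.avro_schema_to_python()

    # Write one record to an in-memory Avro container, then read it back.
    buffer = io.BytesIO()
    fastavro.writer(buffer, schema, [asset.to_dict()])
    buffer.seek(0)
    print([record["id"] for record in fastavro.reader(buffer)])  # ['tbl-abc123']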
quollio_core/models/qdc.py ADDED
@@ -0,0 +1,36 @@
+from dataclasses import asdict, dataclass
+from typing import Dict
+
+
+@dataclass
+class GetImportURLRequest:
+    service_name: str
+    source_name: str
+    file_name: str
+    override_logical_name: str
+    update_mode: str
+
+    def as_dict(self) -> Dict[str, str]:
+        return asdict(self)
+
+
+@dataclass
+class DataSourceMetadataResponseBody:
+    user_id: str
+    job_key: str
+    service_name: str
+    source_name: str
+    source_type: str
+    override_logical_name: str
+
+    def as_dict(self) -> Dict[str, str]:
+        return asdict(self)
+
+
+@dataclass
+class GetImportURLResponse:
+    location: str
+    datasource_metadata_response_body: DataSourceMetadataResponseBody
+
+    def as_dict(self) -> Dict[str, str]:
+        return asdict(self)
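These are thin request/response carriers; a quick sketch with placeholder values shows the shape the API client sends:

    from quollio_core.models.qdc import GetImportURLRequest

    req = GetImportURLRequest(
        service_name="snowflake",
        source_name="my-stack",
        file_name="my-stack.avro",
        override_logical_name="false",
        update_mode="partial",
    )
    print(req.as_dict())
    # {'service_name': 'snowflake', 'source_name': 'my-stack', 'file_name': 'my-stack.avro',
    #  'override_logical_name': 'false', 'update_mode': 'partial'}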
quollio_core/profilers/lineage.py CHANGED
@@ -3,6 +3,7 @@ from dataclasses import asdict, dataclass
 from typing import Dict, List, Tuple, Union
 
 from quollio_core.helper.core import new_global_id
+from quollio_core.models.avroasset import AvroAsset
 
 
 @dataclass
@@ -23,6 +24,101 @@ class LineageInputs:
     upstreams: LineageInput
 
 
+def gen_table_avro_lineage_payload(
+    tenant_id: str,
+    endpoint: str,
+    tables: List[Dict[str, Union[Dict[str, str], str]]],
+    existing_global_ids: Dict[str, bool],
+) -> List[Dict[str, str]]:
+    payload = list()
+    for table in tables:
+        downstream_table_fqn = table["DOWNSTREAM_TABLE_NAME"].split(".")
+        if len(downstream_table_fqn) != 3:
+            continue
+        else:
+            global_id_arg = "{db}{schema}{table}".format(
+                db=downstream_table_fqn[0], schema=downstream_table_fqn[1], table=downstream_table_fqn[2]
+            )
+            downstream_table_global_id = new_global_id(
+                tenant_id=tenant_id, cluster_id=endpoint, data_id=global_id_arg, data_type="table"
+            )
+            if existing_global_ids.get(downstream_table_global_id) is not True:
+                continue
+            upstreams = list()
+            for upstream_table in table["UPSTREAM_TABLES"]:
+                upstream_table_fqn = upstream_table["upstream_object_name"].split(".")
+                if len(upstream_table_fqn) != 3:
+                    continue
+                else:
+                    upstream_global_id_arg = "{db}{schema}{table}".format(
+                        db=upstream_table_fqn[0], schema=upstream_table_fqn[1], table=upstream_table_fqn[2]
+                    )
+                    upstream_table_global_id = new_global_id(
+                        tenant_id=tenant_id, cluster_id=endpoint, data_id=upstream_global_id_arg, data_type="table"
+                    )
+                    upstreams.append(upstream_table_global_id)
+
+            avro_assets = AvroAsset(
+                id=downstream_table_global_id,
+                object_type="table",
+                parents=[downstream_table_fqn[0], downstream_table_fqn[1]],
+                name=downstream_table_fqn[2],
+                upstream=upstreams,
+            )
+            payload.append(avro_assets.to_dict())
+    return payload
+
+
+def gen_column_avro_lineage_payload(
+    tenant_id: str, endpoint: str, columns: List[Dict[str, str]], existing_global_ids: Dict[str, bool]
+) -> List[Dict[str, str]]:
+    payload = list()
+    for column in columns:
+        downstream_table_fqn = column["DOWNSTREAM_TABLE_NAME"].split(".")
+        if len(downstream_table_fqn) != 3:
+            continue
+        else:
+            global_id_arg = "{db}{schema}{table}{column}".format(
+                db=downstream_table_fqn[0],
+                schema=downstream_table_fqn[1],
+                table=downstream_table_fqn[2],
+                column=column["DOWNSTREAM_COLUMN_NAME"],
+            )
+            downstream_column_global_id = new_global_id(
+                tenant_id=tenant_id, cluster_id=endpoint, data_id=global_id_arg, data_type="column"
+            )
+            if existing_global_ids.get(downstream_column_global_id) is not True:
+                continue
+            upstream_columns: List[Dict[str, str]] = json.loads(column["UPSTREAM_COLUMNS"])
+            upstreams = list()
+            for upstream_column in upstream_columns:
+                upstream_table_fqn = upstream_column["upstream_table_name"].split(".")
+                if len(upstream_table_fqn) != 3:
+                    continue
+                elif not upstream_column.get("upstream_column_name"):
+                    continue
+                else:
+                    upstream_global_id_arg = "{db}{schema}{table}{column}".format(
+                        db=upstream_table_fqn[0],
+                        schema=upstream_table_fqn[1],
+                        table=upstream_table_fqn[2],
+                        column=upstream_column["upstream_column_name"],
+                    )
+                    upstream_column_global_id = new_global_id(
+                        tenant_id=tenant_id, cluster_id=endpoint, data_id=upstream_global_id_arg, data_type="column"
+                    )
+                    upstreams.append(upstream_column_global_id)
+            avro_assets = AvroAsset(
+                id=downstream_column_global_id,
+                object_type="column",
+                parents=[downstream_table_fqn[0], downstream_table_fqn[1], downstream_table_fqn[2]],
+                name=column["DOWNSTREAM_COLUMN_NAME"],
+                upstream=upstreams,
+            )
+            payload.append(avro_assets.to_dict())
+    return payload
+
+
 def gen_table_lineage_payload(
     tenant_id: str, endpoint: str, tables: List[Dict[str, Union[Dict[str, str], str]]]
 ) -> List[LineageInputs]:
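The expected input shape can be read off the function bodies above; here is an illustrative call to gen_table_avro_lineage_payload with placeholder names, where only downstream tables already known to QDC (present in existing_global_ids) pass the filter:

    from quollio_core.helper.core import new_global_id
    from quollio_core.profilers.lineage import gen_table_avro_lineage_payload

    tables = [
        {
            "DOWNSTREAM_TABLE_NAME": "DB.SCHEMA.ORDERS_ENRICHED",
            "UPSTREAM_TABLES": [{"upstream_object_name": "DB.SCHEMA.ORDERS"}],
        }
    ]
    # Mirrors the "{db}{schema}{table}" concatenation used inside the function.
    downstream_id = new_global_id(
        tenant_id="tenant-a", cluster_id="my-account",
        data_id="DBSCHEMAORDERS_ENRICHED", data_type="table",
    )
    payload = gen_table_avro_lineage_payload(
        tenant_id="tenant-a",
        endpoint="my-account",
        tables=tables,
        existing_global_ids={downstream_id: True},
    )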
quollio_core/profilers/qdc.py ADDED
@@ -0,0 +1,27 @@
+import logging
+from io import BytesIO
+from typing import Dict
+
+import fastavro
+
+from quollio_core.helper.core import new_global_id
+from quollio_core.models.avroasset import AvroAsset
+from quollio_core.repository import qdc
+
+logger = logging.getLogger(__name__)
+
+
+def gen_existing_global_id_dict(avro_content: bytes) -> Dict[str, bool]:
+    byte_io = BytesIO(avro_content)
+    avro_schema = AvroAsset.avro_schema_to_python()
+    reader = fastavro.reader(byte_io, avro_schema)
+    records = {record["id"]: True for record in reader}
+    return records
+
+
+def get_avro_file_content(tenant_id: str, account_id: str, qdc_client: qdc.QDCExternalAPIClient) -> bytes:
+    datasource_id = new_global_id(tenant_id=tenant_id, cluster_id=account_id, data_id="", data_type="data_source")
+    logger.debug("Datasource id: {dsrc_id}".format(dsrc_id=datasource_id))
+    res = qdc_client.get_export_url(datasource_id=datasource_id)
+    file_content = qdc_client.download_file(res).content
+    return file_content
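A self-contained sketch of gen_existing_global_id_dict, feeding it bytes produced locally with fastavro instead of a downloaded export (the id is a placeholder):

    import io

    import fastavro

    from quollio_core.models.avroasset import AvroAsset
    from quollio_core.profilers.qdc import gen_existing_global_id_dict

    records = [
        AvroAsset(id="tbl-abc123", object_type="table", parents=["DB", "SCH"], name="T1").to_dict()
    ]
    buf = io.BytesIO()
    fastavro.writer(buf, AvroAsset.avro_schema_to_python(), records)

    print(gen_existing_global_id_dict(avro_content=buf.getvalue()))  # {'tbl-abc123': True}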
quollio_core/profilers/snowflake.py CHANGED
@@ -1,13 +1,20 @@
+import io
 import logging
-from typing import List
+import os
+from typing import Dict, List
 
+from fastavro import writer
+
+from quollio_core.helper.core import new_global_id
+from quollio_core.models.avroasset import AvroAsset
+from quollio_core.models.qdc import GetImportURLRequest
 from quollio_core.profilers.lineage import (
-    gen_column_lineage_payload,
-    gen_table_lineage_payload,
+    gen_column_avro_lineage_payload,
+    gen_table_avro_lineage_payload,
     parse_snowflake_results,
 )
 from quollio_core.profilers.sqllineage import SQLLineage
-from quollio_core.profilers.stats import gen_table_stats_payload, get_is_target_stats_items, render_sql_for_stats
+from quollio_core.profilers.stats import gen_table_stats_avro_payload, get_is_target_stats_items, render_sql_for_stats
 from quollio_core.repository import qdc, snowflake
 
 logger = logging.getLogger(__name__)
@@ -17,6 +24,7 @@ def snowflake_table_to_table_lineage(
     conn: snowflake.SnowflakeConnectionConfig,
     qdc_client: qdc.QDCExternalAPIClient,
     tenant_id: str,
+    existing_global_ids: Dict[str, bool],
 ) -> None:
     with snowflake.SnowflakeQueryExecutor(conn) as sf_executor:
         results, err = sf_executor.get_query_results(
@@ -38,28 +46,41 @@ def snowflake_table_to_table_lineage(
             )
             return
         parsed_results = parse_snowflake_results(results=results)
-        update_table_lineage_inputs = gen_table_lineage_payload(
+        update_table_lineage_inputs = gen_table_avro_lineage_payload(
             tenant_id=tenant_id,
             endpoint=conn.account_id,
             tables=parsed_results,
+            existing_global_ids=existing_global_ids,
+        )
+        stack_name = os.getenv("CF_STACK")
+        import_req = GetImportURLRequest(
+            service_name="snowflake",
+            source_name=stack_name,
+            file_name="{name}.avro".format(name=stack_name),
+            override_logical_name="false",
+            update_mode="partial",
+        )
+        datasource_id = new_global_id(
+            tenant_id=tenant_id, cluster_id=conn.account_id, data_id="", data_type="data_source"
         )
+        logger.debug("Datasource id: {dsrc_id}".format(dsrc_id=datasource_id))
+        import_res = qdc_client.get_import_url(datasource_id=datasource_id, payload=import_req)
+        if import_res is None:
+            logger.error("get_import_url failed. Please retry `load_lineage` again")
+            return
+        logger.debug("ImportResponse: {res}".format(res=import_res))
 
-        req_count = 0
-        for update_table_lineage_input in update_table_lineage_inputs:
-            logger.info(
-                "Generating table lineage. downstream: {db} -> {schema} -> {table}".format(
-                    db=update_table_lineage_input.downstream_database_name,
-                    schema=update_table_lineage_input.downstream_schema_name,
-                    table=update_table_lineage_input.downstream_table_name,
-                )
-            )
-            status_code = qdc_client.update_lineage_by_id(
-                global_id=update_table_lineage_input.downstream_global_id,
-                payload=update_table_lineage_input.upstreams.as_dict(),
-            )
-            if status_code == 200:
-                req_count += 1
-        logger.info(f"Generating table lineage is finished. {req_count} lineages are ingested.")
+        avro_schema = AvroAsset.avro_schema_to_python()
+
+        buffer = io.BytesIO()
+        writer(buffer, avro_schema, update_table_lineage_inputs)
+        res = qdc_client.upload_file(
+            url=import_res.location,
+            metadata=import_res.datasource_metadata_response_body,
+            buffer=buffer.getbuffer().tobytes(),
+        )
+        if res == 200:
+            logger.info("Upload table lineage is finished.")
         return
 
 
@@ -67,6 +88,7 @@ def snowflake_column_to_column_lineage(
     conn: snowflake.SnowflakeConnectionConfig,
     qdc_client: qdc.QDCExternalAPIClient,
     tenant_id: str,
+    existing_global_ids: Dict[str, bool],
 ) -> None:
     with snowflake.SnowflakeQueryExecutor(conn) as sf_executor:
         results, err = sf_executor.get_query_results(
@@ -87,29 +109,39 @@ def snowflake_column_to_column_lineage(
                 "No lineage data in ACCOUNT_USAGE.SNOWFLAKE. Please check the data in `QUOLLIO_LINEAGE_COLUMN_LEVEL`."
             )
             return
-        update_column_lineage_inputs = gen_column_lineage_payload(
-            tenant_id=tenant_id,
-            endpoint=conn.account_id,
-            columns=results,
+        update_column_lineage_inputs = gen_column_avro_lineage_payload(
+            tenant_id=tenant_id, endpoint=conn.account_id, columns=results, existing_global_ids=existing_global_ids
         )
 
-        req_count = 0
-        for update_column_lineage_input in update_column_lineage_inputs:
-            logger.info(
-                "Generating column lineage. downstream: {db} -> {schema} -> {table} -> {column}".format(
-                    db=update_column_lineage_input.downstream_database_name,
-                    schema=update_column_lineage_input.downstream_schema_name,
-                    table=update_column_lineage_input.downstream_table_name,
-                    column=update_column_lineage_input.downstream_column_name,
-                )
-            )
-            status_code = qdc_client.update_lineage_by_id(
-                global_id=update_column_lineage_input.downstream_global_id,
-                payload=update_column_lineage_input.upstreams.as_dict(),
-            )
-            if status_code == 200:
-                req_count += 1
-        logger.info(f"Generating column lineage is finished. {req_count} lineages are ingested.")
+        stack_name = os.getenv("CF_STACK")
+        import_req = GetImportURLRequest(
+            service_name="snowflake",
+            source_name=stack_name,
+            file_name="{name}.avro".format(name=stack_name),
+            override_logical_name="false",
+            update_mode="partial",
+        )
+        datasource_id = new_global_id(
+            tenant_id=tenant_id, cluster_id=conn.account_id, data_id="", data_type="data_source"
+        )
+        logger.debug("Datasource id: {dsrc_id}".format(dsrc_id=datasource_id))
+        import_res = qdc_client.get_import_url(datasource_id=datasource_id, payload=import_req)
+        if import_res is None:
+            logger.error("get_import_url failed. Please retry load_lineage again")
+            return
+        logger.debug("ImportResponse: {res}".format(res=import_res))
+
+        avro_schema = AvroAsset.avro_schema_to_python()
+
+        buffer = io.BytesIO()
+        writer(buffer, avro_schema, update_column_lineage_inputs)
+        res = qdc_client.upload_file(
+            url=import_res.location,
+            metadata=import_res.datasource_metadata_response_body,
+            buffer=buffer.getbuffer().tobytes(),
+        )
+        if res == 200:
+            logger.info("Upload column lineage is finished.")
         return
 
 
@@ -177,6 +209,7 @@ def snowflake_table_stats(
     qdc_client: qdc.QDCExternalAPIClient,
     tenant_id: str,
     stats_items: List[str],
+    existing_global_ids: Dict[str, bool],
 ) -> None:
     with snowflake.SnowflakeQueryExecutor(conn) as sf_executor:
         get_stats_view_query = _gen_get_stats_views_query(
@@ -193,8 +226,8 @@ and fix it or grant usage permission to both `{conn.account_database}` and `{con
 and select permissions to views begins with `QUOLLIO_STATS_COLUMNS_`."
             )
             return
-        req_count = 0
         is_aggregate_items = get_is_target_stats_items(stats_items=stats_items)
+        update_stats_inputs = list()
         for stats_view in stats_views:
             table_fqn = '"{catalog}"."{schema}"."{table}"'.format(
                 catalog=stats_view["TABLE_CATALOG"], schema=stats_view["TABLE_SCHEMA"], table=stats_view["TABLE_NAME"]
@@ -210,23 +243,43 @@ and select permissions to views begins with `QUOLLIO_STATS_COLUMNS_`."
 or user has select permission to it."
                 )
                 continue
-            payloads = gen_table_stats_payload(tenant_id=tenant_id, endpoint=conn.account_id, stats=stats_result)
-            for payload in payloads:
-                logger.info(
-                    "Generating table stats. asset: {db} -> {schema} -> {table} -> {column}".format(
-                        db=payload.db,
-                        schema=payload.schema,
-                        table=payload.table,
-                        column=payload.column,
-                    )
-                )
-                status_code = qdc_client.update_stats_by_id(
-                    global_id=payload.global_id,
-                    payload=payload.body.get_column_stats(),
-                )
-                if status_code == 200:
-                    req_count += 1
-            logger.info(f"Generating table stats is finished. {req_count} stats are ingested.")
+            payloads = gen_table_stats_avro_payload(
+                tenant_id=tenant_id,
+                endpoint=conn.account_id,
+                stats=stats_result,
+                existing_global_ids=existing_global_ids,
+            )
+            update_stats_inputs += payloads
+
+        stack_name = os.getenv("CF_STACK")
+        import_req = GetImportURLRequest(
+            service_name="snowflake",
+            source_name=stack_name,
+            file_name="{name}.avro".format(name=stack_name),
+            override_logical_name="false",
+            update_mode="partial",
+        )
+        datasource_id = new_global_id(
+            tenant_id=tenant_id, cluster_id=conn.account_id, data_id="", data_type="data_source"
+        )
+        logger.debug("Datasource id: {dsrc_id}".format(dsrc_id=datasource_id))
+        import_res = qdc_client.get_import_url(datasource_id=datasource_id, payload=import_req)
+        if import_res is None:
+            logger.error("get_import_url failed. Please retry load_stats again")
+            return
+        logger.debug("ImportResponse: {res}".format(res=import_res))
+
+        avro_schema = AvroAsset.avro_schema_to_python()
+
+        buffer = io.BytesIO()
+        writer(buffer, avro_schema, update_stats_inputs)
+        res = qdc_client.upload_file(
+            url=import_res.location,
+            metadata=import_res.datasource_metadata_response_body,
+            buffer=buffer.getbuffer().tobytes(),
+        )
+        if res == 200:
+            logger.info("Generating table stats is finished.")
        return
 
 
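All three loaders above repeat one request-URL-then-upload pattern; a condensed sketch of that shared flow (not part of the package, and CF_STACK plus the client are assumed to be configured):

    import io
    import os

    from fastavro import writer

    from quollio_core.models.avroasset import AvroAsset
    from quollio_core.models.qdc import GetImportURLRequest

    def upload_assets(qdc_client, datasource_id: str, assets: list) -> None:
        # 1. Ask QDC for a presigned import location for this data source.
        stack_name = os.getenv("CF_STACK", "my-stack")
        import_req = GetImportURLRequest(
            service_name="snowflake",
            source_name=stack_name,
            file_name=f"{stack_name}.avro",
            override_logical_name="false",
            update_mode="partial",
        )
        import_res = qdc_client.get_import_url(datasource_id=datasource_id, payload=import_req)
        # 2. Serialize the payload dicts to an in-memory Avro container.
        buffer = io.BytesIO()
        writer(buffer, AvroAsset.avro_schema_to_python(), assets)
        # 3. PUT the bytes to the presigned URL with the returned metadata.
        qdc_client.upload_file(
            url=import_res.location,
            metadata=import_res.datasource_metadata_response_body,
            buffer=buffer.getbuffer().tobytes(),
        )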
quollio_core/profilers/stats.py CHANGED
@@ -6,6 +6,7 @@ from typing import Dict, List, Tuple, Union
 from jinja2 import Template
 
 from quollio_core.helper.core import new_global_id
+from quollio_core.models.avroasset import AvroAsset
 
 logger = logging.getLogger(__name__)
 
@@ -70,6 +71,41 @@ def convert_value_type(obj, cast_str: bool = False):
     return obj
 
 
+def gen_table_stats_avro_payload(
+    tenant_id: str, endpoint: str, stats: List[Dict[str, str]], existing_global_ids: Dict[str, bool]
+) -> List[Dict[str, str]]:
+    payloads = list()
+    for stat in stats:
+        db_name = stat.get("DB_NAME", stat.get("db_name"))
+        schema_name = stat.get("SCHEMA_NAME", stat.get("schema_name"))
+        table_name = stat.get("TABLE_NAME", stat.get("table_name"))
+        column_name = stat.get("COLUMN_NAME", stat.get("column_name"))
+        global_id_arg = "{db}{schema}{table}{column}".format(
+            db=db_name, schema=schema_name, table=table_name, column=column_name
+        )
+        column_global_id = new_global_id(
+            tenant_id=tenant_id, cluster_id=endpoint, data_id=global_id_arg, data_type="column"
+        )
+        if existing_global_ids.get(column_global_id) is not True:
+            continue
+        avro_assets = AvroAsset(
+            id=column_global_id,
+            object_type="column",
+            parents=[db_name, schema_name, table_name],
+            name=column_name,
+            stats_max=convert_value_type(stat.get("MAX_VALUE", stat.get("max_value")), True),
+            stats_min=convert_value_type(stat.get("MIN_VALUE", stat.get("min_value")), True),
+            stats_mean=convert_value_type(stat.get("AVG_VALUE", stat.get("avg_value")), True),
+            stats_median=convert_value_type(stat.get("MEDIAN_VALUE", stat.get("median_value")), True),
+            stats_mode=convert_value_type(stat.get("MODE_VALUE", stat.get("mode_value")), True),
+            stats_stddev=convert_value_type(stat.get("STDDEV_VALUE", stat.get("stddev_value")), True),
+            stats_number_of_null=convert_value_type(stat.get("NULL_COUNT", stat.get("null_count")), True),
+            stats_number_of_unique=convert_value_type(stat.get("CARDINALITY", stat.get("cardinality")), True),
+        )
+        payloads.append(avro_assets.to_dict())
+    return payloads
+
+
 def gen_table_stats_payload(tenant_id: str, endpoint: str, stats: List[Dict[str, str]]) -> List[StatsRequest]:
     payloads = list()
     for stat in stats:
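An illustrative input record for gen_table_stats_avro_payload; the stat.get("UPPER", stat.get("lower")) fallbacks mean either key casing is accepted, and only columns already present in existing_global_ids are emitted (names and values below are placeholders):

    from quollio_core.helper.core import new_global_id
    from quollio_core.profilers.stats import gen_table_stats_avro_payload

    stat = {
        "DB_NAME": "DB",
        "SCHEMA_NAME": "SCH",
        "TABLE_NAME": "ORDERS",
        "COLUMN_NAME": "AMOUNT",
        "MAX_VALUE": 120,
        "NULL_COUNT": 0,
    }
    # Mirrors the "{db}{schema}{table}{column}" concatenation used above.
    column_id = new_global_id(
        tenant_id="tenant-a", cluster_id="my-account",
        data_id="DBSCHORDERSAMOUNT", data_type="column",
    )
    payloads = gen_table_stats_avro_payload(
        tenant_id="tenant-a",
        endpoint="my-account",
        stats=[stat],
        existing_global_ids={column_id: True},
    )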
quollio_core/repository/qdc.py CHANGED
@@ -8,6 +8,8 @@ import jwt
 import requests  # type: ignore
 from requests.exceptions import ConnectionError, HTTPError, RequestException, Timeout
 
+from quollio_core.models.qdc import DataSourceMetadataResponseBody, GetImportURLRequest, GetImportURLResponse
+
 logger = logging.getLogger(__name__)
 
 
@@ -64,6 +66,95 @@ class QDCExternalAPIClient:
         session.mount("https://", requests.adapters.HTTPAdapter(max_retries=retry))
         return session
 
+    def get_export_url(self, datasource_id: str) -> GetImportURLResponse:
+        self._refresh_token_if_expired()
+        headers = {"content-type": "application/json", "authorization": f"Bearer {self.auth_token}"}
+        endpoint = "{base_url}/v2/sources/{dsrc_id}/export-avro".format(base_url=self.base_url, dsrc_id=datasource_id)
+        try:
+            res = self.session.post(endpoint, headers=headers, data={})
+            res.raise_for_status()
+        except ConnectionError as ce:
+            logger.error(f"Connection Error: {ce} global_id: {datasource_id}.")
+        except HTTPError as he:
+            logger.error(f"HTTP Error: {he} global_id: {datasource_id}.")
+        except Timeout as te:
+            logger.error(f"Timeout Error: {te} global_id: {datasource_id}.")
+        except RequestException as re:
+            logger.error(f"RequestException Error: {re} global_id: {datasource_id}.")
+        else:
+            res = json.loads(res.text)
+            location = res.get("data").get("location")
+            return location
+
+    def download_file(self, url: str) -> requests.Response:
+        self._refresh_token_if_expired()
+
+        try:
+            res = self.session.get(url)
+            res.raise_for_status()
+        except ConnectionError as ce:
+            logger.error(f"Connection Error: {ce}.")
+        except HTTPError as he:
+            logger.error(f"HTTP Error: {he}.")
+        except Timeout as te:
+            logger.error(f"Timeout Error: {te}")
+        except RequestException as re:
+            logger.error(f"RequestException Error: {re}")
+        else:
+            return res
+
+    def get_import_url(self, datasource_id: str, payload: GetImportURLRequest) -> GetImportURLResponse:
+        self._refresh_token_if_expired()
+        headers = {"content-type": "application/json", "authorization": f"Bearer {self.auth_token}"}
+        endpoint = "{base_url}/v2/sources/{dsrc_id}/import".format(base_url=self.base_url, dsrc_id=datasource_id)
+        try:
+            payload_dict = payload.as_dict()
+            res = self.session.post(endpoint, headers=headers, json=payload_dict)
+            logger.debug(f"Got the result of import_url request: {res.text}")
+            res.raise_for_status()
+        except ConnectionError as ce:
+            logger.error(f"Connection Error: {ce} global_id: {datasource_id}.")
+        except HTTPError as he:
+            logger.error(f"HTTP Error: {he} global_id: {datasource_id}.")
+        except Timeout as te:
+            logger.error(f"Timeout Error: {te} global_id: {datasource_id}.")
+        except RequestException as re:
+            logger.error(f"RequestException Error: {re} global_id: {datasource_id}.")
+        else:
+            res = json.loads(res.text)
+            datasource_metadata_response = DataSourceMetadataResponseBody(**res.get("data").get("metadata"))
+            location = res.get("data").get("location")
+            response = GetImportURLResponse(
+                location=location, datasource_metadata_response_body=datasource_metadata_response
+            )
+            return response
+
+    def upload_file(self, url: str, metadata: DataSourceMetadataResponseBody, buffer: bytes):
+        self._refresh_token_if_expired()
+        headers = {
+            "Content-Type": "application/octet-stream",
+            "x-amz-meta-user_id": metadata.user_id,
+            "x-amz-meta-job_key": metadata.job_key,
+            "x-amz-meta-service_name": metadata.service_name,
+            "x-amz-meta-source_name": metadata.source_name,
+            "x-amz-meta-source_type": metadata.source_type,
+            "x-amz-meta-override_logical_name": metadata.override_logical_name,
+            "Content-Length": str(len(buffer)),
+        }
+        try:
+            res = self.session.put(url, headers=headers, data=buffer)
+            res.raise_for_status()
+        except ConnectionError as ce:
+            logger.error(f"Connection Error: {ce}.")
+        except HTTPError as he:
+            logger.error(f"HTTP Error: {he}.")
+        except Timeout as te:
+            logger.error(f"Timeout Error: {te}")
+        except RequestException as re:
+            logger.error(f"RequestException Error: {re}")
+        else:
+            return res.status_code
+
     def update_stats_by_id(self, global_id: str, payload: Dict[str, List[str]]) -> int:
         self._refresh_token_if_expired()
         headers = {"content-type": "application/json", "authorization": f"Bearer {self.auth_token}"}
quollio_core/repository/snowflake.py CHANGED
@@ -1,7 +1,9 @@
 import logging
-from dataclasses import asdict, dataclass
+from dataclasses import dataclass
 from typing import Dict, List, Tuple
 
+from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives import serialization
 from snowflake.connector import DictCursor, connect, errors
 from snowflake.connector.connection import SnowflakeConnection
 
@@ -12,16 +14,67 @@ logger = logging.getLogger(__name__)
 class SnowflakeConnectionConfig:
     account_id: str
     account_user: str
-    account_password: str
     account_build_role: str
     account_query_role: str
     account_warehouse: str
     account_database: str
     account_schema: str
+    account_password: str = None
+    private_key: str = None
     threads: int = 3
 
     def as_dict(self) -> Dict[str, str]:
-        return asdict(self)
+        """Convert config to dictionary, handling both auth methods for DBT."""
+        base_params = {
+            "account_id": self.account_id,
+            "account_user": self.account_user,
+            "account_build_role": self.account_build_role,
+            "account_query_role": self.account_query_role,
+            "account_warehouse": self.account_warehouse,
+            "account_database": self.account_database,
+            "account_schema": self.account_schema,
+            "threads": self.threads,
+        }
+
+        # Add auth parameters based on method
+        if self.private_key:
+            # Keep private key as is, template will handle formatting
+            base_params["private_key"] = self.private_key
+        elif self.account_password:
+            base_params["account_password"] = self.account_password
+
+        return {k: v for k, v in base_params.items() if v is not None}
+
+    def get_connection_params(self) -> Dict[str, str]:
+        """Get the appropriate connection parameters based on authentication method."""
+        params = {
+            "user": self.account_user,
+            "account": self.account_id,
+            "warehouse": self.account_warehouse,
+            "database": self.account_database,
+            "schema": self.account_schema,
+            "role": self.account_query_role,
+        }
+
+        # Add authentication parameters based on method
+        if self.private_key:
+            try:
+                # Parse private key content into RSA key object
+                pkey = serialization.load_pem_private_key(
+                    self.private_key.encode("utf-8"),
+                    password=None,
+                    backend=default_backend(),
+                )
+                params["private_key"] = pkey
+            except Exception as e:
+                logger.error(f"Failed to parse private key: {str(e)}")
+                raise
+        elif self.account_password:
+            params["password"] = self.account_password
+        else:
+            raise ValueError("Either password or private key authentication must be configured")
+
+        return params
 
 
 class SnowflakeQueryExecutor:
@@ -35,16 +88,12 @@ class SnowflakeQueryExecutor:
         self.conn.close()
 
     def __initialize(self, config: SnowflakeConnectionConfig) -> SnowflakeConnection:
-        conn: SnowflakeConnection = connect(
-            user=config.account_user,
-            password=config.account_password,
-            role=config.account_query_role,
-            account=config.account_id,
-            warehouse=config.account_warehouse,
-            database=config.account_database,
-            schema=config.account_schema,
-        )
-        return conn
+        try:
+            conn: SnowflakeConnection = connect(**config.get_connection_params())
+            return conn
+        except Exception as e:
+            logger.error(f"Failed to initialize Snowflake connection: {str(e)}")
+            raise
 
     def get_query_results(self, query: str) -> Tuple[List[Dict[str, str]], Exception]:
         with self.conn.cursor(DictCursor) as cur:
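A minimal sketch of the key-pair path through this config class, assuming an unencrypted PKCS#8 PEM key (which matches the password=None in load_pem_private_key above; the account values are placeholders):

    from cryptography.hazmat.primitives import serialization
    from cryptography.hazmat.primitives.asymmetric import rsa

    from quollio_core.repository.snowflake import SnowflakeConnectionConfig

    # Generate a throwaway key for illustration; in practice the PEM comes
    # from the key registered with the Snowflake user.
    key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
    pem = key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.PKCS8,
        encryption_algorithm=serialization.NoEncryption(),
    ).decode("utf-8")

    conn = SnowflakeConnectionConfig(
        account_id="my-account",
        account_user="QUOLLIO_USER",
        account_build_role="BUILD_ROLE",
        account_query_role="QUERY_ROLE",
        account_warehouse="WH",
        account_database="DB",
        account_schema="PUBLIC",
        private_key=pem,
    )
    params = conn.get_connection_params()  # carries an RSA key object, no password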
quollio_core/snowflake.py CHANGED
@@ -6,6 +6,7 @@ import shutil
 from quollio_core.helper.core import is_valid_domain, setup_dbt_profile
 from quollio_core.helper.env_default import env_default
 from quollio_core.helper.log import set_log_level
+from quollio_core.profilers.qdc import gen_existing_global_id_dict, get_avro_file_content
 from quollio_core.profilers.snowflake import (
     snowflake_column_to_column_lineage,
     snowflake_table_level_sqllineage,
@@ -93,10 +94,15 @@
 ) -> None:
     logger.info("Generate Snowflake table to table lineage.")
 
-    snowflake_table_to_table_lineage(
-        conn=conn,
-        qdc_client=qdc_client,
+    file_content = get_avro_file_content(
         tenant_id=tenant_id,
+        account_id=conn.account_id,
+        qdc_client=qdc_client,
+    )
+    existing_global_ids = gen_existing_global_id_dict(avro_content=file_content)
+
+    snowflake_table_to_table_lineage(
+        conn=conn, qdc_client=qdc_client, tenant_id=tenant_id, existing_global_ids=existing_global_ids
     )
 
     if enable_column_lineage:
@@ -104,9 +110,7 @@
             f"enable_column_lineage is set to {enable_column_lineage}.Generate Snowflake column to column lineage."
         )
         snowflake_column_to_column_lineage(
-            conn=conn,
-            qdc_client=qdc_client,
-            tenant_id=tenant_id,
+            conn=conn, qdc_client=qdc_client, tenant_id=tenant_id, existing_global_ids=existing_global_ids
         )
     else:
         logger.info("Skip column lineage ingestion. Set enable_column_lineage to True if you ingest column lineage.")
@@ -124,6 +128,13 @@
 ) -> None:
     logger.info("Generate Snowflake stats.")
 
+    file_content = get_avro_file_content(
+        tenant_id=tenant_id,
+        account_id=conn.account_id,
+        qdc_client=qdc_client,
+    )
+    existing_global_ids = gen_existing_global_id_dict(avro_content=file_content)
+
     if stats_items is None:
         raise ValueError("No stats items are not selected. Please specify any value to `stats_items` param.")
 
@@ -133,6 +144,7 @@
         qdc_client=qdc_client,
         tenant_id=tenant_id,
         stats_items=stats_items,
+        existing_global_ids=existing_global_ids,
     )
 
     logger.info("Stats data is successfully finished.")
@@ -237,7 +249,11 @@
         "--target_tables",
         type=str,
         nargs="*",
-        choices=["quollio_lineage_column_level", "quollio_lineage_table_level", "quollio_stats_columns"],
+        choices=[
+            "quollio_lineage_column_level",
+            "quollio_lineage_table_level",
+            "quollio_stats_columns",
+        ],
         action=env_default("SNOWFLAKE_TARGET_TABLES"),
         required=False,
         help="Target table name if you want to create only specific tables. \
@@ -335,6 +351,22 @@
         help="Access method to Quollio API. Default 'PUBLIC'. Choose 'VPC_ENDPOINT'\
         if you use API Gateway VPC Endpoint, DefaultValue is set to PUBLIC.",
     )
+    parser.add_argument(
+        "--auth_type",
+        type=str,
+        choices=["PASSWORD", "KEYPAIR"],
+        action=env_default("SNOWFLAKE_AUTH_TYPE"),
+        default="PASSWORD",
+        required=False,
+        help="Authentication method to use (PASSWORD or KEYPAIR)",
+    )
+    parser.add_argument(
+        "--private_key",
+        type=str,
+        action=env_default("SNOWFLAKE_PRIVATE_KEY"),
+        required=False,
+        help="Private key content for keypair authentication",
+    )
 
     stats_items = get_column_stats_items()
     parser.add_argument(
@@ -351,16 +383,31 @@
     args = parser.parse_args()
     set_log_level(level=args.log_level)
 
-    conn = snowflake.SnowflakeConnectionConfig(
-        account_id=args.account_id,
-        account_user=args.user,
-        account_password=args.password,
-        account_build_role=args.build_role,
-        account_query_role=args.query_role,
-        account_warehouse=args.warehouse,
-        account_database=args.database,
-        account_schema=args.schema,
-    )
+    # Update authentication handling
+    auth_params = {
+        "account_id": args.account_id,
+        "account_user": args.user,
+        "account_build_role": args.build_role,
+        "account_query_role": args.query_role,
+        "account_warehouse": args.warehouse,
+        "account_database": args.database,
+        "account_schema": args.schema,
+    }
+
+    # Add authentication specific parameters based on method
+    if args.auth_type == "KEYPAIR":
+        if not args.private_key:
+            raise ValueError("private_key is required when using keypair authentication")
+        auth_params["private_key"] = args.private_key
+        logger.info("Using keypair authentication")
+    else:
+        if not args.password:
+            raise ValueError("password is required when using password authentication")
+        auth_params["account_password"] = args.password
+        logger.info("Using password authentication")
+        logger.warning("Password authentication is being deprecated. Please consider using keypair authentication.")
+
+    conn = snowflake.SnowflakeConnectionConfig(**auth_params)
 
     if len(args.commands) == 0:
         raise ValueError("No command is provided")
quollio_core-0.5.3.dist-info/METADATA → quollio_core-0.6.1.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
-Metadata-Version: 2.3
+Metadata-Version: 2.4
 Name: quollio-core
-Version: 0.5.3
+Version: 0.6.1
 Summary: Quollio Core
 Author-email: quollio-dev <qt.dev@quollio.com>
 Maintainer-email: RyoAriyama <ryo.arym@gmail.com>, tharuta <35373297+TakumiHaruta@users.noreply.github.com>
@@ -17,6 +17,7 @@ Classifier: Operating System :: OS Independent
 Classifier: Development Status :: 3 - Alpha
 Classifier: Intended Audience :: Developers
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
+License-File: LICENSE
 Requires-Dist: blake3==0.3.3
 Requires-Dist: dbt-core==1.7.10
 Requires-Dist: dbt-snowflake==1.7.0
@@ -38,6 +39,8 @@ Requires-Dist: google-cloud-datacatalog==3.19.0
 Requires-Dist: google-cloud-datacatalog-lineage==0.3.6
 Requires-Dist: google-api-python-client==2.131.0
 Requires-Dist: teradatasql==20.0.0.15
+Requires-Dist: dataclasses_avroschema==0.63.9
+Requires-Dist: fastavro==1.9.7
 Requires-Dist: black>=22.3.0 ; extra == "test"
 Requires-Dist: coverage>=7.3.2 ; extra == "test"
 Requires-Dist: isort>=5.10.1 ; extra == "test"
quollio_core-0.5.3.dist-info/RECORD → quollio_core-0.6.1.dist-info/RECORD RENAMED
@@ -1,8 +1,8 @@
-quollio_core/__init__.py,sha256=YrefYO__k4ewIsYGUmshPK7ySvzQxHKIdRv7kTYGaVk,83
+quollio_core/__init__.py,sha256=UKsQb_ngmTusubotU4aVGiWhbSJfs4FBWUasBuPnlg0,83
 quollio_core/bigquery.py,sha256=6Oq4DVGpa3X21Es_nbrsb8pK3vaxwb9Egnvq3huo95k,5894
 quollio_core/bricks.py,sha256=8h3kbI2b6lGH2s-56jE_Q5-R5-nIsQYMfvtRrkFOzoU,10784
 quollio_core/redshift.py,sha256=KcdljY95xYf9JYrsaMOBoP_XxQQ8wFVE5ue_XEMVSFc,11504
-quollio_core/snowflake.py,sha256=wKBfyoqdoLN-kYsceT0ctqWeay_Sn8OpKMSYedlBjD4,14558
+quollio_core/snowflake.py,sha256=YM9rnlk7iKY7rg2BxEPQgKtCuHf17Tb5HTr4a7YV8oo,16467
 quollio_core/teradata.py,sha256=H2VUcJvr8W-M2wvm3710Gf1ENb-BSscrDRKNm8gdHJE,8227
 quollio_core/dbt_projects/databricks/.gitignore,sha256=1jJAyXSzJ3YUm0nx3i7wUSE4RjQMX3ad6F8O88UbtzI,29
 quollio_core/dbt_projects/databricks/README.md,sha256=ZpRQyhFAODAiS8dc1Kb_ndkul4cu4o4udN_EMa49CU4,440
@@ -59,34 +59,37 @@ quollio_core/dbt_projects/snowflake/models/quollio_stats_columns.yml,sha256=V_BE
 quollio_core/dbt_projects/snowflake/models/quollio_stats_profiling_columns.sql,sha256=ubMEzZNHq55zCncg7HbzdMKMSdqHnwbJmVKYpet8Otc,2968
 quollio_core/dbt_projects/snowflake/models/quollio_stats_profiling_columns.yml,sha256=W39VAmFnnX6RBoW7B_4CConC1lm0Jm9o50Jsz9bYZzY,538
 quollio_core/dbt_projects/snowflake/models/sources.yml,sha256=vGSV33cNj4UUyPUcYS-JFgc3r8KvSLfiA7qhbDCUU9s,10975
-quollio_core/dbt_projects/snowflake/profiles/profiles_template.yml,sha256=gcZsgdGP461QuUM9jLbBKdadT8cHTXgNarq_azOOMhk,379
+quollio_core/dbt_projects/snowflake/profiles/profiles_template.yml,sha256=6yIlaFSKxWxqnX-LboO1xVC-7DtPYcAKrRjn3rPGNfk,511
 quollio_core/dbt_projects/snowflake/seeds/.gitkeep,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 quollio_core/dbt_projects/snowflake/snapshots/.gitkeep,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 quollio_core/helper/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-quollio_core/helper/core.py,sha256=ddV5VLa49Je11eHUjlRN5m3nhtqPMMUSeG4135HuZC8,1390
+quollio_core/helper/core.py,sha256=PMpc80fWVf2tdf3J1qtK9WghXgU2l7i72ufamDeHwcw,1422
 quollio_core/helper/env_default.py,sha256=H6gbSGUPrEDZr4YDrL49hbOpw6RntI4U82kX1q6vUnI,2148
 quollio_core/helper/log.py,sha256=flxyZZ44G79l1TaUp3OT58uCHcnE5z_pCduwoeI6IUs,645
 quollio_core/helper/log_utils.py,sha256=QontLKETHjSAbQniJ7YqS0RY2AYvFHSjrlPiGr317LE,1391
+quollio_core/models/avroasset.py,sha256=YZHzOS62N0_sidneXI3IZ2MA8Bz1vFVgF6F9_UilC3s,603
+quollio_core/models/qdc.py,sha256=UObaUpvAQ4vOhI6jfwvNFrJ3--6AX2v9yl9_d3Juy7M,739
 quollio_core/profilers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 quollio_core/profilers/bigquery.py,sha256=mEr7CFQNgBFqWR8XfCOk8WTm5k5qZhLF8ODVWfskPRI,5797
 quollio_core/profilers/databricks.py,sha256=ik4RiR_GOeU3S7s6C6Y9SGe1D_Y_f98BDWJVlEJXL4U,7868
-quollio_core/profilers/lineage.py,sha256=4FyxIuPBrUFihqZryqTQBcfB0Z7634lKl_WwkD82vzE,6865
+quollio_core/profilers/lineage.py,sha256=GMWue6lgiz7wFYnNpaHVFivprA-iqhbCHf63IsVB8Vk,11260
+quollio_core/profilers/qdc.py,sha256=P0STRfe5G4d-UI7RdVbYmAfP_tAn1HbFUjeirxWipz4,995
 quollio_core/profilers/redshift.py,sha256=p6ONDCkhndZAOcKAwEyQ5fsi-jsQrlwHHb7LTI_m1uk,6473
-quollio_core/profilers/snowflake.py,sha256=m9Ivv2LRwnrmgKS36a039AhrO27sR1EaOOdqNF26PhI,11156
+quollio_core/profilers/snowflake.py,sha256=ewvULWIlcq2h0jOyRzUpedW0NS8QlkSgICS-dZDYl18,13027
 quollio_core/profilers/sqllineage.py,sha256=h0FT6CYb0A20zSc68GELZ7Q8bDbaHLQnZQHsXBEXBug,5261
-quollio_core/profilers/stats.py,sha256=OLQrdrh0y64jo9rmzvGlDdxy_c7gMz_GnlXPJzWkBjM,7343
+quollio_core/profilers/stats.py,sha256=PwMNyr7JwdrVbSSfVd-XsgCZV7sKoRjihaRhhZ88uyM,9328
 quollio_core/profilers/teradata/lineage.py,sha256=2wNksBQD8vC6UTQwCglPsF53YMEVIkAb2CWTmpiTHDU,7368
 quollio_core/profilers/teradata/stats.py,sha256=OagvkTRFiWVbiLABwZwR3wQ7y36edwOViDetHsYiyxI,9277
 quollio_core/repository/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 quollio_core/repository/bigquery.py,sha256=3AyGcJNYGnUyMweyc6lGm4quwrOzd-ZBS2zNnFwafII,3990
 quollio_core/repository/databricks.py,sha256=9Cgdv8qBnVaHqu3RA-IUBieAqb69moQ-KAAMVSf5Ds4,1877
 quollio_core/repository/dbt.py,sha256=cnLwJPywLi8VowVW7zfIBa9jxVwDWO7xzzNRn1vWiuw,659
-quollio_core/repository/qdc.py,sha256=IPGiYafnJYkuD7_kLapVM98-9ZhEwq5S-dGY2bO8fVk,4624
+quollio_core/repository/qdc.py,sha256=Ni0rk9CX8ienqM_HYLuWLBKTwycvTENC7x7wGWhzjXs,8978
 quollio_core/repository/redshift.py,sha256=p2ouEuYcDCjx1oBhc6H1ekQsvEqHGd3bFu3PW0ngYBc,2880
-quollio_core/repository/snowflake.py,sha256=zL9-xi98AIftdW9MuKI-M3pZ1kQuuH-UiZH8HcJvmk4,1769
+quollio_core/repository/snowflake.py,sha256=yCYXrYf4I5GL_ITNTXoggj0xNbQsdwxPSmsVvZYwUVU,3869
 quollio_core/repository/ssm.py,sha256=xpm1FzbBnIsBptuYPUNnPgkKU2AH3XxI-ZL0bEetvW0,2182
 quollio_core/repository/teradata.py,sha256=1AExxRBTswpSyF4OVyAUkoiZ0yVRfqt4T99FdllkTEI,3763
-quollio_core-0.5.3.dist-info/LICENSE,sha256=V8j_M8nAz8PvAOZQocyRDX7keai8UJ9skgmnwqETmdY,34520
-quollio_core-0.5.3.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
-quollio_core-0.5.3.dist-info/METADATA,sha256=aQkusHnQ6mw4E6KUM5Yo4LrzFFFHqDbIgIyzjHZFJc4,6924
-quollio_core-0.5.3.dist-info/RECORD,,
+quollio_core-0.6.1.dist-info/licenses/LICENSE,sha256=V8j_M8nAz8PvAOZQocyRDX7keai8UJ9skgmnwqETmdY,34520
+quollio_core-0.6.1.dist-info/WHEEL,sha256=_2ozNFCLWc93bK4WKHCO-eDUENDlo-dgc9cU3qokYO4,82
+quollio_core-0.6.1.dist-info/METADATA,sha256=ZdX8sgnt97nINrc021GvEp-o-i2Ph01vRLtjc_3J8fg,7023
+quollio_core-0.6.1.dist-info/RECORD,,
quollio_core-0.5.3.dist-info/WHEEL → quollio_core-0.6.1.dist-info/WHEEL RENAMED
@@ -1,4 +1,4 @@
 Wheel-Version: 1.0
-Generator: flit 3.10.1
+Generator: flit 3.11.0
 Root-Is-Purelib: true
 Tag: py3-none-any