qlever 0.5.27__py3-none-any.whl → 0.5.29__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of qlever might be problematic. See the package registry's advisory page for more details.

@@ -22,7 +22,7 @@ MULTI_INPUT_JSON = { "cmd": "zcat ${INPUT_FILES}", "parallel": "true" }
22
22
  STXXL_MEMORY = 5G
23
23
  PARSER_BUFFER_SIZE = 50M
24
24
  SETTINGS_JSON = { "num-triples-per-batch": 5000000 }
25
- ENCODE_AS_IDS = https://www.openhistoricalmap.org/node/ http://www.openhistoricalmap.org/node/ https://www.openhistoricalmap.org/way/ https://www.openhistoricalmap.org/relation/ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_node_tagged_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_node_untagged_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_way_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_relation_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_wayarea_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_relationarea_ https://www.openstreetmap.org/changeset/
25
+ ENCODE_AS_ID = https://www.openhistoricalmap.org/node/ http://www.openhistoricalmap.org/node/ https://www.openhistoricalmap.org/way/ https://www.openhistoricalmap.org/relation/ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#ohmnode_tagged_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#ohmnode_untagged_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#ohmway_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#ohmrel_ https://www.openstreetmap.org/changeset/
26
26
 
27
27
  [server]
28
28
  PORT = 7037
@@ -25,13 +25,13 @@ INPUT_FILES = ${data:NAME}.ttl.bz2
25
25
  CAT_INPUT_FILES = bzcat ${data:NAME}.ttl.bz2
26
26
  STXXL_MEMORY = 10G
27
27
  SETTINGS_JSON = { "num-triples-per-batch": 10000000 }
28
- ENCODE_AS_IDS = https://www.openstreetmap.org/node/ http://www.openstreetmap.org/node/ https://www.openstreetmap.org/way/ https://www.openstreetmap.org/relation/ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_node_tagged_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_node_untagged_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_way_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_relation_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_wayarea_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_relationarea_ https://www.openstreetmap.org/changeset/
28
+ ENCODE_AS_ID = https://www.openstreetmap.org/node/ http://www.openstreetmap.org/node/ https://www.openstreetmap.org/way/ https://www.openstreetmap.org/relation/ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osmnode_tagged_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osmnode_untagged_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osmway_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osmrel_ https://www.openstreetmap.org/changeset/
29
29
 
30
30
  # Server settings
31
31
  [server]
32
32
  HOSTNAME = localhost
33
33
  PORT = 7025
34
- ACCESS_TOKEN = ${data:NAME}_%RANDOM%
34
+ ACCESS_TOKEN = ${data:NAME}
35
35
  MEMORY_FOR_QUERIES = 20G
36
36
  CACHE_MAX_SIZE = 10G
37
37
  CACHE_MAX_SIZE_SINGLE_ENTRY = 5G
@@ -15,13 +15,13 @@ DESCRIPTION = OSM from ${GET_DATA_URL} (converted to RDF using osm2rdf, enhance
15
15
 
16
16
  [index]
17
17
  INPUT_FILES = ${data:NAME}.ttl.bz2
18
- CAT_INPUT_FILES = lbzcat -n 2 ${INPUT_FILES}
19
- PARALLEL_PARSING = true
18
+ MULTI_INPUT_JSON = { "cmd": "lbzcat -n 2 ${INPUT_FILES}", "parallel": "true" }
19
+ VOCABULARY_TYPE = on-disk-compressed-geo-split
20
20
  PARSER_BUFFER_SIZE = 100M
21
- STXXL_MEMORY = 40G
21
+ STXXL_MEMORY = 60G
22
22
  SETTINGS_JSON = { "num-triples-per-batch": 10000000 }
23
- ULIMIT = 10000
24
- ENCODE_AS_IDS = https://www.openstreetmap.org/node/ http://www.openstreetmap.org/node/ https://www.openstreetmap.org/way/ https://www.openstreetmap.org/relation/ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_node_tagged_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_node_untagged_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_way_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_relation_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_wayarea_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_relationarea_ https://www.openstreetmap.org/changeset/
23
+ ULIMIT = 50000
24
+ ENCODE_AS_ID = https://www.openstreetmap.org/node/ http://www.openstreetmap.org/node/ https://www.openstreetmap.org/way/ https://www.openstreetmap.org/relation/ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osmnode_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osmway_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osmrel_ https://www.openstreetmap.org/changeset/
25
25
 
26
26
  [server]
27
27
  PORT = 7007
@@ -0,0 +1,42 @@
1
+ # Qleverfile for OSM Planet, use with the QLever CLI (`pip install qlever`)
2
+ #
3
+ # qlever get-data # download ~100 GB (pbf), convert with osm2rdf, ~200B triples
4
+ # qlever index # ~40 hours, ~60 GB RAM, ~2.5 TB index size on disk
5
+ # qlever start # a few seconds, adjust MEMORY_FOR_QUERIES as needed
6
+ #
7
+ # Measured on an AMD Ryzen 9 9950X with 128 GB RAM and 4 x 8 TB NVMe (02.10.2025)
8
+
9
+ [data]
10
+ NAME = osm-planet
11
+ PLANET_PBF = planet-250929.osm.pbf
12
+ GET_DATA_URL = https://planet.openstreetmap.org/pbf/${PLANET_PBF}
13
+ GET_PBF_CMD = unbuffer wget -O ${PLANET_PBF} ${GET_DATA_URL}
14
+ OSM2RDF_CMD = unbuffer osm2rdf ${PLANET_PBF} -o ${NAME}.ttl --num-threads 20 --output-compression gz --cache . --store-locations disk-dense --iri-prefix-for-untagged-nodes http://www.openstreetmap.org/node/ --split-tag-key-by-semicolon ref --split-tag-key-by-semicolon service
15
+ GET_DATA_CMD = ${GET_PBF_CMD} && ${OSM2RDF_CMD} 2>&1 | tee ${NAME}.osm2rdf-log.txt
16
+ VERSION = $$(date -r ${PLANET_PBF} +%d.%m.%Y || echo "NO_DATE")
17
+ DESCRIPTION = OSM from ${GET_DATA_URL}, converted to RDF using osm2rdf, enhanced by GeoSPARQL triples ogc:sfContains, ogc:sfCovers, ogc:sfIntersects, ogc:sfEquals, ogc:sfTouches, ogc:sfCrosses, ogc:sfOverlaps
18
+
19
+ [index]
20
+ INPUT_FILES = ${data:NAME}.ttl.gz
21
+ MULTI_INPUT_JSON = { "cmd": "zcat ${INPUT_FILES}", "parallel": "true" }
22
+ VOCABULARY_TYPE = on-disk-compressed-geo-split
23
+ PARSER_BUFFER_SIZE = 100M
24
+ STXXL_MEMORY = 60G
25
+ SETTINGS_JSON = { "num-triples-per-batch": 10000000 }
26
+ ULIMIT = 50000
27
+ ENCODE_AS_ID = https://www.openstreetmap.org/node/ http://www.openstreetmap.org/node/ https://www.openstreetmap.org/way/ https://www.openstreetmap.org/relation/ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osmnode_tagged_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osmnode_untagged_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osmway_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osmrel_ https://www.openstreetmap.org/changeset/
28
+
29
+ [server]
30
+ PORT = 7007
31
+ ACCESS_TOKEN = ${data:NAME}
32
+ MEMORY_FOR_QUERIES = 40G
33
+ CACHE_MAX_SIZE = 20G
34
+ CACHE_MAX_SIZE_SINGLE_ENTRY = 10G
35
+ TIMEOUT = 600s
36
+
37
+ [runtime]
38
+ SYSTEM = docker
39
+ IMAGE = docker.io/adfreiburg/qlever:latest
40
+
41
+ [ui]
42
+ UI_CONFIG = osm-planet
@@ -55,8 +55,8 @@ MULTI_INPUT_JSON = [{ "cmd": "zcat {}", "graph": "http://sparql.uniprot.org/unip
55
55
  { "cmd": "zcat ${data:TTL_DIR}/examples_uniprot.ttl.gz", "graph": "http://sparql.uniprot.org/.well-known/sparql-examples" },
56
56
  { "cmd": "zcat ${data:TTL_DIR}/core.ttl.gz", "graph": "http://purl.uniprot.org/core" }]
57
57
  SETTINGS_JSON = { "languages-internal": [], "prefixes-external": [""], "locale": { "language": "en", "country": "US", "ignore-punctuation": true }, "ascii-prefixes-only": true, "num-triples-per-batch": 25000000 }
58
- STXXL_MEMORY = 60G
59
- ULIMIT = 10000
58
+ STXXL_MEMORY = 80G
59
+ ULIMIT = 50000
60
60
 
61
61
  [server]
62
62
  PORT = 7018
@@ -335,9 +335,7 @@ class BenchmarkQueriesCommand(QleverCommand):
335
335
  )
336
336
  return []
337
337
 
338
- return [
339
- (query['query'], query['sparql']) for query in data["queries"]
340
- ]
338
+ return [(query["query"], query["sparql"]) for query in data["queries"]]
341
339
 
342
340
  def get_result_size(
343
341
  self,
@@ -552,8 +550,7 @@ class BenchmarkQueriesCommand(QleverCommand):
552
550
 
553
551
  # Show what the command will do.
554
552
  example_queries_cmd = (
555
- "curl -sv https://qlever.cs.uni-freiburg.de/"
556
- f"api/examples/{args.ui_config}"
553
+ f"curl -sv https://qlever.dev/api/examples/{args.ui_config}"
557
554
  )
558
555
  sparql_endpoint = (
559
556
  args.sparql_endpoint
@@ -44,6 +44,7 @@ class SettingsCommand(QleverCommand):
44
44
  "lazy-result-max-cache-size",
45
45
  "query-planning-budget",
46
46
  "request-body-limit",
47
+ "service-max-redirects",
47
48
  "service-max-value-rows",
48
49
  "sort-estimate-cancellation-factor",
49
50
  "spatial-join-prefilter-max-size",
@@ -252,46 +252,74 @@ class UpdateWikidataCommand(QleverCommand):
252
252
  # operation = event_data.get("operation")
253
253
  rdf_added_data = event_data.get("rdf_added_data")
254
254
  rdf_deleted_data = event_data.get("rdf_deleted_data")
255
+ rdf_linked_shared_data = event_data.get(
256
+ "rdf_linked_shared_data"
257
+ )
258
+ rdf_unlinked_shared_data = event_data.get(
259
+ "rdf_unlinked_shared_data"
260
+ )
255
261
 
256
262
  # Process the to-be-deleted triples.
257
- if rdf_deleted_data is not None:
258
- try:
259
- rdf_deleted_data = rdf_deleted_data.get("data")
260
- graph = Graph()
261
- log.debug(f"RDF deleted data: {rdf_deleted_data}")
262
- graph.parse(data=rdf_deleted_data, format="turtle")
263
- for s, p, o in graph:
264
- triple = f"{s.n3()} {p.n3()} {o.n3()}"
265
- # NOTE: In case there was a previous `insert` of that
266
- # triple, it is safe to remove that `insert`, but not
267
- # the `delete` (in case the triple is contained in the
268
- # original data).
269
- if triple in insert_triples:
270
- insert_triples.remove(triple)
271
- delete_triples.add(triple)
272
- except Exception as e:
273
- log.error(f"Error reading `rdf_deleted_data`: {e}")
274
- return False
263
+ for rdf_to_be_deleted in (
264
+ rdf_deleted_data,
265
+ rdf_unlinked_shared_data,
266
+ ):
267
+ if rdf_to_be_deleted is not None:
268
+ try:
269
+ rdf_to_be_deleted_data = rdf_to_be_deleted.get(
270
+ "data"
271
+ )
272
+ graph = Graph()
273
+ log.debug(
274
+ f"RDF to_be_deleted data: {rdf_to_be_deleted_data}"
275
+ )
276
+ graph.parse(
277
+ data=rdf_to_be_deleted_data, format="turtle"
278
+ )
279
+ for s, p, o in graph:
280
+ triple = f"{s.n3()} {p.n3()} {o.n3()}"
281
+ # NOTE: In case there was a previous `insert` of that
282
+ # triple, it is safe to remove that `insert`, but not
283
+ # the `delete` (in case the triple is contained in the
284
+ # original data).
285
+ if triple in insert_triples:
286
+ insert_triples.remove(triple)
287
+ delete_triples.add(triple)
288
+ except Exception as e:
289
+ log.error(
290
+ f"Error reading `rdf_to_be_deleted_data`: {e}"
291
+ )
292
+ return False
275
293
 
276
294
  # Process the to-be-added triples.
277
- if rdf_added_data is not None:
278
- try:
279
- rdf_added_data = rdf_added_data.get("data")
280
- graph = Graph()
281
- log.debug("RDF added data: {rdf_added_data}")
282
- graph.parse(data=rdf_added_data, format="turtle")
283
- for s, p, o in graph:
284
- triple = f"{s.n3()} {p.n3()} {o.n3()}"
285
- # NOTE: In case there was a previous `delete` of that
286
- # triple, it is safe to remove that `delete`, but not
287
- # the `insert` (in case the triple is not contained in
288
- # the original data).
289
- if triple in delete_triples:
290
- delete_triples.remove(triple)
291
- insert_triples.add(triple)
292
- except Exception as e:
293
- log.error(f"Error reading `rdf_added_data`: {e}")
294
- return False
295
+ for rdf_to_be_added in (
296
+ rdf_added_data,
297
+ rdf_linked_shared_data,
298
+ ):
299
+ if rdf_to_be_added is not None:
300
+ try:
301
+ rdf_to_be_added_data = rdf_to_be_added.get("data")
302
+ graph = Graph()
303
+ log.debug(
304
+ "RDF to be added data: {rdf_to_be_added_data}"
305
+ )
306
+ graph.parse(
307
+ data=rdf_to_be_added_data, format="turtle"
308
+ )
309
+ for s, p, o in graph:
310
+ triple = f"{s.n3()} {p.n3()} {o.n3()}"
311
+ # NOTE: In case there was a previous `delete` of that
312
+ # triple, it is safe to remove that `delete`, but not
313
+ # the `insert` (in case the triple is not contained in
314
+ # the original data).
315
+ if triple in delete_triples:
316
+ delete_triples.remove(triple)
317
+ insert_triples.add(triple)
318
+ except Exception as e:
319
+ log.error(
320
+ f"Error reading `rdf_to_be_added_data`: {e}"
321
+ )
322
+ return False
295
323
 
296
324
  except Exception as e:
297
325
  log.error(f"Error reading data from message: {e}")
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: qlever
3
- Version: 0.5.27
3
+ Version: 0.5.29
4
4
  Summary: Command-line tool for using the QLever graph database
5
5
  Author-email: Hannah Bast <bast@cs.uni-freiburg.de>
6
6
  License: Apache-2.0
@@ -14,21 +14,22 @@ qlever/Qleverfiles/Qleverfile.dnb,sha256=43w_CVi00yf7FHdDvBtHHQR3yU1d-JCNnD_uxYZ
14
14
  qlever/Qleverfiles/Qleverfile.fbeasy,sha256=9dwCMltT0BIMN4LRmaZFp1a7aV0kh0nJ9XLiQb_NJNo,940
15
15
  qlever/Qleverfiles/Qleverfile.freebase,sha256=eFMOxeyuWVbb06Gv2-VFkuKE5tTyckddTDHdw5wbZN8,1028
16
16
  qlever/Qleverfiles/Qleverfile.imdb,sha256=1xUBFimgnEHKP_o6tlqwJvIVpEE4Zx6UK_JnnQsG7Ew,1638
17
- qlever/Qleverfiles/Qleverfile.ohm-planet,sha256=t64aYwwAP0njwg2C3_YttD-ZxHL1UtmmlRH0fd1_tIw,2874
17
+ qlever/Qleverfiles/Qleverfile.ohm-planet,sha256=JRib-xJCeREGrxn_oKoBbtItKEGW5Y-O848kcruiTBs,2746
18
18
  qlever/Qleverfiles/Qleverfile.olympics,sha256=5w9BOFwEBhdSzPz-0LRxwhv-7Gj6xbF539HOXr3cqD0,1088
19
19
  qlever/Qleverfiles/Qleverfile.orkg,sha256=Uizz-RhlSeExgfckWztewa4l_v3zMN8IR7NaGYKrqt4,937
20
- qlever/Qleverfiles/Qleverfile.osm-country,sha256=L__IIFW1pGhLSZDkBWwY7_B7S3jybCEFYDUKYcyT2LU,2422
21
- qlever/Qleverfiles/Qleverfile.osm-planet,sha256=XVQdBG8vNBL-YybU0elBcOC00F8ji9PnZmNV5JMsJ38,2005
20
+ qlever/Qleverfiles/Qleverfile.osm-country,sha256=njFpn5aHzgqJptpkF8ul6fLZMEOc8i2HX5Ibu1PIvSA,2285
21
+ qlever/Qleverfiles/Qleverfile.osm-planet,sha256=uPLEmz_vdZQmDBsXdqyQh50_i5VXrmXVbOvzOML0blc,1865
22
+ qlever/Qleverfiles/Qleverfile.osm-planet-from-pbf,sha256=cdf432bakOpvTzbYjirNnaQyUPNkMH8HhMmdwSZ39fg,2295
22
23
  qlever/Qleverfiles/Qleverfile.pubchem,sha256=ooSj2gqTzbGY_pMCvfL-MfE7Z0d5hQB4_EF5Pp2Mn6M,14465
23
24
  qlever/Qleverfiles/Qleverfile.scientists,sha256=9eZ2c6P9a3E3VHa3RR7LdOQbF4k3oyyrn56Z3u4LZYs,1164
24
- qlever/Qleverfiles/Qleverfile.uniprot,sha256=MoaCjtZ_aLk1ZJMDea_sTNdBb_fUNmyky07TrEP1zWQ,6265
25
+ qlever/Qleverfiles/Qleverfile.uniprot,sha256=pETMO70IVtbNB7w3IbbV_DA_DRKc92OF2bPACcklGFg,6265
25
26
  qlever/Qleverfiles/Qleverfile.vvz,sha256=cLzm85erKoFCDllH5eFcSi35MdR6Tahj1MgtvGRxanM,922
26
27
  qlever/Qleverfiles/Qleverfile.wikidata,sha256=zVUXF75XJyK1h-J-7EjFemzmkSyoPtng1mNY3U7S78M,2061
27
28
  qlever/Qleverfiles/Qleverfile.wikipathways,sha256=GENI4KYtrn_4M9mnGnfGPNkKy_lAPfO2LwnzbOx3fCE,1982
28
29
  qlever/Qleverfiles/Qleverfile.yago-4,sha256=hAS_2ZmC1zxNsKXip7t1F_iqu3CC-6O7v6HZhuFbnWY,1819
29
30
  qlever/commands/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
30
31
  qlever/commands/add_text_index.py,sha256=xJ49Iq1-CszXjHDvOpllqLw1J1kCxQl7H848XD1vEz0,3820
31
- qlever/commands/benchmark_queries.py,sha256=2KZ8lwPz-i4cQqjDDjeru3IqQHFyZ-Kk_M2jgqUXZbs,39257
32
+ qlever/commands/benchmark_queries.py,sha256=PBc0krAC18iQ10l5Mt4G4fIzYf0Zex-otKBbASyq484,39205
32
33
  qlever/commands/cache_stats.py,sha256=0bSfR8fxcmHREUWQW7CNXhaSJ4B1EG6CH65Ufm2elE0,4335
33
34
  qlever/commands/clear_cache.py,sha256=kwNayV4qtgqh_Gf5SjS6WzmfgUsJ-9YhPoWYWGoNNn8,2967
34
35
  qlever/commands/extract_queries.py,sha256=TZBmZLz_NknU1LKbl9nPmxdb82lsPeDhTWjIo81llvA,3942
@@ -37,18 +38,18 @@ qlever/commands/index.py,sha256=oudLc5f6wsWGPkfhsFXIkSH14Oh37bGqK76D5lXJmBk,1333
37
38
  qlever/commands/index_stats.py,sha256=9EBo1Oq5PGjajrvWJNafJ-Wg_d90DaO5AGq9a5plSRM,11720
38
39
  qlever/commands/log.py,sha256=vLqkgtx1udnQqoUBMWB5G9rwr-l7UKrDpyFYSMuoXWw,1987
39
40
  qlever/commands/query.py,sha256=rRiR4TFRZixLfBmITAvKVtWHn6mhOiboGG8a_Jcwc8k,4653
40
- qlever/commands/settings.py,sha256=mheUn2DQFLZCEJ3-FTrbGu7M6PSVOKWl-2qTCggn_CI,4171
41
+ qlever/commands/settings.py,sha256=thD3_-PV57cuM0aXDNhFiLKIcdfchTo2gF4Jmk12Hww,4208
41
42
  qlever/commands/setup_config.py,sha256=wEy1LAunpOnqrUCbazMpt1u9HJCKgXJEMxF3zjh0jb0,3344
42
43
  qlever/commands/start.py,sha256=g_5-BUiSYJjL10ae91jMA5SgI0zk4O4gPMN_BOuERmc,10854
43
44
  qlever/commands/status.py,sha256=TtnBqcdkF3zTDKft07zpVcIX7kFu7d_nOy9b6Ohh9vQ,1650
44
45
  qlever/commands/stop.py,sha256=5BNKArOzoJ8kYiTVAmtN81w7nQ42fkxISgsxL-qJpO0,3463
45
46
  qlever/commands/system_info.py,sha256=I84EKgMO5J8pvsTDhkVKHzsRLtPajNg9KTQN5kWjqLU,4660
46
47
  qlever/commands/ui.py,sha256=Kjv5FKN0pjMCpJS6otbrczs364x24FAnsJjtnc98mJQ,9811
47
- qlever/commands/update_wikidata.py,sha256=1IkYqrgbIV2tJliEORhzv1rMjjipnxe-5FTLrKes1FA,23312
48
+ qlever/commands/update_wikidata.py,sha256=MQWNTe7KtadlTAsxCsntKYWZLSoX6H8l6WI4s6UfFqY,24484
48
49
  qlever/commands/warmup.py,sha256=kJHzS7HJo8pD2CphJuaXDj_CYP02YDo2DVM-pun3A80,1029
49
- qlever-0.5.27.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
50
- qlever-0.5.27.dist-info/METADATA,sha256=lLL_F3OX1rWTr53ytV7wyKwnvfKaOhR3Cy7fyQ_MEOU,5151
51
- qlever-0.5.27.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
52
- qlever-0.5.27.dist-info/entry_points.txt,sha256=U_1U6SFIEZ-AnNlvk2nzcL0e4jnjEpuSbxYZ_E0XpEg,51
53
- qlever-0.5.27.dist-info/top_level.txt,sha256=kd3zsYqiFd0--Czh5XTVkfEq6XR-XgRFW35X0v0GT-c,7
54
- qlever-0.5.27.dist-info/RECORD,,
50
+ qlever-0.5.29.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
51
+ qlever-0.5.29.dist-info/METADATA,sha256=Wv95p0tI8Nn6WUylsLGKWn3zXMKiLihB481G2WnT8yw,5151
52
+ qlever-0.5.29.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
53
+ qlever-0.5.29.dist-info/entry_points.txt,sha256=U_1U6SFIEZ-AnNlvk2nzcL0e4jnjEpuSbxYZ_E0XpEg,51
54
+ qlever-0.5.29.dist-info/top_level.txt,sha256=kd3zsYqiFd0--Czh5XTVkfEq6XR-XgRFW35X0v0GT-c,7
55
+ qlever-0.5.29.dist-info/RECORD,,