qlever 0.5.23__py3-none-any.whl → 0.5.25__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of qlever might be problematic.

@@ -11,7 +11,7 @@ NAME = ohm-planet
  GET_DATA_URL = https://planet.openhistoricalmap.org/planet
  CHECK_BINARIES = osm2rdf -h > /dev/null || (echo "osm2rdf not found, make sure that it's installed and in your PATH" && exit 1)
  GET_DATA_CMD_1 = unbuffer wget -O ${NAME}.pbf $$(curl -s ${GET_DATA_URL}/state.txt) 2>&1 | tee ${NAME}.download-log.txt
- GET_DATA_CMD_2 = osm2rdf ${NAME}.pbf -o ${NAME}.ttl --source-dataset OHM --output-compression gz --store-locations=disk-dense --cache . --num-threads 12 --add-way-node-order --no-untagged-nodes-geometric-relations 2>&1 | tee ${NAME}.osm2rdf-log.txt
+ GET_DATA_CMD_2 = osm2rdf ${NAME}.pbf -o ${NAME}.ttl --source-dataset OHM --output-compression gz --store-locations=disk-dense --cache . --num-threads 12 --iri-prefix-for-untagged-nodes http://www.openhistoricalmap.org/node/ 2>&1 | tee ${NAME}.osm2rdf-log.txt
  GET_DATA_CMD = ${CHECK_BINARIES} && ${GET_DATA_CMD_1} && echo && ${GET_DATA_CMD_2}
  VERSION = $$(date -r ${NAME}.pbf +%d.%m.%Y || echo "NO_DATE")
  DESCRIPTION = OHM Planet, data from ${GET_DATA_URL} version ${VERSION} (with GeoSPARQL predicates ogc:sfContains and ogc:sfIntersects)
@@ -22,6 +22,7 @@ MULTI_INPUT_JSON = { "cmd": "zcat ${INPUT_FILES}", "parallel": "true" }
  STXXL_MEMORY = 5G
  PARSER_BUFFER_SIZE = 50M
  SETTINGS_JSON = { "num-triples-per-batch": 5000000 }
+ ENCODE_AS_IDS = https://www.openhistoricalmap.org/node/ http://www.openhistoricalmap.org/node/ https://www.openhistoricalmap.org/way/ https://www.openhistoricalmap.org/relation/ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_node_tagged_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_node_untagged_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_way_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_relation_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_wayarea_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_relationarea_ https://www.openstreetmap.org/changeset/

  [server]
  PORT = 7037
@@ -16,7 +16,7 @@ NAME = osm-${COUNTRY}
  PBF = ${NAME}.pbf
  WITH_TEXT = false
  VERSION = $$(ls -l --time-style=+%d.%m.%Y ${PBF} 2> /dev/null | cut -d' ' -f6)
- GET_DATA_CMD = wget -nc -O ${PBF} https://download.geofabrik.de/${CONTINENT}/${COUNTRY}-latest.osm.pbf; rm -f ${NAME}.*.bz2; ( time osm2rdf ${PBF} -o ${NAME}.ttl --cache . ) 2>&1 | tee ${NAME}.osm2rdf-log.txt; rm -f spatial-*
+ GET_DATA_CMD = wget -nc -O ${PBF} https://download.geofabrik.de/${CONTINENT}/${COUNTRY}-latest.osm.pbf; rm -f ${NAME}.*.bz2; ( time osm2rdf ${PBF} -o ${NAME}.ttl --cache . --iri-prefix-for-untagged-nodes http://www.openstreetmap.org/node/) 2>&1 | tee ${NAME}.osm2rdf-log.txt; rm -f spatial-*
  DESCRIPTION = OSM ${COUNTRY}, dump from ${VERSION} with ogc:sfContains

  # Indexer settings
@@ -24,7 +24,8 @@ DESCRIPTION = OSM ${COUNTRY}, dump from ${VERSION} with ogc:sfContains
  INPUT_FILES = ${data:NAME}.ttl.bz2
  CAT_INPUT_FILES = bzcat ${data:NAME}.ttl.bz2
  STXXL_MEMORY = 10G
- SETTINGS_JSON = { "prefixes-external": [ "\"LINESTRING(", "\"MULTIPOLYGON(", "\"POLYGON(" ], "ascii-prefixes-only": false, "num-triples-per-batch": 1000000 }
+ SETTINGS_JSON = { "num-triples-per-batch": 10000000 }
+ ENCODE_AS_IDS = https://www.openstreetmap.org/node/ http://www.openstreetmap.org/node/ https://www.openstreetmap.org/way/ https://www.openstreetmap.org/relation/ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_node_tagged_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_node_untagged_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_way_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_relation_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_wayarea_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_relationarea_ https://www.openstreetmap.org/changeset/

  # Server settings
  [server]
@@ -8,10 +8,10 @@

  [data]
  NAME = osm-planet
- DATA_URL = https://osm2rdf.cs.uni-freiburg.de/ttl/planet.osm.ttl.bz2
- GET_DATA_CMD = unbuffer wget -O ${NAME}.ttl.bz2 ${DATA_URL} | tee ${NAME}.download-log.txt
+ GET_DATA_URL = https://osm2rdf.cs.uni-freiburg.de/ttl/planet.osm.ttl.bz2
+ GET_DATA_CMD = unbuffer wget -O ${NAME}.ttl.bz2 ${GET_DATA_URL} | tee ${NAME}.download-log.txt
  VERSION = $$(date -r ${NAME}.ttl.bz2 +"%d.%m.%Y" || echo "NO_DATE")
- DESCRIPTION = OSM Planet, data from ${DATA_URL} version ${VERSION} (complete OSM data, with GeoSPARQL predicates ogc:sfContains and ogc:sfIntersects)
+ DESCRIPTION = OSM from ${GET_DATA_URL} (converted to RDF using osm2rdf, enhanced by GeoSPARQL triples ogc:sfContains, ogc:sfCovers, ogc:sfIntersects, ogc:sfEquals, ogc:sfTouches, ogc:sfCrosses, ogc:sfOverlaps)

  [index]
  INPUT_FILES = ${data:NAME}.ttl.bz2
@@ -21,6 +21,7 @@ PARSER_BUFFER_SIZE = 100M
  STXXL_MEMORY = 40G
  SETTINGS_JSON = { "num-triples-per-batch": 10000000 }
  ULIMIT = 10000
+ ENCODE_AS_IDS = https://www.openstreetmap.org/node/ http://www.openstreetmap.org/node/ https://www.openstreetmap.org/way/ https://www.openstreetmap.org/relation/ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_node_tagged_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_node_untagged_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_way_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_relation_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_wayarea_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_relationarea_ https://www.openstreetmap.org/changeset/

  [server]
  PORT = 7007
@@ -12,7 +12,7 @@

  [data]
  NAME = uniprot
- DATE = 2024-11-27
+ DATE = 2025-06-18
  RDFXML_DIR = rdf.${DATE}
  TTL_DIR = ttl.${DATE}
  UNIPROT_URL = https://ftp.uniprot.org/pub/databases/uniprot/current_release/rdf
@@ -56,6 +56,7 @@ MULTI_INPUT_JSON = [{ "cmd": "zcat {}", "graph": "http://sparql.uniprot.org/unip
  { "cmd": "zcat ${data:TTL_DIR}/core.ttl.gz", "graph": "http://purl.uniprot.org/core" }]
  SETTINGS_JSON = { "languages-internal": [], "prefixes-external": [""], "locale": { "language": "en", "country": "US", "ignore-punctuation": true }, "ascii-prefixes-only": true, "num-triples-per-batch": 25000000 }
  STXXL_MEMORY = 60G
+ ULIMIT = 10000

  [server]
  PORT = 7018
@@ -8,7 +8,7 @@

  [data]
  NAME = wikipathways
- RELEASE = 20240810
+ RELEASE = current
  GET_DATA_URL = https://data.wikipathways.org/${RELEASE}/rdf
  GET_DATA_CMD = wget -O wikipathways-rdf-void.ttl ${GET_DATA_URL}/wikipathways-rdf-void.ttl && \
  wget ${GET_DATA_URL}/wikipathways-${RELEASE}-rdf-wp.zip && \
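
For context, the files changed above are Qleverfiles, the configuration format read by the qlever command-line tool shipped in this package. A minimal usage sketch, assuming the qlever CLI is installed and one of these Qleverfiles is in the current working directory (the comments only summarize which section each step reads; exact behaviour depends on the local setup):

  qlever get-data   # runs GET_DATA_CMD from the [data] section (e.g. the wget + osm2rdf pipelines above)
  qlever index      # builds the QLever index using the [index] settings (SETTINGS_JSON, STXXL_MEMORY, ...)
  qlever start      # starts the SPARQL endpoint on the PORT given in the [server] section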