qlever 0.5.24__py3-none-any.whl → 0.5.26__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- qlever/Qleverfiles/Qleverfile.ohm-planet +2 -1
- qlever/Qleverfiles/Qleverfile.osm-country +3 -2
- qlever/Qleverfiles/Qleverfile.osm-planet +4 -3
- qlever/Qleverfiles/Qleverfile.uniprot +2 -1
- qlever/Qleverfiles/Qleverfile.wikipathways +1 -1
- qlever/commands/cache_stats.py +52 -32
- qlever/commands/index.py +3 -0
- qlever/commands/query.py +1 -1
- qlever/commands/update_wikidata.py +4 -1
- qlever/qleverfile.py +8 -0
- {qlever-0.5.24.dist-info → qlever-0.5.26.dist-info}/METADATA +40 -29
- {qlever-0.5.24.dist-info → qlever-0.5.26.dist-info}/RECORD +16 -16
- {qlever-0.5.24.dist-info → qlever-0.5.26.dist-info}/WHEEL +0 -0
- {qlever-0.5.24.dist-info → qlever-0.5.26.dist-info}/entry_points.txt +0 -0
- {qlever-0.5.24.dist-info → qlever-0.5.26.dist-info}/licenses/LICENSE +0 -0
- {qlever-0.5.24.dist-info → qlever-0.5.26.dist-info}/top_level.txt +0 -0
qlever/Qleverfiles/Qleverfile.ohm-planet
CHANGED

@@ -11,7 +11,7 @@ NAME = ohm-planet
 GET_DATA_URL = https://planet.openhistoricalmap.org/planet
 CHECK_BINARIES = osm2rdf -h > /dev/null || (echo "osm2rdf not found, make sure that it's installed and in your PATH" && exit 1)
 GET_DATA_CMD_1 = unbuffer wget -O ${NAME}.pbf $$(curl -s ${GET_DATA_URL}/state.txt) 2>&1 | tee ${NAME}.download-log.txt
-GET_DATA_CMD_2 = osm2rdf ${NAME}.pbf -o ${NAME}.ttl --source-dataset OHM --output-compression gz --store-locations=disk-dense --cache . --num-threads 12 2>&1 | tee ${NAME}.osm2rdf-log.txt
+GET_DATA_CMD_2 = osm2rdf ${NAME}.pbf -o ${NAME}.ttl --source-dataset OHM --output-compression gz --store-locations=disk-dense --cache . --num-threads 12 --iri-prefix-for-untagged-nodes http://www.openhistoricalmap.org/node/ 2>&1 | tee ${NAME}.osm2rdf-log.txt
 GET_DATA_CMD = ${CHECK_BINARIES} && ${GET_DATA_CMD_1} && echo && ${GET_DATA_CMD_2}
 VERSION = $$(date -r ${NAME}.pbf +%d.%m.%Y || echo "NO_DATE")
 DESCRIPTION = OHM Planet, data from ${GET_DATA_URL} version ${VERSION} (with GeoSPARQL predicates ogc:sfContains and ogc:sfIntersects)
@@ -22,6 +22,7 @@ MULTI_INPUT_JSON = { "cmd": "zcat ${INPUT_FILES}", "parallel": "true" }
 STXXL_MEMORY = 5G
 PARSER_BUFFER_SIZE = 50M
 SETTINGS_JSON = { "num-triples-per-batch": 5000000 }
+ENCODE_AS_IDS = https://www.openhistoricalmap.org/node/ http://www.openhistoricalmap.org/node/ https://www.openhistoricalmap.org/way/ https://www.openhistoricalmap.org/relation/ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_node_tagged_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_node_untagged_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_way_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_relation_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_wayarea_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_relationarea_ https://www.openstreetmap.org/changeset/

 [server]
 PORT = 7037
qlever/Qleverfiles/Qleverfile.osm-country
CHANGED

@@ -16,7 +16,7 @@ NAME = osm-${COUNTRY}
 PBF = ${NAME}.pbf
 WITH_TEXT = false
 VERSION = $$(ls -l --time-style=+%d.%m.%Y ${PBF} 2> /dev/null | cut -d' ' -f6)
-GET_DATA_CMD = wget -nc -O ${PBF} https://download.geofabrik.de/${CONTINENT}/${COUNTRY}-latest.osm.pbf; rm -f ${NAME}.*.bz2; ( time osm2rdf ${PBF} -o ${NAME}.ttl --cache . ) 2>&1 | tee ${NAME}.osm2rdf-log.txt; rm -f spatial-*
+GET_DATA_CMD = wget -nc -O ${PBF} https://download.geofabrik.de/${CONTINENT}/${COUNTRY}-latest.osm.pbf; rm -f ${NAME}.*.bz2; ( time osm2rdf ${PBF} -o ${NAME}.ttl --cache . --iri-prefix-for-untagged-nodes http://www.openstreetmap.org/node/) 2>&1 | tee ${NAME}.osm2rdf-log.txt; rm -f spatial-*
 DESCRIPTION = OSM ${COUNTRY}, dump from ${VERSION} with ogc:sfContains

 # Indexer settings
@@ -24,7 +24,8 @@ DESCRIPTION = OSM ${COUNTRY}, dump from ${VERSION} with ogc:sfContains
 INPUT_FILES = ${data:NAME}.ttl.bz2
 CAT_INPUT_FILES = bzcat ${data:NAME}.ttl.bz2
 STXXL_MEMORY = 10G
-SETTINGS_JSON
+SETTINGS_JSON = { "num-triples-per-batch": 10000000 }
+ENCODE_AS_IDS = https://www.openstreetmap.org/node/ http://www.openstreetmap.org/node/ https://www.openstreetmap.org/way/ https://www.openstreetmap.org/relation/ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_node_tagged_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_node_untagged_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_way_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_relation_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_wayarea_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_relationarea_ https://www.openstreetmap.org/changeset/

 # Server settings
 [server]
qlever/Qleverfiles/Qleverfile.osm-planet
CHANGED

@@ -8,10 +8,10 @@

 [data]
 NAME = osm-planet
-GET_DATA_CMD = unbuffer wget -O ${NAME}.ttl.bz2 ${
+GET_DATA_URL = https://osm2rdf.cs.uni-freiburg.de/ttl/planet.osm.ttl.bz2
+GET_DATA_CMD = unbuffer wget -O ${NAME}.ttl.bz2 ${GET_DATA_URL} | tee ${NAME}.download-log.txt
 VERSION = $$(date -r ${NAME}.ttl.bz2 +"%d.%m.%Y" || echo "NO_DATE")
-DESCRIPTION = OSM
+DESCRIPTION = OSM from ${GET_DATA_URL} (converted to RDF using osm2rdf, enhanced by GeoSPARQL triples ogc:sfContains, ogc:sfCovers, ogc:sfIntersects, ogc:sfEquals, ogc:sfTouches, ogc:sfCrosses, ogc:sfOverlaps)

 [index]
 INPUT_FILES = ${data:NAME}.ttl.bz2
@@ -21,6 +21,7 @@ PARSER_BUFFER_SIZE = 100M
 STXXL_MEMORY = 40G
 SETTINGS_JSON = { "num-triples-per-batch": 10000000 }
 ULIMIT = 10000
+ENCODE_AS_IDS = https://www.openstreetmap.org/node/ http://www.openstreetmap.org/node/ https://www.openstreetmap.org/way/ https://www.openstreetmap.org/relation/ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_node_tagged_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_node_untagged_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_way_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_relation_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_wayarea_ https://osm2rdf.cs.uni-freiburg.de/rdf/geom#osm_relationarea_ https://www.openstreetmap.org/changeset/

 [server]
 PORT = 7007
qlever/Qleverfiles/Qleverfile.uniprot
CHANGED

@@ -12,7 +12,7 @@

 [data]
 NAME = uniprot
-DATE =
+DATE = 2025-06-18
 RDFXML_DIR = rdf.${DATE}
 TTL_DIR = ttl.${DATE}
 UNIPROT_URL = https://ftp.uniprot.org/pub/databases/uniprot/current_release/rdf
@@ -56,6 +56,7 @@ MULTI_INPUT_JSON = [{ "cmd": "zcat {}", "graph": "http://sparql.uniprot.org/unip
 { "cmd": "zcat ${data:TTL_DIR}/core.ttl.gz", "graph": "http://purl.uniprot.org/core" }]
 SETTINGS_JSON = { "languages-internal": [], "prefixes-external": [""], "locale": { "language": "en", "country": "US", "ignore-punctuation": true }, "ascii-prefixes-only": true, "num-triples-per-batch": 25000000 }
 STXXL_MEMORY = 60G
+ULIMIT = 10000

 [server]
 PORT = 7018
qlever/Qleverfiles/Qleverfile.wikipathways
CHANGED

@@ -8,7 +8,7 @@

 [data]
 NAME = wikipathways
-RELEASE =
+RELEASE = current
 GET_DATA_URL = https://data.wikipathways.org/${RELEASE}/rdf
 GET_DATA_CMD = wget -O wikipathways-rdf-void.ttl ${GET_DATA_URL}/wikipathways-rdf-void.ttl && \
 wget ${GET_DATA_URL}/wikipathways-${RELEASE}-rdf-wp.zip && \
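All of the OSM/OHM Qleverfiles above gain an `ENCODE_AS_IDS` entry with the same space-separated prefix list. Qleverfiles are plain INI-style files, so the new entry can be inspected with nothing more than Python's `configparser`; the following is a minimal sketch (file path and interpolation settings are illustrative, not part of the release):

```python
from configparser import ConfigParser

# Interpolation is disabled so that ${...} placeholders and $$ escapes in the
# Qleverfile are read verbatim instead of being expanded.
config = ConfigParser(interpolation=None)
config.read("Qleverfile")  # e.g. the osm-planet Qleverfile from this release

# ENCODE_AS_IDS is a single space-separated string of IRI prefixes in [index].
prefixes = config["index"]["ENCODE_AS_IDS"].split()
for prefix in prefixes:
    print(prefix)
```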
qlever/commands/cache_stats.py
CHANGED
@@ -17,43 +17,54 @@ class CacheStatsCommand(QleverCommand):
         pass

     def description(self) -> str:
-        return
+        return "Show how much of the cache is currently being used"

     def should_have_qleverfile(self) -> bool:
         return False

-    def relevant_qleverfile_arguments(self) -> dict[str: list[str]]:
+    def relevant_qleverfile_arguments(self) -> dict[str : list[str]]:
         return {"server": ["host_name", "port"]}

     def additional_arguments(self, subparser) -> None:
-        subparser.add_argument(
+        subparser.add_argument(
+            "--server-url",
+            help="URL of the QLever server, default is {host_name}:{port}",
+        )
+        subparser.add_argument(
+            "--detailed",
+            action="store_true",
+            default=False,
+            help="Show detailed statistics and settings",
+        )

     def execute(self, args) -> bool:
         # Construct the two curl commands.
-        server_url = (
+        server_url = (
+            args.server_url
+            if args.server_url
+            else f"{args.host_name}:{args.port}"
+        )
+        cache_stats_cmd = (
+            f'curl -s {server_url} --data-urlencode "cmd=cache-stats"'
+        )
+        cache_settings_cmd = (
+            f'curl -s {server_url} --data-urlencode "cmd=get-settings"'
+        )

         # Show them.
-        self.show(
+        self.show(
+            "\n".join([cache_stats_cmd, cache_settings_cmd]),
+            only_show=args.show,
+        )
         if args.show:
             return True

         # Execute them.
         try:
             cache_stats = subprocess.check_output(cache_stats_cmd, shell=True)
-            cache_settings = subprocess.check_output(
+            cache_settings = subprocess.check_output(
+                cache_settings_cmd, shell=True
+            )
             cache_stats_dict = json.loads(cache_stats)
             cache_settings_dict = json.loads(cache_settings)
             if isinstance(cache_settings_dict, list):
@@ -66,27 +77,35 @@ class CacheStatsCommand(QleverCommand):
         if not args.detailed:
             cache_size = cache_settings_dict["cache-max-size"]
             if not cache_size.endswith(" GB"):
-                log.error(
+                log.error(
+                    f"Cache size {cache_size} is not in GB, "
+                    f"QLever should return bytes instead"
+                )
                 return False
             else:
                 cache_size = float(cache_size[:-3])
-            pinned_size = cache_stats_dict["
-            non_pinned_size = cache_stats_dict["
+            pinned_size = cache_stats_dict["cache-size-pinned"] / 1e9
+            non_pinned_size = cache_stats_dict["cache-size-unpinned"] / 1e9
             cached_size = pinned_size + non_pinned_size
             free_size = cache_size - cached_size
             if cached_size == 0:
                 log.info(f"Cache is empty, all {cache_size:.1f} GB available")
             else:
-                log.info(
+                log.info(
+                    f"Pinned queries : "
+                    f"{pinned_size:5.1f} GB of {cache_size:5.1f} GB"
+                    f" [{pinned_size / cache_size:5.1%}]"
+                )
+                log.info(
+                    f"Non-pinned queries : "
+                    f"{non_pinned_size:5.1f} GB of {cache_size:5.1f} GB"
+                    f" [{non_pinned_size / cache_size:5.1%}]"
+                )
+                log.info(
+                    f"FREE : "
+                    f"{free_size:5.1f} GB of {cache_size:5.1f} GB"
+                    f" [{1 - cached_size / cache_size:5.1%}]"
+                )
             return True

         # Complete version.
@@ -98,6 +117,7 @@ class CacheStatsCommand(QleverCommand):
                 if re.match(r"^\d+\.\d+$", value):
                     value = "{:.2f}".format(float(value))
                 log.info(f"{key.ljust(max_key_len)} : {value}")
+
         show_dict_as_table(cache_stats_dict.items())
         log.info("")
         show_dict_as_table(cache_settings_dict.items())
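For context, the reworked `execute` boils down to two HTTP requests and a little arithmetic: it asks the server for `cmd=cache-stats` and `cmd=get-settings`, converts the pinned and non-pinned byte counts to GB, and compares them with the configured `cache-max-size`. A standalone sketch of that logic, using only the curl invocations and JSON keys visible in the diff (the server URL is a placeholder, and `curl` is assumed to be on the PATH):

```python
import json
import subprocess


def cache_usage(server_url: str) -> dict:
    """Summarize QLever cache usage in GB (mirrors the logic in the diff)."""

    def get(cmd: str):
        # Same curl invocations that the command constructs.
        out = subprocess.check_output(
            f'curl -s {server_url} --data-urlencode "cmd={cmd}"', shell=True
        )
        return json.loads(out)

    stats, settings = get("cache-stats"), get("get-settings")
    # Note: the real command also handles the case where get-settings
    # returns a JSON list; this sketch assumes a plain JSON object.
    cache_size_gb = float(settings["cache-max-size"].removesuffix(" GB"))
    pinned_gb = stats["cache-size-pinned"] / 1e9      # bytes -> GB
    non_pinned_gb = stats["cache-size-unpinned"] / 1e9
    return {
        "pinned_gb": pinned_gb,
        "non_pinned_gb": non_pinned_gb,
        "free_gb": cache_size_gb - pinned_gb - non_pinned_gb,
    }


if __name__ == "__main__":
    # Hypothetical endpoint; substitute your own host and port.
    print(cache_usage("localhost:7001"))
```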
qlever/commands/index.py
CHANGED
@@ -36,6 +36,7 @@ class IndexCommand(QleverCommand):
             "index": [
                 "input_files",
                 "cat_input_files",
+                "encode_as_id",
                 "multi_input_json",
                 "parallel_parsing",
                 "settings_json",
@@ -215,6 +216,8 @@ class IndexCommand(QleverCommand):
             return False

         # Add remaining options.
+        if args.encode_as_id:
+            index_cmd += f" --encode-as-id {args.encode_as_id}"
         if args.only_pso_and_pos_permutations:
             index_cmd += " --only-pso-and-pos-permutations --no-patterns"
         if not args.use_patterns:
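The net effect of these two hunks is that the Qleverfile's prefix list is picked up as the `encode_as_id` argument and forwarded verbatim to the index builder. A toy illustration of the resulting command-line fragment (the prefix list is shortened and the base command is a placeholder, not QLever's actual invocation):

```python
# Shortened prefix list, as it would arrive from a Qleverfile.
encode_as_id = (
    "https://www.openstreetmap.org/node/ "
    "https://www.openstreetmap.org/way/ "
    "https://www.openstreetmap.org/relation/"
)

# Placeholder for the index-builder invocation that `qlever index` assembles.
index_cmd = "<index-builder and its other options>"
if encode_as_id:
    index_cmd += f" --encode-as-id {encode_as_id}"
print(index_cmd)
```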
qlever/commands/query.py
CHANGED
@@ -95,7 +95,7 @@ class QueryCommand(QleverCommand):
         if args.pin_to_cache:
             args.accept = "application/qlever-results+json"
             curl_cmd_additions = (
-                f" --data
+                f" --data pin-result=true --data send=0"
                 f" --data access-token="
                 f"{shlex.quote(args.access_token)}"
                 f" | jq .resultsize | numfmt --grouping"
qlever/commands/update_wikidata.py
CHANGED

@@ -217,8 +217,11 @@ class UpdateWikidataCommand(QleverCommand):
                    f"before processing the next batch"
                )
                log.info("")
-               time.sleep(args.wait_between_batches)
                wait_before_next_batch = False
+               for _ in range(args.wait_between_batches):
+                   if self.ctrl_c_pressed:
+                       break
+                   time.sleep(1)

            # Check if the `args.batch_size` is reached (note that we come here
            # after a `continue` due to an error).
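The change replaces a single long `time.sleep(args.wait_between_batches)` with a loop of one-second sleeps, so a pending Ctrl+C (tracked in `self.ctrl_c_pressed`) is noticed within a second rather than only after the full wait. The same pattern in isolation, with a plain module-level flag standing in for the command's own signal handling (names here are illustrative):

```python
import signal
import time

ctrl_c_pressed = False


def on_sigint(signum, frame):
    # Remember the interrupt instead of raising KeyboardInterrupt immediately.
    global ctrl_c_pressed
    ctrl_c_pressed = True


signal.signal(signal.SIGINT, on_sigint)


def interruptible_wait(seconds: int) -> None:
    """Sleep in 1-second steps so an interrupt is honored promptly."""
    for _ in range(seconds):
        if ctrl_c_pressed:
            break
        time.sleep(1)


interruptible_wait(30)  # e.g. wait up to 30 seconds between batches
```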
qlever/qleverfile.py
CHANGED
@@ -151,6 +151,14 @@ class Qleverfile:
             "large enough to contain the end of at least one statement "
             "(default: 10M)",
         )
+        index_args["encode_as_id"] = arg(
+            "--encode-as-id",
+            type=str,
+            help="Space-separated list of IRI prefixes (without angle "
+            "brackets); IRIs that start with one of these prefixes, followed "
+            "by a sequence of digits, do not require a vocabulary entry but "
+            "are directly encoded in the ID (default: none)",
+        )
         index_args["only_pso_and_pos_permutations"] = arg(
             "--only-pso-and-pos-permutations",
             action="store_true",
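The new help text pins down the rule: only IRIs that consist of one of the listed prefixes followed by nothing but digits can skip the vocabulary and be encoded directly in the ID. A small self-contained check of that rule, using a shortened version of the OSM prefix list from the Qleverfiles above (the function name is ours, not QLever's):

```python
ENCODE_AS_IDS = (
    "https://www.openstreetmap.org/node/ "
    "http://www.openstreetmap.org/node/ "
    "https://www.openstreetmap.org/way/ "
    "https://www.openstreetmap.org/relation/"
)
PREFIXES = ENCODE_AS_IDS.split()


def encodable_as_id(iri: str) -> bool:
    """True iff the IRI is one of the prefixes followed only by digits."""
    return any(
        iri.startswith(p) and iri[len(p):].isdigit() for p in PREFIXES
    )


assert encodable_as_id("https://www.openstreetmap.org/node/240109189")
assert not encodable_as_id("https://www.openstreetmap.org/user/someone")
assert not encodable_as_id("https://www.openstreetmap.org/node/")  # no digits
```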
{qlever-0.5.24.dist-info → qlever-0.5.26.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: qlever
-Version: 0.5.24
+Version: 0.5.26
 Summary: Command-line tool for using the QLever graph database
 Author-email: Hannah Bast <bast@cs.uni-freiburg.de>
 License: Apache-2.0
@@ -21,37 +21,30 @@ Dynamic: license-file

 # QLever

-does, in particular, creating SPARQL endpoints for arbitrary RDF datasets. It
-is supposed to be very easy to use and self-explanatory as you use it. In
-particular, the tool provides context-sensitive autocompletion of all its
-commands and options. If you use a container system (like Docker or Podman),
-you don't even have to download any QLever code, but the script will download
-the required image for you.
-NOTE: There has been a major update on 24.03.2024, which changed some of the
-Qleverfile variables and command-line options (all for the better, of course).
-If you encounter any problems, please contact us by opening an issue on
-https://github.com/ad-freiburg/qlever-control/issues.
+This repository provides a self-documenting and easy-to-use command-line tool
+for QLever (pronounced "Clever"), a graph database implementing the
+[RDF](https://www.w3.org/TR/rdf11-concepts/) and
+[SPARQL](https://www.w3.org/TR/sparql11-overview/) standards.
+For a detailed description of what QLever is and what it can do, see
+[here](https://github.com/ad-freiburg/qlever).

 # Installation

-Simply do `pip install qlever` and make sure that the directory where pip
+Simply do `pip install qlever` and make sure that the directory where `pip`
 installs the package is in your `PATH`. Typically, `pip` will warn you when
-that is not the case and tell you what to do.
+that is not the case and tell you what to do. If you encounter an "Externally
+managed Environment" error, try `pipx` instead of `pip`.
+
+Type `qlever` without arguments to check that the installation worked. When
+using it for the first time, you will see a warning at the top with
+instructions on how to enable autocompletion. Do it, it makes using `qlever`
+so much easier (`pip` cannot do that for you automatically, sorry).

 # Usage

 Create an empty directory, with a name corresponding to the dataset you want to
 work with. For the following example, take `olympics`. Go to that directory
-and do the following.
-activate autocompletion for all its commands and options (it's very easy, but
-`pip` cannot do that automatically).
+and do the following.

 ```
 qlever setup-config olympics # Get Qleverfile (config file) for this dataset
@@ -78,16 +71,34 @@ qlever index --show
 ```

 There are many more commands and options, see `qlever --help` for general help,
-`qlever <command> --help` for help on a specific command, or just the
+`qlever <command> --help` for help on a specific command, or just use the
 autocompletion.

+# Use on macOS and Windows
+
+By default, `qlever` uses [QLever's official Docker
+image](https://hub.docker.com/r/adfreiburg/qlever). In principle, that image
+runs on Linux, macOS, and Windows. On Linux, Docker runs natively
+and incurs only a relatively small overhead regarding performance and RAM
+consumption. On macOS and Windows, Docker runs in a virtual machine, which
+incurs a significant and sometimes unpredictable overhead. For example, `qlever
+index` might abort prematurely (without a proper error message) because the
+virtual machine runs out of RAM.
+
+For optimal performance, compile QLever from source on your machine. For Linux,
+this is relatively straightforward: just follow the `RUN` instructions in the
+[Dockerfile](https://github.com/ad-freiburg/qlever/blob/master/Dockerfile). For
+macOS, this is more complicated, see [this
+workflow](https://github.com/ad-freiburg/qlever/blob/master/.github/workflows/macos.yml).
+
 # Use with your own dataset

-To use QLever with your own dataset, you
-change it according to your needs (the variable names should be
-Pick one for a dataset that is similar to yours and when in
+To use QLever with your own dataset, you need a `Qleverfile`, like in the
+example above. The easiest way to write a `Qleverfile` is to get one of the
+existing ones (using `qlever setup-config ...` as explained above) and then
+change it according to your needs (the variable names should be
+self-explanatory). Pick one for a dataset that is similar to yours and when in
+doubt, pick `olympics`.

 # For developers
{qlever-0.5.24.dist-info → qlever-0.5.26.dist-info}/RECORD
CHANGED

@@ -4,7 +4,7 @@ qlever/config.py,sha256=gNw2_-jj1TjzhzqLOuUI_Dh19q_ViCiArrtrgXL2F4E,10354
 qlever/containerize.py,sha256=G1_ei9nBnYl5-7miiy0eWjb9HMnt06X21P7iU8bm6A0,5369
 qlever/log.py,sha256=WLscWV4fFF_w_uXSOfvWzhyzRM7t_61inE2ks3zf6Gw,1317
 qlever/qlever_main.py,sha256=QlVXq7azyuAG0QhH_pER2fdZL8el2mG0I6d9r0dGgOA,2593
-qlever/qleverfile.py,sha256=
+qlever/qleverfile.py,sha256=qiXOD5xGz-pyLFtwjlDRwZdMQaji3cDP1neJpom8nvU,18357
 qlever/util.py,sha256=n4sJoBCrCSVaDS20z5B5tfa5GMqo3vGOdn5D1efjkmY,10940
 qlever/Qleverfiles/Qleverfile.dblp,sha256=oVVPFMpKX0Lfe0HDYPuL3qYhlC-3Lz18AT2tHmJ32WE,1282
 qlever/Qleverfiles/Qleverfile.dblp-plus,sha256=TJHxp8I1P6JKJjbuAllEpB32-huuY1gH0FlenqPVJ5g,1334
@@ -14,29 +14,29 @@ qlever/Qleverfiles/Qleverfile.dnb,sha256=43w_CVi00yf7FHdDvBtHHQR3yU1d-JCNnD_uxYZ
 qlever/Qleverfiles/Qleverfile.fbeasy,sha256=9dwCMltT0BIMN4LRmaZFp1a7aV0kh0nJ9XLiQb_NJNo,940
 qlever/Qleverfiles/Qleverfile.freebase,sha256=eFMOxeyuWVbb06Gv2-VFkuKE5tTyckddTDHdw5wbZN8,1028
 qlever/Qleverfiles/Qleverfile.imdb,sha256=1xUBFimgnEHKP_o6tlqwJvIVpEE4Zx6UK_JnnQsG7Ew,1638
-qlever/Qleverfiles/Qleverfile.ohm-planet,sha256=
+qlever/Qleverfiles/Qleverfile.ohm-planet,sha256=t64aYwwAP0njwg2C3_YttD-ZxHL1UtmmlRH0fd1_tIw,2874
 qlever/Qleverfiles/Qleverfile.olympics,sha256=5w9BOFwEBhdSzPz-0LRxwhv-7Gj6xbF539HOXr3cqD0,1088
 qlever/Qleverfiles/Qleverfile.orkg,sha256=Uizz-RhlSeExgfckWztewa4l_v3zMN8IR7NaGYKrqt4,937
-qlever/Qleverfiles/Qleverfile.osm-country,sha256=
-qlever/Qleverfiles/Qleverfile.osm-planet,sha256
+qlever/Qleverfiles/Qleverfile.osm-country,sha256=L__IIFW1pGhLSZDkBWwY7_B7S3jybCEFYDUKYcyT2LU,2422
+qlever/Qleverfiles/Qleverfile.osm-planet,sha256=XVQdBG8vNBL-YybU0elBcOC00F8ji9PnZmNV5JMsJ38,2005
 qlever/Qleverfiles/Qleverfile.pubchem,sha256=ooSj2gqTzbGY_pMCvfL-MfE7Z0d5hQB4_EF5Pp2Mn6M,14465
 qlever/Qleverfiles/Qleverfile.scientists,sha256=9eZ2c6P9a3E3VHa3RR7LdOQbF4k3oyyrn56Z3u4LZYs,1164
-qlever/Qleverfiles/Qleverfile.uniprot,sha256=
+qlever/Qleverfiles/Qleverfile.uniprot,sha256=MoaCjtZ_aLk1ZJMDea_sTNdBb_fUNmyky07TrEP1zWQ,6265
 qlever/Qleverfiles/Qleverfile.vvz,sha256=cLzm85erKoFCDllH5eFcSi35MdR6Tahj1MgtvGRxanM,922
 qlever/Qleverfiles/Qleverfile.wikidata,sha256=zVUXF75XJyK1h-J-7EjFemzmkSyoPtng1mNY3U7S78M,2061
-qlever/Qleverfiles/Qleverfile.wikipathways,sha256=
+qlever/Qleverfiles/Qleverfile.wikipathways,sha256=GENI4KYtrn_4M9mnGnfGPNkKy_lAPfO2LwnzbOx3fCE,1982
 qlever/Qleverfiles/Qleverfile.yago-4,sha256=hAS_2ZmC1zxNsKXip7t1F_iqu3CC-6O7v6HZhuFbnWY,1819
 qlever/commands/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 qlever/commands/add_text_index.py,sha256=xJ49Iq1-CszXjHDvOpllqLw1J1kCxQl7H848XD1vEz0,3820
 qlever/commands/benchmark_queries.py,sha256=2KZ8lwPz-i4cQqjDDjeru3IqQHFyZ-Kk_M2jgqUXZbs,39257
-qlever/commands/cache_stats.py,sha256=
+qlever/commands/cache_stats.py,sha256=0bSfR8fxcmHREUWQW7CNXhaSJ4B1EG6CH65Ufm2elE0,4335
 qlever/commands/clear_cache.py,sha256=kwNayV4qtgqh_Gf5SjS6WzmfgUsJ-9YhPoWYWGoNNn8,2967
 qlever/commands/extract_queries.py,sha256=TZBmZLz_NknU1LKbl9nPmxdb82lsPeDhTWjIo81llvA,3942
 qlever/commands/get_data.py,sha256=nHOHMjv0tSLWJDOR0ba_LK-Bk-mcGnphb8hbqcVYFhE,1411
-qlever/commands/index.py,sha256=
+qlever/commands/index.py,sha256=oudLc5f6wsWGPkfhsFXIkSH14Oh37bGqK76D5lXJmBk,13338
 qlever/commands/index_stats.py,sha256=9EBo1Oq5PGjajrvWJNafJ-Wg_d90DaO5AGq9a5plSRM,11720
 qlever/commands/log.py,sha256=vLqkgtx1udnQqoUBMWB5G9rwr-l7UKrDpyFYSMuoXWw,1987
-qlever/commands/query.py,sha256=
+qlever/commands/query.py,sha256=rRiR4TFRZixLfBmITAvKVtWHn6mhOiboGG8a_Jcwc8k,4653
 qlever/commands/settings.py,sha256=eoxVj-Lr9SXtDCUPAPiPh-cbsC3GKGDkDTx1QntEbnA,4121
 qlever/commands/setup_config.py,sha256=wEy1LAunpOnqrUCbazMpt1u9HJCKgXJEMxF3zjh0jb0,3344
 qlever/commands/start.py,sha256=g_5-BUiSYJjL10ae91jMA5SgI0zk4O4gPMN_BOuERmc,10854
@@ -44,11 +44,11 @@ qlever/commands/status.py,sha256=TtnBqcdkF3zTDKft07zpVcIX7kFu7d_nOy9b6Ohh9vQ,165
 qlever/commands/stop.py,sha256=5BNKArOzoJ8kYiTVAmtN81w7nQ42fkxISgsxL-qJpO0,3463
 qlever/commands/system_info.py,sha256=I84EKgMO5J8pvsTDhkVKHzsRLtPajNg9KTQN5kWjqLU,4660
 qlever/commands/ui.py,sha256=Kjv5FKN0pjMCpJS6otbrczs364x24FAnsJjtnc98mJQ,9811
-qlever/commands/update_wikidata.py,sha256=
+qlever/commands/update_wikidata.py,sha256=1IkYqrgbIV2tJliEORhzv1rMjjipnxe-5FTLrKes1FA,23312
 qlever/commands/warmup.py,sha256=kJHzS7HJo8pD2CphJuaXDj_CYP02YDo2DVM-pun3A80,1029
-qlever-0.5.
-qlever-0.5.
-qlever-0.5.
-qlever-0.5.
-qlever-0.5.
-qlever-0.5.
+qlever-0.5.26.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+qlever-0.5.26.dist-info/METADATA,sha256=GS_dGYncRUVfYJvuV67nBTaMwvb8jQhf25j9nyg76kU,5151
+qlever-0.5.26.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+qlever-0.5.26.dist-info/entry_points.txt,sha256=U_1U6SFIEZ-AnNlvk2nzcL0e4jnjEpuSbxYZ_E0XpEg,51
+qlever-0.5.26.dist-info/top_level.txt,sha256=kd3zsYqiFd0--Czh5XTVkfEq6XR-XgRFW35X0v0GT-c,7
+qlever-0.5.26.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|
|
File without changes
|