qlever 0.5.17__py3-none-any.whl → 0.5.18__py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of qlever might be problematic.

qlever/qlever_old.py CHANGED
@@ -58,6 +58,8 @@ def track_action_rank(method):
  method.rank = track_action_rank.counter
  track_action_rank.counter += 1
  return method
+
+
  track_action_rank.counter = 0 # noqa: E305
 
 
@@ -74,18 +76,25 @@ def show_available_config_names():
  script_dir = os.path.dirname(__file__)
  try:
  qleverfiles_dir = os.path.join(script_dir, "Qleverfiles")
- config_names = [qleverfile_name.split(".")[1] for
- qleverfile_name in os.listdir(qleverfiles_dir)]
+ config_names = [
+ qleverfile_name.split(".")[1]
+ for qleverfile_name in os.listdir(qleverfiles_dir)
+ ]
  if not config_names:
- raise Exception(f"Directory \"{qleverfiles_dir}\" exists, but "
- f"contains no Qleverfiles")
+ raise Exception(
+ f'Directory "{qleverfiles_dir}" exists, but '
+ f"contains no Qleverfiles"
+ )
  except Exception as e:
- log.error(f"Could not find any Qleverfiles in \"{qleverfiles_dir}\" "
- f"({e})")
+ log.error(
+ f'Could not find any Qleverfiles in "{qleverfiles_dir}" ' f"({e})"
+ )
  log.info("")
- log.info("Check that you have fully downloaded or cloned "
- "https://github.com/ad-freiburg/qlever-control, and "
- "not just the script itself")
+ log.info(
+ "Check that you have fully downloaded or cloned "
+ "https://github.com/ad-freiburg/qlever-control, and "
+ "not just the script itself"
+ )
  abort_script()
  # Show available config names.
  log.info(f"Available config names are: {', '.join(sorted(config_names))}")
@@ -99,10 +108,12 @@ def show_available_action_names():
  log.info("")
  log.info(f"Available action names are: {', '.join(action_names)}")
  log.info("")
- log.info("To get autocompletion for these, run the following or "
- "add it to your `.bashrc`:")
+ log.info(
+ "To get autocompletion for these, run the following or "
+ "add it to your `.bashrc`:"
+ )
  log.info("")
- log.info(f"{BLUE}eval \"$(qlever-old setup-autocompletion)\"{NORMAL}")
+ log.info(f'{BLUE}eval "$(qlever-old setup-autocompletion)"{NORMAL}')
 
 
  # We want to distinguish between exception that we throw intentionally and all
@@ -113,44 +124,53 @@ class ActionException(Exception):
 
  # This class contains all the action :-)
  class Actions:
-
  def __init__(self):
  self.config = ConfigParser(interpolation=ExtendedInterpolation())
  # Check if the Qleverfile exists.
  if not os.path.isfile("Qleverfile"):
  log.setLevel(logging.INFO)
  log.info("")
- log.error("The qlever script needs a \"Qleverfile\" "
- "in the current directory, but I could not find it")
+ log.error(
+ 'The qlever script needs a "Qleverfile" '
+ "in the current directory, but I could not find it"
+ )
  log.info("")
- log.info("Run `qlever-old setup-config <config name>` to create a "
- "pre-filled Qleverfile")
+ log.info(
+ "Run `qlever-old setup-config <config name>` to create a "
+ "pre-filled Qleverfile"
+ )
  log.info("")
  show_available_config_names()
  abort_script()
  files_read = self.config.read("Qleverfile")
  if not files_read:
- log.error("ConfigParser could not read \"Qleverfile\"")
+ log.error('ConfigParser could not read "Qleverfile"')
  abort_script()
- self.name = self.config['data']['name']
+ self.name = self.config["data"]["name"]
  self.yes_values = ["1", "true", "yes"]
 
  # Defaults for [server] that carry over from [index].
- for option in ["with_text_index", "only_pso_and_pos_permutations",
- "use_patterns"]:
- if option in self.config['index'] and \
- option not in self.config['server']:
- self.config['server'][option] = \
- self.config['index'][option]
+ for option in [
+ "with_text_index",
+ "only_pso_and_pos_permutations",
+ "use_patterns",
+ ]:
+ if (
+ option in self.config["index"]
+ and option not in self.config["server"]
+ ):
+ self.config["server"][option] = self.config["index"][option]
 
  # Default values for options that are not mandatory in the Qleverfile.
  defaults = {
  "general": {
  "log_level": "info",
  "pid": "0",
- "example_queries_url": (f"https://qlever.cs.uni-freiburg.de/"
- f"api/examples/"
- f"{self.config['ui']['config']}"),
+ "example_queries_url": (
+ f"https://qlever.cs.uni-freiburg.de/"
+ f"api/examples/"
+ f"{self.config['ui']['config']}"
+ ),
  "example_queries_limit": "10",
  "example_queries_send": "0",
  },
@@ -183,8 +203,7 @@ class Actions:
  "image": "adfreiburg/qlever-ui",
  "container": "qlever-ui",
  "url": "https://qlever.cs.uni-freiburg.de/api",
-
- }
+ },
  }
  for section in defaults:
  # If the section does not exist, create it.
@@ -198,16 +217,18 @@ class Actions:
  # If the log level was not explicitly set by the first command-line
  # argument (see below), set it according to the Qleverfile.
  if log.level == logging.NOTSET:
- log_level = self.config['general']['log_level'].upper()
+ log_level = self.config["general"]["log_level"].upper()
  try:
  log.setLevel(getattr(logging, log_level))
  except AttributeError:
- log.error(f"Invalid log level: \"{log_level}\"")
+ log.error(f'Invalid log level: "{log_level}"')
  abort_script()
 
  # Show some information (for testing purposes only).
- log.debug(f"Parsed Qleverfile, sections are: "
- f"{', '.join(self.config.sections())}")
+ log.debug(
+ f"Parsed Qleverfile, sections are: "
+ f"{', '.join(self.config.sections())}"
+ )
 
  # Check specifics of the installation.
  self.check_installation()
@@ -225,23 +246,30 @@ class Actions:
  self.net_connections_enabled = True
  except Exception as e:
  self.net_connections_enabled = False
- log.debug(f"Note: psutil.net_connections() failed ({e}),"
- f" will not scan network connections for action"
- f" \"start\"")
+ log.debug(
+ f"Note: psutil.net_connections() failed ({e}),"
+ f" will not scan network connections for action"
+ f' "start"'
+ )
 
  # Check whether docker is installed and works (on MacOS 12, docker
  # hangs when installed without GUI, hence the timeout).
  try:
  completed_process = subprocess.run(
- ["docker", "info"], timeout=0.5,
- stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
+ ["docker", "info"],
+ timeout=0.5,
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.DEVNULL,
+ )
  if completed_process.returncode != 0:
  raise Exception("docker info failed")
  self.docker_enabled = True
  except Exception:
  self.docker_enabled = False
- print("Note: `docker info` failed, therefore"
- " docker.USE_DOCKER=true not supported")
+ print(
+ "Note: `docker info` failed, therefore"
+ " docker.USE_DOCKER=true not supported"
+ )
 
  def set_config(self, section, option, value):
  """
@@ -253,8 +281,10 @@ class Actions:
  log.error(f"Section [{section}] does not exist in Qleverfile")
  abort_script()
  if not self.config.has_option(section, option):
- log.error(f"Option {option.upper()} does not exist in section "
- f"[{section}] in Qleverfile")
+ log.error(
+ f"Option {option.upper()} does not exist in section "
+ f"[{section}] in Qleverfile"
+ )
  abort_script()
  self.config[section][option] = value
 
@@ -278,13 +308,17 @@ class Actions:
 
  message = "from the qlever script".replace(" ", "%20")
  curl_cmd = f"curl -s http://localhost:{port}/ping?msg={message}"
- exit_code = subprocess.call(curl_cmd, shell=True,
- stdout=subprocess.DEVNULL,
- stderr=subprocess.DEVNULL)
+ exit_code = subprocess.call(
+ curl_cmd,
+ shell=True,
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.DEVNULL,
+ )
  return exit_code == 0
 
- def show_process_info(self, psutil_process,
- cmdline_regex, show_heading=True):
+ def show_process_info(
+ self, psutil_process, cmdline_regex, show_heading=True
+ ):
  """
  Helper function that shows information about a process if information
  about the process can be retrieved and the command line matches the
@@ -294,16 +328,23 @@ class Actions:
 
  def show_table_line(pid, user, start_time, rss, cmdline):
  log.info(f"{pid:<8} {user:<8} {start_time:>5} {rss:>5} {cmdline}")
+
  try:
  pinfo = psutil_process.as_dict(
- attrs=['pid', 'username', 'create_time',
- 'memory_info', 'cmdline'])
- cmdline = " ".join(pinfo['cmdline'])
+ attrs=[
+ "pid",
+ "username",
+ "create_time",
+ "memory_info",
+ "cmdline",
+ ]
+ )
+ cmdline = " ".join(pinfo["cmdline"])
  if not re.search(cmdline_regex, cmdline):
  return False
- pid = pinfo['pid']
- user = pinfo['username'] if pinfo['username'] else ""
- start_time = datetime.fromtimestamp(pinfo['create_time'])
+ pid = pinfo["pid"]
+ user = pinfo["username"] if pinfo["username"] else ""
+ start_time = datetime.fromtimestamp(pinfo["create_time"])
  if start_time.date() == date.today():
  start_time = start_time.strftime("%H:%M")
  else:
@@ -326,9 +367,11 @@ class Actions:
  log.info(f"{BLUE}{action_description}{NORMAL}")
  log.info("")
  if only_show:
- log.info("You called \"qlever-old ... show\", therefore the "
- "action is only shown, but not executed (omit the "
- "\"show\" to execute it)")
+ log.info(
+ 'You called "qlever-old ... show", therefore the '
+ "action is only shown, but not executed (omit the "
+ '"show" to execute it)'
+ )
 
  @staticmethod
  @track_action_rank
@@ -344,35 +387,46 @@ class Actions:
  if os.path.isfile("Qleverfile"):
  log.error("Qleverfile already exists in current directory")
  log.info("")
- log.info("If you want to create a new Qleverfile using "
- "`qlever-old setup-config`, delete the existing "
- "Qleverfile first")
+ log.info(
+ "If you want to create a new Qleverfile using "
+ "`qlever-old setup-config`, delete the existing "
+ "Qleverfile first"
+ )
  abort_script()
 
  # Get the directory of this script and copy the Qleverfile for `config`
  # to the current directory.
  script_dir = os.path.dirname(os.path.realpath(__file__))
- qleverfile_path = os.path.join(script_dir,
- f"Qleverfiles/Qleverfile.{config_name}")
+ qleverfile_path = os.path.join(
+ script_dir, f"Qleverfiles/Qleverfile.{config_name}"
+ )
  if not os.path.isfile(qleverfile_path):
- log.error(f"File \"{qleverfile_path}\" does not exist")
+ log.error(f'File "{qleverfile_path}" does not exist')
  log.info("")
  abort_script()
  try:
  shutil.copy(qleverfile_path, "Qleverfile")
  except Exception as e:
- log.error(f"Could not copy \"{qleverfile_path}\""
- f" to current directory: {e}")
+ log.error(
+ f'Could not copy "{qleverfile_path}"'
+ f" to current directory: {e}"
+ )
  abort_script()
- log.info(f"Created Qleverfile for config \"{config_name}\""
- f" in current directory")
+ log.info(
+ f'Created Qleverfile for config "{config_name}"'
+ f" in current directory"
+ )
  log.info("")
  if config_name == "default":
- log.info("Since this is the default Qleverfile, you need to "
- "edit it before you can continue")
+ log.info(
+ "Since this is the default Qleverfile, you need to "
+ "edit it before you can continue"
+ )
  log.info("")
- log.info("Afterwards, run `qlever` without arguments to see "
- "which actions are available")
+ log.info(
+ "Afterwards, run `qlever` without arguments to see "
+ "which actions are available"
+ )
  else:
  show_available_action_names()
  log.info("")
@@ -384,17 +438,22 @@ class Actions:
  values for options that are not set explicitly in the Qleverfile.
  """
 
- print(f"{BLUE}Showing the current configuration, including default"
- f" values for options that are not set explicitly in the"
- f" Qleverfile{NORMAL}")
+ print(
+ f"{BLUE}Showing the current configuration, including default"
+ f" values for options that are not set explicitly in the"
+ f" Qleverfile{NORMAL}"
+ )
  for section in self.config.sections():
  print()
  print(f"[{section}]")
- max_option_length = max([len(option) for option in
- self.config[section]])
+ max_option_length = max(
+ [len(option) for option in self.config[section]]
+ )
  for option in self.config[section]:
- print(f"{option.upper().ljust(max_option_length)} = "
- f"{self.config[section][option]}")
+ print(
+ f"{option.upper().ljust(max_option_length)} = "
+ f"{self.config[section][option]}"
+ )
 
  print()
 
@@ -405,10 +464,10 @@ class Actions:
  """
 
  # Construct the command line.
- if not self.config['data']['get_data_cmd']:
+ if not self.config["data"]["get_data_cmd"]:
  log.error(f"{RED}No GET_DATA_CMD specified in Qleverfile")
  return
- get_data_cmd = self.config['data']['get_data_cmd']
+ get_data_cmd = self.config["data"]["get_data_cmd"]
 
  # Show it.
  self.show(get_data_cmd, only_show)
@@ -418,7 +477,8 @@ class Actions:
  # Execute the command line.
  subprocess.run(get_data_cmd, shell=True)
  total_file_size = self.get_total_file_size(
- self.config['index']['file_names'].split())
+ self.config["index"]["file_names"].split()
+ )
  print(f"Total file size: {total_file_size:.1f} GB")
 
  @track_action_rank
@@ -429,75 +489,96 @@ class Actions:
429
489
  """
430
490
 
431
491
  # Construct the command line based on the config file.
432
- index_config = self.config['index']
433
- cmdline = (f"{index_config['cat_files']} | {index_config['binary']}"
434
- f" -F ttl -f -"
435
- f" -i {self.name}"
436
- f" -s {self.name}.settings.json")
437
- if index_config['only_pso_and_pos_permutations'] in self.yes_values:
492
+ index_config = self.config["index"]
493
+ cmdline = (
494
+ f"{index_config['cat_files']} | {index_config['binary']}"
495
+ f" -F ttl -f -"
496
+ f" -i {self.name}"
497
+ f" -s {self.name}.settings.json"
498
+ )
499
+ if index_config["only_pso_and_pos_permutations"] in self.yes_values:
438
500
  cmdline += " --only-pso-and-pos-permutations --no-patterns"
439
- if not index_config['use_patterns'] in self.yes_values:
501
+ if index_config["use_patterns"] not in self.yes_values:
440
502
  cmdline += " --no-patterns"
441
- if index_config['with_text_index'] in \
442
- ["from_text_records", "from_text_records_and_literals"]:
443
- cmdline += (f" -w {self.name}.wordsfile.tsv"
444
- f" -d {self.name}.docsfile.tsv")
445
- if index_config['with_text_index'] in \
446
- ["from_literals", "from_text_records_and_literals"]:
503
+ if index_config["with_text_index"] in [
504
+ "from_text_records",
505
+ "from_text_records_and_literals",
506
+ ]:
507
+ cmdline += (
508
+ f" -w {self.name}.wordsfile.tsv"
509
+ f" -d {self.name}.docsfile.tsv"
510
+ )
511
+ if index_config["with_text_index"] in [
512
+ "from_literals",
513
+ "from_text_records_and_literals",
514
+ ]:
447
515
  cmdline += " --text-words-from-literals"
448
- if 'stxxl_memory' in index_config:
516
+ if "stxxl_memory" in index_config:
449
517
  cmdline += f" --stxxl-memory {index_config['stxxl_memory']}"
450
518
  cmdline += f" | tee {self.name}.index-log.txt"
451
519
 
452
520
  # If the total file size is larger than 10 GB, set ulimit (such that a
453
521
  # large number of open files is allowed).
454
522
  total_file_size = self.get_total_file_size(
455
- self.config['index']['file_names'].split())
523
+ self.config["index"]["file_names"].split()
524
+ )
456
525
  if total_file_size > 10:
457
526
  cmdline = f"ulimit -Sn 1048576; {cmdline}"
458
527
 
459
528
  # If we are using Docker, run the command in a Docker container.
460
529
  # Here is how the shell script does it:
461
- if self.config['docker']['use_docker'] in self.yes_values:
462
- docker_config = self.config['docker']
463
- cmdline = (f"docker run -it --rm -u $(id -u):$(id -g)"
464
- f" -v /etc/localtime:/etc/localtime:ro"
465
- f" -v $(pwd):/index -w /index"
466
- f" --entrypoint bash"
467
- f" --name {docker_config['container_indexer']}"
468
- f" {docker_config['image']}"
469
- f" -c {shlex.quote(cmdline)}")
530
+ if self.config["docker"]["use_docker"] in self.yes_values:
531
+ docker_config = self.config["docker"]
532
+ cmdline = (
533
+ f"docker run -it --rm -u $(id -u):$(id -g)"
534
+ f" -v /etc/localtime:/etc/localtime:ro"
535
+ f" -v $(pwd):/index -w /index"
536
+ f" --entrypoint bash"
537
+ f" --name {docker_config['container_indexer']}"
538
+ f" {docker_config['image']}"
539
+ f" -c {shlex.quote(cmdline)}"
540
+ )
470
541
 
471
542
  # Show the command line.
472
- self.show(f"Write value of config variable index.SETTINGS_JSON to "
473
- f"file {self.name}.settings.json\n"
474
- f"{cmdline}", only_show)
543
+ self.show(
544
+ f"Write value of config variable index.SETTINGS_JSON to "
545
+ f"file {self.name}.settings.json\n"
546
+ f"{cmdline}",
547
+ only_show,
548
+ )
475
549
  if only_show:
476
550
  return
477
551
 
478
552
  # When docker.USE_DOCKER=false, check if the binary for building the
479
553
  # index exists and works.
480
- if self.config['docker']['use_docker'] not in self.yes_values:
554
+ if self.config["docker"]["use_docker"] not in self.yes_values:
481
555
  try:
482
556
  check_binary_cmd = f"{self.config['index']['binary']} --help"
483
- subprocess.run(check_binary_cmd, shell=True, check=True,
484
- stdout=subprocess.DEVNULL,
485
- stderr=subprocess.DEVNULL)
557
+ subprocess.run(
558
+ check_binary_cmd,
559
+ shell=True,
560
+ check=True,
561
+ stdout=subprocess.DEVNULL,
562
+ stderr=subprocess.DEVNULL,
563
+ )
486
564
  except subprocess.CalledProcessError as e:
487
- log.error(f"Running \"{check_binary_cmd}\" failed ({e}), "
488
- f"set index.BINARY to a different binary or "
489
- f"set docker.USE_DOCKER=true")
565
+ log.error(
566
+ f'Running "{check_binary_cmd}" failed ({e}), '
567
+ f"set index.BINARY to a different binary or "
568
+ f"set docker.USE_DOCKER=true"
569
+ )
490
570
  abort_script()
491
571
 
492
572
  # Check if index files (name.index.*) already exist.
493
573
  if glob.glob(f"{self.name}.index.*"):
494
574
  raise ActionException(
495
- f"Index files \"{self.name}.index.*\" already exist, "
496
- f"please delete them if you want to rebuild the index")
575
+ f'Index files "{self.name}.index.*" already exist, '
576
+ f"please delete them if you want to rebuild the index"
577
+ )
497
578
 
498
579
  # Write settings.json file and run the command.
499
580
  with open(f"{self.name}.settings.json", "w") as f:
500
- f.write(self.config['index']['settings_json'])
581
+ f.write(self.config["index"]["settings_json"])
501
582
  subprocess.run(cmdline, shell=True)
502
583
 
503
584
  @track_action_rank
@@ -507,15 +588,18 @@ class Actions:
507
588
  """
508
589
 
509
590
  # List of all the index files (not all of them need to be there).
510
- index_fileglobs = (f"{self.name}.index.*",
511
- f"{self.name}.patterns.*",
512
- f"{self.name}.prefixes",
513
- f"{self.name}.meta-data.json",
514
- f"{self.name}.vocabulary.*")
591
+ index_fileglobs = (
592
+ f"{self.name}.index.*",
593
+ f"{self.name}.patterns.*",
594
+ f"{self.name}.prefixes",
595
+ f"{self.name}.meta-data.json",
596
+ f"{self.name}.vocabulary.*",
597
+ )
515
598
 
516
599
  # Show the command line.
517
- self.show(f"Remove index files {', '.join(index_fileglobs)}",
518
- only_show)
600
+ self.show(
601
+ f"Remove index files {', '.join(index_fileglobs)}", only_show
602
+ )
519
603
  if only_show:
520
604
  return
521
605
 
@@ -529,8 +613,10 @@ class Actions:
529
613
  os.remove(filename)
530
614
  files_removed.append(filename)
531
615
  if files_removed:
532
- log.info(f"Removed the following index files of total size "
533
- f"{total_file_size / 1e9:.1f} GB:")
616
+ log.info(
617
+ f"Removed the following index files of total size "
618
+ f"{total_file_size / 1e9:.1f} GB:"
619
+ )
534
620
  log.info("")
535
621
  log.info(", ".join(files_removed))
536
622
  else:
@@ -545,44 +631,49 @@ class Actions:
545
631
  """
546
632
 
547
633
  # Construct the command line based on the config file.
548
- server_config = self.config['server']
549
- cmdline = (f"{self.config['server']['binary']}"
550
- f" -i {self.name}"
551
- f" -j {server_config['num_threads']}"
552
- f" -p {server_config['port']}"
553
- f" -m {server_config['memory_for_queries']}"
554
- f" -c {server_config['cache_max_size']}"
555
- f" -e {server_config['cache_max_size_single_entry']}"
556
- f" -k {server_config['cache_max_num_entries']}")
557
- if server_config['timeout']:
634
+ server_config = self.config["server"]
635
+ cmdline = (
636
+ f"{self.config['server']['binary']}"
637
+ f" -i {self.name}"
638
+ f" -j {server_config['num_threads']}"
639
+ f" -p {server_config['port']}"
640
+ f" -m {server_config['memory_for_queries']}"
641
+ f" -c {server_config['cache_max_size']}"
642
+ f" -e {server_config['cache_max_size_single_entry']}"
643
+ f" -k {server_config['cache_max_num_entries']}"
644
+ )
645
+ if server_config["timeout"]:
558
646
  cmdline += f" -s {server_config['timeout']}"
559
- if server_config['access_token']:
647
+ if server_config["access_token"]:
560
648
  cmdline += f" -a {server_config['access_token']}"
561
- if server_config['only_pso_and_pos_permutations'] in self.yes_values:
649
+ if server_config["only_pso_and_pos_permutations"] in self.yes_values:
562
650
  cmdline += " --only-pso-and-pos-permutations"
563
- if not server_config['use_patterns'] in self.yes_values:
651
+ if server_config["use_patterns"] not in self.yes_values:
564
652
  cmdline += " --no-patterns"
565
- if server_config['with_text_index'] in \
566
- ["from_text_records",
567
- "from_literals",
568
- "from_text_records_and_literals"]:
653
+ if server_config["with_text_index"] in [
654
+ "from_text_records",
655
+ "from_literals",
656
+ "from_text_records_and_literals",
657
+ ]:
569
658
  cmdline += " -t"
570
659
  cmdline += f" > {self.name}.server-log.txt 2>&1"
571
660
 
572
661
  # If we are using Docker, run the command in a docker container.
573
- if self.config['docker']['use_docker'] in self.yes_values:
574
- docker_config = self.config['docker']
575
- cmdline = (f"docker run -d --restart=unless-stopped"
576
- f" -u $(id -u):$(id -g)"
577
- f" -it -v /etc/localtime:/etc/localtime:ro"
578
- f" -v $(pwd):/index"
579
- f" -p {server_config['port']}:{server_config['port']}"
580
- f" -w /index"
581
- f" --entrypoint bash"
582
- f" --name {docker_config['container_server']}"
583
- f" --init"
584
- f" {docker_config['image']}"
585
- f" -c {shlex.quote(cmdline)}")
662
+ if self.config["docker"]["use_docker"] in self.yes_values:
663
+ docker_config = self.config["docker"]
664
+ cmdline = (
665
+ f"docker run -d --restart=unless-stopped"
666
+ f" -u $(id -u):$(id -g)"
667
+ f" -it -v /etc/localtime:/etc/localtime:ro"
668
+ f" -v $(pwd):/index"
669
+ f" -p {server_config['port']}:{server_config['port']}"
670
+ f" -w /index"
671
+ f" --entrypoint bash"
672
+ f" --name {docker_config['container_server']}"
673
+ f" --init"
674
+ f" {docker_config['image']}"
675
+ f" -c {shlex.quote(cmdline)}"
676
+ )
586
677
  else:
587
678
  cmdline = f"nohup {cmdline} &"
588
679
 
@@ -593,41 +684,53 @@ class Actions:
593
684
 
594
685
  # When docker.USE_DOCKER=false, check if the binary for starting the
595
686
  # server exists and works.
596
- if self.config['docker']['use_docker'] not in self.yes_values:
687
+ if self.config["docker"]["use_docker"] not in self.yes_values:
597
688
  try:
598
689
  check_binary_cmd = f"{self.config['server']['binary']} --help"
599
- subprocess.run(check_binary_cmd, shell=True, check=True,
600
- stdout=subprocess.DEVNULL,
601
- stderr=subprocess.DEVNULL)
690
+ subprocess.run(
691
+ check_binary_cmd,
692
+ shell=True,
693
+ check=True,
694
+ stdout=subprocess.DEVNULL,
695
+ stderr=subprocess.DEVNULL,
696
+ )
602
697
  except subprocess.CalledProcessError as e:
603
- log.error(f"Running \"{check_binary_cmd}\" failed ({e}), "
604
- f"set server.BINARY to a different binary or "
605
- f"set docker.USE_DOCKER=true")
698
+ log.error(
699
+ f'Running "{check_binary_cmd}" failed ({e}), '
700
+ f"set server.BINARY to a different binary or "
701
+ f"set docker.USE_DOCKER=true"
702
+ )
606
703
  abort_script()
607
704
 
608
705
  # Check if a QLever server is already running on this port.
609
- port = server_config['port']
706
+ port = server_config["port"]
610
707
  if self.alive_check(port):
611
708
  raise ActionException(
612
- f"QLever server already running on port {port}")
709
+ f"QLever server already running on port {port}"
710
+ )
613
711
 
614
712
  # Check if another process is already listening.
615
713
  if self.net_connections_enabled:
616
- if port in [conn.laddr.port for conn
617
- in psutil.net_connections()]:
714
+ if port in [conn.laddr.port for conn in psutil.net_connections()]:
618
715
  raise ActionException(
619
- f"Port {port} is already in use by another process")
716
+ f"Port {port} is already in use by another process"
717
+ )
620
718
 
621
719
  # Execute the command line.
622
- subprocess.run(cmdline, shell=True,
623
- stdout=subprocess.DEVNULL,
624
- stderr=subprocess.DEVNULL)
720
+ subprocess.run(
721
+ cmdline,
722
+ shell=True,
723
+ stdout=subprocess.DEVNULL,
724
+ stderr=subprocess.DEVNULL,
725
+ )
625
726
 
626
727
  # Tail the server log until the server is ready (note that the `exec`
627
728
  # is important to make sure that the tail process is killed and not
628
729
  # just the bash process).
629
- log.info(f"Follow {self.name}.server-log.txt until the server is ready"
630
- f" (Ctrl-C stops following the log, but not the server)")
730
+ log.info(
731
+ f"Follow {self.name}.server-log.txt until the server is ready"
732
+ f" (Ctrl-C stops following the log, but not the server)"
733
+ )
631
734
  log.info("")
632
735
  tail_cmd = f"exec tail -f {self.name}.server-log.txt"
633
736
  tail_proc = subprocess.Popen(tail_cmd, shell=True)
@@ -635,20 +738,24 @@ class Actions:
635
738
  time.sleep(1)
636
739
 
637
740
  # Set the access token if specified.
638
- access_token = server_config['access_token']
639
- access_arg = f"--data-urlencode \"access-token={access_token}\""
640
- if "index_description" in self.config['data']:
641
- desc = self.config['data']['index_description']
642
- curl_cmd = (f"curl -Gs http://localhost:{port}/api"
643
- f" --data-urlencode \"index-description={desc}\""
644
- f" {access_arg} > /dev/null")
741
+ access_token = server_config["access_token"]
742
+ access_arg = f'--data-urlencode "access-token={access_token}"'
743
+ if "index_description" in self.config["data"]:
744
+ desc = self.config["data"]["index_description"]
745
+ curl_cmd = (
746
+ f"curl -Gs http://localhost:{port}/api"
747
+ f' --data-urlencode "index-description={desc}"'
748
+ f" {access_arg} > /dev/null"
749
+ )
645
750
  log.debug(curl_cmd)
646
751
  subprocess.run(curl_cmd, shell=True)
647
- if "text_description" in self.config['data']:
648
- desc = self.config['data']['text_description']
649
- curl_cmd = (f"curl -Gs http://localhost:{port}/api"
650
- f" --data-urlencode \"text-description={desc}\""
651
- f" {access_arg} > /dev/null")
752
+ if "text_description" in self.config["data"]:
753
+ desc = self.config["data"]["text_description"]
754
+ curl_cmd = (
755
+ f"curl -Gs http://localhost:{port}/api"
756
+ f' --data-urlencode "text-description={desc}"'
757
+ f" {access_arg} > /dev/null"
758
+ )
652
759
  log.debug(curl_cmd)
653
760
  subprocess.run(curl_cmd, shell=True)
654
761
 
@@ -664,28 +771,39 @@ class Actions:
664
771
  """
665
772
 
666
773
  # Show action description.
667
- docker_container_name = self.config['docker']['container_server']
668
- cmdline_regex = (f"ServerMain.* -i [^ ]*{self.name}")
669
- self.show(f"Checking for process matching \"{cmdline_regex}\" "
670
- f"and for Docker container with name "
671
- f"\"{docker_container_name}\"", only_show)
774
+ docker_container_name = self.config["docker"]["container_server"]
775
+ cmdline_regex = f"ServerMain.* -i [^ ]*{self.name}"
776
+ self.show(
777
+ f'Checking for process matching "{cmdline_regex}" '
778
+ f"and for Docker container with name "
779
+ f'"{docker_container_name}"',
780
+ only_show,
781
+ )
672
782
  if only_show:
673
783
  return
674
784
 
675
785
  # First check if there is docker container running.
676
786
  if self.docker_enabled:
677
- docker_cmd = (f"docker stop {docker_container_name} && "
678
- f"docker rm {docker_container_name}")
787
+ docker_cmd = (
788
+ f"docker stop {docker_container_name} && "
789
+ f"docker rm {docker_container_name}"
790
+ )
679
791
  try:
680
- subprocess.run(docker_cmd, shell=True, check=True,
681
- stdout=subprocess.DEVNULL,
682
- stderr=subprocess.DEVNULL)
683
- log.info(f"Docker container with name "
684
- f"\"{docker_container_name}\" "
685
- f"stopped and removed")
792
+ subprocess.run(
793
+ docker_cmd,
794
+ shell=True,
795
+ check=True,
796
+ stdout=subprocess.DEVNULL,
797
+ stderr=subprocess.DEVNULL,
798
+ )
799
+ log.info(
800
+ f"Docker container with name "
801
+ f'"{docker_container_name}" '
802
+ f"stopped and removed"
803
+ )
686
804
  return
687
805
  except Exception as e:
688
- log.debug(f"Error running \"{docker_cmd}\": {e}")
806
+ log.debug(f'Error running "{docker_cmd}": {e}')
689
807
 
690
808
  # Check if there is a process running on the server port using psutil.
691
809
  #
@@ -694,22 +812,31 @@ class Actions:
694
812
  for proc in psutil.process_iter():
695
813
  try:
696
814
  pinfo = proc.as_dict(
697
- attrs=['pid', 'username', 'create_time',
698
- 'memory_info', 'cmdline'])
699
- cmdline = " ".join(pinfo['cmdline'])
815
+ attrs=[
816
+ "pid",
817
+ "username",
818
+ "create_time",
819
+ "memory_info",
820
+ "cmdline",
821
+ ]
822
+ )
823
+ cmdline = " ".join(pinfo["cmdline"])
700
824
  except Exception as err:
701
825
  log.debug(f"Error getting process info: {err}")
702
826
  if re.match(cmdline_regex, cmdline):
703
- log.info(f"Found process {pinfo['pid']} from user "
704
- f"{pinfo['username']} with command line: {cmdline}")
827
+ log.info(
828
+ f"Found process {pinfo['pid']} from user "
829
+ f"{pinfo['username']} with command line: {cmdline}"
830
+ )
705
831
  print()
706
832
  try:
707
833
  proc.kill()
708
834
  log.info(f"Killed process {pinfo['pid']}")
709
835
  except Exception as e:
710
836
  raise ActionException(
711
- f"Could not kill process with PID "
712
- f"{pinfo['pid']}: {e}")
837
+ f"Could not kill process with PID "
838
+ f"{pinfo['pid']}: {e}"
839
+ )
713
840
  return
714
841
 
715
842
  # No matching process found.
@@ -726,8 +853,9 @@ class Actions:
726
853
  """
727
854
 
728
855
  # Show action description.
729
- self.show("Stop running server if found, then start new server",
730
- only_show)
856
+ self.show(
857
+ "Stop running server if found, then start new server", only_show
858
+ )
731
859
  if only_show:
732
860
  return
733
861
 
@@ -749,8 +877,10 @@ class Actions:
749
877
  return
750
878
 
751
879
  # Do it.
752
- log.info(f"Follow {self.name}.server-log.txt (Ctrl-C stops"
753
- f" following the log, but not the server)")
880
+ log.info(
881
+ f"Follow {self.name}.server-log.txt (Ctrl-C stops"
882
+ f" following the log, but not the server)"
883
+ )
754
884
  log.info("")
755
885
  subprocess.run(log_cmd, shell=True)
756
886
 
@@ -765,9 +895,12 @@ class Actions:
765
895
  # Show action description.
766
896
  cmdline_regex = "(ServerMain|IndexBuilderMain)"
767
897
  # cmdline_regex = f"(ServerMain|IndexBuilderMain).*{self.name}"
768
- self.show(f"{BLUE}Show all processes on this machine where "
769
- f"the command line matches {cmdline_regex}"
770
- f" using Python's psutil library", only_show)
898
+ self.show(
899
+ f"{BLUE}Show all processes on this machine where "
900
+ f"the command line matches {cmdline_regex}"
901
+ f" using Python's psutil library",
902
+ only_show,
903
+ )
771
904
  if only_show:
772
905
  return
773
906
 
@@ -775,8 +908,9 @@ class Actions:
775
908
  num_processes_found = 0
776
909
  for proc in psutil.process_iter():
777
910
  show_heading = num_processes_found == 0
778
- process_shown = self.show_process_info(proc, cmdline_regex,
779
- show_heading=show_heading)
911
+ process_shown = self.show_process_info(
912
+ proc, cmdline_regex, show_heading=show_heading
913
+ )
780
914
  if process_shown:
781
915
  num_processes_found += 1
782
916
  if num_processes_found == 0:
@@ -789,10 +923,12 @@ class Actions:
789
923
  index, based on the log file of th index build.
790
924
  """
791
925
 
792
- log_file_name = self.config['data']['name'] + ".index-log.txt"
793
- log.info(f"{BLUE}Breakdown of the time for building the index, "
794
- f"based on the timestamps for key lines in "
795
- f"\"{log_file_name}{NORMAL}\"")
926
+ log_file_name = self.config["data"]["name"] + ".index-log.txt"
927
+ log.info(
928
+ f"{BLUE}Breakdown of the time for building the index, "
929
+ f"based on the timestamps for key lines in "
930
+ f'"{log_file_name}{NORMAL}"'
931
+ )
796
932
  log.info("")
797
933
  if only_show:
798
934
  return
@@ -802,8 +938,9 @@ class Actions:
802
938
  with open(log_file_name, "r") as f:
803
939
  lines = f.readlines()
804
940
  except Exception as e:
805
- raise ActionException(f"Could not read log file {log_file_name}: "
806
- f"{e}")
941
+ raise ActionException(
942
+ f"Could not read log file {log_file_name}: " f"{e}"
943
+ )
807
944
  current_line = 0
808
945
 
809
946
  # Helper lambda that finds the next line matching the given `regex`,
@@ -827,13 +964,15 @@ class Actions:
827
964
  if regex_match:
828
965
  try:
829
966
  return datetime.strptime(
830
- re.match(timestamp_regex, line).group(),
831
- timestamp_format), regex_match
967
+ re.match(timestamp_regex, line).group(),
968
+ timestamp_format,
969
+ ), regex_match
832
970
  except Exception as e:
833
971
  raise ActionException(
834
- f"Could not parse timestamp of form "
835
- f"\"{timestamp_regex}\" from line "
836
- f" \"{line.rstrip()}\" ({e})")
972
+ f"Could not parse timestamp of form "
973
+ f'"{timestamp_regex}" from line '
974
+ f' "{line.rstrip()}" ({e})'
975
+ )
837
976
  # If we get here, we did not find a matching line.
838
977
  if line_is_optional:
839
978
  current_line = current_line_backup
@@ -849,13 +988,15 @@ class Actions:
849
988
  perm_begin, _ = find_next_line(r"INFO:\s*Creating a pair", True)
850
989
  if perm_begin is None:
851
990
  break
852
- _, perm_info = find_next_line(r"INFO:\s*Writing meta data for"
853
- r" ([A-Z]+ and [A-Z]+)", True)
991
+ _, perm_info = find_next_line(
992
+ r"INFO:\s*Writing meta data for" r" ([A-Z]+ and [A-Z]+)", True
993
+ )
854
994
  # if perm_info is None:
855
995
  # break
856
996
  perm_begin_and_info.append((perm_begin, perm_info))
857
- convert_end = (perm_begin_and_info[0][0] if
858
- len(perm_begin_and_info) > 0 else None)
997
+ convert_end = (
998
+ perm_begin_and_info[0][0] if len(perm_begin_and_info) > 0 else None
999
+ )
859
1000
  normal_end, _ = find_next_line(r"INFO:\s*Index build completed")
860
1001
  text_begin, _ = find_next_line(r"INFO:\s*Adding text index", True)
861
1002
  text_end, _ = find_next_line(r"INFO:\s*DocsDB done", True)
@@ -867,9 +1008,11 @@ class Actions:
867
1008
  if overall_begin is None:
868
1009
  raise ActionException("Missing line that index build has started")
869
1010
  if overall_begin and not merge_begin:
870
- raise ActionException("According to the log file, the index build "
871
- "has started, but is still in its first "
872
- "phase (parsing the input)")
1011
+ raise ActionException(
1012
+ "According to the log file, the index build "
1013
+ "has started, but is still in its first "
1014
+ "phase (parsing the input)"
1015
+ )
873
1016
 
874
1017
  # Helper lambda that shows the duration for a phase (if the start and
875
1018
  # end timestamps are available).
@@ -904,22 +1047,31 @@ class Actions:
904
1047
  show_duration("Convert to global IDs", [(convert_begin, convert_end)])
905
1048
  for i in range(len(perm_begin_and_info)):
906
1049
  perm_begin, perm_info = perm_begin_and_info[i]
907
- perm_end = perm_begin_and_info[i + 1][0] if i + 1 < len(
908
- perm_begin_and_info) else normal_end
909
- perm_info_text = (perm_info.group(1).replace(" and ", " & ")
910
- if perm_info else f"#{i + 1}")
911
- show_duration(f"Permutation {perm_info_text}",
912
- [(perm_begin, perm_end)])
1050
+ perm_end = (
1051
+ perm_begin_and_info[i + 1][0]
1052
+ if i + 1 < len(perm_begin_and_info)
1053
+ else normal_end
1054
+ )
1055
+ perm_info_text = (
1056
+ perm_info.group(1).replace(" and ", " & ")
1057
+ if perm_info
1058
+ else f"#{i + 1}"
1059
+ )
1060
+ show_duration(
1061
+ f"Permutation {perm_info_text}", [(perm_begin, perm_end)]
1062
+ )
913
1063
  show_duration("Text index", [(text_begin, text_end)])
914
1064
  if text_begin and text_end:
915
1065
  log.info("")
916
- show_duration("TOTAL index build time",
917
- [(overall_begin, normal_end),
918
- (text_begin, text_end)])
1066
+ show_duration(
1067
+ "TOTAL index build time",
1068
+ [(overall_begin, normal_end), (text_begin, text_end)],
1069
+ )
919
1070
  elif normal_end:
920
1071
  log.info("")
921
- show_duration("TOTAL index build time",
922
- [(overall_begin, normal_end)])
1072
+ show_duration(
1073
+ "TOTAL index build time", [(overall_begin, normal_end)]
1074
+ )
923
1075
 
924
1076
  @track_action_rank
925
1077
  def action_test_query(self, only_show=False):
@@ -929,11 +1081,15 @@ class Actions:
929
1081
 
930
1082
  # Construct the curl command.
931
1083
  query = "SELECT * WHERE { ?s ?p ?o } LIMIT 10"
932
- headers = ["Accept: text/tab-separated-values",
933
- "Content-Type: application/sparql-query"]
934
- curl_cmd = (f"curl -s {self.config['server']['url']} "
935
- f"-H \"{headers[0]}\" -H \"{headers[1]}\" "
936
- f"--data \"{query}\"")
1084
+ headers = [
1085
+ "Accept: text/tab-separated-values",
1086
+ "Content-Type: application/sparql-query",
1087
+ ]
1088
+ curl_cmd = (
1089
+ f"curl -s {self.config['server']['url']} "
1090
+ f"-H \"{headers[0]}\" -H \"{headers[1]}\" "
1091
+ f"--data \"{query}\""
1092
+ )
937
1093
 
938
1094
  # Show it.
939
1095
  self.show(curl_cmd, only_show)
@@ -955,37 +1111,58 @@ class Actions:
955
1111
  server_url = f"http://{host_name}:{self.config['server']['port']}"
956
1112
  docker_rm_cmd = f"docker rm -f {self.config['ui']['container']}"
957
1113
  docker_pull_cmd = f"docker pull {self.config['ui']['image']}"
958
- docker_run_cmd = (f"docker run -d -p {self.config['ui']['port']}:7000 "
959
- f"--name {self.config['ui']['container']} "
960
- f"{self.config['ui']['image']} ")
961
- docker_exec_cmd = (f"docker exec -it "
962
- f"{self.config['ui']['container']} "
963
- f"bash -c \"python manage.py configure "
964
- f"{self.config['ui']['config']} "
965
- f"{server_url}\"")
1114
+ docker_run_cmd = (
1115
+ f"docker run -d -p {self.config['ui']['port']}:7000 "
1116
+ f"--name {self.config['ui']['container']} "
1117
+ f"{self.config['ui']['image']} "
1118
+ )
1119
+ docker_exec_cmd = (
1120
+ f"docker exec -it "
1121
+ f"{self.config['ui']['container']} "
1122
+ f"bash -c \"python manage.py configure "
1123
+ f"{self.config['ui']['config']} "
1124
+ f"{server_url}\""
1125
+ )
966
1126
 
967
1127
  # Show them.
968
- self.show("\n".join([docker_rm_cmd, docker_pull_cmd, docker_run_cmd,
969
- docker_exec_cmd]), only_show)
1128
+ self.show(
1129
+ "\n".join(
1130
+ [
1131
+ docker_rm_cmd,
1132
+ docker_pull_cmd,
1133
+ docker_run_cmd,
1134
+ docker_exec_cmd,
1135
+ ]
1136
+ ),
1137
+ only_show,
1138
+ )
970
1139
  if only_show:
971
1140
  return
972
1141
 
973
1142
  # Execute them.
974
1143
  try:
975
- subprocess.run(docker_rm_cmd, shell=True,
976
- stdout=subprocess.DEVNULL)
977
- subprocess.run(docker_pull_cmd, shell=True,
978
- stdout=subprocess.DEVNULL)
979
- subprocess.run(docker_run_cmd, shell=True,
980
- stdout=subprocess.DEVNULL)
981
- subprocess.run(docker_exec_cmd, shell=True,
982
- stdout=subprocess.DEVNULL)
1144
+ subprocess.run(
1145
+ docker_rm_cmd, shell=True, stdout=subprocess.DEVNULL
1146
+ )
1147
+ subprocess.run(
1148
+ docker_pull_cmd, shell=True, stdout=subprocess.DEVNULL
1149
+ )
1150
+ subprocess.run(
1151
+ docker_run_cmd, shell=True, stdout=subprocess.DEVNULL
1152
+ )
1153
+ subprocess.run(
1154
+ docker_exec_cmd, shell=True, stdout=subprocess.DEVNULL
1155
+ )
983
1156
  except subprocess.CalledProcessError as e:
984
1157
  raise ActionException(f"Failed to start the QLever UI {e}")
985
- log.info(f"The QLever UI should now be up at "
986
- f"http://{host_name}:{self.config['ui']['port']}")
987
- log.info("You can log in as QLever UI admin with username and "
988
- "password \"demo\"")
1158
+ log.info(
1159
+ f"The QLever UI should now be up at "
1160
+ f"http://{host_name}:{self.config['ui']['port']}"
1161
+ )
1162
+ log.info(
1163
+ "You can log in as QLever UI admin with username and "
1164
+ 'password "demo"'
1165
+ )
989
1166
 
990
1167
  @track_action_rank
991
1168
  def action_cache_stats_and_settings(self, only_show=False):
@@ -994,10 +1171,14 @@ class Actions:
994
1171
  """
995
1172
 
996
1173
  # Construct the two curl commands.
997
- cache_stats_cmd = (f"curl -s {self.config['server']['url']} "
998
- f"--data-urlencode \"cmd=cache-stats\"")
999
- cache_settings_cmd = (f"curl -s {self.config['server']['url']} "
1000
- f"--data-urlencode \"cmd=get-settings\"")
1174
+ cache_stats_cmd = (
1175
+ f"curl -s {self.config['server']['url']} "
1176
+ f"--data-urlencode \"cmd=cache-stats\""
1177
+ )
1178
+ cache_settings_cmd = (
1179
+ f"curl -s {self.config['server']['url']} "
1180
+ f"--data-urlencode \"cmd=get-settings\""
1181
+ )
1001
1182
 
1002
1183
  # Show them.
1003
1184
  self.show("\n".join([cache_stats_cmd, cache_settings_cmd]), only_show)
@@ -1007,8 +1188,9 @@ class Actions:
1007
1188
  # Execute them.
1008
1189
  try:
1009
1190
  cache_stats = subprocess.check_output(cache_stats_cmd, shell=True)
1010
- cache_settings = subprocess.check_output(cache_settings_cmd,
1011
- shell=True)
1191
+ cache_settings = subprocess.check_output(
1192
+ cache_settings_cmd, shell=True
1193
+ )
1012
1194
 
1013
1195
  # Print the key-value pairs of the stats JSON in tabular form.
1014
1196
  def print_json_as_tabular(raw_json):
@@ -1020,12 +1202,14 @@ class Actions:
1020
1202
  if re.match(r"^\d+\.\d+$", value):
1021
1203
  value = "{:.2f}".format(float(value))
1022
1204
  log.info(f"{key.ljust(max_key_len)} : {value}")
1205
+
1023
1206
  print_json_as_tabular(cache_stats)
1024
1207
  log.info("")
1025
1208
  print_json_as_tabular(cache_settings)
1026
1209
  except Exception as e:
1027
- raise ActionException(f"Failed to get cache stats and settings: "
1028
- f"{e}")
1210
+ raise ActionException(
1211
+ f"Failed to get cache stats and settings: " f"{e}"
1212
+ )
1029
1213
 
1030
1214
  @track_action_rank
1031
1215
  def action_clear_cache(self, only_show=False):
@@ -1034,8 +1218,10 @@ class Actions:
1034
1218
  """
1035
1219
 
1036
1220
  # Construct the curl command.
1037
- clear_cache_cmd = (f"curl -s {self.config['server']['url']} "
1038
- f"--data-urlencode \"cmd=clear-cache\"")
1221
+ clear_cache_cmd = (
1222
+ f"curl -s {self.config['server']['url']} "
1223
+ f"--data-urlencode \"cmd=clear-cache\""
1224
+ )
1039
1225
 
1040
1226
  # Show it.
1041
1227
  self.show(clear_cache_cmd, only_show)
@@ -1044,8 +1230,9 @@ class Actions:
1044
1230
 
1045
1231
  # Execute it.
1046
1232
  try:
1047
- subprocess.run(clear_cache_cmd, shell=True,
1048
- stdout=subprocess.DEVNULL)
1233
+ subprocess.run(
1234
+ clear_cache_cmd, shell=True, stdout=subprocess.DEVNULL
1235
+ )
1049
1236
  print("Cache cleared (only unpinned entries)")
1050
1237
  print()
1051
1238
  self.action_cache_stats_and_settings(only_show)
@@ -1060,10 +1247,12 @@ class Actions:
1060
1247
  """
1061
1248
 
1062
1249
  # Construct the curl command.
1063
- access_token = self.config['server']['access_token']
1064
- clear_cache_cmd = (f"curl -s {self.config['server']['url']} "
1065
- f"--data-urlencode \"cmd=clear-cache-complete\" "
1066
- f"--data-urlencode \"access-token={access_token}\"")
1250
+ access_token = self.config["server"]["access_token"]
1251
+ clear_cache_cmd = (
1252
+ f"curl -s {self.config['server']['url']} "
1253
+ f"--data-urlencode \"cmd=clear-cache-complete\" "
1254
+ f"--data-urlencode \"access-token={access_token}\""
1255
+ )
1067
1256
 
1068
1257
  # Show it.
1069
1258
  self.show(clear_cache_cmd, only_show)
@@ -1072,8 +1261,9 @@ class Actions:
1072
1261
 
1073
1262
  # Execute it.
1074
1263
  try:
1075
- subprocess.run(clear_cache_cmd, shell=True,
1076
- stdout=subprocess.DEVNULL)
1264
+ subprocess.run(
1265
+ clear_cache_cmd, shell=True, stdout=subprocess.DEVNULL
1266
+ )
1077
1267
  print("Cache cleared (both pinned and unpinned entries)")
1078
1268
  print()
1079
1269
  self.action_cache_stats_and_settings(only_show)
@@ -1095,7 +1285,7 @@ class Actions:
1095
1285
  access_token_ui = "top-secret"
1096
1286
  config_name = self.config["ui"]["config"]
1097
1287
  warmup_url = f"{self.config['ui']['url']}/warmup/{config_name}"
1098
- curl_cmd = (f"curl -s {warmup_url}/queries?token={access_token_ui}")
1288
+ curl_cmd = f"curl -s {warmup_url}/queries?token={access_token_ui}"
1099
1289
 
1100
1290
  # Show it.
1101
1291
  self.show(f"Pin warmup queries obtained via: {curl_cmd}", only_show)
@@ -1114,22 +1304,27 @@ class Actions:
1114
1304
  first = True
1115
1305
  timeout = "300s"
1116
1306
  access_token = self.config["server"]["access_token"]
1117
- for description, query in [line.split("\t") for line in
1118
- queries.decode("utf-8").split("\n")]:
1307
+ for description, query in [
1308
+ line.split("\t") for line in queries.decode("utf-8").split("\n")
1309
+ ]:
1119
1310
  if first:
1120
1311
  first = False
1121
1312
  else:
1122
1313
  log.info("")
1123
1314
  log.info(f"{BOLD}Pin query: {description}{NORMAL}")
1124
- pin_cmd = (f"curl -s {self.config['server']['url']}/api "
1125
- f"-H \"{header}\" "
1126
- f"--data-urlencode query={shlex.quote(query)} "
1127
- f"--data-urlencode timeout={timeout} "
1128
- f"--data-urlencode access-token={access_token} "
1129
- f"--data-urlencode pinresult=true "
1130
- f"--data-urlencode send=0")
1131
- clear_cache_cmd = (f"curl -s {self.config['server']['url']} "
1132
- f"--data-urlencode \"cmd=clear-cache\"")
1315
+ pin_cmd = (
1316
+ f"curl -s {self.config['server']['url']}/api "
1317
+ f"-H \"{header}\" "
1318
+ f"--data-urlencode query={shlex.quote(query)} "
1319
+ f"--data-urlencode timeout={timeout} "
1320
+ f"--data-urlencode access-token={access_token} "
1321
+ f"--data-urlencode pinresult=true "
1322
+ f"--data-urlencode send=0"
1323
+ )
1324
+ clear_cache_cmd = (
1325
+ f"curl -s {self.config['server']['url']} "
1326
+ f"--data-urlencode \"cmd=clear-cache\""
1327
+ )
1133
1328
  log.info(pin_cmd)
1134
1329
  # Launch query and show the `resultsize` of the JSON response.
1135
1330
  try:
@@ -1140,8 +1335,9 @@ class Actions:
1140
1335
  raise Exception(json_result["exception"])
1141
1336
  log.info(f"Result size: {json_result['resultsize']:,}")
1142
1337
  log.info(clear_cache_cmd)
1143
- subprocess.check_output(clear_cache_cmd, shell=True,
1144
- stderr=subprocess.DEVNULL)
1338
+ subprocess.check_output(
1339
+ clear_cache_cmd, shell=True, stderr=subprocess.DEVNULL
1340
+ )
1145
1341
  except Exception as e:
1146
1342
  log.error(f"Query failed: {e}")
1147
1343
 
@@ -1159,12 +1355,14 @@ class Actions:
1159
1355
  curl_cmd = f"curl -s {example_queries_url}"
1160
1356
 
1161
1357
  # Show what the action does.
1162
- self.show(f"Launch example queries obtained via: {curl_cmd}\n"
1163
- f"SPARQL endpoint: {self.config['server']['url']}\n"
1164
- f"Clearing the cache before each query\n"
1165
- f"Using send={example_queries_send} and limit="
1166
- f"{example_queries_limit}",
1167
- only_show)
1358
+ self.show(
1359
+ f"Launch example queries obtained via: {curl_cmd}\n"
1360
+ f"SPARQL endpoint: {self.config['server']['url']}\n"
1361
+ f"Clearing the cache before each query\n"
1362
+ f"Using send={example_queries_send} and limit="
1363
+ f"{example_queries_limit}",
1364
+ only_show,
1365
+ )
1168
1366
  if only_show:
1169
1367
  return
1170
1368
 
@@ -1179,19 +1377,27 @@ class Actions:
1179
1377
  count = 0
1180
1378
  total_time_seconds = 0.0
1181
1379
  total_result_size = 0
1182
- for description, query in [line.split("\t") for line in
1183
- queries.decode("utf-8").splitlines()]:
1380
+ for description, query in [
1381
+ line.split("\t") for line in queries.decode("utf-8").splitlines()
1382
+ ]:
1184
1383
  # Launch query and show the `resultsize` of the JSON response.
1185
- clear_cache_cmd = (f"curl -s {self.config['server']['url']} "
1186
- f"--data-urlencode cmd=clear-cache")
1187
- query_cmd = (f"curl -s {self.config['server']['url']} "
1188
- f"-H \"Accept: application/qlever-results+json\" "
1189
- f"--data-urlencode query={shlex.quote(query)} "
1190
- f"--data-urlencode send={example_queries_send}")
1384
+ clear_cache_cmd = (
1385
+ f"curl -s {self.config['server']['url']} "
1386
+ f"--data-urlencode cmd=clear-cache"
1387
+ )
1388
+ query_cmd = (
1389
+ f"curl -s {self.config['server']['url']} "
1390
+ f"-H \"Accept: application/qlever-results+json\" "
1391
+ f"--data-urlencode query={shlex.quote(query)} "
1392
+ f"--data-urlencode send={example_queries_send}"
1393
+ )
1191
1394
  try:
1192
- subprocess.run(clear_cache_cmd, shell=True,
1193
- stdout=subprocess.DEVNULL,
1194
- stderr=subprocess.DEVNULL)
1395
+ subprocess.run(
1396
+ clear_cache_cmd,
1397
+ shell=True,
1398
+ stdout=subprocess.DEVNULL,
1399
+ stderr=subprocess.DEVNULL,
1400
+ )
1195
1401
  start_time = time.time()
1196
1402
  result = subprocess.check_output(query_cmd, shell=True)
1197
1403
  time_seconds = time.time() - start_time
@@ -1203,15 +1409,18 @@ class Actions:
1203
1409
  except Exception as e:
1204
1410
  time_seconds = 0.0
1205
1411
  result_size = 0
1206
- result_string = (f"{RED} FAILED{NORMAL}"
1207
- f" {RED}({e}){NORMAL}")
1412
+ result_string = (
1413
+ f"{RED} FAILED{NORMAL}" f" {RED}({e}){NORMAL}"
1414
+ )
1208
1415
 
1209
1416
  # Print description, time, result in tabular form.
1210
1417
  log.debug(query)
1211
- if (len(description) > 60):
1418
+ if len(description) > 60:
1212
1419
  description = description[:57] + "..."
1213
- log.info(f"{description:<60} {time_seconds:6.2f} s "
1214
- f"{result_string}")
1420
+ log.info(
1421
+ f"{description:<60} {time_seconds:6.2f} s "
1422
+ f"{result_string}"
1423
+ )
1215
1424
  count += 1
1216
1425
  total_time_seconds += time_seconds
1217
1426
  total_result_size += result_size
@@ -1220,14 +1429,20 @@ class Actions:
1220
1429
 
1221
1430
  # Print total time.
1222
1431
  log.info("")
1223
- description = (f"TOTAL for {count} "
1224
- f"{'query' if count == 1 else 'queries'}")
1225
- log.info(f"{description:<60} {total_time_seconds:6.2f} s "
1226
- f"{total_result_size:>14,}")
1227
- description = (f"AVERAGE for {count} "
1228
- f"{'query' if count == 1 else 'queries'}")
1229
- log.info(f"{description:<60} {total_time_seconds / count:6.2f} s "
1230
- f"{round(total_result_size / count):>14,}")
1432
+ description = (
1433
+ f"TOTAL for {count} " f"{'query' if count == 1 else 'queries'}"
1434
+ )
1435
+ log.info(
1436
+ f"{description:<60} {total_time_seconds:6.2f} s "
1437
+ f"{total_result_size:>14,}"
1438
+ )
1439
+ description = (
1440
+ f"AVERAGE for {count} " f"{'query' if count == 1 else 'queries'}"
1441
+ )
1442
+ log.info(
1443
+ f"{description:<60} {total_time_seconds / count:6.2f} s "
1444
+ f"{round(total_result_size / count):>14,}"
1445
+ )
1231
1446
 
1232
1447
  @track_action_rank
1233
1448
  def action_memory_profile(self, only_show=False):
@@ -1237,8 +1452,11 @@ class Actions:
1237
1452
  """
1238
1453
 
1239
1454
  # Show what the action does.
1240
- self.show("Poll memory usage of the given process every second "
1241
- "and print it to a file", only_show)
1455
+ self.show(
1456
+ "Poll memory usage of the given process every second "
1457
+ "and print it to a file",
1458
+ only_show,
1459
+ )
1242
1460
  if only_show:
1243
1461
  return
1244
1462
 
@@ -1249,8 +1467,10 @@ class Actions:
1249
1467
  pid = int(self.config["general"]["pid"])
1250
1468
  proc = psutil.Process(pid)
1251
1469
  except Exception as e:
1252
- raise ActionException(f"Could not obtain information for process "
1253
- f"with PID {pid} ({e})")
1470
+ raise ActionException(
1471
+ f"Could not obtain information for process "
1472
+ f"with PID {pid} ({e})"
1473
+ )
1254
1474
  self.show_process_info(proc, "", show_heading=True)
1255
1475
  log.info("")
1256
1476
 
@@ -1282,15 +1502,17 @@ class Actions:
1282
1502
  if "pid" not in self.config["general"]:
1283
1503
  raise ActionException("PID must be specified via general.PID")
1284
1504
  pid = int(self.config["general"]["pid"])
1285
- gnuplot_script = (f"set datafile separator \"\t\"; "
1286
- f"set xdata time; "
1287
- f"set timefmt \"%Y-%m-%d %H:%M:%S\"; "
1288
- f"set xlabel \"Time\"; "
1289
- f"set ylabel \"Memory Usage\"; "
1290
- f"set grid; "
1291
- f"plot \"{pid}.memory-usage.tsv\" "
1292
- f"using 1:2 with lines; "
1293
- f"pause -1")
1505
+ gnuplot_script = (
1506
+ f'set datafile separator "\t"; '
1507
+ f"set xdata time; "
1508
+ f'set timefmt "%Y-%m-%d %H:%M:%S"; '
1509
+ f'set xlabel "Time"; '
1510
+ f'set ylabel "Memory Usage"; '
1511
+ f"set grid; "
1512
+ f'plot "{pid}.memory-usage.tsv" '
1513
+ f"using 1:2 with lines; "
1514
+ f"pause -1"
1515
+ )
1294
1516
  gnuplot_cmd = f"gnuplot -e {shlex.quote(gnuplot_script)}"
1295
1517
 
1296
1518
  # Show it.
@@ -1316,8 +1538,9 @@ def setup_autocompletion_cmd():
1316
1538
  # their appearance in the class (see the `@track_action_rank` decorator).
1317
1539
  methods = inspect.getmembers(Actions, predicate=inspect.isfunction)
1318
1540
  methods = [m for m in methods if m[0].startswith("action_")]
1319
- action_names = sorted([m[0] for m in methods],
1320
- key=lambda m: getattr(Actions, m).rank)
1541
+ action_names = sorted(
1542
+ [m[0] for m in methods], key=lambda m: getattr(Actions, m).rank
1543
+ )
1321
1544
  action_names = [_.replace("action_", "") for _ in action_names]
1322
1545
  action_names = [_.replace("_", "-") for _ in action_names]
1323
1546
  action_names = " ".join(action_names)
@@ -1352,22 +1575,30 @@ def main():
1352
1575
  version = "unknown"
1353
1576
  # If the script is called without argument, say hello and provide some
1354
1577
  # help to get started.
1355
- if len(sys.argv) == 1 or \
1356
- (len(sys.argv) == 2 and sys.argv[1] == "help") or \
1357
- (len(sys.argv) == 2 and sys.argv[1] == "--help") or \
1358
- (len(sys.argv) == 2 and sys.argv[1] == "-h"):
1578
+ if (
1579
+ len(sys.argv) == 1
1580
+ or (len(sys.argv) == 2 and sys.argv[1] == "help")
1581
+ or (len(sys.argv) == 2 and sys.argv[1] == "--help")
1582
+ or (len(sys.argv) == 2 and sys.argv[1] == "-h")
1583
+ ):
1359
1584
  log.info("")
1360
- log.info(f"{BOLD}Hello, I am the OLD qlever script"
1361
- f" (version {version}){NORMAL}")
1585
+ log.info(
1586
+ f"{BOLD}Hello, I am the OLD qlever script"
1587
+ f" (version {version}){NORMAL}"
1588
+ )
1362
1589
  log.info("")
1363
1590
  if os.path.exists("Qleverfile"):
1364
- log.info("I see that you already have a \"Qleverfile\" in the "
1365
- "current directory, so you are ready to start")
1591
+ log.info(
1592
+ 'I see that you already have a "Qleverfile" in the '
1593
+ "current directory, so you are ready to start"
1594
+ )
1366
1595
  log.info("")
1367
1596
  show_available_action_names()
1368
1597
  else:
1369
- log.info("You need a Qleverfile in the current directory, which "
1370
- "you can create as follows:")
1598
+ log.info(
1599
+ "You need a Qleverfile in the current directory, which "
1600
+ "you can create as follows:"
1601
+ )
1371
1602
  log.info("")
1372
1603
  log.info(f"{BLUE}qlever-old setup-config <config name>{NORMAL}")
1373
1604
  log.info("")
@@ -1390,8 +1621,9 @@ def main():
1390
1621
  # take the log level from the config file).
1391
1622
  log.setLevel(logging.NOTSET)
1392
1623
  if len(sys.argv) > 1:
1393
- set_log_level_match = re.match(r"general.log_level=(\w+)",
1394
- sys.argv[1], re.IGNORECASE)
1624
+ set_log_level_match = re.match(
1625
+ r"general.log_level=(\w+)", sys.argv[1], re.IGNORECASE
1626
+ )
1395
1627
  if set_log_level_match:
1396
1628
  log_level = set_log_level_match.group(1).upper()
1397
1629
  sys.argv = sys.argv[1:]
@@ -1401,13 +1633,13 @@ def main():
1401
1633
  log.debug(f"Log level set to {log_level}")
1402
1634
  log.debug("")
1403
1635
  except AttributeError:
1404
- log.error(f"Invalid log level: \"{log_level}\"")
1636
+ log.error(f'Invalid log level: "{log_level}"')
1405
1637
  abort_script()
1406
1638
 
1407
1639
  # Helper function that executes an action.
1408
1640
  def execute_action(actions, action_name, **kwargs):
1409
1641
  log.info("")
1410
- log.info(f"{BOLD}Action \"{action_name}\"{NORMAL}")
1642
+ log.info(f'{BOLD}Action "{action_name}"{NORMAL}')
1411
1643
  log.info("")
1412
1644
  action = f"action_{action_name.replace('-', '_')}"
1413
1645
  try:
@@ -1417,8 +1649,10 @@ def main():
1417
1649
  abort_script()
1418
1650
  except Exception as err:
1419
1651
  line = traceback.extract_tb(err.__traceback__)[-1].lineno
1420
- print(f"{RED}Error in Python script (line {line}: {err})"
1421
- f", stack trace follows:{NORMAL}")
1652
+ print(
1653
+ f"{RED}Error in Python script (line {line}: {err})"
1654
+ f", stack trace follows:{NORMAL}"
1655
+ )
1422
1656
  print()
1423
1657
  raise err
1424
1658
 
@@ -1431,8 +1665,10 @@ def main():
  abort_script()
  if len(sys.argv) > 3:
  log.setLevel(logging.ERROR)
- log.error("Action `setup-config` must be followed by at most one "
- "argument (the name of the desied configuration)")
+ log.error(
+ "Action `setup-config` must be followed by at most one "
+ "argument (the name of the desied configuration)"
+ )
  abort_script()
  log.setLevel(logging.INFO)
  config_name = sys.argv[2] if len(sys.argv) == 3 else "default"
@@ -1465,8 +1701,10 @@ def main():
  continue
  # If the action name does not exist, exit.
  if action_name not in action_names:
- log.error(f"Action \"{action_name}\" does not exist, available "
- f"actions are: {', '.join(action_names)}")
+ log.error(
+ f"Action \"{action_name}\" does not exist, available "
+ f"actions are: {', '.join(action_names)}"
+ )
  abort_script()
  # Execute the action (or only show what would be executed).
  execute_action(actions, action_name, only_show=only_show)