squad 1.92__py3-none-any.whl → 1.93.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -10,6 +10,7 @@ from requests.adapters import HTTPAdapter, Retry
  from functools import reduce
  from urllib.parse import urljoin
 
+ from cryptography.exceptions import InvalidSignature
  from cryptography.hazmat.primitives.asymmetric import ec
  from cryptography.hazmat.primitives import (
      hashes,
@@ -481,22 +482,18 @@ class Backend(BaseBackend):
          if public_key is None:
              raise Exception("missing tuxsuite public key for this project")
 
-         payload = json.loads(request.body)
          signature = base64.urlsafe_b64decode(signature)
          key = serialization.load_ssh_public_key(public_key.encode("ascii"))
-         key.verify(
-             signature,
-             payload.encode("utf-8"),
-             ec.ECDSA(hashes.SHA256()),
-         )
+         try:
+             key.verify(
+                 signature,
+                 request.body,
+                 ec.ECDSA(hashes.SHA256()),
+             )
+         except InvalidSignature:
+             raise Exception("Failed to verify signature against payload")
 
      def process_callback(self, json_payload, build, environment, backend):
-         # The payload coming from Tuxsuite is formatted as bytes,
-         # so after the first json.loads(request.body), the result
-         # will still be a string containing the actual json document
-         # We need to call json.loads() once more to get the actual
-         # python dict containing all the information we need
-         json_payload = json.loads(json_payload)
          if "kind" not in json_payload or "status" not in json_payload:
              raise Exception("`kind` and `status` are required in the payload")
 
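The hunk above now verifies the ECDSA signature against the raw request body and turns cryptography's InvalidSignature into an explicit error instead of letting it propagate. A minimal standalone sketch of the same verification pattern with the cryptography library (the helper name and arguments are illustrative, not squad's API):

    import base64

    from cryptography.exceptions import InvalidSignature
    from cryptography.hazmat.primitives import hashes, serialization
    from cryptography.hazmat.primitives.asymmetric import ec

    def verify_callback(public_key_ssh, signature_b64, body):
        # body must be the raw request bytes, not a re-serialized JSON string
        key = serialization.load_ssh_public_key(public_key_ssh.encode("ascii"))
        signature = base64.urlsafe_b64decode(signature_b64)
        try:
            key.verify(signature, body, ec.ECDSA(hashes.SHA256()))
            return True
        except InvalidSignature:
            return False
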
squad/ci/models.py CHANGED
@@ -138,10 +138,8 @@ class Backend(models.Model):
                  completed=completed,
              )
              test_job.testrun = testrun
-         except InvalidMetadata as exception:
+         except (DuplicatedTestJob, InvalidMetadata) as exception:
              test_job.failure = str(exception)
-         except DuplicatedTestJob as exception:
-             logger.error('Failed to fetch test_job(%d): "%s"' % (test_job.id, str(exception)))
 
          if test_job.needs_postprocessing():
              # Offload postprocessing plugins to a new task
@@ -0,0 +1,44 @@
+ from django.core.management.base import BaseCommand
+
+ from squad.plugins.linux_log_parser import Plugin as BootTestLogParser
+ from squad.plugins.linux_log_parser_build import Plugin as BuildLogParser
+
+
+ class FakeTestRun:
+     log_file = None
+     id = None
+
+
+ log_parsers = {
+     'linux_log_parser_boot_test': BootTestLogParser(),
+     "linux_log_parser_build": BuildLogParser(),
+ }
+
+
+ class Command(BaseCommand):
+
+     help = """Run a log parser and print the outputs to the stdout."""
+
+     def add_arguments(self, parser):
+
+         parser.add_argument(
+             "LOG_FILE",
+             help="Log file to parser",
+         )
+
+         parser.add_argument(
+             "LOG_PARSER",
+             choices=log_parsers.keys(),
+             help="Which log parser to run"
+         )
+
+     def handle(self, *args, **options):
+         self.options = options
+
+         with open(options["LOG_FILE"], "r") as f:
+             log_file = f.read()
+
+         testrun = FakeTestRun()
+         testrun.log_file = log_file
+         parser = log_parsers[options["LOG_PARSER"]]
+         parser.postprocess_testrun(testrun, squad=False, print=True)
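The hunk above adds a new management command (squad/core/management/commands/run_log_parser.py, per the RECORD changes further down) that runs a log parser over a plain log file and prints the results to stdout instead of writing to the database (squad=False, print=True). A typical invocation, assuming a checkout with manage.py and a local log file (the paths are placeholders):

    python manage.py run_log_parser /path/to/build.log linux_log_parser_build
    python manage.py run_log_parser /path/to/boot.log linux_log_parser_boot_test
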
squad/core/models.py CHANGED
@@ -493,6 +493,9 @@ class Build(models.Model):
          ordering = ['datetime']
 
      def save(self, *args, **kwargs):
+         # Initialize this to timezone.now(), then if a testrun is seen with an
+         # earlier datetime, keep this value up to date with the earliest
+         # testrun.datetime (handled in ReceiveTestRun.__call__).
          if not self.datetime:
              self.datetime = timezone.now()
          with transaction.atomic():
@@ -577,12 +580,17 @@ class Build(models.Model):
          List of attachments from all testruns
          """
          if self.__attachments__ is None:
+             test_run_ids = self.test_runs.values_list('id', flat=True)
+             all_attachments = Attachment.objects.filter(test_run_id__in=test_run_ids).values(
+                 'test_run_id', 'filename'
+             )
+
              attachments = {}
-             for test_run in self.test_runs.all():
-                 attachments[test_run.pk] = []
-                 for attachment in test_run.attachments.all():
-                     attachments[test_run.pk].append(attachment.filename)
+             for attachment in all_attachments:
+                 attachments.setdefault(attachment['test_run_id'], []).append(attachment['filename'])
+
              self.__attachments__ = attachments
+
          return self.__attachments__
 
      @property
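The attachments hunk above replaces one query per test run with a single Attachment query across all of the build's test runs, grouped in Python. A small standalone sketch of the grouping step, using made-up rows shaped like the .values('test_run_id', 'filename') result:

    rows = [
        {"test_run_id": 1, "filename": "boot.log"},
        {"test_run_id": 1, "filename": "kernel.config"},
        {"test_run_id": 2, "filename": "lava.yaml"},
    ]

    attachments = {}
    for row in rows:
        attachments.setdefault(row["test_run_id"], []).append(row["filename"])

    # attachments == {1: ['boot.log', 'kernel.config'], 2: ['lava.yaml']}
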
@@ -801,6 +809,9 @@ class TestRun(models.Model):
          unique_together = ('build', 'job_id')
 
      def save(self, *args, **kwargs):
+         # testrun.datetime will take datetime from the metadata if it exists
+         # (during ReceiveTestRun.__call__). If datetime is not in the metadata,
+         # set it to timezone.now()
          if not self.datetime:
              self.datetime = timezone.now()
          if self.__metadata__:
@@ -178,6 +178,8 @@ class ReceiveTestRun(object):
 
          testrun.refresh_from_db()
 
+         # This keeps the datetime of the build in line with the earliest
+         # observed testrun.datetime.
          if not build.datetime or testrun.datetime < build.datetime:
              build.datetime = testrun.datetime
              build.save()
@@ -16,17 +16,44 @@ square_brackets_and_contents = r"\[[^\]]+\]"
 
  class BaseLogParser:
      def compile_regexes(self, regexes):
-         combined = [r"(%s)" % r[REGEX_BODY] for r in regexes]
-         return re.compile(r"|".join(combined), re.S | re.M)
+         with_brackets = [r"(%s)" % r[REGEX_BODY] for r in regexes]
+         combined = r"|".join(with_brackets)
+
+         # In the case where there is only one regex, we need to add extra
+         # bracket around it for it to behave the same as the multiple regex
+         # case
+         if len(regexes) == 1:
+             combined = f"({combined})"
+
+         return re.compile(combined, re.S | re.M)
 
      def remove_numbers_and_time(self, snippet):
-         # [ 1067.461794][ T132] BUG: KCSAN: data-race in do_page_fault spectre_v4_enable_task_mitigation
-         # -> [ .][ T] BUG: KCSAN: data-race in do_page_fault spectre_v_enable_task_mitigation
-         without_numbers = re.sub(r"(0x[a-f0-9]+|[<\[][0-9a-f]+?[>\]]|\d+)", "", snippet)
+         # [ 92.236941] CPU: 1 PID: 191 Comm: kunit_try_catch Tainted: G W 5.15.75-rc1 #1
+         # <4>[ 87.925462] CPU: 0 PID: 135 Comm: (crub_all) Not tainted 6.7.0-next-20240111 #14
+         # Remove '(Not t|T)ainted', to the end of the line.
+         without_tainted = re.sub(r"(Not t|T)ainted.*", "", snippet)
+
+         # x23: ffff9b7275bc6f90 x22: ffff9b7275bcfb50 x21: fff00000cc80ef88
+         # x20: 1ffff00010668fb8 x19: ffff8000800879f0 x18: 00000000805c0b5c
+         # Remove words with hex numbers.
+         # <3>[ 2.491276][ T1] BUG: KCSAN: data-race in console_emit_next_record / console_trylock_spinning
+         # -> <>[ .][ T1] BUG: KCSAN: data-race in console_emit_next_record / console_trylock_spinning
+         without_hex = re.sub(r"\b(?:0x)?[a-fA-F0-9]+\b", "", without_tainted)
+
+         # <>[ 1067.461794][ T132] BUG: KCSAN: data-race in do_page_fault spectre_v4_enable_task_mitigation
+         # -> <>[ .][ T132] BUG: KCSAN: data-race in do_page_fault spectre_v_enable_task_mitigation
+         # But should not remove numbers from functions.
+         without_numbers = re.sub(
+             r"(0x[a-f0-9]+|[<\[][0-9a-f]+?[>\]]|\b\d+\b(?!\s*\())", "", without_hex
+         )
 
-         # [ .][ T] BUG: KCSAN: data-race in do_page_fault spectre_v_enable_task_mitigation
+         # <>[ .][ T132] BUG: KCSAN: data-race in do_page_fault spectre_v_enable_task_mitigation
          # -> BUG: KCSAN: data-race in do_page_fault spectre_v_enable_task_mitigation
-         without_time = re.sub(f"^{square_brackets_and_contents}({square_brackets_and_contents})?", "", without_numbers) # noqa
+         without_time = re.sub(
+             f"^<?>?{square_brackets_and_contents}({square_brackets_and_contents})?",
+             "",
+             without_numbers,
+         ) # noqa
 
          return without_time
 
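The extra pair of parentheses added in compile_regexes matters because re.findall changes its return shape with the number of capture groups: a single group yields plain strings, while two or more yield tuples, which is what join_matches indexes by regex id. A quick illustration (the sample strings are made up):

    >>> import re
    >>> re.findall(r"(error: \w+)", "error: foo\nerror: bar")
    ['error: foo', 'error: bar']
    >>> re.findall(r"((error: \w+))", "error: foo\nerror: bar")
    [('error: foo', 'error: foo'), ('error: bar', 'error: bar')]
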
@@ -41,10 +68,7 @@ class BaseLogParser:
          snippet = matches[0]
          without_numbers_and_time = self.remove_numbers_and_time(snippet)
 
-         # Limit the name length to 191 characters, since the max name length
-         # for SuiteMetadata in SQUAD is 256 characters. The SHA and "-" take 65
-         # characters: 256-65=191
-         return slugify(without_numbers_and_time)[:191]
+         return slugify(without_numbers_and_time)
 
      def create_shasum(self, snippet):
          sha = hashlib.sha256()
@@ -52,7 +76,7 @@ class BaseLogParser:
          sha.update(without_numbers_and_time.encode())
          return sha.hexdigest()
 
-     def create_name_log_dict(self, test_name, lines, test_regex=None):
+     def create_name_log_dict(self, test_name, lines, test_regex=None, create_shas=True):
          """
          Produce a dictionary with the test names as keys and the extracted logs
          for that test name as values. There will be at least one test name per
@@ -64,31 +88,43 @@ class BaseLogParser:
          # have any output for a particular regex, just use the default name
          # (for example "check-kernel-oops").
          tests_without_shas_to_create = defaultdict(set)
-         tests_with_shas_to_create = defaultdict(set)
+         tests_with_shas_to_create = None
 
          # If there are lines, then create the tests for these.
          for line in lines:
              extracted_name = self.create_name(line, test_regex)
              if extracted_name:
-                 extended_test_name = f"{test_name}-{extracted_name}"
+                 max_name_length = 256
+                 # If adding SHAs, limit the name length to 191 characters,
+                 # since the max name length for SuiteMetadata in SQUAD is 256
+                 # characters. The SHA and "-" take 65 characters: 256-65=191
+                 if create_shas:
+                     max_name_length -= 65
+                 extended_test_name = f"{test_name}-{extracted_name}"[:max_name_length]
              else:
                  extended_test_name = test_name
              tests_without_shas_to_create[extended_test_name].add(line)
 
-         for name, test_lines in tests_without_shas_to_create.items():
-             # Some lines of the matched regex might be the same, and we don't want to create
-             # multiple tests like test1-sha1, test1-sha1, etc, so we'll create a set of sha1sums
-             # then create only new tests for unique sha's
+         if create_shas:
+             tests_with_shas_to_create = defaultdict(set)
+             for name, test_lines in tests_without_shas_to_create.items():
+                 # Some lines of the matched regex might be the same, and we don't want to create
+                 # multiple tests like test1-sha1, test1-sha1, etc, so we'll create a set of sha1sums
+                 # then create only new tests for unique sha's
 
-             for line in test_lines:
-                 sha = self.create_shasum(line)
-                 name_with_sha = f"{name}-{sha}"
-                 tests_with_shas_to_create[name_with_sha].add(line)
+                 for line in test_lines:
+                     sha = self.create_shasum(line)
+                     name_with_sha = f"{name}-{sha}"
+                     tests_with_shas_to_create[name_with_sha].add(line)
 
          return tests_without_shas_to_create, tests_with_shas_to_create
 
      def create_squad_tests_from_name_log_dict(
-         self, suite, testrun, tests_without_shas_to_create, tests_with_shas_to_create
+         self,
+         suite_name,
+         testrun,
+         tests_without_shas_to_create,
+         tests_with_shas_to_create=None,
      ):
          # Import SuiteMetadata from SQUAD only when required so BaseLogParser
          # does not require a SQUAD to work. This makes it easier to reuse this
@@ -96,6 +132,8 @@ class BaseLogParser:
          # patterns.
          from squad.core.models import SuiteMetadata
 
+         suite, _ = testrun.build.project.suites.get_or_create(slug=suite_name)
+
          for name, lines in tests_without_shas_to_create.items():
              metadata, _ = SuiteMetadata.objects.get_or_create(
                  suite=suite.slug, name=name, kind="test"
@@ -108,34 +146,72 @@ class BaseLogParser:
                  build=testrun.build,
                  environment=testrun.environment,
              )
-         for name_with_sha, lines in tests_with_shas_to_create.items():
-             metadata, _ = SuiteMetadata.objects.get_or_create(
-                 suite=suite.slug, name=name_with_sha, kind="test"
-             )
-             testrun.tests.create(
-                 suite=suite,
-                 result=False,
-                 log="\n---\n".join(lines),
-                 metadata=metadata,
-                 build=testrun.build,
-                 environment=testrun.environment,
-             )
-
-     def create_squad_tests(self, testrun, suite, test_name, lines, test_regex=None):
+         if tests_with_shas_to_create:
+             for name_with_sha, lines in tests_with_shas_to_create.items():
+                 metadata, _ = SuiteMetadata.objects.get_or_create(
+                     suite=suite.slug, name=name_with_sha, kind="test"
+                 )
+                 testrun.tests.create(
+                     suite=suite,
+                     result=False,
+                     log="\n---\n".join(lines),
+                     metadata=metadata,
+                     build=testrun.build,
+                     environment=testrun.environment,
+                 )
+
+     def print_squad_tests_from_name_log_dict(
+         self,
+         suite_name,
+         tests_without_shas_to_create,
+         tests_with_shas_to_create=None,
+     ):
+         for name, lines in tests_without_shas_to_create.items():
+             print(f"\nName: {suite_name}/{name}")
+             log = "\n".join(lines)
+             print(f"Log:\n{log}")
+
+         if tests_with_shas_to_create:
+             for name_with_sha, lines in tests_with_shas_to_create.items():
+                 print(f"\nName: {suite_name}/{name_with_sha}")
+                 log = "\n---\n".join(lines)
+                 print(f"Log:\n{log}")
+
+     def create_squad_tests(
+         self,
+         testrun,
+         suite_name,
+         test_name,
+         lines,
+         test_regex=None,
+         create_shas=True,
+         print=False,
+         squad=True,
+     ):
          """
          There will be at least one test per regex. If there were any match for
          a given regex, then a new test will be generated using test_name +
          shasum. This helps comparing kernel logs across different builds
          """
+
          tests_without_shas_to_create, tests_with_shas_to_create = (
-             self.create_name_log_dict(test_name, lines, test_regex)
-         )
-         self.create_squad_tests_from_name_log_dict(
-             suite,
-             testrun,
-             tests_without_shas_to_create,
-             tests_with_shas_to_create,
+             self.create_name_log_dict(
+                 test_name, lines, test_regex, create_shas=create_shas
+             )
          )
+         if print:
+             self.print_squad_tests_from_name_log_dict(
+                 suite_name,
+                 tests_without_shas_to_create,
+                 tests_with_shas_to_create,
+             )
+         if squad:
+             self.create_squad_tests_from_name_log_dict(
+                 suite_name,
+                 testrun,
+                 tests_without_shas_to_create,
+                 tests_with_shas_to_create,
+             )
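The name-length bookkeeping above follows from the SHA suffix format: create_shasum returns a sha256 hexdigest (64 characters) and the joining "-" adds one more, so SHA-suffixed names reserve 65 of SuiteMetadata's 256-character limit (256 - 65 = 191), while names built with create_shas=False may use the full 256. A minimal sketch of how a deduplicated, SHA-suffixed name is formed (the snippet and base name are illustrative):

    import hashlib

    sanitized = "BUG: KCSAN: data-race in do_page_fault"
    sha = hashlib.sha256(sanitized.encode()).hexdigest()  # 64 hex characters
    name_with_sha = f"check-kernel-oops-{sha}"
    # identical sanitized snippets hash to the same name, so repeated lines
    # collapse into a single test instead of test1-sha1, test1-sha1, ...
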
 
      def join_matches(self, matches, regexes):
          """
@@ -44,8 +44,9 @@ class Plugin(BasePlugin, BaseLogParser):
          kernel_msgs = re.findall(f'({tstamp}{pid}? .*?)$', log, re.S | re.M) # noqa
          return '\n'.join(kernel_msgs)
 
-     def postprocess_testrun(self, testrun):
-         if testrun.log_file is None:
+     def postprocess_testrun(self, testrun, squad=True, print=False):
+         # If running as a SQUAD plugin, only run the boot/test log parser if this is not a build testrun
+         if testrun.log_file is None or (squad and testrun.tests.filter(suite__slug="build").exists()):
              return
 
          boot_log, test_log = self.__cutoff_boot_log(testrun.log_file)
@@ -56,7 +57,7 @@ class Plugin(BasePlugin, BaseLogParser):
 
          for log_type, log in logs.items():
              log = self.__kernel_msgs_only(log)
-             suite, _ = testrun.build.project.suites.get_or_create(slug=f'log-parser-{log_type}')
+             suite_name = f'log-parser-{log_type}'
 
              regex = self.compile_regexes(REGEXES)
              matches = regex.findall(log)
@@ -68,4 +69,4 @@ class Plugin(BasePlugin, BaseLogParser):
              test_name_regex = None
              if regex_pattern:
                  test_name_regex = re.compile(regex_pattern, re.S | re.M)
-             self.create_squad_tests(testrun, suite, test_name, snippets[regex_id], test_name_regex)
+             self.create_squad_tests(testrun, suite_name, test_name, snippets[regex_id], test_name_regex, squad=squad, print=print)
@@ -0,0 +1,334 @@
+ import logging
+ import re
+
+ from django.template.defaultfilters import slugify
+
+ from squad.plugins import Plugin as BasePlugin
+ from squad.plugins.lib.base_log_parser import (
+     REGEX_EXTRACT_NAME,
+     REGEX_NAME,
+     BaseLogParser,
+ )
+
+ logger = logging.getLogger()
+
+ file_path = r"^(?:[^\n]*?:(?:\d+:){2}|<[^\n]*?>:)"
+ gcc_clang_compiler_error_warning = rf"{file_path} (?:error|warning): [^\n]+?\n^(?:\.+\n|^(?!\s+(?:CC|Kernel[^\n]*?is ready))\s+?[^\n]+\n|{file_path} note:[^\n]+\n)*"
+
+ MULTILINERS_GCC = [
+     (
+         "gcc-compiler",
+         gcc_clang_compiler_error_warning,
+         r"^[^\n]*(?:error|warning)[^\n]*$",
+     ),
+ ]
+
+ ONELINERS_GCC = []
+
+
+ MULTILINERS_CLANG = [
+     (
+         "clang-compiler",
+         gcc_clang_compiler_error_warning,
+         r"^[^\n]*(?:error|warning)[^\n]*$",
+     ),
+ ]
+
+ ONELINERS_CLANG = [
+     (
+         "clang-compiler-single-line",
+         "^clang: (?:error|warning).*?$",
+         r"^[^\n]*(?:error|warning).*?$",
+     ),
+     (
+         "clang-compiler-fatal-error",
+         "^fatal error.*?$",
+         r"^fatal error.*?$",
+     ),
+ ]
+
+ MULTILINERS_GENERAL = [
+     (
+         "general-not-a-git-repo",
+         r"^[^\n]*fatal: not a git repository.*?not set\)\.$",
+         r"^[^\n]*fatal: not a git repository.*?$",
+     ),
+     (
+         "general-unexpected-argument",
+         r"^[^\n]*error: Found argument.*?--help$",
+         r"^[^\n]*error: Found argument.*?$",
+     ),
+     (
+         "general-broken-32-bit",
+         r"^[^\n]*Warning: you seem to have a broken 32-bit build.*?(?:If[^\n]*?try:(?:\n|\s+.+?$)+)+",
+         r"^[^\n]*Warning:.*?$",
+     ),
+     (
+         "general-makefile-overriding",
+         r"^[^\n]*warning: overriding recipe for target.*?ignoring old recipe for target.*?$",
+         r"^[^\n]*warning:.*?$",
+     ),
+     (
+         "general-unmet-dependencies",
+         r"^WARNING: unmet direct dependencies detected for.*?$(?:\n +[^\n]+)*",
+         r"^WARNING: unmet direct dependencies detected for.*?$",
+     ),
+     (
+         "general-ldd",
+         r"^[^\n]*?lld:[^\n]+?(?:warning|error):.*?$(?:\n^>>>[^\n]+)*",
+         r"^[^\n]*?lld:.*?$",
+     ),
+     (
+         "general-ld",
+         r"^[^\n]*?ld:[^\n]+?(?:warning|error):[^\n]*?$(?:\n^[^\n]*?NOTE:[^\n]+)*",
+         r"^[^\n]*?ld:[^\n]+?(?:warning|error):.*?$",
+     ),
+     (
+         "general-objcopy",
+         r"^[^\n]*?objcopy:[^\n]+?(?:warning|error):[^\n]*?$(?:\n^[^\n]*?NOTE:[^\n]+)*",
+         r"^[^\n]*?objcopy:[^\n]+?(?:warning|error):.*?$",
+     ),
+     (
+         "general-ld-undefined-reference",
+         r"^[^\n]*?ld[^\n]*?$\n^[^\n]+undefined reference.*?$",
+         r"^[^\n]+undefined reference.*?$",
+     ),
+     (
+         "general-modpost",
+         r"^[^\n]*?WARNING: modpost:[^\n]*?$(?:\n^To see.*?:$\n^.*?$)?",
+         r"^[^\n]*?WARNING.*?$",
+     ),
+     (
+         "general-python-traceback",
+         r"Traceback.*?^[^\s]+Error: .*?$",
+         r"^[^\s]+Error: .*?$",
+     ),
+ ]
+
+ ONELINERS_GENERAL = [
+     (
+         "general-no-such-file-or-directory",
+         r"^[^\n]+?No such file or directory.*?$",
+         r"^[^\n]+?No such file or directory.*?$",
+     ),
+     (
+         "general-no-targets",
+         r"^[^\n]+?No targets.*?$",
+         r"^[^\n]+?No targets.*?$",
+     ),
+     (
+         "general-no-rule-to-make-target",
+         r"^[^\n]+?No rule to make target.*?$",
+         r"^[^\n]+?No rule to make target.*?$",
+     ),
+     (
+         "general-makefile-config",
+         r"^Makefile.config:\d+:.*?$",
+         r"^Makefile.config:\d+:.*?$",
+     ),
+     (
+         "general-not-found",
+         r"^[^\n]*?not found.*?$",
+         r"^[^\n]*?not found.*?$",
+     ),
+     (
+         "general-kernel-abi",
+         r"^Warning: Kernel ABI header at.*?$",
+         r"^Warning: Kernel ABI header at.*?$",
+     ),
+     (
+         "general-missing",
+         r"^Warning: missing.*?$",
+         r"^Warning: missing.*?$",
+     ),
+     (
+         "general-dtc",
+         r"^[^\n]*?Warning \([^\n]*?\).*?$",
+         r"^[^\n]*?Warning.*?$",
+     ),
+     (
+         "general-register-allocation",
+         r"^[^\n]*?error: register allocation failed.*?$",
+         r"^[^\n]*?error.*?$",
+     ),
+ ]
+
+ # Tip: broader regexes should come first
+ REGEXES_GCC = MULTILINERS_GCC + MULTILINERS_GENERAL + ONELINERS_GCC + ONELINERS_GENERAL
+ REGEXES_CLANG = (
+     MULTILINERS_CLANG + MULTILINERS_GENERAL + ONELINERS_CLANG + ONELINERS_GENERAL
+ )
+
+ supported_toolchains = {
+     "gcc": REGEXES_GCC,
+     "clang": REGEXES_CLANG,
+ }
+
+ make_regex = r"^make .*?$"
+ in_file_regex = r"^In file[^\n]*?[:,]$(?:\n^(?:\s+|In file)[^\n]*?[:,]$)*"
+ in_function_regex = r"^[^\n]*?In function.*?:$"
+ entering_dir_regex = r"^make\[(?:\d+)\]: Entering directory.*?$"
+ leaving_dir_regex = r"^make\[(?:\d+)\]: Leaving directory.*?$"
+
+ split_regex_gcc = rf"(.*?)({make_regex}|{in_file_regex}|{in_function_regex}|{entering_dir_regex}|{leaving_dir_regex})"
+
+
+ class Plugin(BasePlugin, BaseLogParser):
+
+     def post_process_test_name(self, text):
+         # Remove "builds/linux" if there
+         text = re.sub(r"builds/linux", "", text)
+
+         # Change "/" and "." to "_" for readability
+         text = re.sub(r"[/\.]", "_", text)
+
+         # Remove numbers and hex
+         text = re.sub(r"(0x[a-f0-9]+|[<\[][0-9a-f]+?[>\]]|\d+)", "", text)
+
+         # Remove "{...}" and "[...]"
+         text = re.sub(r"\{.+?\}", "", text)
+         text = re.sub(r"\[.+?\]", "", text)
+
+         return text
+
+     def create_name(self, snippet, compiled_regex=None):
+         matches = None
+         if compiled_regex:
+             matches = compiled_regex.findall(snippet)
+         if not matches:
+             # Only extract a name if we provide a regex to extract the name and
+             # there is a match
+             return None
+         snippet = matches[0]
+         without_numbers = re.sub(
+             r"(0x[a-f0-9]+|[<\[][0-9a-f]+?[>\]]|\b\d+\b(?!\s*\())", "", snippet
+         )
+
+         name = slugify(self.post_process_test_name(without_numbers))
+
+         return name
+
+     def split_by_regex(self, log, regex):
+         # Split up the log by the keywords we want to capture
+         s_lines_compiled = re.compile(regex, re.DOTALL | re.MULTILINE)
+         split_by_regex_list = s_lines_compiled.split(log)
+         split_by_regex_list = [
+             f for f in split_by_regex_list if f is not None and f != ""
+         ]
+
+         return split_by_regex_list
+
+     def process_blocks(
+         self,
+         blocks_to_process,
+         regexes,
+         make_regex=make_regex,
+         entering_dir_regex=entering_dir_regex,
+         leaving_dir_regex=leaving_dir_regex,
+         in_file_regex=in_file_regex,
+         in_function_regex=in_function_regex,
+     ):
+         snippets = dict()
+         regex_compiled = self.compile_regexes(regexes)
+         make_regex_compiled = re.compile(make_regex, re.DOTALL | re.MULTILINE)
+         entering_dir_regex_compiled = re.compile(
+             entering_dir_regex, re.DOTALL | re.MULTILINE
+         )
+         leaving_dir_regex_compiled = re.compile(
+             leaving_dir_regex, re.DOTALL | re.MULTILINE
+         )
+         in_file_regex_compiled = re.compile(in_file_regex, re.DOTALL | re.MULTILINE)
+         in_function_regex_compiled = re.compile(
+             in_function_regex, re.DOTALL | re.MULTILINE
+         )
+
+         # For tracking the last piece of information we saw
+         make_command = None
+         entering_dir = None
+         in_file = None
+         in_function = None
+
+         for regex_id in range(len(regexes)):
+             snippets[regex_id] = []
+         for block in blocks_to_process:
+             if make_regex_compiled.match(block):
+                 make_command = block
+                 entering_dir = None
+                 in_file = None
+                 in_function = None
+             elif entering_dir_regex_compiled.match(block):
+                 entering_dir = block
+                 in_file = None
+                 in_function = None
+             elif leaving_dir_regex_compiled.match(block):
+                 entering_dir = None
+                 in_file = None
+                 in_function = None
+             elif in_file_regex_compiled.match(block):
+                 in_file = block
+                 in_function = None
+             elif in_function_regex_compiled.match(block):
+                 in_function = block
+             else:
+                 matches = regex_compiled.findall(block)
+                 sub_snippets = self.join_matches(matches, regexes)
+                 prepend = ""
+                 if make_command:
+                     prepend += make_command + "\n"
+                 if entering_dir:
+                     prepend += entering_dir + "\n"
+                 if in_file:
+                     prepend += in_file + "\n"
+                 if in_function:
+                     prepend += in_function + "\n"
+                 for regex_id in range(len(regexes)):
+                     for s in sub_snippets[regex_id]:
+                         snippets[regex_id].append(prepend + s)
+
+         return snippets
+
+     def postprocess_testrun(self, testrun, squad=True, print=False):
+         """
+         Check:
+         - There is a log file
+         - If running as SQUAD plugin, the testrun contains the "build"
+           suite - this tells us that the testrun's log is a build log
+         """
+         if testrun.log_file is None or (
+             squad and not testrun.tests.filter(suite__slug="build").exists()
+         ):
+             return
+
+         regexes = None
+         for toolchain, toolchain_regexes in supported_toolchains.items():
+             if f"--toolchain={toolchain}" in testrun.log_file:
+                 toolchain_name = toolchain
+                 regexes = toolchain_regexes
+
+         # If a supported toolchain was not found in the log
+         if regexes is None:
+             return
+
+         # If running in SQUAD, create the suite
+         suite_name = f"log-parser-build-{toolchain_name}"
+
+         blocks_to_process = self.split_by_regex(testrun.log_file, split_regex_gcc)
+
+         snippets = self.process_blocks(blocks_to_process, regexes)
+
+         for regex_id in range(len(regexes)):
+             test_name = regexes[regex_id][REGEX_NAME]
+             regex_pattern = regexes[regex_id][REGEX_EXTRACT_NAME]
+             test_name_regex = None
+             if regex_pattern:
+                 test_name_regex = re.compile(regex_pattern, re.S | re.M)
+             self.create_squad_tests(
+                 testrun,
+                 suite_name,
+                 test_name,
+                 snippets[regex_id],
+                 test_name_regex,
+                 create_shas=False,
+                 print=print,
+                 squad=squad,
+             )
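The new build-log plugin above (squad/plugins/linux_log_parser_build.py in the RECORD changes below) picks its regex set by looking for a --toolchain=gcc or --toolchain=clang marker in the log, splits the log on make / "Entering directory" / "In file" / "In function" context lines, and prepends that context to each extracted error or warning. A rough standalone sketch of driving it outside SQUAD, mirroring what the run_log_parser command does; the log content here is invented:

    from squad.plugins.linux_log_parser_build import Plugin

    class FakeTestRun:
        id = None
        log_file = (
            "tuxmake --target-arch=arm64 --toolchain=gcc-13 ...\n"  # selects the gcc regex set
            "make --silent --keep-going ...\n"
            "drivers/foo.c:12:5: warning: unused variable 'x' [-Wunused-variable]\n"
        )

    # squad=False skips all database writes; print=True prints the would-be
    # tests (e.g. entries under a log-parser-build-gcc suite) to stdout.
    Plugin().postprocess_testrun(FakeTestRun(), squad=False, print=True)
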
squad/version.py CHANGED
@@ -1 +1 @@
- __version__ = '1.92'
+ __version__ = '1.93.1'
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: squad
- Version: 1.92
+ Version: 1.93.1
  Summary: Software Quality Dashboard
  Home-page: https://github.com/Linaro/squad
  Author: Antonio Terceiro
@@ -10,7 +10,7 @@ squad/manage.py,sha256=Z-LXT67p0R-IzwJ9fLIAacEZmU0VUjqDOSg7j2ZSxJ4,1437
  squad/settings.py,sha256=0MZ48SV_7CTrLMik2ubWf8-ROQiFju6CKnUC3iR8KAc,14800
  squad/socialaccount.py,sha256=vySqPwQ3qVVpahuJ-Snln8K--yzRL3bw4Nx27AsB39A,789
  squad/urls.py,sha256=JiEfVW8YlzLPE52c2aHzdn5kVVKK4o22w8h5KOA6QhQ,2776
- squad/version.py,sha256=WHUX3wrk23jPrWzNUft2dot0QtSqj7kO0c3Q3yKJiDM,21
+ squad/version.py,sha256=N1lgYLLJIViE9VAAq7J5l3sFo7_BdyvoOEdr3j9eNaA,23
  squad/wsgi.py,sha256=SF8T0cQ0OPVyuYjO5YXBIQzvSXQHV0M2BTmd4gP1rPs,387
  squad/api/__init__.py,sha256=CJiVakfAlHVN5mIFRVQYZQfuNUhUgWVbsdYTME4tq7U,1349
  squad/api/apps.py,sha256=Trk72p-iV1uGn0o5mdJn5HARUoHGbfgO49jwXvpkmdQ,141
@@ -26,14 +26,14 @@ squad/ci/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  squad/ci/admin.py,sha256=7yB-6F0cvt0NVvzGOTlZCyGPV_YHarmbKJZTTzataT4,2255
  squad/ci/apps.py,sha256=6OVnzTdJkxdqEJnKWYE9dZgUcc29_T1LrDw41cK4EQk,139
  squad/ci/exceptions.py,sha256=a1sccygniTYDSQi7FRn_6doapddFFiMf55AwGUh5Y80,227
- squad/ci/models.py,sha256=Fm-4b3SDgMh9HXzqjOd4iZDRMJ1D9AnZ2cg7i2OR248,16018
+ squad/ci/models.py,sha256=wR9FMBdjQgtEP3ga9CY6npFr5fUIeVpnfAhNa2xqM00,15897
  squad/ci/tasks.py,sha256=P0NYjLuyUViTpO1jZMuRVREbFDCccrMCZDw5E4pt928,3882
  squad/ci/utils.py,sha256=38zHpw8xkZDSFlkG-2BwSK6AkcddK9OkN9LXuQ3SHR0,97
  squad/ci/backend/__init__.py,sha256=yhpotXT9F4IdAOXvGQ3-17eOHAFwoaqf9SnMX17ab30,534
  squad/ci/backend/fake.py,sha256=7Rl-JXnBYThDomOBzBsN9XuVkSjSHTZjtZOURdowZbA,2397
  squad/ci/backend/lava.py,sha256=WeOJJNxv42geGf3Y6r-I0WnhWinxpSSgZAFAwfkiXGY,34039
  squad/ci/backend/null.py,sha256=htEd4NbrXLKdPgFfTS0Ixm8PdT6Ghat3BCYi2zjfuv0,5624
- squad/ci/backend/tuxsuite.py,sha256=dTQNgHs5HNrY2LUaSH6jXiBLVjgyMNTqJ6CUWU3yPK4,18914
+ squad/ci/backend/tuxsuite.py,sha256=pFcNdcHpFzalHPQhbSY6ryOci_PU3LFsaNjSsgjbqGg,18676
  squad/ci/management/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  squad/ci/management/commands/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  squad/ci/management/commands/create_tuxsuite_boot_tests.py,sha256=JvjNusebLX71eyz9d-kaeCyekYSpzc1eXoeIqWK9ygo,4045
@@ -82,7 +82,7 @@ squad/core/comparison.py,sha256=LR3-Unv0CTmakFCDzF_h8fm2peTJzkv79mQWNau1iwI,2442
  squad/core/data.py,sha256=2zw56v7iYRTUc7wlhuUNgwIIMmK2w84hi-amR9J7EPU,2236
  squad/core/failures.py,sha256=X6lJVghM2fOrd-RfuHeLlezW2pt7owDZ8eX-Kn_Qrt0,918
  squad/core/history.py,sha256=QRSIoDOw6R6vUWMtsPMknsHGM7FaCAeuCYqASCayHTk,3541
- squad/core/models.py,sha256=sXQmgPtl54IZT7rDmJEU3QK6JSPbi0hTUGRsjwL6PIo,60851
+ squad/core/models.py,sha256=qSLlxjBwzsZKGoCkPX6T-g48jXg81B1JH3wMXSLLvHQ,61401
  squad/core/notification.py,sha256=rOpO6F63w7_5l9gQgWBBEk-MFBjp7x_hVzoVIVyDze0,10030
  squad/core/plugins.py,sha256=FLgyoXXKnPBYEf2MgHup9M017rHuADHivLhgzmx_cJE,6354
  squad/core/queries.py,sha256=78fhIJZWXIlDryewYAt96beK1VJad66Ufu8cg3dHh4w,7698
@@ -105,6 +105,7 @@ squad/core/management/commands/migrate_test_runs.py,sha256=RHV06tb4gWyv_q-ooC821
  squad/core/management/commands/populate_metric_build_and_environment.py,sha256=DJP9_YLRso0RiERBVsB0GP4-GaiRtJb0rAiUQDfFNQk,3166
  squad/core/management/commands/populate_test_build_and_environment.py,sha256=0yHClC0x_8LSZlvT6Ag0BnipC9Xk-U6lcIaCsqAGEWk,3146
  squad/core/management/commands/prepdump.py,sha256=WM58leVdJj45KhWPw3DGO7vwnNY70ReXrJRSIIzGXkI,518
+ squad/core/management/commands/run_log_parser.py,sha256=SeksSD1cnbgl8oRsD3wu12p30_FMw090T6ouQyO4ZsI,1113
  squad/core/management/commands/send-email.py,sha256=wb1o5oKLDyH2ZonnQY-Jw28Y0Mu61OHWP8b1AQGKqbU,1120
  squad/core/management/commands/update_project_statuses.py,sha256=JleCesbVhYOSXr90ntH7s5u9Isknt7EnlX22VC6yI78,2089
  squad/core/management/commands/users.py,sha256=qIp87xRMfKWHymsAft5-gnYajm2mgaiHvVn7z86DCT8,9429
@@ -278,7 +279,7 @@ squad/core/migrations/0167_add_project_datetime.py,sha256=VUBG-qsAhh2f2NXaHOqfX9
  squad/core/migrations/0168_add_group_settings.py,sha256=5UdylfMMNavTL0KXkjPSiEMhSisGWXbhUXQSzfK29Ck,462
  squad/core/migrations/0169_userpreferences.py,sha256=FwYv9RWxMWdQ2lXJMgi-Xc6XBB5Kp-_YTAOr9GVq1To,1098
  squad/core/migrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- squad/core/tasks/__init__.py,sha256=pYbEkFzNaat7iQQretRiJQPPF4Sq-5-hBykJYnBM04g,18567
+ squad/core/tasks/__init__.py,sha256=wKjyFw0JXiEDY6PEaKx3ureiSNIQFL8lHH4JIOMjlF8,18677
  squad/core/tasks/exceptions.py,sha256=n4cbmJFBdA6KWsGiTbfN9DyYGbJpk0DjR0UneEYw_W0,931
  squad/core/tasks/notification.py,sha256=6ZyTbUQZPITPP-4r9MUON7x-NbwvDBG8YeabM6fsjzA,4915
  squad/core/templates/squad/notification/base.jinja2,sha256=AbtQioEHV5DJBW4Etsu0-DQXd_8tQCnLejzgbDGDW7s,3413
@@ -426,17 +427,18 @@ squad/plugins/__init__.py,sha256=9BSzy2jFIoDpWlhD7odPPrLdW4CC3btBhdFCvB651dM,152
  squad/plugins/example.py,sha256=BKpwd315lHRIuNXJPteibpwfnI6C5eXYHYdFYBtVmsI,89
  squad/plugins/gerrit.py,sha256=CqO2KnFQzu9utr_TQ-sGr1wg3ln0B-bS2-c0_i8T5-c,7009
  squad/plugins/github.py,sha256=pdtLZw_7xNuzkaFvY_zWi0f2rsMlalXjKm7sz0eADz4,2429
- squad/plugins/linux_log_parser.py,sha256=MB8ScFZycq70Rrm7IM_Cw95rMtqVS9w4RhS5HhBSpcE,3292
+ squad/plugins/linux_log_parser.py,sha256=HQVreyZLBmLuv-K-MjlN43sQQSkcls4hkUsjJ9_5WfM,3472
+ squad/plugins/linux_log_parser_build.py,sha256=42pTj1_inTsiS_-htElNWw5Cod0bxpF8ZAm1qvYVhes,10481
  squad/plugins/lib/__init__.py,sha256=jzazbAvp2_ibblAs0cKZrmo9aR2EL3hKLyRDE008r2I,40
- squad/plugins/lib/base_log_parser.py,sha256=OW6JkZ3PM5RiDkt9UZ7OFFpUIArCxFUaqovynzwBL1Y,6573
+ squad/plugins/lib/base_log_parser.py,sha256=Bb3ok6R9_65EYvdWAsm8wcY741duGujTpaDXw1gJ9Yk,9366
  squad/run/__init__.py,sha256=ssE8GPAGFiK6V0WpZYowav6Zqsd63dfDMMYasNa1sQg,1410
  squad/run/__main__.py,sha256=DOl8JOi4Yg7DdtwnUeGqtYBJ6P2k-D2psAEuYOjWr8w,66
  squad/run/listener.py,sha256=jBeOQhPGb4EdIREB1QsCzYuumsfJ-TqJPd3nR-0m59g,200
  squad/run/scheduler.py,sha256=CDJG3q5C0GuQuxwlMOfWTSSJpDdwbR6rzpbJfuA0xuw,277
  squad/run/worker.py,sha256=jtML0h5qKDuSbpJ6_rpWP4MT_rsGA7a24AhwGxBquzk,594
- squad-1.92.dist-info/COPYING,sha256=jOtLnuWt7d5Hsx6XXB2QxzrSe2sWWh3NgMfFRetluQM,35147
- squad-1.92.dist-info/METADATA,sha256=_lF0F4lPQq88zJhPQ7M38zYoiWXjPWDKCOXr10WTKfg,1278
- squad-1.92.dist-info/WHEEL,sha256=eOLhNAGa2EW3wWl_TU484h7q1UNgy0JXjjoqKoxAAQc,92
- squad-1.92.dist-info/entry_points.txt,sha256=J_jG3qnkoOHX4RFNGC0f83eJ4BSvK3pqLFkoF3HWfmA,195
- squad-1.92.dist-info/top_level.txt,sha256=_x9uqE1XppiiytmVTl_qNgpnXus6Gsef69HqfliE7WI,6
- squad-1.92.dist-info/RECORD,,
+ squad-1.93.1.dist-info/COPYING,sha256=jOtLnuWt7d5Hsx6XXB2QxzrSe2sWWh3NgMfFRetluQM,35147
+ squad-1.93.1.dist-info/METADATA,sha256=BBKV-R_mmv5k6Tujtg0ARX4GSF0LwWdGvbEerx0OHpw,1280
+ squad-1.93.1.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+ squad-1.93.1.dist-info/entry_points.txt,sha256=J_jG3qnkoOHX4RFNGC0f83eJ4BSvK3pqLFkoF3HWfmA,195
+ squad-1.93.1.dist-info/top_level.txt,sha256=_x9uqE1XppiiytmVTl_qNgpnXus6Gsef69HqfliE7WI,6
+ squad-1.93.1.dist-info/RECORD,,
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: bdist_wheel (0.44.0)
+ Generator: bdist_wheel (0.45.1)
  Root-Is-Purelib: true
  Tag: py3-none-any
 
File without changes