@misterhuydo/sentinel 1.5.63 → 1.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. package/.cairn/session.json +2 -2
  2. package/package.json +1 -1
  3. package/python/scripts/__pycache__/fix_ask_codebase_context.cpython-311.pyc +0 -0
  4. package/python/scripts/__pycache__/fix_ask_codebase_stdin.cpython-311.pyc +0 -0
  5. package/python/scripts/__pycache__/fix_chain_slack.cpython-311.pyc +0 -0
  6. package/python/scripts/__pycache__/fix_fstring.cpython-311.pyc +0 -0
  7. package/python/scripts/__pycache__/fix_knowledge_cache.cpython-311.pyc +0 -0
  8. package/python/scripts/__pycache__/fix_knowledge_cache_staleness.cpython-311.pyc +0 -0
  9. package/python/scripts/__pycache__/fix_merge_confirm.cpython-311.pyc +0 -0
  10. package/python/scripts/__pycache__/fix_permission_messages.cpython-311.pyc +0 -0
  11. package/python/scripts/__pycache__/fix_pr_check_head_detect.cpython-311.pyc +0 -0
  12. package/python/scripts/__pycache__/fix_pr_msg_newlines.cpython-311.pyc +0 -0
  13. package/python/scripts/__pycache__/fix_pr_tracking_boss.cpython-311.pyc +0 -0
  14. package/python/scripts/__pycache__/fix_pr_tracking_db.cpython-311.pyc +0 -0
  15. package/python/scripts/__pycache__/fix_pr_tracking_main.cpython-311.pyc +0 -0
  16. package/python/scripts/__pycache__/fix_project_isolation.cpython-311.pyc +0 -0
  17. package/python/scripts/__pycache__/fix_system_prompt.cpython-311.pyc +0 -0
  18. package/python/scripts/__pycache__/fix_two_bugs.cpython-311.pyc +0 -0
  19. package/python/scripts/__pycache__/patch_chain_release.cpython-311.pyc +0 -0
  20. package/python/sentinel/__init__.py +1 -1
  21. package/python/sentinel/__pycache__/__init__.cpython-311.pyc +0 -0
  22. package/python/sentinel/__pycache__/cairn_client.cpython-311.pyc +0 -0
  23. package/python/sentinel/__pycache__/cicd_trigger.cpython-311.pyc +0 -0
  24. package/python/sentinel/__pycache__/config_loader.cpython-311.pyc +0 -0
  25. package/python/sentinel/__pycache__/dependency_manager.cpython-311.pyc +0 -0
  26. package/python/sentinel/__pycache__/dev_watcher.cpython-311.pyc +0 -0
  27. package/python/sentinel/__pycache__/fix_engine.cpython-311.pyc +0 -0
  28. package/python/sentinel/__pycache__/git_manager.cpython-311.pyc +0 -0
  29. package/python/sentinel/__pycache__/health_checker.cpython-311.pyc +0 -0
  30. package/python/sentinel/__pycache__/issue_watcher.cpython-311.pyc +0 -0
  31. package/python/sentinel/__pycache__/log_fetcher.cpython-311.pyc +0 -0
  32. package/python/sentinel/__pycache__/log_parser.cpython-311.pyc +0 -0
  33. package/python/sentinel/__pycache__/log_syncer.cpython-311.pyc +0 -0
  34. package/python/sentinel/__pycache__/main.cpython-311.pyc +0 -0
  35. package/python/sentinel/__pycache__/notify.cpython-311.pyc +0 -0
  36. package/python/sentinel/__pycache__/repo_router.cpython-311.pyc +0 -0
  37. package/python/sentinel/__pycache__/repo_task_engine.cpython-311.pyc +0 -0
  38. package/python/sentinel/__pycache__/reporter.cpython-311.pyc +0 -0
  39. package/python/sentinel/__pycache__/sentinel_boss.cpython-311.pyc +0 -0
  40. package/python/sentinel/__pycache__/sentinel_dev.cpython-311.pyc +0 -0
  41. package/python/sentinel/__pycache__/slack_bot.cpython-311.pyc +0 -0
  42. package/python/sentinel/__pycache__/state_store.cpython-311.pyc +0 -0
  43. package/python/sentinel/cairn_client.py +30 -11
  44. package/python/sentinel/fix_engine.py +182 -43
  45. package/python/sentinel/git_manager.py +335 -0
  46. package/python/sentinel/main.py +189 -5
  47. package/python/sentinel/state_store.py +121 -0
  48. package/python/tests/test_cairn_client.py +72 -0
  49. package/python/tests/test_fix_engine_json.py +95 -0
  50. package/python/tests/test_fix_engine_prompt.py +93 -0
  51. package/python/tests/test_multi_repo_apply.py +254 -0
  52. package/python/tests/test_multi_repo_publish.py +175 -0
  53. package/python/tests/test_patch_parser.py +250 -0
  54. package/python/tests/test_project_lock.py +85 -0
  55. package/python/tests/test_state_store.py +87 -0
@@ -0,0 +1,175 @@
1
+ """
2
+ test_multi_repo_publish.py — Tests for publish_multi().
3
+
4
+ publish_multi() takes the list of per-repo results from apply_and_commit_multi()
5
+ and pushes / opens PRs per repo. PR bodies note the sibling repos in the same
6
+ multi-repo fix, so reviewers can find them by branch name.
7
+
8
+ Single-repo behavior must remain identical to today.
9
+ """
10
+ from pathlib import Path
11
+ from unittest.mock import patch
12
+ from types import SimpleNamespace
13
+
14
+ import pytest
15
+
16
+ from sentinel import git_manager
17
+ from sentinel.config_loader import RepoConfig
18
+
19
+
20
+ def _mk_event(fp="deadbeef00000001"):
21
+ e = SimpleNamespace()
22
+ e.fingerprint = fp
23
+ e.source = "src"
24
+ e.message = "boom"
25
+ e.stack_trace = []
26
+ e.timestamp = "2026-04-24T10:00:00Z"
27
+ e.log_file = ""
28
+ e.short_summary = lambda: "boom in foo"
29
+ return e
30
+
31
+
32
+ def _mk_repo(tmp_path: Path, name: str, auto_commit=False) -> RepoConfig:
33
+ p = tmp_path / "repos" / name
34
+ p.mkdir(parents=True, exist_ok=True)
35
+ return RepoConfig(
36
+ repo_name=name,
37
+ repo_url=f"git@github.com:org/{name}.git",
38
+ local_path=str(p),
39
+ branch="main",
40
+ auto_commit=auto_commit,
41
+ )
42
+
43
+
44
+ def _mk_cfg():
45
+ cfg = SimpleNamespace()
46
+ cfg.project_name = "test-project"
47
+ cfg.github_token = "ghp_test"
48
+ cfg.slack_bot_token = ""
49
+ cfg.slack_channel = ""
50
+ cfg.auto_commit = False
51
+ cfg.auto_release = False
52
+ return cfg
53
+
54
+
55
+ def _ok():
56
+ return SimpleNamespace(returncode=0, stdout="", stderr="")
57
+
58
+
59
+ # ── single-repo PR mode (unchanged behaviour) ────────────────────────────────
60
+
61
+ def test_single_committed_repo_opens_one_pr(tmp_path):
62
+ cfg = _mk_cfg()
63
+ repo = _mk_repo(tmp_path, "r1")
64
+ results = [{
65
+ "repo_name": "r1", "repo": repo, "status": "committed",
66
+ "commit_hash": "abc1234", "branch": "main", "apply_order": 0,
67
+ "reason": "", "sub_patch_path": tmp_path / "x.diff",
68
+ }]
69
+
70
+ pr_url = "https://github.com/org/r1/pull/42"
71
+ with patch.object(git_manager, "_git", return_value=_ok()), \
72
+ patch.object(git_manager, "_open_github_pr", return_value=pr_url) as open_pr, \
73
+ patch.object(git_manager, "remote_fix_exists", return_value=False):
74
+ out = git_manager.publish_multi(_mk_event(), results, cfg)
75
+
76
+ assert open_pr.call_count == 1
77
+ assert out[0]["pr_url"] == pr_url
78
+
79
+
80
+ # ── multi-repo: each PR body lists sibling repos by name ──────────────────────
81
+
82
+ def test_two_repos_each_pr_body_mentions_siblings(tmp_path):
83
+ cfg = _mk_cfg()
84
+ lib = _mk_repo(tmp_path, "lib")
85
+ consumer = _mk_repo(tmp_path, "consumer")
86
+ results = [
87
+ {"repo_name": "lib", "repo": lib, "status": "committed",
88
+ "commit_hash": "aaa1111", "branch": "main", "apply_order": 0,
89
+ "reason": "", "sub_patch_path": tmp_path / "lib.diff"},
90
+ {"repo_name": "consumer", "repo": consumer, "status": "committed",
91
+ "commit_hash": "bbb2222", "branch": "main", "apply_order": 1,
92
+ "reason": "", "sub_patch_path": tmp_path / "consumer.diff"},
93
+ ]
94
+
95
+ captured_extras: list[str] = []
96
+
97
+ def fake_open_pr(event, repo, cfg, branch, commit_hash, extra_body=""):
98
+ captured_extras.append(extra_body)
99
+ return f"https://github.com/org/{repo.repo_name}/pull/1"
100
+
101
+ with patch.object(git_manager, "_git", return_value=_ok()), \
102
+ patch.object(git_manager, "_open_github_pr", side_effect=fake_open_pr), \
103
+ patch.object(git_manager, "remote_fix_exists", return_value=False):
104
+ out = git_manager.publish_multi(_mk_event(), results, cfg)
105
+
106
+ assert len(captured_extras) == 2
107
+ # lib's PR body mentions consumer; consumer's PR body mentions lib
108
+ assert "consumer" in captured_extras[0]
109
+ assert "lib" in captured_extras[1]
110
+ # Both PRs are recorded back into results
111
+ assert all(r["pr_url"] for r in out if r["status"] == "committed")
112
+
113
+
114
+ # ── failed entries are not pushed ─────────────────────────────────────────────
115
+
116
+ def test_failed_entries_are_skipped_for_publish(tmp_path):
117
+ cfg = _mk_cfg()
118
+ repo = _mk_repo(tmp_path, "r1")
119
+ results = [{
120
+ "repo_name": "r1", "repo": repo, "status": "failed",
121
+ "commit_hash": "", "branch": "", "apply_order": 0,
122
+ "reason": "tests failed", "sub_patch_path": tmp_path / "x.diff",
123
+ }]
124
+
125
+ with patch.object(git_manager, "_git", return_value=_ok()) as git_call, \
126
+ patch.object(git_manager, "_open_github_pr") as open_pr, \
127
+ patch.object(git_manager, "remote_fix_exists", return_value=False):
128
+ out = git_manager.publish_multi(_mk_event(), results, cfg)
129
+
130
+ open_pr.assert_not_called()
131
+ git_call.assert_not_called()
132
+ assert out[0]["status"] == "failed"
133
+
134
+
135
+ # ── auto_commit mode pushes directly to main, no PR ──────────────────────────
136
+
137
+ def test_auto_commit_mode_pushes_no_pr(tmp_path):
138
+ cfg = _mk_cfg()
139
+ cfg.auto_commit = True # global default
140
+ repo = _mk_repo(tmp_path, "r1", auto_commit=True)
141
+ results = [{
142
+ "repo_name": "r1", "repo": repo, "status": "committed",
143
+ "commit_hash": "abc1234", "branch": "main", "apply_order": 0,
144
+ "reason": "", "sub_patch_path": tmp_path / "x.diff",
145
+ }]
146
+
147
+ with patch.object(git_manager, "_git", return_value=_ok()), \
148
+ patch.object(git_manager, "_open_github_pr") as open_pr:
149
+ out = git_manager.publish_multi(_mk_event(), results, cfg)
150
+
151
+ open_pr.assert_not_called()
152
+ assert out[0]["pr_url"] == ""
153
+ # branch stays as repo.branch (main)
154
+ assert out[0]["branch"] == "main"
155
+
156
+
157
+ # ── duplicate PR detection ───────────────────────────────────────────────────
158
+
159
+ def test_existing_remote_branch_skips_push(tmp_path):
160
+ cfg = _mk_cfg()
161
+ repo = _mk_repo(tmp_path, "r1")
162
+ results = [{
163
+ "repo_name": "r1", "repo": repo, "status": "committed",
164
+ "commit_hash": "abc1234", "branch": "main", "apply_order": 0,
165
+ "reason": "", "sub_patch_path": tmp_path / "x.diff",
166
+ }]
167
+
168
+ with patch.object(git_manager, "_git", return_value=_ok()), \
169
+ patch.object(git_manager, "_open_github_pr") as open_pr, \
170
+ patch.object(git_manager, "remote_fix_exists", return_value=True):
171
+ out = git_manager.publish_multi(_mk_event(), results, cfg)
172
+
173
+ # remote branch already exists → don't open a duplicate PR
174
+ open_pr.assert_not_called()
175
+ assert out[0].get("pr_url", "") == ""
@@ -0,0 +1,250 @@
1
+ """
2
+ test_patch_parser.py — Unit tests for parse_multi_repo_patch().
3
+
4
+ The parser splits a combined diff (paths prefixed `repos/<repo-name>/...`)
5
+ into per-repo sub-patches with the prefix stripped. It is the linchpin of
6
+ the multi-repo apply flow — these tests cover edge cases exhaustively.
7
+ """
8
+ import pytest
9
+
10
+ from sentinel.git_manager import parse_multi_repo_patch
11
+
12
+
13
+ # ── happy path ────────────────────────────────────────────────────────────────
14
+
15
+ def test_parse_single_repo_patch():
16
+ diff = (
17
+ "diff --git a/repos/repo-a/src/Foo.java b/repos/repo-a/src/Foo.java\n"
18
+ "index 1111..2222 100644\n"
19
+ "--- a/repos/repo-a/src/Foo.java\n"
20
+ "+++ b/repos/repo-a/src/Foo.java\n"
21
+ "@@ -1,3 +1,3 @@\n"
22
+ " line1\n"
23
+ "-old\n"
24
+ "+new\n"
25
+ )
26
+ result = parse_multi_repo_patch(diff)
27
+ assert result["affected_repos"] == ["repo-a"]
28
+ assert "repo-a" in result["patches"]
29
+ sub = result["patches"]["repo-a"]
30
+ assert "diff --git a/src/Foo.java b/src/Foo.java" in sub
31
+ assert "--- a/src/Foo.java" in sub
32
+ assert "+++ b/src/Foo.java" in sub
33
+ assert "repos/repo-a" not in sub
34
+
35
+
36
+ def test_parse_two_repo_patch_preserves_chunks():
37
+ diff = (
38
+ "diff --git a/repos/lib/src/A.java b/repos/lib/src/A.java\n"
39
+ "--- a/repos/lib/src/A.java\n"
40
+ "+++ b/repos/lib/src/A.java\n"
41
+ "@@ -1,1 +1,1 @@\n"
42
+ "-old-a\n"
43
+ "+new-a\n"
44
+ "diff --git a/repos/consumer/pom.xml b/repos/consumer/pom.xml\n"
45
+ "--- a/repos/consumer/pom.xml\n"
46
+ "+++ b/repos/consumer/pom.xml\n"
47
+ "@@ -1,1 +1,1 @@\n"
48
+ "-1.0\n"
49
+ "+1.1\n"
50
+ )
51
+ result = parse_multi_repo_patch(diff)
52
+ assert set(result["affected_repos"]) == {"lib", "consumer"}
53
+ assert "new-a" in result["patches"]["lib"]
54
+ assert "+1.1" in result["patches"]["consumer"]
55
+ # No cross-contamination
56
+ assert "pom.xml" not in result["patches"]["lib"]
57
+ assert "A.java" not in result["patches"]["consumer"]
58
+
59
+
60
+ def test_parse_multiple_files_same_repo():
61
+ diff = (
62
+ "diff --git a/repos/r1/a.txt b/repos/r1/a.txt\n"
63
+ "--- a/repos/r1/a.txt\n"
64
+ "+++ b/repos/r1/a.txt\n"
65
+ "@@ -1 +1 @@\n"
66
+ "-x\n+y\n"
67
+ "diff --git a/repos/r1/b.txt b/repos/r1/b.txt\n"
68
+ "--- a/repos/r1/b.txt\n"
69
+ "+++ b/repos/r1/b.txt\n"
70
+ "@@ -1 +1 @@\n"
71
+ "-p\n+q\n"
72
+ )
73
+ result = parse_multi_repo_patch(diff)
74
+ assert result["affected_repos"] == ["r1"]
75
+ sub = result["patches"]["r1"]
76
+ # Both file diffs are in the same repo's patch
77
+ assert sub.count("diff --git") == 2
78
+ assert "a.txt" in sub
79
+ assert "b.txt" in sub
80
+
81
+
82
+ # ── header parsing ────────────────────────────────────────────────────────────
83
+
84
+ def test_affected_repos_header_sets_order():
85
+ diff = (
86
+ "# Affected repos: consumer, lib\n"
87
+ "\n"
88
+ "diff --git a/repos/lib/x b/repos/lib/x\n"
89
+ "--- a/repos/lib/x\n+++ b/repos/lib/x\n@@ -1 +1 @@\n-a\n+b\n"
90
+ "diff --git a/repos/consumer/y b/repos/consumer/y\n"
91
+ "--- a/repos/consumer/y\n+++ b/repos/consumer/y\n@@ -1 +1 @@\n-c\n+d\n"
92
+ )
93
+ result = parse_multi_repo_patch(diff)
94
+ # Header defines order even though `lib` appears first in the diffs
95
+ assert result["affected_repos"] == ["consumer", "lib"]
96
+
97
+
98
+ def test_affected_repos_header_case_insensitive():
99
+ diff = (
100
+ "# affected REPOS: r1\n"
101
+ "diff --git a/repos/r1/f b/repos/r1/f\n"
102
+ "--- a/repos/r1/f\n+++ b/repos/r1/f\n@@ -1 +1 @@\n-a\n+b\n"
103
+ )
104
+ assert parse_multi_repo_patch(diff)["affected_repos"] == ["r1"]
105
+
106
+
107
+ def test_no_header_falls_back_to_diff_order():
108
+ diff = (
109
+ "diff --git a/repos/zebra/x b/repos/zebra/x\n"
110
+ "--- a/repos/zebra/x\n+++ b/repos/zebra/x\n@@ -1 +1 @@\n-a\n+b\n"
111
+ "diff --git a/repos/alpha/y b/repos/alpha/y\n"
112
+ "--- a/repos/alpha/y\n+++ b/repos/alpha/y\n@@ -1 +1 @@\n-c\n+d\n"
113
+ )
114
+ assert parse_multi_repo_patch(diff)["affected_repos"] == ["zebra", "alpha"]
115
+
116
+
117
+ def test_header_with_repo_not_in_diffs_is_dropped():
118
+ """A header may overpromise; only repos with actual diffs end up in patches."""
119
+ diff = (
120
+ "# Affected repos: ghost, real\n"
121
+ "diff --git a/repos/real/f b/repos/real/f\n"
122
+ "--- a/repos/real/f\n+++ b/repos/real/f\n@@ -1 +1 @@\n-a\n+b\n"
123
+ )
124
+ result = parse_multi_repo_patch(diff)
125
+ assert "ghost" not in result["patches"]
126
+ assert "real" in result["patches"]
127
+ # ghost is removed from affected_repos as well — patches dict is the source of truth
128
+ assert "ghost" not in result["affected_repos"]
129
+
130
+
131
+ # ── special diff formats ──────────────────────────────────────────────────────
132
+
133
+ def test_new_file_mode_strips_only_b_path():
134
+ """new-file diffs use /dev/null on the - side; only the +++ b/ path needs stripping."""
135
+ diff = (
136
+ "diff --git a/repos/r1/new.java b/repos/r1/new.java\n"
137
+ "new file mode 100644\n"
138
+ "index 0000000..abcd\n"
139
+ "--- /dev/null\n"
140
+ "+++ b/repos/r1/new.java\n"
141
+ "@@ -0,0 +1,2 @@\n"
142
+ "+line one\n"
143
+ "+line two\n"
144
+ )
145
+ result = parse_multi_repo_patch(diff)
146
+ sub = result["patches"]["r1"]
147
+ assert "new file mode 100644" in sub
148
+ assert "--- /dev/null" in sub
149
+ assert "+++ b/new.java" in sub
150
+ assert "diff --git a/new.java b/new.java" in sub
151
+ assert "repos/r1" not in sub
152
+
153
+
154
+ def test_deleted_file_strips_only_a_path():
155
+ diff = (
156
+ "diff --git a/repos/r1/gone.java b/repos/r1/gone.java\n"
157
+ "deleted file mode 100644\n"
158
+ "index abcd..0000000\n"
159
+ "--- a/repos/r1/gone.java\n"
160
+ "+++ /dev/null\n"
161
+ "@@ -1,1 +0,0 @@\n"
162
+ "-bye\n"
163
+ )
164
+ result = parse_multi_repo_patch(diff)
165
+ sub = result["patches"]["r1"]
166
+ assert "deleted file mode" in sub
167
+ assert "--- a/gone.java" in sub
168
+ assert "+++ /dev/null" in sub
169
+ assert "repos/r1" not in sub
170
+
171
+
172
+ def test_rename_strips_both_paths():
173
+ diff = (
174
+ "diff --git a/repos/r1/old.java b/repos/r1/new.java\n"
175
+ "similarity index 100%\n"
176
+ "rename from repos/r1/old.java\n"
177
+ "rename to repos/r1/new.java\n"
178
+ )
179
+ result = parse_multi_repo_patch(diff)
180
+ sub = result["patches"]["r1"]
181
+ assert "diff --git a/old.java b/new.java" in sub
182
+ assert "rename from old.java" in sub
183
+ assert "rename to new.java" in sub
184
+ assert "repos/r1" not in sub
185
+
186
+
187
+ def test_nested_path_in_repo():
188
+ diff = (
189
+ "diff --git a/repos/r1/src/main/java/com/x/Foo.java b/repos/r1/src/main/java/com/x/Foo.java\n"
190
+ "--- a/repos/r1/src/main/java/com/x/Foo.java\n"
191
+ "+++ b/repos/r1/src/main/java/com/x/Foo.java\n"
192
+ "@@ -1 +1 @@\n-a\n+b\n"
193
+ )
194
+ sub = parse_multi_repo_patch(diff)["patches"]["r1"]
195
+ assert "diff --git a/src/main/java/com/x/Foo.java b/src/main/java/com/x/Foo.java" in sub
196
+ assert "--- a/src/main/java/com/x/Foo.java" in sub
197
+ assert "+++ b/src/main/java/com/x/Foo.java" in sub
198
+
199
+
200
+ def test_repo_name_with_dashes_and_digits():
201
+ diff = (
202
+ "diff --git a/repos/1881-SSOLoginWebApp/src/F.java b/repos/1881-SSOLoginWebApp/src/F.java\n"
203
+ "--- a/repos/1881-SSOLoginWebApp/src/F.java\n"
204
+ "+++ b/repos/1881-SSOLoginWebApp/src/F.java\n"
205
+ "@@ -1 +1 @@\n-a\n+b\n"
206
+ )
207
+ result = parse_multi_repo_patch(diff)
208
+ assert "1881-SSOLoginWebApp" in result["patches"]
209
+ assert "diff --git a/src/F.java" in result["patches"]["1881-SSOLoginWebApp"]
210
+
211
+
212
+ # ── empty / malformed input ───────────────────────────────────────────────────
213
+
214
+ def test_empty_patch_returns_empty():
215
+ result = parse_multi_repo_patch("")
216
+ assert result["affected_repos"] == []
217
+ assert result["patches"] == {}
218
+
219
+
220
+ def test_only_header_no_diffs():
221
+ result = parse_multi_repo_patch("# Affected repos: r1\n")
222
+ assert result["affected_repos"] == []
223
+ assert result["patches"] == {}
224
+
225
+
226
+ def test_diff_without_repos_prefix_is_rejected():
227
+ """A diff path that doesn't start with repos/<name>/ has no repo to route to."""
228
+ diff = (
229
+ "diff --git a/src/Loose.java b/src/Loose.java\n"
230
+ "--- a/src/Loose.java\n+++ b/src/Loose.java\n@@ -1 +1 @@\n-a\n+b\n"
231
+ )
232
+ result = parse_multi_repo_patch(diff)
233
+ # No repos identified → empty result
234
+ assert result["affected_repos"] == []
235
+ assert result["patches"] == {}
236
+
237
+
238
+ def test_summary_lines_above_header_preserved():
239
+ diff = (
240
+ "Trim and broaden FirstName/LastName regex.\n"
241
+ "Reason: CRM names with diacritics were rejected.\n"
242
+ "\n"
243
+ "# Affected repos: r1\n"
244
+ "diff --git a/repos/r1/F.java b/repos/r1/F.java\n"
245
+ "--- a/repos/r1/F.java\n+++ b/repos/r1/F.java\n@@ -1 +1 @@\n-a\n+b\n"
246
+ )
247
+ result = parse_multi_repo_patch(diff)
248
+ assert "diacritics" in result["summary"]
249
+ assert "# Affected repos:" not in result["summary"]
250
+ assert "diff --git" not in result["summary"]
@@ -0,0 +1,85 @@
1
+ """
2
+ test_project_lock.py — Tests for the per-project asyncio.Lock helper.
3
+
4
+ The lock is what guarantees "one Claude session at a time per project" — a
5
+ regression here means concurrent fix attempts can race on the working tree.
6
+ """
7
+ import asyncio
8
+ import pytest
9
+
10
+ from sentinel.main import _project_lock, _project_locks
11
+
12
+
13
+ @pytest.fixture(autouse=True)
14
+ def _clear_locks():
15
+ """Each test starts with an empty lock registry."""
16
+ _project_locks.clear()
17
+ yield
18
+ _project_locks.clear()
19
+
20
+
21
+ def test_same_project_returns_same_lock():
22
+ a = _project_lock("proj-1")
23
+ b = _project_lock("proj-1")
24
+ assert a is b
25
+
26
+
27
+ def test_different_projects_get_different_locks():
28
+ a = _project_lock("proj-a")
29
+ b = _project_lock("proj-b")
30
+ assert a is not b
31
+
32
+
33
+ def test_lock_is_an_asyncio_lock():
34
+ assert isinstance(_project_lock("p"), asyncio.Lock)
35
+
36
+
37
+ def test_lock_is_lazily_created():
38
+ assert "lazy" not in _project_locks
39
+ _project_lock("lazy")
40
+ assert "lazy" in _project_locks
41
+
42
+
43
+ def test_serializes_concurrent_async_tasks():
44
+ """Two coroutines acquiring the same project's lock must run sequentially."""
45
+ sequence: list[str] = []
46
+
47
+ async def worker(label: str):
48
+ async with _project_lock("p"):
49
+ sequence.append(f"{label}:start")
50
+ # yield control so the other task gets a chance to interleave (it shouldn't)
51
+ await asyncio.sleep(0)
52
+ sequence.append(f"{label}:end")
53
+
54
+ async def driver():
55
+ await asyncio.gather(worker("A"), worker("B"))
56
+
57
+ asyncio.run(driver())
58
+
59
+ # Whichever started first must finish before the other starts —
60
+ # no interleaving allowed.
61
+ assert sequence in [
62
+ ["A:start", "A:end", "B:start", "B:end"],
63
+ ["B:start", "B:end", "A:start", "A:end"],
64
+ ], f"unexpected interleave: {sequence}"
65
+
66
+
67
+ def test_does_not_serialize_across_projects():
68
+ """Two coroutines acquiring DIFFERENT project locks may interleave."""
69
+ sequence: list[str] = []
70
+
71
+ async def worker(label: str, project: str):
72
+ async with _project_lock(project):
73
+ sequence.append(f"{label}:start")
74
+ await asyncio.sleep(0) # let the other task run
75
+ sequence.append(f"{label}:end")
76
+
77
+ async def driver():
78
+ await asyncio.gather(worker("A", "proj-a"), worker("B", "proj-b"))
79
+
80
+ asyncio.run(driver())
81
+
82
+ # Different locks → tasks may interleave. We expect interleaving here
83
+ # because each task awaits asyncio.sleep(0) which yields control.
84
+ assert sequence == ["A:start", "B:start", "A:end", "B:end"] or \
85
+ sequence == ["B:start", "A:start", "B:end", "A:end"]
@@ -126,3 +126,90 @@ def test_get_all_user_stats(store):
126
126
  assert len(stats) == 2
127
127
  ids = {s["user_id"] for s in stats}
128
128
  assert ids == {"U1", "U2"}
129
+
130
+
131
+ # ── Multi-repo fix tracking (fix_repos table) ─────────────────────────────────
132
+
133
+ def test_fix_repos_record_single(store):
134
+ store.record_fix_repo("fp1", "repo-a", branch="sentinel/fix-fp1",
135
+ commit_hash="abc", pr_url="https://x/pr/1",
136
+ pr_state="open", apply_order=0)
137
+ rows = store.get_fix_repos("fp1")
138
+ assert len(rows) == 1
139
+ assert rows[0]["repo_name"] == "repo-a"
140
+ assert rows[0]["pr_state"] == "open"
141
+ assert rows[0]["apply_order"] == 0
142
+
143
+
144
+ def test_fix_repos_record_multiple_ordered(store):
145
+ store.record_fix_repo("fp1", "lib-typeib", branch="b1", apply_order=0)
146
+ store.record_fix_repo("fp1", "consumer-svc", branch="b2", apply_order=1)
147
+ rows = store.get_fix_repos("fp1")
148
+ assert [r["repo_name"] for r in rows] == ["lib-typeib", "consumer-svc"]
149
+ assert [r["apply_order"] for r in rows] == [0, 1]
150
+
151
+
152
+ def test_fix_repos_upsert_overwrites(store):
153
+ store.record_fix_repo("fp1", "repo-a", pr_state="open", apply_order=0)
154
+ store.record_fix_repo("fp1", "repo-a", pr_state="merged", apply_order=0,
155
+ commit_hash="def")
156
+ rows = store.get_fix_repos("fp1")
157
+ assert len(rows) == 1
158
+ assert rows[0]["pr_state"] == "merged"
159
+ assert rows[0]["commit_hash"] == "def"
160
+
161
+
162
+ def test_fix_repos_empty_for_unknown_fingerprint(store):
163
+ assert store.get_fix_repos("unknown-fp") == []
164
+
165
+
166
+ def test_fix_repos_update_state(store):
167
+ store.record_fix_repo("fp1", "repo-a", pr_state="open", apply_order=0)
168
+ store.update_fix_repo_state("fp1", "repo-a", pr_state="merged",
169
+ commit_hash="xyz")
170
+ rows = store.get_fix_repos("fp1")
171
+ assert rows[0]["pr_state"] == "merged"
172
+ assert rows[0]["commit_hash"] == "xyz"
173
+
174
+
175
+ # ── Per-project Claude session tracking ───────────────────────────────────────
176
+
177
+ def test_claude_session_unset_returns_none(store):
178
+ assert store.get_claude_session("proj-x") is None
179
+
180
+
181
+ def test_claude_session_set_and_get(store):
182
+ store.set_claude_session("proj-x", "session-uuid-1", cost_delta=0.05)
183
+ s = store.get_claude_session("proj-x")
184
+ assert s["session_id"] == "session-uuid-1"
185
+ assert s["total_cost_usd"] == pytest.approx(0.05)
186
+ assert s["turn_count"] == 1
187
+
188
+
189
+ def test_claude_session_accumulates_cost_and_turns(store):
190
+ store.set_claude_session("proj-x", "session-1", cost_delta=0.10)
191
+ store.set_claude_session("proj-x", "session-1", cost_delta=0.07)
192
+ s = store.get_claude_session("proj-x")
193
+ assert s["total_cost_usd"] == pytest.approx(0.17)
194
+ assert s["turn_count"] == 2
195
+
196
+
197
+ def test_claude_session_replaces_session_id(store):
198
+ store.set_claude_session("proj-x", "session-1", cost_delta=0.10)
199
+ store.set_claude_session("proj-x", "session-2", cost_delta=0.05)
200
+ s = store.get_claude_session("proj-x")
201
+ assert s["session_id"] == "session-2"
202
+ assert s["total_cost_usd"] == pytest.approx(0.15)
203
+
204
+
205
+ def test_claude_session_isolated_per_project(store):
206
+ store.set_claude_session("proj-a", "sess-a", cost_delta=0.10)
207
+ store.set_claude_session("proj-b", "sess-b", cost_delta=0.20)
208
+ assert store.get_claude_session("proj-a")["session_id"] == "sess-a"
209
+ assert store.get_claude_session("proj-b")["session_id"] == "sess-b"
210
+
211
+
212
+ def test_claude_session_clear(store):
213
+ store.set_claude_session("proj-x", "sess-1", cost_delta=0.05)
214
+ store.clear_claude_session("proj-x")
215
+ assert store.get_claude_session("proj-x") is None