opencode-pilot 0.21.4 → 0.22.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/Formula/opencode-pilot.rb +2 -2
- package/package.json +1 -1
- package/service/poll-service.js +19 -2
- package/service/poller.js +124 -3
- package/service/utils.js +54 -0
- package/test/integration/session-reuse.test.js +113 -0
- package/test/unit/poller.test.js +187 -0
- package/test/unit/utils.test.js +108 -0
- package/install.sh +0 -246
package/Formula/opencode-pilot.rb
CHANGED
@@ -1,8 +1,8 @@
 class OpencodePilot < Formula
   desc "Automation daemon for OpenCode - polls GitHub/Linear issues and spawns sessions"
   homepage "https://github.com/athal7/opencode-pilot"
-  url "https://github.com/athal7/opencode-pilot/archive/refs/tags/v0.21.
-  sha256 "
+  url "https://github.com/athal7/opencode-pilot/archive/refs/tags/v0.21.4.tar.gz"
+  sha256 "7d21ed495bfb2b737f6a519dc9bfcdb2ef341636b138c6450e1848fd6819c924"
   license "MIT"

   depends_on "node"
package/package.json
CHANGED
package/service/poll-service.js
CHANGED
@@ -10,7 +10,7 @@
  */

 import { loadRepoConfig, getRepoConfig, getAllSources, getToolProviderConfig, resolveRepoForItem, getCleanupTtlDays, getStartupDelay } from "./repo-config.js";
-import { createPoller, pollGenericSource, enrichItemsWithComments, enrichItemsWithMergeable, computeAttentionLabels } from "./poller.js";
+import { createPoller, pollGenericSource, enrichItemsWithComments, enrichItemsWithMergeable, computeAttentionLabels, computeDedupKeys } from "./poller.js";
 import { evaluateReadiness, sortByPriority } from "./readiness.js";
 import { executeAction, buildCommand } from "./actions.js";
 import { debug } from "./logger.js";
@@ -199,7 +199,12 @@ export async function pollOnce(options = {}) {

   debug(`Processing ${sortedItems.length} sorted items`);
   for (const item of sortedItems) {
-    //
+    // Compute dedup keys for cross-source deduplication
+    // Context includes repo for resolving relative GitHub refs (#123)
+    const dedupContext = { repo: item.repository_full_name || item.repository?.nameWithOwner };
+    const dedupKeys = computeDedupKeys(item, dedupContext);
+
+    // Check if already processed (by item ID)
     let existingDirectory = null;
     if (pollerInstance && pollerInstance.isProcessed(item.id)) {
       // Check if item should be reprocessed (reopened, status changed, etc.)
@@ -215,6 +220,16 @@ export async function pollOnce(options = {}) {
         continue;
       }
     }
+
+    // Check for cross-source deduplication (e.g., Linear issue + GitHub PR)
+    // Skip if any of this item's dedup keys were already processed by another item
+    if (pollerInstance && dedupKeys.length > 0) {
+      const existingItemId = pollerInstance.findProcessedByDedupKey(dedupKeys);
+      if (existingItemId && existingItemId !== item.id) {
+        debug(`Skipping ${item.id} - dedup key matches already-processed item ${existingItemId}`);
+        continue;
+      }
+    }

     debug(`Executing action for ${item.id}`);
     // Build action config from source and item (resolves repo from item fields)
@@ -255,6 +270,7 @@ export async function pollOnce(options = {}) {
     // Mark as processed to avoid re-triggering
     // Store item state for detecting reopened/updated items
     // Store directory for worktree reuse when reprocessing
+    // Store dedup keys for cross-source deduplication
     if (pollerInstance) {
       pollerInstance.markProcessed(item.id, {
         repoKey: item.repo_key,
@@ -263,6 +279,7 @@ export async function pollOnce(options = {}) {
         directory: result.directory || null,
         itemState: item.state || item.status || null,
         itemUpdatedAt: item.updated_at || null,
+        dedupKeys: dedupKeys.length > 0 ? dedupKeys : undefined,
       });
     }
     if (result.warning) {
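Note: taken together, the poll loop now computes an item's dedup keys up front and, before executing any action, asks the poller whether another already-processed item owns one of those keys. A minimal illustrative sketch of that flow, using the helpers exported above with hypothetical item data (the item shapes and state-file path are assumptions, not code from the package):

import { createPoller, computeDedupKeys } from "./poller.js";

// Hypothetical items: a Linear issue and the GitHub PR that implements it.
const linearIssue = { id: "linear:abc-uuid", number: "ENG-123", title: "Fix login bug" };
const githubPr = {
  id: "https://github.com/myorg/backend/pull/456",
  number: 456,
  repository_full_name: "myorg/backend",
  title: "ENG-123: Fix login bug",
};

const poller = createPoller({ stateFile: "/tmp/poll-state.json" }); // assumed path

for (const item of [linearIssue, githubPr]) {
  const dedupKeys = computeDedupKeys(item, { repo: item.repository_full_name });
  const existing = poller.findProcessedByDedupKey(dedupKeys);
  if (existing && existing !== item.id) {
    continue; // the PR shares "linear:ENG-123" with the issue, so no second session is spawned
  }
  // ...execute the configured action here, then record the keys for later polls...
  poller.markProcessed(item.id, { dedupKeys });
}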
package/service/poller.js
CHANGED
@@ -12,7 +12,7 @@ import { SSEClientTransport } from "@modelcontextprotocol/sdk/client/sse.js";
 import fs from "fs";
 import path from "path";
 import os from "os";
-import { getNestedValue, hasNonBotFeedback } from "./utils.js";
+import { getNestedValue, hasNonBotFeedback, extractIssueRefs } from "./utils.js";

 /**
  * Expand template string with item fields
@@ -674,6 +674,49 @@ export function computeAttentionLabels(items, source) {
   });
 }

+/**
+ * Compute deduplication keys for an item
+ *
+ * Dedup keys allow cross-source deduplication: when an issue and its linked PR
+ * both trigger, we can detect they represent the same work.
+ *
+ * Generates keys from:
+ * 1. The item's own canonical identifier (Linear ID, GitHub repo#number)
+ * 2. Issue references found in title/body (e.g., PR mentioning "Fixes ENG-123")
+ *
+ * @param {object} item - Item from a source
+ * @param {object} [context] - Context for resolving references
+ * @param {string} [context.repo] - Repository (e.g., "org/repo") for GitHub relative refs
+ * @returns {string[]} Array of dedup keys (e.g., ["linear:ENG-123", "github:org/repo#456"])
+ */
+export function computeDedupKeys(item, context = {}) {
+  const keys = new Set();
+
+  // 1. Generate canonical key for the item itself
+
+  // Linear items: use the "number" field which is the issue identifier (e.g., "ENG-123")
+  // Linear preset maps this from url using regex: "url:/([A-Z0-9]+-[0-9]+)/"
+  if (item.number && typeof item.number === 'string' && /^[A-Z][A-Z0-9]*-\d+$/.test(item.number)) {
+    keys.add(`linear:${item.number}`);
+  }
+
+  // GitHub items: use repo + number
+  // GitHub items have repository.nameWithOwner or repository_full_name (after mapping)
+  const repo = item.repository_full_name || item.repository?.nameWithOwner || context.repo;
+  if (repo && item.number && typeof item.number === 'number') {
+    keys.add(`github:${repo}#${item.number}`);
+  }
+
+  // 2. Extract issue references from title and body
+  const textToSearch = [item.title, item.body].filter(Boolean).join('\n');
+  const issueRefs = extractIssueRefs(textToSearch, { repo });
+  for (const ref of issueRefs) {
+    keys.add(ref);
+  }
+
+  return Array.from(keys);
+}
+
 /**
  * Create a poller instance with state tracking
  *
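For a GitHub PR whose title and body reference other work items, the function above produces one key per recognized reference plus the PR's own canonical key. A hypothetical example (item shape assumed; output inferred from the logic above):

computeDedupKeys({
  number: 456,
  repository_full_name: "myorg/backend",
  title: "ENG-123: Fix login bug",
  body: "Closes #789",
});
// => ["github:myorg/backend#456", "linear:ENG-123", "github:myorg/backend#789"]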
@@ -688,17 +731,35 @@ export function createPoller(options = {}) {

   // Load existing state
   let processedItems = new Map();
+  // Dedup key index: maps dedup keys to item IDs for cross-source deduplication
+  let dedupKeyIndex = new Map();
+
   if (fs.existsSync(stateFile)) {
     try {
       const state = JSON.parse(fs.readFileSync(stateFile, 'utf-8'));
       if (state.processed) {
         processedItems = new Map(Object.entries(state.processed));
       }
+      if (state.dedupKeys) {
+        dedupKeyIndex = new Map(Object.entries(state.dedupKeys));
+      }
     } catch {
       // Start fresh if state is corrupted
     }
   }

+  // Rebuild dedup key index from processed items if not in state file
+  // (handles migration from older state files)
+  if (dedupKeyIndex.size === 0 && processedItems.size > 0) {
+    for (const [itemId, meta] of processedItems) {
+      if (meta.dedupKeys && Array.isArray(meta.dedupKeys)) {
+        for (const key of meta.dedupKeys) {
+          dedupKeyIndex.set(key, itemId);
+        }
+      }
+    }
+  }
+
   function saveState() {
     const dir = path.dirname(stateFile);
     if (!fs.existsSync(dir)) {
@@ -706,6 +767,7 @@ export function createPoller(options = {}) {
     }
     const state = {
       processed: Object.fromEntries(processedItems),
+      dedupKeys: Object.fromEntries(dedupKeyIndex),
       savedAt: new Date().toISOString(),
     };
     fs.writeFileSync(stateFile, JSON.stringify(state, null, 2));
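With these changes the persisted state file gains a top-level dedupKeys index alongside processed, mapping each key back to the item that owns it. A sketch of the resulting JSON (field values illustrative only):

{
  "processed": {
    "linear:abc-uuid": {
      "processedAt": "2024-01-01T00:00:00.000Z",
      "lastSeenAt": "2024-01-01T00:00:00.000Z",
      "dedupKeys": ["linear:ENG-123"]
    }
  },
  "dedupKeys": {
    "linear:ENG-123": "linear:abc-uuid"
  },
  "savedAt": "2024-01-01T00:00:00.000Z"
}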
@@ -719,6 +781,22 @@ export function createPoller(options = {}) {
       return processedItems.has(itemId);
     },

+    /**
+     * Check if any of the given dedup keys have been processed
+     * Used for cross-source deduplication (e.g., Linear issue + GitHub PR)
+     * @param {string[]} dedupKeys - Array of dedup keys to check
+     * @returns {string|null} The item ID that owns a matching dedup key, or null
+     */
+    findProcessedByDedupKey(dedupKeys) {
+      for (const key of dedupKeys) {
+        const itemId = dedupKeyIndex.get(key);
+        if (itemId && processedItems.has(itemId)) {
+          return itemId;
+        }
+      }
+      return null;
+    },
+
     /**
      * Get metadata for a processed item
      * @param {string} itemId - Item ID
@@ -730,13 +808,26 @@ export function createPoller(options = {}) {

     /**
      * Mark an item as processed
+     * @param {string} itemId - Item ID
+     * @param {object} [metadata] - Additional metadata to store
+     * @param {string[]} [metadata.dedupKeys] - Dedup keys for cross-source deduplication
      */
     markProcessed(itemId, metadata = {}) {
-
+      // Store dedup keys in item metadata
+      const itemMeta = {
        processedAt: new Date().toISOString(),
        lastSeenAt: new Date().toISOString(),
        ...metadata,
-      }
+      };
+      processedItems.set(itemId, itemMeta);
+
+      // Index dedup keys for fast lookup
+      if (metadata.dedupKeys && Array.isArray(metadata.dedupKeys)) {
+        for (const key of metadata.dedupKeys) {
+          dedupKeyIndex.set(key, itemId);
+        }
+      }
+
       saveState();
     },
@@ -814,8 +905,16 @@ export function createPoller(options = {}) {

     /**
      * Clear a specific item from processed state
+     * Also removes its dedup keys from the index
      */
     clearProcessed(itemId) {
+      // Remove dedup keys from index first
+      const meta = processedItems.get(itemId);
+      if (meta && meta.dedupKeys && Array.isArray(meta.dedupKeys)) {
+        for (const key of meta.dedupKeys) {
+          dedupKeyIndex.delete(key);
+        }
+      }
       processedItems.delete(itemId);
       saveState();
     },
@@ -825,6 +924,7 @@ export function createPoller(options = {}) {
      */
     clearState() {
       processedItems.clear();
+      dedupKeyIndex.clear();
       saveState();
     },
@@ -851,6 +951,7 @@ export function createPoller(options = {}) {

     /**
      * Clear all entries for a specific source
+     * Also removes associated dedup keys from the index
      * @param {string} sourceName - Source name
      * @returns {number} Number of entries removed
      */
@@ -858,6 +959,12 @@ export function createPoller(options = {}) {
       let removed = 0;
       for (const [id, meta] of processedItems) {
         if (meta.source === sourceName) {
+          // Remove dedup keys from index
+          if (meta.dedupKeys && Array.isArray(meta.dedupKeys)) {
+            for (const key of meta.dedupKeys) {
+              dedupKeyIndex.delete(key);
+            }
+          }
           processedItems.delete(id);
           removed++;
         }
@@ -868,6 +975,7 @@ export function createPoller(options = {}) {

     /**
      * Remove entries older than ttlDays
+     * Also removes associated dedup keys from the index
      * @param {number} [ttlDays=30] - Days before expiration
      * @returns {number} Number of entries removed
      */
@@ -877,6 +985,12 @@ export function createPoller(options = {}) {
       for (const [id, meta] of processedItems) {
         const processedAt = new Date(meta.processedAt).getTime();
         if (processedAt < cutoffMs) {
+          // Remove dedup keys from index
+          if (meta.dedupKeys && Array.isArray(meta.dedupKeys)) {
+            for (const key of meta.dedupKeys) {
+              dedupKeyIndex.delete(key);
+            }
+          }
           processedItems.delete(id);
           removed++;
         }
@@ -888,6 +1002,7 @@ export function createPoller(options = {}) {
     /**
      * Remove entries for a source that are no longer in current items
      * Only removes entries older than minAgeDays to avoid race conditions
+     * Also removes associated dedup keys from the index
      * @param {string} sourceName - Source name to clean
      * @param {string[]} currentItemIds - Current item IDs from source
      * @param {number} [minAgeDays=1] - Minimum age before cleanup (0 = immediate)
@@ -903,6 +1018,12 @@ export function createPoller(options = {}) {
         const processedAt = new Date(meta.processedAt).getTime();
         // Use <= to allow immediate cleanup when minAgeDays=0
         if (processedAt <= cutoffTimestamp) {
+          // Remove dedup keys from index
+          if (meta.dedupKeys && Array.isArray(meta.dedupKeys)) {
+            for (const key of meta.dedupKeys) {
+              dedupKeyIndex.delete(key);
+            }
+          }
           processedItems.delete(id);
           removed++;
         }
package/service/utils.js
CHANGED
@@ -109,6 +109,60 @@ export function isReply(feedback) {
   return feedback.in_reply_to_id !== undefined && feedback.in_reply_to_id !== null;
 }

+/**
+ * Extract issue references from text (PR title, body, etc.)
+ *
+ * Used for cross-source deduplication: when a PR references an issue,
+ * both should be treated as the same work item.
+ *
+ * Supported patterns:
+ * - Linear issues: ENG-123, PROJ-456 (uppercase prefix, hyphen, numbers)
+ * - GitHub issues: #123, org/repo#123, Fixes #123, Closes org/repo#456
+ *
+ * @param {string} text - Text to extract references from
+ * @param {object} [context] - Optional context for resolving relative refs
+ * @param {string} [context.repo] - Repository (e.g., "org/repo") for resolving #123
+ * @returns {string[]} Array of normalized issue references (e.g., ["linear:ENG-123", "github:org/repo#123"])
+ */
+export function extractIssueRefs(text, context = {}) {
+  if (!text || typeof text !== 'string') {
+    return [];
+  }
+
+  const refs = new Set();
+
+  // Linear issue pattern: ENG-123, PROJ-456 (1-10 char uppercase prefix)
+  // Must be word-bounded to avoid matching random strings
+  const linearPattern = /\b([A-Z][A-Z0-9]{0,9}-\d+)\b/g;
+  let match;
+  while ((match = linearPattern.exec(text)) !== null) {
+    refs.add(`linear:${match[1]}`);
+  }
+
+  // GitHub issue patterns:
+  // - Full: org/repo#123
+  // - Relative: #123 (needs context.repo)
+  // - Keywords: Fixes #123, Closes org/repo#456, Resolves #789
+
+  // Full repo reference: org/repo#123
+  const fullGithubPattern = /\b([a-zA-Z0-9_.-]+\/[a-zA-Z0-9_.-]+)#(\d+)\b/g;
+  while ((match = fullGithubPattern.exec(text)) !== null) {
+    refs.add(`github:${match[1]}#${match[2]}`);
+  }
+
+  // Relative reference: #123 (only if context.repo is provided)
+  if (context.repo) {
+    // Match #123 but not org/repo#123 (already handled above)
+    // Use negative lookbehind to avoid matching the # in full refs
+    const relativePattern = /(?<![a-zA-Z0-9_.-]\/[a-zA-Z0-9_.-]+)#(\d+)\b/g;
+    while ((match = relativePattern.exec(text)) !== null) {
+      refs.add(`github:${context.repo}#${match[1]}`);
+    }
+  }
+
+  return Array.from(refs);
+}
+
 /**
  * Check if a PR/issue has actionable feedback
  *
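Because the Linear pattern is only word-bounded, it also picks up identifiers embedded in Linear URLs, and the relative GitHub pattern resolves bare #N references against context.repo. An illustrative call (input hypothetical; output inferred from the regexes above):

extractIssueRefs(
  "Implements https://linear.app/acme/issue/ENG-123/fix-login\n\nCloses #45",
  { repo: "myorg/backend" }
);
// => ["linear:ENG-123", "github:myorg/backend#45"]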
package/test/integration/session-reuse.test.js
CHANGED
@@ -620,3 +620,116 @@ describe("integration: worktree creation with worktree_name", () => {
     assert.strictEqual(sessionDirectory, existingWorktreeDir, "Session should use existing directory");
   });
 });
+
+describe("integration: cross-source deduplication", () => {
+  /**
+   * These tests verify cross-source deduplication:
+   * When a Linear issue and a GitHub PR are linked (PR mentions Linear ID),
+   * only one session should be created.
+   */
+
+  it("computeDedupKeys extracts Linear refs from GitHub PR", async () => {
+    const { computeDedupKeys } = await import("../../service/poller.js");
+
+    // Simulated GitHub PR that mentions a Linear issue
+    const pr = {
+      id: "https://github.com/myorg/backend/pull/456",
+      number: 456,
+      repository_full_name: "myorg/backend",
+      title: "ENG-123: Implement new feature",
+      body: "This PR implements the feature requested in ENG-123.\n\nCloses #789",
+    };
+
+    const keys = computeDedupKeys(pr);
+
+    // Should include:
+    // 1. PR's own canonical key
+    // 2. Linear issue ref from title
+    // 3. GitHub issue ref from body (relative, needs context)
+    assert.ok(keys.includes("github:myorg/backend#456"), "Should have PR's canonical key");
+    assert.ok(keys.includes("linear:ENG-123"), "Should extract Linear ref from title");
+  });
+
+  it("computeDedupKeys generates canonical key for Linear issues", async () => {
+    const { computeDedupKeys } = await import("../../service/poller.js");
+
+    // Simulated Linear issue (number is the identifier like "ENG-123")
+    const issue = {
+      id: "linear:abc-123-uuid",
+      number: "ENG-123", // Linear preset extracts this from URL
+      title: "Implement new feature",
+      body: "Description of the feature",
+    };
+
+    const keys = computeDedupKeys(issue);
+
+    assert.ok(keys.includes("linear:ENG-123"), "Should have Linear issue's canonical key");
+  });
+
+  it("poller detects duplicate via shared dedup key", async () => {
+    const { createPoller } = await import("../../service/poller.js");
+    const { mkdtempSync, rmSync } = await import("fs");
+    const { join } = await import("path");
+    const { tmpdir } = await import("os");
+
+    const tempDir = mkdtempSync(join(tmpdir(), "dedup-test-"));
+    const stateFile = join(tempDir, "poll-state.json");
+
+    try {
+      const poller = createPoller({ stateFile });
+
+      // Linear issue ENG-123 is processed first
+      poller.markProcessed("linear:abc-uuid", {
+        source: "linear/my-issues",
+        dedupKeys: ["linear:ENG-123"],
+      });
+
+      // GitHub PR comes in with ENG-123 in title
+      const prDedupKeys = ["github:myorg/backend#456", "linear:ENG-123"];
+
+      // Should find the Linear issue via shared dedup key
+      const existingItemId = poller.findProcessedByDedupKey(prDedupKeys);
+
+      assert.strictEqual(existingItemId, "linear:abc-uuid",
+        "PR should be detected as duplicate of Linear issue via shared ENG-123 key");
+    } finally {
+      rmSync(tempDir, { recursive: true, force: true });
+    }
+  });
+
+  it("dedup key index is rebuilt on load if missing from old state file", async () => {
+    const { createPoller } = await import("../../service/poller.js");
+    const { mkdtempSync, rmSync, writeFileSync } = await import("fs");
+    const { join } = await import("path");
+    const { tmpdir } = await import("os");
+
+    const tempDir = mkdtempSync(join(tmpdir(), "dedup-migration-test-"));
+    const stateFile = join(tempDir, "poll-state.json");
+
+    try {
+      // Simulate old state file without dedupKeys index (migration scenario)
+      const oldState = {
+        processed: {
+          "linear:abc-uuid": {
+            processedAt: new Date().toISOString(),
+            source: "linear",
+            dedupKeys: ["linear:ENG-123"], // Keys are in item metadata
+          },
+        },
+        // No dedupKeys index at top level
+        savedAt: new Date().toISOString(),
+      };
+      writeFileSync(stateFile, JSON.stringify(oldState));
+
+      // Load poller - should rebuild index from item metadata
+      const poller = createPoller({ stateFile });
+
+      // Should still be able to find by dedup key
+      const foundId = poller.findProcessedByDedupKey(["linear:ENG-123"]);
+      assert.strictEqual(foundId, "linear:abc-uuid",
+        "Should rebuild dedup index from processed items on load");
+    } finally {
+      rmSync(tempDir, { recursive: true, force: true });
+    }
+  });
+});
package/test/unit/poller.test.js
CHANGED
@@ -374,6 +374,193 @@ describe('poller.js', () => {
   });
 });

+  describe('cross-source deduplication', () => {
+    test('findProcessedByDedupKey finds item by dedup key', async () => {
+      const { createPoller } = await import('../../service/poller.js');
+
+      const poller = createPoller({ stateFile });
+      poller.markProcessed('linear:abc123', {
+        source: 'linear',
+        dedupKeys: ['linear:ENG-123'],
+      });
+
+      // Should find the item by its dedup key
+      const foundId = poller.findProcessedByDedupKey(['linear:ENG-123']);
+      assert.strictEqual(foundId, 'linear:abc123');
+    });
+
+    test('findProcessedByDedupKey returns null when no match', async () => {
+      const { createPoller } = await import('../../service/poller.js');
+
+      const poller = createPoller({ stateFile });
+      poller.markProcessed('linear:abc123', {
+        source: 'linear',
+        dedupKeys: ['linear:ENG-123'],
+      });
+
+      // Different dedup key should not match
+      const foundId = poller.findProcessedByDedupKey(['linear:ENG-456']);
+      assert.strictEqual(foundId, null);
+    });
+
+    test('findProcessedByDedupKey enables cross-source dedup (Linear + GitHub PR)', async () => {
+      const { createPoller } = await import('../../service/poller.js');
+
+      const poller = createPoller({ stateFile });
+
+      // Linear issue ENG-123 is processed first
+      poller.markProcessed('linear:abc123', {
+        source: 'linear/my-issues',
+        dedupKeys: ['linear:ENG-123'],
+      });
+
+      // GitHub PR mentioning ENG-123 comes later - should find the Linear item
+      const prDedupKeys = ['github:myorg/backend#456', 'linear:ENG-123']; // PR has ENG-123 in title
+      const foundId = poller.findProcessedByDedupKey(prDedupKeys);
+
+      assert.strictEqual(foundId, 'linear:abc123', 'PR should match Linear issue by shared dedup key');
+    });
+
+    test('dedup key index persists across poller instances', async () => {
+      const { createPoller } = await import('../../service/poller.js');
+
+      const poller1 = createPoller({ stateFile });
+      poller1.markProcessed('linear:abc123', {
+        source: 'linear',
+        dedupKeys: ['linear:ENG-123'],
+      });
+
+      // New poller instance should have the dedup index
+      const poller2 = createPoller({ stateFile });
+      const foundId = poller2.findProcessedByDedupKey(['linear:ENG-123']);
+      assert.strictEqual(foundId, 'linear:abc123');
+    });
+
+    test('clearProcessed removes dedup keys from index', async () => {
+      const { createPoller } = await import('../../service/poller.js');
+
+      const poller = createPoller({ stateFile });
+      poller.markProcessed('linear:abc123', {
+        source: 'linear',
+        dedupKeys: ['linear:ENG-123'],
+      });
+
+      // Verify it's there
+      assert.strictEqual(poller.findProcessedByDedupKey(['linear:ENG-123']), 'linear:abc123');
+
+      // Clear the item
+      poller.clearProcessed('linear:abc123');
+
+      // Dedup key should be gone
+      assert.strictEqual(poller.findProcessedByDedupKey(['linear:ENG-123']), null);
+    });
+
+    test('clearState removes all dedup keys', async () => {
+      const { createPoller } = await import('../../service/poller.js');
+
+      const poller = createPoller({ stateFile });
+      poller.markProcessed('item-1', { dedupKeys: ['key-1', 'key-2'] });
+      poller.markProcessed('item-2', { dedupKeys: ['key-3'] });
+
+      poller.clearState();
+
+      assert.strictEqual(poller.findProcessedByDedupKey(['key-1']), null);
+      assert.strictEqual(poller.findProcessedByDedupKey(['key-2']), null);
+      assert.strictEqual(poller.findProcessedByDedupKey(['key-3']), null);
+    });
+
+    test('clearBySource removes dedup keys for that source only', async () => {
+      const { createPoller } = await import('../../service/poller.js');
+
+      const poller = createPoller({ stateFile });
+      poller.markProcessed('item-1', { source: 'linear', dedupKeys: ['linear:ENG-123'] });
+      poller.markProcessed('item-2', { source: 'github', dedupKeys: ['github:org/repo#456'] });
+
+      poller.clearBySource('linear');
+
+      // Linear dedup key should be gone
+      assert.strictEqual(poller.findProcessedByDedupKey(['linear:ENG-123']), null);
+      // GitHub dedup key should still exist
+      assert.strictEqual(poller.findProcessedByDedupKey(['github:org/repo#456']), 'item-2');
+    });
+  });
+
+  describe('computeDedupKeys', () => {
+    test('generates Linear dedup key from number field', async () => {
+      const { computeDedupKeys } = await import('../../service/poller.js');
+
+      // Linear item with number field (extracted from URL by preset mapping)
+      const item = {
+        id: 'abc-123-uuid',
+        number: 'ENG-123',
+        title: 'Fix the bug',
+      };
+
+      const keys = computeDedupKeys(item);
+      assert.ok(keys.includes('linear:ENG-123'));
+    });
+
+    test('generates GitHub dedup key from repo + number', async () => {
+      const { computeDedupKeys } = await import('../../service/poller.js');
+
+      const item = {
+        id: 'https://github.com/myorg/backend/issues/123',
+        number: 123,
+        repository_full_name: 'myorg/backend',
+        title: 'Fix the bug',
+      };
+
+      const keys = computeDedupKeys(item);
+      assert.ok(keys.includes('github:myorg/backend#123'));
+    });
+
+    test('extracts Linear refs from PR title/body', async () => {
+      const { computeDedupKeys } = await import('../../service/poller.js');
+
+      const item = {
+        id: 'https://github.com/myorg/backend/pull/456',
+        number: 456,
+        repository_full_name: 'myorg/backend',
+        title: 'ENG-123: Fix the bug',
+        body: 'This PR fixes the issue described in ENG-123.',
+      };
+
+      const keys = computeDedupKeys(item);
+      // Should have both the PR's own key and the Linear ref
+      assert.ok(keys.includes('github:myorg/backend#456'), 'Should have PR key');
+      assert.ok(keys.includes('linear:ENG-123'), 'Should extract Linear ref from title/body');
+    });
+
+    test('extracts GitHub issue refs from PR body', async () => {
+      const { computeDedupKeys } = await import('../../service/poller.js');
+
+      const item = {
+        id: 'https://github.com/myorg/backend/pull/456',
+        number: 456,
+        repository_full_name: 'myorg/backend',
+        title: 'Fix the bug',
+        body: 'Fixes #123',
+      };
+
+      const keys = computeDedupKeys(item, { repo: 'myorg/backend' });
+      assert.ok(keys.includes('github:myorg/backend#456'), 'Should have PR key');
+      assert.ok(keys.includes('github:myorg/backend#123'), 'Should extract issue ref from body');
+    });
+
+    test('handles items without extractable refs', async () => {
+      const { computeDedupKeys } = await import('../../service/poller.js');
+
+      const item = {
+        id: 'reminder:abc123',
+        title: 'Buy groceries',
+      };
+
+      const keys = computeDedupKeys(item);
+      // Should return empty array - no recognizable refs
+      assert.deepStrictEqual(keys, []);
+    });
+  });
+
 describe('status tracking', () => {
   test('shouldReprocess returns false for item with same state', async () => {
     const { createPoller } = await import('../../service/poller.js');
package/test/unit/utils.test.js
CHANGED
@@ -290,6 +290,114 @@ describe('utils.js', () => {
   });
 });

+  describe('extractIssueRefs', () => {
+    test('extracts Linear issue IDs from text', async () => {
+      const { extractIssueRefs } = await import('../../service/utils.js');
+
+      assert.deepStrictEqual(
+        extractIssueRefs('Fix bug in ENG-123'),
+        ['linear:ENG-123']
+      );
+      assert.deepStrictEqual(
+        extractIssueRefs('Implement PROJ-456 feature'),
+        ['linear:PROJ-456']
+      );
+    });
+
+    test('extracts multiple Linear IDs', async () => {
+      const { extractIssueRefs } = await import('../../service/utils.js');
+
+      const refs = extractIssueRefs('Fixes ENG-123 and also addresses ENG-456');
+      assert.ok(refs.includes('linear:ENG-123'));
+      assert.ok(refs.includes('linear:ENG-456'));
+      assert.strictEqual(refs.length, 2);
+    });
+
+    test('extracts full GitHub issue references', async () => {
+      const { extractIssueRefs } = await import('../../service/utils.js');
+
+      assert.deepStrictEqual(
+        extractIssueRefs('Fixes myorg/backend#123'),
+        ['github:myorg/backend#123']
+      );
+    });
+
+    test('extracts relative GitHub issue references with context', async () => {
+      const { extractIssueRefs } = await import('../../service/utils.js');
+
+      const refs = extractIssueRefs('Fixes #123', { repo: 'myorg/backend' });
+      assert.deepStrictEqual(refs, ['github:myorg/backend#123']);
+    });
+
+    test('ignores relative GitHub refs without context', async () => {
+      const { extractIssueRefs } = await import('../../service/utils.js');
+
+      // Without context.repo, relative refs like #123 cannot be resolved
+      const refs = extractIssueRefs('Fixes #123');
+      assert.deepStrictEqual(refs, []);
+    });
+
+    test('extracts mixed Linear and GitHub refs', async () => {
+      const { extractIssueRefs } = await import('../../service/utils.js');
+
+      const refs = extractIssueRefs('Fixes ENG-123 and closes myorg/backend#456');
+      assert.ok(refs.includes('linear:ENG-123'));
+      assert.ok(refs.includes('github:myorg/backend#456'));
+      assert.strictEqual(refs.length, 2);
+    });
+
+    test('deduplicates repeated references', async () => {
+      const { extractIssueRefs } = await import('../../service/utils.js');
+
+      const refs = extractIssueRefs('ENG-123 mentioned again ENG-123');
+      assert.deepStrictEqual(refs, ['linear:ENG-123']);
+    });
+
+    test('handles typical PR body with closing keywords', async () => {
+      const { extractIssueRefs } = await import('../../service/utils.js');
+
+      const prBody = `## Summary
+This PR implements the feature requested in ENG-123.
+
+Closes #456
+`;
+
+      const refs = extractIssueRefs(prBody, { repo: 'myorg/backend' });
+      assert.ok(refs.includes('linear:ENG-123'));
+      assert.ok(refs.includes('github:myorg/backend#456'));
+    });
+
+    test('handles empty/null/undefined input', async () => {
+      const { extractIssueRefs } = await import('../../service/utils.js');
+
+      assert.deepStrictEqual(extractIssueRefs(''), []);
+      assert.deepStrictEqual(extractIssueRefs(null), []);
+      assert.deepStrictEqual(extractIssueRefs(undefined), []);
+    });
+
+    test('does not match partial patterns', async () => {
+      const { extractIssueRefs } = await import('../../service/utils.js');
+
+      // Should not match: lowercase, no hyphen
+      assert.deepStrictEqual(extractIssueRefs('eng-123'), []); // lowercase
+      assert.deepStrictEqual(extractIssueRefs('ENG123'), []); // no hyphen
+      // Underscores are word characters, so \b won't match - ENG-123 inside underscores won't be extracted
+      assert.deepStrictEqual(extractIssueRefs('PREFIX_ENG-123_SUFFIX'), []); // underscores prevent word boundary
+      // But spaces, parentheses, etc. are fine
+      assert.deepStrictEqual(extractIssueRefs('(ENG-123)'), ['linear:ENG-123']);
+      assert.deepStrictEqual(extractIssueRefs('See ENG-123 for details'), ['linear:ENG-123']);
+    });
+
+    test('handles various Linear team prefixes', async () => {
+      const { extractIssueRefs } = await import('../../service/utils.js');
+
+      // Various real-world team prefix patterns
+      assert.deepStrictEqual(extractIssueRefs('A-1'), ['linear:A-1']); // shortest
+      assert.deepStrictEqual(extractIssueRefs('PLATFORM-99999'), ['linear:PLATFORM-99999']); // longer
+      assert.deepStrictEqual(extractIssueRefs('DEV2-123'), ['linear:DEV2-123']); // with numbers
+    });
+  });
+
 describe('getNestedValue', () => {
   test('gets top-level value', async () => {
     const { getNestedValue } = await import('../../service/utils.js');
package/install.sh
DELETED
@@ -1,246 +0,0 @@
-#!/usr/bin/env bash
-#
-# Install opencode-ntfy plugin and callback service
-#
-# Usage:
-#   curl -fsSL https://raw.githubusercontent.com/athal7/opencode-ntfy/main/install.sh | bash
-#
-# Or from a local clone:
-#   ./install.sh
-#
-
-set -euo pipefail
-
-REPO="athal7/opencode-ntfy"
-PLUGIN_NAME="opencode-ntfy"
-PLUGIN_DIR="$HOME/.config/opencode/plugins/$PLUGIN_NAME"
-SERVICE_DIR="$HOME/.local/share/opencode-ntfy"
-CONFIG_FILE="$HOME/.config/opencode/opencode.json"
-PLIST_DIR="$HOME/Library/LaunchAgents"
-PLIST_NAME="io.opencode.ntfy.plist"
-PLUGIN_FILES="index.js notifier.js callback.js hostname.js nonces.js config.js service-client.js"
-SERVICE_FILES="server.js"
-
-echo "Installing $PLUGIN_NAME..."
-echo ""
-
-# Create directories
-mkdir -p "$PLUGIN_DIR"
-mkdir -p "$SERVICE_DIR"
-
-# Check if we're running from a local clone or need to download
-SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}" 2>/dev/null)" && pwd 2>/dev/null)" || SCRIPT_DIR=""
-
-if [[ -n "$SCRIPT_DIR" ]] && [[ -f "$SCRIPT_DIR/plugin/index.js" ]]; then
-  # Local install from clone
-  echo "Installing from local directory..."
-
-  echo ""
-  echo "Plugin files:"
-  for file in $PLUGIN_FILES; do
-    if [[ -f "$SCRIPT_DIR/plugin/$file" ]]; then
-      cp "$SCRIPT_DIR/plugin/$file" "$PLUGIN_DIR/$file"
-      echo " Installed: plugin/$file -> $PLUGIN_DIR/$file"
-    fi
-  done
-
-  echo ""
-  echo "Service files:"
-  for file in $SERVICE_FILES; do
-    if [[ -f "$SCRIPT_DIR/service/$file" ]]; then
-      cp "$SCRIPT_DIR/service/$file" "$SERVICE_DIR/$file"
-      echo " Installed: service/$file -> $SERVICE_DIR/$file"
-    fi
-  done
-else
-  # Remote install - download from GitHub
-  echo "Downloading plugin files from GitHub..."
-
-  for file in $PLUGIN_FILES; do
-    echo " Downloading: plugin/$file"
-    if curl -fsSL "https://raw.githubusercontent.com/$REPO/main/plugin/$file" -o "$PLUGIN_DIR/$file"; then
-      echo " Installed: $file"
-    else
-      echo " ERROR: Failed to download $file"
-      exit 1
-    fi
-  done
-
-  echo ""
-  echo "Downloading service files from GitHub..."
-
-  for file in $SERVICE_FILES; do
-    echo " Downloading: service/$file"
-    if curl -fsSL "https://raw.githubusercontent.com/$REPO/main/service/$file" -o "$SERVICE_DIR/$file"; then
-      echo " Installed: $file"
-    else
-      echo " ERROR: Failed to download $file"
-      exit 1
-    fi
-  done
-fi
-
-echo ""
-echo "Plugin files installed to: $PLUGIN_DIR"
-echo "Service files installed to: $SERVICE_DIR"
-
-# Install LaunchAgent plist (macOS only)
-if [[ "$(uname)" == "Darwin" ]]; then
-  echo ""
-  echo "Installing LaunchAgent for callback service..."
-
-  mkdir -p "$PLIST_DIR"
-
-  # Find node path (handle both Intel and Apple Silicon Macs)
-  NODE_PATH=$(command -v node 2>/dev/null)
-  if [[ -z "$NODE_PATH" ]]; then
-    # Try Homebrew paths
-    if [[ -x "/opt/homebrew/bin/node" ]]; then
-      NODE_PATH="/opt/homebrew/bin/node"
-    elif [[ -x "/usr/local/bin/node" ]]; then
-      NODE_PATH="/usr/local/bin/node"
-    else
-      echo " WARNING: node not found, please install Node.js"
-      NODE_PATH="/usr/local/bin/node"
-    fi
-  fi
-
-  # Generate plist with correct paths
-  cat > "$PLIST_DIR/$PLIST_NAME" << EOF
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-  <key>Label</key>
-  <string>io.opencode.ntfy</string>
-
-  <key>ProgramArguments</key>
-  <array>
-    <string>$NODE_PATH</string>
-    <string>$SERVICE_DIR/server.js</string>
-  </array>
-
-  <key>RunAtLoad</key>
-  <true/>
-
-  <key>KeepAlive</key>
-  <true/>
-
-  <key>StandardOutPath</key>
-  <string>$HOME/.local/share/opencode-ntfy/opencode-ntfy.log</string>
-
-  <key>StandardErrorPath</key>
-  <string>$HOME/.local/share/opencode-ntfy/opencode-ntfy.log</string>
-
-  <key>WorkingDirectory</key>
-  <string>$SERVICE_DIR</string>
-</dict>
-</plist>
-EOF
-
-  echo " LaunchAgent installed to: $PLIST_DIR/$PLIST_NAME"
-  echo ""
-  echo " To start the callback service:"
-  echo " launchctl load $PLIST_DIR/$PLIST_NAME"
-  echo ""
-  echo " To stop the callback service:"
-  echo " launchctl unload $PLIST_DIR/$PLIST_NAME"
-fi
-
-# Configure opencode.json
-echo ""
-echo "Configuring OpenCode..."
-
-if [[ -f "$CONFIG_FILE" ]]; then
-  # Check if plugin already configured
-  if grep -q "$PLUGIN_DIR" "$CONFIG_FILE" 2>/dev/null; then
-    echo " Plugin already configured in opencode.json"
-  else
-    echo ""
-    echo " Would you like to add the plugin to opencode.json? [Y/n]"
-    read -r response </dev/tty || response="y"
-    if [[ "$response" != "n" && "$response" != "N" ]]; then
-      # Use node to update JSON safely
-      if command -v node >/dev/null 2>&1; then
-        node -e "
-          const fs = require('fs');
-          const configPath = '$CONFIG_FILE';
-          const pluginDir = '$PLUGIN_DIR';
-
-          let config;
-          try {
-            config = JSON.parse(fs.readFileSync(configPath, 'utf8'));
-          } catch {
-            config = {};
-          }
-
-          config.plugin = config.plugin || [];
-          if (!config.plugin.includes(pluginDir)) {
-            config.plugin.push(pluginDir);
-          }
-
-          fs.writeFileSync(configPath, JSON.stringify(config, null, 2) + '\n');
-        "
-        echo " Plugin added to opencode.json"
-      else
-        echo " WARNING: node not found, please manually add to opencode.json:"
-        echo ""
-        echo " \"plugin\": [\"$PLUGIN_DIR\"]"
-      fi
-    else
-      echo " Skipped. You can manually add the plugin path to opencode.json later."
-    fi
-  fi
-else
-  echo ""
-  echo " No opencode.json found. Create one with the plugin configured? [Y/n]"
-  read -r response </dev/tty || response="y"
-  if [[ "$response" != "n" && "$response" != "N" ]]; then
-    mkdir -p "$(dirname "$CONFIG_FILE")"
-    cat > "$CONFIG_FILE" << EOF
-{
-  "plugin": ["$PLUGIN_DIR"]
-}
-EOF
-    echo " Created $CONFIG_FILE"
-  else
-    echo " Skipped. You can create opencode.json later with:"
-    echo ""
-    echo " {\"plugin\": [\"$PLUGIN_DIR\"]}"
-  fi
-fi
-
-# Environment variable check and guidance
-echo ""
-echo "========================================"
-echo " Installation complete!"
-echo "========================================"
-echo ""
-
-# Check if NTFY_TOPIC is set
-if [[ -n "${NTFY_TOPIC:-}" ]]; then
-  echo "NTFY_TOPIC is set: $NTFY_TOPIC"
-  echo ""
-  echo "The plugin is ready to use!"
-else
-  echo "REQUIRED: Set NTFY_TOPIC in your environment."
-  echo ""
-  echo "Add to ~/.env (if using direnv) or your shell profile:"
-  echo ""
-  echo " export NTFY_TOPIC=your-secret-topic"
-fi
-
-echo ""
-echo "Optional configuration:"
-echo " NTFY_SERVER=https://ntfy.sh # ntfy server (default: ntfy.sh)"
-echo " NTFY_TOKEN=tk_xxx # ntfy access token for protected topics"
-echo " NTFY_CALLBACK_HOST=host.ts.net # Callback host for interactive notifications"
-echo " NTFY_CALLBACK_PORT=4097 # Callback server port"
-echo " NTFY_IDLE_DELAY_MS=300000 # Idle notification delay (5 min)"
-
-echo ""
-echo "For interactive permissions:"
-echo " 1. Set NTFY_CALLBACK_HOST to your machine's hostname (e.g., via Tailscale)"
-echo " 2. Start the callback service: launchctl load ~/Library/LaunchAgents/$PLIST_NAME"
-echo " 3. Ensure your phone can reach the callback URL"
-echo ""