@powerhousedao/network-admin 0.0.61 → 0.0.62

This diff shows the published contents of two package versions, exactly as released to one of the supported public registries. It is provided for informational purposes only.
@@ -5,12 +5,12 @@
  import {} from "document-drive/processors/types";
  import {} from "document-model";
  // Import processor factories here as they are generated
- import { MyProcessorProcessorFactory } from "./my-processor/factory.js";
+ import { workstreamsProcessorFactory } from "./workstreams/factory.js";
  export const processorFactory = (module) => {
    // Initialize all processor factories once with the module
    const factories = [];
    // Add processors here as they are generated
-   factories.push(MyProcessorProcessorFactory(module));
+   factories.push(workstreamsProcessorFactory(module));
    // Return the inner function that will be called for each drive
    return async (driveHeader) => {
      const processors = [];
@@ -1,4 +1,5 @@
  export {};
  export { processorFactory } from "./factory.js";
- export { MyProcessorProcessor } from "./my-processor/index.js";
+ export * as WorkstreamsProcessor from "./workstreams/index.js";
+ export { workstreamsProcessorFactory } from "./workstreams/factory.js";
  //# sourceMappingURL=index.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../processors/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAG,CAAC;AACX,OAAO,EAAE,gBAAgB,EAAE,MAAM,cAAc,CAAC;AAChD,OAAO,EAAE,oBAAoB,EAAE,MAAM,yBAAyB,CAAC"}
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../processors/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,CAAC;AACV,OAAO,EAAE,gBAAgB,EAAE,MAAM,cAAc,CAAC;AAChD,OAAO,KAAK,oBAAoB,MAAM,wBAAwB,CAAC;AAC/D,OAAO,EAAE,2BAA2B,EAAE,MAAM,0BAA0B,CAAC"}
@@ -1,3 +1,4 @@
  export {};
  export { processorFactory } from "./factory.js";
- export { MyProcessorProcessor } from "./my-processor/index.js";
+ export * as WorkstreamsProcessor from "./workstreams/index.js";
+ export { workstreamsProcessorFactory } from "./workstreams/factory.js";
@@ -0,0 +1,4 @@
+ import { type ProcessorRecord, type IProcessorHostModule } from "document-drive";
+ import { type PHDocumentHeader } from "document-model";
+ export declare const workstreamsProcessorFactory: (module: IProcessorHostModule) => (driveHeader: PHDocumentHeader) => Promise<ProcessorRecord[]>;
+ //# sourceMappingURL=factory.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"factory.d.ts","sourceRoot":"","sources":["../../../processors/workstreams/factory.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,KAAK,eAAe,EACpB,KAAK,oBAAoB,EAC1B,MAAM,gBAAgB,CAAC;AAExB,OAAO,EAAE,KAAK,gBAAgB,EAAE,MAAM,gBAAgB,CAAC;AAGvD,eAAO,MAAM,2BAA2B,GACrC,QAAQ,oBAAoB,MACtB,aAAa,gBAAgB,KAAG,OAAO,CAAC,eAAe,EAAE,CAiC/D,CAAC"}
@@ -0,0 +1,28 @@
+ import {} from "document-drive";
+ import {} from "document-drive";
+ import {} from "document-model";
+ import { WorkstreamsProcessor } from "./index.js";
+ export const workstreamsProcessorFactory = (module) => async (driveHeader) => {
+   // Create a namespace for the processor and the provided drive id
+   const namespace = WorkstreamsProcessor.getNamespace(driveHeader.id);
+   console.log(`[WorkstreamsProcessor] Factory called for drive: ${driveHeader.id}, namespace: ${namespace}`);
+   // Create a namespaced db for the processor
+   const store = await module.relationalDb.createNamespace(namespace);
+   // Create a filter for the processor
+   const filter = {
+     branch: ["main"],
+     documentId: ["*"],
+     documentType: ["powerhouse/workstream", "powerhouse/document-drive"],
+     scope: ["global"],
+   };
+   // Create the processor
+   const processor = new WorkstreamsProcessor(namespace, filter, store);
+   // Initialize database tables (run migrations)
+   await processor.initAndUpgrade();
+   return [
+     {
+       processor,
+       filter,
+     },
+   ];
+ };
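A rough usage sketch for the factory above; the `module` and `driveHeader` values are only assumed to satisfy the declared types, and the wrapper function is hypothetical rather than part of the package:

import type { IProcessorHostModule, ProcessorRecord } from "document-drive";
import type { PHDocumentHeader } from "document-model";
import { workstreamsProcessorFactory } from "./workstreams/factory.js";

// Sketch: attach the workstreams processor for a single drive.
// The factory namespaces a relational store per drive and runs the
// migrations (initAndUpgrade) before returning processor/filter records.
async function attachWorkstreamsProcessor(
  module: IProcessorHostModule,
  driveHeader: PHDocumentHeader,
): Promise<ProcessorRecord[]> {
  return workstreamsProcessorFactory(module)(driveHeader);
}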
@@ -0,0 +1,16 @@
+ import { RelationalDbProcessor } from "document-drive";
+ import type { InternalTransmitterUpdate, InternalOperationUpdate } from "document-drive";
+ import type { DB } from "./schema.js";
+ export declare class WorkstreamsProcessor extends RelationalDbProcessor<DB> {
+   static getNamespace(driveId: string): string;
+   workstreams: string[];
+   initAndUpgrade(): Promise<void>;
+   onStrands(strands: InternalTransmitterUpdate[]): Promise<void>;
+   onDisconnect(): Promise<void>;
+   setWorkstream: (strand: InternalTransmitterUpdate) => Promise<void>;
+   updateInitialProposalInWorkstream: (strand: InternalTransmitterUpdate, operation: InternalOperationUpdate) => Promise<void>;
+   updateSowFromAlternativeProposal: (strand: InternalTransmitterUpdate, operation: InternalOperationUpdate) => Promise<void>;
+   updateNetworkInWorkstream: (strand: InternalTransmitterUpdate, operation: InternalOperationUpdate) => Promise<void>;
+   updateWorkstream: (strand: InternalTransmitterUpdate, operation: InternalOperationUpdate) => Promise<void>;
+ }
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../processors/workstreams/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,qBAAqB,EAAE,MAAM,gBAAgB,CAAC;AACvD,OAAO,KAAK,EACV,yBAAyB,EACzB,uBAAuB,EAExB,MAAM,gBAAgB,CAAC;AASxB,OAAO,KAAK,EAAE,EAAE,EAAE,MAAM,aAAa,CAAC;AAEtC,qBAAa,oBAAqB,SAAQ,qBAAqB,CAAC,EAAE,CAAC;WACjD,YAAY,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM;IAKrD,WAAW,EAAE,MAAM,EAAE,CAAM;IAEZ,cAAc,IAAI,OAAO,CAAC,IAAI,CAAC;IAI/B,SAAS,CACtB,OAAO,EAAE,yBAAyB,EAAE,GACnC,OAAO,CAAC,IAAI,CAAC;IA0CV,YAAY;IAgBlB,aAAa,GAAU,QAAQ,yBAAyB,mBAuCtD;IAEF,iCAAiC,GAC/B,QAAQ,yBAAyB,EACjC,WAAW,uBAAuB,mBAiDlC;IAEF,gCAAgC,GAC9B,QAAQ,yBAAyB,EACjC,WAAW,uBAAuB,mBAsDlC;IAEF,yBAAyB,GACvB,QAAQ,yBAAyB,EACjC,WAAW,uBAAuB,mBAyClC;IAEF,gBAAgB,GACd,QAAQ,yBAAyB,EACjC,WAAW,uBAAuB,mBA0ClC;CACH"}
@@ -0,0 +1,259 @@
+ import { RelationalDbProcessor } from "document-drive";
+ import { up } from "./migrations.js";
+ export class WorkstreamsProcessor extends RelationalDbProcessor {
+   static getNamespace(driveId) {
+     // Default namespace: `${this.name}_${driveId.replaceAll("-", "_")}`
+     return super.getNamespace(driveId);
+   }
+   workstreams = [];
+   async initAndUpgrade() {
+     await up(this.relationalDb);
+   }
+   async onStrands(strands) {
+     if (strands.length === 0) {
+       return;
+     }
+     for (const strand of strands) {
+       if (strand.operations.length === 0) {
+         continue;
+       }
+       if (strand.documentType === "powerhouse/workstream") {
+         this.setWorkstream(strand);
+       }
+       // console.log("strand", { documentType: strand.documentType, docId: strand.documentId });
+       for (const operation of strand.operations) {
+         if (strand.documentType === "powerhouse/workstream") {
+           this.updateWorkstream(strand, operation);
+           this.updateNetworkInWorkstream(strand, operation);
+           this.updateInitialProposalInWorkstream(strand, operation);
+           this.updateSowFromAlternativeProposal(strand, operation);
+         }
+         if (strand.documentType === "powerhouse/document-drive") {
+           if (operation.action.type === "DELETE_NODE") {
+             const castAction = operation.action;
+             console.log("deleting workstream node", castAction.input.id);
+             const foundWorkstream = this.workstreams.find((ws) => ws === castAction.input.id);
+             if (foundWorkstream) {
+               await this.relationalDb
+                 .deleteFrom("workstreams")
+                 .where("workstream_phid", "=", foundWorkstream)
+                 .execute();
+             }
+           }
+         }
+       }
+     }
+   }
+   async onDisconnect() {
+     // Clean up all workstreams for this drive's namespace when the drive is deleted
+     // Since the database is already namespaced per drive, we delete all rows
+     // This ensures no orphaned data remains after drive deletion
+     try {
+       await this.relationalDb.deleteFrom("workstreams").execute();
+       console.log(`Cleaned up workstreams for namespace: ${this.namespace}`);
+     }
+     catch (error) {
+       console.error(`Error cleaning up workstreams for namespace ${this.namespace}:`, error);
+       // Don't throw - cleanup errors shouldn't prevent drive deletion
+     }
+   }
+   setWorkstream = async (strand) => {
+     const docId = strand.documentId;
+     const existingWorkstreamPhids = await this.relationalDb
+       .selectFrom("workstreams")
+       .select("workstream_phid")
+       .where("workstream_phid", "=", docId)
+       .execute();
+     if (existingWorkstreamPhids.length === 0) {
+       console.log("No workstream id found, inserting new one", docId);
+       this.workstreams.push(docId);
+       // insert network id
+       await this.relationalDb
+         .insertInto("workstreams")
+         .values({
+           network_phid: strand.state.client?.id ? strand.state.client.id : null,
+           network_slug: strand.state.client?.name
+             ? strand.state.client.name.toLowerCase().split(" ").join("-")
+             : null,
+           workstream_phid: strand.documentId,
+           workstream_slug: strand.state.title
+             ? strand.state.title.toLowerCase().split(" ").join("-")
+             : "",
+           workstream_title: strand.state.title,
+           workstream_status: strand.state.status,
+           sow_phid: strand.state.initialProposal
+             ? strand.state.initialProposal.sow
+             : null,
+           // final_milestone_target: new Date(),
+           initial_proposal_status: strand.state.initialProposal
+             ? strand.state.initialProposal.status
+             : null,
+           initial_proposal_author: strand.state.initialProposal
+             ? strand.state.initialProposal.author.name
+             : null,
+         })
+         .onConflict((oc) => oc.column("workstream_phid").doNothing())
+         .execute();
+     }
+   };
+   updateInitialProposalInWorkstream = async (strand, operation) => {
+     const docId = strand.documentId;
+     const existingWorkstreamPhids = await this.relationalDb
+       .selectFrom("workstreams")
+       .select("workstream_phid")
+       .where("workstream_phid", "=", docId)
+       .execute();
+     const [foundWorkstreamId] = existingWorkstreamPhids;
+     if (foundWorkstreamId) {
+       // update existing workstream row
+       if (operation.action.type === "EDIT_INITIAL_PROPOSAL") {
+         const input = operation.action.input;
+         if (!input)
+           return;
+         console.log("updating initial proposal in workstream", operation.action.input);
+         // Build update object with only defined values
+         const updateData = {};
+         // Check for undefined, not truthiness - allows null to pass through
+         if (input.sowId !== undefined) {
+           if (strand.state.initialProposal?.status === "ACCEPTED") {
+             updateData.sow_phid = strand.state.initialProposal?.sow || null;
+           }
+         }
+         if (input.proposalAuthor) {
+           updateData.initial_proposal_author = input.proposalAuthor.name;
+         }
+         if (input.status) {
+           updateData.initial_proposal_status = input.status;
+           if (input.status === "ACCEPTED") {
+             updateData.sow_phid = strand.state.initialProposal?.sow || null;
+           }
+         }
+         // Only execute update if there are fields to update
+         if (Object.keys(updateData).length > 0) {
+           await this.relationalDb
+             .updateTable("workstreams")
+             .set(updateData)
+             .where("workstream_phid", "=", docId)
+             .execute();
+         }
+       }
+     }
+   };
+   updateSowFromAlternativeProposal = async (strand, operation) => {
+     const docId = strand.documentId;
+     const existingWorkstreamPhids = await this.relationalDb
+       .selectFrom("workstreams")
+       .select("workstream_phid")
+       .where("workstream_phid", "=", docId)
+       .execute();
+     const [foundWorkstreamId] = existingWorkstreamPhids;
+     if (foundWorkstreamId) {
+       // update existing workstream row
+       if (operation.action.type === "EDIT_ALTERNATIVE_PROPOSAL") {
+         const input = operation.action.input;
+         if (!input)
+           return;
+         console.log("updating sow from alternative proposal in workstream", operation.action.input);
+         // Build update object with only defined values
+         const updateData = {};
+         const selectedAlternativeProposal = strand.state.alternativeProposals.find((proposal) => proposal.id === input.id);
+         if (selectedAlternativeProposal) {
+           // Check for undefined, not truthiness - allows null to pass through
+           if (input.sowId !== undefined) {
+             if (selectedAlternativeProposal.status === "ACCEPTED") {
+               updateData.sow_phid = selectedAlternativeProposal.sow || null;
+             }
+           }
+           if (input.status) {
+             if (input.status === "ACCEPTED") {
+               updateData.sow_phid = selectedAlternativeProposal.sow || null;
+             }
+           }
+         }
+         // Only execute update if there are fields to update
+         if (Object.keys(updateData).length > 0) {
+           await this.relationalDb
+             .updateTable("workstreams")
+             .set(updateData)
+             .where("workstream_phid", "=", docId)
+             .execute();
+         }
+       }
+     }
+   };
+   updateNetworkInWorkstream = async (strand, operation) => {
+     const docId = strand.documentId;
+     const existingWorkstreamPhids = await this.relationalDb
+       .selectFrom("workstreams")
+       .select("workstream_phid")
+       .where("workstream_phid", "=", docId)
+       .execute();
+     const [foundWorkstreamId] = existingWorkstreamPhids;
+     if (foundWorkstreamId) {
+       // update existing workstream row
+       if (operation.action.type === "EDIT_CLIENT_INFO") {
+         const input = operation.action.input;
+         if (!input)
+           return;
+         console.log("updating client in workstream", operation.action.input);
+         // Build update object with only defined values
+         const updateData = {};
+         if (input.clientId) {
+           updateData.network_phid = input.clientId;
+         }
+         if (input.name) {
+           updateData.network_slug = input.name
+             .toLowerCase()
+             .split(" ")
+             .join("-");
+         }
+         // Only execute update if there are fields to update
+         if (Object.keys(updateData).length > 0) {
+           await this.relationalDb
+             .updateTable("workstreams")
+             .set(updateData)
+             .where("workstream_phid", "=", docId)
+             .execute();
+         }
+       }
+     }
+   };
+   updateWorkstream = async (strand, operation) => {
+     const docId = strand.documentId;
+     const existingWorkstreamPhids = await this.relationalDb
+       .selectFrom("workstreams")
+       .select("workstream_phid")
+       .where("workstream_phid", "=", docId)
+       .execute();
+     const [foundWorkstreamId] = existingWorkstreamPhids;
+     if (foundWorkstreamId) {
+       // update existing workstream row
+       if (operation.action.type === "EDIT_WORKSTREAM") {
+         const input = operation.action.input;
+         if (!input)
+           return;
+         console.log("updating workstream", operation.action.input);
+         // Build update object with only defined values
+         const updateData = {};
+         if (input.title) {
+           updateData.workstream_title = input.title;
+           updateData.workstream_slug = input.title
+             .toLowerCase()
+             .split(" ")
+             .join("-");
+         }
+         if (input.status) {
+           updateData.workstream_status = input.status;
+         }
+         // Only execute update if there are fields to update
+         if (Object.keys(updateData).length > 0) {
+           await this.relationalDb
+             .updateTable("workstreams")
+             .set(updateData)
+             .where("workstream_phid", "=", docId)
+             .execute();
+         }
+       }
+     }
+   };
+ }
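For orientation, here is roughly what setWorkstream writes for a hypothetical strand; the state values below are invented, but the column mapping and the slug derivation (toLowerCase().split(" ").join("-")) follow the code above:

// Hypothetical workstream state carried on a strand:
const state = {
  title: "Network Ops",
  status: "RFP_DRAFT",
  client: { id: "net-1", name: "Power Grid" },
  initialProposal: null,
};
// Row inserted by setWorkstream (onConflict on workstream_phid does nothing):
const row = {
  network_phid: "net-1",
  network_slug: "power-grid",      // client.name, lowercased and hyphenated
  workstream_phid: "doc-123",      // strand.documentId (made up here)
  workstream_slug: "network-ops",  // title, lowercased and hyphenated
  workstream_title: "Network Ops",
  workstream_status: "RFP_DRAFT",
  sow_phid: null,                  // no initialProposal yet
  initial_proposal_status: null,
  initial_proposal_author: null,
};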
@@ -0,0 +1,4 @@
+ import { type IRelationalDb } from "document-drive";
+ export declare function up(db: IRelationalDb<any>): Promise<void>;
+ export declare function down(db: IRelationalDb<any>): Promise<void>;
+ //# sourceMappingURL=migrations.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"migrations.d.ts","sourceRoot":"","sources":["../../../processors/workstreams/migrations.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,aAAa,EAAE,MAAM,gBAAgB,CAAC;AAEpD,wBAAsB,EAAE,CAAC,EAAE,EAAE,aAAa,CAAC,GAAG,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,CAuB9D;AAED,wBAAsB,IAAI,CAAC,EAAE,EAAE,aAAa,CAAC,GAAG,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,CAGhE"}
@@ -0,0 +1,30 @@
+ import {} from "document-drive";
+ export async function up(db) {
+   // Create table with IF NOT EXISTS
+   try {
+     await db.schema
+       .createTable("workstreams")
+       .addColumn("network_phid", "varchar(255)")
+       .addColumn("network_slug", "varchar(255)")
+       .addColumn("workstream_phid", "varchar(255)")
+       .addColumn("workstream_slug", "varchar(255)")
+       .addColumn("workstream_title", "varchar(255)")
+       .addColumn("workstream_status", "varchar(255)")
+       .addColumn("sow_phid", "varchar(255)")
+       .addColumn("final_milestone_target", "timestamp")
+       .addColumn("initial_proposal_status", "varchar(255)")
+       .addColumn("initial_proposal_author", "varchar(255)")
+       .addPrimaryKeyConstraint("workstreams_pkey", ["workstream_phid"])
+       .ifNotExists()
+       .execute();
+     console.log("[WorkstreamsProcessor] Table 'workstreams' created or already exists");
+   }
+   catch (error) {
+     console.error("[WorkstreamsProcessor] Failed to create 'workstreams' table:", error);
+     throw error;
+   }
+ }
+ export async function down(db) {
+   // drop table
+   await db.schema.dropTable("workstreams").execute();
+ }
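A sketch of running this migration standalone; the Postgres connection and the cast are assumptions (the processor normally passes its namespaced IRelationalDb, which exposes the same Kysely schema builder used above):

import { Kysely, PostgresDialect } from "kysely";
import pg from "pg";
import type { IRelationalDb } from "document-drive";
import { up } from "./migrations.js";
import type { DB } from "./schema.js";

const db = new Kysely<DB>({
  dialect: new PostgresDialect({
    pool: new pg.Pool({ connectionString: process.env.DATABASE_URL }),
  }),
});
// Creates the "workstreams" table (IF NOT EXISTS, primary key on workstream_phid).
await up(db as unknown as IRelationalDb<DB>);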
@@ -0,0 +1,19 @@
+ import type { ColumnType } from "kysely";
+ export type Timestamp = ColumnType<Date, Date | string, Date | string>;
+ export interface Workstreams {
+   final_milestone_target: Timestamp | null;
+   initial_proposal_author: string | null;
+   initial_proposal_status: string | null;
+   network_phid: string | null;
+   network_slug: string | null;
+   roadmap_oid: string | null;
+   sow_phid: string | null;
+   workstream_phid: string;
+   workstream_slug: string | null;
+   workstream_status: string | null;
+   workstream_title: string | null;
+ }
+ export interface DB {
+   workstreams: Workstreams;
+ }
+ //# sourceMappingURL=schema.d.ts.map
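The Timestamp alias uses Kysely's ColumnType<Date, Date | string, Date | string>: selects return a Date, while inserts and updates also accept an ISO string. A small illustration with made-up values:

import type { Insertable, Selectable, Updateable } from "kysely";
import type { Workstreams } from "./schema.js";

type Row = Selectable<Workstreams>;            // final_milestone_target: Date | null
const patch: Updateable<Workstreams> = {
  final_milestone_target: "2031-01-31T00:00:00.000Z", // string accepted on write
};
const insert: Insertable<Workstreams> = {
  workstream_phid: "doc-123", // the only non-nullable column in the table
  final_milestone_target: new Date(),
};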
@@ -0,0 +1 @@
+ {"version":3,"file":"schema.d.ts","sourceRoot":"","sources":["../../../processors/workstreams/schema.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAC;AAEzC,MAAM,MAAM,SAAS,GAAG,UAAU,CAAC,IAAI,EAAE,IAAI,GAAG,MAAM,EAAE,IAAI,GAAG,MAAM,CAAC,CAAC;AAEvE,MAAM,WAAW,WAAW;IAC1B,sBAAsB,EAAE,SAAS,GAAG,IAAI,CAAC;IACzC,uBAAuB,EAAE,MAAM,GAAG,IAAI,CAAC;IACvC,uBAAuB,EAAE,MAAM,GAAG,IAAI,CAAC;IAC5B,YAAY,EAAE,MAAM,GAAG,IAAI,CAAC;IAC5B,YAAY,EAAE,MAAM,GAAG,IAAI,CAAC;IAC3B,WAAW,EAAE,MAAM,GAAG,IAAI,CAAC;IACxB,QAAQ,EAAE,MAAM,GAAG,IAAI,CAAC;IACxB,eAAe,EAAE,MAAM,CAAC;IACxB,eAAe,EAAE,MAAM,GAAG,IAAI,CAAC;IAC/B,iBAAiB,EAAE,MAAM,GAAG,IAAI,CAAC;IACjC,gBAAgB,EAAE,MAAM,GAAG,IAAI,CAAC;CACjC;AAED,MAAM,WAAW,EAAE;IACjB,WAAW,EAAE,WAAW,CAAC;CAC1B"}
@@ -0,0 +1 @@
+ export {};
@@ -1 +1 @@
- {"version":3,"file":"resolvers.d.ts","sourceRoot":"","sources":["../../../subgraphs/workstreams/resolvers.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,SAAS,EAAE,MAAM,4BAA4B,CAAC;AAqC5D,eAAO,MAAM,YAAY,GAAI,UAAU,SAAS,KAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAk8BxE,CAAC"}
+ {"version":3,"file":"resolvers.d.ts","sourceRoot":"","sources":["../../../subgraphs/workstreams/resolvers.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,SAAS,EAAE,MAAM,4BAA4B,CAAC;AAuC5D,eAAO,MAAM,YAAY,GAAI,UAAU,SAAS,KAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAi9BxE,CAAC"}
@@ -1,7 +1,8 @@
  import {} from "@powerhousedao/reactor-api";
+ import { WorkstreamsProcessor } from "../../processors/workstreams/index.js";
  import {} from "../../document-models/request-for-proposals/index.js";
  import {} from "../../document-models/workstream/index.js";
- import { AnalyticsPath } from "@powerhousedao/analytics-engine-core";
+ import { sql } from "kysely";
  export const getResolvers = (subgraph) => {
    const reactor = subgraph.reactor;
    const db = subgraph.relationalDb;
@@ -23,11 +24,19 @@ export const getResolvers = (subgraph) => {
    }
    return null;
  };
+ // Normalize drive IDs to match the format used by the processor factory
+ // reactor.getDrives() returns base64-like IDs (with + and =)
+ // but driveHeader.id in the factory uses - instead of +
+ // TODO: Report this inconsistency to @powerhousedao packages
+ const normalizeDriveId = (driveId) => {
+   return driveId.replace(/\+/g, "-").replace(/=+$/, "");
+ };
  const getCandidateDrives = async () => {
    try {
      const drives = await reactor.getDrives?.();
-     if (Array.isArray(drives) && drives.length > 0)
-       return drives;
+     if (Array.isArray(drives) && drives.length > 0) {
+       return drives.map(normalizeDriveId);
+     }
    }
    catch { }
    return [];
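For illustration, with made-up IDs, the normalization above behaves like this:

// Hypothetical IDs, showing both rewrites performed by normalizeDriveId:
normalizeDriveId("a1b2+c3d4+e5=="); // → "a1b2-c3d4-e5"  ("+" → "-", trailing "=" stripped)
normalizeDriveId("plain-drive-id"); // → "plain-drive-id" (already in the factory's format)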
@@ -256,153 +265,126 @@ export const getResolvers = (subgraph) => {
      };
    }
  };
- const getWorkstreamRowsFromAnalytics = async (analyticsStore, filters, wantedSlug) => {
-   const select = {
-     network: [],
-     network_slug: [],
-     workstream_slug: [],
-     workstream_title: [],
-     status: [],
-     initial_proposal_status: [],
-     initial_proposal_author: [],
-     sow_phid: [],
-     workstream_phid: [],
-     operation_index: [],
-   };
-   if (filters) {
-     if ("workstreamId" in filters && filters.workstreamId) {
-       select.workstream_phid = [
-         AnalyticsPath.fromString(`/${filters.workstreamId}`),
-       ];
-     }
-     if ("workstreamSlug" in filters && filters.workstreamSlug) {
-       select.workstream_slug = [
-         AnalyticsPath.fromString(`/${filters.workstreamSlug}`),
-       ];
-     }
-     if (filters.networkId) {
-       select.network = [AnalyticsPath.fromString(`/${filters.networkId}`)];
-     }
-     if (filters.networkSlug) {
-       select.network_slug = [
-         AnalyticsPath.fromString(`/${filters.networkSlug}`),
-       ];
-     }
-     else if ("networkName" in filters &&
-       filters.networkName &&
-       wantedSlug) {
-       select.network_slug = [AnalyticsPath.fromString(`/${wantedSlug}`)];
-     }
-     if ("networkNames" in filters && filters.networkNames) {
-       const networkSlugs = filters.networkNames
-         .filter((name) => Boolean(name))
-         .map((name) => deriveSlug(name));
-       if (networkSlugs.length > 0) {
-         select.network_slug = networkSlugs.map((slug) => AnalyticsPath.fromString(`/${slug}`));
-       }
-     }
-     const statuses = (filters.workstreamStatuses || []).filter((status) => Boolean(status));
-     if (statuses.length > 0) {
-       select.status = statuses.map((s) => AnalyticsPath.fromString(`/${s}`));
-     }
-     else if (filters.workstreamStatus) {
-       select.status = [
-         AnalyticsPath.fromString(`/${filters.workstreamStatus}`),
-       ];
-     }
+ const applyWorkstreamFilters = (qb, filters, wantedSlug) => {
+   // Handle workstreamId and workstreamSlug (from WorkstreamFilter)
+   if ("workstreamId" in filters && filters.workstreamId) {
+     qb = qb.where("workstream_phid", "=", filters.workstreamId);
    }
-   const series = await analyticsStore.getMatchingSeries({
-     start: null,
-     end: null,
-     metrics: ["Workstream"],
-     select,
-   });
-   const latestByWorkstream = {};
-   for (const s of series) {
-     const dims = s.dimensions;
-     const getPathString = (dim) => {
-       if (!dim)
-         return null;
-       const path = dim.path?.toString() || dim.toString();
-       if (path === "?" || path === "none" || path === "/none" || path === "")
-         return null;
-       return path.startsWith("/") ? path.substring(1) : path;
-     };
-     const workstreamPhid = getPathString(dims.workstream_phid) || s.source.toString().split("/")[3];
-     if (!workstreamPhid || workstreamPhid === "?")
-       continue;
-     const opIndex = parseInt(getPathString(dims.operation_index) || "0");
-     const existing = latestByWorkstream[workstreamPhid];
-     // Priority 1: Higher operation index
-     // Priority 2: Higher database record ID (if opIndex/timestamp are same)
-     if (!existing ||
-       opIndex > existing.opIndex ||
-       (opIndex === existing.opIndex && s.start.toMillis() >= existing.start.toMillis())) {
-       latestByWorkstream[workstreamPhid] = {
-         network_phid: getPathString(dims.network),
-         network_slug: getPathString(dims.network_slug),
-         workstream_phid: workstreamPhid,
-         workstream_slug: getPathString(dims.workstream_slug),
-         workstream_title: getPathString(dims.workstream_title)?.replace(/-/g, "/"),
-         workstream_status: getPathString(dims.status),
-         sow_phid: getPathString(dims.sow_phid),
-         initial_proposal_status: getPathString(dims.initial_proposal_status),
-         initial_proposal_author: getPathString(dims.initial_proposal_author),
-         start: s.start,
-         opIndex,
-       };
+   else if ("workstreamSlug" in filters && filters.workstreamSlug) {
+     qb = qb.where("workstream_slug", "=", filters.workstreamSlug);
+   }
+   // Handle workstreamTitle filter (from WorkstreamsFilter)
+   if ("workstreamTitle" in filters && filters.workstreamTitle) {
+     // Use case-insensitive partial match for workstream title
+     // Filter out NULL values and do case-insensitive search
+     const searchPattern = `%${filters.workstreamTitle.toLowerCase()}%`;
+     qb = qb
+       .where("workstream_title", "is not", null)
+       .where((eb) => eb(sql `LOWER(workstream_title)`, "like", searchPattern));
+   }
+   if (filters.networkId) {
+     qb = qb.where("network_phid", "=", filters.networkId);
+   }
+   else if (filters.networkSlug) {
+     qb = qb.where("network_slug", "=", filters.networkSlug);
+   }
+   else if (filters.networkName && wantedSlug) {
+     qb = qb.where("network_slug", "=", wantedSlug);
+   }
+   else if ("networkNames" in filters && filters.networkNames) {
+     // Handle networkNames filter (from WorkstreamsFilter)
+     const networkSlugs = filters.networkNames
+       .filter((name) => Boolean(name))
+       .map((name) => deriveSlug(name));
+     if (networkSlugs.length > 0) {
+       qb = qb.where("network_slug", "in", networkSlugs);
      }
    }
-   let rows = Object.values(latestByWorkstream);
-   // Manual filtering for workstreamTitle (was partial match in SQL)
-   if (filters && "workstreamTitle" in filters && filters.workstreamTitle) {
-     const search = filters.workstreamTitle.toLowerCase();
-     rows = rows.filter((r) => r.workstream_title?.toLowerCase().includes(search));
+   const statuses = (filters.workstreamStatuses || []).filter((status) => Boolean(status));
+   if (statuses.length > 0) {
+     qb = qb.where("workstream_status", "in", statuses);
+   }
+   else if (filters.workstreamStatus) {
+     qb = qb.where("workstream_status", "=", filters.workstreamStatus);
+   }
+   return qb;
+ };
+ const applyScopeOfWorkFilters = (qb, filters, wantedSlug) => {
+   if (filters.workstreamId) {
+     qb = qb.where("workstream_phid", "=", filters.workstreamId);
+   }
+   else if (filters.workstreamSlug) {
+     qb = qb.where("workstream_slug", "=", filters.workstreamSlug);
+   }
+   if (filters.networkId) {
+     qb = qb.where("network_phid", "=", filters.networkId);
+   }
+   else if (filters.networkSlug) {
+     qb = qb.where("network_slug", "=", filters.networkSlug);
+   }
+   else if (filters.networkName && wantedSlug) {
+     qb = qb.where("network_slug", "=", wantedSlug);
+   }
+   if (filters.workstreamStatus) {
+     qb = qb.where("workstream_status", "=", filters.workstreamStatus);
    }
-   return rows;
+   return qb;
  };
  return {
    Query: {
      processorWorkstreams: async () => {
-       const analyticsStore = subgraph.analyticsStore;
-       if (!analyticsStore) {
-         console.error("analyticsStore not found in subgraph");
-         return [];
-       }
-       try {
-         const rows = await getWorkstreamRowsFromAnalytics(analyticsStore);
-         return rows.map((row) => ({
-           network_phid: row.network_phid,
-           network_slug: row.network_slug,
-           workstream_phid: row.workstream_phid,
-           workstream_slug: row.workstream_slug,
-           workstream_title: row.workstream_title,
-           workstream_status: row.workstream_status,
-           sow_phid: row.sow_phid,
-           roadmap_oid: null,
-           final_milestone_target: null,
-           initial_proposal_status: row.initial_proposal_status,
-           initial_proposal_author: row.initial_proposal_author,
-         }));
-       }
-       catch (error) {
-         console.error("Error in processorWorkstreams resolver:", error);
-         throw error;
-       }
+       const drives = await getCandidateDrives();
+       const allProcessorWorkstreams = await Promise.all(drives.map(async (driveId) => {
+         const namespace = WorkstreamsProcessor.getNamespace(driveId);
+         console.log(`[WorkstreamsProcessor] Resolver querying drive: ${driveId}, namespace: ${namespace}`);
+         try {
+           return await WorkstreamsProcessor.query(driveId, db)
+             .selectFrom("workstreams")
+             .selectAll()
+             .execute();
+         }
+         catch (error) {
+           console.warn(`[WorkstreamsProcessor] Failed to query namespace ${namespace}:`, error);
+           return []; // Return empty array if table doesn't exist for this drive
+         }
+       }));
+       // Flatten the array of arrays into a single array
+       const flattenedWorkstreams = allProcessorWorkstreams.flat();
+       return flattenedWorkstreams.map((workstream) => ({
+         network_phid: workstream.network_phid,
+         network_slug: workstream.network_slug,
+         workstream_phid: workstream.workstream_phid,
+         workstream_slug: workstream.workstream_slug,
+         workstream_title: workstream.workstream_title,
+         workstream_status: workstream.workstream_status,
+         sow_phid: workstream.sow_phid,
+         roadmap_oid: workstream.roadmap_oid,
+         final_milestone_target: workstream.final_milestone_target,
+         initial_proposal_status: workstream.initial_proposal_status,
+         initial_proposal_author: workstream.initial_proposal_author,
+       }));
      },
      workstream: async (parent, args) => {
        const filters = args.filter || {};
-       const analyticsStore = subgraph.analyticsStore;
-       if (!analyticsStore)
-         return [];
+       const candidateDrives = await getCandidateDrives();
        const wantedSlug = filters.networkSlug ||
          (filters.networkName ? deriveSlug(filters.networkName) : undefined);
-       const rows = await getWorkstreamRowsFromAnalytics(analyticsStore, filters, wantedSlug);
        const resolved = [];
        const contributorPhids = new Set();
-       for (const row of rows) {
-         const hydrated = await hydrateWorkstreamRow(row);
-         resolved.push(hydrated);
+       for (const driveId of candidateDrives) {
+         let qb = WorkstreamsProcessor.query(driveId, db)
+           .selectFrom("workstreams")
+           .selectAll();
+         qb = applyWorkstreamFilters(qb, filters, wantedSlug);
+         const rows = await qb.execute();
+         if (rows.length === 0) {
+           continue;
+         }
+         for (const row of rows) {
+           const hydrated = await hydrateWorkstreamRow(row);
+           resolved.push(hydrated);
+         }
+         break;
        }
        // Collect SOWs and their contributors
        const sowDocs = collectSowsFromWorkstreams(resolved);
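To make the query-builder refactor concrete: applyWorkstreamFilters pushes what was previously post-hoc analytics filtering down into SQL. A standalone sketch of the query it builds for a title-plus-status filter; table and column names come from the schema above, while the dialect and the filter values are assumptions:

import { Kysely, PostgresDialect, sql } from "kysely";
import pg from "pg";
import type { DB } from "../../processors/workstreams/schema.js";

const db = new Kysely<DB>({
  dialect: new PostgresDialect({ pool: new pg.Pool() }),
});
// Equivalent of applyWorkstreamFilters({ workstreamTitle: "ops", workstreamStatuses: [...] }):
const compiled = db
  .selectFrom("workstreams")
  .selectAll()
  .where("workstream_title", "is not", null)
  .where((eb) => eb(sql`LOWER(workstream_title)`, "like", "%ops%"))
  .where("workstream_status", "in", ["IN_PROGRESS", "RFP_DRAFT"])
  .compile();
// compiled.sql is roughly:
//   select * from "workstreams"
//   where "workstream_title" is not null
//     and LOWER(workstream_title) like $1
//     and "workstream_status" in ($2, $3)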
@@ -482,17 +464,35 @@
      },
      workstreams: async (parent, args) => {
        const filters = args.filter || {};
-       const analyticsStore = subgraph.analyticsStore;
-       if (!analyticsStore)
-         return [];
+       const candidateDrives = await getCandidateDrives();
+       // Check if any filters are provided
+       const hasFilters = filters.networkId ||
+         filters.networkSlug ||
+         filters.networkName ||
+         (filters.networkNames && filters.networkNames.length > 0) ||
+         filters.workstreamTitle ||
+         filters.workstreamStatus ||
+         (filters.workstreamStatuses && filters.workstreamStatuses.length > 0);
        const wantedSlug = filters.networkSlug ||
          (filters.networkName ? deriveSlug(filters.networkName) : undefined);
-       const rows = await getWorkstreamRowsFromAnalytics(analyticsStore, filters, wantedSlug);
        const results = [];
        const contributorPhids = new Set();
-       for (const row of rows) {
-         const hydrated = await hydrateWorkstreamRow(row);
-         results.push(hydrated);
+       for (const driveId of candidateDrives) {
+         let qb = WorkstreamsProcessor.query(driveId, db)
+           .selectFrom("workstreams")
+           .selectAll();
+         // Only apply filters if any are provided
+         if (hasFilters) {
+           qb = applyWorkstreamFilters(qb, filters, wantedSlug);
+         }
+         const rows = await qb.execute();
+         if (rows.length === 0) {
+           continue;
+         }
+         for (const row of rows) {
+           const hydrated = await hydrateWorkstreamRow(row);
+           results.push(hydrated);
+         }
        }
        // Collect SOWs and their contributors
        const sowDocs = collectSowsFromWorkstreams(results);
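Assuming these resolvers are served from a standard GraphQL endpoint, a client call might look like the following; the endpoint URL, the input type name, and the selection set are illustrative, while the filter fields (networkSlug, workstreamStatus, and so on) come from the resolver code above:

// Illustrative only: endpoint path, type name, and selected fields are assumptions.
const res = await fetch("http://localhost:4001/graphql", {
  method: "POST",
  headers: { "content-type": "application/json" },
  body: JSON.stringify({
    query: `query ($filter: WorkstreamsFilter) {
      workstreams(filter: $filter) { title status code }
    }`,
    variables: { filter: { networkSlug: "power-grid", workstreamStatus: "IN_PROGRESS" } },
  }),
});
const { data } = await res.json();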
@@ -572,109 +572,129 @@
      },
      rfpByWorkstream: async (parent, args) => {
        const filters = args.filter || {};
-       const analyticsStore = subgraph.analyticsStore;
-       if (!analyticsStore)
-         return [];
+       const candidateDrives = await getCandidateDrives();
        const wantedSlug = filters.networkSlug ||
          (filters.networkName ? deriveSlug(filters.networkName) : undefined);
-       const rows = await getWorkstreamRowsFromAnalytics(analyticsStore, filters, wantedSlug);
        const results = [];
-       for (const row of rows) {
-         const hydrated = await hydrateWorkstreamRow(row);
-         results.push({
-           code: hydrated.code,
-           title: hydrated.title,
-           status: hydrated.status,
-           rfp: hydrated.rfp,
-         });
+       for (const driveId of candidateDrives) {
+         let qb = WorkstreamsProcessor.query(driveId, db)
+           .selectFrom("workstreams")
+           .selectAll();
+         qb = applyWorkstreamFilters(qb, filters, wantedSlug);
+         const rows = await qb.execute();
+         if (rows.length === 0) {
+           continue;
+         }
+         for (const row of rows) {
+           const hydrated = await hydrateWorkstreamRow(row);
+           results.push({
+             code: hydrated.code,
+             title: hydrated.title,
+             status: hydrated.status,
+             rfp: hydrated.rfp,
+           });
+         }
+         if (filters.workstreamId || filters.workstreamSlug) {
+           break;
+         }
        }
        return results;
      },
      scopeOfWorkByNetworkOrStatus: async (parent, args) => {
        const filters = args.filter || {};
-       const analyticsStore = subgraph.analyticsStore;
-       if (!analyticsStore)
-         return [];
+       const candidateDrives = await getCandidateDrives();
        const wantedSlug = filters.networkSlug ||
          (filters.networkName ? deriveSlug(filters.networkName) : undefined);
-       const rows = await getWorkstreamRowsFromAnalytics(analyticsStore, filters, wantedSlug);
        const results = [];
        const contributorPhids = new Set();
-       for (const row of rows) {
-         const hydrated = await hydrateWorkstreamRow(row);
-         // Collect SOWs based on proposalRole filter
-         const sowDocs = [];
-         if (!filters.proposalRole) {
-           // If no proposalRole specified, include all SOWs
-           if (hydrated.sow) {
-             sowDocs.push(hydrated.sow);
-           }
-           if (hydrated.initialProposal?.sow) {
-             sowDocs.push(hydrated.initialProposal.sow);
-           }
-           for (const altProposal of hydrated.alternativeProposals || []) {
-             if (altProposal.sow) {
-               sowDocs.push(altProposal.sow);
-             }
-           }
+       for (const driveId of candidateDrives) {
+         let qb = WorkstreamsProcessor.query(driveId, db)
+           .selectFrom("workstreams")
+           .selectAll();
+         qb = applyScopeOfWorkFilters(qb, filters, wantedSlug);
+         const rows = await qb.execute();
+         if (rows.length === 0) {
+           continue;
          }
-         else if (filters.proposalRole === "INITIAL") {
-           if (hydrated.initialProposal?.sow) {
-             sowDocs.push(hydrated.initialProposal.sow);
+         for (const row of rows) {
+           const hydrated = await hydrateWorkstreamRow(row);
+           // Collect SOWs based on proposalRole filter
+           const sowDocs = [];
+           if (!filters.proposalRole) {
+             // If no proposalRole specified, include all SOWs
+             if (hydrated.sow) {
+               sowDocs.push(hydrated.sow);
+             }
+             if (hydrated.initialProposal?.sow) {
+               sowDocs.push(hydrated.initialProposal.sow);
+             }
+             for (const altProposal of hydrated.alternativeProposals || []) {
+               if (altProposal.sow) {
+                 sowDocs.push(altProposal.sow);
+               }
+             }
            }
-         }
-         else if (filters.proposalRole === "ALTERNATIVE") {
-           for (const altProposal of hydrated.alternativeProposals || []) {
-             if (altProposal.sow) {
-               sowDocs.push(altProposal.sow);
+           else if (filters.proposalRole === "INITIAL") {
+             if (hydrated.initialProposal?.sow) {
+               sowDocs.push(hydrated.initialProposal.sow);
              }
            }
-         }
-         else if (filters.proposalRole === "AWARDED") {
-           // For AWARDED, we check if the workstream status is AWARDED
-           // and return the initial proposal's SOW (as it's typically the awarded one)
-           if (hydrated.status === "AWARDED" &&
-             hydrated.initialProposal?.sow) {
-             sowDocs.push(hydrated.initialProposal.sow);
+           else if (filters.proposalRole === "ALTERNATIVE") {
+             for (const altProposal of hydrated.alternativeProposals || []) {
+               if (altProposal.sow) {
+                 sowDocs.push(altProposal.sow);
+               }
+             }
            }
-         }
-         // Collect contributor PHIDs from all SOWs
-         for (const sow of sowDocs) {
-           if (!sow || typeof sow !== "object")
-             continue;
-           if (Array.isArray(sow.contributors)) {
-             sow.contributors.forEach((contributor) => {
-               const phid = extractPhid(contributor);
-               if (phid)
-                 contributorPhids.add(phid);
-             });
+           else if (filters.proposalRole === "AWARDED") {
+             // For AWARDED, we check if the workstream status is AWARDED
+             // and return the initial proposal's SOW (as it's typically the awarded one)
+             if (hydrated.status === "AWARDED" &&
+               hydrated.initialProposal?.sow) {
+               sowDocs.push(hydrated.initialProposal.sow);
+             }
            }
-           // Collect deliverable owners too so `SOW_Deliverable.owner` can resolve
-           if (Array.isArray(sow.deliverables)) {
-             sow.deliverables.forEach((deliverable) => {
-               if (!deliverable || typeof deliverable !== "object")
-                 return;
-               const phid = extractPhid(deliverable.owner);
-               if (phid)
-                 contributorPhids.add(phid);
-             });
+           // Collect contributor PHIDs from all SOWs
+           for (const sow of sowDocs) {
+             if (!sow || typeof sow !== "object")
+               continue;
+             if (Array.isArray(sow.contributors)) {
+               sow.contributors.forEach((contributor) => {
+                 const phid = extractPhid(contributor);
+                 if (phid)
+                   contributorPhids.add(phid);
+               });
+             }
+             // Collect deliverable owners too so `SOW_Deliverable.owner` can resolve
+             if (Array.isArray(sow.deliverables)) {
+               sow.deliverables.forEach((deliverable) => {
+                 if (!deliverable || typeof deliverable !== "object")
+                   return;
+                 const phid = extractPhid(deliverable.owner);
+                 if (phid)
+                   contributorPhids.add(phid);
+               });
+             }
+             // Collect project owners too so `SOW_Project.projectOwner` can resolve
+             if (Array.isArray(sow.projects)) {
+               sow.projects.forEach((project) => {
+                 if (!project || typeof project !== "object")
+                   return;
+                 const phid = extractPhid(project.projectOwner);
+                 if (phid)
+                   contributorPhids.add(phid);
+               });
+             }
            }
-           // Collect project owners too so `SOW_Project.projectOwner` can resolve
-           if (Array.isArray(sow.projects)) {
-             sow.projects.forEach((project) => {
-               if (!project || typeof project !== "object")
-                 return;
-               const phid = extractPhid(project.projectOwner);
-               if (phid)
-                 contributorPhids.add(phid);
-             });
+           // Filter out null/undefined SOWs and add to results
+           for (const sow of sowDocs) {
+             if (sow) {
+               results.push(sow);
+             }
            }
          }
-         // Filter out null/undefined SOWs and add to results
-         for (const sow of sowDocs) {
-           if (sow) {
-             results.push(sow);
-           }
+         if (filters.workstreamId || filters.workstreamSlug) {
+           break;
          }
        }
        // Fetch all builder profile documents for contributors
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
    "name": "@powerhousedao/network-admin",
    "description": "Network Admin package for Powerhouse",
-   "version": "0.0.61",
+   "version": "0.0.62",
    "license": "AGPL-3.0-only",
    "type": "module",
    "files": [
@@ -1,5 +0,0 @@
- import { type ProcessorRecord } from "document-drive";
- import { type IProcessorHostModule } from "document-drive";
- import { type PHDocumentHeader } from "document-model";
- export declare const MyProcessorProcessorFactory: (module: IProcessorHostModule) => (driveHeader: PHDocumentHeader) => ProcessorRecord[];
- //# sourceMappingURL=factory.d.ts.map
@@ -1 +0,0 @@
- {"version":3,"file":"factory.d.ts","sourceRoot":"","sources":["../../../processors/my-processor/factory.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACtD,OAAO,EAAE,KAAK,oBAAoB,EAAE,MAAM,gBAAgB,CAAC;AAC3D,OAAO,EAAE,KAAK,gBAAgB,EAAE,MAAM,gBAAgB,CAAC;AAGvD,eAAO,MAAM,2BAA2B,GACrC,QAAQ,oBAAoB,MAC5B,aAAa,gBAAgB,KAAG,eAAe,EAY/C,CAAC"}
@@ -1,17 +0,0 @@
- import {} from "document-drive";
- import {} from "document-drive";
- import {} from "document-model";
- import { MyProcessorProcessor } from "./index.js";
- export const MyProcessorProcessorFactory = (module) => (driveHeader) => {
-   return [
-     {
-       processor: new MyProcessorProcessor(module.analyticsStore),
-       filter: {
-         branch: ["main"],
-         documentId: ["*"],
-         scope: ["*"],
-         documentType: ["powerhouse/workstream"],
-       },
-     },
-   ];
- };
@@ -1,13 +0,0 @@
- import type { IAnalyticsStore } from "@powerhousedao/analytics-engine-core";
- import type { InternalTransmitterUpdate, IProcessor } from "document-drive";
- export declare class MyProcessorProcessor implements IProcessor {
-   private readonly analyticsStore;
-   private readonly NAMESPACE;
-   private readonly inputs;
-   constructor(analyticsStore: IAnalyticsStore);
-   onStrands(strands: InternalTransmitterUpdate[]): Promise<void>;
-   onDisconnect(): Promise<void>;
-   private addWorkstreamAnalytics;
-   private clearSource;
- }
- //# sourceMappingURL=index.d.ts.map
@@ -1 +0,0 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../processors/my-processor/index.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAEV,eAAe,EAChB,MAAM,sCAAsC,CAAC;AAE9C,OAAO,KAAK,EAAE,yBAAyB,EAAE,UAAU,EAAE,MAAM,gBAAgB,CAAC;AAE5E,qBAAa,oBAAqB,YAAW,UAAU;IAKzC,OAAO,CAAC,QAAQ,CAAC,cAAc;IAJ3C,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAiB;IAE3C,OAAO,CAAC,QAAQ,CAAC,MAAM,CAA8B;gBAExB,cAAc,EAAE,eAAe;IAItD,SAAS,CAAC,OAAO,EAAE,yBAAyB,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAgD9D,YAAY;IAIlB,OAAO,CAAC,sBAAsB;YA+DhB,WAAW;CAO1B"}
@@ -1,90 +0,0 @@
- import { DateTime } from "luxon";
- import { AnalyticsPath } from "@powerhousedao/analytics-engine-core";
- export class MyProcessorProcessor {
-   analyticsStore;
-   NAMESPACE = "MyProcessor";
-   inputs = [];
-   constructor(analyticsStore) {
-     this.analyticsStore = analyticsStore;
-     //
-   }
-   async onStrands(strands) {
-     if (strands.length === 0) {
-       return;
-     }
-     for (const strand of strands) {
-       if (strand.operations.length === 0) {
-         continue;
-       }
-       const source = AnalyticsPath.fromString(`/${this.NAMESPACE}/${strand.driveId}/${strand.documentId}/${strand.branch}/${strand.scope}`);
-       // clear source if we have already inserted these analytics
-       const firstOp = strand.operations[0];
-       if (firstOp.index === 0) {
-         await this.clearSource(source);
-       }
-       if (strand.documentType === "powerhouse/workstream") {
-         for (const operation of strand.operations) {
-           const timestamp = operation.timestampUtcMs
-             ? DateTime.fromMillis(parseInt(operation.timestampUtcMs))
-             : DateTime.now();
-           const state = operation.state || strand.state;
-           // Record analytics for every operation to ensure we don't miss state changes
-           this.addWorkstreamAnalytics(state, source, timestamp, strand.documentId, operation.index);
-         }
-       }
-     }
-     // batch insert
-     if (this.inputs.length > 0) {
-       await this.analyticsStore.addSeriesValues(this.inputs);
-       this.inputs.length = 0;
-     }
-   }
-   async onDisconnect() {
-     //
-   }
-   addWorkstreamAnalytics(state, source, timestamp, documentId, operationIndex) {
-     if (!state)
-       return;
-     const dimensions = {};
-     if (state.status) {
-       dimensions.status = AnalyticsPath.fromString(`/${state.status}`);
-     }
-     if (state.client?.id) {
-       dimensions.network = AnalyticsPath.fromString(`/${state.client.id}`);
-     }
-     const initialProposal = state.initialProposal;
-     if (initialProposal?.status) {
-       dimensions.initial_proposal_status = AnalyticsPath.fromString(`/${initialProposal.status}`);
-     }
-     if (state.client?.name) {
-       dimensions.network_slug = AnalyticsPath.fromString(`/${state.client.name.toLowerCase().split(" ").join("-")}`);
-     }
-     if (state.title) {
-       dimensions.workstream_slug = AnalyticsPath.fromString(`/${state.title.toLowerCase().split(" ").join("-")}`);
-       dimensions.workstream_title = AnalyticsPath.fromString(`/${state.title.split("/").join("-")}`);
-     }
-     if (initialProposal?.author?.id) {
-       dimensions.initial_proposal_author = AnalyticsPath.fromString(`/${initialProposal.author.id}`);
-     }
-     if (initialProposal?.sow) {
-       dimensions.sow_phid = AnalyticsPath.fromString(`/${initialProposal.sow}`);
-     }
-     dimensions.workstream_phid = AnalyticsPath.fromString(`/${documentId}`);
-     dimensions.operation_index = AnalyticsPath.fromString(`/${operationIndex}`);
-     this.inputs.push({
-       start: timestamp,
-       source,
-       metric: "workstream",
-       value: 1,
-       dimensions,
-     });
-   }
-   async clearSource(source) {
-     try {
-       await this.analyticsStore.clearSeriesBySource(source, true);
-     }
-     catch (e) {
-       console.error(e);
-     }
-   }
- }