@trafficgroup/knex-rel 0.1.7 → 0.1.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/dao/VideoMinuteResultDAO.d.ts +40 -0
- package/dist/dao/VideoMinuteResultDAO.js +149 -0
- package/dist/dao/VideoMinuteResultDAO.js.map +1 -1
- package/dist/dao/batch/batch.dao.d.ts +27 -0
- package/dist/dao/batch/batch.dao.js +135 -0
- package/dist/dao/batch/batch.dao.js.map +1 -0
- package/dist/dao/camera/camera.dao.d.ts +17 -7
- package/dist/dao/camera/camera.dao.js +33 -48
- package/dist/dao/camera/camera.dao.js.map +1 -1
- package/dist/dao/folder/folder.dao.js +2 -1
- package/dist/dao/folder/folder.dao.js.map +1 -1
- package/dist/dao/location/location.dao.d.ts +17 -0
- package/dist/dao/location/location.dao.js +123 -0
- package/dist/dao/location/location.dao.js.map +1 -0
- package/dist/dao/study/study.dao.d.ts +1 -1
- package/dist/dao/study/study.dao.js +10 -10
- package/dist/dao/study/study.dao.js.map +1 -1
- package/dist/dao/video/video.dao.d.ts +30 -0
- package/dist/dao/video/video.dao.js +113 -1
- package/dist/dao/video/video.dao.js.map +1 -1
- package/dist/index.d.ts +6 -1
- package/dist/index.js +5 -1
- package/dist/index.js.map +1 -1
- package/dist/interfaces/batch/batch.interfaces.d.ts +13 -0
- package/dist/interfaces/batch/batch.interfaces.js +3 -0
- package/dist/interfaces/batch/batch.interfaces.js.map +1 -0
- package/dist/interfaces/camera/camera.interfaces.d.ts +4 -2
- package/dist/interfaces/location/location.interfaces.d.ts +9 -0
- package/dist/interfaces/location/location.interfaces.js +3 -0
- package/dist/interfaces/location/location.interfaces.js.map +1 -0
- package/dist/interfaces/study/study.interfaces.d.ts +4 -3
- package/dist/interfaces/video/video.interfaces.d.ts +9 -0
- package/migrations/20251020225758_migration.ts +135 -0
- package/migrations/20251112120000_migration.ts +89 -0
- package/migrations/20251112120100_migration.ts +21 -0
- package/migrations/20251112120200_migration.ts +50 -0
- package/migrations/20251112120300_migration.ts +27 -0
- package/package.json +1 -1
- package/src/dao/VideoMinuteResultDAO.ts +237 -0
- package/src/dao/batch/batch.dao.ts +121 -0
- package/src/dao/camera/camera.dao.ts +44 -61
- package/src/dao/folder/folder.dao.ts +7 -1
- package/src/dao/location/location.dao.ts +123 -0
- package/src/dao/study/study.dao.ts +10 -10
- package/src/dao/video/video.dao.ts +135 -1
- package/src/index.ts +13 -1
- package/src/interfaces/batch/batch.interfaces.ts +14 -0
- package/src/interfaces/camera/camera.interfaces.ts +4 -2
- package/src/interfaces/location/location.interfaces.ts +9 -0
- package/src/interfaces/study/study.interfaces.ts +4 -3
- package/src/interfaces/video/video.interfaces.ts +13 -1
package/dist/interfaces/video/video.interfaces.d.ts CHANGED

```diff
@@ -1,9 +1,14 @@
 import type { IFolder } from "../folder/folder.interfaces";
+export interface ITrimPeriod {
+  startTime: string;
+  endTime: string;
+}
 export interface IVideo {
   id: number;
   uuid: string;
   folderId: number;
   annotationSourceId?: number;
+  cameraId?: number;
   name: string;
   videoLocation: string;
   videoOutputLocation: string | null;
@@ -20,6 +25,10 @@ export interface IVideo {
   hlsPlaylist: string | null;
   videoSizeMB: number | null;
   streamingMetadata: Record<string, any> | null;
+  recordingStartedAt?: Date;
+  trimEnabled?: boolean;
+  trimPeriods?: ITrimPeriod[] | null;
+  batchId?: number | null;
   created_at: string;
   updated_at: string;
   folder?: IFolder;
```
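For illustration only, a video row that exercises the new optional fields might look like the sketch below. The import path assumes the interfaces are re-exported from the package root (not confirmed by this diff), and every value is invented.

```ts
// Hypothetical example of the new IVideo fields; values are invented.
import type { IVideo, ITrimPeriod } from "@trafficgroup/knex-rel";

const trimPeriods: ITrimPeriod[] = [
  // Keep only the 08:00-08:45 UTC window of the recording.
  { startTime: "2025-11-12T08:00:00Z", endTime: "2025-11-12T08:45:00Z" },
];

export const exampleVideo: Partial<IVideo> = {
  id: 1,
  uuid: "8b1f0d2e-0000-4000-8000-000000000000",
  folderId: 42,
  cameraId: 7,
  name: "intersection-cam-1.mp4",
  recordingStartedAt: new Date("2025-11-12T08:00:00Z"),
  trimEnabled: true,
  trimPeriods,
  batchId: null,
};
```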
package/migrations/20251020225758_migration.ts ADDED

```diff
@@ -0,0 +1,135 @@
+import type { Knex } from "knex";
+
+/**
+ * Migration: Add Recording Time, Video Trimming, and Batch Upload Features
+ *
+ * This migration implements three major features:
+ * 1. Recording Start Time - REQUIRED field for all videos (TIMESTAMPTZ in UTC)
+ * 2. Video Trimming - Optional datetime-based trimming with frame-skipping
+ * 3. Batch Upload - Up to 50 videos in all-or-nothing transaction
+ *
+ * FK Pattern: video_batch table uses id (INTEGER) for FKs, uuid (UUID) for external API
+ */
+export async function up(knex: Knex): Promise<void> {
+  // Step 1: Create video_batch table with proper id/uuid pattern
+  await knex.schema.createTable("video_batch", (table) => {
+    table
+      .increments("id")
+      .primary()
+      .comment("Primary key for internal foreign key relationships");
+
+    table
+      .uuid("uuid")
+      .notNullable()
+      .unique()
+      .defaultTo(knex.raw("uuid_generate_v4()"))
+      .comment("UUID for external API communication");
+
+    table
+      .integer("folderId")
+      .unsigned()
+      .notNullable()
+      .references("id")
+      .inTable("folders")
+      .onDelete("CASCADE")
+      .comment("Foreign key to folders table");
+
+    table
+      .enu("status", ["PENDING", "IN_PROGRESS", "COMPLETED", "FAILED"])
+      .notNullable()
+      .defaultTo("PENDING")
+      .comment("Status of the batch upload");
+
+    table
+      .integer("totalVideos")
+      .notNullable()
+      .defaultTo(0)
+      .comment("Total number of videos in batch");
+
+    table
+      .integer("completedVideos")
+      .notNullable()
+      .defaultTo(0)
+      .comment("Number of successfully created videos");
+
+    table
+      .integer("failedVideos")
+      .notNullable()
+      .defaultTo(0)
+      .comment("Number of failed videos");
+
+    table.timestamps(true, true);
+  });
+
+  // Step 2: Add recording time, trimming, and batch columns to video table
+  await knex.schema.alterTable("video", (table) => {
+    table
+      .timestamp("recordingStartedAt", { useTz: true })
+      .nullable()
+      .comment("Recording start time in UTC - null for backward compatibility");
+
+    table
+      .boolean("trimEnabled")
+      .notNullable()
+      .defaultTo(false)
+      .comment("Whether video trimming is enabled");
+
+    table
+      .jsonb("trimPeriods")
+      .nullable()
+      .comment(
+        "Array of trim periods with startTime and endTime in ISO 8601 format",
+      );
+
+    table
+      .integer("batchId")
+      .unsigned()
+      .nullable()
+      .references("id")
+      .inTable("video_batch")
+      .withKeyName("fk_video_batch")
+      .onDelete("SET NULL")
+      .comment("Foreign key to video_batch table (numerical ID, NOT UUID)");
+  });
+
+  // Step 3: Create indices for performance optimization
+
+  // Index for chronological queries by folder and recording time
+  await knex.raw(`
+    CREATE INDEX idx_videos_folder_recording
+    ON video("folderId", "recordingStartedAt")
+    WHERE "recordingStartedAt" IS NOT NULL
+  `);
+
+  // Index for batch operations
+  await knex.raw(`
+    CREATE INDEX idx_videos_batch
+    ON video("batchId")
+    WHERE "batchId" IS NOT NULL
+  `);
+
+  // Index for trimming filter
+  await knex.raw(`
+    CREATE INDEX idx_videos_trimming
+    ON video("trimEnabled")
+    WHERE "trimEnabled" = true
+  `);
+}
+
+export async function down(knex: Knex): Promise<void> {
+  // Drop indices first
+  await knex.raw(`DROP INDEX IF EXISTS idx_videos_folder_recording`);
+  await knex.raw(`DROP INDEX IF EXISTS idx_videos_batch`);
+  await knex.raw(`DROP INDEX IF EXISTS idx_videos_trimming`);
+
+  // Drop columns from video table
+  await knex.schema.alterTable("video", (table) => {
+    table.dropColumn("recordingStartedAt");
+    table.dropColumn("trimEnabled");
+    table.dropColumn("trimPeriods");
+    table.dropColumn("batchId");
+  });
+
+  // Drop video_batch table
+  await knex.schema.dropTableIfExists("video_batch");
+}
```
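The three indexes above are partial, so a query only benefits from them when it repeats the index predicate. As a rough sketch (not part of the package), the kind of Knex query that `idx_videos_folder_recording` is meant to serve would filter on the same `"recordingStartedAt" IS NOT NULL` condition; the connection setup below is hypothetical.

```ts
import knex from "knex";

// Hypothetical connection; adjust client/connection to your environment.
const db = knex({ client: "pg", connection: process.env.DATABASE_URL });

// List one folder's videos in recording order. Repeating the partial-index
// predicate (whereNotNull on "recordingStartedAt") lets Postgres use
// idx_videos_folder_recording for this query.
export async function listFolderVideosChronologically(folderId: number) {
  return db("video")
    .where("folderId", folderId)
    .whereNotNull("recordingStartedAt")
    .orderBy("recordingStartedAt", "asc");
}
```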
package/migrations/20251112120000_migration.ts ADDED

```diff
@@ -0,0 +1,89 @@
+import { Knex } from "knex";
+
+/**
+ * Migration: Rename cameras table to locations
+ *
+ * Purpose: Rename existing 'cameras' table to 'locations' to represent physical study sites.
+ * This is the first step in separating locations (physical sites) from cameras (devices at those sites).
+ */
+export async function up(knex: Knex): Promise<void> {
+  // 1. Rename the cameras table to locations
+  await knex.schema.renameTable("cameras", "locations");
+
+  // 2. Rename the uuid unique constraint to match new table name (frees up name for new cameras table)
+  await knex.raw(`
+    ALTER TABLE locations
+    RENAME CONSTRAINT cameras_uuid_unique TO locations_uuid_unique
+  `);
+
+  // 3. Rename the foreign key column in study table
+  await knex.schema.alterTable("study", (table) => {
+    table.renameColumn("cameraId", "locationId");
+  });
+
+  // 4. Drop the old foreign key constraint
+  await knex.raw(`
+    ALTER TABLE study
+    DROP CONSTRAINT IF EXISTS study_cameraid_foreign
+  `);
+
+  // 5. Add new foreign key constraint with updated name
+  await knex.schema.alterTable("study", (table) => {
+    table
+      .foreign("locationId")
+      .references("id")
+      .inTable("locations")
+      .onDelete("SET NULL");
+  });
+
+  // 6. Drop old index if it exists
+  await knex.raw(`
+    DROP INDEX IF EXISTS study_cameraid_index
+  `);
+
+  // 7. Create new index with updated name
+  await knex.schema.alterTable("study", (table) => {
+    table.index(["locationId"], "study_locationid_index");
+  });
+}
+
+export async function down(knex: Knex): Promise<void> {
+  // 1. Drop the new foreign key constraint
+  await knex.raw(`
+    ALTER TABLE study
+    DROP CONSTRAINT IF EXISTS study_locationid_foreign
+  `);
+
+  // 2. Drop new index
+  await knex.raw(`
+    DROP INDEX IF EXISTS study_locationid_index
+  `);
+
+  // 3. Rename column back
+  await knex.schema.alterTable("study", (table) => {
+    table.renameColumn("locationId", "cameraId");
+  });
+
+  // 4. Restore original foreign key constraint
+  await knex.schema.alterTable("study", (table) => {
+    table
+      .foreign("cameraId")
+      .references("id")
+      .inTable("locations")
+      .onDelete("SET NULL");
+  });
+
+  // 5. Restore original index
+  await knex.schema.alterTable("study", (table) => {
+    table.index(["cameraId"], "study_cameraid_index");
+  });
+
+  // 6. Rename constraint back to original name
+  await knex.raw(`
+    ALTER TABLE locations
+    RENAME CONSTRAINT locations_uuid_unique TO cameras_uuid_unique
+  `);
+
+  // 7. Rename the table back to cameras
+  await knex.schema.renameTable("locations", "cameras");
+}
```
package/migrations/20251112120100_migration.ts ADDED

```diff
@@ -0,0 +1,21 @@
+import { Knex } from "knex";
+
+/**
+ * Migration: Add isMultiCamera field to study table
+ *
+ * Purpose: Add boolean flag to indicate whether a study uses multiple cameras.
+ * Defaults to false for backward compatibility with existing single-camera studies.
+ */
+export async function up(knex: Knex): Promise<void> {
+  await knex.schema.alterTable("study", (table) => {
+    table.boolean("isMultiCamera").notNullable().defaultTo(false);
+    table.index(["isMultiCamera"], "study_ismulticamera_index");
+  });
+}
+
+export async function down(knex: Knex): Promise<void> {
+  await knex.schema.alterTable("study", (table) => {
+    table.dropIndex(["isMultiCamera"], "study_ismulticamera_index");
+    table.dropColumn("isMultiCamera");
+  });
+}
```
package/migrations/20251112120200_migration.ts ADDED

```diff
@@ -0,0 +1,50 @@
+import { Knex } from "knex";
+
+/**
+ * Migration: Create cameras table
+ *
+ * Purpose: Create new 'cameras' table to represent camera devices at locations.
+ * This allows multiple cameras to be associated with a single location.
+ */
+export async function up(knex: Knex): Promise<void> {
+  await knex.schema.createTable("cameras", (table) => {
+    table.increments("id").primary();
+    table
+      .uuid("uuid")
+      .defaultTo(knex.raw("uuid_generate_v4()"))
+      .unique()
+      .notNullable();
+    table
+      .integer("locationId")
+      .unsigned()
+      .notNullable()
+      .references("id")
+      .inTable("locations")
+      .onDelete("CASCADE");
+    table.string("name", 100).notNullable();
+    table.text("description").nullable();
+    table
+      .enu("status", ["ACTIVE", "INACTIVE", "MAINTENANCE"], {
+        useNative: true,
+        enumName: "camera_status_enum",
+      })
+      .defaultTo("ACTIVE")
+      .notNullable();
+    table.jsonb("metadata").defaultTo("{}").notNullable();
+    table.timestamps(true, true);
+
+    // Unique constraint: camera names must be unique within a location
+    table.unique(["locationId", "name"], "cameras_locationid_name_unique");
+
+    // Index for faster lookups
+    table.index(["locationId"], "cameras_locationid_index");
+    table.index(["status"], "cameras_status_index");
+  });
+}
+
+export async function down(knex: Knex): Promise<void> {
+  await knex.schema.dropTableIfExists("cameras");
+
+  // Drop the enum type
+  await knex.raw("DROP TYPE IF EXISTS camera_status_enum");
+}
```
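As a small illustration of the schema above (not taken from the package), inserting two cameras at the same location only has to keep their names distinct within that location, since `cameras_locationid_name_unique` is a composite unique constraint; the connection and row values below are hypothetical.

```ts
import knex from "knex";

// Hypothetical connection and data, used only to illustrate the constraint.
const db = knex({ client: "pg", connection: process.env.DATABASE_URL });

export async function registerApproachCameras(locationId: number) {
  // "northbound" and "southbound" may be reused at other locations; the
  // unique constraint applies to the (locationId, name) pair.
  await db("cameras").insert([
    { locationId, name: "northbound", status: "ACTIVE" },
    { locationId, name: "southbound", status: "ACTIVE" },
  ]);
}
```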
package/migrations/20251112120300_migration.ts ADDED

```diff
@@ -0,0 +1,27 @@
+import { Knex } from "knex";
+
+/**
+ * Migration: Add cameraId column to video table
+ *
+ * Purpose: Link videos to specific cameras in multi-camera studies.
+ * Nullable to maintain backward compatibility with existing videos.
+ */
+export async function up(knex: Knex): Promise<void> {
+  await knex.schema.alterTable("video", (table) => {
+    table
+      .integer("cameraId")
+      .unsigned()
+      .nullable()
+      .references("id")
+      .inTable("cameras")
+      .onDelete("SET NULL");
+    table.index(["cameraId"], "video_cameraid_index");
+  });
+}
+
+export async function down(knex: Knex): Promise<void> {
+  await knex.schema.alterTable("video", (table) => {
+    table.dropIndex(["cameraId"], "video_cameraid_index");
+    table.dropColumn("cameraId");
+  });
+}
```
package/package.json CHANGED
package/src/dao/VideoMinuteResultDAO.ts CHANGED

```diff
@@ -68,6 +68,35 @@ interface IGroupedResponse {
   };
 }
 
+interface IStudyTimeGroupResult {
+  absoluteTime: string; // ISO 8601 start of bucket
+  groupIndex: number;
+  startMinute: number; // Start minute number (0-based from video start)
+  endMinute: number; // End minute number (0-based from video start)
+  label: string; // Formatted label
+  results: ITMCResult | IATRResult;
+  minuteCount: number;
+  videoCount: number;
+  contributingVideos: string[]; // Video UUIDs
+}
+
+interface IGroupedStudyResponse {
+  success: boolean;
+  data: IStudyTimeGroupResult[];
+  groupingMinutes: number;
+  study: {
+    uuid: string;
+    name: string;
+    type: "TMC" | "ATR";
+    status: string;
+  };
+  videoCount: number;
+  dateRange: {
+    earliest: string;
+    latest: string;
+  };
+}
+
 export class VideoMinuteResultDAO implements IBaseDAO<IVideoMinuteResult> {
   private knex = KnexManager.getConnection();
   private tableName = "video_minute_results";
```
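To make the new response shape concrete, a hypothetical `IGroupedStudyResponse` for a 15-minute grouping could look like the literal below; every value is invented, and the aggregated `results` payload (TMC or ATR counts) is left empty because its shape is defined elsewhere in the DAO.

```ts
// Invented example of the grouped-study response shape (not real data).
export const exampleGroupedStudyResponse = {
  success: true,
  groupingMinutes: 15,
  study: {
    uuid: "1b2c3d4e-0000-4000-8000-000000000000",
    name: "Main St & 1st Ave",
    type: "TMC" as const,
    status: "COMPLETED",
  },
  videoCount: 2,
  dateRange: {
    earliest: "2025-11-12T08:00:00.000Z",
    latest: "2025-11-12T09:59:00.000Z",
  },
  data: [
    {
      absoluteTime: "2025-11-12T08:00:00.000Z",
      groupIndex: 0,
      startMinute: 0,
      endMinute: 14,
      label: "08:00 - 08:15",
      results: {}, // aggregated ITMCResult omitted in this sketch
      minuteCount: 15,
      videoCount: 1,
      contributingVideos: ["hypothetical-video-uuid"],
    },
  ],
};
```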
```diff
@@ -486,6 +515,184 @@ export class VideoMinuteResultDAO implements IBaseDAO<IVideoMinuteResult>
     };
   }
 
+  /**
+   * Get grouped minute results by study UUID with time block aggregation across all videos
+   * @param studyUuid - The UUID of the study
+   * @param groupingMinutes - Number of minutes to group together (default: 15)
+   */
+  async getGroupedMinuteResultsByStudyUuid(
+    studyUuid: string,
+    groupingMinutes: number = 15,
+  ): Promise<IGroupedStudyResponse> {
+    // Step 1: Fetch study and validate
+    const study = await this.knex("study").where("uuid", studyUuid).first();
+
+    if (!study) {
+      throw new Error(`Study with UUID ${studyUuid} not found`);
+    }
+
+    // Step 2: Fetch all COMPLETED videos in the study with recordingStartedAt
+    const videos = await this.knex("video as v")
+      .innerJoin("folders as f", "v.folderId", "f.id")
+      .select("v.id", "v.uuid", "v.name", "v.recordingStartedAt", "v.videoType")
+      .where("f.studyId", study.id)
+      .where("v.status", "COMPLETED")
+      .whereNotNull("v.recordingStartedAt")
+      .orderBy("v.recordingStartedAt", "asc");
+
+    // If no completed videos with recordingStartedAt, return empty result
+    if (videos.length === 0) {
+      return {
+        success: true,
+        data: [],
+        groupingMinutes,
+        study: {
+          uuid: study.uuid,
+          name: study.name,
+          type: study.type,
+          status: study.status,
+        },
+        videoCount: 0,
+        dateRange: {
+          earliest: "",
+          latest: "",
+        },
+      };
+    }
+
+    // Get video IDs for minute results query
+    const videoIds = videos.map((v) => v.id);
+
+    // Calculate date range
+    const earliestVideo = videos[0];
+    const latestVideo = videos[videos.length - 1];
+    const earliestTime = new Date(earliestVideo.recordingStartedAt);
+
+    // Step 3: Fetch and normalize all minute results with absolute times
+    const minuteResults = await this.knex("video_minute_results as vmr")
+      .innerJoin("video as v", "vmr.video_id", "v.id")
+      .select(
+        "vmr.minute_number",
+        "vmr.results",
+        "v.uuid as videoUuid",
+        "v.recordingStartedAt",
+        this.knex.raw(
+          '"v"."recordingStartedAt" + (vmr.minute_number || \' minutes\')::INTERVAL as "absoluteTime"',
+        ),
+      )
+      .whereIn("v.id", videoIds)
+      .orderBy("absoluteTime", "asc");
+
+    // If no minute results found, return empty result
+    if (minuteResults.length === 0) {
+      return {
+        success: true,
+        data: [],
+        groupingMinutes,
+        study: {
+          uuid: study.uuid,
+          name: study.name,
+          type: study.type,
+          status: study.status,
+        },
+        videoCount: videos.length,
+        dateRange: {
+          earliest: earliestVideo.recordingStartedAt.toISOString(),
+          latest: latestVideo.recordingStartedAt.toISOString(),
+        },
+      };
+    }
+
+    // Step 4: Group by time buckets in TypeScript
+    interface IBucket {
+      groupIndex: number;
+      absoluteTime: Date;
+      results: any[];
+      videoUuids: Set<string>;
+    }
+
+    const buckets = new Map<number, IBucket>();
+
+    for (const minute of minuteResults) {
+      const absoluteTime = new Date(minute.absoluteTime);
+      const minutesSinceEarliest = Math.floor(
+        (absoluteTime.getTime() - earliestTime.getTime()) / (1000 * 60),
+      );
+      const bucketIndex = Math.floor(minutesSinceEarliest / groupingMinutes);
+
+      if (!buckets.has(bucketIndex)) {
+        // Calculate bucket start time
+        const bucketStartMinutes = bucketIndex * groupingMinutes;
+        const bucketStartTime = new Date(
+          earliestTime.getTime() + bucketStartMinutes * 60 * 1000,
+        );
+
+        buckets.set(bucketIndex, {
+          groupIndex: bucketIndex,
+          absoluteTime: bucketStartTime,
+          results: [],
+          videoUuids: new Set<string>(),
+        });
+      }
+
+      const bucket = buckets.get(bucketIndex)!;
+      bucket.results.push(minute.results);
+      bucket.videoUuids.add(minute.videoUuid);
+    }
+
+    // Step 5: Aggregate using existing methods based on study type
+    const aggregatedGroups: IStudyTimeGroupResult[] = Array.from(
+      buckets.values(),
+    )
+      .sort((a, b) => a.groupIndex - b.groupIndex)
+      .map((bucket) => {
+        let aggregatedResult: ITMCResult | IATRResult;
+
+        if (study.type === "TMC") {
+          aggregatedResult = this.aggregateTMCResults(bucket.results);
+        } else {
+          aggregatedResult = this.aggregateATRResults(bucket.results);
+        }
+
+        return {
+          absoluteTime: bucket.absoluteTime.toISOString(),
+          groupIndex: bucket.groupIndex,
+          startMinute: bucket.groupIndex * groupingMinutes,
+          endMinute: (bucket.groupIndex + 1) * groupingMinutes - 1,
+          label: this.formatStudyTimeLabel(
+            bucket.absoluteTime,
+            groupingMinutes,
+          ),
+          results: aggregatedResult,
+          minuteCount: bucket.results.length,
+          videoCount: bucket.videoUuids.size,
+          contributingVideos: Array.from(bucket.videoUuids),
+        };
+      });
+
+    // Calculate latest time from last minute result
+    const lastMinute = minuteResults[minuteResults.length - 1];
+    const latestTime = new Date(lastMinute.absoluteTime);
+
+    // Step 6: Format response
+    return {
+      success: true,
+      data: aggregatedGroups,
+      groupingMinutes,
+      study: {
+        uuid: study.uuid,
+        name: study.name,
+        type: study.type,
+        status: study.status,
+      },
+      videoCount: videos.length,
+      dateRange: {
+        earliest: earliestTime.toISOString(),
+        latest: latestTime.toISOString(),
+      },
+    };
+  }
+
   /**
    * Aggregate minute results based on video type (TMC or ATR)
    */
```
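A minimal sketch of calling the new method from application code: the import path, the assumption that the DAO has a no-argument constructor (its `knex` field is initialized from `KnexManager.getConnection()`), and the grouping value are all assumptions for illustration.

```ts
// Hypothetical import path; the DAO may also be re-exported from the package root.
import VideoMinuteResultDAO from "@trafficgroup/knex-rel/dist/dao/VideoMinuteResultDAO";

export async function printStudyBlocks(studyUuid: string): Promise<void> {
  const dao = new VideoMinuteResultDAO();

  // Group the study's per-minute results into 30-minute blocks.
  const grouped = await dao.getGroupedMinuteResultsByStudyUuid(studyUuid, 30);

  for (const block of grouped.data) {
    console.log(
      `${block.label}: ${block.minuteCount} minute(s) from ${block.videoCount} video(s)`,
    );
  }
}
```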
```diff
@@ -728,6 +935,36 @@ export class VideoMinuteResultDAO implements IBaseDAO<IVideoMinuteResult> {
 
     return `${formatMinute(startMinute)}:00 - ${formatMinute(endMinute)}:59`;
   }
+
+  /**
+   * Format time label for study-level results (time only, no dates)
+   * Used when results are already grouped by date in the UI
+   * Uses UTC time for consistency with absoluteTime field
+   */
+  private formatStudyTimeLabel(
+    startTime: Date,
+    durationMinutes: number,
+  ): string {
+    const endTime = new Date(startTime.getTime() + durationMinutes * 60 * 1000);
+
+    const formatTime = (date: Date): string => {
+      const hours = date.getUTCHours().toString().padStart(2, "0");
+      const minutes = date.getUTCMinutes().toString().padStart(2, "0");
+      return `${hours}:${minutes}`;
+    };
+
+    return `${formatTime(startTime)} - ${formatTime(endTime)}`;
+  }
 }
 
+// Export interfaces for external use
+export type {
+  IStudyTimeGroupResult,
+  IGroupedStudyResponse,
+  IGroupedResponse,
+  IGroupedResult,
+  ITMCResult,
+  IATRResult,
+};
+
 export default VideoMinuteResultDAO;
```
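One detail worth noting in the grouping logic: buckets are anchored at the earliest `recordingStartedAt` across the study's videos, not at clock-aligned boundaries. The standalone sketch below restates that arithmetic (it is not part of the package API) and shows the effect.

```ts
// Re-statement of the bucketing arithmetic used in
// getGroupedMinuteResultsByStudyUuid, for illustration only.
function bucketFor(absolute: Date, earliest: Date, groupingMinutes: number) {
  const minutesSinceEarliest = Math.floor(
    (absolute.getTime() - earliest.getTime()) / (1000 * 60),
  );
  const groupIndex = Math.floor(minutesSinceEarliest / groupingMinutes);
  const bucketStart = new Date(
    earliest.getTime() + groupIndex * groupingMinutes * 60 * 1000,
  );
  return { groupIndex, bucketStart };
}

// With the earliest recording at 08:07 UTC and 15-minute groups, a minute
// at 08:25 falls into group 1, whose bucket starts at 08:22 UTC.
console.log(
  bucketFor(
    new Date("2025-11-12T08:25:00Z"),
    new Date("2025-11-12T08:07:00Z"),
    15,
  ),
);
```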