speechrecorderng 3.9.7 → 3.9.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46) hide show
  1. package/README.md +478 -12
  2. package/esm2022/lib/audio/audio_display.mjs +3 -3
  3. package/esm2022/lib/audio/audio_player.mjs +3 -3
  4. package/esm2022/lib/audio/ui/audio_canvas_layer_comp.mjs +6 -6
  5. package/esm2022/lib/audio/ui/audio_display_control.mjs +3 -3
  6. package/esm2022/lib/audio/ui/audio_display_scroll_pane.mjs +3 -3
  7. package/esm2022/lib/audio/ui/audiosignal.mjs +3 -3
  8. package/esm2022/lib/audio/ui/container.mjs +3 -3
  9. package/esm2022/lib/audio/ui/livelevel.mjs +3 -3
  10. package/esm2022/lib/audio/ui/scroll_pane_horizontal.mjs +3 -3
  11. package/esm2022/lib/audio/ui/sonagram.mjs +3 -3
  12. package/esm2022/lib/db/inddb.mjs +3 -3
  13. package/esm2022/lib/speechrecorder/project/project.service.mjs +3 -3
  14. package/esm2022/lib/speechrecorder/recordings/recordings.service.mjs +3 -3
  15. package/esm2022/lib/speechrecorder/script/script.service.mjs +3 -3
  16. package/esm2022/lib/speechrecorder/session/audiorecorder.mjs +6 -6
  17. package/esm2022/lib/speechrecorder/session/controlpanel.mjs +21 -21
  18. package/esm2022/lib/speechrecorder/session/progress.mjs +3 -3
  19. package/esm2022/lib/speechrecorder/session/prompting.mjs +15 -15
  20. package/esm2022/lib/speechrecorder/session/recorder_combi_pane.mjs +3 -3
  21. package/esm2022/lib/speechrecorder/session/recording_list.mjs +3 -3
  22. package/esm2022/lib/speechrecorder/session/recordingfile/recording-file-meta.component.mjs +3 -3
  23. package/esm2022/lib/speechrecorder/session/recordingfile/recording-file-navi.component.mjs +3 -3
  24. package/esm2022/lib/speechrecorder/session/recordingfile/recording-file-u-i.component.mjs +3 -3
  25. package/esm2022/lib/speechrecorder/session/recordingfile/recording-file-view.component.mjs +3 -3
  26. package/esm2022/lib/speechrecorder/session/recordingfile/recordingfile-service.mjs +3 -3
  27. package/esm2022/lib/speechrecorder/session/session.service.mjs +3 -3
  28. package/esm2022/lib/speechrecorder/session/session_finished_dialog.mjs +3 -3
  29. package/esm2022/lib/speechrecorder/session/sessionmanager.mjs +3 -3
  30. package/esm2022/lib/speechrecorder/session/warning_bar.mjs +3 -3
  31. package/esm2022/lib/speechrecorder/spruploader.mjs +3 -3
  32. package/esm2022/lib/speechrecorder/startstopsignal/ui/simpletrafficlight.mjs +3 -3
  33. package/esm2022/lib/speechrecorderng.component.mjs +3 -3
  34. package/esm2022/lib/speechrecorderng.module.mjs +4 -4
  35. package/esm2022/lib/spr.config.mjs +3 -3
  36. package/esm2022/lib/spr.module.version.mjs +2 -2
  37. package/esm2022/lib/ui/canvas_layer_comp.mjs +3 -3
  38. package/esm2022/lib/ui/intersection-observer.directive.mjs +3 -3
  39. package/esm2022/lib/ui/message_dialog.mjs +3 -3
  40. package/esm2022/lib/ui/recordingitem_display.mjs +6 -6
  41. package/esm2022/lib/ui/responsive_component.mjs +3 -3
  42. package/esm2022/lib/utils/scrollIntoViewToBottom.mjs +3 -3
  43. package/fesm2022/speechrecorderng.mjs +161 -161
  44. package/fesm2022/speechrecorderng.mjs.map +1 -1
  45. package/lib/spr.module.version.d.ts +1 -1
  46. package/package.json +12 -12
package/README.md CHANGED
@@ -1,23 +1,489 @@
1
- # Speechrecorderng
1
+ # SpeechRecorderNg
2
2
 
3
+ A Speech Recording Tool implemented as an Angular 17 module.
3
4
 
4
- ## Code scaffolding
5
+ ## Migrate from version 2.x.x to 3.x.x
6
+ For backwards compatibility to server REST API v1 set the property `apiVersion: 1` in your environment file.
5
7
 
6
- Run `ng generate component component-name --project speechrecorderng` to generate a new component. You can also use `ng generate directive|pipe|service|class|guard|interface|enum|module --project speechrecorderng`.
7
- > Note: Don't forget to add `--project speechrecorderng` or else it will be added to the default project in your `angular.json` file.
8
+ ## Integrate SpeechRecorder module to your web application
8
9
 
9
- ## Build
10
+ ### Install NPM package
11
+ Speechrecorder module is available as NPM package.
12
+ Add `"speechrecorderng": "3.9.8"` to the `dependencies` array property in the `package.json` file of your application. Run `npm install` to install the package.
13
+ ### Module integration
14
+ Add SpeechRecorderNg module to imports property of your `AppModule` annotation. The module main component `SpeechRecorder` should be activated by an Angular route.
10
15
 
11
- Run `ng build speechrecorderng` to build the project. The build artifacts will be stored in the `dist/` directory.
16
+ #### Example `app.module.ts`
17
+ ```
18
+ import { BrowserModule } from '@angular/platform-browser';
19
+ import { NgModule } from '@angular/core';
12
20
 
13
- ## Publishing
21
+ import { AppComponent } from './app.component';
22
+ import {SpeechrecorderngComponent, SpeechRecorderConfig, SpeechrecorderngModule} from 'speechrecorderng'
23
+ import {RouterModule, Routes} from '@angular/router';
24
+ import {BrowserAnimationsModule} from "@angular/platform-browser/animations";
25
+ import {MdButtonModule, MdDialogModule, MdIconModule, MdMenuModule, MdToolbarModule} from "@angular/material";
14
26
 
15
- After building your library with `ng build speechrecorderng`, go to the dist folder `cd dist/speechrecorderng` and run `npm publish`.
27
+ const MY_APP_ROUTES: Routes = [
28
+ { path: 'spr', component: SpeechrecorderngComponent}
29
+ ];
16
30
 
17
- ## Running unit tests
31
+ const SPR_CFG:SpeechRecorderConfig={
32
+ apiEndPoint: '/myapppath/api/v1'
33
+ }
18
34
 
19
- Run `ng test speechrecorderng` to execute the unit tests via [Karma](https://karma-runner.github.io).
35
+ @NgModule({
36
+ declarations: [
37
+ AppComponent
38
+ ],
39
+ imports: [
40
+ RouterModule.forRoot(MY_APP_ROUTES),BrowserModule,BrowserAnimationsModule,SpeechrecorderngModule.forRoot(SPR_CFG)
41
+ ],
42
+ providers: [],
43
+ bootstrap: [AppComponent]
44
+ })
45
+ export class AppModule { }
46
+ ```
20
47
 
21
- ## Further help
48
+ ### HTML/CSS integration
49
+ Speechrecorder is intended to run in a layout which always fits the browser viewport without scrollbars. The subject should not be distracted from performing the recording session.
50
+ Therefore the module should be embedded in an HTML page with 100% height and without padding or margin.
51
+ At least the CSS properties `margin-top`,`margin-bottom`,`padding-top`,`padding-bottom` should be zero and `height` should be `100%` for the DOM elements `html` and `body`
52
+ #### Example `index.html`
53
+ ```
54
+ <!doctype html>
55
+ <html lang="en" style="height:100%;margin:0;padding:0">
56
+ <head>
57
+ <meta charset="utf-8">
58
+ <title>My application</title>
59
+ <base href="/">
60
+ <meta name="viewport" content="width=device-width, initial-scale=1">
61
+ <link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet">
62
+ </head>
63
+ <body style="height:100%;margin:0;padding:0">
64
+ <app-root class="mat-typography"></app-root>
65
+ </body>
66
+ </html>
67
+ ```
68
+ The SpeechRecorder component will appear in the Angular `router-outlet` element, if a route for the `SpeechRecorder` component is matched.
69
+
70
+ #### Example `app.component.html` with Material Design menubar
71
+ ```
72
+ <md-toolbar color="primary">
73
+
74
+ <button md-button [mdMenuTriggerFor]="menu">
75
+ <md-icon>menu</md-icon>
76
+ </button>
77
+ <md-menu #menu="mdMenu" yPosition="below" [overlapTrigger]="false">
78
+ <button md-menu-item [mdMenuTriggerFor]="helpMenu">Help</button>
79
+ <md-menu #helpMenu="mdMenu" xPosition="after" [overlapTrigger]="false">
80
+ <p>My application</p>
81
+ </md-menu>
82
+ </md-menu>
83
+ &nbsp;<span>My Application</span>
84
+ </md-toolbar>
85
+ <router-outlet></router-outlet>
86
+ ```
87
+
88
+ ### Deployment on the server
89
+ See [Angular Deployment/Server Configuration](https://angular.io/guide/deployment#server-configuration) for details.
22
90
 
23
- To get more help on the Angular CLI use `ng help` or go check out the [Angular CLI Overview and Command Reference](https://angular.io/cli) page.
91
+ To distinguish between the REST API base paths and the path for the web application the application should not be deployed to the top level directory of your Web-server.
92
+ Choose an arbitrary base path for the app e.g. `/wsr/ng/dist/` and build the app accordingly:
93
+ ```
94
+ ng build --base-href=/wsr/ng/dist/ --prod
95
+ ```
96
+ Copy the dist folder to ```/wsr/ng/``` on your Web-Server and setup the fallback configuration for this path in your Web-Server.
97
+
98
+
99
+
100
+ ### Server REST API
101
+
102
+ SpeechRecorder requires an HTTP server providing a REST API. The server code is not part of this package.
103
+ The package only contains a minimal file structure for testing. The files reside in `src/test`.
104
+
105
+ Versions 2.x.x of WebSpeechRecorderNg use the REST API version v1, Versions 3.x.x may use API version v1 and v2. Set environment property apiVersion accordingly (default: `apiVersion: 1`)
106
+
107
+ ## Configuration
108
+
109
+ By default the API Endpoint ({apiEndPoint}) is an empty string, the API is then expected to be relative to the base path of the application.
110
+
111
+
112
+ ## SpeechRecorder REST API description
113
+
114
+ ### Entity Project
115
+
116
+ REST Path: GET {apiEndPoint}project/{projectId}
117
+
118
+ Content-type: application/json
119
+
120
+ Example for Mono recordings:
121
+
122
+ ```
123
+ {
124
+ "name": "My project",
125
+ "audioFormat" : {
126
+ "channels": 1
127
+ }
128
+ }
129
+ ```
130
+ ### Entity Session
131
+
132
+ Current recording session data.
133
+
134
+ REST Path: GET {apiEndPoint}session/{sessionId}
135
+
136
+ Content-type: application/json
137
+
138
+ Properties:
139
+ * sessionId: number: Unique ID of the session
140
+ * script: number: Unique ID of recording script
141
+
142
+ Example:
143
+ ```
144
+ {
145
+ "sessionId": "2",
146
+ "project": "My project",
147
+ "script": "1245"
148
+ }
149
+ ```
150
+
151
+ During the session the application will try to update the session object on the server by HTTP PATCH requests.
152
+ The session properties status,loadedDate,startedTrainingDate,startedDate,completedDate and restartedDate
153
+ will be patched accordingly to the session events.
154
+
155
+ REST Path: PATCH {apiEndPoint}session/{sessionId}
156
+
157
+ Content-type: application/json
158
+
159
+ Properties (only changed properties are set):
160
+ * status: enum: "CREATED" | "LOADED" | "STARTED_TRAINING" | "STARTED" | "COMPLETED" status of the session
161
+ * loadedDate: string: date/time when session was loaded
162
+ * startedTrainingDate: string: date/time when a training section was started
163
+ * startedDate: string: date/time of recording start
164
+ * completedDate: string: date/time of session completed
165
+ * restartedDate: string: date/time of a session restart (continue)
166
+
167
+ For example when the session and script is loaded successfully, this PATCH request might be sent:
168
+ ```
169
+ {"status":"LOADED","loadedDate":"2020-03-25T12:52:12.616Z"}
170
+ ```
171
+
172
+ ### Entity Script
173
+
174
+ Recording script controls recording session procedure.
175
+
176
+ REST Path: GET {apiEndPoint}script/{scriptId}
177
+
178
+ Content-type: application/json
179
+
180
+ Properties:
181
+ * type: script: constant: Must be `"script"`
182
+ * scriptId: number: Unique ID of the script
183
+ * sections: array: Array of recording session sections
184
+
185
+ ### Embedded entity Section
186
+
187
+ Properties:
188
+ * name: Optional name of section
189
+ * mode: enum: `MANUAL`, `AUTOPROGRESS` or `AUTORECORDING`
190
+ * promptUnits: array: List of prompt units.
191
+ * training: boolean: Section is intended as training for the subject. The recording items of a training section are ignored when the completeness of the session (each prompt item is recorded) is checked.
192
+
193
+ ### Embedded entity Prompt Unit
194
+
195
+ Properties:
196
+
197
+ * recpromptId: Unique ID of this recording prompt
198
+ * itemcode: string: In the scope of the script unique identifier of a recording item
199
+ * mediaitems: array: List of media items for this prompt. Currently only a single mediaitem element in the array is supported.
200
+
201
+ ### Embedded entity Media item
202
+
203
+ Properties (supported properties only):
204
+ * text: string: Text to prompt
205
+
206
+ Example script:
207
+ ```
208
+ {
209
+ "type": "script",
210
+ "scriptId": "1245",
211
+ "sections": [
212
+ {
213
+ "mode": "MANUAL",
214
+ "name": "Introduction",
215
+ "groups": [
216
+ {
217
+ "promptItems": [
218
+ {
219
+ "itemcode": "I0",
220
+ "mediaitems": [
221
+ {
222
+ "text": "Willkommen bei der IPS-Sprachaufnahme!"
223
+ }
224
+ ],
225
+
226
+ },
227
+ {
228
+ "itemcode": "I1",
229
+ "mediaitems": [
230
+ {
231
+ "text": "Hier steht der Prompt; ein kurzer Text, den Sie lesen, eine Frage, die Sie beantworten oder ein Bild, das Sie beschreiben sollen."
232
+ }
233
+ ],
234
+
235
+ }
236
+ ]
237
+ }
238
+ ],
239
+ "training": false
240
+ },
241
+ {
242
+ "mode": "AUTOPROGRESS",
243
+ "name": "Recording Session",
244
+ "groups": [
245
+ {
246
+ "promptItems": [
247
+ {
248
+ "itemcode": "N0",
249
+ "recduration": 10000,
250
+ "mediaitems": [
251
+ {
252
+ "text": "What's your name?"
253
+ }
254
+ ],
255
+
256
+ },
257
+ {
258
+ "itemcode": "S0",
259
+ "mediaitems": [
260
+ {
261
+ "text": "Lorem ipsum dolor sit amet, consectetur adipiscing elit."
262
+ }
263
+ ],
264
+
265
+ }
266
+ ]
267
+ }
268
+ ]
269
+ }
270
+ ]
271
+ }
272
+
273
+ ```
274
+
275
+ ### Recording file
276
+
277
+ SpeechRecorder stores the recording in browser memory first. The recordings are then uploaded to the server as binary encoded WAVE files.
278
+
279
+ Path: POST {apiEndPoint}session/{sessionId}/recfile/{itemcode}
280
+
281
+ Content-Type: audio/wav
282
+
283
+ There might be multiple uploads for one recording item, when the subject repeats a recording. The server is responsible for handling these uploads.
284
+ The server should apply a unique identifier for each uploaded recording file. Subsequent recording uploads for the same itemcode should get different IDs and should be stored with a version number starting with zero.
285
+ A GET request to the URL should return the latest upload.
286
+
287
+ ### Start a recording session
288
+
289
+ The default routing path to start a recording session is `/spr/session/{sessionId}`. If you call this router link from your Angular application
290
+ WebSpeechRecorderNg should start and will try to load the session data from the REST API first.
291
+
292
+ ## GUI components to view and edit your recording database
293
+
294
+ ### Edit or view recording files
295
+ To edit a selection of a recording file call the router link:
296
+ `/spr/db/recordingfile/{recordingFileId}`
297
+
298
+ To only view a recording file:
299
+ `/spr/db/recordingfile/_view/{recordingFileId}`
300
+
301
+
302
+ The application will send in both modes the following requests to the REST API:
303
+
304
+ 1. Recording file meta data
305
+
306
+ Path: POST {apiEndPoint}recordingfile/{recordingFileId}
307
+
308
+ Accept: application/json
309
+
310
+ ```
311
+ {
312
+ "recordingFileId": "5678",
313
+ "session": 2,
314
+ "version": 0,
315
+ "recording": {
316
+ "itemcode": "N0",
317
+ "recduration": 10000,
318
+
319
+ "recinstructions": {
320
+ "recinstructions": "Please answer:"
321
+ },
322
+ "mediaitems": [
323
+ {
324
+ "annotationTemplate": false,
325
+ "autoplay": false,
326
+ "mimetype": "text/plain",
327
+ "text": "What's your name?"
328
+ }
329
+ ]
330
+ }
331
+ }
332
+ ```
333
+
334
+ 2. The recording file itself:
335
+
336
+ (Same URL however it requests an audio MIME type )
337
+
338
+ Path: POST {apiEndPoint}recordingfile/{recordingFileId}
339
+
340
+ Accept: audio/wav
341
+
342
+
343
+ and optional to navigate through recording files of the same session:
344
+
345
+ 3. Session data of this recording file
346
+
347
+ REST Path: GET {apiEndPoint}session/{sessionId}
348
+
349
+ Content-type: application/json
350
+
351
+
352
+ 4. The recording file list of the session if the session ID could be retrieved:
353
+
354
+ REST Path: GET {apiEndPoint}project/{projectId}/session/{sessionId}/recfile
355
+
356
+ Content-type: application/json
357
+
358
+
359
+ A server response might look like this:
360
+
361
+ ```
362
+ [ {
363
+ "recordingFileId": "1234",
364
+ "session": 2,
365
+ "date" : "2020-05-01T20:03:00.456+01:00",
366
+ "recording" : {
367
+ "mediaitems" : [ {
368
+ "annotationTemplate" : true,
369
+ "text" : "Heute ist schönes Frühlingswetter!"
370
+ } ],
371
+ "itemcode" : "demo_99",
372
+ "recduration" : 4000,
373
+ "recinstructions" : {
374
+ "recinstructions" : "Please read:"
375
+ }
376
+ }
377
+ },
378
+ {
379
+ "recordingFileId": "5678",
380
+ "session": 2,
381
+ "date" : "2020-06-10T20:04:44.123+01:00",
382
+ "version": 0,
383
+ "recording": {
384
+ "itemcode": "N0",
385
+ "recduration": 10000,
386
+
387
+ "recinstructions": {
388
+ "recinstructions": "Please answer:"
389
+ },
390
+ "mediaitems": [
391
+ {
392
+ "annotationTemplate": false,
393
+ "autoplay": false,
394
+ "mimetype": "text/plain",
395
+ "text": "What's your name?"
396
+ }
397
+ ]
398
+ }
399
+ },
400
+ {
401
+ "recordingFileId": "9999",
402
+ "session": 2,
403
+ "date" : "2020-06-15T 18:05:19.000+01:00",
404
+ "version": 1,
405
+ "recording": {
406
+ "itemcode": "N0",
407
+ "recduration": 10000,
408
+
409
+ "recinstructions": {
410
+ "recinstructions": "Please answer:"
411
+ },
412
+ "mediaitems": [
413
+ {
414
+ "annotationTemplate": false,
415
+ "autoplay": false,
416
+ "mimetype": "text/plain",
417
+ "text": "What's your name?"
418
+ }
419
+ ]
420
+ }
421
+ }
422
+ ]
423
+ ```
424
+
425
+ 5. Get the recording file:
426
+ Path: GET {apiEndPoint}project/{projectId}/session/{sessionId}/recfile
427
+ Accept: audio/wav
428
+
429
+ Content-type: audio/wav
430
+
431
+ API v2 extension:
432
+ The server must be able to deliver sections of a recording file as a valid WAVE file.
433
+ The section will be selected by the query parameters `startFrame` for the start position and `frameLength` for the length of the section.
434
+ The client will not send these queries with API v1.
435
+
436
+ Path: GET {apiEndPoint}project/{projectId}/session/{sessionId}/recfile?startFrame={startFrame}&frameLength={frameLength}
437
+ Accept: audio/wav
438
+
439
+ Content-type: audio/wav
440
+
441
+ 6. Save edit selection:
442
+
443
+ Path: PATCH {apiEndPoint}recordingfile/{recordingFileId}
444
+
445
+ Accept: application/json
446
+
447
+ Sends `editSampleRate`,`editStartFrame` and `editEndFrame` sample position properties of the selection, for example:
448
+
449
+ ```
450
+ {
451
+ "editSampleRate": 48000,
452
+ "editStartFrame":182360,
453
+ "editEndFrame":303934
454
+ }
455
+ ```
456
+
457
+ or null values to remove the edit selection:
458
+
459
+ ```
460
+ {
461
+ "editSampleRate": null,
462
+ "editStartFrame":null,
463
+ "editEndFrame":null
464
+ }
465
+ ```
466
+
467
+
468
+ ### Development server
469
+
470
+ Run `ng serve` for a development server.
471
+ Navigate to `http://localhost:4200/spr/session/2` to start a demo recording session.
472
+ Or edit/view a test recording file ID 1234 from the demo database:
473
+ `http://localhost:4200/spr/db/recordingfile/1234`
474
+
475
+ The app will automatically reload if you change any of the source files.
476
+
477
+ ### Build
478
+
479
+ Run `ng build` to build the project. The build artifacts will be stored in the `dist/` directory. Use the `-prod` flag for a production build.
480
+
481
+
482
+ ### Build module
483
+
484
+ Run `npm run build_module` to build the module. The build artifacts will be stored in the `dist/speechrecorderng` directory.
485
+
486
+
487
+ ### Clean dist
488
+
489
+ Remove folder `dist`.
@@ -67,8 +67,8 @@ export class AudioDisplay {
67
67
  error() {
68
68
  this.status = 'ERROR';
69
69
  }
70
- static { this.ɵfac = i0.ɵɵngDeclareFactory({ minVersion: "12.0.0", version: "17.3.4", ngImport: i0, type: AudioDisplay, deps: [{ token: i0.ChangeDetectorRef }, { token: i0.ElementRef }], target: i0.ɵɵFactoryTarget.Component }); }
71
- static { this.ɵcmp = i0.ɵɵngDeclareComponent({ minVersion: "14.0.0", version: "17.3.4", type: AudioDisplay, selector: "app-audiodisplay", inputs: { playStartAction: "playStartAction", playStopAction: "playStopAction", playSelectionAction: "playSelectionAction", autoPlayOnSelectToggleAction: "autoPlayOnSelectToggleAction", audioData: "audioData", audioClip: "audioClip" }, viewQueries: [{ propertyName: "audioDisplayScrollPane", first: true, predicate: AudioDisplayScrollPane, descendants: true, static: true }], ngImport: i0, template: `
70
+ static { this.ɵfac = i0.ɵɵngDeclareFactory({ minVersion: "12.0.0", version: "17.3.5", ngImport: i0, type: AudioDisplay, deps: [{ token: i0.ChangeDetectorRef }, { token: i0.ElementRef }], target: i0.ɵɵFactoryTarget.Component }); }
71
+ static { this.ɵcmp = i0.ɵɵngDeclareComponent({ minVersion: "14.0.0", version: "17.3.5", type: AudioDisplay, selector: "app-audiodisplay", inputs: { playStartAction: "playStartAction", playStopAction: "playStopAction", playSelectionAction: "playSelectionAction", autoPlayOnSelectToggleAction: "autoPlayOnSelectToggleAction", audioData: "audioData", audioClip: "audioClip" }, viewQueries: [{ propertyName: "audioDisplayScrollPane", first: true, predicate: AudioDisplayScrollPane, descendants: true, static: true }], ngImport: i0, template: `
72
72
 
73
73
  <audio-display-scroll-pane #audioDisplayScrollPane></audio-display-scroll-pane>
74
74
 
@@ -83,7 +83,7 @@ export class AudioDisplay {
83
83
  [zoomFitToPanelAction]="zoomFitToPanelAction"></audio-display-control>
84
84
  `, isInline: true, styles: [":host{display:flex;flex-direction:column;position:absolute;bottom:0;height:100%;width:100%;overflow:hidden;padding:20px;z-index:5;box-sizing:border-box;background-color:#e6e6e6}\n", "legend{margin-left:1em;padding:.2em .8em;font-size:.8em}\n", "fieldset{border:1px darkgray solid}\n"], dependencies: [{ kind: "component", type: i1.AudioDisplayScrollPane, selector: "audio-display-scroll-pane", inputs: ["audioClip"], outputs: ["zoomInAction", "zoomOutAction", "zoomSelectedAction", "zoomFitToPanelAction"] }, { kind: "component", type: i2.AudioDisplayControl, selector: "audio-display-control", inputs: ["audioClip", "playStartAction", "playSelectionAction", "playStopAction", "zoomInAction", "zoomOutAction", "zoomFitToPanelAction", "zoomSelectedAction", "autoPlayOnSelectToggleAction"] }] }); }
85
85
  }
86
- i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "17.3.4", ngImport: i0, type: AudioDisplay, decorators: [{
86
+ i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "17.3.5", ngImport: i0, type: AudioDisplay, decorators: [{
87
87
  type: Component,
88
88
  args: [{ selector: 'app-audiodisplay', template: `
89
89
 
@@ -187,8 +187,8 @@ export class AudioDisplayPlayer {
187
187
  error() {
188
188
  this.status = 'ERROR';
189
189
  }
190
- static { this.ɵfac = i0.ɵɵngDeclareFactory({ minVersion: "12.0.0", version: "17.3.4", ngImport: i0, type: AudioDisplayPlayer, deps: [{ token: i1.ActivatedRoute }, { token: i0.ChangeDetectorRef }, { token: i0.ElementRef }], target: i0.ɵɵFactoryTarget.Component }); }
191
- static { this.ɵcmp = i0.ɵɵngDeclareComponent({ minVersion: "14.0.0", version: "17.3.4", type: AudioDisplayPlayer, selector: "app-audiodisplayplayer", inputs: { playStartAction: "playStartAction", playStopAction: "playStopAction", playSelectionAction: "playSelectionAction", autoPlayOnSelectToggleAction: "autoPlayOnSelectToggleAction", audioData: "audioData", audioClip: "audioClip" }, viewQueries: [{ propertyName: "audioDisplayScrollPane", first: true, predicate: AudioDisplayScrollPane, descendants: true, static: true }], ngImport: i0, template: `
190
+ static { this.ɵfac = i0.ɵɵngDeclareFactory({ minVersion: "12.0.0", version: "17.3.5", ngImport: i0, type: AudioDisplayPlayer, deps: [{ token: i1.ActivatedRoute }, { token: i0.ChangeDetectorRef }, { token: i0.ElementRef }], target: i0.ɵɵFactoryTarget.Component }); }
191
+ static { this.ɵcmp = i0.ɵɵngDeclareComponent({ minVersion: "14.0.0", version: "17.3.5", type: AudioDisplayPlayer, selector: "app-audiodisplayplayer", inputs: { playStartAction: "playStartAction", playStopAction: "playStopAction", playSelectionAction: "playSelectionAction", autoPlayOnSelectToggleAction: "autoPlayOnSelectToggleAction", audioData: "audioData", audioClip: "audioClip" }, viewQueries: [{ propertyName: "audioDisplayScrollPane", first: true, predicate: AudioDisplayScrollPane, descendants: true, static: true }], ngImport: i0, template: `
192
192
 
193
193
  <audio-display-scroll-pane #audioDisplayScrollPane></audio-display-scroll-pane>
194
194
 
@@ -203,7 +203,7 @@ export class AudioDisplayPlayer {
203
203
  [zoomFitToPanelAction]="zoomFitToPanelAction"></audio-display-control><p>{{status}}
204
204
  `, isInline: true, styles: [":host{display:flex;flex-direction:column;position:absolute;bottom:0;height:100%;width:100%;overflow:hidden;padding:20px;z-index:5;box-sizing:border-box;background-color:#000000bf}\n"], dependencies: [{ kind: "component", type: i2.AudioDisplayScrollPane, selector: "audio-display-scroll-pane", inputs: ["audioClip"], outputs: ["zoomInAction", "zoomOutAction", "zoomSelectedAction", "zoomFitToPanelAction"] }, { kind: "component", type: i3.AudioDisplayControl, selector: "audio-display-control", inputs: ["audioClip", "playStartAction", "playSelectionAction", "playStopAction", "zoomInAction", "zoomOutAction", "zoomFitToPanelAction", "zoomSelectedAction", "autoPlayOnSelectToggleAction"] }] }); }
205
205
  }
206
- i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "17.3.4", ngImport: i0, type: AudioDisplayPlayer, decorators: [{
206
+ i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "17.3.5", ngImport: i0, type: AudioDisplayPlayer, decorators: [{
207
207
  type: Component,
208
208
  args: [{ selector: 'app-audiodisplayplayer', template: `
209
209
 
@@ -138,10 +138,10 @@ export class BasicAudioCanvasLayerComponent extends CanvasLayerComponent {
138
138
  }
139
139
  }
140
140
  }
141
- static { this.ɵfac = i0.ɵɵngDeclareFactory({ minVersion: "12.0.0", version: "17.3.4", ngImport: i0, type: BasicAudioCanvasLayerComponent, deps: null, target: i0.ɵɵFactoryTarget.Directive }); }
142
- static { this.ɵdir = i0.ɵɵngDeclareDirective({ minVersion: "14.0.0", version: "17.3.4", type: BasicAudioCanvasLayerComponent, usesInheritance: true, ngImport: i0 }); }
141
+ static { this.ɵfac = i0.ɵɵngDeclareFactory({ minVersion: "12.0.0", version: "17.3.5", ngImport: i0, type: BasicAudioCanvasLayerComponent, deps: null, target: i0.ɵɵFactoryTarget.Directive }); }
142
+ static { this.ɵdir = i0.ɵɵngDeclareDirective({ minVersion: "14.0.0", version: "17.3.5", type: BasicAudioCanvasLayerComponent, usesInheritance: true, ngImport: i0 }); }
143
143
  }
144
- i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "17.3.4", ngImport: i0, type: BasicAudioCanvasLayerComponent, decorators: [{
144
+ i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "17.3.5", ngImport: i0, type: BasicAudioCanvasLayerComponent, decorators: [{
145
145
  type: Directive
146
146
  }] });
147
147
  export class AudioCanvasLayerComponent extends BasicAudioCanvasLayerComponent {
@@ -317,10 +317,10 @@ export class AudioCanvasLayerComponent extends BasicAudioCanvasLayerComponent {
317
317
  }
318
318
  }
319
319
  }
320
- static { this.ɵfac = i0.ɵɵngDeclareFactory({ minVersion: "12.0.0", version: "17.3.4", ngImport: i0, type: AudioCanvasLayerComponent, deps: null, target: i0.ɵɵFactoryTarget.Directive }); }
321
- static { this.ɵdir = i0.ɵɵngDeclareDirective({ minVersion: "14.0.0", version: "17.3.4", type: AudioCanvasLayerComponent, inputs: { pointerPosition: "pointerPosition", selecting: "selecting", selection: "selection" }, outputs: { pointerPositionEventEmitter: "pointerPositionEventEmitter", selectingEventEmitter: "selectingEventEmitter", selectedEventEmitter: "selectedEventEmitter" }, host: { listeners: { "document:mouseup": "onMouseup($event)" } }, viewQueries: [{ propertyName: "bgCanvasRef", first: true, predicate: ["bg"], descendants: true, static: true }, { propertyName: "cursorCanvasRef", first: true, predicate: ["cursor"], descendants: true, static: true }], usesInheritance: true, ngImport: i0 }); }
320
+ static { this.ɵfac = i0.ɵɵngDeclareFactory({ minVersion: "12.0.0", version: "17.3.5", ngImport: i0, type: AudioCanvasLayerComponent, deps: null, target: i0.ɵɵFactoryTarget.Directive }); }
321
+ static { this.ɵdir = i0.ɵɵngDeclareDirective({ minVersion: "14.0.0", version: "17.3.5", type: AudioCanvasLayerComponent, inputs: { pointerPosition: "pointerPosition", selecting: "selecting", selection: "selection" }, outputs: { pointerPositionEventEmitter: "pointerPositionEventEmitter", selectingEventEmitter: "selectingEventEmitter", selectedEventEmitter: "selectedEventEmitter" }, host: { listeners: { "document:mouseup": "onMouseup($event)" } }, viewQueries: [{ propertyName: "bgCanvasRef", first: true, predicate: ["bg"], descendants: true, static: true }, { propertyName: "cursorCanvasRef", first: true, predicate: ["cursor"], descendants: true, static: true }], usesInheritance: true, ngImport: i0 }); }
322
322
  }
323
- i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "17.3.4", ngImport: i0, type: AudioCanvasLayerComponent, decorators: [{
323
+ i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "17.3.5", ngImport: i0, type: AudioCanvasLayerComponent, decorators: [{
324
324
  type: Directive
325
325
  }], propDecorators: { bgCanvasRef: [{
326
326
  type: ViewChild,
@@ -33,8 +33,8 @@ export class AudioDisplayControl {
33
33
  error() {
34
34
  this.status = 'ERROR';
35
35
  }
36
- static { this.ɵfac = i0.ɵɵngDeclareFactory({ minVersion: "12.0.0", version: "17.3.4", ngImport: i0, type: AudioDisplayControl, deps: [], target: i0.ɵɵFactoryTarget.Component }); }
37
- static { this.ɵcmp = i0.ɵɵngDeclareComponent({ minVersion: "14.0.0", version: "17.3.4", type: AudioDisplayControl, selector: "audio-display-control", inputs: { audioClip: "audioClip", playStartAction: "playStartAction", playSelectionAction: "playSelectionAction", playStopAction: "playStopAction", zoomInAction: "zoomInAction", zoomOutAction: "zoomOutAction", zoomFitToPanelAction: "zoomFitToPanelAction", zoomSelectedAction: "zoomSelectedAction", autoPlayOnSelectToggleAction: "autoPlayOnSelectToggleAction" }, viewQueries: [{ propertyName: "autoplaySelectedCheckbox", first: true, predicate: MatCheckbox, descendants: true, static: true }], ngImport: i0, template: `
36
+ static { this.ɵfac = i0.ɵɵngDeclareFactory({ minVersion: "12.0.0", version: "17.3.5", ngImport: i0, type: AudioDisplayControl, deps: [], target: i0.ɵɵFactoryTarget.Component }); }
37
+ static { this.ɵcmp = i0.ɵɵngDeclareComponent({ minVersion: "14.0.0", version: "17.3.5", type: AudioDisplayControl, selector: "audio-display-control", inputs: { audioClip: "audioClip", playStartAction: "playStartAction", playSelectionAction: "playSelectionAction", playStopAction: "playStopAction", zoomInAction: "zoomInAction", zoomOutAction: "zoomOutAction", zoomFitToPanelAction: "zoomFitToPanelAction", zoomSelectedAction: "zoomSelectedAction", autoPlayOnSelectToggleAction: "autoPlayOnSelectToggleAction" }, viewQueries: [{ propertyName: "autoplaySelectedCheckbox", first: true, predicate: MatCheckbox, descendants: true, static: true }], ngImport: i0, template: `
38
38
  <div #controlPanel style="display:flex;flex-direction: row;">
39
39
  <fieldset>
40
40
 
@@ -79,7 +79,7 @@ export class AudioDisplayControl {
79
79
  </fieldset>
80
80
  </div>`, isInline: true, styles: [":host{flex:0}\n"], dependencies: [{ kind: "directive", type: i1.NgIf, selector: "[ngIf]", inputs: ["ngIf", "ngIfThen", "ngIfElse"] }, { kind: "component", type: i2.MatIcon, selector: "mat-icon", inputs: ["color", "inline", "svgIcon", "fontSet", "fontIcon"], exportAs: ["matIcon"] }, { kind: "directive", type: i3.MatTooltip, selector: "[matTooltip]", inputs: ["matTooltipPosition", "matTooltipPositionAtOrigin", "matTooltipDisabled", "matTooltipShowDelay", "matTooltipHideDelay", "matTooltipTouchGestures", "matTooltip", "matTooltipClass"], exportAs: ["matTooltip"] }, { kind: "component", type: i4.MatCheckbox, selector: "mat-checkbox", inputs: ["aria-label", "aria-labelledby", "aria-describedby", "id", "required", "labelPosition", "name", "value", "disableRipple", "tabIndex", "color", "checked", "disabled", "indeterminate"], outputs: ["change", "indeterminateChange"], exportAs: ["matCheckbox"] }] }); }
81
81
  }
82
- i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "17.3.4", ngImport: i0, type: AudioDisplayControl, decorators: [{
82
+ i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "17.3.5", ngImport: i0, type: AudioDisplayControl, decorators: [{
83
83
  type: Component,
84
84
  args: [{ selector: 'audio-display-control', template: `
85
85
  <div #controlPanel style="display:flex;flex-direction: row;">
@@ -109,14 +109,14 @@ export class AudioDisplayScrollPane {
109
109
  set playFramePosition(framePos) {
110
110
  this.ac.playFramePosition = framePos;
111
111
  }
112
- static { this.ɵfac = i0.ɵɵngDeclareFactory({ minVersion: "12.0.0", version: "17.3.4", ngImport: i0, type: AudioDisplayScrollPane, deps: [{ token: i0.ElementRef }], target: i0.ɵɵFactoryTarget.Component }); }
113
- static { this.ɵcmp = i0.ɵɵngDeclareComponent({ minVersion: "14.0.0", version: "17.3.4", type: AudioDisplayScrollPane, selector: "audio-display-scroll-pane", inputs: { audioClip: "audioClip" }, outputs: { zoomInAction: "zoomInAction", zoomOutAction: "zoomOutAction", zoomSelectedAction: "zoomSelectedAction", zoomFitToPanelAction: "zoomFitToPanelAction" }, host: { listeners: { "scroll": "onScroll($event)", "window:resize": "onResize($event)" } }, viewQueries: [{ propertyName: "ac", first: true, predicate: AudioClipUIContainer, descendants: true, static: true }], ngImport: i0, template: `
112
+ static { this.ɵfac = i0.ɵɵngDeclareFactory({ minVersion: "12.0.0", version: "17.3.5", ngImport: i0, type: AudioDisplayScrollPane, deps: [{ token: i0.ElementRef }], target: i0.ɵɵFactoryTarget.Component }); }
113
+ static { this.ɵcmp = i0.ɵɵngDeclareComponent({ minVersion: "14.0.0", version: "17.3.5", type: AudioDisplayScrollPane, selector: "audio-display-scroll-pane", inputs: { audioClip: "audioClip" }, outputs: { zoomInAction: "zoomInAction", zoomOutAction: "zoomOutAction", zoomSelectedAction: "zoomSelectedAction", zoomFitToPanelAction: "zoomFitToPanelAction" }, host: { listeners: { "scroll": "onScroll($event)", "window:resize": "onResize($event)" } }, viewQueries: [{ propertyName: "ac", first: true, predicate: AudioClipUIContainer, descendants: true, static: true }], ngImport: i0, template: `
114
114
 
115
115
  <app-audio #audioSignalContainer (selectionEventEmitter)="selectionChanged($event)"></app-audio>
116
116
 
117
117
  `, isInline: true, styles: [":host{flex:2;width:100%;background:#a9a9a9;box-sizing:border-box;height:100%;position:relative;overflow-x:scroll;overflow-y:auto}\n", "app-audio{margin:0;padding:0;top:0;left:0;width:100%;height:100%;box-sizing:border-box}\n"], dependencies: [{ kind: "component", type: i1.AudioClipUIContainer, selector: "app-audio", inputs: ["audioData", "audioClip"], outputs: ["selectionEventEmitter"] }] }); }
118
118
  }
119
- i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "17.3.4", ngImport: i0, type: AudioDisplayScrollPane, decorators: [{
119
+ i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "17.3.5", ngImport: i0, type: AudioDisplayScrollPane, decorators: [{
120
120
  type: Component,
121
121
  args: [{ selector: 'audio-display-scroll-pane', template: `
122
122
 
@@ -507,15 +507,15 @@ export class AudioSignal extends AudioCanvasLayerComponent {
507
507
  this._audioDataHolder = audioData;
508
508
  this.playFramePosition = 0;
509
509
  }
510
- static { this.ɵfac = i0.ɵɵngDeclareFactory({ minVersion: "12.0.0", version: "17.3.4", ngImport: i0, type: AudioSignal, deps: [{ token: i0.ElementRef }], target: i0.ɵɵFactoryTarget.Component }); }
511
- static { this.ɵcmp = i0.ɵɵngDeclareComponent({ minVersion: "14.0.0", version: "17.3.4", type: AudioSignal, selector: "audio-signal", viewQueries: [{ propertyName: "audioSignalCanvasRef", first: true, predicate: ["audioSignal"], descendants: true, static: true }, { propertyName: "playPosCanvasRef", first: true, predicate: ["marker"], descendants: true, static: true }], usesInheritance: true, ngImport: i0, template: `
510
+ static { this.ɵfac = i0.ɵɵngDeclareFactory({ minVersion: "12.0.0", version: "17.3.5", ngImport: i0, type: AudioSignal, deps: [{ token: i0.ElementRef }], target: i0.ɵɵFactoryTarget.Component }); }
511
+ static { this.ɵcmp = i0.ɵɵngDeclareComponent({ minVersion: "14.0.0", version: "17.3.5", type: AudioSignal, selector: "audio-signal", viewQueries: [{ propertyName: "audioSignalCanvasRef", first: true, predicate: ["audioSignal"], descendants: true, static: true }, { propertyName: "playPosCanvasRef", first: true, predicate: ["marker"], descendants: true, static: true }], usesInheritance: true, ngImport: i0, template: `
512
512
  <canvas #bg height="10"></canvas>
513
513
  <canvas #audioSignal height="10"></canvas>
514
514
  <canvas #cursor height="10" (mousedown)="selectionStart($event)" (mouseover)="updateCursorCanvas($event)" (mousemove)="updateCursorCanvas($event)"
515
515
  (mouseleave)="updateCursorCanvas($event, false)"></canvas>
516
516
  <canvas #marker height="10"></canvas>`, isInline: true, styles: [":host{min-height:0px}\n", "canvas{top:0;left:0;width:0;height:0;min-height:0px;position:absolute}\n"] }); }
517
517
  }
518
- i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "17.3.4", ngImport: i0, type: AudioSignal, decorators: [{
518
+ i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "17.3.5", ngImport: i0, type: AudioSignal, decorators: [{
519
519
  type: Component,
520
520
  args: [{ selector: 'audio-signal', template: `
521
521
  <canvas #bg height="10"></canvas>