cribl-control-plane 0.4.0 → 0.5.0-alpha.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. package/README.md +75 -27
  2. package/dist/commonjs/lib/config.d.ts +4 -4
  3. package/dist/commonjs/lib/config.js +4 -4
  4. package/dist/commonjs/lib/config.js.map +1 -1
  5. package/dist/commonjs/models/runnablejobcollection.d.ts +37 -37
  6. package/dist/commonjs/models/runnablejobcollection.d.ts.map +1 -1
  7. package/dist/commonjs/models/runnablejobexecutor.d.ts +21 -21
  8. package/dist/commonjs/models/runnablejobexecutor.d.ts.map +1 -1
  9. package/dist/commonjs/models/runnablejobscheduledsearch.d.ts +16 -16
  10. package/dist/commonjs/models/runnablejobscheduledsearch.d.ts.map +1 -1
  11. package/dist/esm/lib/config.d.ts +4 -4
  12. package/dist/esm/lib/config.js +4 -4
  13. package/dist/esm/lib/config.js.map +1 -1
  14. package/dist/esm/models/runnablejobcollection.d.ts +37 -37
  15. package/dist/esm/models/runnablejobcollection.d.ts.map +1 -1
  16. package/dist/esm/models/runnablejobexecutor.d.ts +21 -21
  17. package/dist/esm/models/runnablejobexecutor.d.ts.map +1 -1
  18. package/dist/esm/models/runnablejobscheduledsearch.d.ts +16 -16
  19. package/dist/esm/models/runnablejobscheduledsearch.d.ts.map +1 -1
  20. package/examples/example-cloud-auth.ts +13 -13
  21. package/examples/example-cloud-search-packs-lake.ts +2 -2
  22. package/examples/example-cloud-worker-group.ts +4 -4
  23. package/examples/example-edge.ts +1 -2
  24. package/examples/example-onprem-auth.ts +20 -57
  25. package/examples/example-stream.ts +1 -2
  26. package/examples/example-worker-group-replication.ts +1 -2
  27. package/examples/package-lock.json +1 -1
  28. package/jsr.json +1 -1
  29. package/package.json +1 -1
  30. package/src/lib/config.ts +4 -4
  31. package/src/models/runnablejobcollection.ts +37 -37
  32. package/src/models/runnablejobexecutor.ts +21 -21
  33. package/src/models/runnablejobscheduledsearch.ts +16 -16
@@ -32,24 +32,24 @@ export type RunnableJobScheduledSearchRunSettings = {
32
32
  /**
33
33
  * Reschedule tasks that failed with non-fatal errors
34
34
  */
35
- rescheduleDroppedTasks?: boolean | undefined;
35
+ rescheduleDroppedTasks: boolean;
36
36
  /**
37
37
  * Maximum number of times a task can be rescheduled
38
38
  */
39
- maxTaskReschedule?: number | undefined;
39
+ maxTaskReschedule: number;
40
40
  /**
41
41
  * Level at which to set task logging
42
42
  */
43
- logLevel?: RunnableJobScheduledSearchLogLevel | undefined;
43
+ logLevel: RunnableJobScheduledSearchLogLevel;
44
44
  /**
45
45
  * Maximum time the job is allowed to run. Time unit defaults to seconds if not specified (examples: 30, 45s, 15m). Enter 0 for unlimited time.
46
46
  */
47
- jobTimeout?: string | undefined;
47
+ jobTimeout: string;
48
48
  /**
49
49
  * Job run mode. Preview will either return up to N matching results, or will run until capture time T is reached. Discovery will gather the list of files to turn into streaming tasks, without running the data collection job. Full Run will run the collection job.
50
50
  */
51
- mode?: string | undefined;
52
- timeRangeType?: string | undefined;
51
+ mode: string;
52
+ timeRangeType: string;
53
53
  /**
54
54
  * Earliest time to collect data for the selected timezone
55
55
  */
@@ -63,7 +63,7 @@ export type RunnableJobScheduledSearchRunSettings = {
63
63
  /**
64
64
  * A filter for tokens in the provided collect path and/or the events being collected
65
65
  */
66
- expression?: string | undefined;
66
+ expression: string;
67
67
  /**
68
68
  * Limits the bundle size for small tasks. For example,
69
69
  *
@@ -71,7 +71,7 @@ export type RunnableJobScheduledSearchRunSettings = {
71
71
  *
72
72
  * if your lower bundle size is 1MB, you can bundle up to five 200KB files into one task.
73
73
  */
74
- minTaskSize?: string | undefined;
74
+ minTaskSize: string;
75
75
  /**
76
76
  * Limits the bundle size for files above the lower task bundle size. For example, if your upper bundle size is 10MB,
77
77
  *
@@ -79,7 +79,7 @@ export type RunnableJobScheduledSearchRunSettings = {
79
79
  *
80
80
  * you can bundle up to five 2MB files into one task. Files greater than this size will be assigned to individual tasks.
81
81
  */
82
- maxTaskSize?: string | undefined;
82
+ maxTaskSize: string;
83
83
  };
84
84
  /**
85
85
  * Configuration for a scheduled job
@@ -92,19 +92,19 @@ export type RunnableJobScheduledSearchSchedule = {
92
92
  /**
93
93
  * Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits
94
94
  */
95
- skippable?: boolean | undefined;
95
+ skippable: boolean;
96
96
  /**
97
97
  * If Stream Leader (or single instance) restarts, run all missed jobs according to their original schedules
98
98
  */
99
- resumeMissed?: boolean | undefined;
99
+ resumeMissed: boolean;
100
100
  /**
101
101
  * A cron schedule on which to run this job
102
102
  */
103
- cronSchedule?: string | undefined;
103
+ cronSchedule: string;
104
104
  /**
105
105
  * The maximum number of instances of this scheduled job that may be running at any time
106
106
  */
107
- maxConcurrentRuns?: number | undefined;
107
+ maxConcurrentRuns: number;
108
108
  run?: RunnableJobScheduledSearchRunSettings | undefined;
109
109
  };
110
110
  export type RunnableJobScheduledSearch = {
@@ -117,11 +117,11 @@ export type RunnableJobScheduledSearch = {
117
117
  /**
118
118
  * Time to keep the job's artifacts on disk after job completion. This also affects how long a job is listed in the Job Inspector.
119
119
  */
120
- ttl?: string | undefined;
120
+ ttl: string;
121
121
  /**
122
122
  * When enabled, this job's artifacts are not counted toward the Worker Group's finished job artifacts limit. Artifacts will be removed only after the Collector's configured time to live.
123
123
  */
124
- ignoreGroupJobsLimit?: boolean | undefined;
124
+ ignoreGroupJobsLimit: boolean;
125
125
  /**
126
126
  * List of fields to remove from Discover results. Wildcards (for example, aws*) are allowed. This is useful when discovery returns sensitive fields that should not be exposed in the Jobs user interface.
127
127
  */
@@ -129,7 +129,7 @@ export type RunnableJobScheduledSearch = {
129
129
  /**
130
130
  * Resume the ad hoc job if a failure condition causes Stream to restart during job execution
131
131
  */
132
- resumeOnBoot?: boolean | undefined;
132
+ resumeOnBoot: boolean;
133
133
  /**
134
134
  * Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere.
135
135
  */
@@ -1 +1 @@
1
- {"version":3,"file":"runnablejobscheduledsearch.d.ts","sourceRoot":"","sources":["../../../src/models/runnablejobscheduledsearch.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,CAAC,MAAM,QAAQ,CAAC;AAG5B,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,mBAAmB,CAAC;AACzD,OAAO,EAAE,MAAM,IAAI,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAC3D,OAAO,EAAE,kBAAkB,EAAE,MAAM,gCAAgC,CAAC;AAEpE,eAAO,MAAM,iCAAiC;;;;CAIpC,CAAC;AACX,MAAM,MAAM,iCAAiC,GAAG,QAAQ,CACtD,OAAO,iCAAiC,CACzC,CAAC;AAEF,eAAO,MAAM,8BAA8B;;CAEjC,CAAC;AACX,MAAM,MAAM,8BAA8B,GAAG,UAAU,CACrD,OAAO,8BAA8B,CACtC,CAAC;AAEF;;GAEG;AACH,eAAO,MAAM,kCAAkC;;;;;;CAMrC,CAAC;AACX;;GAEG;AACH,MAAM,MAAM,kCAAkC,GAAG,UAAU,CACzD,OAAO,kCAAkC,CAC1C,CAAC;AAEF,MAAM,MAAM,qCAAqC,GAAG,EAAE,CAAC;AAEvD,MAAM,MAAM,qCAAqC,GAAG;IAClD,IAAI,CAAC,EAAE,8BAA8B,GAAG,SAAS,CAAC;IAClD;;OAEG;IACH,sBAAsB,CAAC,EAAE,OAAO,GAAG,SAAS,CAAC;IAC7C;;OAEG;IACH,iBAAiB,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IACvC;;OAEG;IACH,QAAQ,CAAC,EAAE,kCAAkC,GAAG,SAAS,CAAC;IAC1D;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAChC;;OAEG;IACH,IAAI,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAC1B,aAAa,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IACnC;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAC9B;;OAEG;IACH,MAAM,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAC5B,iBAAiB,CAAC,EAAE,GAAG,GAAG,SAAS,CAAC;IACpC,WAAW,CAAC,EAAE,qCAAqC,GAAG,SAAS,CAAC;IAChE;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAChC;;;;;;OAMG;IACH,WAAW,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IACjC;;;;;;OAMG;IACH,WAAW,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;CAClC,CAAC;AAEF;;GAEG;AACH,MAAM,MAAM,kCAAkC,GAAG;IAC/C;;OAEG;IACH,OAAO,CAAC,EAAE,OAAO,GAAG,SAAS,CAAC;IAC9B;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,GAAG,SAAS,CAAC;IAChC;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,GAAG,SAAS,CAAC;IACnC;;OAEG;IACH,YAAY,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAClC;;OAEG;IACH,iBAAiB,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IACvC,GAAG,CAAC,EAAE,qCAAqC,GAAG,SAAS,CAAC;CACzD,CAAC;AAEF,MAAM,MAAM,0BAA0B,GAAG;IACvC;;OAEG;IACH,EAAE,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IACxB,WAAW,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IACjC,IAAI,EAAE,iCAAiC,CAAC;IACxC;;OAEG;IACH,GAAG,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IACzB;;
OAEG;IACH,oBAAoB,CAAC,EAAE,OAAO,GAAG,SAAS,CAAC;IAC3C;;OAEG;IACH,YAAY,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,GAAG,SAAS,CAAC;IACzC;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,GAAG,SAAS,CAAC;IACnC;;OAEG;IACH,WAAW,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IACjC;;OAEG;IACH,QAAQ,CAAC,EAAE,kCAAkC,GAAG,SAAS,CAAC;IAC1D;;OAEG;IACH,UAAU,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,GAAG,SAAS,CAAC;IACvC;;OAEG;IACH,YAAY,EAAE,MAAM,CAAC;CACtB,CAAC;AAEF,gBAAgB;AAChB,eAAO,MAAM,+CAA+C,EAAE,CAAC,CAAC,OAAO,CACrE,iCAAiC,EACjC,CAAC,CAAC,UAAU,EACZ,OAAO,CACqD,CAAC;AAE/D,gBAAgB;AAChB,eAAO,MAAM,4CAA4C,EAAE,CAAC,CAAC,aAAa,CACxE,OAAO,8BAA8B,CACS,CAAC;AAEjD,gBAAgB;AAChB,eAAO,MAAM,gDAAgD,EAAE,CAAC,CAAC,aAAa,CAC5E,OAAO,kCAAkC,CACS,CAAC;AAErD,gBAAgB;AAChB,eAAO,MAAM,mDAAmD,EAAE,CAAC,CAAC,OAAO,CACzE,qCAAqC,EACrC,CAAC,CAAC,UAAU,EACZ,OAAO,CACO,CAAC;AAEjB,wBAAgB,6CAA6C,CAC3D,UAAU,EAAE,MAAM,GACjB,eAAe,CAAC,qCAAqC,EAAE,kBAAkB,CAAC,CAO5E;AAED,gBAAgB;AAChB,eAAO,MAAM,mDAAmD,EAAE,CAAC,CAAC,OAAO,CACzE,qCAAqC,EACrC,CAAC,CAAC,UAAU,EACZ,OAAO,CAiBP,CAAC;AAEH,wBAAgB,6CAA6C,CAC3D,UAAU,EAAE,MAAM,GACjB,eAAe,CAAC,qCAAqC,EAAE,kBAAkB,CAAC,CAO5E;AAED,gBAAgB;AAChB,eAAO,MAAM,gDAAgD,EAAE,CAAC,CAAC,OAAO,CACtE,kCAAkC,EAClC,CAAC,CAAC,UAAU,EACZ,OAAO,CASP,CAAC;AAEH,wBAAgB,0CAA0C,CACxD,UAAU,EAAE,MAAM,GACjB,eAAe,CAAC,kCAAkC,EAAE,kBAAkB,CAAC,CAOzE;AAED,gBAAgB;AAChB,eAAO,MAAM,wCAAwC,EAAE,CAAC,CAAC,OAAO,CAC9D,0BAA0B,EAC1B,CAAC,CAAC,UAAU,EACZ,OAAO,CAcP,CAAC;AAEH,wBAAgB,kCAAkC,CAChD,UAAU,EAAE,MAAM,GACjB,eAAe,CAAC,0BAA0B,EAAE,kBAAkB,CAAC,CAMjE"}
1
+ {"version":3,"file":"runnablejobscheduledsearch.d.ts","sourceRoot":"","sources":["../../../src/models/runnablejobscheduledsearch.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,CAAC,MAAM,QAAQ,CAAC;AAG5B,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,mBAAmB,CAAC;AACzD,OAAO,EAAE,MAAM,IAAI,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAC3D,OAAO,EAAE,kBAAkB,EAAE,MAAM,gCAAgC,CAAC;AAEpE,eAAO,MAAM,iCAAiC;;;;CAIpC,CAAC;AACX,MAAM,MAAM,iCAAiC,GAAG,QAAQ,CACtD,OAAO,iCAAiC,CACzC,CAAC;AAEF,eAAO,MAAM,8BAA8B;;CAEjC,CAAC;AACX,MAAM,MAAM,8BAA8B,GAAG,UAAU,CACrD,OAAO,8BAA8B,CACtC,CAAC;AAEF;;GAEG;AACH,eAAO,MAAM,kCAAkC;;;;;;CAMrC,CAAC;AACX;;GAEG;AACH,MAAM,MAAM,kCAAkC,GAAG,UAAU,CACzD,OAAO,kCAAkC,CAC1C,CAAC;AAEF,MAAM,MAAM,qCAAqC,GAAG,EAAE,CAAC;AAEvD,MAAM,MAAM,qCAAqC,GAAG;IAClD,IAAI,CAAC,EAAE,8BAA8B,GAAG,SAAS,CAAC;IAClD;;OAEG;IACH,sBAAsB,EAAE,OAAO,CAAC;IAChC;;OAEG;IACH,iBAAiB,EAAE,MAAM,CAAC;IAC1B;;OAEG;IACH,QAAQ,EAAE,kCAAkC,CAAC;IAC7C;;OAEG;IACH,UAAU,EAAE,MAAM,CAAC;IACnB;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IACb,aAAa,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAC9B;;OAEG;IACH,MAAM,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAC5B,iBAAiB,CAAC,EAAE,GAAG,GAAG,SAAS,CAAC;IACpC,WAAW,CAAC,EAAE,qCAAqC,GAAG,SAAS,CAAC;IAChE;;OAEG;IACH,UAAU,EAAE,MAAM,CAAC;IACnB;;;;;;OAMG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;;;;;OAMG;IACH,WAAW,EAAE,MAAM,CAAC;CACrB,CAAC;AAEF;;GAEG;AACH,MAAM,MAAM,kCAAkC,GAAG;IAC/C;;OAEG;IACH,OAAO,CAAC,EAAE,OAAO,GAAG,SAAS,CAAC;IAC9B;;OAEG;IACH,SAAS,EAAE,OAAO,CAAC;IACnB;;OAEG;IACH,YAAY,EAAE,OAAO,CAAC;IACtB;;OAEG;IACH,YAAY,EAAE,MAAM,CAAC;IACrB;;OAEG;IACH,iBAAiB,EAAE,MAAM,CAAC;IAC1B,GAAG,CAAC,EAAE,qCAAqC,GAAG,SAAS,CAAC;CACzD,CAAC;AAEF,MAAM,MAAM,0BAA0B,GAAG;IACvC;;OAEG;IACH,EAAE,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IACxB,WAAW,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IACjC,IAAI,EAAE,iCAAiC,CAAC;IACxC;;OAEG;IACH,GAAG,EAAE,MAAM,CAAC;IACZ;;OAEG;IACH,oBAAoB,EAAE,OAAO,CAAC;IAC9B;;OAEG;IACH,YAAY,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,GAAG,SAAS,CAAC;IACzC;;OAEG;IACH,YAAY,EAAE,OAAO,CAAC;IACtB;;OAEG;IACH,WAAW,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IACjC;;OAEG;IACH,QAAQ,CA
AC,EAAE,kCAAkC,GAAG,SAAS,CAAC;IAC1D;;OAEG;IACH,UAAU,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,GAAG,SAAS,CAAC;IACvC;;OAEG;IACH,YAAY,EAAE,MAAM,CAAC;CACtB,CAAC;AAEF,gBAAgB;AAChB,eAAO,MAAM,+CAA+C,EAAE,CAAC,CAAC,OAAO,CACrE,iCAAiC,EACjC,CAAC,CAAC,UAAU,EACZ,OAAO,CACqD,CAAC;AAE/D,gBAAgB;AAChB,eAAO,MAAM,4CAA4C,EAAE,CAAC,CAAC,aAAa,CACxE,OAAO,8BAA8B,CACS,CAAC;AAEjD,gBAAgB;AAChB,eAAO,MAAM,gDAAgD,EAAE,CAAC,CAAC,aAAa,CAC5E,OAAO,kCAAkC,CACS,CAAC;AAErD,gBAAgB;AAChB,eAAO,MAAM,mDAAmD,EAAE,CAAC,CAAC,OAAO,CACzE,qCAAqC,EACrC,CAAC,CAAC,UAAU,EACZ,OAAO,CACO,CAAC;AAEjB,wBAAgB,6CAA6C,CAC3D,UAAU,EAAE,MAAM,GACjB,eAAe,CAAC,qCAAqC,EAAE,kBAAkB,CAAC,CAO5E;AAED,gBAAgB;AAChB,eAAO,MAAM,mDAAmD,EAAE,CAAC,CAAC,OAAO,CACzE,qCAAqC,EACrC,CAAC,CAAC,UAAU,EACZ,OAAO,CAiBP,CAAC;AAEH,wBAAgB,6CAA6C,CAC3D,UAAU,EAAE,MAAM,GACjB,eAAe,CAAC,qCAAqC,EAAE,kBAAkB,CAAC,CAO5E;AAED,gBAAgB;AAChB,eAAO,MAAM,gDAAgD,EAAE,CAAC,CAAC,OAAO,CACtE,kCAAkC,EAClC,CAAC,CAAC,UAAU,EACZ,OAAO,CASP,CAAC;AAEH,wBAAgB,0CAA0C,CACxD,UAAU,EAAE,MAAM,GACjB,eAAe,CAAC,kCAAkC,EAAE,kBAAkB,CAAC,CAOzE;AAED,gBAAgB;AAChB,eAAO,MAAM,wCAAwC,EAAE,CAAC,CAAC,OAAO,CAC9D,0BAA0B,EAC1B,CAAC,CAAC,UAAU,EACZ,OAAO,CAcP,CAAC;AAEH,wBAAgB,kCAAkC,CAChD,UAAU,EAAE,MAAM,GACjB,eAAe,CAAC,0BAA0B,EAAE,kBAAkB,CAAC,CAMjE"}
@@ -1,30 +1,30 @@
1
1
  /**
2
- * Cribl.Cloud and Hybrid Authentication Example
2
+ * Cribl.Cloud Authentication Example
3
3
  *
4
- * This example demonstrates how to configure authentication on Cribl.Cloud
5
- * and in hybrid deployments using OAuth2 credentials.
4
+ * This example demonstrates the Cribl.Cloud authentication process using
5
+ * OAuth2 credentials.
6
6
  *
7
- * 1. Create an SDK client with OAuth2 client credentials using the
8
- * clientOauth security scheme.
7
+ * 1. Create an SDK client with OAuth2 client credentials.
9
8
  * 2. Automatically handle token exchange and refresh.
10
- * 3. Validate the connection by listing all git branches.
9
+ * 3. Validate the connection by checking the server health status and listing
10
+ * all git branches.
11
11
  *
12
12
  * Prerequisites: Replace the placeholder values for ORG_ID, CLIENT_ID,
13
13
  * CLIENT_SECRET, and WORKSPACE_NAME with your Organization ID, Client ID and
14
- * Secret, and Workspace name. To get your Client ID and Secret, follow the
15
- * steps at https://docs.cribl.io/cribl-as-code/sdks-auth/#sdks-auth-cloud.
16
- * Your Client ID and Secret are sensitive information and should be kept private.
14
+ * Secret, and Workspace name. To get your CLIENT_ID and CLIENT_SECRET values,
15
+ * follow the steps at https://docs.cribl.io/api/#criblcloud. Your Client ID
16
+ * and Secret are sensitive information and should be kept private.
17
17
  *
18
- * NOTE: This example is for Cribl.Cloud and hybrid deployments only.
19
- * It does not require .env file configuration.
18
+ * NOTE: This example is for Cribl.Cloud deployments only. It does not require
19
+ * .env file configuration.
20
20
  */
21
21
 
22
22
  import { CriblControlPlane } from "../dist/esm";
23
23
 
24
- // Cribl.Cloud and hybrid configuration: Replace the placeholder values
24
+ // Cribl.Cloud configuration: Replace the placeholder values
25
+ const ORG_ID = "your-org-id"; // Replace with your Organization ID
25
26
  const CLIENT_ID = "your-client-id"; // Replace with your OAuth2 Client ID
26
27
  const CLIENT_SECRET = "your-client-secret"; // Replace with your OAuth2 Client Secret
27
- const ORG_ID = "your-org-id"; // Replace with your Organization ID
28
28
  const WORKSPACE_NAME = "main"; // Replace with your Workspace name
29
29
 
30
30
  const baseUrl = `https://${WORKSPACE_NAME}-${ORG_ID}.cribl.cloud/api/v1`;
@@ -11,8 +11,8 @@
11
11
  * Prerequisites: Replace the placeholder values for ORG_ID, CLIENT_ID,
12
12
  * CLIENT_SECRET, and WORKSPACE_NAME with your Organization ID, Client ID and
13
13
  * Secret, and Workspace name. To get your CLIENT_ID and CLIENT_SECRET values,
14
- * follow the steps at https://docs.cribl.io/cribl-as-code/authentication/#cloud-auth.
15
- * Your Client ID and Secret are sensitive information and should be kept private.
14
+ * follow the steps at https://docs.cribl.io/api/#criblcloud. Your Client ID
15
+ * and Secret are sensitive information and should be kept private.
16
16
  *
17
17
  * NOTE: This example is for Cribl.Cloud deployments only. It does not require
18
18
  * .env file configuration.
@@ -19,14 +19,14 @@
19
19
  * Prerequisites: Replace the placeholder values for ORG_ID, CLIENT_ID,
20
20
  * CLIENT_SECRET, and WORKSPACE_NAME with your Organization ID, Client ID and
21
21
  * Secret, and Workspace name. To get your CLIENT_ID and CLIENT_SECRET values,
22
- * follow the steps at https://docs.cribl.io/cribl-as-code/authentication/#cloud-auth.
23
- * Your Client ID and Secret are sensitive information and should be kept private.
22
+ * follow the steps at https://docs.cribl.io/api/#criblcloud. Your Client ID
23
+ * and Secret are sensitive information and should be kept private.
24
24
  *
25
25
  * NOTE: This example is for Cribl.Cloud deployments only. It does not require
26
26
  * .env file configuration.
27
27
  */
28
28
 
29
- import { ConfigGroup, GroupCreateRequest } from "../dist/esm/models";
29
+ import { ConfigGroup } from "../dist/esm/models";
30
30
  import { AuthCloud } from "./auth";
31
31
 
32
32
  const ORG_ID = 'your-org-id';
@@ -68,7 +68,7 @@ async function main() {
68
68
  }
69
69
 
70
70
  // Create the Worker Group
71
- await cribl.groups.create({ product: "stream", groupCreateRequest: group as GroupCreateRequest });
71
+ await cribl.groups.create({ product: "stream", groupCreateRequest: group });
72
72
  console.log(`✅ Worker Group created: ${group.id}`);
73
73
 
74
74
  // Scale and provision the Worker Group
@@ -40,7 +40,6 @@ const AWS_REGION = "us-east-2"; // Replace with your S3 bucket region
40
40
 
41
41
  import {
42
42
  ConfigGroup,
43
- GroupCreateRequest,
44
43
  InputSyslog,
45
44
  OutputS3,
46
45
  Pipeline,
@@ -122,7 +121,7 @@ async function main() {
122
121
  }
123
122
 
124
123
  // Create Fleet
125
- await cribl.groups.create({ product: "edge", groupCreateRequest: myFleet as GroupCreateRequest });
124
+ await cribl.groups.create({ product: "edge", groupCreateRequest: myFleet });
126
125
  console.log(`✅ Fleet created: ${myFleet.id}`);
127
126
 
128
127
  // Create Syslog Source
@@ -1,13 +1,13 @@
1
1
  /**
2
2
  * On-Prem Authentication Example
3
3
  *
4
- * This example demonstrates how to configure authentication for an on-prem
5
- * Cribl instance using username and password credentials.
4
+ * This example demonstrates the authentication process for an on-prem Cribl
5
+ * instance using username and password credentials.
6
6
  *
7
- * 1. Create an SDK client with username and password credentials using the
8
- * bearerAuth security scheme.
9
- * 2. Automatically handle token exchange and refresh using a callback function.
10
- * 3. Validate the connection by listing all git branches.
7
+ * 1. Authenticate with your username and password to obtain a Bearer token.
8
+ * 2. Create an SDK client that uses the Bearer token for API calls.
9
+ * 3. Validate the connection by checking the server health status and listing
10
+ * all git branches.
11
11
  *
12
12
  * Prerequisites: Replace the placeholder values for ONPREM_SERVER_URL
13
13
  * ONPREM_USERNAME, and ONPREM_PASSWORD with your server URL and credentials.
@@ -21,69 +21,32 @@ import { CriblControlPlane } from "../dist/esm";
21
21
 
22
22
  // On-prem configuration: Replace the placeholder values
23
23
  const ONPREM_SERVER_URL: string = "http://localhost:9000"; // Replace with your server URL
24
- const ONPREM_USERNAME: string = "admin"; // Replace with your username
25
- const ONPREM_PASSWORD: string = "admin"; // Replace with your password
24
+ const ONPREM_USERNAME: string = "admin" // Replace with your username
25
+ const ONPREM_PASSWORD: string = "admin" // Replace with your password
26
26
 
27
27
  const BASE_URL: string = `${ONPREM_SERVER_URL}/api/v1`;
28
28
 
29
- // Token cache
30
- let _cachedToken: string | null = null;
31
- let _tokenExpiresAt: Date | null = null;
32
-
33
- function _getJwtExp(token: string): Date {
34
- const payloadB64 = token.split(".")[1];
35
- const padding = "=".repeat((4 - (payloadB64.length % 4)) % 4);
36
- const payload = JSON.parse(atob(payloadB64 + padding));
37
- const exp = payload.exp;
38
- if (exp === undefined) {
39
- throw new Error("Token missing 'exp' field");
40
- }
41
- return new Date(exp * 1000);
42
- }
43
-
44
29
  async function main() {
45
- // Create client for retrieving Bearer token
46
- const client = new CriblControlPlane({ serverURL: BASE_URL });
47
-
48
- const callback = async () => {
49
- // Check cache
50
- const now = new Date();
51
- if (
52
- _cachedToken &&
53
- _tokenExpiresAt &&
54
- now.getTime() + 3000 < _tokenExpiresAt.getTime()
55
- ) {
56
- return { bearerAuth: _cachedToken };
57
- }
58
-
59
- // Retrieve Bearer token initially and refresh automatically when it expires
60
- const response = await client.auth.tokens.get({
61
- username: ONPREM_USERNAME,
62
- password: ONPREM_PASSWORD,
63
- });
64
- const token = response.token;
65
- _tokenExpiresAt = _getJwtExp(token);
66
- _cachedToken = token;
67
- return { bearerAuth: token };
68
- };
30
+ // Retrieve Bearer token for authentication
31
+ let client = new CriblControlPlane({ serverURL: BASE_URL });
32
+ const { token } = await client.auth.tokens.get({
33
+ username: ONPREM_USERNAME,
34
+ password: ONPREM_PASSWORD,
35
+ });
36
+ console.log(`✅ Authenticated with on-prem server. Token: ${token}`);
69
37
 
70
38
  // Create authenticated SDK client
71
- const authenticatedClient = new CriblControlPlane({
72
- serverURL: BASE_URL,
73
- security: callback,
74
- });
75
- console.log(`✅ Authenticated SDK client created for on-prem server`);
39
+ client = new CriblControlPlane({ serverURL: BASE_URL, security: { bearerAuth: token }});
40
+ console.log(`✅ Cribl SDK client created for on-prem server`);
76
41
 
77
42
  // Validate connection and list all git branches
78
- const response = await authenticatedClient.versions.branches.list();
79
- const branches = response.items?.map((branch: any) => branch.id).join("\n\t");
43
+ const response = await client.versions.branches.list();
44
+ const branches = response.items?.map(branch => branch.id).join("\n\t");
80
45
  console.log(`✅ Client works! Your list of branches:\n\t${branches}`);
81
46
  }
82
47
 
83
48
  main().catch((error) => {
84
- if (error.statusCode === 401) {
85
- console.log("⚠️ Authentication failed! Check your USERNAME and PASSWORD.");
86
- } else if (error.statusCode === 429) {
49
+ if (error.statusCode === 429) {
87
50
  console.log("⚠️ Uh oh, you've reached the rate limit! Try again in a few seconds.");
88
51
  } else {
89
52
  console.error("❌ Something went wrong: ", error);
@@ -28,7 +28,6 @@
28
28
 
29
29
  import {
30
30
  ConfigGroup,
31
- GroupCreateRequest,
32
31
  InputTcpjson,
33
32
  OutputFilesystem,
34
33
  Pipeline,
@@ -108,7 +107,7 @@ async function main() {
108
107
  }
109
108
 
110
109
  // Create Worker Group
111
- await cribl.groups.create({ product: "stream", groupCreateRequest: myWorkerGroup as GroupCreateRequest });
110
+ await cribl.groups.create({ product: "stream", groupCreateRequest: myWorkerGroup });
112
111
  console.log(`✅ Worker Group created: ${myWorkerGroup.id}`);
113
112
 
114
113
  // Create TCP JSON Source
@@ -22,7 +22,6 @@
22
22
  * - API Bearer token with Permissions that include creating Worker Groups.
23
23
  */
24
24
 
25
- import { GroupCreateRequest } from "../dist/esm/models";
26
25
  import { createCriblClient, CriblControlPlane } from './auth';
27
26
 
28
27
  /**
@@ -101,7 +100,7 @@ async function replicateWorkerGroup(client: CriblControlPlane, sourceId: string)
101
100
  id: replicaId,
102
101
  name: replicaName,
103
102
  description: `Replica of '${sourceId}'`
104
- } as GroupCreateRequest
103
+ }
105
104
  };
106
105
 
107
106
  // Create the replica Worker Group by copying the configuration of the source Worker Group
@@ -19,7 +19,7 @@
19
19
  },
20
20
  "..": {
21
21
  "name": "cribl-control-plane",
22
- "version": "0.4.0",
22
+ "version": "0.5.0-alpha.1",
23
23
  "dependencies": {
24
24
  "zod": "^3.25.0 || ^4.0.0"
25
25
  },
package/jsr.json CHANGED
@@ -2,7 +2,7 @@
2
2
 
3
3
  {
4
4
  "name": "cribl-control-plane",
5
- "version": "0.4.0",
5
+ "version": "0.5.0-alpha.1",
6
6
  "exports": {
7
7
  ".": "./src/index.ts",
8
8
  "./models/errors": "./src/models/errors/index.ts",
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "cribl-control-plane",
3
- "version": "0.4.0",
3
+ "version": "0.5.0-alpha.1",
4
4
  "author": "Speakeasy",
5
5
  "type": "module",
6
6
  "tshy": {
package/src/lib/config.ts CHANGED
@@ -61,9 +61,9 @@ export function serverURLFromOptions(options: SDKOptions): URL | null {
61
61
 
62
62
  export const SDK_METADATA = {
63
63
  language: "typescript",
64
- openapiDocVersion: "4.15.1-1b453caa",
65
- sdkVersion: "0.4.0",
66
- genVersion: "2.778.0",
64
+ openapiDocVersion: "4.15.1-alpha.1765407756960-5c02a009",
65
+ sdkVersion: "0.5.0-alpha.1",
66
+ genVersion: "2.791.1",
67
67
  userAgent:
68
- "speakeasy-sdk/typescript 0.4.0 2.778.0 4.15.1-1b453caa cribl-control-plane",
68
+ "speakeasy-sdk/typescript 0.5.0-alpha.1 2.791.1 4.15.1-alpha.1765407756960-5c02a009 cribl-control-plane",
69
69
  } as const;
@@ -49,24 +49,24 @@ export type RunnableJobCollectionRunSettings = {
49
49
  /**
50
50
  * Reschedule tasks that failed with non-fatal errors
51
51
  */
52
- rescheduleDroppedTasks?: boolean | undefined;
52
+ rescheduleDroppedTasks: boolean;
53
53
  /**
54
54
  * Maximum number of times a task can be rescheduled
55
55
  */
56
- maxTaskReschedule?: number | undefined;
56
+ maxTaskReschedule: number;
57
57
  /**
58
58
  * Level at which to set task logging
59
59
  */
60
- logLevel?: RunnableJobCollectionScheduleLogLevel | undefined;
60
+ logLevel: RunnableJobCollectionScheduleLogLevel;
61
61
  /**
62
62
  * Maximum time the job is allowed to run. Time unit defaults to seconds if not specified (examples: 30, 45s, 15m). Enter 0 for unlimited time.
63
63
  */
64
- jobTimeout?: string | undefined;
64
+ jobTimeout: string;
65
65
  /**
66
66
  * Job run mode. Preview will either return up to N matching results, or will run until capture time T is reached. Discovery will gather the list of files to turn into streaming tasks, without running the data collection job. Full Run will run the collection job.
67
67
  */
68
- mode?: string | undefined;
69
- timeRangeType?: string | undefined;
68
+ mode: string;
69
+ timeRangeType: string;
70
70
  /**
71
71
  * Earliest time to collect data for the selected timezone
72
72
  */
@@ -80,7 +80,7 @@ export type RunnableJobCollectionRunSettings = {
80
80
  /**
81
81
  * A filter for tokens in the provided collect path and/or the events being collected
82
82
  */
83
- expression?: string | undefined;
83
+ expression: string;
84
84
  /**
85
85
  * Limits the bundle size for small tasks. For example,
86
86
  *
@@ -88,7 +88,7 @@ export type RunnableJobCollectionRunSettings = {
88
88
  *
89
89
  * if your lower bundle size is 1MB, you can bundle up to five 200KB files into one task.
90
90
  */
91
- minTaskSize?: string | undefined;
91
+ minTaskSize: string;
92
92
  /**
93
93
  * Limits the bundle size for files above the lower task bundle size. For example, if your upper bundle size is 10MB,
94
94
  *
@@ -96,7 +96,7 @@ export type RunnableJobCollectionRunSettings = {
96
96
  *
97
97
  * you can bundle up to five 2MB files into one task. Files greater than this size will be assigned to individual tasks.
98
98
  */
99
- maxTaskSize?: string | undefined;
99
+ maxTaskSize: string;
100
100
  };
101
101
 
102
102
  /**
@@ -110,19 +110,19 @@ export type RunnableJobCollectionSchedule = {
110
110
  /**
111
111
  * Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits
112
112
  */
113
- skippable?: boolean | undefined;
113
+ skippable: boolean;
114
114
  /**
115
115
  * If Stream Leader (or single instance) restarts, run all missed jobs according to their original schedules
116
116
  */
117
- resumeMissed?: boolean | undefined;
117
+ resumeMissed: boolean;
118
118
  /**
119
119
  * A cron schedule on which to run this job
120
120
  */
121
- cronSchedule?: string | undefined;
121
+ cronSchedule: string;
122
122
  /**
123
123
  * The maximum number of instances of this scheduled job that may be running at any time
124
124
  */
125
- maxConcurrentRuns?: number | undefined;
125
+ maxConcurrentRuns: number;
126
126
  run?: RunnableJobCollectionRunSettings | undefined;
127
127
  };
128
128
 
@@ -137,7 +137,7 @@ export type Collector = {
137
137
  /**
138
138
  * Delete any files collected (where applicable)
139
139
  */
140
- destructive?: boolean | undefined;
140
+ destructive: boolean;
141
141
  /**
142
142
  * Character encoding to use when parsing ingested data. When not set, @{product} will default to UTF-8 but may incorrectly interpret multi-byte characters.
143
143
  */
@@ -150,7 +150,7 @@ export const InputType = {
150
150
  export type InputType = OpenEnum<typeof InputType>;
151
151
 
152
152
  export type RunnableJobCollectionPreprocess = {
153
- disabled?: boolean | undefined;
153
+ disabled: boolean;
154
154
  /**
155
155
  * Command to feed the data through (via stdin) and process its output (stdout)
156
156
  */
@@ -170,7 +170,7 @@ export type RunnableJobCollectionMetadatum = {
170
170
  };
171
171
 
172
172
  export type RunnableJobCollectionInput = {
173
- type?: InputType | undefined;
173
+ type: InputType;
174
174
  /**
175
175
  * A list of event-breaking rulesets that will be applied, in order, to the input data stream
176
176
  */
@@ -178,16 +178,16 @@ export type RunnableJobCollectionInput = {
178
178
  /**
179
179
  * How long (in milliseconds) the Event Breaker will wait for new data to be sent to a specific channel before flushing the data stream out, as is, to the Pipelines
180
180
  */
181
- staleChannelFlushMs?: number | undefined;
181
+ staleChannelFlushMs: number;
182
182
  /**
183
183
  * Send events to normal routing and event processing. Disable to select a specific Pipeline/Destination combination.
184
184
  */
185
- sendToRoutes?: boolean | undefined;
185
+ sendToRoutes: boolean;
186
186
  preprocess?: RunnableJobCollectionPreprocess | undefined;
187
187
  /**
188
188
  * Rate (in bytes per second) to throttle while writing to an output. Accepts values with multiple-byte units, such as KB, MB, and GB. (Example: 42 MB) Default value of 0 specifies no throttling.
189
189
  */
190
- throttleRatePerSec?: string | undefined;
190
+ throttleRatePerSec: string;
191
191
  /**
192
192
  * Fields to add to events from this input
193
193
  */
@@ -266,36 +266,36 @@ export type CaptureSettings = {
266
266
  /**
267
267
  * Amount of time to keep capture open, in seconds
268
268
  */
269
- duration?: number | undefined;
269
+ duration: number;
270
270
  /**
271
271
  * Maximum number of events to capture
272
272
  */
273
- maxEvents?: number | undefined;
274
- level?: WhereToCapture | undefined;
273
+ maxEvents: number;
274
+ level: WhereToCapture;
275
275
  };
276
276
 
277
277
  export type RunnableJobCollectionRun = {
278
278
  /**
279
279
  * Reschedule tasks that failed with non-fatal errors
280
280
  */
281
- rescheduleDroppedTasks?: boolean | undefined;
281
+ rescheduleDroppedTasks: boolean;
282
282
  /**
283
283
  * Maximum number of times a task can be rescheduled
284
284
  */
285
- maxTaskReschedule?: number | undefined;
285
+ maxTaskReschedule: number;
286
286
  /**
287
287
  * Level at which to set task logging
288
288
  */
289
- logLevel?: RunnableJobCollectionLogLevel | undefined;
289
+ logLevel: RunnableJobCollectionLogLevel;
290
290
  /**
291
291
  * Maximum time the job is allowed to run. Time unit defaults to seconds if not specified (examples: 30, 45s, 15m). Enter 0 for unlimited time.
292
292
  */
293
- jobTimeout?: string | undefined;
293
+ jobTimeout: string;
294
294
  /**
295
295
  * Job run mode. Preview will either return up to N matching results, or will run until capture time T is reached. Discovery will gather the list of files to turn into streaming tasks, without running the data collection job. Full Run will run the collection job.
296
296
  */
297
- mode?: RunnableJobCollectionMode | undefined;
298
- timeRangeType?: TimeRange | undefined;
297
+ mode: RunnableJobCollectionMode;
298
+ timeRangeType: TimeRange;
299
299
  /**
300
300
  * Earliest time to collect data for the selected timezone
301
301
  */
@@ -307,12 +307,12 @@ export type RunnableJobCollectionRun = {
307
307
  /**
308
308
  * Timezone to use for Earliest and Latest times
309
309
  */
310
- timestampTimezone?: string | undefined;
310
+ timestampTimezone: string;
311
311
  timeWarning?: RunnableJobCollectionTimeWarning | undefined;
312
312
  /**
313
313
  * A filter for tokens in the provided collect path and/or the events being collected
314
314
  */
315
- expression?: string | undefined;
315
+ expression: string;
316
316
  /**
317
317
  * Limits the bundle size for small tasks. For example,
318
318
  *
@@ -320,7 +320,7 @@ export type RunnableJobCollectionRun = {
320
320
  *
321
321
  * if your lower bundle size is 1MB, you can bundle up to five 200KB files into one task.
322
322
  */
323
- minTaskSize?: string | undefined;
323
+ minTaskSize: string;
324
324
  /**
325
325
  * Limits the bundle size for files above the lower task bundle size. For example, if your upper bundle size is 10MB,
326
326
  *
@@ -328,11 +328,11 @@ export type RunnableJobCollectionRun = {
328
328
  *
329
329
  * you can bundle up to five 2MB files into one task. Files greater than this size will be assigned to individual tasks.
330
330
  */
331
- maxTaskSize?: string | undefined;
331
+ maxTaskSize: string;
332
332
  /**
333
333
  * Send discover results to Routes
334
334
  */
335
- discoverToRoutes?: boolean | undefined;
335
+ discoverToRoutes: boolean;
336
336
  capture?: CaptureSettings | undefined;
337
337
  };
338
338
 
@@ -346,11 +346,11 @@ export type RunnableJobCollection = {
346
346
  /**
347
347
  * Time to keep the job's artifacts on disk after job completion. This also affects how long a job is listed in the Job Inspector.
348
348
  */
349
- ttl?: string | undefined;
349
+ ttl: string;
350
350
  /**
351
351
  * When enabled, this job's artifacts are not counted toward the Worker Group's finished job artifacts limit. Artifacts will be removed only after the Collector's configured time to live.
352
352
  */
353
- ignoreGroupJobsLimit?: boolean | undefined;
353
+ ignoreGroupJobsLimit: boolean;
354
354
  /**
355
355
  * List of fields to remove from Discover results. Wildcards (for example, aws*) are allowed. This is useful when discovery returns sensitive fields that should not be exposed in the Jobs user interface.
356
356
  */
@@ -358,7 +358,7 @@ export type RunnableJobCollection = {
358
358
  /**
359
359
  * Resume the ad hoc job if a failure condition causes Stream to restart during job execution
360
360
  */
361
- resumeOnBoot?: boolean | undefined;
361
+ resumeOnBoot: boolean;
362
362
  /**
363
363
  * Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere.
364
364
  */
@@ -374,7 +374,7 @@ export type RunnableJobCollection = {
374
374
  /**
375
375
  * If enabled, tasks are created and run by the same Worker Node
376
376
  */
377
- workerAffinity?: boolean | undefined;
377
+ workerAffinity: boolean;
378
378
  collector: Collector;
379
379
  input?: RunnableJobCollectionInput | undefined;
380
380
  run: RunnableJobCollectionRun;