@qlik/api 2.3.1 → 2.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/analytics/change-stores.d.ts +40 -76
- package/analytics/change-stores.js +9 -10
- package/analytics.d.ts +1 -2
- package/analytics.js +5 -6
- package/api-keys.d.ts +46 -100
- package/api-keys.js +3 -4
- package/apps.d.ts +334 -461
- package/apps.js +74 -6
- package/assistants.d.ts +172 -337
- package/assistants.js +3 -4
- package/audits.d.ts +41 -85
- package/audits.js +3 -4
- package/auth-types.d.ts +110 -1
- package/auth-types.js +22 -1
- package/auth.d.ts +1 -2
- package/auth.js +1 -1
- package/automation-connections.d.ts +44 -95
- package/automation-connections.js +3 -4
- package/automation-connectors.d.ts +10 -20
- package/automation-connectors.js +3 -4
- package/automations.d.ts +54 -112
- package/automations.js +3 -4
- package/automl-deployments.d.ts +17 -36
- package/automl-deployments.js +3 -4
- package/automl-predictions.d.ts +12 -24
- package/automl-predictions.js +3 -4
- package/banners.d.ts +8 -14
- package/banners.js +3 -4
- package/brands.d.ts +19 -39
- package/brands.js +3 -4
- package/chunks/auth-B8lJw7rm.js +31 -0
- package/chunks/{interceptors-2VSXImC9.js → boot-interceptors-DqRxTczb.js} +50 -150
- package/chunks/{chunk-D3vHIbds.js → chunk-BdHzlgOL.js} +3 -3
- package/chunks/{invoke-fetch-CckTK7bh.js → invoke-fetch-DMAi6Fg3.js} +3 -3
- package/chunks/{invoke-fetch-D9lpiAb-.js → invoke-fetch-pMN6iqup.js} +2 -3
- package/chunks/platform-types-CECrZkOF.d.ts +29 -0
- package/chunks/{public-runtime-modules-BqxAMJ9M.js → public-runtime-modules-2KfyI2qM.js} +5 -5
- package/chunks/{qix-DpvHvpx7.js → qix-BdNrIA4s.js} +19 -20
- package/chunks/{qix-chunk-entrypoint-BXZPnE6J.js → qix-chunk-entrypoint-L9RpWwLK.js} +52 -58
- package/chunks/{qix-CrqXv44x.d.ts → qix-types-y_di0roE.d.ts} +33 -115
- package/collections.d.ts +67 -144
- package/collections.js +3 -4
- package/conditions.d.ts +87 -190
- package/conditions.js +3 -4
- package/consumption.d.ts +35 -72
- package/consumption.js +3 -4
- package/core/ip-policies.d.ts +43 -93
- package/core/ip-policies.js +5 -6
- package/core.d.ts +1 -2
- package/core.js +5 -6
- package/csp-origins.d.ts +42 -85
- package/csp-origins.js +3 -4
- package/csrf-token.d.ts +6 -11
- package/csrf-token.js +3 -4
- package/data-alerts.d.ts +152 -325
- package/data-alerts.js +3 -4
- package/data-assets.d.ts +16 -31
- package/data-assets.js +3 -4
- package/data-connections.d.ts +123 -257
- package/data-connections.js +3 -4
- package/data-credentials.d.ts +36 -76
- package/data-credentials.js +3 -4
- package/data-files.d.ts +60 -121
- package/data-files.js +3 -4
- package/data-governance/data-products.d.ts +429 -0
- package/data-governance/data-products.js +212 -0
- package/data-governance.d.ts +13 -0
- package/data-governance.js +12 -0
- package/data-qualities.d.ts +11 -25
- package/data-qualities.js +3 -4
- package/data-sets.d.ts +28 -57
- package/data-sets.js +3 -4
- package/data-sources.d.ts +45 -97
- package/data-sources.js +3 -4
- package/data-stores.d.ts +53 -107
- package/data-stores.js +3 -4
- package/dcaas.d.ts +32 -67
- package/dcaas.js +3 -4
- package/di-projects.d.ts +111 -136
- package/di-projects.js +25 -4
- package/direct-access-agents.d.ts +29 -63
- package/direct-access-agents.js +3 -4
- package/encryption.d.ts +28 -59
- package/encryption.js +3 -4
- package/extensions.d.ts +35 -73
- package/extensions.js +3 -4
- package/global-types.d.ts +139 -1
- package/glossaries.d.ts +75 -158
- package/glossaries.js +3 -4
- package/groups.d.ts +58 -125
- package/groups.js +3 -4
- package/identity-providers.d.ts +148 -324
- package/identity-providers.js +3 -4
- package/index.d.ts +18 -4
- package/index.js +64 -19
- package/interceptors.d.ts +2 -3
- package/interceptors.js +104 -2
- package/invoke-fetch-types.d.ts +97 -1
- package/items.d.ts +85 -181
- package/items.js +3 -4
- package/knowledgebases.d.ts +137 -270
- package/knowledgebases.js +3 -4
- package/licenses.d.ts +71 -153
- package/licenses.js +3 -4
- package/lineage-graphs.d.ts +39 -85
- package/lineage-graphs.js +3 -4
- package/ml.d.ts +290 -621
- package/ml.js +3 -4
- package/notes.d.ts +9 -18
- package/notes.js +3 -4
- package/notifications.d.ts +14 -30
- package/notifications.js +3 -4
- package/oauth-clients.d.ts +98 -159
- package/oauth-clients.js +4 -5
- package/oauth-tokens.d.ts +16 -33
- package/oauth-tokens.js +3 -4
- package/package.json +6 -3
- package/qix.d.ts +46 -2
- package/qix.js +2 -3
- package/questions.d.ts +53 -110
- package/questions.js +3 -4
- package/quotas.d.ts +13 -26
- package/quotas.js +3 -4
- package/reload-tasks.d.ts +23 -46
- package/reload-tasks.js +3 -4
- package/reloads.d.ts +26 -52
- package/reloads.js +3 -4
- package/report-templates.d.ts +30 -63
- package/report-templates.js +3 -4
- package/reports.d.ts +111 -238
- package/reports.js +3 -4
- package/roles.d.ts +36 -77
- package/roles.js +3 -4
- package/sharing-tasks.d.ts +201 -427
- package/sharing-tasks.js +3 -4
- package/spaces.d.ts +71 -157
- package/spaces.js +3 -4
- package/tasks.d.ts +76 -164
- package/tasks.js +3 -4
- package/temp-contents.d.ts +17 -35
- package/temp-contents.js +3 -4
- package/tenant-settings.d.ts +298 -0
- package/tenant-settings.js +107 -0
- package/tenants.d.ts +31 -67
- package/tenants.js +3 -4
- package/themes.d.ts +27 -55
- package/themes.js +3 -4
- package/transports.d.ts +46 -106
- package/transports.js +3 -4
- package/ui-config.d.ts +8 -17
- package/ui-config.js +3 -4
- package/users.d.ts +80 -170
- package/users.js +3 -4
- package/web-integrations.d.ts +38 -83
- package/web-integrations.js +3 -4
- package/web-notifications.d.ts +14 -29
- package/web-notifications.js +3 -4
- package/webhooks.d.ts +77 -162
- package/webhooks.js +3 -4
- package/chunks/auth-XusKk8IA.js +0 -32
- package/chunks/auth-types-YrlH_R9f.d.ts +0 -416
- package/chunks/auth-types-h43TVDpB.js +0 -24
- package/chunks/global-types-CEVAJebk.js +0 -1
- /package/chunks/{dist-n3iOVn1W.js → dist-DR758NU5.js} +0 -0
- /package/chunks/{invoke-fetch-C1Z0RJYU.d.ts → invoke-fetch-DFc3yzaj.d.ts} +0 -0
- /package/chunks/{utils-vv-xFm06.js → utils-jkpLuYZR.js} +0 -0
- /package/chunks/{websocket-errors-CRTDTtBL.js → websocket-errors-C5U1tba-.js} +0 -0
package/di-projects.d.ts
CHANGED
@@ -1,52 +1,37 @@
-import {
-import "./chunks/invoke-fetch-
+import { ApiCallOptions, DownloadableBlob } from "./invoke-fetch-types.js";
+import "./chunks/invoke-fetch-DFc3yzaj.js";
 
 //#region src/public/rest/di-projects.d.ts
 type AsyncActionDetails = {
 endTime?: string;
-error?: AsyncActionError;
-/** Name of the async operation */
+error?: AsyncActionError; /** Name of the async operation */
 name?: string;
-startTime?: string;
-/** State of the action */
+startTime?: string; /** State of the action */
 state?: AsyncCallStatus;
 taskDetails?: {
-error?: AsyncActionError;
-/** Additional details about task state */
+error?: AsyncActionError; /** Additional details about task state */
 info?: string;
-name?: string;
-/** State of the action */
+name?: string; /** State of the action */
 state?: AsyncCallStatus;
 taskId?: string;
 }[];
-taskProgress?: AsyncActionTaskProgress;
-/** Type of action being performed */
+taskProgress?: AsyncActionTaskProgress; /** Type of action being performed */
 type?: AsyncActionType;
 };
 type AsyncActionError = {
-/** Error code */
-code?: string;
-/** Additional error details */
-details?: string;
-/** Error message */
+/** Error code */code?: string; /** Additional error details */
+details?: string; /** Error message */
 message?: string;
 };
 type AsyncActionRsp = {
-/** Identifier for tracking the action */
-actionId: string;
+/** Identifier for tracking the action */actionId: string;
 };
 type AsyncActionTaskProgress = {
-/** Number of tasks canceled */
-canceled?: number;
-/** Number of tasks completed successfully */
-completed?: number;
-/** Number of tasks currently executing */
-executing?: number;
-/** Number of tasks that failed */
-failed?: number;
-/** Number of tasks pending execution */
-pending?: number;
-/** Number of tasks skipped due to conflicts */
+/** Number of tasks canceled */canceled?: number; /** Number of tasks completed successfully */
+completed?: number; /** Number of tasks currently executing */
+executing?: number; /** Number of tasks that failed */
+failed?: number; /** Number of tasks pending execution */
+pending?: number; /** Number of tasks skipped due to conflicts */
 skipped?: number;
 };
 /**
@@ -58,126 +43,98 @@ type AsyncActionType = "PROJECT_PREPARE" | "PROJECT_VALIDATE" | "TASK_PREPARE" |
 */
 type AsyncCallStatus = "PENDING" | "EXECUTING" | "COMPLETED" | "FAILED" | "CANCELED" | "SKIPPED";
 type CreateDiProjectReq = {
-/** The cloud staging connection string */
-cloudStagingConnection?: string;
-/** A description of the project */
-description?: string;
-/** The name of the project */
-name?: string;
-/** The platform connection string */
-platformConnection?: string;
-/** The platform type of the project. Supported values: - SNOWFLAKE: Snowflake - BIGQUERY: Google BigQuery - SYNAPSE: Azure Synapse - DATABRICKS: Databricks - REDSHIFT: Amazon Redshift - MSSQL: Microsoft SQL Server - FABRIC: Microsoft Fabric (OneLake) - QLIK_QVD: Qlik-managed QVD - QLIK_QVD_CUSTOMER_MANAGED: Customer-managed QVD */
-platformType?: "SNOWFLAKE" | "BIGQUERY" | "SYNAPSE" | "DATABRICKS" | "REDSHIFT" | "MSSQL" | "FABRIC" | "QLIK_QVD" | "QLIK_QVD_CUSTOMER_MANAGED";
-/** The ID of the space where the project will be created */
-space?: string;
-/** The type of the project */
+/** The cloud staging connection string */cloudStagingConnection?: string; /** A description of the project */
+description?: string; /** The name of the project */
+name?: string; /** The platform connection string */
+platformConnection?: string; /** The platform type of the project. Supported values: - SNOWFLAKE: Snowflake - BIGQUERY: Google BigQuery - SYNAPSE: Azure Synapse - DATABRICKS: Databricks - REDSHIFT: Amazon Redshift - MSSQL: Microsoft SQL Server - FABRIC: Microsoft Fabric (OneLake) - QLIK_QVD: Qlik-managed QVD - QLIK_QVD_CUSTOMER_MANAGED: Customer-managed QVD */
+platformType?: "SNOWFLAKE" | "BIGQUERY" | "SYNAPSE" | "DATABRICKS" | "REDSHIFT" | "MSSQL" | "FABRIC" | "QLIK_QVD" | "QLIK_QVD_CUSTOMER_MANAGED"; /** The ID of the space where the project will be created */
+space?: string; /** The type of the project */
 type?: "DATA_PIPELINE" | "DATA_MOVEMENT";
 };
 type DataTaskDatasetState = {
 cdcStatus?: {
-/** Number of DDL statements executed during the last run */
-ddlCount?: number;
-/** delete portion of totalProcessedCount. Only available for some task types */
-deleteCount?: number;
-/** Insert portion of totalProcessedCount. Only available for some task types */
+/** Number of DDL statements executed during the last run */ddlCount?: number; /** delete portion of totalProcessedCount. Only available for some task types */
+deleteCount?: number; /** Insert portion of totalProcessedCount. Only available for some task types */
 insertCount?: number;
 lastProcessed?: string;
 message?: string;
-state?: "QUEUED" | "PROCESSING" | "ACCUMULATING_CHANGES" | "COMPLETED" | "ERROR";
-/** Total number of changes/DMLs applied to the dataset */
-totalProcessedCount?: number;
-/** update portion of totalProcessedCount. Only available for some task types */
+state?: "QUEUED" | "PROCESSING" | "ACCUMULATING_CHANGES" | "COMPLETED" | "ERROR"; /** Total number of changes/DMLs applied to the dataset */
+totalProcessedCount?: number; /** update portion of totalProcessedCount. Only available for some task types */
 updateCount?: number;
-};
-/** Is the data ready for use? */
-dataReadiness?: "READY" | "NOT_READY" | "ERROR";
-/** Id of the dataset */
+}; /** Is the data ready for use? */
+dataReadiness?: "READY" | "NOT_READY" | "ERROR"; /** Id of the dataset */
 datasetId?: string;
 fullLoad?: {
-/** Number of changes captured and cached during full load (CDC landing/replication tasks only) */
-cachedChangesCount?: number;
-/** Duration in HH:MM:SS format (hours:minutes:seconds) */
+/** Number of changes captured and cached during full load (CDC landing/replication tasks only) */cachedChangesCount?: number; /** Duration in HH:MM:SS format (hours:minutes:seconds) */
 duration?: string;
-endTime?: string;
-/** Number of records that failed to load (currently only for knowledge marts) */
+endTime?: string; /** Number of records that failed to load (currently only for knowledge marts) */
 failedRecordsCount?: number;
 message?: string;
 startTime?: string;
-state?: "QUEUED" | "LOADING" | "COMPLETED" | "ERROR";
-/** Number of records (or docs in knowledge marts) were loaded. */
+state?: "QUEUED" | "LOADING" | "COMPLETED" | "ERROR"; /** Number of records (or docs in knowledge marts) were loaded. */
 totalProcessedCount?: number;
 };
 lastBatchOfChanges?: {
-/** Duration in HH:MM:SS format (hours:minutes:seconds) */
-duration?: string;
+/** Duration in HH:MM:SS format (hours:minutes:seconds) */duration?: string;
 endTime?: string;
 message?: string;
 startTime?: string;
-state?: "QUEUED" | "PROCESSING" | "COMPLETED" | "ERROR";
-/** Throughput in records per second */
+state?: "QUEUED" | "PROCESSING" | "COMPLETED" | "ERROR"; /** Throughput in records per second */
 throughputInRecordsPerSecond?: number;
 totalProcessedCount?: number;
-};
-/** Name of the dataset */
-name?: string;
-/** Original name of the dataset, relevant only for data movement tasks */
+}; /** Name of the dataset */
+name?: string; /** Original name of the dataset, relevant only for data movement tasks */
 sourceName?: string;
+streaming?: {
+message?: string; /** Number of records that had parsing issues */
+parseIssueCount?: number; /** Total number of records filtered out and not written to the dataset */
+recordsFilteredCount?: number; /** Total number of records written to the dataset */
+recordsWrittenCount?: number;
+state?: "QUEUED" | "RUNNING" | "ERROR";
+};
 };
 type DataTaskInstanceState = {
 cdcStatus?: {
 accumulatingChangesCount?: number;
-applyingChangesCount?: number;
-/** Duration in HH:MM:SS format (hours:minutes:seconds) */
-latency?: string;
-/** Throughput in kilobytes per second */
+applyingChangesCount?: number; /** Duration in HH:MM:SS format (hours:minutes:seconds) */
+latency?: string; /** Throughput in kilobytes per second */
 throughputInKilobytesPerSecond?: number;
 totalProcessedCount?: number;
-};
-/** Duration in HH:MM:SS format (hours:minutes:seconds) */
-duration?: string;
-/** Timestamp indicating when the task instance ended */
+}; /** Duration in HH:MM:SS format (hours:minutes:seconds) */
+duration?: string; /** Timestamp indicating when the task instance ended */
 endTime?: string;
 fullLoad?: {
-/** Number of datasets that have completed full load in this task run */
-completedCount?: number;
-/** Number of datasets that have failed full load in this task run */
-errorCount?: number;
-/** Number of datasets that are currently being loaded in this task run */
-loadingCount?: number;
-/** Number of datasets that are queued for full load in this task run */
+/** Number of datasets that have completed full load in this task run */completedCount?: number; /** Number of datasets that have failed full load in this task run */
+errorCount?: number; /** Number of datasets that are currently being loaded in this task run */
+loadingCount?: number; /** Number of datasets that are queued for full load in this task run */
 queuedCount?: number;
 };
 general?: {
-/** The latest point in time the data reflects, based on updates from the source system. */
-dataTaskUpdatedTo?: string;
-/** Total number of datasets produced by the task, including ones in error */
-datasetCount?: number;
-/** Count of datasets that encountered errors */
-datasetsInErrorCount?: number;
-/** For tasks that run on a gateway, this is the id of the gateway */
-gatewayId?: string;
-/** For tasks that run on a gateway, this is the name of the gateway */
-gatewayName?: string;
-/** For tasks that run on a gateway, this is the internal name of the task on the gateway */
-gatewayTaskName?: string;
-/** For lakehouse storage tasks, this is the name of the cluster where the task runs */
-lakehouseClusterName?: string;
-/** The latest point in time the live views reflect, based on updates from the source system. */
+/** The latest point in time the data reflects, based on updates from the source system. */dataTaskUpdatedTo?: string; /** Total number of datasets produced by the task, including ones in error */
+datasetCount?: number; /** Count of datasets that encountered errors */
+datasetsInErrorCount?: number; /** For tasks that run on a gateway, this is the id of the gateway */
+gatewayId?: string; /** For tasks that run on a gateway, this is the name of the gateway */
+gatewayName?: string; /** For tasks that run on a gateway, this is the internal name of the task on the gateway */
+gatewayTaskName?: string; /** For lakehouse storage tasks, this is the name of the cluster where the task runs */
+lakehouseClusterName?: string; /** The latest point in time the live views reflect, based on updates from the source system. */
 liveViewsUpdatedTo?: string;
 };
 lastBatchOfChanges?: {
-/** This batch starts with operational source changes from this time. */
-relatesToRecordsFrom?: string;
-/** This batch ends with operational source changes from this time. */
-relatesToRecordsTo?: string;
-/** Throughput in records per second */
+/** This batch starts with operational source changes from this time. */relatesToRecordsFrom?: string; /** This batch ends with operational source changes from this time. */
+relatesToRecordsTo?: string; /** Throughput in records per second */
 throughputInRecordsPerSecond?: number;
 totalProcessedCount?: number;
 };
-message?: string;
-/** Timestamp indicating when the task instance started */
+message?: string; /** Timestamp indicating when the task instance started */
 startTime?: string;
 state?: "STARTING" | "RUNNING" | "COMPLETED" | "FAILED" | "CANCELED" | "STOPPING";
+streaming?: {
+/** Number of streaming datasets that have encountered errors */errorCount?: number; /** Duration in HH:MM:SS format (hours:minutes:seconds) */
+latency?: string; /** Number of streaming datasets that are queued */
+queuedCount?: number; /** Number of streaming datasets that are currently running */
+runningCount?: number; /** Total number of records processed */
+totalProcessedCount?: number;
+};
 };
 type DataTaskItemRsp = {
 description?: string;
@@ -188,8 +145,7 @@ type DataTaskItemRsp = {
 type?: DataTaskType;
 };
 type DataTaskRuntimeState = {
-lastRun?: DataTaskInstanceState;
-/** Name of the data task */
+lastRun?: DataTaskInstanceState; /** Name of the data task */
 name?: string;
 runReadiness?: {
 message?: string;
@@ -197,7 +153,7 @@ type DataTaskRuntimeState = {
 };
 type?: DataTaskType;
 };
-type DataTaskType = "LANDING" | "STORAGE" | "QVD_STORAGE" | "TRANSFORM" | "DATAMART" | "REGISTERED_DATA" | "REPLICATION" | "DISTRIBUTION" | "LAKE_LANDING" | "KNOWLEDGE_MART" | "FILE_BASED_KNOWLEDGE_MART" | "LAKEHOUSE_STORAGE" | "LAKEHOUSE_MIRROR";
+type DataTaskType = "LANDING" | "STORAGE" | "QVD_STORAGE" | "TRANSFORM" | "DATAMART" | "REGISTERED_DATA" | "REPLICATION" | "DISTRIBUTION" | "LAKE_LANDING" | "KNOWLEDGE_MART" | "FILE_BASED_KNOWLEDGE_MART" | "LAKEHOUSE_STORAGE" | "LAKEHOUSE_MIRROR" | "STREAMING_LAKE_LANDING" | "STREAMING_TRANSFORM";
 type DiProjectItemRsp = {
 description?: string;
 id?: string;
@@ -206,8 +162,7 @@ type DiProjectItemRsp = {
 spaceId?: string;
 };
 type DiProjectOperationSelectedTask = {
-/** Task identifier */
-taskId: string;
+/** Task identifier */taskId: string;
 };
 type Error = {
 code?: string;
@@ -225,8 +180,7 @@ type Errors = {
 traceId?: string;
 };
 type ExportDiProjectReq = {
-/** Include bindings in the exported zip file (optional, default is false) */
-includeBindings?: boolean;
+/** Include bindings in the exported zip file (optional, default is false) */includeBindings?: boolean;
 };
 type GetDiExportProjectVariablesRsp = {
 nameToIdMap?: Record<string, string>;
@@ -240,18 +194,18 @@ type ListDiProjectsRsp = {
 projects?: DiProjectItemRsp[];
 };
 type PrepareProjectReq = {
-allowRecreate?: boolean;
-/** Array of tasks to prepare. Leave empty to trigger project-level orchestration using built-in logic (same as in the user interface). */
+allowRecreate?: boolean; /** Array of tasks to prepare. Leave empty to trigger project-level orchestration using built-in logic (same as in the user interface). */
 selectedTasks?: TaskSelectionList;
 };
 type PrepareTaskReq = {
-/** Allow recreation of existing artifacts */
-allowRecreate: boolean;
+/** Allow recreation of existing artifacts */allowRecreate: boolean;
 };
+/**
+* Request body to recreate task datasets.
+*/
+type RecreateTaskDatasetsReq = unknown;
 type ReloadDiTaskReq = {
-/** Reload strategy (optional, applies to materialized SQL transformations and transformation flows tasks) */
-reloadStrategy?: "NONE" | "TRUNCATE" | "COMPARE_AND_APPLY";
-/** Datasets to reload (optional, if omitted or empty, all datasets will be reloaded). */
+/** Reload strategy (optional, applies to materialized SQL transformations and transformation flows tasks) */reloadStrategy?: "NONE" | "TRUNCATE" | "COMPARE_AND_APPLY"; /** Datasets to reload (optional, if omitted or empty, all datasets will be reloaded). */
 selectedDatasets?: {
 datasetId?: string;
 }[];
@@ -260,13 +214,10 @@ type ReloadDiTaskReq = {
 * Indicates whether the reload request was registered successfully.
 */
 type ReloadRequestResponse = {
-/** Always true when the server successfully registers the request. */
-success: boolean;
+/** Always true when the server successfully registers the request. */success: boolean;
 };
 type StartTaskReq = {
-/** Task run option for the task (optional, applies to Replication tasks only). */
-option?: "PROCESS_CHANGES_FROM_TIMESTAMP" | "PROCESS_CHANGES_FROM_POSITION" | "RECOVER_USING_LOCALLY_STORED_CHECKPOINT";
-/** The value indicating where to resume the process, either a timestamp or an offset depending on the run option (optional, applies to Replication tasks only). */
+/** Task run option for the task (optional, applies to Replication tasks only). */option?: "PROCESS_CHANGES_FROM_TIMESTAMP" | "PROCESS_CHANGES_FROM_POSITION" | "RECOVER_USING_LOCALLY_STORED_CHECKPOINT"; /** The value indicating where to resume the process, either a timestamp or an offset depending on the run option (optional, applies to Replication tasks only). */
 processChangesFrom?: string;
 };
 /**
@@ -278,8 +229,7 @@ type UpdateDiExportProjectVariablesReq = {
 };
 type UpdateDiExportProjectVariablesRsp = unknown;
 type ValidateProjectReq = {
-/** Array of tasks to prepare. Leave empty to trigger project-level orchestration using built-in logic (same as in the user interface). */
-selectedTasks?: TaskSelectionList;
+/** Array of tasks to prepare. Leave empty to trigger project-level orchestration using built-in logic (same as in the user interface). */selectedTasks?: TaskSelectionList;
 };
 /**
 * Request body for task validation
@@ -292,8 +242,7 @@ type ValidateTaskReq = unknown;
 * @throws GetDiProjectsHttpError
 */
 declare function getDiProjects(query: {
-/** Filter by space id */
-spaceId?: string;
+/** Filter by space id */spaceId?: string;
 }, options?: ApiCallOptions): Promise<GetDiProjectsHttpResponse>;
 type GetDiProjectsHttpResponse = {
 data: ListDiProjectsRsp;
@@ -330,8 +279,7 @@ type CreateDiProjectHttpError = {
 * @throws GetDiProjectHttpError
 */
 declare function getDiProject(actionId: string, query: {
-/** Specifies whether to include detailed status information in the response. Set to `true` to return detailed information. */
-detailed?: boolean;
+/** Specifies whether to include detailed status information in the response. Set to `true` to return detailed information. */detailed?: boolean;
 }, options?: ApiCallOptions): Promise<GetDiProjectHttpResponse>;
 type GetDiProjectHttpResponse = {
 data: AsyncActionDetails;
@@ -425,8 +373,7 @@ type ValidateDiProjectHttpError = {
 * @throws GetDiProjectExportVariablesHttpError
 */
 declare function getDiProjectExportVariables(projectId: string, query: {
-/** Recalculate the bindings if true, otherwise saved bindings are returned. */
-recalculate?: boolean;
+/** Recalculate the bindings if true, otherwise saved bindings are returned. */recalculate?: boolean;
 }, options?: ApiCallOptions): Promise<GetDiProjectExportVariablesHttpResponse>;
 type GetDiProjectExportVariablesHttpResponse = {
 data: GetDiExportProjectVariablesRsp;
@@ -510,6 +457,25 @@ type PrepareDiProjectDiTaskHttpError = {
 headers: Headers;
 status: 400 | 404;
 };
+/**
+* Recreates datasets in the specified data task.
+*
+* @param projectId Identifier of the data project.
+* @param dataTaskId Identifier of the data task.
+* @param body an object with the body content
+* @throws RecreateDatasetsDiProjectDiTaskHttpError
+*/
+declare function recreateDatasetsDiProjectDiTask(projectId: string, dataTaskId: string, body: RecreateTaskDatasetsReq, options?: ApiCallOptions): Promise<RecreateDatasetsDiProjectDiTaskHttpResponse>;
+type RecreateDatasetsDiProjectDiTaskHttpResponse = {
+data: AsyncActionRsp;
+headers: Headers;
+status: 202;
+};
+type RecreateDatasetsDiProjectDiTaskHttpError = {
+data: Errors;
+headers: Headers;
+status: 400 | 404;
+};
 /**
 * Registers a request to reload the datasets associated with the specified data task. The reload does not occur immediately; it will take effect on the next scheduled or manual run of the task.
 *
@@ -740,6 +706,15 @@ type DiProjectsAPI = {
 * @throws PrepareDiProjectDiTaskHttpError
 */
 prepareDiProjectDiTask: typeof prepareDiProjectDiTask;
+/**
+* Recreates datasets in the specified data task.
+*
+* @param projectId Identifier of the data project.
+* @param dataTaskId Identifier of the data task.
+* @param body an object with the body content
+* @throws RecreateDatasetsDiProjectDiTaskHttpError
+*/
+recreateDatasetsDiProjectDiTask: typeof recreateDatasetsDiProjectDiTask;
 /**
 * Registers a request to reload the datasets associated with the specified data task. The reload does not occur immediately; it will take effect on the next scheduled or manual run of the task.
 *
@@ -809,4 +784,4 @@ type DiProjectsAPI = {
 */
 declare const diProjectsExport: DiProjectsAPI;
 //#endregion
-export { AsyncActionDetails, AsyncActionError, AsyncActionRsp, AsyncActionTaskProgress, AsyncActionType, AsyncCallStatus, CreateDiProjectHttpError, CreateDiProjectHttpResponse, CreateDiProjectReq, DataTaskDatasetState, DataTaskInstanceState, DataTaskItemRsp, DataTaskRuntimeState, DataTaskType, DiProjectItemRsp, DiProjectOperationSelectedTask, DiProjectsAPI, Error, ErrorSource, Errors, ExportDiProjectHttpError, ExportDiProjectHttpResponse, ExportDiProjectReq, GetDiExportProjectVariablesRsp, GetDiProjectDiTaskHttpError, GetDiProjectDiTaskHttpResponse, GetDiProjectDiTaskRuntimeStateDatasetsHttpError, GetDiProjectDiTaskRuntimeStateDatasetsHttpResponse, GetDiProjectDiTaskRuntimeStateHttpError, GetDiProjectDiTaskRuntimeStateHttpResponse, GetDiProjectDiTasksHttpError, GetDiProjectDiTasksHttpResponse, GetDiProjectExportVariablesHttpError, GetDiProjectExportVariablesHttpResponse, GetDiProjectHttpError, GetDiProjectHttpResponse, GetDiProjectsHttpError, GetDiProjectsHttpResponse, ImportDiProjectHttpError, ImportDiProjectHttpResponse, ImportDiProjectRsp, ListDataTasksRsp, ListDiProjectsRsp, PrepareDiProjectDiTaskHttpError, PrepareDiProjectDiTaskHttpResponse, PrepareDiProjectHttpError, PrepareDiProjectHttpResponse, PrepareProjectReq, PrepareTaskReq, ReloadDiTaskReq, ReloadRequestResponse, RequestReloadDiProjectDiTaskHttpError, RequestReloadDiProjectDiTaskHttpResponse, SetDiProjectExportVariablesHttpError, SetDiProjectExportVariablesHttpResponse, StartDiProjectDiTaskRuntimeHttpError, StartDiProjectDiTaskRuntimeHttpResponse, StartDiProjectDiTaskRuntimeWithBodyHttpError, StartDiProjectDiTaskRuntimeWithBodyHttpResponse, StartTaskReq, StopDiProjectDiTaskRuntimeHttpError, StopDiProjectDiTaskRuntimeHttpResponse, TaskSelectionList, UpdateDiExportProjectVariablesReq, UpdateDiExportProjectVariablesRsp, ValidateDiProjectDiTaskHttpError, ValidateDiProjectDiTaskHttpResponse, ValidateDiProjectHttpError, ValidateDiProjectHttpResponse, ValidateProjectReq, ValidateTaskReq, clearCache, createDiProject, diProjectsExport as default, exportDiProject, getDiProject, getDiProjectDiTask, getDiProjectDiTaskRuntimeState, getDiProjectDiTaskRuntimeStateDatasets, getDiProjectDiTasks, getDiProjectExportVariables, getDiProjects, importDiProject, prepareDiProject, prepareDiProjectDiTask, requestReloadDiProjectDiTask, setDiProjectExportVariables, startDiProjectDiTaskRuntime, startDiProjectDiTaskRuntimeWithBody, stopDiProjectDiTaskRuntime, validateDiProject, validateDiProjectDiTask };
+export { AsyncActionDetails, AsyncActionError, AsyncActionRsp, AsyncActionTaskProgress, AsyncActionType, AsyncCallStatus, CreateDiProjectHttpError, CreateDiProjectHttpResponse, CreateDiProjectReq, DataTaskDatasetState, DataTaskInstanceState, DataTaskItemRsp, DataTaskRuntimeState, DataTaskType, DiProjectItemRsp, DiProjectOperationSelectedTask, DiProjectsAPI, Error, ErrorSource, Errors, ExportDiProjectHttpError, ExportDiProjectHttpResponse, ExportDiProjectReq, GetDiExportProjectVariablesRsp, GetDiProjectDiTaskHttpError, GetDiProjectDiTaskHttpResponse, GetDiProjectDiTaskRuntimeStateDatasetsHttpError, GetDiProjectDiTaskRuntimeStateDatasetsHttpResponse, GetDiProjectDiTaskRuntimeStateHttpError, GetDiProjectDiTaskRuntimeStateHttpResponse, GetDiProjectDiTasksHttpError, GetDiProjectDiTasksHttpResponse, GetDiProjectExportVariablesHttpError, GetDiProjectExportVariablesHttpResponse, GetDiProjectHttpError, GetDiProjectHttpResponse, GetDiProjectsHttpError, GetDiProjectsHttpResponse, ImportDiProjectHttpError, ImportDiProjectHttpResponse, ImportDiProjectRsp, ListDataTasksRsp, ListDiProjectsRsp, PrepareDiProjectDiTaskHttpError, PrepareDiProjectDiTaskHttpResponse, PrepareDiProjectHttpError, PrepareDiProjectHttpResponse, PrepareProjectReq, PrepareTaskReq, RecreateDatasetsDiProjectDiTaskHttpError, RecreateDatasetsDiProjectDiTaskHttpResponse, RecreateTaskDatasetsReq, ReloadDiTaskReq, ReloadRequestResponse, RequestReloadDiProjectDiTaskHttpError, RequestReloadDiProjectDiTaskHttpResponse, SetDiProjectExportVariablesHttpError, SetDiProjectExportVariablesHttpResponse, StartDiProjectDiTaskRuntimeHttpError, StartDiProjectDiTaskRuntimeHttpResponse, StartDiProjectDiTaskRuntimeWithBodyHttpError, StartDiProjectDiTaskRuntimeWithBodyHttpResponse, StartTaskReq, StopDiProjectDiTaskRuntimeHttpError, StopDiProjectDiTaskRuntimeHttpResponse, TaskSelectionList, UpdateDiExportProjectVariablesReq, UpdateDiExportProjectVariablesRsp, ValidateDiProjectDiTaskHttpError, ValidateDiProjectDiTaskHttpResponse, ValidateDiProjectHttpError, ValidateDiProjectHttpResponse, ValidateProjectReq, ValidateTaskReq, clearCache, createDiProject, diProjectsExport as default, exportDiProject, getDiProject, getDiProjectDiTask, getDiProjectDiTaskRuntimeState, getDiProjectDiTaskRuntimeStateDatasets, getDiProjectDiTasks, getDiProjectExportVariables, getDiProjects, importDiProject, prepareDiProject, prepareDiProjectDiTask, recreateDatasetsDiProjectDiTask, requestReloadDiProjectDiTask, setDiProjectExportVariables, startDiProjectDiTaskRuntime, startDiProjectDiTaskRuntimeWithBody, stopDiProjectDiTaskRuntime, validateDiProject, validateDiProjectDiTask };
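The headline change in this module is the new recreate-datasets action, which returns an AsyncActionRsp (HTTP 202) whose actionId can then be polled with getDiProject. A minimal sketch of how a consumer might call it, assuming the "@qlik/api/di-projects" subpath export, placeholder IDs, and that host/auth configuration is handled elsewhere:

import { getDiProject, recreateDatasetsDiProjectDiTask } from "@qlik/api/di-projects";

// Placeholder identifiers; substitute real project and data task IDs from your tenant.
async function recreateAndPoll(projectId: string, dataTaskId: string) {
  // RecreateTaskDatasetsReq is declared as `unknown`, so an empty object body is sent here.
  const { data } = await recreateDatasetsDiProjectDiTask(projectId, dataTaskId, {});
  // The 202 response carries an actionId that can be polled for status via getDiProject.
  const action = await getDiProject(data.actionId, { detailed: true });
  return action.data.state; // "PENDING" | "EXECUTING" | "COMPLETED" | "FAILED" | "CANCELED" | "SKIPPED"
}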
package/di-projects.js
CHANGED
@@ -1,5 +1,5 @@
-import "./chunks/public-runtime-modules-
-import { n as invokeFetch, t as clearApiCache } from "./chunks/invoke-fetch-
+import "./chunks/public-runtime-modules-2KfyI2qM.js";
+import { n as invokeFetch, t as clearApiCache } from "./chunks/invoke-fetch-DMAi6Fg3.js";
 
 //#region src/public/rest/di-projects.ts
 /**
@@ -202,6 +202,27 @@ async function prepareDiProjectDiTask(projectId, dataTaskId, body, options) {
 });
 }
 /**
+* Recreates datasets in the specified data task.
+*
+* @param projectId Identifier of the data project.
+* @param dataTaskId Identifier of the data task.
+* @param body an object with the body content
+* @throws RecreateDatasetsDiProjectDiTaskHttpError
+*/
+async function recreateDatasetsDiProjectDiTask(projectId, dataTaskId, body, options) {
+return invokeFetch("di-projects", {
+method: "post",
+pathTemplate: "/api/v1/di-projects/{projectId}/di-tasks/{dataTaskId}/actions/recreate-datasets",
+pathVariables: {
+projectId,
+dataTaskId
+},
+body,
+contentType: "application/json",
+options
+});
+}
+/**
 * Registers a request to reload the datasets associated with the specified data task. The reload does not occur immediately; it will take effect on the next scheduled or manual run of the task.
 *
 * @param projectId Identifier of the data project.
@@ -358,6 +379,7 @@ const diProjectsExport = {
 getDiProjectDiTasks,
 getDiProjectDiTask,
 prepareDiProjectDiTask,
+recreateDatasetsDiProjectDiTask,
 requestReloadDiProjectDiTask,
 validateDiProjectDiTask,
 startDiProjectDiTaskRuntimeWithBody,
@@ -367,7 +389,6 @@ const diProjectsExport = {
 getDiProjectDiTaskRuntimeStateDatasets,
 clearCache
 };
-var di_projects_default = diProjectsExport;
 
 //#endregion
-export { clearCache, createDiProject,
+export { clearCache, createDiProject, diProjectsExport as default, exportDiProject, getDiProject, getDiProjectDiTask, getDiProjectDiTaskRuntimeState, getDiProjectDiTaskRuntimeStateDatasets, getDiProjectDiTasks, getDiProjectExportVariables, getDiProjects, importDiProject, prepareDiProject, prepareDiProjectDiTask, recreateDatasetsDiProjectDiTask, requestReloadDiProjectDiTask, setDiProjectExportVariables, startDiProjectDiTaskRuntime, startDiProjectDiTaskRuntimeWithBody, stopDiProjectDiTaskRuntime, validateDiProject, validateDiProjectDiTask };
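The runtime module now exports the API object itself as the default export (the intermediate di_projects_default variable is gone) alongside the named functions. A small sketch of the two equivalent import styles, assuming the "@qlik/api/di-projects" subpath export:

import diProjects from "@qlik/api/di-projects";
import { requestReloadDiProjectDiTask } from "@qlik/api/di-projects";

// Both forms resolve to the same invokeFetch-based implementation.
const viaDefault = diProjects.requestReloadDiProjectDiTask;
console.log(viaDefault === requestReloadDiProjectDiTask); // expected: true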
package/direct-access-agents.d.ts
CHANGED
@@ -1,5 +1,5 @@
-import {
-import "./chunks/invoke-fetch-
+import { ApiCallOptions } from "./invoke-fetch-types.js";
+import "./chunks/invoke-fetch-DFc3yzaj.js";
 
 //#region src/public/rest/direct-access-agents.d.ts
 type CancelBenchmarkResponse = {
@@ -62,9 +62,7 @@ type ErrorResponse = {
 traceId?: string;
 };
 type FileConnectorAllowedPath = {
-/** The Path property in the File connector allowed paths file. */
-path: string;
-/** The Spaces property in the Odbc custom type mappings file. */
+/** The Path property in the File connector allowed paths file. */path: string; /** The Spaces property in the Odbc custom type mappings file. */
 spaces?: string[];
 };
 type FileConnectorAllowedPathsResponse = {
@@ -72,40 +70,26 @@ type FileConnectorAllowedPathsResponse = {
 result?: FileConnectorAllowedPath[];
 };
 type GetBenchmarkStatusResponse = {
-/** The ISO 8601 formatted timestamp when the benchmark task completed or was cancelled */
-benchmarkEndTime?: string;
-/** The benchmark ID */
-benchmarkId?: string;
-/** The ISO 8601 formatted timestamp when the benchmark task started execution */
+/** The ISO 8601 formatted timestamp when the benchmark task completed or was cancelled */benchmarkEndTime?: string; /** The benchmark ID */
+benchmarkId?: string; /** The ISO 8601 formatted timestamp when the benchmark task started execution */
 benchmarkStartTime?: string;
-results?: GetBenchmarkStatusResults;
-/** The benchmark status */
-status?: string;
-/** Additional details about the benchmark status */
-statusMessage?: string;
-/** The total bytes requested to be transferred during the benchmark */
+results?: GetBenchmarkStatusResults; /** The benchmark status */
+status?: string; /** Additional details about the benchmark status */
+statusMessage?: string; /** The total bytes requested to be transferred during the benchmark */
 totalBytesRequested?: number;
 };
 type GetBenchmarkStatusResults = {
-/** The ISO 8601 formatted timestamp when data transmission completed */
-dataTransmissionEndTime?: string;
-/** The ISO 8601 formatted timestamp when data transmission start */
-dataTransmissionStartTime?: string;
-/** The latency in ms measured during data transmission */
-latency?: number;
-/** The data throughput in KB/s measured during data transmission */
-throughput?: number;
-/** The total number of bytes successfully transferred during data transmission */
+/** The ISO 8601 formatted timestamp when data transmission completed */dataTransmissionEndTime?: string; /** The ISO 8601 formatted timestamp when data transmission start */
+dataTransmissionStartTime?: string; /** The latency in ms measured during data transmission */
+latency?: number; /** The data throughput in KB/s measured during data transmission */
+throughput?: number; /** The total number of bytes successfully transferred during data transmission */
 totalBytesTransferred?: number;
 };
 type LinkResponseObject = {
-/** The URL to the related resource */
-href?: string;
+/** The URL to the related resource */href?: string;
 };
 type MetricsCollectorConnectorConfigurationApiResponse = {
-/** Indicates whether metrics collection is enabled for this connector. */
-metricsCollectionEnabled: boolean;
-/** Frequency in seconds at which metrics are collected from this connector. */
+/** Indicates whether metrics collection is enabled for this connector. */metricsCollectionEnabled: boolean; /** Frequency in seconds at which metrics are collected from this connector. */
 scrapeIntervalSeconds: number;
 };
 type MetricsCollectorConnectorConfigurationsApiResponse = {
@@ -119,22 +103,16 @@ type MetricsCollectorConnectorConfigurationsApiResponse = {
 systemMetrics?: MetricsCollectorConnectorConfigurationApiResponse;
 };
 type MetricsCollectorIndividualConnectorConfigurationRequest = {
-/** Indicates whether metrics collection is enabled for this connector. */
-metricsCollectionEnabled: boolean;
-/** Frequency in seconds at which metrics are collected from this connector. */
+/** Indicates whether metrics collection is enabled for this connector. */metricsCollectionEnabled: boolean; /** Frequency in seconds at which metrics are collected from this connector. */
 scrapeIntervalSeconds: number;
 };
 type MetricsCollectorSettings = {
 /** The base interval in seconds for the metrics collection loop.
 * This defines how frequently the collector checks whether to scrape each connector, not the interval at which each connector is scraped. Must be equal to or less than the lowest individual connector scrape interval. */
-baseScrapeIntervalSeconds: number;
-/** The interval in minutes the metrics collector checks for and deletes old data. */
-dataRetentionCheckIntervalMinutes: number;
-/** Indicates whether the metrics collector is enabled. */
-enabled: boolean;
-/** The number of days to retain local data. */
-localDataRetentionDays: number;
-/** The file location for the local metrics database. If not specified, defaults to `C:\ProgramData\Qlik\Gateway\tmp`. */
+baseScrapeIntervalSeconds: number; /** The interval in minutes the metrics collector checks for and deletes old data. */
+dataRetentionCheckIntervalMinutes: number; /** Indicates whether the metrics collector is enabled. */
+enabled: boolean; /** The number of days to retain local data. */
+localDataRetentionDays: number; /** The file location for the local metrics database. If not specified, defaults to `C:\ProgramData\Qlik\Gateway\tmp`. */
 localDatabaseFileLocation?: string;
 /** The port number that the metrics collector API will run on.
 * This must match the port that the SYSTEM connector runs on to enable network metrics collection. */
@@ -149,15 +127,10 @@ type MetricsCollectorSettingsApiResponseConnectorConfigurationResponse = {
 result?: MetricsCollectorSettingsApiResponse;
 };
 type OdbcCustomDataType = {
-/** The IsBit property in the ODBC custom data type mapping file. */
-bit?: boolean;
-/** The Identifier property in the ODBC custom data type mapping file. */
-id: string;
-/** The NativeDataType property in the ODBC custom data type mapping file. */
-nativeDataType: string;
-/** The QlikDataType property in the ODBC custom data type mapping file. */
-qlikDataType: string;
-/** The Size property in the ODBC custom data type mapping file. */
+/** The IsBit property in the ODBC custom data type mapping file. */bit?: boolean; /** The Identifier property in the ODBC custom data type mapping file. */
+id: string; /** The NativeDataType property in the ODBC custom data type mapping file. */
+nativeDataType: string; /** The QlikDataType property in the ODBC custom data type mapping file. */
+qlikDataType: string; /** The Size property in the ODBC custom data type mapping file. */
 size?: number;
 };
 type OdbcCustomDataTypeResponse = {
@@ -208,14 +181,10 @@ type UpdateMetricsCollectorConnectorConfigurationsRequest = {
 type UpdateMetricsCollectorSettings = {
 /** The base interval in seconds for the metrics collection loop.
 * This defines how frequently the collector checks whether to scrape each connector, not the interval at which each connector is scraped. Must be equal to or less than the lowest individual connector scrape interval. */
-baseScrapeIntervalSeconds: number;
-/** The interval in minutes the metrics collector checks for and deletes old data. */
-dataRetentionCheckIntervalMinutes: number;
-/** Indicates whether the metrics collector is enabled. */
-enabled: boolean;
-/** The number of days to retain local data. */
-localDataRetentionDays: number;
-/** The file location for the local metrics database. If not specified, defaults to `C:\ProgramData\Qlik\Gateway\tmp`. */
+baseScrapeIntervalSeconds: number; /** The interval in minutes the metrics collector checks for and deletes old data. */
+dataRetentionCheckIntervalMinutes: number; /** Indicates whether the metrics collector is enabled. */
+enabled: boolean; /** The number of days to retain local data. */
+localDataRetentionDays: number; /** The file location for the local metrics database. If not specified, defaults to `C:\ProgramData\Qlik\Gateway\tmp`. */
 localDatabaseFileLocation: string;
 /** The port number that the metrics collector API will run on.
 * This must match the port that the SYSTEM connector runs on to enable network metrics collection. */
@@ -254,9 +223,7 @@ type RestartDirectAccessAgentHttpError = {
 * @throws CreateDirectAccessAgentBenchmarkHttpError
 */
 declare function createDirectAccessAgentBenchmark(agentId: string, query: {
-/** Forces the benchmark to start regardless of the state of the agent. Does not override QCS resource limits. Use with caution. */
-force?: boolean;
-/** The volume of data in GB to transfer during the throughput measurement part of the benchmark. */
+/** Forces the benchmark to start regardless of the state of the agent. Does not override QCS resource limits. Use with caution. */force?: boolean; /** The volume of data in GB to transfer during the throughput measurement part of the benchmark. */
 gigaBytesToTransfer?: number;
 }, options?: ApiCallOptions): Promise<CreateDirectAccessAgentBenchmarkHttpResponse>;
 type CreateDirectAccessAgentBenchmarkHttpResponse = {
@@ -313,8 +280,7 @@ type CreateDirectAccessAgentBenchmarkCancelHttpError = {
 * @throws GetDirectAccessAgentConfigurationHttpError
 */
 declare function getDirectAccessAgentConfiguration(agentId: string, query: {
-/** Individual properties within the agent configuration */
-queryProperties?: string[];
+/** Individual properties within the agent configuration */queryProperties?: string[];
 }, options?: ApiCallOptions): Promise<GetDirectAccessAgentConfigurationHttpResponse>;
 type GetDirectAccessAgentConfigurationHttpResponse = {
 data: ConfigurationResponse;