@gooddata/api-client-tiger 11.26.0-alpha.3 → 11.26.0-alpha.4
This diff shows the changes between two publicly released versions of this package, as published to a supported public registry. It is provided for informational purposes only.
- package/esm/__version.d.ts +1 -1
- package/esm/__version.js +1 -1
- package/esm/api-client-tiger.d.ts +224 -171
- package/esm/endpoints/actions/index.d.ts +1 -1
- package/esm/endpoints/actions/index.d.ts.map +1 -1
- package/esm/endpoints/actions/index.js +1 -1
- package/esm/endpoints/actions/index.js.map +1 -1
- package/esm/generated/afm-rest-api/api.d.ts +127 -0
- package/esm/generated/afm-rest-api/api.d.ts.map +1 -1
- package/esm/generated/afm-rest-api/api.js +122 -0
- package/esm/generated/afm-rest-api/api.js.map +1 -1
- package/esm/generated/metadata-json-api/api.d.ts +153 -153
- package/esm/generated/metadata-json-api/api.d.ts.map +1 -1
- package/package.json +5 -5
|
@@ -2445,6 +2445,41 @@ export async function ActionsApiAxiosParamCreator_ResolveLlmEndpoints(workspaceI
|
|
|
2445
2445
|
};
|
|
2446
2446
|
}
|
|
2447
2447
|
// ActionsApi FP - ActionsApiAxiosParamCreator
|
|
2448
|
+
/**
 * Resolves the active LLM configuration for the given workspace. When the ENABLE_LLM_ENDPOINT_REPLACEMENT feature flag is enabled, returns LLM Providers with their associated models. Otherwise, falls back to the legacy LLM Endpoints.
 * @summary Get Active LLM configuration for this workspace
 * @param {string} workspaceId Workspace identifier
 * @param {*} [options] Override http request option.
 * @param {Configuration} [configuration] Optional configuration.
 * @throws {RequiredError}
 */
export async function ActionsApiAxiosParamCreator_ResolveLlmProviders(workspaceId, options = {}, configuration) {
    // 'workspaceId' is mandatory — fail fast when it is null/undefined.
    assertParamExists('resolveLlmProviders', 'workspaceId', workspaceId);
    const localVarPath = `/api/v1/actions/workspaces/{workspaceId}/ai/resolveLlmProviders`
        .replace("{workspaceId}", encodeURIComponent(String(workspaceId)));
    // The URL constructor only accepts absolute URLs, so resolve against a dummy base.
    const requestUrl = new URL(localVarPath, DUMMY_BASE_URL);
    const baseOptions = configuration ? configuration.baseOptions : undefined;
    const requestOptions = { method: 'GET', ...baseOptions, ...options };
    const headerParams = {};
    const queryParams = {};
    setSearchParams(requestUrl, queryParams);
    const baseHeaders = baseOptions?.headers ? baseOptions.headers : {};
    // Later spreads win: per-call headers override base-configuration headers.
    requestOptions.headers = {
        ...headerParams,
        ...baseHeaders,
        ...options.headers,
    };
    return {
        url: toPathString(requestUrl),
        options: requestOptions,
    };
}
// ActionsApi FP - ActionsApiAxiosParamCreator
|
|
2482
|
+
// ActionsApi FP - ActionsApiAxiosParamCreator
|
|
2448
2483
|
/**
|
|
2449
2484
|
* The resource provides execution result\'s metadata as AFM and resultSpec used in execution request and an executionResponse
|
|
2450
2485
|
* @summary Get a single execution result\'s metadata.
|
|
@@ -3568,6 +3603,21 @@ export async function ActionsApi_ResolveLlmEndpoints(axios, basePath, requestPar
|
|
|
3568
3603
|
return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, basePath);
|
|
3569
3604
|
}
|
|
3570
3605
|
// ActionsApi Api FP
|
|
3606
|
+
/**
 * Resolves the active LLM configuration for the given workspace. When the ENABLE_LLM_ENDPOINT_REPLACEMENT feature flag is enabled, returns LLM Providers with their associated models. Otherwise, falls back to the legacy LLM Endpoints.
 * @summary Get Active LLM configuration for this workspace
 * @param {AxiosInstance} axios Axios instance.
 * @param {string} basePath Base path.
 * @param {ActionsApiResolveLlmProvidersRequest} requestParameters Request parameters.
 * @param {*} [options] Override http request option.
 * @param {Configuration} [configuration] Optional configuration.
 * @throws {RequiredError}
 */
export async function ActionsApi_ResolveLlmProviders(axios, basePath, requestParameters, options, configuration) {
    const { workspaceId } = requestParameters;
    // Build the request args, then bind them to the provided axios instance and base path.
    const localVarAxiosArgs = await ActionsApiAxiosParamCreator_ResolveLlmProviders(workspaceId, options || {}, configuration);
    const sendRequest = createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
    return sendRequest(axios, basePath);
}
// ActionsApi Api FP
|
|
3620
|
+
// ActionsApi Api FP
|
|
3571
3621
|
/**
|
|
3572
3622
|
* The resource provides execution result\'s metadata as AFM and resultSpec used in execution request and an executionResponse
|
|
3573
3623
|
* @summary Get a single execution result\'s metadata.
|
|
@@ -4185,6 +4235,17 @@ export class ActionsApi extends BaseAPI {
|
|
|
4185
4235
|
resolveLlmEndpoints(requestParameters, options) {
|
|
4186
4236
|
return ActionsApi_ResolveLlmEndpoints(this.axios, this.basePath, requestParameters, options, this.configuration);
|
|
4187
4237
|
}
|
|
4238
|
+
/**
|
|
4239
|
+
* Resolves the active LLM configuration for the given workspace. When the ENABLE_LLM_ENDPOINT_REPLACEMENT feature flag is enabled, returns LLM Providers with their associated models. Otherwise, falls back to the legacy LLM Endpoints.
|
|
4240
|
+
* @summary Get Active LLM configuration for this workspace
|
|
4241
|
+
* @param {ActionsApiResolveLlmProvidersRequest} requestParameters Request parameters.
|
|
4242
|
+
* @param {*} [options] Override http request option.
|
|
4243
|
+
* @throws {RequiredError}
|
|
4244
|
+
* @memberof ActionsApi
|
|
4245
|
+
*/
|
|
4246
|
+
resolveLlmProviders(requestParameters, options) {
|
|
4247
|
+
return ActionsApi_ResolveLlmProviders(this.axios, this.basePath, requestParameters, options, this.configuration);
|
|
4248
|
+
}
|
|
4188
4249
|
/**
|
|
4189
4250
|
* The resource provides execution result\'s metadata as AFM and resultSpec used in execution request and an executionResponse
|
|
4190
4251
|
* @summary Get a single execution result\'s metadata.
|
|
@@ -6379,6 +6440,41 @@ export async function SmartFunctionsApiAxiosParamCreator_ResolveLlmEndpoints(wor
|
|
|
6379
6440
|
};
|
|
6380
6441
|
}
|
|
6381
6442
|
// SmartFunctionsApi FP - SmartFunctionsApiAxiosParamCreator
|
|
6443
|
+
/**
 * Resolves the active LLM configuration for the given workspace. When the ENABLE_LLM_ENDPOINT_REPLACEMENT feature flag is enabled, returns LLM Providers with their associated models. Otherwise, falls back to the legacy LLM Endpoints.
 * @summary Get Active LLM configuration for this workspace
 * @param {string} workspaceId Workspace identifier
 * @param {*} [options] Override http request option.
 * @param {Configuration} [configuration] Optional configuration.
 * @throws {RequiredError}
 */
export async function SmartFunctionsApiAxiosParamCreator_ResolveLlmProviders(workspaceId, options = {}, configuration) {
    // 'workspaceId' is mandatory — fail fast when it is null/undefined.
    assertParamExists('resolveLlmProviders', 'workspaceId', workspaceId);
    const localVarPath = `/api/v1/actions/workspaces/{workspaceId}/ai/resolveLlmProviders`
        .replace("{workspaceId}", encodeURIComponent(String(workspaceId)));
    // The URL constructor only accepts absolute URLs, so resolve against a dummy base.
    const requestUrl = new URL(localVarPath, DUMMY_BASE_URL);
    const baseOptions = configuration ? configuration.baseOptions : undefined;
    const requestOptions = { method: 'GET', ...baseOptions, ...options };
    const headerParams = {};
    const queryParams = {};
    setSearchParams(requestUrl, queryParams);
    const baseHeaders = baseOptions?.headers ? baseOptions.headers : {};
    // Later spreads win: per-call headers override base-configuration headers.
    requestOptions.headers = {
        ...headerParams,
        ...baseHeaders,
        ...options.headers,
    };
    return {
        url: toPathString(requestUrl),
        options: requestOptions,
    };
}
// SmartFunctionsApi FP - SmartFunctionsApiAxiosParamCreator
|
|
6477
|
+
// SmartFunctionsApi FP - SmartFunctionsApiAxiosParamCreator
|
|
6382
6478
|
/**
|
|
6383
6479
|
* Returns a list of tags for this workspace
|
|
6384
6480
|
* @summary Get Analytics Catalog Tags
|
|
@@ -6972,6 +7068,21 @@ export async function SmartFunctionsApi_ResolveLlmEndpoints(axios, basePath, req
|
|
|
6972
7068
|
return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, basePath);
|
|
6973
7069
|
}
|
|
6974
7070
|
// SmartFunctionsApi Api FP
|
|
7071
|
+
/**
 * Resolves the active LLM configuration for the given workspace. When the ENABLE_LLM_ENDPOINT_REPLACEMENT feature flag is enabled, returns LLM Providers with their associated models. Otherwise, falls back to the legacy LLM Endpoints.
 * @summary Get Active LLM configuration for this workspace
 * @param {AxiosInstance} axios Axios instance.
 * @param {string} basePath Base path.
 * @param {SmartFunctionsApiResolveLlmProvidersRequest} requestParameters Request parameters.
 * @param {*} [options] Override http request option.
 * @param {Configuration} [configuration] Optional configuration.
 * @throws {RequiredError}
 */
export async function SmartFunctionsApi_ResolveLlmProviders(axios, basePath, requestParameters, options, configuration) {
    const { workspaceId } = requestParameters;
    // Build the request args, then bind them to the provided axios instance and base path.
    const localVarAxiosArgs = await SmartFunctionsApiAxiosParamCreator_ResolveLlmProviders(workspaceId, options || {}, configuration);
    const sendRequest = createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
    return sendRequest(axios, basePath);
}
// SmartFunctionsApi Api FP
|
|
7085
|
+
// SmartFunctionsApi Api FP
|
|
6975
7086
|
/**
|
|
6976
7087
|
* Returns a list of tags for this workspace
|
|
6977
7088
|
* @summary Get Analytics Catalog Tags
|
|
@@ -7304,6 +7415,17 @@ export class SmartFunctionsApi extends BaseAPI {
|
|
|
7304
7415
|
resolveLlmEndpoints(requestParameters, options) {
|
|
7305
7416
|
return SmartFunctionsApi_ResolveLlmEndpoints(this.axios, this.basePath, requestParameters, options, this.configuration);
|
|
7306
7417
|
}
|
|
7418
|
+
/**
|
|
7419
|
+
* Resolves the active LLM configuration for the given workspace. When the ENABLE_LLM_ENDPOINT_REPLACEMENT feature flag is enabled, returns LLM Providers with their associated models. Otherwise, falls back to the legacy LLM Endpoints.
|
|
7420
|
+
* @summary Get Active LLM configuration for this workspace
|
|
7421
|
+
* @param {SmartFunctionsApiResolveLlmProvidersRequest} requestParameters Request parameters.
|
|
7422
|
+
* @param {*} [options] Override http request option.
|
|
7423
|
+
* @throws {RequiredError}
|
|
7424
|
+
* @memberof SmartFunctionsApi
|
|
7425
|
+
*/
|
|
7426
|
+
resolveLlmProviders(requestParameters, options) {
|
|
7427
|
+
return SmartFunctionsApi_ResolveLlmProviders(this.axios, this.basePath, requestParameters, options, this.configuration);
|
|
7428
|
+
}
|
|
7307
7429
|
/**
|
|
7308
7430
|
* Returns a list of tags for this workspace
|
|
7309
7431
|
* @summary Get Analytics Catalog Tags
|