@squonk/data-manager-client 1.1.5 → 1.1.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/accounting/accounting.d.ts +1 -1
- package/admin/admin.d.ts +1 -1
- package/application/application.d.ts +1 -1
- package/{custom-instance-e49ba702.d.ts → custom-instance-b01bd8f0.d.ts} +28 -7
- package/dataset/dataset.d.ts +1 -1
- package/exchange-rate/exchange-rate.d.ts +1 -1
- package/file/file.d.ts +1 -1
- package/index.cjs.map +1 -1
- package/index.d.ts +1 -1
- package/index.js.map +1 -1
- package/instance/instance.d.ts +1 -1
- package/job/job.d.ts +1 -1
- package/metadata/metadata.d.ts +1 -1
- package/package.json +1 -1
- package/project/project.d.ts +1 -1
- package/src/data-manager-api.schemas.ts +24 -6
- package/task/task.d.ts +1 -1
- package/type/type.d.ts +1 -1
- package/user/user.d.ts +1 -1
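Most of the hunks below are mechanical fallout from the shared chunk rename (custom-instance-e49ba702.d.ts → custom-instance-b01bd8f0.d.ts, a content-hash change): each generated module re-imports its aliased types from the new chunk path. A minimal sketch of why that rename alone is invisible to downstream code, assuming the usual pattern of importing from the published entry point rather than the chunk file (the specifier and the root re-export of DmError below are assumptions, not shown in this diff):

```ts
// The custom-instance-* chunk is an internal build artifact; application code
// imports from the package entry point, so the e49ba702 → b01bd8f0 hash
// rename requires no source changes for usage like this. `DmError` is assumed
// to be re-exported from the root entry point.
import type { DmError } from "@squonk/data-manager-client";

const describeError = (error: DmError): string => JSON.stringify(error);
```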
package/accounting/accounting.d.ts
CHANGED
@@ -1,5 +1,5 @@
  import { UseQueryOptions, QueryKey, UseQueryResult } from '@tanstack/react-query';
- import {
+ import { bR as customInstance, bL as AccountServerGetNamespaceResponse, bS as ErrorType, bN as DmError, bM as AccountServerGetRegistrationResponse, b2 as VersionGetResponse } from '../custom-instance-b01bd8f0.js';
  import 'axios';

  type SecondParameter<T extends (...args: any) => any> = T extends (config: any, args: infer P) => any ? P : never;

package/admin/admin.d.ts
CHANGED
@@ -1,4 +1,4 @@
- import { A as AdminGetServiceErrorsParams,
+ import { A as AdminGetServiceErrorsParams, bR as customInstance, ba as ServiceErrorsGetResponse, bS as ErrorType, bN as DmError, ah as UserAccountDetail, a0 as UserPatchBodyBody, bK as AdminJobManifestGetResponse, a6 as JobManifestPutBodyBody, bJ as AdminJobManifestLoadPutResponse, a7 as JobManifestLoadPutBodyBody } from '../custom-instance-b01bd8f0.js';
  import * as _tanstack_react_query from '@tanstack/react-query';
  import { UseQueryOptions, QueryKey, UseQueryResult, UseMutationOptions } from '@tanstack/react-query';
  import 'axios';

package/application/application.d.ts
CHANGED
@@ -1,5 +1,5 @@
  import { UseQueryOptions, QueryKey, UseQueryResult } from '@tanstack/react-query';
- import {
+ import { bR as customInstance, bG as ApplicationsGetResponse, bS as ErrorType, bN as DmError, bH as ApplicationGetResponse } from '../custom-instance-b01bd8f0.js';
  import 'axios';

  type SecondParameter<T extends (...args: any) => any> = T extends (config: any, args: infer P) => any ? P : never;

package/{custom-instance-e49ba702.d.ts → custom-instance-b01bd8f0.d.ts}
CHANGED
@@ -892,19 +892,40 @@ type JobReplacements = JobReplacement[];
  interface JobOrderDetail {
  options: string[];
  }
+ /**
+ * The Job command's options. The JSONSchema for the command's options, essentially the **variables/options** block of the Job's JobDefinition.
+
+ */
+ type JobVariablesOptions = {
+ [key: string]: any;
+ };
+ /**
+ * The Job command's outputs. The JSONSchema for the command's inputs, essentially the **variables/outputs** block of the Job's JobDefinition.
+
+ */
+ type JobVariablesOutputs = {
+ [key: string]: any;
+ };
+ /**
+ * The Job command's inputs. The JSONSchema for the command's inputs, essentially the **variables/inputs** block of the Job's JobDefinition.
+
+ */
+ type JobVariablesInputs = {
+ [key: string]: any;
+ };
  interface JobVariables {
  /** The Job command's variable ordering declaration. Contains a list of `options` in an ordered list, the order defines the order of presentation of the Job's variables.
  */
  order?: JobOrderDetail;
- /** The Job command's inputs.
+ /** The Job command's inputs. The JSONSchema for the command's inputs, essentially the **variables/inputs** block of the Job's JobDefinition.
  */
- inputs?:
- /** The Job command's outputs.
+ inputs?: JobVariablesInputs;
+ /** The Job command's outputs. The JSONSchema for the command's inputs, essentially the **variables/outputs** block of the Job's JobDefinition.
  */
- outputs?:
- /** The Job command's options.
+ outputs?: JobVariablesOutputs;
+ /** The Job command's options. The JSONSchema for the command's options, essentially the **variables/options** block of the Job's JobDefinition.
  */
- options?:
+ options?: JobVariablesOptions;
  }
  /**
  * The optional container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)

@@ -1924,4 +1945,4 @@ declare const setBaseUrl: (baseUrl: string) => void;
  declare const customInstance: <TReturn>(config: AxiosRequestConfig, options?: AxiosRequestConfig) => Promise<TReturn>;
  type ErrorType<TError> = AxiosError<TError>;

-
export { QApplicationIdParameter as $, AdminGetServiceErrorsParams as A, QOnlyUndefinedParameter as B, QKeepProjectFilesParameter as C, DeleteUnmanagedFileParams as D, QJobVersionParameter as E, QJobJobParameter as F, GetUserApiLogParams as G, QJobIdParameter as H, QJobCollectionParameter as I, QInstanceArchiveParameter as J, QIncludeHiddenParameter as K, QIncludeDeletedParameter as L, QIncludeAcknowlegedParameter as M, QFromParameter as N, QFileProjectIdParameter as O, PatchInstanceParams as P, QEditorsParameter as Q, QFilePathParameter as R, QFileParameter as S, QExcludePurposeParameter as T, QExcludeRemovalParameter as U, QExcludeDoneParameter as V, QEventPriorOrdinalParameter as W, QEventLimitParameter as X, QDatasetMimeTypeParameter as Y, QDoNotImpersonateParameter as Z, QCurrentParameter as _, GetUserAccountParams as a,
+
export { QApplicationIdParameter as $, AdminGetServiceErrorsParams as A, QOnlyUndefinedParameter as B, QKeepProjectFilesParameter as C, DeleteUnmanagedFileParams as D, QJobVersionParameter as E, QJobJobParameter as F, GetUserApiLogParams as G, QJobIdParameter as H, QJobCollectionParameter as I, QInstanceArchiveParameter as J, QIncludeHiddenParameter as K, QIncludeDeletedParameter as L, QIncludeAcknowlegedParameter as M, QFromParameter as N, QFileProjectIdParameter as O, PatchInstanceParams as P, QEditorsParameter as Q, QFilePathParameter as R, QFileParameter as S, QExcludePurposeParameter as T, QExcludeRemovalParameter as U, QExcludeDoneParameter as V, QEventPriorOrdinalParameter as W, QEventLimitParameter as X, QDatasetMimeTypeParameter as Y, QDoNotImpersonateParameter as Z, QCurrentParameter as _, GetUserAccountParams as a, ApplicationImageVariants as a$, UserPatchBodyBody as a0, UserAccountPatchBodyBody as a1, ProjectPostBodyBody as a2, ProjectPatchBodyBody as a3, ProjectFilePutBodyBody as a4, ExchangeRatePutBodyBody as a5, JobManifestPutBodyBody as a6, JobManifestLoadPutBodyBody as a7, InstancePostBodyBody as a8, FilePostBodyBody as a9, JobVariablesOutputs as aA, JobVariablesInputs as aB, JobVariables as aC, JobSummaryImageType as aD, JobSummary as aE, JobManifestDetail as aF, JobApplication as aG, InstanceSummaryJobImageType as aH, InstanceSummaryPhase as aI, InstanceSummaryApplicationType as aJ, InstanceSummary as aK, FileStat as aL, FilePathFile as aM, JobExchangeRateSummary as aN, ApplicationExchangeRateSummary as aO, ExchangeRateDetail as aP, DatasetVersionSummaryLabels as aQ, DatasetVersionSummaryProcessingStage as aR, DatasetVersionSummary as aS, DatasetVersionProjectFile as aT, DatasetVersionDetailLabels as aU, DatasetVersionDetailProcessingStage as aV, DatasetVersionDetail as aW, DatasetSummary as aX, DatasetDetail as aY, ApplicationSummary as aZ, ApplicationImageVariant as a_, DatasetPutBodyBody as aa, DatasetVersionMetaPostBodyBody as ab, DatasetMetaPostBodyBody as ac, DatasetPostBodyBody as ad, AsAdditionalDataProcessingCharge as ae, UserSummary as af, UserDetail as ag, UserAccountDetail as ah, TypeSummaryFormatterOptionsType as ai, TypeSummaryFormatterOptions as aj, TypeSummary as ak, TaskSummaryProcessingStage as al, TaskSummary as am, TaskStateState as an, TaskState as ao, TaskIdentity as ap, TaskEventLevel as aq, TaskEvent as ar, ServiceErrorSummarySeverity as as, ServiceErrorSummary as at, ProjectFileDetail as au, ProjectDetail as av, JobReplacement as aw, JobReplacements as ax, JobOrderDetail as ay, JobVariablesOptions as az, GetInstancesParams as b, ApiLogDetailMethod as b0, ApiLogDetail as b1, VersionGetResponse as b2, UsersGetResponse as b3, UserApiLogGetResponse as b4, UserAccountGetResponse as b5, TypesGetResponse as b6, TasksGetResponse as b7, TaskGetResponsePurpose as b8, TaskGetResponse as b9, DatasetVersionDeleteResponse as bA, DatasetSchemaGetResponseType as bB, DatasetSchemaGetResponse as bC, DatasetMetaGetResponse as bD, DatasetDigestGetResponse as bE, DatasetPutPostResponse as bF, ApplicationsGetResponse as bG, ApplicationGetResponse as bH, AdminUserPutResponse as bI, AdminJobManifestLoadPutResponse as bJ, AdminJobManifestGetResponse as bK, AccountServerGetNamespaceResponse as bL, AccountServerGetRegistrationResponse as bM, DmError as bN, AXIOS_INSTANCE as bO, setAuthToken as bP, setBaseUrl as bQ, customInstance as bR, ErrorType as bS, ServiceErrorsGetResponse as ba, ProjectsGetResponse as bb, ProjectPostResponse as bc, ProjectGetResponse as bd, 
ProjectDeleteResponse as be, JobsGetResponse as bf, JobGetResponseImageType as bg, JobGetResponse as bh, InstanceTaskPurpose as bi, InstanceTask as bj, InstancesGetResponse as bk, InstanceGetResponseJobImageType as bl, InstanceGetResponsePhase as bm, InstanceGetResponseApplicationType as bn, InstanceGetResponse as bo, GetExchangeRatesResponseId as bp, GetExchangeRatesResponse as bq, GetAllExchangeRatesResponseExchangeRatesItem as br, GetAllExchangeRatesResponse as bs, FilesGetResponse as bt, FilePostResponse as bu, InstancePostResponse as bv, InstanceDryRunPostResponse as bw, InstanceDeleteResponse as bx, DatasetsGetResponse as by, DatasetVersionsGetResponse as bz, GetJobExchangeRatesParams as c, GetAllJobExchangeRatesParams as d, GetJobParams as e, GetJobByVersionParams as f, GetJobsParams as g, GetApplicationExchangeRatesParams as h, GetAllApplicationExchangeRatesParams as i, GetTaskParams as j, GetTasksParams as k, GetFilesParams as l, DeleteDatasetParams as m, GetVersionsParams as n, GetDatasetsParams as o, GetProjectFileWithTokenParams as p, GetProjectFileParams as q, GetProjectsParams as r, QOwnersParameter as s, QUsernameParameter as t, QUntilParameter as u, QTokenParameter as v, QInstanceCallbackContextParameter as w, QProjectNameParameter as x, QProjectIdParameter as y, QLabelsParameter as z };
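The substantive type change in the hunks above: `JobVariables.inputs`, `outputs` and `options` were previously `string` fields holding JSONSchema text (see the old schema source embedded in index.cjs.map below) and are now the new `JobVariablesInputs`/`JobVariablesOutputs`/`JobVariablesOptions` object maps (`{ [key: string]: any }`). A consumer-side sketch, assuming `JobVariables` is re-exported from the package root as the new export list suggests, and assuming the conventional JSONSchema layout with a `properties` map (neither is guaranteed by this diff):

```ts
import type { JobVariables } from "@squonk/data-manager-client";

// In 1.1.6 `options`/`inputs`/`outputs` are plain objects, so the JSONSchema
// can be inspected directly without parsing a string first.
const optionNames = (variables?: JobVariables): string[] => {
  // `properties` is where a JSONSchema object usually lists its fields; this
  // layout is an assumption, not something the generated types enforce.
  const properties = variables?.options?.["properties"] ?? {};
  return Object.keys(properties);
};

// The declared presentation order, if the Job provides one.
const optionOrder = (variables?: JobVariables): string[] =>
  variables?.order?.options ?? [];
```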
package/dataset/dataset.d.ts
CHANGED
@@ -1,4 +1,4 @@
- import { aa as DatasetPutBodyBody,
+ import { aa as DatasetPutBodyBody, bR as customInstance, bF as DatasetPutPostResponse, bS as ErrorType, bN as DmError, ad as DatasetPostBodyBody, o as GetDatasetsParams, by as DatasetsGetResponse, n as GetVersionsParams, aY as DatasetDetail, m as DeleteDatasetParams, ap as TaskIdentity, bE as DatasetDigestGetResponse, bC as DatasetSchemaGetResponse } from '../custom-instance-b01bd8f0.js';
  import * as _tanstack_react_query from '@tanstack/react-query';
  import { UseMutationOptions, UseQueryOptions, QueryKey, UseQueryResult } from '@tanstack/react-query';
  import 'axios';

package/exchange-rate/exchange-rate.d.ts
CHANGED
@@ -1,6 +1,6 @@
  import * as _tanstack_react_query from '@tanstack/react-query';
  import { UseQueryOptions, QueryKey, UseQueryResult, UseMutationOptions } from '@tanstack/react-query';
- import { i as GetAllApplicationExchangeRatesParams,
+ import { i as GetAllApplicationExchangeRatesParams, bR as customInstance, bs as GetAllExchangeRatesResponse, bS as ErrorType, bN as DmError, a5 as ExchangeRatePutBodyBody, h as GetApplicationExchangeRatesParams, bq as GetExchangeRatesResponse, d as GetAllJobExchangeRatesParams, c as GetJobExchangeRatesParams } from '../custom-instance-b01bd8f0.js';
  import 'axios';

  type SecondParameter<T extends (...args: any) => any> = T extends (config: any, args: infer P) => any ? P : never;

package/file/file.d.ts
CHANGED
@@ -1,6 +1,6 @@
  import * as _tanstack_react_query from '@tanstack/react-query';
  import { UseQueryOptions, QueryKey, UseQueryResult, UseMutationOptions } from '@tanstack/react-query';
- import { l as GetFilesParams,
+ import { l as GetFilesParams, bR as customInstance, bt as FilesGetResponse, bS as ErrorType, bN as DmError, a9 as FilePostBodyBody, bu as FilePostResponse, D as DeleteUnmanagedFileParams } from '../custom-instance-b01bd8f0.js';
  import 'axios';

  type SecondParameter<T extends (...args: any) => any> = T extends (config: any, args: infer P) => any ? P : never;

package/index.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/data-manager-api.schemas.ts"],"names":[],"mappings":";;;;;;;;AAquBO,IAAM,kCAAkC;AAAA,EAC7C,QAAQ;AACV;AA6CO,IAAM,6BAA6B;AAAA,EACxC,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AACR;AA2CO,IAAM,iBAAiB;AAAA,EAC5B,SAAS;AAAA,EACT,SAAS;AAAA,EACT,OAAO;AAAA,EACP,SAAS;AAAA,EACT,SAAS;AACX;AA0BO,IAAM,iBAAiB;AAAA,EAC5B,UAAU;AAAA,EACV,OAAO;AAAA,EACP,SAAS;AAAA,EACT,MAAM;AAAA,EACN,OAAO;AACT;AAqBO,IAAM,8BAA8B;AAAA,EACzC,UAAU;AAAA,EACV,OAAO;AAAA,EACP,SAAS;AACX;AAqHO,IAAM,sBAAsB;AAAA,EACjC,QAAQ;AAAA,EACR,UAAU;AACZ;AAkGO,IAAM,8BAA8B;AAAA,EACzC,QAAQ;AAAA,EACR,UAAU;AACZ;AAWO,IAAM,uBAAuB;AAAA,EAClC,WAAW;AAAA,EACX,oBAAoB;AAAA,EACpB,QAAQ;AAAA,EACR,oBAAoB;AAAA,EACpB,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AACX;AAUO,IAAM,iCAAiC;AAAA,EAC5C,aAAa;AAAA,EACb,KAAK;AACP;AAyJO,IAAM,uCAAuC;AAAA,EAClD,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AACR;AAsEO,IAAM,sCAAsC;AAAA,EACjD,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AACR;AAgHO,IAAM,qBAAqB;AAAA,EAChC,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,MAAM;AAAA,EACN,KAAK;AACP;AAqEO,IAAM,yBAAyB;AAAA,EACpC,SAAS;AAAA,EACT,MAAM;AAAA,EACN,UAAU;AAAA,EACV,SAAS;AACX;AAsEO,IAAM,0BAA0B;AAAA,EACrC,QAAQ;AAAA,EACR,UAAU;AACZ;AA+EO,IAAM,sBAAsB;AAAA,EACjC,QAAQ;AAAA,EACR,QAAQ;AACV;AAmBO,IAAM,kCAAkC;AAAA,EAC7C,QAAQ;AAAA,EACR,UAAU;AACZ;AAWO,IAAM,2BAA2B;AAAA,EACtC,WAAW;AAAA,EACX,oBAAoB;AAAA,EACpB,QAAQ;AAAA,EACR,oBAAoB;AAAA,EACpB,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AACX;AAUO,IAAM,qCAAqC;AAAA,EAChD,aAAa;AAAA,EACb,KAAK;AACP;AA2LO,IAAM,+BAA+B;AAAA,EAC1C,QAAQ;AACV","sourcesContent":["/**\n * Generated by orval v6.15.0 🍺\n * Do not edit manually.\n * Dataset Manager API\n * The Dataset Manager API service.\n\nA service that allows *registered* users to make **Datasets** and associated **Metadata** available to **Applications** and **Jobs** using **Projects** and **Files**.\n\n * OpenAPI spec version: 1.0\n */\nexport type AdminGetServiceErrorsParams = {\n /**\n * Set to include acknowledged items\n\n */\n include_acknowleged?: QIncludeAcknowlegedParameter;\n};\n\nexport type GetUserApiLogParams = {\n /**\n * A from (inclusive) date-time. If provided no API calls prior to this will be returned. UTC is assumed if no timezone is provided\n\n */\n from?: QFromParameter;\n /**\n * An until (exclusive) date-time. If provided only API calls made before this will be returned. UTC is assumed if no timezone is provided\n\n */\n until?: QUntilParameter;\n};\n\nexport type GetUserAccountParams = {\n /**\n * Set, if you're an admin, to call the endpoint without impersonation\n\n */\n do_not_impersonate?: QDoNotImpersonateParameter;\n};\n\nexport type PatchInstanceParams = {\n /**\n * True to archive the instance\n */\n archive?: QInstanceArchiveParameter;\n};\n\nexport type GetInstancesParams = {\n /**\n * A Project identity\n */\n project_id?: QProjectIdParameter;\n};\n\nexport type GetJobExchangeRatesParams = {\n /**\n * Set to get current\n\n */\n current?: QCurrentParameter;\n};\n\nexport type GetAllJobExchangeRatesParams = {\n /**\n * Only return records where the exchange rate is undefined\n\n */\n only_undefined?: QOnlyUndefinedParameter;\n};\n\nexport type GetJobParams = {\n /**\n * A Project identity\n */\n project_id?: QProjectIdParameter;\n};\n\nexport type GetJobByVersionParams = {\n /**\n * The Collection for a Job, i.e. \"im-test\"\n\n */\n collection: QJobCollectionParameter;\n /**\n * The Job, i.e. 
\"nop\"\n\n */\n job: QJobJobParameter;\n /**\n * The version of a Job, i.e. \"1.0.0\"\n\n */\n version: QJobVersionParameter;\n /**\n * A Project identity\n */\n project_id?: QProjectIdParameter;\n};\n\nexport type GetJobsParams = {\n /**\n * A Project identity\n */\n project_id?: QProjectIdParameter;\n};\n\nexport type GetApplicationExchangeRatesParams = {\n /**\n * Set to get current\n\n */\n current?: QCurrentParameter;\n};\n\nexport type GetAllApplicationExchangeRatesParams = {\n /**\n * Only return records where the exchange rate is undefined\n\n */\n only_undefined?: QOnlyUndefinedParameter;\n};\n\nexport type GetTaskParams = {\n /**\n * Maximum number of events to return. If provided, can be 1 or more.\n\n */\n event_limit?: QEventLimitParameter;\n /**\n * The ordinal of a previously received event. If set, only events subsequent to the ordinal provided will be returned. Providing a value of 0 will result in retrieving the first and subsequent events.\n\n */\n event_prior_ordinal?: QEventPriorOrdinalParameter;\n};\n\nexport type GetTasksParams = {\n /**\n * Set true if you want to exclude 'done' tasks, i.e. just see those that are still running.\n\n */\n exclude_done?: QExcludeDoneParameter;\n /**\n * Set true if you want to exclude Tasks related to object removal.\n\n */\n exclude_removal?: QExcludeRemovalParameter;\n /**\n * Set to a dot-separated string of purpose enumerations, i.e. `DATASET`, `FILE`, `INSTANCE`, or `PROJECT`. To exclude file and dataset tasks set this field to `FILE.DATASET`\n\n */\n exclude_purpose?: QExcludePurposeParameter;\n /**\n * A Project identity\n */\n project_id?: QProjectIdParameter;\n /**\n * An instance callback context string\n */\n instance_callback_context?: QInstanceCallbackContextParameter;\n};\n\nexport type DeleteUnmanagedFileParams = {\n /**\n * A project file.\n\n */\n file: QFileParameter;\n /**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\n path?: QFilePathParameter;\n /**\n * The Project identity\n */\n project_id: QFileProjectIdParameter;\n};\n\nexport type GetFilesParams = {\n /**\n * The Project identity\n */\n project_id: QFileProjectIdParameter;\n /**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\n path?: QFilePathParameter;\n /**\n * Whether to include hidden files and directories\n */\n include_hidden?: QIncludeHiddenParameter;\n};\n\nexport type DeleteDatasetParams = {\n /**\n * Whether to convert Project managed file instances to unmanaged files\n\n */\n keep_project_files?: QKeepProjectFilesParameter;\n};\n\nexport type GetVersionsParams = {\n /**\n * Whether to include records that are deleted\n */\n include_deleted?: QIncludeDeletedParameter;\n};\n\nexport type GetDatasetsParams = {\n /**\n * Whether to include records that are deleted\n */\n include_deleted?: QIncludeDeletedParameter;\n /**\n * Filter the datasets by username\n\n */\n username?: QUsernameParameter;\n /**\n * Filter the datasets by the supplied mime_type.\n\n */\n dataset_mime_type?: QDatasetMimeTypeParameter;\n /**\n * Filter the datasets by a comma separated list of owners\n\n */\n owners?: QOwnersParameter;\n /**\n * Filter the datasets by a comma separated list of editors\n\n */\n editors?: QEditorsParameter;\n /**\n * JSON string containing a list of label/value pairs for the datasets to be filtered by. If the value is set to null, then only the label is matched. 
If both the label and value are set, then both are matched. e.g. '{\"label1\": null, \"label2\": \"value2\"}'\n\n */\n labels?: QLabelsParameter;\n};\n\nexport type GetProjectFileWithTokenParams = {\n /**\n * A token\n */\n token?: QTokenParameter;\n /**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\n path?: QFilePathParameter;\n /**\n * A project file.\n\n */\n file: QFileParameter;\n};\n\nexport type GetProjectFileParams = {\n /**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\n path?: QFilePathParameter;\n /**\n * A project file.\n\n */\n file: QFileParameter;\n};\n\nexport type GetProjectsParams = {\n /**\n * A Project name\n */\n project_name?: QProjectNameParameter;\n};\n\n/**\n * Filter the datasets by a comma separated list of editors\n\n */\nexport type QEditorsParameter = string;\n\n/**\n * Filter the datasets by a comma separated list of owners\n\n */\nexport type QOwnersParameter = string;\n\n/**\n * Filter the datasets by username\n\n */\nexport type QUsernameParameter = string;\n\n/**\n * An until (exclusive) date-time. If provided only API calls made before this will be returned. UTC is assumed if no timezone is provided\n\n */\nexport type QUntilParameter = string;\n\n/**\n * A token\n */\nexport type QTokenParameter = string;\n\n/**\n * An instance callback context string\n */\nexport type QInstanceCallbackContextParameter = string;\n\n/**\n * A Project name\n */\nexport type QProjectNameParameter = string;\n\n/**\n * A Project identity\n */\nexport type QProjectIdParameter = string;\n\n/**\n * JSON string containing a list of label/value pairs for the datasets to be filtered by. If the value is set to null, then only the label is matched. If both the label and value are set, then both are matched. e.g. '{\"label1\": null, \"label2\": \"value2\"}'\n\n */\nexport type QLabelsParameter = string;\n\n/**\n * Only return records where the exchange rate is undefined\n\n */\nexport type QOnlyUndefinedParameter = boolean;\n\n/**\n * Whether to convert Project managed file instances to unmanaged files\n\n */\nexport type QKeepProjectFilesParameter = boolean;\n\n/**\n * The version of a Job, i.e. \"1.0.0\"\n\n */\nexport type QJobVersionParameter = string;\n\n/**\n * The Job, i.e. \"nop\"\n\n */\nexport type QJobJobParameter = string;\n\n/**\n * A Job identity\n\n */\nexport type QJobIdParameter = number;\n\n/**\n * The Collection for a Job, i.e. \"im-test\"\n\n */\nexport type QJobCollectionParameter = string;\n\n/**\n * True to archive the instance\n */\nexport type QInstanceArchiveParameter = boolean;\n\n/**\n * Whether to include hidden files and directories\n */\nexport type QIncludeHiddenParameter = boolean;\n\n/**\n * Whether to include records that are deleted\n */\nexport type QIncludeDeletedParameter = boolean;\n\n/**\n * Set to include acknowledged items\n\n */\nexport type QIncludeAcknowlegedParameter = boolean;\n\n/**\n * A from (inclusive) date-time. If provided no API calls prior to this will be returned. UTC is assumed if no timezone is provided\n\n */\nexport type QFromParameter = string;\n\n/**\n * The Project identity\n */\nexport type QFileProjectIdParameter = string;\n\n/**\n * A project path. 
If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\nexport type QFilePathParameter = string;\n\n/**\n * A project file.\n\n */\nexport type QFileParameter = string;\n\n/**\n * Set to a dot-separated string of purpose enumerations, i.e. `DATASET`, `FILE`, `INSTANCE`, or `PROJECT`. To exclude file and dataset tasks set this field to `FILE.DATASET`\n\n */\nexport type QExcludePurposeParameter = string;\n\n/**\n * Set true if you want to exclude Tasks related to object removal.\n\n */\nexport type QExcludeRemovalParameter = boolean;\n\n/**\n * Set true if you want to exclude 'done' tasks, i.e. just see those that are still running.\n\n */\nexport type QExcludeDoneParameter = boolean;\n\n/**\n * The ordinal of a previously received event. If set, only events subsequent to the ordinal provided will be returned. Providing a value of 0 will result in retrieving the first and subsequent events.\n\n */\nexport type QEventPriorOrdinalParameter = number;\n\n/**\n * Maximum number of events to return. If provided, can be 1 or more.\n\n */\nexport type QEventLimitParameter = number;\n\n/**\n * Filter the datasets by the supplied mime_type.\n\n */\nexport type QDatasetMimeTypeParameter = string;\n\n/**\n * Set, if you're an admin, to call the endpoint without impersonation\n\n */\nexport type QDoNotImpersonateParameter = boolean;\n\n/**\n * Set to get current\n\n */\nexport type QCurrentParameter = boolean;\n\n/**\n * An Application identity\n\n */\nexport type QApplicationIdParameter = string;\n\nexport type UserPatchBodyBody = {\n /** If set to a message the user account is suspended, with the user receiving this message when they try and use the API. A suspended user account can be restored by setting the message to `/restore`\n */\n suspend_message?: string;\n};\n\nexport type UserAccountPatchBodyBody = {\n /** If set the user account becomes private, if provided but false the user account becomes public. Public Users show up in user searches\n */\n private?: boolean;\n /** For **admin** accounts, if set the user account is able to read anything, i.e. `GET` API calls (i.e. endpoints that do not change the Data Manager state) behave as though the caller is acting as *everyone*. An **admin** user would set ths parameter in order to browse the system, and then switch to `impersonate` mode in order to change things as the chosen user\n */\n become_admin?: boolean;\n /** For **admin** accounts, if set API calls behave as though the caller is the user being impersonated. To stop impersonating set this to an empty string. To set impersonation to anything other than an empty string you must also set `become_admin`\n */\n impersonate?: string;\n /** For **admin** accounts, if this is set the account for the user being impersonated is patched, rather then the user's own account. To use this you must have a value for `impersonate`\n */\n use_impersonation?: boolean;\n};\n\nexport type ProjectPostBodyBody = {\n name: string;\n /** Whether the project is private. 
You may not be permitted to make the project private, that will depend on the project product you're using to create the project\n */\n private?: boolean;\n /** The Data Manager *Tier Product ID* you're using to create the Project\n */\n tier_product_id: string;\n};\n\nexport type ProjectPatchBodyBody = {\n private?: boolean;\n /** The new name of the ptojct\n */\n name?: string;\n};\n\nexport type ProjectFilePutBodyBody = {\n file: Blob;\n /** An alternative filename to use for the uploaded File\n */\n as_filename?: string;\n /** The Project path of the file.\n */\n path?: string;\n};\n\nexport type ExchangeRatePutBodyBody = {\n /** A decimal value used as the new Exchange Rate. Application _raw_ **costs** are multiplied by this value to covert costs to **coins**. A string is used to avoid rounding errors. Internally the value is treated as a Python Decimal.\n */\n rate: string;\n /** A brief comment relating to the new rate\n */\n comment?: string;\n};\n\nexport type JobManifestPutBodyBody = {\n /** The URL of the Job Manifest */\n url: string;\n /** Optional URL header values (a JSON string) */\n header?: string;\n /** Optional URL parameter values (a JSON string) */\n params?: string;\n};\n\nexport type JobManifestLoadPutBodyBody = {\n /** Set to remove all pre-existing Job Definitions that are not present in the existing manifests after the load is complete.\n\nJobs in the collection `im-test` are not removed */\n purge?: boolean;\n};\n\nexport type InstancePostBodyBody = {\n /** A supported application. Applications instances are managed using pre-deployed Kubernetes **Operators**. The application ID is a combination of the operator _plural_ and _group_.\n\nApplication IDs currently supported include `datamanagerjobs.squonk.it` and `jupyternotebooks.squonk.it`\n */\n application_id: string;\n /** A supported application version to launch.\n\nThis property is **Deprecated**. It is currently ignored will be removed in a future release\n */\n application_version?: string;\n /** The project to attach\n */\n project_id: string;\n /** The name to use for the instance\n */\n as_name: string;\n /** A URL the DM will use to PUT job progress messages as the requested instance runs. Used, at the moment, for Job execution\n */\n callback_url?: string;\n /** Used in conjunction with the `callback_url` any value provided here will be passed back in the message payload that's delivered to the callback URL. It can be used by the recipient to provide a context that's meaningful\n */\n callback_context?: string;\n /** If set a callback token will be provided in the response. The token allows files to be downloaded from the instance project and expires after a pre-configured amount of time after the instance is complete or if the user revokes the token.\n\nCaution should be taken using this feature. A 3rd party can access the Project's files without authentication, they just need the token and the project identity.\n\nTokens should therefore be revoked when they're no longer required\n */\n generate_callback_token?: boolean;\n /** An optional 22-character **sortuuid** callback token that is supplied by the remote service. 
If not provided the user can use `generate_callback_token` to have one generated and returned in the response.\n\nSee the Python module's `shortuuid.get_alphabet()` for the full list of permitted characters\n */\n callback_token?: string;\n /** A debug value that may be used by the instance.\n\nFor Data Manager **Job** applications setting this to anything other zero ('0') prevents the Job's Pod from being deleted automatically, allowing a developer to inspect the Pod's log for example.\n\nThe behaviour of **Application** instances using this property is undefined. It will depend on whether the application **CRD** handles the Data Manager debug field.\n */\n debug?: string;\n /** The instance specification. A JSON string that's application-specific and controls the application's behaviour.\n\nWhen launching a Data Manager **Job** Application you must identify the Job using the properties `collection`, `job` and `version`, e.g. `{\"collection\":\"im-test\",\"job\":\"nop\",\"version\":\"1.0.0\"}`\n\nJobs that offer commands will often advertise a series of **inputs** and **options** where the values can be provided using a **variables** map in the specification. Something like `\"variables\":{\"x\":7}`.\n\nJobs start in a Job-specific **working directory** but the starting directory for any Job can be adjusted by defining a `sub_path` to the root specification. For example, if you want the Job to start in the path `foo/bar` (inside the Job's built-in working directory) you can add `\"sub_path\":\"foo/bar\"` to the specification. You can only use a sub-path for a Job if the Job defines a working directory and `sub-path` cannot begin or end with a path separator (`/`).\n\nApplications also support `variables`. The Jupyter notebook application supports the definition of the notebook `\"image\"`, `\"cpu\"` and `\"memory\"`. A full Jupyter notebook specification might be `{\"variables\":{\"image\":\"jupyter/tensorflow-notebook:tensorflow-2.9.1\",\"cpu\":2,\"memory\":\"4Gi\"}}`, where `memory` is limited to `Gi` as a suffix.\n */\n specification?: string;\n};\n\nexport type FilePostBodyBody = {\n /** The Dataset UUID for the File that you intend to attach\n */\n dataset_id: string;\n /** The Dataset version to attach\n */\n dataset_version: number;\n /** The Project UUID you're attaching to\n */\n project_id: string;\n /** The desired Dataset file type (a MIME type). Whether or not the chosen fileType is supported will depend on the Dataset\n */\n as_type: string;\n /** A path within the Project to add the File, default is the project root ('/'), the mount-point within the application container. Paths must begin '/'\n */\n path?: string;\n /** Whether to compress the Dataset File as it's attached. Compression is achieved using gzip, resulting in a File ending `.gz`. By default the file will be compressed\n */\n compress?: boolean;\n /** Whether the Dataset File can be modified while in the Project. By default the File cannot be modified\n */\n immutable?: boolean;\n};\n\nexport type DatasetPutBodyBody = {\n /** The MIME type of the Dataset\n */\n dataset_type: string;\n /** Extra variables (text) presented to the Dataset format-support container, which occurs during the upload and database processing stage. The content of the text is Dataset type specific.\n */\n format_extra_variables?: string;\n /** If set the post-format database load action is skipped. This means the dataset is uploaded but its molecules are not entered into the Data Manager database. 
This can save significant time if you're just experimenting with the dataset and do not need the molecules in the database\n */\n skip_molecule_load?: boolean;\n /** The Project the file belongs to\n */\n project_id: string;\n /** The Project path of the file.\n */\n path: string;\n /** The file name of the file in the Project path to load as a new Dataset.\n */\n file_name: string;\n /** If provided the File becomes a new version of the Dataset named. If not provided this File becomes the first version of a new Dataset, whose ID is returned to you on success.\n */\n dataset_id?: string;\n /** The Organisational Unit you want the Dataset to belong to. If not supplied the Project Unit is used\n */\n unit_id?: string;\n};\n\nexport type DatasetVersionMetaPostBodyBody = {\n /** JSON string containing a list of parameter changes to the metadata. Only the description is currently allowed.\n */\n meta_properties?: string;\n /** JSON string containing a list of annotations. The format of the labels should match either the Fields Descriptor or Service Execution annotation formats described in the data-manager-metadata library.\n */\n annotations?: string;\n};\n\nexport type DatasetMetaPostBodyBody = {\n /** JSON string containing a list of parameter changes to the metadata. Only the description is currently allowed.\n */\n meta_properties?: string;\n /** JSON string containing a list of labels. The format of the labels should match the label annotation format described in the data-manager-metadata library.\n */\n labels?: string;\n};\n\nexport type DatasetPostBodyBody = {\n dataset_file: Blob;\n /** The MIME type of the Dataset. Values like `chemical/x-mdl-sdfile`, `chemical/x-mdl-molfile`, and `chemical/x-pdb` are permitted. See the **\\/type** endpoint for a full list of types.\n */\n dataset_type: string;\n /** Extra variables (text) presented to the Dataset format-support container, which occurs during the upload and database processing stage. The content of the text is Dataset type specific.\n */\n format_extra_variables?: string;\n /** If set the post-format database load action is skipped. This means the dataset is uploaded but its molecules are not entered into the Data Manager database. This can save significant time if you're just experimenting with the dataset and do not need the molecules in the database\n */\n skip_molecule_load?: boolean;\n /** An optional new filename to use for the uploaded Dataset. The Dataset will be stored using this name.\n */\n as_filename?: string;\n /** If provided the Dataset becomes a new version of the Dataset named. 
If not provided this Dataset becomes the first version of a new Dataset, whose ID is returned to you on success.\n */\n dataset_id?: string;\n /** The Organisational Unit you want the Dataset to belong to\n */\n unit_id: string;\n};\n\nexport interface AsAdditionalDataProcessingCharge {\n cost: string;\n cost_to_coins_er: string;\n cost_scale_factor: string;\n instance_id: string;\n instance_name: string;\n started: string;\n stopped?: string;\n run_time?: string;\n error_message?: string;\n job_collection?: string;\n job_job?: string;\n job_version?: string;\n collateral_pod_count?: number;\n collateral_cpu_hours?: string;\n}\n\nexport interface UserSummary {\n /** The user's preferred username\n */\n username: string;\n}\n\nexport interface UserDetail {\n /** The user's preferred username\n */\n username: string;\n /** The date and time the user was first seen (an ISO-8601 formatted string in UTC)\n */\n first_seen?: string;\n /** Set if the user's account is marked as private. Private accounts do not show up against general queries.\n */\n private: boolean;\n /** True if the account is suspended\n */\n suspended?: boolean;\n /** If the account is suspended this typically displays a reason for suspension\n */\n suspension_message?: string;\n /** For admin accounts, whether the user is acting in an administrative capacity, i.e. acting as everyone\n */\n become_admin?: boolean;\n /** For admin accounts, whether the user is impersonating another user\n */\n impersonate?: string;\n}\n\nexport interface UserAccountDetail {\n user: UserDetail;\n /** Whether the caller has admin privilege */\n caller_has_admin_privilege: boolean;\n}\n\n/**\n * The Schema type (an object)\n\n */\nexport type TypeSummaryFormatterOptionsType =\n (typeof TypeSummaryFormatterOptionsType)[keyof typeof TypeSummaryFormatterOptionsType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TypeSummaryFormatterOptionsType = {\n object: \"object\",\n} as const;\n\n/**\n * If present, contains the formatter_options that can be entered in the format_extra_variables field in the POST /dataset api.\n\n */\nexport type TypeSummaryFormatterOptions = {\n /** The title of the Formatter object\n */\n title: string;\n /** The Schema type (an object)\n */\n type: TypeSummaryFormatterOptionsType;\n /** Required properties\n */\n required: string[];\n [key: string]: any;\n};\n\nexport interface TypeSummary {\n /** The File Type MIME\n */\n mime: string;\n /** The file's supported file extensions. Each type is limited to a limited number of extensions. For example, SDF files must have the extension `.sdf` (or `.sdf.gz`).\n */\n file_extensions: string[];\n /** The file's type's format-support container image (if set). Types without a format support image cannot be uploaded, but they might be available for use as destination type when a Dataset is added to a Project.\n */\n formatter_image?: string;\n /** True if Datasets uploaded using this type's support loading of data into the Data Manager data-base.\n */\n formatter_supports_db_load?: boolean;\n /** If present, contains the formatter_options that can be entered in the format_extra_variables field in the POST /dataset api.\n */\n formatter_options?: TypeSummaryFormatterOptions;\n}\n\n/**\n * The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). 
A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type TaskSummaryProcessingStage =\n (typeof TaskSummaryProcessingStage)[keyof typeof TaskSummaryProcessingStage];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskSummaryProcessingStage = {\n COPYING: \"COPYING\",\n FAILED: \"FAILED\",\n FORMATTING: \"FORMATTING\",\n LOADING: \"LOADING\",\n DELETING: \"DELETING\",\n DONE: \"DONE\",\n} as const;\n\nexport interface TaskSummary {\n /** The Task UUID\n */\n id: string;\n /** If a container image is launched by the task the image name is available here\n */\n image?: string;\n /** The date and time the task was created\n */\n created: string;\n /** True if the task has run to completion. If the task finished successfully the `exit_code` will be zero.\n */\n done: boolean;\n /** True if the Task relates to an object removal, i.e. a DELETE\n */\n removal?: boolean;\n /** Present when `done` and zero if the task finished successfully.\n */\n exit_code?: number;\n /** The purpose for the Task. Tasks are responsible for _uploading_ a Dataset, _attaching_ a Dataset as a File to a project or launching an Applications _instance_.\n */\n purpose: string;\n /** The related object ID for the Task purpose. If the purpose relates to a **Dataset** upload the ID will be a Dataset ID.\n */\n purpose_id: string;\n /** The related object version for the Task purpose. This field will only be set if the **purpose** is `DATASET`.\n */\n purpose_version?: number;\n /** The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage?: TaskSummaryProcessingStage;\n}\n\n/**\n * The task state. The typical state sequence is `PENDING`, then `STARTED` and finally `SUCCESS`\n\n */\nexport type TaskStateState =\n (typeof TaskStateState)[keyof typeof TaskStateState];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskStateState = {\n PENDING: \"PENDING\",\n STARTED: \"STARTED\",\n RETRY: \"RETRY\",\n SUCCESS: \"SUCCESS\",\n FAILURE: \"FAILURE\",\n} as const;\n\nexport interface TaskState {\n /** The task state. The typical state sequence is `PENDING`, then `STARTED` and finally `SUCCESS`\n */\n state: TaskStateState;\n /** A short message accompanying the state, generally only found when the state is `FAILURE`\n */\n message?: string;\n /** The date and time of the state change\n */\n time: string;\n}\n\nexport interface TaskIdentity {\n task_id: string;\n}\n\n/**\n * The level of the message, a typical logging framework value\n\n */\nexport type TaskEventLevel =\n (typeof TaskEventLevel)[keyof typeof TaskEventLevel];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskEventLevel = {\n CRITICAL: \"CRITICAL\",\n ERROR: \"ERROR\",\n WARNING: \"WARNING\",\n INFO: \"INFO\",\n DEBUG: \"DEBUG\",\n} as const;\n\nexport interface TaskEvent {\n /** The event sequence number. 
The first event is always '1'.\n */\n ordinal: number;\n /** A short message.\n */\n message: string;\n /** The level of the message, a typical logging framework value\n */\n level: TaskEventLevel;\n /** The date and time the event was generated\n */\n time: string;\n}\n\nexport type ServiceErrorSummarySeverity =\n (typeof ServiceErrorSummarySeverity)[keyof typeof ServiceErrorSummarySeverity];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const ServiceErrorSummarySeverity = {\n CRITICAL: \"CRITICAL\",\n ERROR: \"ERROR\",\n WARNING: \"WARNING\",\n} as const;\n\nexport interface ServiceErrorSummary {\n id: number;\n created: string;\n summary: string;\n severity: ServiceErrorSummarySeverity;\n hostname: string;\n error_code?: number;\n stack_trace: string;\n acknowledged: boolean;\n acknowledged_at?: string;\n acknowledging_user?: string;\n}\n\nexport interface ProjectFileDetail {\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The ProjectFile's Dataset origin\n */\n dataset_id?: string;\n /** The ProjectFile's Dataset origin version\n */\n dataset_version?: number;\n /** The ProjectFile's unique ID\n */\n file_id: string;\n /** The ProjectFile's filename within the Project\n */\n file_name: string;\n /** The ProjectFile's path within the Project volume\n */\n file_path: string;\n /** True if the ProjectFile cannot be modified while in the Project\n */\n immutable: boolean;\n /** The owner of the ProjectFile. This is the user that added the Dataset (as this file) to the Project\n */\n owner: string;\n /** The Project the ProjectFile belongs to\n */\n project_id?: string;\n /** The ProjectFile MIME type\n */\n mime_type: string;\n}\n\nexport interface ProjectDetail {\n /** The project name\n */\n name: string;\n /** The project unique reference\n */\n project_id: string;\n created: string;\n /** The Account Server Product the Project belongs to\n */\n product_id?: string;\n /** The Account Server Unit the Project Product belongs to\n */\n unit_id?: string;\n /** The project (owner) creator\n */\n owner: string;\n /** True if the project is private. Private projects are only visible to the owner and its editors.\n */\n private: boolean;\n /** An editor (user_id) of the project */\n editors: string[];\n /** An observer (user_id) of the project */\n observers: string[];\n /** The approximate size of all the files in the Project volume. This is updated regularly throughout the day and its current size may differ from what is reported here. The smallest billable unit is 1GiB (1,073,741,824 bytes). Therefore a project that contains 32KiB of files is recorded as 1GiB in size */\n size: number;\n /** A list of managed files in the Project\n */\n files?: ProjectFileDetail[];\n}\n\nexport interface JobReplacement {\n collection: string;\n job: string;\n}\n\n/**\n * A list of Jobs, collection and job that are either replacing or being replaced\n\n */\nexport type JobReplacements = JobReplacement[];\n\nexport interface JobOrderDetail {\n options: string[];\n}\n\nexport interface JobVariables {\n /** The Job command's variable ordering declaration. Contains a list of `options` in an ordered list, the order defines the order of presentation of the Job's variables.\n */\n order?: JobOrderDetail;\n /** The Job command's inputs. A string that represents a JSONSchema for the command's inputs, essentially the **variables/inputs** block of the Job's JobDefinition.\n */\n inputs?: string;\n /** The Job command's outputs. 
A string that represents a JSONSchema for the command's inputs, essentially the **variables/outputs** block of the Job's JobDefinition.\n */\n outputs?: string;\n /** The Job command's options. A string that represents a JSONSchema for the command's options, essentially the **variables/options** block of the Job's JobDefinition.\n */\n options?: string;\n}\n\n/**\n * The optional container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type JobSummaryImageType =\n (typeof JobSummaryImageType)[keyof typeof JobSummaryImageType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const JobSummaryImageType = {\n SIMPLE: \"SIMPLE\",\n NEXTFLOW: \"NEXTFLOW\",\n} as const;\n\nexport interface JobSummary {\n /** The Job's unique ID\n */\n id: number;\n /** The Job namespace\n */\n collection: string;\n /** The Job name, unique within a given namespace\n */\n job: string;\n /** The Job version\n */\n version: string;\n /** The optional container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n image_type: JobSummaryImageType;\n /** The Job's category\n */\n category?: string;\n /** The list of keywords assigned to the Job\n */\n keywords?: string[];\n /** The name of the job in English\n */\n name: string;\n /** The description of the job in English\n */\n description?: string;\n /** A URL linking to the Job documentation\n */\n doc_url?: string;\n /** A list of Account Server assets names required to run the Job. You need access to these assets in order to run the Job\n */\n required_assets: string[];\n /** A list of Account Server \"required_assets\" that cannot be found. If assets cannot be found the user will receive a \"disabled_reason\" that should explain the problem.\n */\n missing_assets?: string[];\n /** True if disabled. Disabled Jobs cannot be executed. If disabled a reason will be found in `disabled_reason`\n */\n disabled: boolean;\n /** A reason why the Job has been disabled.\n */\n disabled_reason?: string;\n /** A suggested remedy that accompanies the \"disabled_reason\". This provides the client with advice on how to avoid the reason that the Job's been disabled, for reasons that can be avoided (not all are).\n\nWhere there is no remedy for a given reason the remedy will be \"There is no remedy\".\n */\n disabled_remedy?: string;\n replaces?: JobReplacements;\n replaced_by?: JobReplacements;\n}\n\nexport interface JobManifestDetail {\n /** The Job Manifest record ID */\n id: number;\n url: string;\n header?: string;\n params?: string;\n /** The Date (and time) the manifest was created\n */\n created: string;\n /** The (admin) user who created the manifest\n */\n creator?: string;\n /** The number of job definition files loaded during the most recent successful load\n */\n job_definition_files_loaded?: number;\n /** The number of job definitions loaded during the most recent successful load\n */\n job_definitions_loaded?: number;\n /** The Date (and time) the manifest was last loaded successfully\n */\n last_successful_load_time?: string;\n /** The Date (and time) the manifest was last loaded, successfully or otherwise. 
If the manifest (or any of the Job definition files it refers to) fails to load the `load_status` should provide some diagnostic feedback\n */\n last_load_time?: string;\n /** The status of the time the manifest was last loaded. If the load was successful this will be `SUCCESS` and `last_successful_load` will be the same as `last_load`\n */\n last_load_status: string;\n}\n\nexport interface JobApplication {\n /** The Job's operator ID */\n id: string;\n /** The Job's operator version */\n version: string;\n}\n\n/**\n * The Job container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type InstanceSummaryJobImageType =\n (typeof InstanceSummaryJobImageType)[keyof typeof InstanceSummaryJobImageType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryJobImageType = {\n SIMPLE: \"SIMPLE\",\n NEXTFLOW: \"NEXTFLOW\",\n} as const;\n\n/**\n * The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparing to run, is `PENDING`. The instance is running when the phase is `RUNNING`. `COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccessfully.\n\n */\nexport type InstanceSummaryPhase =\n (typeof InstanceSummaryPhase)[keyof typeof InstanceSummaryPhase];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryPhase = {\n COMPLETED: \"COMPLETED\",\n CRASH_LOOP_BACKOFF: \"CRASH_LOOP_BACKOFF\",\n FAILED: \"FAILED\",\n IMAGE_PULL_BACKOFF: \"IMAGE_PULL_BACKOFF\",\n PENDING: \"PENDING\",\n RUNNING: \"RUNNING\",\n SUCCEEDED: \"SUCCEEDED\",\n UNKNOWN: \"UNKNOWN\",\n} as const;\n\n/**\n * The type of Application, which can be a `job` or an `application`\n\n */\nexport type InstanceSummaryApplicationType =\n (typeof InstanceSummaryApplicationType)[keyof typeof InstanceSummaryApplicationType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryApplicationType = {\n APPLICATION: \"APPLICATION\",\n JOB: \"JOB\",\n} as const;\n\nexport interface InstanceSummary {\n /** The application instance ID\n */\n id: string;\n /** True if the instance is archived (protected from automatic deletion)\n */\n archived: boolean;\n /** The application ID\n */\n application_id: string;\n /** The application version\n */\n application_version: string;\n /** The User's specification, provided when the application was launched\n */\n application_specification?: string;\n /** The type of Application, which can be a `job` or an `application`\n */\n application_type: InstanceSummaryApplicationType;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparing to run, is `PENDING`. The instance is running when the phase is `RUNNING`. 
`COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccessfully.\n */\n phase: InstanceSummaryPhase;\n /** The data and time (UTC) the instance was launched\n */\n launched: string;\n /** The data and time (UTC) the instance started running\n */\n started?: string;\n /** The data and time (UTC) the instance stopped running\n */\n stopped?: string;\n /** The time the instance has spent running in the cluster. It's a string representation of a Python timedelta object, e.g. `0:12:32` for a run-time of 12 minutes and 32 seconds. The run-time must be considered as an _estimate_ until the instance has stopped and the instance is only considered to be running once `started` has been set.\n */\n run_time: string;\n /** The application instance owner, the person who launched the application and is the only user than can stop it.\n */\n owner: string;\n /** The Project the instances is running in.\n */\n project_id: string;\n /** The Instance name\n */\n name: string;\n /** For applications (not Jobs) this is the URL the application has exposed. It will ponly be available when the application has started.\n */\n url?: string;\n /** If the instance relates to a job, this will be the job collection, as defined in the original collection's job definition.\n */\n job_collection?: string;\n /** If the instance relates to a job, this will be the job, as defined in the original collection's job definition.\n */\n job_job?: string;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job definition.\n */\n job_name?: string;\n /** The Job definition's unique ID\n */\n job_id?: number;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job definition.\n */\n job_version?: string;\n /** The Job container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n job_image_type?: InstanceSummaryJobImageType;\n /** The instance's known outputs, a JSON string defining a map of all the outputs. 
Typically applied only to JOB application types\n */\n outputs?: string;\n /** Set if the instance output file permissions are expected to be fixed by the Data Manager when the Instance completes.\n */\n fix_permissions?: boolean;\n /** The current running coin cost of the instance.\n */\n coins?: string;\n}\n\nexport interface FileStat {\n /** The size of the file in bytes\n */\n size: number;\n /** The date and time (UTC) of the last modification\n */\n modified: string;\n}\n\nexport interface FilePathFile {\n /** The file's Dataset ID (if the file belongs to a Dataset)\n */\n dataset_id?: string;\n /** The file's Dataset version (if the file belongs to a Dataset)\n */\n dataset_version?: number;\n /** The file name\n */\n file_name: string;\n /** The ID of the file (if the file belongs to a Dataset)\n */\n file_id?: string;\n /** Whether the file is immutable (read-only)\n */\n immutable?: boolean;\n /** The file's MIME type\n */\n mime_type?: string;\n /** The file's owner\n */\n owner: string;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n stat: FileStat;\n}\n\nexport interface JobExchangeRateSummary {\n id: number;\n rate?: string;\n collection: string;\n job: string;\n version: string;\n}\n\nexport interface ApplicationExchangeRateSummary {\n id: string;\n rate?: string;\n}\n\nexport interface ExchangeRateDetail {\n id: number;\n rate: string;\n created: string;\n user_id: string;\n comment?: string;\n}\n\n/**\n * The dictionary of label/value pairs\n\n */\nexport type DatasetVersionSummaryLabels = { [key: string]: any };\n\n/**\n * The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type DatasetVersionSummaryProcessingStage =\n (typeof DatasetVersionSummaryProcessingStage)[keyof typeof DatasetVersionSummaryProcessingStage];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetVersionSummaryProcessingStage = {\n COPYING: \"COPYING\",\n FAILED: \"FAILED\",\n FORMATTING: \"FORMATTING\",\n LOADING: \"LOADING\",\n DELETING: \"DELETING\",\n DONE: \"DONE\",\n} as const;\n\nexport interface DatasetVersionSummary {\n /** The owner of the Dataset version\n */\n owner?: string;\n /** The source of the Dataset - typically the name of the Dataset that was uploaded or a URL reference\n */\n source_ref: string;\n /** The date and time the Dataset was uploaded (an ISO-8601 formatted string in UTC)\n */\n published: string;\n /** The Dataset MIME type\n */\n type: string;\n /** The filename of the Dataset\n */\n file_name: string;\n /** The list of Project's the Dataset is attached to\n */\n projects: string[];\n /** Any extra variables passed in during the upload and handed to the format-support container.\n */\n format_extra_variables?: string;\n /** The version of the dataset\n */\n version: number;\n /** The name of the Pod handling the creation of the Dataset\n */\n creator_pod_name?: string;\n /** The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). 
A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage: DatasetVersionSummaryProcessingStage;\n /** If this is a Deleted dataset this is the date and time the dataset was deleted (an ISO-8601 formatted string in UTC)\n */\n deleted?: string;\n /** If this is a Deleted dataset this is the username of the user that deleted the Dataset\n */\n deleting_user?: string;\n /** The dictionary of label/value pairs\n */\n labels?: DatasetVersionSummaryLabels;\n /** The size, in bytes, of the formatted Dataset\n */\n size?: number;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n}\n\nexport interface DatasetVersionProjectFile {\n project_name: string;\n project: string;\n files: string[];\n}\n\n/**\n * The dictionary of label/value pairs\n\n */\nexport type DatasetVersionDetailLabels = { [key: string]: any };\n\n/**\n * The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type DatasetVersionDetailProcessingStage =\n (typeof DatasetVersionDetailProcessingStage)[keyof typeof DatasetVersionDetailProcessingStage];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetVersionDetailProcessingStage = {\n COPYING: \"COPYING\",\n FAILED: \"FAILED\",\n FORMATTING: \"FORMATTING\",\n LOADING: \"LOADING\",\n DELETING: \"DELETING\",\n DONE: \"DONE\",\n} as const;\n\nexport interface DatasetVersionDetail {\n /** The owner of the Dataset version\n */\n owner: string;\n /** The source of the Dataset - typically the name of the Dataset that was uploaded or a URL reference\n */\n source_ref: string;\n /** The date and time the Dataset was uploaded (an ISO-8601 formatted string in UTC)\n */\n published: string;\n /** The Dataset MIME type\n */\n type: string;\n /** The filename of the Dataset\n */\n file_name: string;\n /** The list of\n */\n project_files: DatasetVersionProjectFile[];\n /** The list of Project's the Dataset is attached to\n */\n projects: string[];\n /** Any extra variables passed in during the upload and handed to the format-support container.\n */\n format_extra_variables?: string;\n /** The version of the dataset\n */\n version: number;\n /** The name of the Pod handling the creation of the Dataset\n */\n creator_pod_name?: string;\n /** The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). 
A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage: DatasetVersionDetailProcessingStage;\n /** If this is a Deleted dataset this is the date and time the dataset was deleted (an ISO-8601 formatted string in UTC)\n */\n deleted?: string;\n /** If this is a Deleted dataset this is the username of the user that deleted the Dataset\n */\n deleting_user?: string;\n /** The dictionary of label/value pairs\n */\n labels?: DatasetVersionDetailLabels;\n /** The size, in bytes, of the formatted Dataset\n */\n size?: number;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n}\n\nexport interface DatasetSummary {\n /** The Dataset ID\n */\n dataset_id: string;\n /** The list of editors\n */\n editors: string[];\n /** The set of separate versions of the Dataset\n */\n versions: DatasetVersionSummary[];\n}\n\nexport interface DatasetDetail {\n /** The Dataset ID\n */\n dataset_id: string;\n /** The list of editors\n */\n editors: string[];\n /** The owner of the Dataset\n */\n owner: string;\n /** The set of separate versions of the Dataset\n */\n versions: DatasetVersionDetail[];\n}\n\nexport interface ApplicationSummary {\n /** The application name, the value of the Kubernetes **Custom Resource Definition** `spec.names.kind` property\n */\n kind: string;\n /** The application group\n */\n group?: string;\n /** The application unique reference, the value of the Kubernetes **Custom Resource Definition** `metadata.name` property\n */\n application_id: string;\n /** The application's latest version as declared in The **Custom Resource Definition**\n */\n latest_version: string;\n}\n\nexport interface ApplicationImageVariant {\n name: string;\n image: string;\n}\n\nexport interface ApplicationImageVariants {\n public?: ApplicationImageVariant[];\n}\n\n/**\n * The REST method used. GET methods are not logged\n\n */\nexport type ApiLogDetailMethod =\n (typeof ApiLogDetailMethod)[keyof typeof ApiLogDetailMethod];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const ApiLogDetailMethod = {\n DELETE: \"DELETE\",\n PATCH: \"PATCH\",\n POST: \"POST\",\n PUT: \"PUT\",\n} as const;\n\nexport interface ApiLogDetail {\n /** The REST method used. GET methods are not logged\n */\n method: ApiLogDetailMethod;\n /** The API path used, e.g. `/dataset`\n */\n path: string;\n /** The date/time the API call began\n */\n began: string;\n /** The REST method approximate execution time (nS) in a humanised form, where 7969400 is presented as 7,969,400\n */\n duration_ns?: string;\n /** The HTTP response status code\n */\n status_code?: number;\n /** A JSON string representing the in-query properties used in the call\n */\n params?: string;\n /** A JSON string representing the in-body properties used in the call\n */\n body?: string;\n /** Not all API calls using your user ID may have been executed by you, In cases where an administrator has executed a REST API call on your behalf, their user ID will be revealed using this property\n */\n impersonator?: string;\n}\n\nexport interface VersionGetResponse {\n /** The Data Manager version. This is guaranteed to be a valid semantic version for official (tagged) images. 
The version value format for unofficial images is a string but otherwise undefined\n */\n version: string;\n}\n\nexport interface UsersGetResponse {\n /** A list of Users that have used the Data Manager\n */\n users: UserSummary[];\n}\n\nexport interface UserApiLogGetResponse {\n /** A list of API loc call records, with the oldest record first in the list\n */\n api_log: ApiLogDetail[];\n}\n\nexport type UserAccountGetResponse = UserAccountDetail;\n\nexport interface TypesGetResponse {\n /** A list of available MIME types\n */\n types: TypeSummary[];\n}\n\nexport interface TasksGetResponse {\n /** A list of Tasks\n */\n tasks: TaskSummary[];\n}\n\n/**\n * The purpose of the task. Tasks are typically executed for the purpose of Dataset processing, attaching as Project Files or Application Instance execution. Other fields in this object are only valid if the purpose is known (i.e. is not `UNKNOWN`)\n\n */\nexport type TaskGetResponsePurpose =\n (typeof TaskGetResponsePurpose)[keyof typeof TaskGetResponsePurpose];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskGetResponsePurpose = {\n DATASET: \"DATASET\",\n FILE: \"FILE\",\n INSTANCE: \"INSTANCE\",\n PROJECT: \"PROJECT\",\n} as const;\n\nexport interface TaskGetResponse {\n /** The purpose of the task. Tasks are typically executed for the purpose of Dataset processing, attaching as Project Files or Application Instance execution. Other fields in this object are only valid if the purpose is known (i.e. is not `UNKNOWN`)\n */\n purpose: TaskGetResponsePurpose;\n /** The identity of the purpose, where available. This will be the Dataset UUID if the purpose of the task is/was for Dataset processing.\n */\n purpose_id: string;\n /** The version number, relating to the object under control. For Datasets this will be the Dataset version.\n */\n purpose_version?: number;\n /** Is the Task Purpose is `INSTANCE`, and the instance was given a `specification` the specification can be found here. For **Applications** the specification is returned verbatim.\n */\n instance_specification?: string;\n /** If a container image is launched by the task the image name is available here\n */\n image?: string;\n /** The date and time the task was created\n */\n created: string;\n /** True if the task has run to completion. If the task finished successfully the `exit_code` will be zero.\n */\n done: boolean;\n /** True if the Task relates to an object removal, i.e. a DELETE\n */\n removal?: boolean;\n /** Present when `done` and zero if the task finished successfully.\n */\n exit_code?: number;\n /** A (possibly empty) list of application states, the oldest state occupies the first position in the list.\n */\n states?: TaskState[];\n /** A (possibly empty) list of application events. The oldest event occupies the first position in the list.\n */\n events?: TaskEvent[];\n}\n\nexport interface ServiceErrorsGetResponse {\n /** A list of service errors\n */\n service_errors: ServiceErrorSummary[];\n}\n\nexport interface ProjectsGetResponse {\n projects: ProjectDetail[];\n}\n\nexport interface ProjectPostResponse {\n /** The project identity\n */\n project_id: string;\n}\n\nexport type ProjectGetResponse = ProjectDetail;\n\nexport type ProjectDeleteResponse = TaskIdentity;\n\nexport interface JobsGetResponse {\n jobs: JobSummary[];\n}\n\n/**\n * The optional container image type. 
Typically a single-container `SIMPLE` (where only one container runs) or a workflow where multiple container images can be spawned (like `NEXTFLOW`)\n\n */\nexport type JobGetResponseImageType =\n (typeof JobGetResponseImageType)[keyof typeof JobGetResponseImageType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const JobGetResponseImageType = {\n SIMPLE: \"SIMPLE\",\n NEXTFLOW: \"NEXTFLOW\",\n} as const;\n\nexport interface JobGetResponse {\n /** The Job's unique ID\n */\n id: number;\n application: ApplicationSummary;\n /** The Job collection\n */\n collection: string;\n /** The Job name, unique within a given collection\n */\n job: string;\n /** The Job version\n */\n version: string;\n /** The Job command (usually encoded)\n */\n command: string;\n /** The Job command encoding\n */\n command_encoding: string;\n /** The name of the container image that houses the job\n */\n image_name: string;\n /** The container image tag\n */\n image_tag: string;\n /** The directory where the project volume will be mounted. The root path to the project files\n */\n image_project_directory: string;\n /** The directory used as the container image working directory (if defined)\n */\n image_working_directory?: unknown;\n /** The optional container image type. Typically a single-container `SIMPLE` (where only one container runs) or a workflow where multiple container images can be spawned (like `NEXTFLOW`)\n */\n image_type?: JobGetResponseImageType;\n /** The Job's descriptive name\n */\n name: string;\n /** English description of the job\n */\n description?: string;\n /** A URL linking to the Job documentation\n */\n doc_url?: string;\n /** The Job's category\n */\n category?: string;\n /** The list of keywords assigned to the Job\n */\n keywords?: string[];\n variables?: JobVariables;\n exchange_rate: string;\n /** A list of Account Server assets names required to run the Job. You need access to these assets in order to run the Job\n */\n required_assets: string[];\n /** A list of Account Server \"required_assets\" that cannot be found. If assets cannot be found the user will receive a \"disabled_reason\" that should explain the problem.\n */\n missing_assets?: string[];\n /** True if disabled. Disabled Jobs cannot be executed. If disabled a reason will be found in `disabled_reason`\n */\n disabled: boolean;\n /** A reason why the Job has been disabled.\n */\n disabled_reason?: string;\n /** A suggested remedy that accompanies the \"disabled_reason\". This provides the client with advice on how to avoid the reason that the Job's been disabled, for reasons that can be avoided (not all are).\n\nWhere there is no remedy for a given reason the remedy will be \"There is no remedy\".\n */\n disabled_remedy?: string;\n replaces?: JobReplacements;\n replaced_by?: JobReplacements;\n}\n\nexport type InstanceTaskPurpose =\n (typeof InstanceTaskPurpose)[keyof typeof InstanceTaskPurpose];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceTaskPurpose = {\n CREATE: \"CREATE\",\n DELETE: \"DELETE\",\n} as const;\n\nexport interface InstanceTask {\n id: string;\n purpose: InstanceTaskPurpose;\n}\n\nexport interface InstancesGetResponse {\n instances: InstanceSummary[];\n}\n\n/**\n * The Job container image type. 
Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type InstanceGetResponseJobImageType =\n (typeof InstanceGetResponseJobImageType)[keyof typeof InstanceGetResponseJobImageType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceGetResponseJobImageType = {\n SIMPLE: \"SIMPLE\",\n NEXTFLOW: \"NEXTFLOW\",\n} as const;\n\n/**\n * The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparing to run, is `PENDING`. The instance is running when the phase is `RUNNING`. `COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccessfully.\n\n */\nexport type InstanceGetResponsePhase =\n (typeof InstanceGetResponsePhase)[keyof typeof InstanceGetResponsePhase];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceGetResponsePhase = {\n COMPLETED: \"COMPLETED\",\n CRASH_LOOP_BACKOFF: \"CRASH_LOOP_BACKOFF\",\n FAILED: \"FAILED\",\n IMAGE_PULL_BACKOFF: \"IMAGE_PULL_BACKOFF\",\n PENDING: \"PENDING\",\n RUNNING: \"RUNNING\",\n SUCCEEDED: \"SUCCEEDED\",\n UNKNOWN: \"UNKNOWN\",\n} as const;\n\n/**\n * The type of Application, which can be a `job` or an `application`\n\n */\nexport type InstanceGetResponseApplicationType =\n (typeof InstanceGetResponseApplicationType)[keyof typeof InstanceGetResponseApplicationType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceGetResponseApplicationType = {\n APPLICATION: \"APPLICATION\",\n JOB: \"JOB\",\n} as const;\n\nexport interface InstanceGetResponse {\n /** The Project the Instance is running in\n */\n project_id: string;\n /** The application name\n */\n name: string;\n /** True if the instance is archived (protected from automatic deletion)\n */\n archived: boolean;\n /** The type of Application, which can be a `job` or an `application`\n */\n application_type: InstanceGetResponseApplicationType;\n /** The application ID\n */\n application_id: string;\n /** The application version\n */\n application_version: string;\n /** The optional application specification. For **Applications** this is returned verbatim. For **Jobs** additional material will be found.\n */\n application_specification?: string;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The application owner, the person who launched the application and is the only user than can stop it.\n */\n owner: string;\n /** The date and time the instance was started, an ISO-8601 format string.\n */\n launched: string;\n /** The date and time the instance started running in the cluster. This is typically close to the launch time but contention may mean the instance starts only when resources are available.\n */\n started?: string;\n /** The date and time the instance stopped, an ISO-8601 format string.\n */\n stopped?: string;\n /** The time the instance has spent running in the cluster. It's a string representation of a Python timedelta object, e.g. `0:12:32` for a run-time of 12 minutes and 32 seconds. The run-time must be considered as an _estimate_ until the instance has stopped and the instance is only considered to be running once `started` has been set.\n */\n run_time: string;\n /** The phase of the application. 
This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparing to run, is `PENDING`. The instance is running when the phase is `RUNNING`. `COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccessfully.\n */\n phase: InstanceGetResponsePhase;\n /** The application endpoint\n */\n url?: string;\n /** If the instance relates to a job, this will be the job collection, as defined in the original collection's job definition.\n */\n job_collection?: string;\n /** If the instance relates to a job, this will be the job, as defined in the original collection's job definition.\n */\n job_job?: string;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job definition.\n */\n job_name?: string;\n /** The Job definition's unique ID\n */\n job_id?: number;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job definition.\n */\n job_version?: string;\n /** The Job container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n job_image_type?: InstanceGetResponseJobImageType;\n /** The JSON string representation of the JobDefinition's outputs\n */\n outputs?: string;\n /** Set if the instance output file permissions are expected to be fixed by the Data Manager when the Instance completes.\n */\n fix_permissions?: boolean;\n /** A list of Tasks related to the application. Tasks are responsible for creating and deleting the instance. You should fins at least one, assuming the corresponding task has not been deleted.\n */\n tasks: InstanceTask[];\n /** Set if the instance has a valid callback token. Instances with a valid token allow users to read files form the project without authentication\n */\n has_valid_callback_token: boolean;\n /** The cost exchange rate that applies to the Job at the time of launch. This is a string representation of a Decimal, e.g. `'0.5'`\n */\n launch_exchange_rate?: string;\n /** The accumulated cost accrued by the running Job. Depending on the the Job, this is either known when the Job completes or may change as the Job runs. This is a string representation of a Decimal value, e.g. `'32.8'`\n */\n cost?: string;\n /** The accumulated coins accrued by the running Job. Coins are calculated based on the `cost` multiplied by the `launch_exchange_rate`. This is a string representation of a Decimal value, e.g. `'16.4'`\n */\n coins?: string;\n /** Where available, this is the number of hours that the JOb would take if the collateral Pods had access to only one CPU core. Collateral Pods are all the pods created by the Job that are in addition to the main (controlling) Pod. A value of `'0.5'` implies the job would have run in 30 minutes on 1 core. The minimum time resolution is 0.001 (3.6 seconds).\n */\n collateral_cpu_hours?: string;\n /** Where available, the number of collateral Pods spawned by the instance, typically used by Job instances. Collateral Pods are all the pods created by the Job that are in addition to the main (controlling) Pod. 
For `NEXTFLOW` jobs this is a count of the number of Task pods spawned.\n */\n collateral_pod_count?: number;\n /** An optional error message, used to report underlying problems.\n */\n error_message?: string;\n}\n\nexport type GetExchangeRatesResponseId = number | string;\n\nexport interface GetExchangeRatesResponse {\n id: GetExchangeRatesResponseId;\n exchange_rates: ExchangeRateDetail[];\n}\n\nexport type GetAllExchangeRatesResponseExchangeRatesItem =\n | ApplicationExchangeRateSummary\n | JobExchangeRateSummary;\n\nexport interface GetAllExchangeRatesResponse {\n only_undefined: boolean;\n exchange_rates: GetAllExchangeRatesResponseExchangeRatesItem[];\n}\n\nexport interface FilesGetResponse {\n /** The project\n */\n project_id: string;\n /** The project path\n */\n path: string;\n /** The dataset identity (not its name). A unique reference assigned automatically when uploaded\n */\n files: FilePathFile[];\n /** Sub-directories in the current path\n */\n paths: string[];\n}\n\nexport interface FilePostResponse {\n /** The Project File identity, assigned automatically when a Dataset is added to a Project\n */\n file_id: string;\n /** The name of the File that will appear in the Project\n */\n file_name: string;\n /** The path to the file in the Project, relative to the volume root (mount point). Files in the root of the project will have a path value of '/'\n */\n file_path: string;\n /** The File task identity. The task assigned to convert and attach the Dataset File to the Project\n */\n task_id: string;\n}\n\nexport interface InstancePostResponse {\n /** The application instance identity (not its name). Assigned automatically when created\n */\n instance_id: string;\n /** A token that can be used to access data in the project without further authentication. The token expires automatically or if the user revokes it. The token is only provided if asked for when the instance is launched.\n */\n callback_token?: string;\n /** The instance task identity. The task assigned to process the instance\n */\n task_id: string;\n /** The instance expanded command. Applies only to Job instances.\n */\n command?: string;\n}\n\nexport interface InstanceDryRunPostResponse {\n /** A token that can be used to access data in the project without further authentication. The token expires automatically or if the user revokes it.\n */\n callback_token?: string;\n /** The instance expanded command. 
Applies only to Job instances.\n */\n command?: string;\n}\n\nexport type InstanceDeleteResponse = TaskIdentity;\n\nexport interface DatasetsGetResponse {\n datasets: DatasetSummary[];\n}\n\nexport type DatasetVersionsGetResponse = DatasetDetail;\n\nexport type DatasetVersionDeleteResponse = TaskIdentity;\n\n/**\n * The Metadata type (an object)\n\n */\nexport type DatasetSchemaGetResponseType =\n (typeof DatasetSchemaGetResponseType)[keyof typeof DatasetSchemaGetResponseType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetSchemaGetResponseType = {\n object: \"object\",\n} as const;\n\nexport interface DatasetSchemaGetResponse {\n /** The Metadata title\n */\n title: string;\n /** The Metadata description\n */\n description: string;\n /** The Metadata type (an object)\n */\n type: DatasetSchemaGetResponseType;\n /** Required properties\n */\n required: string[];\n [key: string]: any;\n}\n\nexport interface DatasetMetaGetResponse {\n /** The Metadata title\n */\n dataset_name: string;\n /** The Metadata description\n */\n dataset_id: string;\n /** The Metadata type (an object)\n */\n description: string;\n /** The date and time of creation\n */\n created: string;\n /** The date and time it was last updated\n */\n last_updated: string;\n /** The user who created the Metadata\n */\n created_by: string;\n /** The Metadata version\n */\n metadata_version: string;\n /** The Metadata's annotations\n */\n annotations: unknown[];\n /** The Metadata's labels\n */\n labels: unknown[];\n [key: string]: any;\n}\n\nexport interface DatasetDigestGetResponse {\n /** The Dataset matching the digest\n */\n dataset_id: string;\n /** The Dataset version\n */\n dataset_version: number;\n}\n\nexport interface DatasetPutPostResponse {\n /** The dataset identity (not its name). Assigned automatically when uploaded\n */\n dataset_id: string;\n /** The dataset identity version. Assigned automatically when uploaded\n */\n dataset_version: number;\n /** The dataset task identity. The task assigned to process the dataset\n */\n task_id: string;\n}\n\nexport interface ApplicationsGetResponse {\n /** A list of installed applications, which are application-compliant Kubernetes \"operators\"\n */\n applications: ApplicationSummary[];\n}\n\nexport interface ApplicationGetResponse {\n /** The Application's unique ID\n */\n id: string;\n /** The Application COST exchange rate\n */\n exchange_rate: string;\n /** The application specification template. 
Used when creating application instances\n */\n template: string;\n /** A list of instances of the application\n */\n instances: string[];\n /** The application group\n */\n group: string;\n /** The name (kind) of the application\n */\n kind: string;\n /** The list of available versions\n */\n versions: string[];\n image_variants?: ApplicationImageVariants;\n}\n\nexport type AdminUserPutResponse = UserAccountDetail;\n\nexport interface AdminJobManifestLoadPutResponse {\n /** True if successful, false otherwise\n */\n status: boolean;\n /** The number of Job Manifests inspected\n */\n manifests_inspected: number;\n /** The number of Job Definitions inspected\n */\n job_definitions_inspected: number;\n /** The number of Jobs inspected\n */\n jobs_inspected: number;\n /** The number of Jobs purged\n */\n jobs_purged?: number;\n}\n\nexport interface AdminJobManifestGetResponse {\n /** The list of known Job manifests\n */\n job_manifests: JobManifestDetail[];\n}\n\nexport interface AccountServerGetNamespaceResponse {\n /** The configured Account Server namespace, which will be an empty string if one is not configured. The AS API is expected as the service `as-api` in this namespace.\n */\n namespace: string;\n}\n\nexport interface AccountServerGetRegistrationResponse {\n merchant_id: number;\n name: string;\n registered: string;\n}\n\nexport interface DmError {\n /** Brief error text that can be presented to the user\n */\n error: string;\n}\n"]}
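The `InstanceSummary`/`InstanceGetResponse` schemas above document the instance lifecycle fields: `phase` reaches `COMPLETED` on success or `FAILED` otherwise, `run_time` is a string rendering of a Python timedelta (e.g. `0:12:32`), and the run time is only meaningful (and still an estimate) once `started` has been set. A minimal sketch, assuming nothing beyond those schema comments, of how a consumer might interpret these fields — the helper names and the local `InstanceLike` type are illustrative and are not exported by the generated client:

```ts
// Illustrative only: mirrors the fields documented in data-manager-api.schemas.ts.
type InstanceLike = {
  phase: string;     // e.g. "PENDING" | "RUNNING" | "COMPLETED" | "FAILED" | ...
  started?: string;  // set once the instance is actually running
  run_time: string;  // Python timedelta string, e.g. "0:12:32" or "1 day, 2:03:04"
};

// The schema comments name COMPLETED (finished successfully) and FAILED
// (finished unsuccessfully) as the finished states; other enum values such as
// SUCCEEDED or UNKNOWN are not covered by this sketch.
const TERMINAL_PHASES = new Set(["COMPLETED", "FAILED"]);

export const isFinished = (instance: InstanceLike): boolean =>
  TERMINAL_PHASES.has(instance.phase);

// Convert the timedelta-style run_time into seconds. Per the schema comments
// this is only an estimate until the instance has stopped, and only
// meaningful once `started` is set.
export function runTimeSeconds(instance: InstanceLike): number | undefined {
  if (!instance.started) return undefined;
  const [, days = "0", hms = instance.run_time] =
    instance.run_time.match(/^(?:(\d+) days?, )?(.+)$/) ?? [];
  const [h = 0, m = 0, s = 0] = hms.split(":").map(Number);
  return Number(days) * 86_400 + h * 3_600 + m * 60 + s;
}
```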
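The same schema carries `cost`, `launch_exchange_rate` and `coins` as Decimal strings, with `coins` defined as `cost` multiplied by `launch_exchange_rate` (the documented example: cost `'32.8'` at rate `'0.5'` gives `'16.4'`). A minimal sketch of that relationship — the function is hypothetical, and a real client would use a decimal library rather than `Number()`, since the API deliberately uses strings to avoid rounding errors:

```ts
// Illustrative helper, not part of the generated client.
export function estimateCoins(
  cost?: string,               // e.g. "32.8" (Decimal string)
  launchExchangeRate?: string, // e.g. "0.5"  (Decimal string)
): number | undefined {
  if (cost === undefined || launchExchangeRate === undefined) return undefined;
  // coins = cost * launch_exchange_rate (see InstanceGetResponse.coins above).
  return Number(cost) * Number(launchExchangeRate);
}

// estimateCoins("32.8", "0.5") === 16.4
```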
|
|
1
|
+
{"version":3,"sources":["../src/data-manager-api.schemas.ts"],"names":[],"mappings":";;;;;;;;AAquBO,IAAM,kCAAkC;AAAA,EAC7C,QAAQ;AACV;AA6CO,IAAM,6BAA6B;AAAA,EACxC,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AACR;AA2CO,IAAM,iBAAiB;AAAA,EAC5B,SAAS;AAAA,EACT,SAAS;AAAA,EACT,OAAO;AAAA,EACP,SAAS;AAAA,EACT,SAAS;AACX;AA0BO,IAAM,iBAAiB;AAAA,EAC5B,UAAU;AAAA,EACV,OAAO;AAAA,EACP,SAAS;AAAA,EACT,MAAM;AAAA,EACN,OAAO;AACT;AAqBO,IAAM,8BAA8B;AAAA,EACzC,UAAU;AAAA,EACV,OAAO;AAAA,EACP,SAAS;AACX;AAuIO,IAAM,sBAAsB;AAAA,EACjC,QAAQ;AAAA,EACR,UAAU;AACZ;AAkGO,IAAM,8BAA8B;AAAA,EACzC,QAAQ;AAAA,EACR,UAAU;AACZ;AAWO,IAAM,uBAAuB;AAAA,EAClC,WAAW;AAAA,EACX,oBAAoB;AAAA,EACpB,QAAQ;AAAA,EACR,oBAAoB;AAAA,EACpB,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AACX;AAUO,IAAM,iCAAiC;AAAA,EAC5C,aAAa;AAAA,EACb,KAAK;AACP;AAyJO,IAAM,uCAAuC;AAAA,EAClD,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AACR;AAsEO,IAAM,sCAAsC;AAAA,EACjD,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AACR;AAgHO,IAAM,qBAAqB;AAAA,EAChC,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,MAAM;AAAA,EACN,KAAK;AACP;AAqEO,IAAM,yBAAyB;AAAA,EACpC,SAAS;AAAA,EACT,MAAM;AAAA,EACN,UAAU;AAAA,EACV,SAAS;AACX;AAsEO,IAAM,0BAA0B;AAAA,EACrC,QAAQ;AAAA,EACR,UAAU;AACZ;AA+EO,IAAM,sBAAsB;AAAA,EACjC,QAAQ;AAAA,EACR,QAAQ;AACV;AAmBO,IAAM,kCAAkC;AAAA,EAC7C,QAAQ;AAAA,EACR,UAAU;AACZ;AAWO,IAAM,2BAA2B;AAAA,EACtC,WAAW;AAAA,EACX,oBAAoB;AAAA,EACpB,QAAQ;AAAA,EACR,oBAAoB;AAAA,EACpB,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AACX;AAUO,IAAM,qCAAqC;AAAA,EAChD,aAAa;AAAA,EACb,KAAK;AACP;AA2LO,IAAM,+BAA+B;AAAA,EAC1C,QAAQ;AACV","sourcesContent":["/**\n * Generated by orval v6.15.0 🍺\n * Do not edit manually.\n * Dataset Manager API\n * The Dataset Manager API service.\n\nA service that allows *registered* users to make **Datasets** and associated **Metadata** available to **Applications** and **Jobs** using **Projects** and **Files**.\n\n * OpenAPI spec version: 1.0\n */\nexport type AdminGetServiceErrorsParams = {\n /**\n * Set to include acknowledged items\n\n */\n include_acknowleged?: QIncludeAcknowlegedParameter;\n};\n\nexport type GetUserApiLogParams = {\n /**\n * A from (inclusive) date-time. If provided no API calls prior to this will be returned. UTC is assumed if no timezone is provided\n\n */\n from?: QFromParameter;\n /**\n * An until (exclusive) date-time. If provided only API calls made before this will be returned. UTC is assumed if no timezone is provided\n\n */\n until?: QUntilParameter;\n};\n\nexport type GetUserAccountParams = {\n /**\n * Set, if you're an admin, to call the endpoint without impersonation\n\n */\n do_not_impersonate?: QDoNotImpersonateParameter;\n};\n\nexport type PatchInstanceParams = {\n /**\n * True to archive the instance\n */\n archive?: QInstanceArchiveParameter;\n};\n\nexport type GetInstancesParams = {\n /**\n * A Project identity\n */\n project_id?: QProjectIdParameter;\n};\n\nexport type GetJobExchangeRatesParams = {\n /**\n * Set to get current\n\n */\n current?: QCurrentParameter;\n};\n\nexport type GetAllJobExchangeRatesParams = {\n /**\n * Only return records where the exchange rate is undefined\n\n */\n only_undefined?: QOnlyUndefinedParameter;\n};\n\nexport type GetJobParams = {\n /**\n * A Project identity\n */\n project_id?: QProjectIdParameter;\n};\n\nexport type GetJobByVersionParams = {\n /**\n * The Collection for a Job, i.e. \"im-test\"\n\n */\n collection: QJobCollectionParameter;\n /**\n * The Job, i.e. 
\"nop\"\n\n */\n job: QJobJobParameter;\n /**\n * The version of a Job, i.e. \"1.0.0\"\n\n */\n version: QJobVersionParameter;\n /**\n * A Project identity\n */\n project_id?: QProjectIdParameter;\n};\n\nexport type GetJobsParams = {\n /**\n * A Project identity\n */\n project_id?: QProjectIdParameter;\n};\n\nexport type GetApplicationExchangeRatesParams = {\n /**\n * Set to get current\n\n */\n current?: QCurrentParameter;\n};\n\nexport type GetAllApplicationExchangeRatesParams = {\n /**\n * Only return records where the exchange rate is undefined\n\n */\n only_undefined?: QOnlyUndefinedParameter;\n};\n\nexport type GetTaskParams = {\n /**\n * Maximum number of events to return. If provided, can be 1 or more.\n\n */\n event_limit?: QEventLimitParameter;\n /**\n * The ordinal of a previously received event. If set, only events subsequent to the ordinal provided will be returned. Providing a value of 0 will result in retrieving the first and subsequent events.\n\n */\n event_prior_ordinal?: QEventPriorOrdinalParameter;\n};\n\nexport type GetTasksParams = {\n /**\n * Set true if you want to exclude 'done' tasks, i.e. just see those that are still running.\n\n */\n exclude_done?: QExcludeDoneParameter;\n /**\n * Set true if you want to exclude Tasks related to object removal.\n\n */\n exclude_removal?: QExcludeRemovalParameter;\n /**\n * Set to a dot-separated string of purpose enumerations, i.e. `DATASET`, `FILE`, `INSTANCE`, or `PROJECT`. To exclude file and dataset tasks set this field to `FILE.DATASET`\n\n */\n exclude_purpose?: QExcludePurposeParameter;\n /**\n * A Project identity\n */\n project_id?: QProjectIdParameter;\n /**\n * An instance callback context string\n */\n instance_callback_context?: QInstanceCallbackContextParameter;\n};\n\nexport type DeleteUnmanagedFileParams = {\n /**\n * A project file.\n\n */\n file: QFileParameter;\n /**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\n path?: QFilePathParameter;\n /**\n * The Project identity\n */\n project_id: QFileProjectIdParameter;\n};\n\nexport type GetFilesParams = {\n /**\n * The Project identity\n */\n project_id: QFileProjectIdParameter;\n /**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\n path?: QFilePathParameter;\n /**\n * Whether to include hidden files and directories\n */\n include_hidden?: QIncludeHiddenParameter;\n};\n\nexport type DeleteDatasetParams = {\n /**\n * Whether to convert Project managed file instances to unmanaged files\n\n */\n keep_project_files?: QKeepProjectFilesParameter;\n};\n\nexport type GetVersionsParams = {\n /**\n * Whether to include records that are deleted\n */\n include_deleted?: QIncludeDeletedParameter;\n};\n\nexport type GetDatasetsParams = {\n /**\n * Whether to include records that are deleted\n */\n include_deleted?: QIncludeDeletedParameter;\n /**\n * Filter the datasets by username\n\n */\n username?: QUsernameParameter;\n /**\n * Filter the datasets by the supplied mime_type.\n\n */\n dataset_mime_type?: QDatasetMimeTypeParameter;\n /**\n * Filter the datasets by a comma separated list of owners\n\n */\n owners?: QOwnersParameter;\n /**\n * Filter the datasets by a comma separated list of editors\n\n */\n editors?: QEditorsParameter;\n /**\n * JSON string containing a list of label/value pairs for the datasets to be filtered by. If the value is set to null, then only the label is matched. 
If both the label and value are set, then both are matched. e.g. '{\"label1\": null, \"label2\": \"value2\"}'\n\n */\n labels?: QLabelsParameter;\n};\n\nexport type GetProjectFileWithTokenParams = {\n /**\n * A token\n */\n token?: QTokenParameter;\n /**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\n path?: QFilePathParameter;\n /**\n * A project file.\n\n */\n file: QFileParameter;\n};\n\nexport type GetProjectFileParams = {\n /**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\n path?: QFilePathParameter;\n /**\n * A project file.\n\n */\n file: QFileParameter;\n};\n\nexport type GetProjectsParams = {\n /**\n * A Project name\n */\n project_name?: QProjectNameParameter;\n};\n\n/**\n * Filter the datasets by a comma separated list of editors\n\n */\nexport type QEditorsParameter = string;\n\n/**\n * Filter the datasets by a comma separated list of owners\n\n */\nexport type QOwnersParameter = string;\n\n/**\n * Filter the datasets by username\n\n */\nexport type QUsernameParameter = string;\n\n/**\n * An until (exclusive) date-time. If provided only API calls made before this will be returned. UTC is assumed if no timezone is provided\n\n */\nexport type QUntilParameter = string;\n\n/**\n * A token\n */\nexport type QTokenParameter = string;\n\n/**\n * An instance callback context string\n */\nexport type QInstanceCallbackContextParameter = string;\n\n/**\n * A Project name\n */\nexport type QProjectNameParameter = string;\n\n/**\n * A Project identity\n */\nexport type QProjectIdParameter = string;\n\n/**\n * JSON string containing a list of label/value pairs for the datasets to be filtered by. If the value is set to null, then only the label is matched. If both the label and value are set, then both are matched. e.g. '{\"label1\": null, \"label2\": \"value2\"}'\n\n */\nexport type QLabelsParameter = string;\n\n/**\n * Only return records where the exchange rate is undefined\n\n */\nexport type QOnlyUndefinedParameter = boolean;\n\n/**\n * Whether to convert Project managed file instances to unmanaged files\n\n */\nexport type QKeepProjectFilesParameter = boolean;\n\n/**\n * The version of a Job, i.e. \"1.0.0\"\n\n */\nexport type QJobVersionParameter = string;\n\n/**\n * The Job, i.e. \"nop\"\n\n */\nexport type QJobJobParameter = string;\n\n/**\n * A Job identity\n\n */\nexport type QJobIdParameter = number;\n\n/**\n * The Collection for a Job, i.e. \"im-test\"\n\n */\nexport type QJobCollectionParameter = string;\n\n/**\n * True to archive the instance\n */\nexport type QInstanceArchiveParameter = boolean;\n\n/**\n * Whether to include hidden files and directories\n */\nexport type QIncludeHiddenParameter = boolean;\n\n/**\n * Whether to include records that are deleted\n */\nexport type QIncludeDeletedParameter = boolean;\n\n/**\n * Set to include acknowledged items\n\n */\nexport type QIncludeAcknowlegedParameter = boolean;\n\n/**\n * A from (inclusive) date-time. If provided no API calls prior to this will be returned. UTC is assumed if no timezone is provided\n\n */\nexport type QFromParameter = string;\n\n/**\n * The Project identity\n */\nexport type QFileProjectIdParameter = string;\n\n/**\n * A project path. 
If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\nexport type QFilePathParameter = string;\n\n/**\n * A project file.\n\n */\nexport type QFileParameter = string;\n\n/**\n * Set to a dot-separated string of purpose enumerations, i.e. `DATASET`, `FILE`, `INSTANCE`, or `PROJECT`. To exclude file and dataset tasks set this field to `FILE.DATASET`\n\n */\nexport type QExcludePurposeParameter = string;\n\n/**\n * Set true if you want to exclude Tasks related to object removal.\n\n */\nexport type QExcludeRemovalParameter = boolean;\n\n/**\n * Set true if you want to exclude 'done' tasks, i.e. just see those that are still running.\n\n */\nexport type QExcludeDoneParameter = boolean;\n\n/**\n * The ordinal of a previously received event. If set, only events subsequent to the ordinal provided will be returned. Providing a value of 0 will result in retrieving the first and subsequent events.\n\n */\nexport type QEventPriorOrdinalParameter = number;\n\n/**\n * Maximum number of events to return. If provided, can be 1 or more.\n\n */\nexport type QEventLimitParameter = number;\n\n/**\n * Filter the datasets by the supplied mime_type.\n\n */\nexport type QDatasetMimeTypeParameter = string;\n\n/**\n * Set, if you're an admin, to call the endpoint without impersonation\n\n */\nexport type QDoNotImpersonateParameter = boolean;\n\n/**\n * Set to get current\n\n */\nexport type QCurrentParameter = boolean;\n\n/**\n * An Application identity\n\n */\nexport type QApplicationIdParameter = string;\n\nexport type UserPatchBodyBody = {\n /** If set to a message the user account is suspended, with the user receiving this message when they try and use the API. A suspended user account can be restored by setting the message to `/restore`\n */\n suspend_message?: string;\n};\n\nexport type UserAccountPatchBodyBody = {\n /** If set the user account becomes private, if provided but false the user account becomes public. Public Users show up in user searches\n */\n private?: boolean;\n /** For **admin** accounts, if set the user account is able to read anything, i.e. `GET` API calls (i.e. endpoints that do not change the Data Manager state) behave as though the caller is acting as *everyone*. An **admin** user would set ths parameter in order to browse the system, and then switch to `impersonate` mode in order to change things as the chosen user\n */\n become_admin?: boolean;\n /** For **admin** accounts, if set API calls behave as though the caller is the user being impersonated. To stop impersonating set this to an empty string. To set impersonation to anything other than an empty string you must also set `become_admin`\n */\n impersonate?: string;\n /** For **admin** accounts, if this is set the account for the user being impersonated is patched, rather then the user's own account. To use this you must have a value for `impersonate`\n */\n use_impersonation?: boolean;\n};\n\nexport type ProjectPostBodyBody = {\n name: string;\n /** Whether the project is private. 
You may not be permitted to make the project private, that will depend on the project product you're using to create the project\n */\n private?: boolean;\n /** The Data Manager *Tier Product ID* you're using to create the Project\n */\n tier_product_id: string;\n};\n\nexport type ProjectPatchBodyBody = {\n private?: boolean;\n /** The new name of the ptojct\n */\n name?: string;\n};\n\nexport type ProjectFilePutBodyBody = {\n file: Blob;\n /** An alternative filename to use for the uploaded File\n */\n as_filename?: string;\n /** The Project path of the file.\n */\n path?: string;\n};\n\nexport type ExchangeRatePutBodyBody = {\n /** A decimal value used as the new Exchange Rate. Application _raw_ **costs** are multiplied by this value to covert costs to **coins**. A string is used to avoid rounding errors. Internally the value is treated as a Python Decimal.\n */\n rate: string;\n /** A brief comment relating to the new rate\n */\n comment?: string;\n};\n\nexport type JobManifestPutBodyBody = {\n /** The URL of the Job Manifest */\n url: string;\n /** Optional URL header values (a JSON string) */\n header?: string;\n /** Optional URL parameter values (a JSON string) */\n params?: string;\n};\n\nexport type JobManifestLoadPutBodyBody = {\n /** Set to remove all pre-existing Job Definitions that are not present in the existing manifests after the load is complete.\n\nJobs in the collection `im-test` are not removed */\n purge?: boolean;\n};\n\nexport type InstancePostBodyBody = {\n /** A supported application. Applications instances are managed using pre-deployed Kubernetes **Operators**. The application ID is a combination of the operator _plural_ and _group_.\n\nApplication IDs currently supported include `datamanagerjobs.squonk.it` and `jupyternotebooks.squonk.it`\n */\n application_id: string;\n /** A supported application version to launch.\n\nThis property is **Deprecated**. It is currently ignored will be removed in a future release\n */\n application_version?: string;\n /** The project to attach\n */\n project_id: string;\n /** The name to use for the instance\n */\n as_name: string;\n /** A URL the DM will use to PUT job progress messages as the requested instance runs. Used, at the moment, for Job execution\n */\n callback_url?: string;\n /** Used in conjunction with the `callback_url` any value provided here will be passed back in the message payload that's delivered to the callback URL. It can be used by the recipient to provide a context that's meaningful\n */\n callback_context?: string;\n /** If set a callback token will be provided in the response. The token allows files to be downloaded from the instance project and expires after a pre-configured amount of time after the instance is complete or if the user revokes the token.\n\nCaution should be taken using this feature. A 3rd party can access the Project's files without authentication, they just need the token and the project identity.\n\nTokens should therefore be revoked when they're no longer required\n */\n generate_callback_token?: boolean;\n /** An optional 22-character **sortuuid** callback token that is supplied by the remote service. 
If not provided the user can use `generate_callback_token` to have one generated and returned in the response.\n\nSee the Python module's `shortuuid.get_alphabet()` for the full list of permitted characters\n */\n callback_token?: string;\n /** A debug value that may be used by the instance.\n\nFor Data Manager **Job** applications setting this to anything other zero ('0') prevents the Job's Pod from being deleted automatically, allowing a developer to inspect the Pod's log for example.\n\nThe behaviour of **Application** instances using this property is undefined. It will depend on whether the application **CRD** handles the Data Manager debug field.\n */\n debug?: string;\n /** The instance specification. A JSON string that's application-specific and controls the application's behaviour.\n\nWhen launching a Data Manager **Job** Application you must identify the Job using the properties `collection`, `job` and `version`, e.g. `{\"collection\":\"im-test\",\"job\":\"nop\",\"version\":\"1.0.0\"}`\n\nJobs that offer commands will often advertise a series of **inputs** and **options** where the values can be provided using a **variables** map in the specification. Something like `\"variables\":{\"x\":7}`.\n\nJobs start in a Job-specific **working directory** but the starting directory for any Job can be adjusted by defining a `sub_path` to the root specification. For example, if you want the Job to start in the path `foo/bar` (inside the Job's built-in working directory) you can add `\"sub_path\":\"foo/bar\"` to the specification. You can only use a sub-path for a Job if the Job defines a working directory and `sub-path` cannot begin or end with a path separator (`/`).\n\nApplications also support `variables`. The Jupyter notebook application supports the definition of the notebook `\"image\"`, `\"cpu\"` and `\"memory\"`. A full Jupyter notebook specification might be `{\"variables\":{\"image\":\"jupyter/tensorflow-notebook:tensorflow-2.9.1\",\"cpu\":2,\"memory\":\"4Gi\"}}`, where `memory` is limited to `Gi` as a suffix.\n */\n specification?: string;\n};\n\nexport type FilePostBodyBody = {\n /** The Dataset UUID for the File that you intend to attach\n */\n dataset_id: string;\n /** The Dataset version to attach\n */\n dataset_version: number;\n /** The Project UUID you're attaching to\n */\n project_id: string;\n /** The desired Dataset file type (a MIME type). Whether or not the chosen fileType is supported will depend on the Dataset\n */\n as_type: string;\n /** A path within the Project to add the File, default is the project root ('/'), the mount-point within the application container. Paths must begin '/'\n */\n path?: string;\n /** Whether to compress the Dataset File as it's attached. Compression is achieved using gzip, resulting in a File ending `.gz`. By default the file will be compressed\n */\n compress?: boolean;\n /** Whether the Dataset File can be modified while in the Project. By default the File cannot be modified\n */\n immutable?: boolean;\n};\n\nexport type DatasetPutBodyBody = {\n /** The MIME type of the Dataset\n */\n dataset_type: string;\n /** Extra variables (text) presented to the Dataset format-support container, which occurs during the upload and database processing stage. The content of the text is Dataset type specific.\n */\n format_extra_variables?: string;\n /** If set the post-format database load action is skipped. This means the dataset is uploaded but its molecules are not entered into the Data Manager database. 
This can save significant time if you're just experimenting with the dataset and do not need the molecules in the database\n */\n skip_molecule_load?: boolean;\n /** The Project the file belongs to\n */\n project_id: string;\n /** The Project path of the file.\n */\n path: string;\n /** The file name of the file in the Project path to load as a new Dataset.\n */\n file_name: string;\n /** If provided the File becomes a new version of the Dataset named. If not provided this File becomes the first version of a new Dataset, whose ID is returned to you on success.\n */\n dataset_id?: string;\n /** The Organisational Unit you want the Dataset to belong to. If not supplied the Project Unit is used\n */\n unit_id?: string;\n};\n\nexport type DatasetVersionMetaPostBodyBody = {\n /** JSON string containing a list of parameter changes to the metadata. Only the description is currently allowed.\n */\n meta_properties?: string;\n /** JSON string containing a list of annotations. The format of the labels should match either the Fields Descriptor or Service Execution annotation formats described in the data-manager-metadata library.\n */\n annotations?: string;\n};\n\nexport type DatasetMetaPostBodyBody = {\n /** JSON string containing a list of parameter changes to the metadata. Only the description is currently allowed.\n */\n meta_properties?: string;\n /** JSON string containing a list of labels. The format of the labels should match the label annotation format described in the data-manager-metadata library.\n */\n labels?: string;\n};\n\nexport type DatasetPostBodyBody = {\n dataset_file: Blob;\n /** The MIME type of the Dataset. Values like `chemical/x-mdl-sdfile`, `chemical/x-mdl-molfile`, and `chemical/x-pdb` are permitted. See the **\\/type** endpoint for a full list of types.\n */\n dataset_type: string;\n /** Extra variables (text) presented to the Dataset format-support container, which occurs during the upload and database processing stage. The content of the text is Dataset type specific.\n */\n format_extra_variables?: string;\n /** If set the post-format database load action is skipped. This means the dataset is uploaded but its molecules are not entered into the Data Manager database. This can save significant time if you're just experimenting with the dataset and do not need the molecules in the database\n */\n skip_molecule_load?: boolean;\n /** An optional new filename to use for the uploaded Dataset. The Dataset will be stored using this name.\n */\n as_filename?: string;\n /** If provided the Dataset becomes a new version of the Dataset named. 
If not provided this Dataset becomes the first version of a new Dataset, whose ID is returned to you on success.\n */\n dataset_id?: string;\n /** The Organisational Unit you want the Dataset to belong to\n */\n unit_id: string;\n};\n\nexport interface AsAdditionalDataProcessingCharge {\n cost: string;\n cost_to_coins_er: string;\n cost_scale_factor: string;\n instance_id: string;\n instance_name: string;\n started: string;\n stopped?: string;\n run_time?: string;\n error_message?: string;\n job_collection?: string;\n job_job?: string;\n job_version?: string;\n collateral_pod_count?: number;\n collateral_cpu_hours?: string;\n}\n\nexport interface UserSummary {\n /** The user's preferred username\n */\n username: string;\n}\n\nexport interface UserDetail {\n /** The user's preferred username\n */\n username: string;\n /** The date and time the user was first seen (an ISO-8601 formatted string in UTC)\n */\n first_seen?: string;\n /** Set if the user's account is marked as private. Private accounts do not show up against general queries.\n */\n private: boolean;\n /** True if the account is suspended\n */\n suspended?: boolean;\n /** If the account is suspended this typically displays a reason for suspension\n */\n suspension_message?: string;\n /** For admin accounts, whether the user is acting in an administrative capacity, i.e. acting as everyone\n */\n become_admin?: boolean;\n /** For admin accounts, whether the user is impersonating another user\n */\n impersonate?: string;\n}\n\nexport interface UserAccountDetail {\n user: UserDetail;\n /** Whether the caller has admin privilege */\n caller_has_admin_privilege: boolean;\n}\n\n/**\n * The Schema type (an object)\n\n */\nexport type TypeSummaryFormatterOptionsType =\n (typeof TypeSummaryFormatterOptionsType)[keyof typeof TypeSummaryFormatterOptionsType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TypeSummaryFormatterOptionsType = {\n object: \"object\",\n} as const;\n\n/**\n * If present, contains the formatter_options that can be entered in the format_extra_variables field in the POST /dataset api.\n\n */\nexport type TypeSummaryFormatterOptions = {\n /** The title of the Formatter object\n */\n title: string;\n /** The Schema type (an object)\n */\n type: TypeSummaryFormatterOptionsType;\n /** Required properties\n */\n required: string[];\n [key: string]: any;\n};\n\nexport interface TypeSummary {\n /** The File Type MIME\n */\n mime: string;\n /** The file's supported file extensions. Each type is limited to a limited number of extensions. For example, SDF files must have the extension `.sdf` (or `.sdf.gz`).\n */\n file_extensions: string[];\n /** The file's type's format-support container image (if set). Types without a format support image cannot be uploaded, but they might be available for use as destination type when a Dataset is added to a Project.\n */\n formatter_image?: string;\n /** True if Datasets uploaded using this type's support loading of data into the Data Manager data-base.\n */\n formatter_supports_db_load?: boolean;\n /** If present, contains the formatter_options that can be entered in the format_extra_variables field in the POST /dataset api.\n */\n formatter_options?: TypeSummaryFormatterOptions;\n}\n\n/**\n * The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). 
A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type TaskSummaryProcessingStage =\n (typeof TaskSummaryProcessingStage)[keyof typeof TaskSummaryProcessingStage];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskSummaryProcessingStage = {\n COPYING: \"COPYING\",\n FAILED: \"FAILED\",\n FORMATTING: \"FORMATTING\",\n LOADING: \"LOADING\",\n DELETING: \"DELETING\",\n DONE: \"DONE\",\n} as const;\n\nexport interface TaskSummary {\n /** The Task UUID\n */\n id: string;\n /** If a container image is launched by the task the image name is available here\n */\n image?: string;\n /** The date and time the task was created\n */\n created: string;\n /** True if the task has run to completion. If the task finished successfully the `exit_code` will be zero.\n */\n done: boolean;\n /** True if the Task relates to an object removal, i.e. a DELETE\n */\n removal?: boolean;\n /** Present when `done` and zero if the task finished successfully.\n */\n exit_code?: number;\n /** The purpose for the Task. Tasks are responsible for _uploading_ a Dataset, _attaching_ a Dataset as a File to a project or launching an Applications _instance_.\n */\n purpose: string;\n /** The related object ID for the Task purpose. If the purpose relates to a **Dataset** upload the ID will be a Dataset ID.\n */\n purpose_id: string;\n /** The related object version for the Task purpose. This field will only be set if the **purpose** is `DATASET`.\n */\n purpose_version?: number;\n /** The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage?: TaskSummaryProcessingStage;\n}\n\n/**\n * The task state. The typical state sequence is `PENDING`, then `STARTED` and finally `SUCCESS`\n\n */\nexport type TaskStateState =\n (typeof TaskStateState)[keyof typeof TaskStateState];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskStateState = {\n PENDING: \"PENDING\",\n STARTED: \"STARTED\",\n RETRY: \"RETRY\",\n SUCCESS: \"SUCCESS\",\n FAILURE: \"FAILURE\",\n} as const;\n\nexport interface TaskState {\n /** The task state. The typical state sequence is `PENDING`, then `STARTED` and finally `SUCCESS`\n */\n state: TaskStateState;\n /** A short message accompanying the state, generally only found when the state is `FAILURE`\n */\n message?: string;\n /** The date and time of the state change\n */\n time: string;\n}\n\nexport interface TaskIdentity {\n task_id: string;\n}\n\n/**\n * The level of the message, a typical logging framework value\n\n */\nexport type TaskEventLevel =\n (typeof TaskEventLevel)[keyof typeof TaskEventLevel];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskEventLevel = {\n CRITICAL: \"CRITICAL\",\n ERROR: \"ERROR\",\n WARNING: \"WARNING\",\n INFO: \"INFO\",\n DEBUG: \"DEBUG\",\n} as const;\n\nexport interface TaskEvent {\n /** The event sequence number. 
The first event is always '1'.\n */\n ordinal: number;\n /** A short message.\n */\n message: string;\n /** The level of the message, a typical logging framework value\n */\n level: TaskEventLevel;\n /** The date and time the event was generated\n */\n time: string;\n}\n\nexport type ServiceErrorSummarySeverity =\n (typeof ServiceErrorSummarySeverity)[keyof typeof ServiceErrorSummarySeverity];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const ServiceErrorSummarySeverity = {\n CRITICAL: \"CRITICAL\",\n ERROR: \"ERROR\",\n WARNING: \"WARNING\",\n} as const;\n\nexport interface ServiceErrorSummary {\n id: number;\n created: string;\n summary: string;\n severity: ServiceErrorSummarySeverity;\n hostname: string;\n error_code?: number;\n stack_trace: string;\n acknowledged: boolean;\n acknowledged_at?: string;\n acknowledging_user?: string;\n}\n\nexport interface ProjectFileDetail {\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The ProjectFile's Dataset origin\n */\n dataset_id?: string;\n /** The ProjectFile's Dataset origin version\n */\n dataset_version?: number;\n /** The ProjectFile's unique ID\n */\n file_id: string;\n /** The ProjectFile's filename within the Project\n */\n file_name: string;\n /** The ProjectFile's path within the Project volume\n */\n file_path: string;\n /** True if the ProjectFile cannot be modified while in the Project\n */\n immutable: boolean;\n /** The owner of the ProjectFile. This is the user that added the Dataset (as this file) to the Project\n */\n owner: string;\n /** The Project the ProjectFile belongs to\n */\n project_id?: string;\n /** The ProjectFile MIME type\n */\n mime_type: string;\n}\n\nexport interface ProjectDetail {\n /** The project name\n */\n name: string;\n /** The project unique reference\n */\n project_id: string;\n created: string;\n /** The Account Server Product the Project belongs to\n */\n product_id?: string;\n /** The Account Server Unit the Project Product belongs to\n */\n unit_id?: string;\n /** The project (owner) creator\n */\n owner: string;\n /** True if the project is private. Private projects are only visible to the owner and its editors.\n */\n private: boolean;\n /** An editor (user_id) of the project */\n editors: string[];\n /** An observer (user_id) of the project */\n observers: string[];\n /** The approximate size of all the files in the Project volume. This is updated regularly throughout the day and its current size may differ from what is reported here. The smallest billable unit is 1GiB (1,073,741,824 bytes). Therefore a project that contains 32KiB of files is recorded as 1GiB in size */\n size: number;\n /** A list of managed files in the Project\n */\n files?: ProjectFileDetail[];\n}\n\nexport interface JobReplacement {\n collection: string;\n job: string;\n}\n\n/**\n * A list of Jobs, collection and job that are either replacing or being replaced\n\n */\nexport type JobReplacements = JobReplacement[];\n\nexport interface JobOrderDetail {\n options: string[];\n}\n\n/**\n * The Job command's options. The JSONSchema for the command's options, essentially the **variables/options** block of the Job's JobDefinition.\n\n */\nexport type JobVariablesOptions = { [key: string]: any };\n\n/**\n * The Job command's outputs. The JSONSchema for the command's inputs, essentially the **variables/outputs** block of the Job's JobDefinition.\n\n */\nexport type JobVariablesOutputs = { [key: string]: any };\n\n/**\n * The Job command's inputs. 
The JSONSchema for the command's inputs, essentially the **variables/inputs** block of the Job's JobDefinition.\n\n */\nexport type JobVariablesInputs = { [key: string]: any };\n\nexport interface JobVariables {\n /** The Job command's variable ordering declaration. Contains a list of `options` in an ordered list, the order defines the order of presentation of the Job's variables.\n */\n order?: JobOrderDetail;\n /** The Job command's inputs. The JSONSchema for the command's inputs, essentially the **variables/inputs** block of the Job's JobDefinition.\n */\n inputs?: JobVariablesInputs;\n /** The Job command's outputs. The JSONSchema for the command's inputs, essentially the **variables/outputs** block of the Job's JobDefinition.\n */\n outputs?: JobVariablesOutputs;\n /** The Job command's options. The JSONSchema for the command's options, essentially the **variables/options** block of the Job's JobDefinition.\n */\n options?: JobVariablesOptions;\n}\n\n/**\n * The optional container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type JobSummaryImageType =\n (typeof JobSummaryImageType)[keyof typeof JobSummaryImageType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const JobSummaryImageType = {\n SIMPLE: \"SIMPLE\",\n NEXTFLOW: \"NEXTFLOW\",\n} as const;\n\nexport interface JobSummary {\n /** The Job's unique ID\n */\n id: number;\n /** The Job namespace\n */\n collection: string;\n /** The Job name, unique within a given namespace\n */\n job: string;\n /** The Job version\n */\n version: string;\n /** The optional container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n image_type: JobSummaryImageType;\n /** The Job's category\n */\n category?: string;\n /** The list of keywords assigned to the Job\n */\n keywords?: string[];\n /** The name of the job in English\n */\n name: string;\n /** The description of the job in English\n */\n description?: string;\n /** A URL linking to the Job documentation\n */\n doc_url?: string;\n /** A list of Account Server assets names required to run the Job. You need access to these assets in order to run the Job\n */\n required_assets: string[];\n /** A list of Account Server \"required_assets\" that cannot be found. If assets cannot be found the user will receive a \"disabled_reason\" that should explain the problem.\n */\n missing_assets?: string[];\n /** True if disabled. Disabled Jobs cannot be executed. If disabled a reason will be found in `disabled_reason`\n */\n disabled: boolean;\n /** A reason why the Job has been disabled.\n */\n disabled_reason?: string;\n /** A suggested remedy that accompanies the \"disabled_reason\". 
This provides the client with advice on how to avoid the reason that the Job's been disabled, for reasons that can be avoided (not all are).\n\nWhere there is no remedy for a given reason the remedy will be \"There is no remedy\".\n */\n disabled_remedy?: string;\n replaces?: JobReplacements;\n replaced_by?: JobReplacements;\n}\n\nexport interface JobManifestDetail {\n /** The Job Manifest record ID */\n id: number;\n url: string;\n header?: string;\n params?: string;\n /** The Date (and time) the manifest was created\n */\n created: string;\n /** The (admin) user who created the manifest\n */\n creator?: string;\n /** The number of job definition files loaded during the most recent successful load\n */\n job_definition_files_loaded?: number;\n /** The number of job definitions loaded during the most recent successful load\n */\n job_definitions_loaded?: number;\n /** The Date (and time) the manifest was last loaded successfully\n */\n last_successful_load_time?: string;\n /** The Date (and time) the manifest was last loaded, successfully or otherwise. If the manifest (or any of the Job definition files it refers to) fails to load the `load_status` should provide some diagnostic feedback\n */\n last_load_time?: string;\n /** The status of the time the manifest was last loaded. If the load was successful this will be `SUCCESS` and `last_successful_load` will be the same as `last_load`\n */\n last_load_status: string;\n}\n\nexport interface JobApplication {\n /** The Job's operator ID */\n id: string;\n /** The Job's operator version */\n version: string;\n}\n\n/**\n * The Job container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type InstanceSummaryJobImageType =\n (typeof InstanceSummaryJobImageType)[keyof typeof InstanceSummaryJobImageType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryJobImageType = {\n SIMPLE: \"SIMPLE\",\n NEXTFLOW: \"NEXTFLOW\",\n} as const;\n\n/**\n * The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparing to run, is `PENDING`. The instance is running when the phase is `RUNNING`. 
`COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccessfully.\n\n */\nexport type InstanceSummaryPhase =\n (typeof InstanceSummaryPhase)[keyof typeof InstanceSummaryPhase];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryPhase = {\n COMPLETED: \"COMPLETED\",\n CRASH_LOOP_BACKOFF: \"CRASH_LOOP_BACKOFF\",\n FAILED: \"FAILED\",\n IMAGE_PULL_BACKOFF: \"IMAGE_PULL_BACKOFF\",\n PENDING: \"PENDING\",\n RUNNING: \"RUNNING\",\n SUCCEEDED: \"SUCCEEDED\",\n UNKNOWN: \"UNKNOWN\",\n} as const;\n\n/**\n * The type of Application, which can be a `job` or an `application`\n\n */\nexport type InstanceSummaryApplicationType =\n (typeof InstanceSummaryApplicationType)[keyof typeof InstanceSummaryApplicationType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryApplicationType = {\n APPLICATION: \"APPLICATION\",\n JOB: \"JOB\",\n} as const;\n\nexport interface InstanceSummary {\n /** The application instance ID\n */\n id: string;\n /** True if the instance is archived (protected from automatic deletion)\n */\n archived: boolean;\n /** The application ID\n */\n application_id: string;\n /** The application version\n */\n application_version: string;\n /** The User's specification, provided when the application was launched\n */\n application_specification?: string;\n /** The type of Application, which can be a `job` or an `application`\n */\n application_type: InstanceSummaryApplicationType;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparing to run, is `PENDING`. The instance is running when the phase is `RUNNING`. `COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccessfully.\n */\n phase: InstanceSummaryPhase;\n /** The data and time (UTC) the instance was launched\n */\n launched: string;\n /** The data and time (UTC) the instance started running\n */\n started?: string;\n /** The data and time (UTC) the instance stopped running\n */\n stopped?: string;\n /** The time the instance has spent running in the cluster. It's a string representation of a Python timedelta object, e.g. `0:12:32` for a run-time of 12 minutes and 32 seconds. The run-time must be considered as an _estimate_ until the instance has stopped and the instance is only considered to be running once `started` has been set.\n */\n run_time: string;\n /** The application instance owner, the person who launched the application and is the only user than can stop it.\n */\n owner: string;\n /** The Project the instances is running in.\n */\n project_id: string;\n /** The Instance name\n */\n name: string;\n /** For applications (not Jobs) this is the URL the application has exposed. 
It will ponly be available when the application has started.\n */\n url?: string;\n /** If the instance relates to a job, this will be the job collection, as defined in the original collection's job definition.\n */\n job_collection?: string;\n /** If the instance relates to a job, this will be the job, as defined in the original collection's job definition.\n */\n job_job?: string;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job definition.\n */\n job_name?: string;\n /** The Job definition's unique ID\n */\n job_id?: number;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job definition.\n */\n job_version?: string;\n /** The Job container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n job_image_type?: InstanceSummaryJobImageType;\n /** The instance's known outputs, a JSON string defining a map of all the outputs. Typically applied only to JOB application types\n */\n outputs?: string;\n /** Set if the instance output file permissions are expected to be fixed by the Data Manager when the Instance completes.\n */\n fix_permissions?: boolean;\n /** The current running coin cost of the instance.\n */\n coins?: string;\n}\n\nexport interface FileStat {\n /** The size of the file in bytes\n */\n size: number;\n /** The date and time (UTC) of the last modification\n */\n modified: string;\n}\n\nexport interface FilePathFile {\n /** The file's Dataset ID (if the file belongs to a Dataset)\n */\n dataset_id?: string;\n /** The file's Dataset version (if the file belongs to a Dataset)\n */\n dataset_version?: number;\n /** The file name\n */\n file_name: string;\n /** The ID of the file (if the file belongs to a Dataset)\n */\n file_id?: string;\n /** Whether the file is immutable (read-only)\n */\n immutable?: boolean;\n /** The file's MIME type\n */\n mime_type?: string;\n /** The file's owner\n */\n owner: string;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n stat: FileStat;\n}\n\nexport interface JobExchangeRateSummary {\n id: number;\n rate?: string;\n collection: string;\n job: string;\n version: string;\n}\n\nexport interface ApplicationExchangeRateSummary {\n id: string;\n rate?: string;\n}\n\nexport interface ExchangeRateDetail {\n id: number;\n rate: string;\n created: string;\n user_id: string;\n comment?: string;\n}\n\n/**\n * The dictionary of label/value pairs\n\n */\nexport type DatasetVersionSummaryLabels = { [key: string]: any };\n\n/**\n * The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). 
A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type DatasetVersionSummaryProcessingStage =\n (typeof DatasetVersionSummaryProcessingStage)[keyof typeof DatasetVersionSummaryProcessingStage];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetVersionSummaryProcessingStage = {\n COPYING: \"COPYING\",\n FAILED: \"FAILED\",\n FORMATTING: \"FORMATTING\",\n LOADING: \"LOADING\",\n DELETING: \"DELETING\",\n DONE: \"DONE\",\n} as const;\n\nexport interface DatasetVersionSummary {\n /** The owner of the Dataset version\n */\n owner?: string;\n /** The source of the Dataset - typically the name of the Dataset that was uploaded or a URL reference\n */\n source_ref: string;\n /** The date and time the Dataset was uploaded (an ISO-8601 formatted string in UTC)\n */\n published: string;\n /** The Dataset MIME type\n */\n type: string;\n /** The filename of the Dataset\n */\n file_name: string;\n /** The list of Project's the Dataset is attached to\n */\n projects: string[];\n /** Any extra variables passed in during the upload and handed to the format-support container.\n */\n format_extra_variables?: string;\n /** The version of the dataset\n */\n version: number;\n /** The name of the Pod handling the creation of the Dataset\n */\n creator_pod_name?: string;\n /** The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage: DatasetVersionSummaryProcessingStage;\n /** If this is a Deleted dataset this is the date and time the dataset was deleted (an ISO-8601 formatted string in UTC)\n */\n deleted?: string;\n /** If this is a Deleted dataset this is the username of the user that deleted the Dataset\n */\n deleting_user?: string;\n /** The dictionary of label/value pairs\n */\n labels?: DatasetVersionSummaryLabels;\n /** The size, in bytes, of the formatted Dataset\n */\n size?: number;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n}\n\nexport interface DatasetVersionProjectFile {\n project_name: string;\n project: string;\n files: string[];\n}\n\n/**\n * The dictionary of label/value pairs\n\n */\nexport type DatasetVersionDetailLabels = { [key: string]: any };\n\n/**\n * The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). 
A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type DatasetVersionDetailProcessingStage =\n (typeof DatasetVersionDetailProcessingStage)[keyof typeof DatasetVersionDetailProcessingStage];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetVersionDetailProcessingStage = {\n COPYING: \"COPYING\",\n FAILED: \"FAILED\",\n FORMATTING: \"FORMATTING\",\n LOADING: \"LOADING\",\n DELETING: \"DELETING\",\n DONE: \"DONE\",\n} as const;\n\nexport interface DatasetVersionDetail {\n /** The owner of the Dataset version\n */\n owner: string;\n /** The source of the Dataset - typically the name of the Dataset that was uploaded or a URL reference\n */\n source_ref: string;\n /** The date and time the Dataset was uploaded (an ISO-8601 formatted string in UTC)\n */\n published: string;\n /** The Dataset MIME type\n */\n type: string;\n /** The filename of the Dataset\n */\n file_name: string;\n /** The list of\n */\n project_files: DatasetVersionProjectFile[];\n /** The list of Project's the Dataset is attached to\n */\n projects: string[];\n /** Any extra variables passed in during the upload and handed to the format-support container.\n */\n format_extra_variables?: string;\n /** The version of the dataset\n */\n version: number;\n /** The name of the Pod handling the creation of the Dataset\n */\n creator_pod_name?: string;\n /** The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage: DatasetVersionDetailProcessingStage;\n /** If this is a Deleted dataset this is the date and time the dataset was deleted (an ISO-8601 formatted string in UTC)\n */\n deleted?: string;\n /** If this is a Deleted dataset this is the username of the user that deleted the Dataset\n */\n deleting_user?: string;\n /** The dictionary of label/value pairs\n */\n labels?: DatasetVersionDetailLabels;\n /** The size, in bytes, of the formatted Dataset\n */\n size?: number;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n}\n\nexport interface DatasetSummary {\n /** The Dataset ID\n */\n dataset_id: string;\n /** The list of editors\n */\n editors: string[];\n /** The set of separate versions of the Dataset\n */\n versions: DatasetVersionSummary[];\n}\n\nexport interface DatasetDetail {\n /** The Dataset ID\n */\n dataset_id: string;\n /** The list of editors\n */\n editors: string[];\n /** The owner of the Dataset\n */\n owner: string;\n /** The set of separate versions of the Dataset\n */\n versions: DatasetVersionDetail[];\n}\n\nexport interface ApplicationSummary {\n /** The application name, the value of the Kubernetes **Custom Resource Definition** `spec.names.kind` property\n */\n kind: string;\n /** The application group\n */\n group?: string;\n /** The application unique reference, the value of the Kubernetes **Custom Resource Definition** `metadata.name` property\n */\n application_id: string;\n /** The application's latest version as declared in The **Custom Resource Definition**\n */\n latest_version: string;\n}\n\nexport interface ApplicationImageVariant {\n name: string;\n image: string;\n}\n\nexport interface ApplicationImageVariants {\n public?: ApplicationImageVariant[];\n}\n\n/**\n * The REST method used. 
GET methods are not logged\n\n */\nexport type ApiLogDetailMethod =\n (typeof ApiLogDetailMethod)[keyof typeof ApiLogDetailMethod];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const ApiLogDetailMethod = {\n DELETE: \"DELETE\",\n PATCH: \"PATCH\",\n POST: \"POST\",\n PUT: \"PUT\",\n} as const;\n\nexport interface ApiLogDetail {\n /** The REST method used. GET methods are not logged\n */\n method: ApiLogDetailMethod;\n /** The API path used, e.g. `/dataset`\n */\n path: string;\n /** The date/time the API call began\n */\n began: string;\n /** The REST method approximate execution time (nS) in a humanised form, where 7969400 is presented as 7,969,400\n */\n duration_ns?: string;\n /** The HTTP response status code\n */\n status_code?: number;\n /** A JSON string representing the in-query properties used in the call\n */\n params?: string;\n /** A JSON string representing the in-body properties used in the call\n */\n body?: string;\n /** Not all API calls using your user ID may have been executed by you, In cases where an administrator has executed a REST API call on your behalf, their user ID will be revealed using this property\n */\n impersonator?: string;\n}\n\nexport interface VersionGetResponse {\n /** The Data Manager version. This is guaranteed to be a valid semantic version for official (tagged) images. The version value format for unofficial images is a string but otherwise undefined\n */\n version: string;\n}\n\nexport interface UsersGetResponse {\n /** A list of Users that have used the Data Manager\n */\n users: UserSummary[];\n}\n\nexport interface UserApiLogGetResponse {\n /** A list of API loc call records, with the oldest record first in the list\n */\n api_log: ApiLogDetail[];\n}\n\nexport type UserAccountGetResponse = UserAccountDetail;\n\nexport interface TypesGetResponse {\n /** A list of available MIME types\n */\n types: TypeSummary[];\n}\n\nexport interface TasksGetResponse {\n /** A list of Tasks\n */\n tasks: TaskSummary[];\n}\n\n/**\n * The purpose of the task. Tasks are typically executed for the purpose of Dataset processing, attaching as Project Files or Application Instance execution. Other fields in this object are only valid if the purpose is known (i.e. is not `UNKNOWN`)\n\n */\nexport type TaskGetResponsePurpose =\n (typeof TaskGetResponsePurpose)[keyof typeof TaskGetResponsePurpose];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskGetResponsePurpose = {\n DATASET: \"DATASET\",\n FILE: \"FILE\",\n INSTANCE: \"INSTANCE\",\n PROJECT: \"PROJECT\",\n} as const;\n\nexport interface TaskGetResponse {\n /** The purpose of the task. Tasks are typically executed for the purpose of Dataset processing, attaching as Project Files or Application Instance execution. Other fields in this object are only valid if the purpose is known (i.e. is not `UNKNOWN`)\n */\n purpose: TaskGetResponsePurpose;\n /** The identity of the purpose, where available. This will be the Dataset UUID if the purpose of the task is/was for Dataset processing.\n */\n purpose_id: string;\n /** The version number, relating to the object under control. For Datasets this will be the Dataset version.\n */\n purpose_version?: number;\n /** Is the Task Purpose is `INSTANCE`, and the instance was given a `specification` the specification can be found here. 
For **Applications** the specification is returned verbatim.\n */\n instance_specification?: string;\n /** If a container image is launched by the task the image name is available here\n */\n image?: string;\n /** The date and time the task was created\n */\n created: string;\n /** True if the task has run to completion. If the task finished successfully the `exit_code` will be zero.\n */\n done: boolean;\n /** True if the Task relates to an object removal, i.e. a DELETE\n */\n removal?: boolean;\n /** Present when `done` and zero if the task finished successfully.\n */\n exit_code?: number;\n /** A (possibly empty) list of application states, the oldest state occupies the first position in the list.\n */\n states?: TaskState[];\n /** A (possibly empty) list of application events. The oldest event occupies the first position in the list.\n */\n events?: TaskEvent[];\n}\n\nexport interface ServiceErrorsGetResponse {\n /** A list of service errors\n */\n service_errors: ServiceErrorSummary[];\n}\n\nexport interface ProjectsGetResponse {\n projects: ProjectDetail[];\n}\n\nexport interface ProjectPostResponse {\n /** The project identity\n */\n project_id: string;\n}\n\nexport type ProjectGetResponse = ProjectDetail;\n\nexport type ProjectDeleteResponse = TaskIdentity;\n\nexport interface JobsGetResponse {\n jobs: JobSummary[];\n}\n\n/**\n * The optional container image type. Typically a single-container `SIMPLE` (where only one container runs) or a workflow where multiple container images can be spawned (like `NEXTFLOW`)\n\n */\nexport type JobGetResponseImageType =\n (typeof JobGetResponseImageType)[keyof typeof JobGetResponseImageType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const JobGetResponseImageType = {\n SIMPLE: \"SIMPLE\",\n NEXTFLOW: \"NEXTFLOW\",\n} as const;\n\nexport interface JobGetResponse {\n /** The Job's unique ID\n */\n id: number;\n application: ApplicationSummary;\n /** The Job collection\n */\n collection: string;\n /** The Job name, unique within a given collection\n */\n job: string;\n /** The Job version\n */\n version: string;\n /** The Job command (usually encoded)\n */\n command: string;\n /** The Job command encoding\n */\n command_encoding: string;\n /** The name of the container image that houses the job\n */\n image_name: string;\n /** The container image tag\n */\n image_tag: string;\n /** The directory where the project volume will be mounted. The root path to the project files\n */\n image_project_directory: string;\n /** The directory used as the container image working directory (if defined)\n */\n image_working_directory?: unknown;\n /** The optional container image type. Typically a single-container `SIMPLE` (where only one container runs) or a workflow where multiple container images can be spawned (like `NEXTFLOW`)\n */\n image_type?: JobGetResponseImageType;\n /** The Job's descriptive name\n */\n name: string;\n /** English description of the job\n */\n description?: string;\n /** A URL linking to the Job documentation\n */\n doc_url?: string;\n /** The Job's category\n */\n category?: string;\n /** The list of keywords assigned to the Job\n */\n keywords?: string[];\n variables?: JobVariables;\n exchange_rate: string;\n /** A list of Account Server assets names required to run the Job. You need access to these assets in order to run the Job\n */\n required_assets: string[];\n /** A list of Account Server \"required_assets\" that cannot be found. 
If assets cannot be found the user will receive a \"disabled_reason\" that should explain the problem.\n */\n missing_assets?: string[];\n /** True if disabled. Disabled Jobs cannot be executed. If disabled a reason will be found in `disabled_reason`\n */\n disabled: boolean;\n /** A reason why the Job has been disabled.\n */\n disabled_reason?: string;\n /** A suggested remedy that accompanies the \"disabled_reason\". This provides the client with advice on how to avoid the reason that the Job's been disabled, for reasons that can be avoided (not all are).\n\nWhere there is no remedy for a given reason the remedy will be \"There is no remedy\".\n */\n disabled_remedy?: string;\n replaces?: JobReplacements;\n replaced_by?: JobReplacements;\n}\n\nexport type InstanceTaskPurpose =\n (typeof InstanceTaskPurpose)[keyof typeof InstanceTaskPurpose];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceTaskPurpose = {\n CREATE: \"CREATE\",\n DELETE: \"DELETE\",\n} as const;\n\nexport interface InstanceTask {\n id: string;\n purpose: InstanceTaskPurpose;\n}\n\nexport interface InstancesGetResponse {\n instances: InstanceSummary[];\n}\n\n/**\n * The Job container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type InstanceGetResponseJobImageType =\n (typeof InstanceGetResponseJobImageType)[keyof typeof InstanceGetResponseJobImageType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceGetResponseJobImageType = {\n SIMPLE: \"SIMPLE\",\n NEXTFLOW: \"NEXTFLOW\",\n} as const;\n\n/**\n * The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparing to run, is `PENDING`. The instance is running when the phase is `RUNNING`. `COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccessfully.\n\n */\nexport type InstanceGetResponsePhase =\n (typeof InstanceGetResponsePhase)[keyof typeof InstanceGetResponsePhase];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceGetResponsePhase = {\n COMPLETED: \"COMPLETED\",\n CRASH_LOOP_BACKOFF: \"CRASH_LOOP_BACKOFF\",\n FAILED: \"FAILED\",\n IMAGE_PULL_BACKOFF: \"IMAGE_PULL_BACKOFF\",\n PENDING: \"PENDING\",\n RUNNING: \"RUNNING\",\n SUCCEEDED: \"SUCCEEDED\",\n UNKNOWN: \"UNKNOWN\",\n} as const;\n\n/**\n * The type of Application, which can be a `job` or an `application`\n\n */\nexport type InstanceGetResponseApplicationType =\n (typeof InstanceGetResponseApplicationType)[keyof typeof InstanceGetResponseApplicationType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceGetResponseApplicationType = {\n APPLICATION: \"APPLICATION\",\n JOB: \"JOB\",\n} as const;\n\nexport interface InstanceGetResponse {\n /** The Project the Instance is running in\n */\n project_id: string;\n /** The application name\n */\n name: string;\n /** True if the instance is archived (protected from automatic deletion)\n */\n archived: boolean;\n /** The type of Application, which can be a `job` or an `application`\n */\n application_type: InstanceGetResponseApplicationType;\n /** The application ID\n */\n application_id: string;\n /** The application version\n */\n application_version: string;\n /** The optional application specification. 
For **Applications** this is returned verbatim. For **Jobs** additional material will be found.\n */\n application_specification?: string;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The application owner, the person who launched the application and is the only user than can stop it.\n */\n owner: string;\n /** The date and time the instance was started, an ISO-8601 format string.\n */\n launched: string;\n /** The date and time the instance started running in the cluster. This is typically close to the launch time but contention may mean the instance starts only when resources are available.\n */\n started?: string;\n /** The date and time the instance stopped, an ISO-8601 format string.\n */\n stopped?: string;\n /** The time the instance has spent running in the cluster. It's a string representation of a Python timedelta object, e.g. `0:12:32` for a run-time of 12 minutes and 32 seconds. The run-time must be considered as an _estimate_ until the instance has stopped and the instance is only considered to be running once `started` has been set.\n */\n run_time: string;\n /** The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparing to run, is `PENDING`. The instance is running when the phase is `RUNNING`. `COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccessfully.\n */\n phase: InstanceGetResponsePhase;\n /** The application endpoint\n */\n url?: string;\n /** If the instance relates to a job, this will be the job collection, as defined in the original collection's job definition.\n */\n job_collection?: string;\n /** If the instance relates to a job, this will be the job, as defined in the original collection's job definition.\n */\n job_job?: string;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job definition.\n */\n job_name?: string;\n /** The Job definition's unique ID\n */\n job_id?: number;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job definition.\n */\n job_version?: string;\n /** The Job container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n job_image_type?: InstanceGetResponseJobImageType;\n /** The JSON string representation of the JobDefinition's outputs\n */\n outputs?: string;\n /** Set if the instance output file permissions are expected to be fixed by the Data Manager when the Instance completes.\n */\n fix_permissions?: boolean;\n /** A list of Tasks related to the application. Tasks are responsible for creating and deleting the instance. You should fins at least one, assuming the corresponding task has not been deleted.\n */\n tasks: InstanceTask[];\n /** Set if the instance has a valid callback token. Instances with a valid token allow users to read files form the project without authentication\n */\n has_valid_callback_token: boolean;\n /** The cost exchange rate that applies to the Job at the time of launch. This is a string representation of a Decimal, e.g. `'0.5'`\n */\n launch_exchange_rate?: string;\n /** The accumulated cost accrued by the running Job. Depending on the the Job, this is either known when the Job completes or may change as the Job runs. 
This is a string representation of a Decimal value, e.g. `'32.8'`\n */\n cost?: string;\n /** The accumulated coins accrued by the running Job. Coins are calculated based on the `cost` multiplied by the `launch_exchange_rate`. This is a string representation of a Decimal value, e.g. `'16.4'`\n */\n coins?: string;\n /** Where available, this is the number of hours that the JOb would take if the collateral Pods had access to only one CPU core. Collateral Pods are all the pods created by the Job that are in addition to the main (controlling) Pod. A value of `'0.5'` implies the job would have run in 30 minutes on 1 core. The minimum time resolution is 0.001 (3.6 seconds).\n */\n collateral_cpu_hours?: string;\n /** Where available, the number of collateral Pods spawned by the instance, typically used by Job instances. Collateral Pods are all the pods created by the Job that are in addition to the main (controlling) Pod. For `NEXTFLOW` jobs this is a count of the number of Task pods spawned.\n */\n collateral_pod_count?: number;\n /** An optional error message, used to report underlying problems.\n */\n error_message?: string;\n}\n\nexport type GetExchangeRatesResponseId = number | string;\n\nexport interface GetExchangeRatesResponse {\n id: GetExchangeRatesResponseId;\n exchange_rates: ExchangeRateDetail[];\n}\n\nexport type GetAllExchangeRatesResponseExchangeRatesItem =\n | ApplicationExchangeRateSummary\n | JobExchangeRateSummary;\n\nexport interface GetAllExchangeRatesResponse {\n only_undefined: boolean;\n exchange_rates: GetAllExchangeRatesResponseExchangeRatesItem[];\n}\n\nexport interface FilesGetResponse {\n /** The project\n */\n project_id: string;\n /** The project path\n */\n path: string;\n /** The dataset identity (not its name). A unique reference assigned automatically when uploaded\n */\n files: FilePathFile[];\n /** Sub-directories in the current path\n */\n paths: string[];\n}\n\nexport interface FilePostResponse {\n /** The Project File identity, assigned automatically when a Dataset is added to a Project\n */\n file_id: string;\n /** The name of the File that will appear in the Project\n */\n file_name: string;\n /** The path to the file in the Project, relative to the volume root (mount point). Files in the root of the project will have a path value of '/'\n */\n file_path: string;\n /** The File task identity. The task assigned to convert and attach the Dataset File to the Project\n */\n task_id: string;\n}\n\nexport interface InstancePostResponse {\n /** The application instance identity (not its name). Assigned automatically when created\n */\n instance_id: string;\n /** A token that can be used to access data in the project without further authentication. The token expires automatically or if the user revokes it. The token is only provided if asked for when the instance is launched.\n */\n callback_token?: string;\n /** The instance task identity. The task assigned to process the instance\n */\n task_id: string;\n /** The instance expanded command. Applies only to Job instances.\n */\n command?: string;\n}\n\nexport interface InstanceDryRunPostResponse {\n /** A token that can be used to access data in the project without further authentication. The token expires automatically or if the user revokes it.\n */\n callback_token?: string;\n /** The instance expanded command. 
Applies only to Job instances.\n */\n command?: string;\n}\n\nexport type InstanceDeleteResponse = TaskIdentity;\n\nexport interface DatasetsGetResponse {\n datasets: DatasetSummary[];\n}\n\nexport type DatasetVersionsGetResponse = DatasetDetail;\n\nexport type DatasetVersionDeleteResponse = TaskIdentity;\n\n/**\n * The Metadata type (an object)\n\n */\nexport type DatasetSchemaGetResponseType =\n (typeof DatasetSchemaGetResponseType)[keyof typeof DatasetSchemaGetResponseType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetSchemaGetResponseType = {\n object: \"object\",\n} as const;\n\nexport interface DatasetSchemaGetResponse {\n /** The Metadata title\n */\n title: string;\n /** The Metadata description\n */\n description: string;\n /** The Metadata type (an object)\n */\n type: DatasetSchemaGetResponseType;\n /** Required properties\n */\n required: string[];\n [key: string]: any;\n}\n\nexport interface DatasetMetaGetResponse {\n /** The Metadata title\n */\n dataset_name: string;\n /** The Metadata description\n */\n dataset_id: string;\n /** The Metadata type (an object)\n */\n description: string;\n /** The date and time of creation\n */\n created: string;\n /** The date and time it was last updated\n */\n last_updated: string;\n /** The user who created the Metadata\n */\n created_by: string;\n /** The Metadata version\n */\n metadata_version: string;\n /** The Metadata's annotations\n */\n annotations: unknown[];\n /** The Metadata's labels\n */\n labels: unknown[];\n [key: string]: any;\n}\n\nexport interface DatasetDigestGetResponse {\n /** The Dataset matching the digest\n */\n dataset_id: string;\n /** The Dataset version\n */\n dataset_version: number;\n}\n\nexport interface DatasetPutPostResponse {\n /** The dataset identity (not its name). Assigned automatically when uploaded\n */\n dataset_id: string;\n /** The dataset identity version. Assigned automatically when uploaded\n */\n dataset_version: number;\n /** The dataset task identity. The task assigned to process the dataset\n */\n task_id: string;\n}\n\nexport interface ApplicationsGetResponse {\n /** A list of installed applications, which are application-compliant Kubernetes \"operators\"\n */\n applications: ApplicationSummary[];\n}\n\nexport interface ApplicationGetResponse {\n /** The Application's unique ID\n */\n id: string;\n /** The Application COST exchange rate\n */\n exchange_rate: string;\n /** The application specification template. 
Used when creating application instances\n */\n template: string;\n /** A list of instances of the application\n */\n instances: string[];\n /** The application group\n */\n group: string;\n /** The name (kind) of the application\n */\n kind: string;\n /** The list of available versions\n */\n versions: string[];\n image_variants?: ApplicationImageVariants;\n}\n\nexport type AdminUserPutResponse = UserAccountDetail;\n\nexport interface AdminJobManifestLoadPutResponse {\n /** True if successful, false otherwise\n */\n status: boolean;\n /** The number of Job Manifests inspected\n */\n manifests_inspected: number;\n /** The number of Job Definitions inspected\n */\n job_definitions_inspected: number;\n /** The number of Jobs inspected\n */\n jobs_inspected: number;\n /** The number of Jobs purged\n */\n jobs_purged?: number;\n}\n\nexport interface AdminJobManifestGetResponse {\n /** The list of known Job manifests\n */\n job_manifests: JobManifestDetail[];\n}\n\nexport interface AccountServerGetNamespaceResponse {\n /** The configured Account Server namespace, which will be an empty string if one is not configured. The AS API is expected as the service `as-api` in this namespace.\n */\n namespace: string;\n}\n\nexport interface AccountServerGetRegistrationResponse {\n merchant_id: number;\n name: string;\n registered: string;\n}\n\nexport interface DmError {\n /** Brief error text that can be presented to the user\n */\n error: string;\n}\n"]}
|
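The schema chunk embedded above declares every enumeration as a const object plus a union type derived from its values (for example TaskSummaryProcessingStage), and its comments note that a Dataset is usable once its task has passed the FORMATTING stage. The following minimal sketch is not part of the package; the helper name is illustrative, and the type and stage values are copied from the schema content above.

// Const-object + derived-union pattern, as emitted in the generated schemas above.
const TaskSummaryProcessingStage = {
  COPYING: "COPYING",
  FAILED: "FAILED",
  FORMATTING: "FORMATTING",
  LOADING: "LOADING",
  DELETING: "DELETING",
  DONE: "DONE",
} as const;
type TaskSummaryProcessingStage =
  (typeof TaskSummaryProcessingStage)[keyof typeof TaskSummaryProcessingStage];

// A trimmed-down TaskSummary carrying only the fields used below.
interface TaskSummary {
  id: string;
  done: boolean;
  exit_code?: number;
  processing_stage?: TaskSummaryProcessingStage;
}

// Illustrative helper (not part of the package): per the schema comments, a
// Dataset can be used once its task is past FORMATTING, i.e. LOADING or DONE.
function datasetIsUsable(task: TaskSummary): boolean {
  return (
    task.processing_stage === TaskSummaryProcessingStage.LOADING ||
    task.processing_stage === TaskSummaryProcessingStage.DONE
  );
}

// Example: datasetIsUsable({ id: "t1", done: false, processing_stage: "LOADING" }) returns true.
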
package/index.d.ts
CHANGED
|
@@ -1,2 +1,2 @@
|
|
|
1
|
-
export {
|
|
1
|
+
export { bO as AXIOS_INSTANCE, bL as AccountServerGetNamespaceResponse, bM as AccountServerGetRegistrationResponse, A as AdminGetServiceErrorsParams, bK as AdminJobManifestGetResponse, bJ as AdminJobManifestLoadPutResponse, bI as AdminUserPutResponse, b1 as ApiLogDetail, b0 as ApiLogDetailMethod, aO as ApplicationExchangeRateSummary, bH as ApplicationGetResponse, a_ as ApplicationImageVariant, a$ as ApplicationImageVariants, aZ as ApplicationSummary, bG as ApplicationsGetResponse, ae as AsAdditionalDataProcessingCharge, aY as DatasetDetail, bE as DatasetDigestGetResponse, bD as DatasetMetaGetResponse, ac as DatasetMetaPostBodyBody, ad as DatasetPostBodyBody, aa as DatasetPutBodyBody, bF as DatasetPutPostResponse, bC as DatasetSchemaGetResponse, bB as DatasetSchemaGetResponseType, aX as DatasetSummary, bA as DatasetVersionDeleteResponse, aW as DatasetVersionDetail, aU as DatasetVersionDetailLabels, aV as DatasetVersionDetailProcessingStage, ab as DatasetVersionMetaPostBodyBody, aT as DatasetVersionProjectFile, aS as DatasetVersionSummary, aQ as DatasetVersionSummaryLabels, aR as DatasetVersionSummaryProcessingStage, bz as DatasetVersionsGetResponse, by as DatasetsGetResponse, m as DeleteDatasetParams, D as DeleteUnmanagedFileParams, bN as DmError, bS as ErrorType, aP as ExchangeRateDetail, a5 as ExchangeRatePutBodyBody, aM as FilePathFile, a9 as FilePostBodyBody, bu as FilePostResponse, aL as FileStat, bt as FilesGetResponse, i as GetAllApplicationExchangeRatesParams, bs as GetAllExchangeRatesResponse, br as GetAllExchangeRatesResponseExchangeRatesItem, d as GetAllJobExchangeRatesParams, h as GetApplicationExchangeRatesParams, o as GetDatasetsParams, bq as GetExchangeRatesResponse, bp as GetExchangeRatesResponseId, l as GetFilesParams, b as GetInstancesParams, f as GetJobByVersionParams, c as GetJobExchangeRatesParams, e as GetJobParams, g as GetJobsParams, q as GetProjectFileParams, p as GetProjectFileWithTokenParams, r as GetProjectsParams, j as GetTaskParams, k as GetTasksParams, a as GetUserAccountParams, G as GetUserApiLogParams, n as GetVersionsParams, bx as InstanceDeleteResponse, bw as InstanceDryRunPostResponse, bo as InstanceGetResponse, bn as InstanceGetResponseApplicationType, bl as InstanceGetResponseJobImageType, bm as InstanceGetResponsePhase, a8 as InstancePostBodyBody, bv as InstancePostResponse, aK as InstanceSummary, aJ as InstanceSummaryApplicationType, aH as InstanceSummaryJobImageType, aI as InstanceSummaryPhase, bj as InstanceTask, bi as InstanceTaskPurpose, bk as InstancesGetResponse, aG as JobApplication, aN as JobExchangeRateSummary, bh as JobGetResponse, bg as JobGetResponseImageType, aF as JobManifestDetail, a7 as JobManifestLoadPutBodyBody, a6 as JobManifestPutBodyBody, ay as JobOrderDetail, aw as JobReplacement, ax as JobReplacements, aE as JobSummary, aD as JobSummaryImageType, aC as JobVariables, aB as JobVariablesInputs, az as JobVariablesOptions, aA as JobVariablesOutputs, bf as JobsGetResponse, P as PatchInstanceParams, be as ProjectDeleteResponse, av as ProjectDetail, au as ProjectFileDetail, a4 as ProjectFilePutBodyBody, bd as ProjectGetResponse, a3 as ProjectPatchBodyBody, a2 as ProjectPostBodyBody, bc as ProjectPostResponse, bb as ProjectsGetResponse, $ as QApplicationIdParameter, _ as QCurrentParameter, Y as QDatasetMimeTypeParameter, Z as QDoNotImpersonateParameter, Q as QEditorsParameter, X as QEventLimitParameter, W as QEventPriorOrdinalParameter, V as QExcludeDoneParameter, T as QExcludePurposeParameter, U as QExcludeRemovalParameter, S as 
QFileParameter, R as QFilePathParameter, O as QFileProjectIdParameter, N as QFromParameter, M as QIncludeAcknowlegedParameter, L as QIncludeDeletedParameter, K as QIncludeHiddenParameter, J as QInstanceArchiveParameter, w as QInstanceCallbackContextParameter, I as QJobCollectionParameter, H as QJobIdParameter, F as QJobJobParameter, E as QJobVersionParameter, C as QKeepProjectFilesParameter, z as QLabelsParameter, B as QOnlyUndefinedParameter, s as QOwnersParameter, y as QProjectIdParameter, x as QProjectNameParameter, v as QTokenParameter, u as QUntilParameter, t as QUsernameParameter, at as ServiceErrorSummary, as as ServiceErrorSummarySeverity, ba as ServiceErrorsGetResponse, ar as TaskEvent, aq as TaskEventLevel, b9 as TaskGetResponse, b8 as TaskGetResponsePurpose, ap as TaskIdentity, ao as TaskState, an as TaskStateState, am as TaskSummary, al as TaskSummaryProcessingStage, b7 as TasksGetResponse, ak as TypeSummary, aj as TypeSummaryFormatterOptions, ai as TypeSummaryFormatterOptionsType, b6 as TypesGetResponse, ah as UserAccountDetail, b5 as UserAccountGetResponse, a1 as UserAccountPatchBodyBody, b4 as UserApiLogGetResponse, ag as UserDetail, a0 as UserPatchBodyBody, af as UserSummary, b3 as UsersGetResponse, b2 as VersionGetResponse, bR as customInstance, bP as setAuthToken, bQ as setBaseUrl } from './custom-instance-b01bd8f0.js';
|
|
2
2
|
import 'axios';
|
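The rewritten index.d.ts barrel re-exports the schema types and the runtime helpers (AXIOS_INSTANCE, customInstance, setAuthToken, setBaseUrl) from the renamed chunk custom-instance-b01bd8f0.js under their public names, so consumers keep importing from the package root. A small sketch of type-only consumption follows; the guard and formatting functions are illustrative and not part of the package API.

import type { DatasetSummary, DmError } from "@squonk/data-manager-client";

// Illustrative type guard over the DmError shape ({ error: string }) declared
// in the schemas above; not part of the package.
function isDmError(value: unknown): value is DmError {
  return (
    typeof value === "object" &&
    value !== null &&
    typeof (value as DmError).error === "string"
  );
}

// DatasetSummary carries the Dataset ID, its editors and its versions.
function describeDataset(dataset: DatasetSummary): string {
  return `${dataset.dataset_id}: ${dataset.versions.length} version(s), editors ${dataset.editors.join(", ")}`;
}
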
package/index.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../src/data-manager-api.schemas.ts"],"sourcesContent":["/**\n * Generated by orval v6.15.0 🍺\n * Do not edit manually.\n * Dataset Manager API\n * The Dataset Manager API service.\n\nA service that allows *registered* users to make **Datasets** and associated **Metadata** available to **Applications** and **Jobs** using **Projects** and **Files**.\n\n * OpenAPI spec version: 1.0\n */\nexport type AdminGetServiceErrorsParams = {\n /**\n * Set to include acknowledged items\n\n */\n include_acknowleged?: QIncludeAcknowlegedParameter;\n};\n\nexport type GetUserApiLogParams = {\n /**\n * A from (inclusive) date-time. If provided no API calls prior to this will be returned. UTC is assumed if no timezone is provided\n\n */\n from?: QFromParameter;\n /**\n * An until (exclusive) date-time. If provided only API calls made before this will be returned. UTC is assumed if no timezone is provided\n\n */\n until?: QUntilParameter;\n};\n\nexport type GetUserAccountParams = {\n /**\n * Set, if you're an admin, to call the endpoint without impersonation\n\n */\n do_not_impersonate?: QDoNotImpersonateParameter;\n};\n\nexport type PatchInstanceParams = {\n /**\n * True to archive the instance\n */\n archive?: QInstanceArchiveParameter;\n};\n\nexport type GetInstancesParams = {\n /**\n * A Project identity\n */\n project_id?: QProjectIdParameter;\n};\n\nexport type GetJobExchangeRatesParams = {\n /**\n * Set to get current\n\n */\n current?: QCurrentParameter;\n};\n\nexport type GetAllJobExchangeRatesParams = {\n /**\n * Only return records where the exchange rate is undefined\n\n */\n only_undefined?: QOnlyUndefinedParameter;\n};\n\nexport type GetJobParams = {\n /**\n * A Project identity\n */\n project_id?: QProjectIdParameter;\n};\n\nexport type GetJobByVersionParams = {\n /**\n * The Collection for a Job, i.e. \"im-test\"\n\n */\n collection: QJobCollectionParameter;\n /**\n * The Job, i.e. \"nop\"\n\n */\n job: QJobJobParameter;\n /**\n * The version of a Job, i.e. \"1.0.0\"\n\n */\n version: QJobVersionParameter;\n /**\n * A Project identity\n */\n project_id?: QProjectIdParameter;\n};\n\nexport type GetJobsParams = {\n /**\n * A Project identity\n */\n project_id?: QProjectIdParameter;\n};\n\nexport type GetApplicationExchangeRatesParams = {\n /**\n * Set to get current\n\n */\n current?: QCurrentParameter;\n};\n\nexport type GetAllApplicationExchangeRatesParams = {\n /**\n * Only return records where the exchange rate is undefined\n\n */\n only_undefined?: QOnlyUndefinedParameter;\n};\n\nexport type GetTaskParams = {\n /**\n * Maximum number of events to return. If provided, can be 1 or more.\n\n */\n event_limit?: QEventLimitParameter;\n /**\n * The ordinal of a previously received event. If set, only events subsequent to the ordinal provided will be returned. Providing a value of 0 will result in retrieving the first and subsequent events.\n\n */\n event_prior_ordinal?: QEventPriorOrdinalParameter;\n};\n\nexport type GetTasksParams = {\n /**\n * Set true if you want to exclude 'done' tasks, i.e. just see those that are still running.\n\n */\n exclude_done?: QExcludeDoneParameter;\n /**\n * Set true if you want to exclude Tasks related to object removal.\n\n */\n exclude_removal?: QExcludeRemovalParameter;\n /**\n * Set to a dot-separated string of purpose enumerations, i.e. `DATASET`, `FILE`, `INSTANCE`, or `PROJECT`. 
To exclude file and dataset tasks set this field to `FILE.DATASET`\n\n */\n exclude_purpose?: QExcludePurposeParameter;\n /**\n * A Project identity\n */\n project_id?: QProjectIdParameter;\n /**\n * An instance callback context string\n */\n instance_callback_context?: QInstanceCallbackContextParameter;\n};\n\nexport type DeleteUnmanagedFileParams = {\n /**\n * A project file.\n\n */\n file: QFileParameter;\n /**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\n path?: QFilePathParameter;\n /**\n * The Project identity\n */\n project_id: QFileProjectIdParameter;\n};\n\nexport type GetFilesParams = {\n /**\n * The Project identity\n */\n project_id: QFileProjectIdParameter;\n /**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\n path?: QFilePathParameter;\n /**\n * Whether to include hidden files and directories\n */\n include_hidden?: QIncludeHiddenParameter;\n};\n\nexport type DeleteDatasetParams = {\n /**\n * Whether to convert Project managed file instances to unmanaged files\n\n */\n keep_project_files?: QKeepProjectFilesParameter;\n};\n\nexport type GetVersionsParams = {\n /**\n * Whether to include records that are deleted\n */\n include_deleted?: QIncludeDeletedParameter;\n};\n\nexport type GetDatasetsParams = {\n /**\n * Whether to include records that are deleted\n */\n include_deleted?: QIncludeDeletedParameter;\n /**\n * Filter the datasets by username\n\n */\n username?: QUsernameParameter;\n /**\n * Filter the datasets by the supplied mime_type.\n\n */\n dataset_mime_type?: QDatasetMimeTypeParameter;\n /**\n * Filter the datasets by a comma separated list of owners\n\n */\n owners?: QOwnersParameter;\n /**\n * Filter the datasets by a comma separated list of editors\n\n */\n editors?: QEditorsParameter;\n /**\n * JSON string containing a list of label/value pairs for the datasets to be filtered by. If the value is set to null, then only the label is matched. If both the label and value are set, then both are matched. e.g. '{\"label1\": null, \"label2\": \"value2\"}'\n\n */\n labels?: QLabelsParameter;\n};\n\nexport type GetProjectFileWithTokenParams = {\n /**\n * A token\n */\n token?: QTokenParameter;\n /**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\n path?: QFilePathParameter;\n /**\n * A project file.\n\n */\n file: QFileParameter;\n};\n\nexport type GetProjectFileParams = {\n /**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\n path?: QFilePathParameter;\n /**\n * A project file.\n\n */\n file: QFileParameter;\n};\n\nexport type GetProjectsParams = {\n /**\n * A Project name\n */\n project_name?: QProjectNameParameter;\n};\n\n/**\n * Filter the datasets by a comma separated list of editors\n\n */\nexport type QEditorsParameter = string;\n\n/**\n * Filter the datasets by a comma separated list of owners\n\n */\nexport type QOwnersParameter = string;\n\n/**\n * Filter the datasets by username\n\n */\nexport type QUsernameParameter = string;\n\n/**\n * An until (exclusive) date-time. If provided only API calls made before this will be returned. 
UTC is assumed if no timezone is provided\n\n */\nexport type QUntilParameter = string;\n\n/**\n * A token\n */\nexport type QTokenParameter = string;\n\n/**\n * An instance callback context string\n */\nexport type QInstanceCallbackContextParameter = string;\n\n/**\n * A Project name\n */\nexport type QProjectNameParameter = string;\n\n/**\n * A Project identity\n */\nexport type QProjectIdParameter = string;\n\n/**\n * JSON string containing a list of label/value pairs for the datasets to be filtered by. If the value is set to null, then only the label is matched. If both the label and value are set, then both are matched. e.g. '{\"label1\": null, \"label2\": \"value2\"}'\n\n */\nexport type QLabelsParameter = string;\n\n/**\n * Only return records where the exchange rate is undefined\n\n */\nexport type QOnlyUndefinedParameter = boolean;\n\n/**\n * Whether to convert Project managed file instances to unmanaged files\n\n */\nexport type QKeepProjectFilesParameter = boolean;\n\n/**\n * The version of a Job, i.e. \"1.0.0\"\n\n */\nexport type QJobVersionParameter = string;\n\n/**\n * The Job, i.e. \"nop\"\n\n */\nexport type QJobJobParameter = string;\n\n/**\n * A Job identity\n\n */\nexport type QJobIdParameter = number;\n\n/**\n * The Collection for a Job, i.e. \"im-test\"\n\n */\nexport type QJobCollectionParameter = string;\n\n/**\n * True to archive the instance\n */\nexport type QInstanceArchiveParameter = boolean;\n\n/**\n * Whether to include hidden files and directories\n */\nexport type QIncludeHiddenParameter = boolean;\n\n/**\n * Whether to include records that are deleted\n */\nexport type QIncludeDeletedParameter = boolean;\n\n/**\n * Set to include acknowledged items\n\n */\nexport type QIncludeAcknowlegedParameter = boolean;\n\n/**\n * A from (inclusive) date-time. If provided no API calls prior to this will be returned. UTC is assumed if no timezone is provided\n\n */\nexport type QFromParameter = string;\n\n/**\n * The Project identity\n */\nexport type QFileProjectIdParameter = string;\n\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\nexport type QFilePathParameter = string;\n\n/**\n * A project file.\n\n */\nexport type QFileParameter = string;\n\n/**\n * Set to a dot-separated string of purpose enumerations, i.e. `DATASET`, `FILE`, `INSTANCE`, or `PROJECT`. To exclude file and dataset tasks set this field to `FILE.DATASET`\n\n */\nexport type QExcludePurposeParameter = string;\n\n/**\n * Set true if you want to exclude Tasks related to object removal.\n\n */\nexport type QExcludeRemovalParameter = boolean;\n\n/**\n * Set true if you want to exclude 'done' tasks, i.e. just see those that are still running.\n\n */\nexport type QExcludeDoneParameter = boolean;\n\n/**\n * The ordinal of a previously received event. If set, only events subsequent to the ordinal provided will be returned. Providing a value of 0 will result in retrieving the first and subsequent events.\n\n */\nexport type QEventPriorOrdinalParameter = number;\n\n/**\n * Maximum number of events to return. 
If provided, can be 1 or more.\n\n */\nexport type QEventLimitParameter = number;\n\n/**\n * Filter the datasets by the supplied mime_type.\n\n */\nexport type QDatasetMimeTypeParameter = string;\n\n/**\n * Set, if you're an admin, to call the endpoint without impersonation\n\n */\nexport type QDoNotImpersonateParameter = boolean;\n\n/**\n * Set to get current\n\n */\nexport type QCurrentParameter = boolean;\n\n/**\n * An Application identity\n\n */\nexport type QApplicationIdParameter = string;\n\nexport type UserPatchBodyBody = {\n /** If set to a message the user account is suspended, with the user receiving this message when they try and use the API. A suspended user account can be restored by setting the message to `/restore`\n */\n suspend_message?: string;\n};\n\nexport type UserAccountPatchBodyBody = {\n /** If set the user account becomes private, if provided but false the user account becomes public. Public Users show up in user searches\n */\n private?: boolean;\n /** For **admin** accounts, if set the user account is able to read anything, i.e. `GET` API calls (i.e. endpoints that do not change the Data Manager state) behave as though the caller is acting as *everyone*. An **admin** user would set ths parameter in order to browse the system, and then switch to `impersonate` mode in order to change things as the chosen user\n */\n become_admin?: boolean;\n /** For **admin** accounts, if set API calls behave as though the caller is the user being impersonated. To stop impersonating set this to an empty string. To set impersonation to anything other than an empty string you must also set `become_admin`\n */\n impersonate?: string;\n /** For **admin** accounts, if this is set the account for the user being impersonated is patched, rather then the user's own account. To use this you must have a value for `impersonate`\n */\n use_impersonation?: boolean;\n};\n\nexport type ProjectPostBodyBody = {\n name: string;\n /** Whether the project is private. You may not be permitted to make the project private, that will depend on the project product you're using to create the project\n */\n private?: boolean;\n /** The Data Manager *Tier Product ID* you're using to create the Project\n */\n tier_product_id: string;\n};\n\nexport type ProjectPatchBodyBody = {\n private?: boolean;\n /** The new name of the ptojct\n */\n name?: string;\n};\n\nexport type ProjectFilePutBodyBody = {\n file: Blob;\n /** An alternative filename to use for the uploaded File\n */\n as_filename?: string;\n /** The Project path of the file.\n */\n path?: string;\n};\n\nexport type ExchangeRatePutBodyBody = {\n /** A decimal value used as the new Exchange Rate. Application _raw_ **costs** are multiplied by this value to covert costs to **coins**. A string is used to avoid rounding errors. Internally the value is treated as a Python Decimal.\n */\n rate: string;\n /** A brief comment relating to the new rate\n */\n comment?: string;\n};\n\nexport type JobManifestPutBodyBody = {\n /** The URL of the Job Manifest */\n url: string;\n /** Optional URL header values (a JSON string) */\n header?: string;\n /** Optional URL parameter values (a JSON string) */\n params?: string;\n};\n\nexport type JobManifestLoadPutBodyBody = {\n /** Set to remove all pre-existing Job Definitions that are not present in the existing manifests after the load is complete.\n\nJobs in the collection `im-test` are not removed */\n purge?: boolean;\n};\n\nexport type InstancePostBodyBody = {\n /** A supported application. 
Applications instances are managed using pre-deployed Kubernetes **Operators**. The application ID is a combination of the operator _plural_ and _group_.\n\nApplication IDs currently supported include `datamanagerjobs.squonk.it` and `jupyternotebooks.squonk.it`\n */\n application_id: string;\n /** A supported application version to launch.\n\nThis property is **Deprecated**. It is currently ignored will be removed in a future release\n */\n application_version?: string;\n /** The project to attach\n */\n project_id: string;\n /** The name to use for the instance\n */\n as_name: string;\n /** A URL the DM will use to PUT job progress messages as the requested instance runs. Used, at the moment, for Job execution\n */\n callback_url?: string;\n /** Used in conjunction with the `callback_url` any value provided here will be passed back in the message payload that's delivered to the callback URL. It can be used by the recipient to provide a context that's meaningful\n */\n callback_context?: string;\n /** If set a callback token will be provided in the response. The token allows files to be downloaded from the instance project and expires after a pre-configured amount of time after the instance is complete or if the user revokes the token.\n\nCaution should be taken using this feature. A 3rd party can access the Project's files without authentication, they just need the token and the project identity.\n\nTokens should therefore be revoked when they're no longer required\n */\n generate_callback_token?: boolean;\n /** An optional 22-character **sortuuid** callback token that is supplied by the remote service. If not provided the user can use `generate_callback_token` to have one generated and returned in the response.\n\nSee the Python module's `shortuuid.get_alphabet()` for the full list of permitted characters\n */\n callback_token?: string;\n /** A debug value that may be used by the instance.\n\nFor Data Manager **Job** applications setting this to anything other zero ('0') prevents the Job's Pod from being deleted automatically, allowing a developer to inspect the Pod's log for example.\n\nThe behaviour of **Application** instances using this property is undefined. It will depend on whether the application **CRD** handles the Data Manager debug field.\n */\n debug?: string;\n /** The instance specification. A JSON string that's application-specific and controls the application's behaviour.\n\nWhen launching a Data Manager **Job** Application you must identify the Job using the properties `collection`, `job` and `version`, e.g. `{\"collection\":\"im-test\",\"job\":\"nop\",\"version\":\"1.0.0\"}`\n\nJobs that offer commands will often advertise a series of **inputs** and **options** where the values can be provided using a **variables** map in the specification. Something like `\"variables\":{\"x\":7}`.\n\nJobs start in a Job-specific **working directory** but the starting directory for any Job can be adjusted by defining a `sub_path` to the root specification. For example, if you want the Job to start in the path `foo/bar` (inside the Job's built-in working directory) you can add `\"sub_path\":\"foo/bar\"` to the specification. You can only use a sub-path for a Job if the Job defines a working directory and `sub-path` cannot begin or end with a path separator (`/`).\n\nApplications also support `variables`. The Jupyter notebook application supports the definition of the notebook `\"image\"`, `\"cpu\"` and `\"memory\"`. 
A full Jupyter notebook specification might be `{\"variables\":{\"image\":\"jupyter/tensorflow-notebook:tensorflow-2.9.1\",\"cpu\":2,\"memory\":\"4Gi\"}}`, where `memory` is limited to `Gi` as a suffix.\n */\n specification?: string;\n};\n\nexport type FilePostBodyBody = {\n /** The Dataset UUID for the File that you intend to attach\n */\n dataset_id: string;\n /** The Dataset version to attach\n */\n dataset_version: number;\n /** The Project UUID you're attaching to\n */\n project_id: string;\n /** The desired Dataset file type (a MIME type). Whether or not the chosen fileType is supported will depend on the Dataset\n */\n as_type: string;\n /** A path within the Project to add the File, default is the project root ('/'), the mount-point within the application container. Paths must begin '/'\n */\n path?: string;\n /** Whether to compress the Dataset File as it's attached. Compression is achieved using gzip, resulting in a File ending `.gz`. By default the file will be compressed\n */\n compress?: boolean;\n /** Whether the Dataset File can be modified while in the Project. By default the File cannot be modified\n */\n immutable?: boolean;\n};\n\nexport type DatasetPutBodyBody = {\n /** The MIME type of the Dataset\n */\n dataset_type: string;\n /** Extra variables (text) presented to the Dataset format-support container, which occurs during the upload and database processing stage. The content of the text is Dataset type specific.\n */\n format_extra_variables?: string;\n /** If set the post-format database load action is skipped. This means the dataset is uploaded but its molecules are not entered into the Data Manager database. This can save significant time if you're just experimenting with the dataset and do not need the molecules in the database\n */\n skip_molecule_load?: boolean;\n /** The Project the file belongs to\n */\n project_id: string;\n /** The Project path of the file.\n */\n path: string;\n /** The file name of the file in the Project path to load as a new Dataset.\n */\n file_name: string;\n /** If provided the File becomes a new version of the Dataset named. If not provided this File becomes the first version of a new Dataset, whose ID is returned to you on success.\n */\n dataset_id?: string;\n /** The Organisational Unit you want the Dataset to belong to. If not supplied the Project Unit is used\n */\n unit_id?: string;\n};\n\nexport type DatasetVersionMetaPostBodyBody = {\n /** JSON string containing a list of parameter changes to the metadata. Only the description is currently allowed.\n */\n meta_properties?: string;\n /** JSON string containing a list of annotations. The format of the labels should match either the Fields Descriptor or Service Execution annotation formats described in the data-manager-metadata library.\n */\n annotations?: string;\n};\n\nexport type DatasetMetaPostBodyBody = {\n /** JSON string containing a list of parameter changes to the metadata. Only the description is currently allowed.\n */\n meta_properties?: string;\n /** JSON string containing a list of labels. The format of the labels should match the label annotation format described in the data-manager-metadata library.\n */\n labels?: string;\n};\n\nexport type DatasetPostBodyBody = {\n dataset_file: Blob;\n /** The MIME type of the Dataset. Values like `chemical/x-mdl-sdfile`, `chemical/x-mdl-molfile`, and `chemical/x-pdb` are permitted. 
See the **\\/type** endpoint for a full list of types.\n */\n dataset_type: string;\n /** Extra variables (text) presented to the Dataset format-support container, which occurs during the upload and database processing stage. The content of the text is Dataset type specific.\n */\n format_extra_variables?: string;\n /** If set the post-format database load action is skipped. This means the dataset is uploaded but its molecules are not entered into the Data Manager database. This can save significant time if you're just experimenting with the dataset and do not need the molecules in the database\n */\n skip_molecule_load?: boolean;\n /** An optional new filename to use for the uploaded Dataset. The Dataset will be stored using this name.\n */\n as_filename?: string;\n /** If provided the Dataset becomes a new version of the Dataset named. If not provided this Dataset becomes the first version of a new Dataset, whose ID is returned to you on success.\n */\n dataset_id?: string;\n /** The Organisational Unit you want the Dataset to belong to\n */\n unit_id: string;\n};\n\nexport interface AsAdditionalDataProcessingCharge {\n cost: string;\n cost_to_coins_er: string;\n cost_scale_factor: string;\n instance_id: string;\n instance_name: string;\n started: string;\n stopped?: string;\n run_time?: string;\n error_message?: string;\n job_collection?: string;\n job_job?: string;\n job_version?: string;\n collateral_pod_count?: number;\n collateral_cpu_hours?: string;\n}\n\nexport interface UserSummary {\n /** The user's preferred username\n */\n username: string;\n}\n\nexport interface UserDetail {\n /** The user's preferred username\n */\n username: string;\n /** The date and time the user was first seen (an ISO-8601 formatted string in UTC)\n */\n first_seen?: string;\n /** Set if the user's account is marked as private. Private accounts do not show up against general queries.\n */\n private: boolean;\n /** True if the account is suspended\n */\n suspended?: boolean;\n /** If the account is suspended this typically displays a reason for suspension\n */\n suspension_message?: string;\n /** For admin accounts, whether the user is acting in an administrative capacity, i.e. acting as everyone\n */\n become_admin?: boolean;\n /** For admin accounts, whether the user is impersonating another user\n */\n impersonate?: string;\n}\n\nexport interface UserAccountDetail {\n user: UserDetail;\n /** Whether the caller has admin privilege */\n caller_has_admin_privilege: boolean;\n}\n\n/**\n * The Schema type (an object)\n\n */\nexport type TypeSummaryFormatterOptionsType =\n (typeof TypeSummaryFormatterOptionsType)[keyof typeof TypeSummaryFormatterOptionsType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TypeSummaryFormatterOptionsType = {\n object: \"object\",\n} as const;\n\n/**\n * If present, contains the formatter_options that can be entered in the format_extra_variables field in the POST /dataset api.\n\n */\nexport type TypeSummaryFormatterOptions = {\n /** The title of the Formatter object\n */\n title: string;\n /** The Schema type (an object)\n */\n type: TypeSummaryFormatterOptionsType;\n /** Required properties\n */\n required: string[];\n [key: string]: any;\n};\n\nexport interface TypeSummary {\n /** The File Type MIME\n */\n mime: string;\n /** The file's supported file extensions. Each type is limited to a limited number of extensions. 
For example, SDF files must have the extension `.sdf` (or `.sdf.gz`).\n */\n file_extensions: string[];\n /** The file's type's format-support container image (if set). Types without a format support image cannot be uploaded, but they might be available for use as destination type when a Dataset is added to a Project.\n */\n formatter_image?: string;\n /** True if Datasets uploaded using this type's support loading of data into the Data Manager data-base.\n */\n formatter_supports_db_load?: boolean;\n /** If present, contains the formatter_options that can be entered in the format_extra_variables field in the POST /dataset api.\n */\n formatter_options?: TypeSummaryFormatterOptions;\n}\n\n/**\n * The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type TaskSummaryProcessingStage =\n (typeof TaskSummaryProcessingStage)[keyof typeof TaskSummaryProcessingStage];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskSummaryProcessingStage = {\n COPYING: \"COPYING\",\n FAILED: \"FAILED\",\n FORMATTING: \"FORMATTING\",\n LOADING: \"LOADING\",\n DELETING: \"DELETING\",\n DONE: \"DONE\",\n} as const;\n\nexport interface TaskSummary {\n /** The Task UUID\n */\n id: string;\n /** If a container image is launched by the task the image name is available here\n */\n image?: string;\n /** The date and time the task was created\n */\n created: string;\n /** True if the task has run to completion. If the task finished successfully the `exit_code` will be zero.\n */\n done: boolean;\n /** True if the Task relates to an object removal, i.e. a DELETE\n */\n removal?: boolean;\n /** Present when `done` and zero if the task finished successfully.\n */\n exit_code?: number;\n /** The purpose for the Task. Tasks are responsible for _uploading_ a Dataset, _attaching_ a Dataset as a File to a project or launching an Applications _instance_.\n */\n purpose: string;\n /** The related object ID for the Task purpose. If the purpose relates to a **Dataset** upload the ID will be a Dataset ID.\n */\n purpose_id: string;\n /** The related object version for the Task purpose. This field will only be set if the **purpose** is `DATASET`.\n */\n purpose_version?: number;\n /** The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage?: TaskSummaryProcessingStage;\n}\n\n/**\n * The task state. The typical state sequence is `PENDING`, then `STARTED` and finally `SUCCESS`\n\n */\nexport type TaskStateState =\n (typeof TaskStateState)[keyof typeof TaskStateState];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskStateState = {\n PENDING: \"PENDING\",\n STARTED: \"STARTED\",\n RETRY: \"RETRY\",\n SUCCESS: \"SUCCESS\",\n FAILURE: \"FAILURE\",\n} as const;\n\nexport interface TaskState {\n /** The task state. 
The typical state sequence is `PENDING`, then `STARTED` and finally `SUCCESS`\n */\n state: TaskStateState;\n /** A short message accompanying the state, generally only found when the state is `FAILURE`\n */\n message?: string;\n /** The date and time of the state change\n */\n time: string;\n}\n\nexport interface TaskIdentity {\n task_id: string;\n}\n\n/**\n * The level of the message, a typical logging framework value\n\n */\nexport type TaskEventLevel =\n (typeof TaskEventLevel)[keyof typeof TaskEventLevel];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskEventLevel = {\n CRITICAL: \"CRITICAL\",\n ERROR: \"ERROR\",\n WARNING: \"WARNING\",\n INFO: \"INFO\",\n DEBUG: \"DEBUG\",\n} as const;\n\nexport interface TaskEvent {\n /** The event sequence number. The first event is always '1'.\n */\n ordinal: number;\n /** A short message.\n */\n message: string;\n /** The level of the message, a typical logging framework value\n */\n level: TaskEventLevel;\n /** The date and time the event was generated\n */\n time: string;\n}\n\nexport type ServiceErrorSummarySeverity =\n (typeof ServiceErrorSummarySeverity)[keyof typeof ServiceErrorSummarySeverity];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const ServiceErrorSummarySeverity = {\n CRITICAL: \"CRITICAL\",\n ERROR: \"ERROR\",\n WARNING: \"WARNING\",\n} as const;\n\nexport interface ServiceErrorSummary {\n id: number;\n created: string;\n summary: string;\n severity: ServiceErrorSummarySeverity;\n hostname: string;\n error_code?: number;\n stack_trace: string;\n acknowledged: boolean;\n acknowledged_at?: string;\n acknowledging_user?: string;\n}\n\nexport interface ProjectFileDetail {\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The ProjectFile's Dataset origin\n */\n dataset_id?: string;\n /** The ProjectFile's Dataset origin version\n */\n dataset_version?: number;\n /** The ProjectFile's unique ID\n */\n file_id: string;\n /** The ProjectFile's filename within the Project\n */\n file_name: string;\n /** The ProjectFile's path within the Project volume\n */\n file_path: string;\n /** True if the ProjectFile cannot be modified while in the Project\n */\n immutable: boolean;\n /** The owner of the ProjectFile. This is the user that added the Dataset (as this file) to the Project\n */\n owner: string;\n /** The Project the ProjectFile belongs to\n */\n project_id?: string;\n /** The ProjectFile MIME type\n */\n mime_type: string;\n}\n\nexport interface ProjectDetail {\n /** The project name\n */\n name: string;\n /** The project unique reference\n */\n project_id: string;\n created: string;\n /** The Account Server Product the Project belongs to\n */\n product_id?: string;\n /** The Account Server Unit the Project Product belongs to\n */\n unit_id?: string;\n /** The project (owner) creator\n */\n owner: string;\n /** True if the project is private. Private projects are only visible to the owner and its editors.\n */\n private: boolean;\n /** An editor (user_id) of the project */\n editors: string[];\n /** An observer (user_id) of the project */\n observers: string[];\n /** The approximate size of all the files in the Project volume. This is updated regularly throughout the day and its current size may differ from what is reported here. The smallest billable unit is 1GiB (1,073,741,824 bytes). 
Therefore a project that contains 32KiB of files is recorded as 1GiB in size */\n size: number;\n /** A list of managed files in the Project\n */\n files?: ProjectFileDetail[];\n}\n\nexport interface JobReplacement {\n collection: string;\n job: string;\n}\n\n/**\n * A list of Jobs, collection and job that are either replacing or being replaced\n\n */\nexport type JobReplacements = JobReplacement[];\n\nexport interface JobOrderDetail {\n options: string[];\n}\n\nexport interface JobVariables {\n /** The Job command's variable ordering declaration. Contains a list of `options` in an ordered list, the order defines the order of presentation of the Job's variables.\n */\n order?: JobOrderDetail;\n /** The Job command's inputs. A string that represents a JSONSchema for the command's inputs, essentially the **variables/inputs** block of the Job's JobDefinition.\n */\n inputs?: string;\n /** The Job command's outputs. A string that represents a JSONSchema for the command's inputs, essentially the **variables/outputs** block of the Job's JobDefinition.\n */\n outputs?: string;\n /** The Job command's options. A string that represents a JSONSchema for the command's options, essentially the **variables/options** block of the Job's JobDefinition.\n */\n options?: string;\n}\n\n/**\n * The optional container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type JobSummaryImageType =\n (typeof JobSummaryImageType)[keyof typeof JobSummaryImageType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const JobSummaryImageType = {\n SIMPLE: \"SIMPLE\",\n NEXTFLOW: \"NEXTFLOW\",\n} as const;\n\nexport interface JobSummary {\n /** The Job's unique ID\n */\n id: number;\n /** The Job namespace\n */\n collection: string;\n /** The Job name, unique within a given namespace\n */\n job: string;\n /** The Job version\n */\n version: string;\n /** The optional container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n image_type: JobSummaryImageType;\n /** The Job's category\n */\n category?: string;\n /** The list of keywords assigned to the Job\n */\n keywords?: string[];\n /** The name of the job in English\n */\n name: string;\n /** The description of the job in English\n */\n description?: string;\n /** A URL linking to the Job documentation\n */\n doc_url?: string;\n /** A list of Account Server assets names required to run the Job. You need access to these assets in order to run the Job\n */\n required_assets: string[];\n /** A list of Account Server \"required_assets\" that cannot be found. If assets cannot be found the user will receive a \"disabled_reason\" that should explain the problem.\n */\n missing_assets?: string[];\n /** True if disabled. Disabled Jobs cannot be executed. If disabled a reason will be found in `disabled_reason`\n */\n disabled: boolean;\n /** A reason why the Job has been disabled.\n */\n disabled_reason?: string;\n /** A suggested remedy that accompanies the \"disabled_reason\". 
This provides the client with advice on how to avoid the reason that the Job's been disabled, for reasons that can be avoided (not all are).\n\nWhere there is no remedy for a given reason the remedy will be \"There is no remedy\".\n */\n disabled_remedy?: string;\n replaces?: JobReplacements;\n replaced_by?: JobReplacements;\n}\n\nexport interface JobManifestDetail {\n /** The Job Manifest record ID */\n id: number;\n url: string;\n header?: string;\n params?: string;\n /** The Date (and time) the manifest was created\n */\n created: string;\n /** The (admin) user who created the manifest\n */\n creator?: string;\n /** The number of job definition files loaded during the most recent successful load\n */\n job_definition_files_loaded?: number;\n /** The number of job definitions loaded during the most recent successful load\n */\n job_definitions_loaded?: number;\n /** The Date (and time) the manifest was last loaded successfully\n */\n last_successful_load_time?: string;\n /** The Date (and time) the manifest was last loaded, successfully or otherwise. If the manifest (or any of the Job definition files it refers to) fails to load the `load_status` should provide some diagnostic feedback\n */\n last_load_time?: string;\n /** The status of the time the manifest was last loaded. If the load was successful this will be `SUCCESS` and `last_successful_load` will be the same as `last_load`\n */\n last_load_status: string;\n}\n\nexport interface JobApplication {\n /** The Job's operator ID */\n id: string;\n /** The Job's operator version */\n version: string;\n}\n\n/**\n * The Job container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type InstanceSummaryJobImageType =\n (typeof InstanceSummaryJobImageType)[keyof typeof InstanceSummaryJobImageType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryJobImageType = {\n SIMPLE: \"SIMPLE\",\n NEXTFLOW: \"NEXTFLOW\",\n} as const;\n\n/**\n * The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparing to run, is `PENDING`. The instance is running when the phase is `RUNNING`. 
`COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccessfully.\n\n */\nexport type InstanceSummaryPhase =\n (typeof InstanceSummaryPhase)[keyof typeof InstanceSummaryPhase];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryPhase = {\n COMPLETED: \"COMPLETED\",\n CRASH_LOOP_BACKOFF: \"CRASH_LOOP_BACKOFF\",\n FAILED: \"FAILED\",\n IMAGE_PULL_BACKOFF: \"IMAGE_PULL_BACKOFF\",\n PENDING: \"PENDING\",\n RUNNING: \"RUNNING\",\n SUCCEEDED: \"SUCCEEDED\",\n UNKNOWN: \"UNKNOWN\",\n} as const;\n\n/**\n * The type of Application, which can be a `job` or an `application`\n\n */\nexport type InstanceSummaryApplicationType =\n (typeof InstanceSummaryApplicationType)[keyof typeof InstanceSummaryApplicationType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryApplicationType = {\n APPLICATION: \"APPLICATION\",\n JOB: \"JOB\",\n} as const;\n\nexport interface InstanceSummary {\n /** The application instance ID\n */\n id: string;\n /** True if the instance is archived (protected from automatic deletion)\n */\n archived: boolean;\n /** The application ID\n */\n application_id: string;\n /** The application version\n */\n application_version: string;\n /** The User's specification, provided when the application was launched\n */\n application_specification?: string;\n /** The type of Application, which can be a `job` or an `application`\n */\n application_type: InstanceSummaryApplicationType;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparing to run, is `PENDING`. The instance is running when the phase is `RUNNING`. `COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccessfully.\n */\n phase: InstanceSummaryPhase;\n /** The data and time (UTC) the instance was launched\n */\n launched: string;\n /** The data and time (UTC) the instance started running\n */\n started?: string;\n /** The data and time (UTC) the instance stopped running\n */\n stopped?: string;\n /** The time the instance has spent running in the cluster. It's a string representation of a Python timedelta object, e.g. `0:12:32` for a run-time of 12 minutes and 32 seconds. The run-time must be considered as an _estimate_ until the instance has stopped and the instance is only considered to be running once `started` has been set.\n */\n run_time: string;\n /** The application instance owner, the person who launched the application and is the only user than can stop it.\n */\n owner: string;\n /** The Project the instances is running in.\n */\n project_id: string;\n /** The Instance name\n */\n name: string;\n /** For applications (not Jobs) this is the URL the application has exposed. 
It will ponly be available when the application has started.\n */\n url?: string;\n /** If the instance relates to a job, this will be the job collection, as defined in the original collection's job definition.\n */\n job_collection?: string;\n /** If the instance relates to a job, this will be the job, as defined in the original collection's job definition.\n */\n job_job?: string;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job definition.\n */\n job_name?: string;\n /** The Job definition's unique ID\n */\n job_id?: number;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job definition.\n */\n job_version?: string;\n /** The Job container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n job_image_type?: InstanceSummaryJobImageType;\n /** The instance's known outputs, a JSON string defining a map of all the outputs. Typically applied only to JOB application types\n */\n outputs?: string;\n /** Set if the instance output file permissions are expected to be fixed by the Data Manager when the Instance completes.\n */\n fix_permissions?: boolean;\n /** The current running coin cost of the instance.\n */\n coins?: string;\n}\n\nexport interface FileStat {\n /** The size of the file in bytes\n */\n size: number;\n /** The date and time (UTC) of the last modification\n */\n modified: string;\n}\n\nexport interface FilePathFile {\n /** The file's Dataset ID (if the file belongs to a Dataset)\n */\n dataset_id?: string;\n /** The file's Dataset version (if the file belongs to a Dataset)\n */\n dataset_version?: number;\n /** The file name\n */\n file_name: string;\n /** The ID of the file (if the file belongs to a Dataset)\n */\n file_id?: string;\n /** Whether the file is immutable (read-only)\n */\n immutable?: boolean;\n /** The file's MIME type\n */\n mime_type?: string;\n /** The file's owner\n */\n owner: string;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n stat: FileStat;\n}\n\nexport interface JobExchangeRateSummary {\n id: number;\n rate?: string;\n collection: string;\n job: string;\n version: string;\n}\n\nexport interface ApplicationExchangeRateSummary {\n id: string;\n rate?: string;\n}\n\nexport interface ExchangeRateDetail {\n id: number;\n rate: string;\n created: string;\n user_id: string;\n comment?: string;\n}\n\n/**\n * The dictionary of label/value pairs\n\n */\nexport type DatasetVersionSummaryLabels = { [key: string]: any };\n\n/**\n * The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). 
A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type DatasetVersionSummaryProcessingStage =\n (typeof DatasetVersionSummaryProcessingStage)[keyof typeof DatasetVersionSummaryProcessingStage];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetVersionSummaryProcessingStage = {\n COPYING: \"COPYING\",\n FAILED: \"FAILED\",\n FORMATTING: \"FORMATTING\",\n LOADING: \"LOADING\",\n DELETING: \"DELETING\",\n DONE: \"DONE\",\n} as const;\n\nexport interface DatasetVersionSummary {\n /** The owner of the Dataset version\n */\n owner?: string;\n /** The source of the Dataset - typically the name of the Dataset that was uploaded or a URL reference\n */\n source_ref: string;\n /** The date and time the Dataset was uploaded (an ISO-8601 formatted string in UTC)\n */\n published: string;\n /** The Dataset MIME type\n */\n type: string;\n /** The filename of the Dataset\n */\n file_name: string;\n /** The list of Project's the Dataset is attached to\n */\n projects: string[];\n /** Any extra variables passed in during the upload and handed to the format-support container.\n */\n format_extra_variables?: string;\n /** The version of the dataset\n */\n version: number;\n /** The name of the Pod handling the creation of the Dataset\n */\n creator_pod_name?: string;\n /** The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage: DatasetVersionSummaryProcessingStage;\n /** If this is a Deleted dataset this is the date and time the dataset was deleted (an ISO-8601 formatted string in UTC)\n */\n deleted?: string;\n /** If this is a Deleted dataset this is the username of the user that deleted the Dataset\n */\n deleting_user?: string;\n /** The dictionary of label/value pairs\n */\n labels?: DatasetVersionSummaryLabels;\n /** The size, in bytes, of the formatted Dataset\n */\n size?: number;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n}\n\nexport interface DatasetVersionProjectFile {\n project_name: string;\n project: string;\n files: string[];\n}\n\n/**\n * The dictionary of label/value pairs\n\n */\nexport type DatasetVersionDetailLabels = { [key: string]: any };\n\n/**\n * The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). 
A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type DatasetVersionDetailProcessingStage =\n (typeof DatasetVersionDetailProcessingStage)[keyof typeof DatasetVersionDetailProcessingStage];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetVersionDetailProcessingStage = {\n COPYING: \"COPYING\",\n FAILED: \"FAILED\",\n FORMATTING: \"FORMATTING\",\n LOADING: \"LOADING\",\n DELETING: \"DELETING\",\n DONE: \"DONE\",\n} as const;\n\nexport interface DatasetVersionDetail {\n /** The owner of the Dataset version\n */\n owner: string;\n /** The source of the Dataset - typically the name of the Dataset that was uploaded or a URL reference\n */\n source_ref: string;\n /** The date and time the Dataset was uploaded (an ISO-8601 formatted string in UTC)\n */\n published: string;\n /** The Dataset MIME type\n */\n type: string;\n /** The filename of the Dataset\n */\n file_name: string;\n /** The list of\n */\n project_files: DatasetVersionProjectFile[];\n /** The list of Project's the Dataset is attached to\n */\n projects: string[];\n /** Any extra variables passed in during the upload and handed to the format-support container.\n */\n format_extra_variables?: string;\n /** The version of the dataset\n */\n version: number;\n /** The name of the Pod handling the creation of the Dataset\n */\n creator_pod_name?: string;\n /** The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage: DatasetVersionDetailProcessingStage;\n /** If this is a Deleted dataset this is the date and time the dataset was deleted (an ISO-8601 formatted string in UTC)\n */\n deleted?: string;\n /** If this is a Deleted dataset this is the username of the user that deleted the Dataset\n */\n deleting_user?: string;\n /** The dictionary of label/value pairs\n */\n labels?: DatasetVersionDetailLabels;\n /** The size, in bytes, of the formatted Dataset\n */\n size?: number;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n}\n\nexport interface DatasetSummary {\n /** The Dataset ID\n */\n dataset_id: string;\n /** The list of editors\n */\n editors: string[];\n /** The set of separate versions of the Dataset\n */\n versions: DatasetVersionSummary[];\n}\n\nexport interface DatasetDetail {\n /** The Dataset ID\n */\n dataset_id: string;\n /** The list of editors\n */\n editors: string[];\n /** The owner of the Dataset\n */\n owner: string;\n /** The set of separate versions of the Dataset\n */\n versions: DatasetVersionDetail[];\n}\n\nexport interface ApplicationSummary {\n /** The application name, the value of the Kubernetes **Custom Resource Definition** `spec.names.kind` property\n */\n kind: string;\n /** The application group\n */\n group?: string;\n /** The application unique reference, the value of the Kubernetes **Custom Resource Definition** `metadata.name` property\n */\n application_id: string;\n /** The application's latest version as declared in The **Custom Resource Definition**\n */\n latest_version: string;\n}\n\nexport interface ApplicationImageVariant {\n name: string;\n image: string;\n}\n\nexport interface ApplicationImageVariants {\n public?: ApplicationImageVariant[];\n}\n\n/**\n * The REST method used. 
GET methods are not logged\n\n */\nexport type ApiLogDetailMethod =\n (typeof ApiLogDetailMethod)[keyof typeof ApiLogDetailMethod];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const ApiLogDetailMethod = {\n DELETE: \"DELETE\",\n PATCH: \"PATCH\",\n POST: \"POST\",\n PUT: \"PUT\",\n} as const;\n\nexport interface ApiLogDetail {\n /** The REST method used. GET methods are not logged\n */\n method: ApiLogDetailMethod;\n /** The API path used, e.g. `/dataset`\n */\n path: string;\n /** The date/time the API call began\n */\n began: string;\n /** The REST method approximate execution time (nS) in a humanised form, where 7969400 is presented as 7,969,400\n */\n duration_ns?: string;\n /** The HTTP response status code\n */\n status_code?: number;\n /** A JSON string representing the in-query properties used in the call\n */\n params?: string;\n /** A JSON string representing the in-body properties used in the call\n */\n body?: string;\n /** Not all API calls using your user ID may have been executed by you. In cases where an administrator has executed a REST API call on your behalf, their user ID will be revealed using this property\n */\n impersonator?: string;\n}\n\nexport interface VersionGetResponse {\n /** The Data Manager version. This is guaranteed to be a valid semantic version for official (tagged) images. The version value format for unofficial images is a string but otherwise undefined\n */\n version: string;\n}\n\nexport interface UsersGetResponse {\n /** A list of Users that have used the Data Manager\n */\n users: UserSummary[];\n}\n\nexport interface UserApiLogGetResponse {\n /** A list of API log call records, with the oldest record first in the list\n */\n api_log: ApiLogDetail[];\n}\n\nexport type UserAccountGetResponse = UserAccountDetail;\n\nexport interface TypesGetResponse {\n /** A list of available MIME types\n */\n types: TypeSummary[];\n}\n\nexport interface TasksGetResponse {\n /** A list of Tasks\n */\n tasks: TaskSummary[];\n}\n\n/**\n * The purpose of the task. Tasks are typically executed for the purpose of Dataset processing, attaching as Project Files or Application Instance execution. Other fields in this object are only valid if the purpose is known (i.e. is not `UNKNOWN`)\n\n */\nexport type TaskGetResponsePurpose =\n (typeof TaskGetResponsePurpose)[keyof typeof TaskGetResponsePurpose];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskGetResponsePurpose = {\n DATASET: \"DATASET\",\n FILE: \"FILE\",\n INSTANCE: \"INSTANCE\",\n PROJECT: \"PROJECT\",\n} as const;\n\nexport interface TaskGetResponse {\n /** The purpose of the task. Tasks are typically executed for the purpose of Dataset processing, attaching as Project Files or Application Instance execution. Other fields in this object are only valid if the purpose is known (i.e. is not `UNKNOWN`)\n */\n purpose: TaskGetResponsePurpose;\n /** The identity of the purpose, where available. This will be the Dataset UUID if the purpose of the task is/was for Dataset processing.\n */\n purpose_id: string;\n /** The version number, relating to the object under control. For Datasets this will be the Dataset version.\n */\n purpose_version?: number;\n /** If the Task Purpose is `INSTANCE`, and the instance was given a `specification`, the specification can be found here. 
For **Applications** the specification is returned verbatim.\n */\n instance_specification?: string;\n /** If a container image is launched by the task the image name is available here\n */\n image?: string;\n /** The date and time the task was created\n */\n created: string;\n /** True if the task has run to completion. If the task finished successfully the `exit_code` will be zero.\n */\n done: boolean;\n /** True if the Task relates to an object removal, i.e. a DELETE\n */\n removal?: boolean;\n /** Present when `done` and zero if the task finished successfully.\n */\n exit_code?: number;\n /** A (possibly empty) list of application states, the oldest state occupies the first position in the list.\n */\n states?: TaskState[];\n /** A (possibly empty) list of application events. The oldest event occupies the first position in the list.\n */\n events?: TaskEvent[];\n}\n\nexport interface ServiceErrorsGetResponse {\n /** A list of service errors\n */\n service_errors: ServiceErrorSummary[];\n}\n\nexport interface ProjectsGetResponse {\n projects: ProjectDetail[];\n}\n\nexport interface ProjectPostResponse {\n /** The project identity\n */\n project_id: string;\n}\n\nexport type ProjectGetResponse = ProjectDetail;\n\nexport type ProjectDeleteResponse = TaskIdentity;\n\nexport interface JobsGetResponse {\n jobs: JobSummary[];\n}\n\n/**\n * The optional container image type. Typically a single-container `SIMPLE` (where only one container runs) or a workflow where multiple container images can be spawned (like `NEXTFLOW`)\n\n */\nexport type JobGetResponseImageType =\n (typeof JobGetResponseImageType)[keyof typeof JobGetResponseImageType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const JobGetResponseImageType = {\n SIMPLE: \"SIMPLE\",\n NEXTFLOW: \"NEXTFLOW\",\n} as const;\n\nexport interface JobGetResponse {\n /** The Job's unique ID\n */\n id: number;\n application: ApplicationSummary;\n /** The Job collection\n */\n collection: string;\n /** The Job name, unique within a given collection\n */\n job: string;\n /** The Job version\n */\n version: string;\n /** The Job command (usually encoded)\n */\n command: string;\n /** The Job command encoding\n */\n command_encoding: string;\n /** The name of the container image that houses the job\n */\n image_name: string;\n /** The container image tag\n */\n image_tag: string;\n /** The directory where the project volume will be mounted. The root path to the project files\n */\n image_project_directory: string;\n /** The directory used as the container image working directory (if defined)\n */\n image_working_directory?: unknown;\n /** The optional container image type. Typically a single-container `SIMPLE` (where only one container runs) or a workflow where multiple container images can be spawned (like `NEXTFLOW`)\n */\n image_type?: JobGetResponseImageType;\n /** The Job's descriptive name\n */\n name: string;\n /** English description of the job\n */\n description?: string;\n /** A URL linking to the Job documentation\n */\n doc_url?: string;\n /** The Job's category\n */\n category?: string;\n /** The list of keywords assigned to the Job\n */\n keywords?: string[];\n variables?: JobVariables;\n exchange_rate: string;\n /** A list of Account Server assets names required to run the Job. You need access to these assets in order to run the Job\n */\n required_assets: string[];\n /** A list of Account Server \"required_assets\" that cannot be found. 
If assets cannot be found the user will receive a \"disabled_reason\" that should explain the problem.\n */\n missing_assets?: string[];\n /** True if disabled. Disabled Jobs cannot be executed. If disabled a reason will be found in `disabled_reason`\n */\n disabled: boolean;\n /** A reason why the Job has been disabled.\n */\n disabled_reason?: string;\n /** A suggested remedy that accompanies the \"disabled_reason\". This provides the client with advice on how to avoid the reason that the Job's been disabled, for reasons that can be avoided (not all are).\n\nWhere there is no remedy for a given reason the remedy will be \"There is no remedy\".\n */\n disabled_remedy?: string;\n replaces?: JobReplacements;\n replaced_by?: JobReplacements;\n}\n\nexport type InstanceTaskPurpose =\n (typeof InstanceTaskPurpose)[keyof typeof InstanceTaskPurpose];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceTaskPurpose = {\n CREATE: \"CREATE\",\n DELETE: \"DELETE\",\n} as const;\n\nexport interface InstanceTask {\n id: string;\n purpose: InstanceTaskPurpose;\n}\n\nexport interface InstancesGetResponse {\n instances: InstanceSummary[];\n}\n\n/**\n * The Job container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type InstanceGetResponseJobImageType =\n (typeof InstanceGetResponseJobImageType)[keyof typeof InstanceGetResponseJobImageType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceGetResponseJobImageType = {\n SIMPLE: \"SIMPLE\",\n NEXTFLOW: \"NEXTFLOW\",\n} as const;\n\n/**\n * The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparing to run, is `PENDING`. The instance is running when the phase is `RUNNING`. `COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccessfully.\n\n */\nexport type InstanceGetResponsePhase =\n (typeof InstanceGetResponsePhase)[keyof typeof InstanceGetResponsePhase];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceGetResponsePhase = {\n COMPLETED: \"COMPLETED\",\n CRASH_LOOP_BACKOFF: \"CRASH_LOOP_BACKOFF\",\n FAILED: \"FAILED\",\n IMAGE_PULL_BACKOFF: \"IMAGE_PULL_BACKOFF\",\n PENDING: \"PENDING\",\n RUNNING: \"RUNNING\",\n SUCCEEDED: \"SUCCEEDED\",\n UNKNOWN: \"UNKNOWN\",\n} as const;\n\n/**\n * The type of Application, which can be a `job` or an `application`\n\n */\nexport type InstanceGetResponseApplicationType =\n (typeof InstanceGetResponseApplicationType)[keyof typeof InstanceGetResponseApplicationType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceGetResponseApplicationType = {\n APPLICATION: \"APPLICATION\",\n JOB: \"JOB\",\n} as const;\n\nexport interface InstanceGetResponse {\n /** The Project the Instance is running in\n */\n project_id: string;\n /** The application name\n */\n name: string;\n /** True if the instance is archived (protected from automatic deletion)\n */\n archived: boolean;\n /** The type of Application, which can be a `job` or an `application`\n */\n application_type: InstanceGetResponseApplicationType;\n /** The application ID\n */\n application_id: string;\n /** The application version\n */\n application_version: string;\n /** The optional application specification. 
For **Applications** this is returned verbatim. For **Jobs** additional material will be found.\n */\n application_specification?: string;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The application owner, the person who launched the application and is the only user that can stop it.\n */\n owner: string;\n /** The date and time the instance was started, an ISO-8601 format string.\n */\n launched: string;\n /** The date and time the instance started running in the cluster. This is typically close to the launch time but contention may mean the instance starts only when resources are available.\n */\n started?: string;\n /** The date and time the instance stopped, an ISO-8601 format string.\n */\n stopped?: string;\n /** The time the instance has spent running in the cluster. It's a string representation of a Python timedelta object, e.g. `0:12:32` for a run-time of 12 minutes and 32 seconds. The run-time must be considered as an _estimate_ until the instance has stopped and the instance is only considered to be running once `started` has been set.\n */\n run_time: string;\n /** The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparing to run, is `PENDING`. The instance is running when the phase is `RUNNING`. `COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccessfully.\n */\n phase: InstanceGetResponsePhase;\n /** The application endpoint\n */\n url?: string;\n /** If the instance relates to a job, this will be the job collection, as defined in the original collection's job definition.\n */\n job_collection?: string;\n /** If the instance relates to a job, this will be the job, as defined in the original collection's job definition.\n */\n job_job?: string;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job definition.\n */\n job_name?: string;\n /** The Job definition's unique ID\n */\n job_id?: number;\n /** If the instance relates to a job, this will be the job's version, as defined in the original collection's job definition.\n */\n job_version?: string;\n /** The Job container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n job_image_type?: InstanceGetResponseJobImageType;\n /** The JSON string representation of the JobDefinition's outputs\n */\n outputs?: string;\n /** Set if the instance output file permissions are expected to be fixed by the Data Manager when the Instance completes.\n */\n fix_permissions?: boolean;\n /** A list of Tasks related to the application. Tasks are responsible for creating and deleting the instance. You should find at least one, assuming the corresponding task has not been deleted.\n */\n tasks: InstanceTask[];\n /** Set if the instance has a valid callback token. Instances with a valid token allow users to read files from the project without authentication\n */\n has_valid_callback_token: boolean;\n /** The cost exchange rate that applies to the Job at the time of launch. This is a string representation of a Decimal, e.g. `'0.5'`\n */\n launch_exchange_rate?: string;\n /** The accumulated cost accrued by the running Job. Depending on the Job, this is either known when the Job completes or may change as the Job runs. 
This is a string representation of a Decimal value, e.g. `'32.8'`\n */\n cost?: string;\n /** The accumulated coins accrued by the running Job. Coins are calculated based on the `cost` multiplied by the `launch_exchange_rate`. This is a string representation of a Decimal value, e.g. `'16.4'`\n */\n coins?: string;\n /** Where available, this is the number of hours that the JOb would take if the collateral Pods had access to only one CPU core. Collateral Pods are all the pods created by the Job that are in addition to the main (controlling) Pod. A value of `'0.5'` implies the job would have run in 30 minutes on 1 core. The minimum time resolution is 0.001 (3.6 seconds).\n */\n collateral_cpu_hours?: string;\n /** Where available, the number of collateral Pods spawned by the instance, typically used by Job instances. Collateral Pods are all the pods created by the Job that are in addition to the main (controlling) Pod. For `NEXTFLOW` jobs this is a count of the number of Task pods spawned.\n */\n collateral_pod_count?: number;\n /** An optional error message, used to report underlying problems.\n */\n error_message?: string;\n}\n\nexport type GetExchangeRatesResponseId = number | string;\n\nexport interface GetExchangeRatesResponse {\n id: GetExchangeRatesResponseId;\n exchange_rates: ExchangeRateDetail[];\n}\n\nexport type GetAllExchangeRatesResponseExchangeRatesItem =\n | ApplicationExchangeRateSummary\n | JobExchangeRateSummary;\n\nexport interface GetAllExchangeRatesResponse {\n only_undefined: boolean;\n exchange_rates: GetAllExchangeRatesResponseExchangeRatesItem[];\n}\n\nexport interface FilesGetResponse {\n /** The project\n */\n project_id: string;\n /** The project path\n */\n path: string;\n /** The dataset identity (not its name). A unique reference assigned automatically when uploaded\n */\n files: FilePathFile[];\n /** Sub-directories in the current path\n */\n paths: string[];\n}\n\nexport interface FilePostResponse {\n /** The Project File identity, assigned automatically when a Dataset is added to a Project\n */\n file_id: string;\n /** The name of the File that will appear in the Project\n */\n file_name: string;\n /** The path to the file in the Project, relative to the volume root (mount point). Files in the root of the project will have a path value of '/'\n */\n file_path: string;\n /** The File task identity. The task assigned to convert and attach the Dataset File to the Project\n */\n task_id: string;\n}\n\nexport interface InstancePostResponse {\n /** The application instance identity (not its name). Assigned automatically when created\n */\n instance_id: string;\n /** A token that can be used to access data in the project without further authentication. The token expires automatically or if the user revokes it. The token is only provided if asked for when the instance is launched.\n */\n callback_token?: string;\n /** The instance task identity. The task assigned to process the instance\n */\n task_id: string;\n /** The instance expanded command. Applies only to Job instances.\n */\n command?: string;\n}\n\nexport interface InstanceDryRunPostResponse {\n /** A token that can be used to access data in the project without further authentication. The token expires automatically or if the user revokes it.\n */\n callback_token?: string;\n /** The instance expanded command. 
Applies only to Job instances.\n */\n command?: string;\n}\n\nexport type InstanceDeleteResponse = TaskIdentity;\n\nexport interface DatasetsGetResponse {\n datasets: DatasetSummary[];\n}\n\nexport type DatasetVersionsGetResponse = DatasetDetail;\n\nexport type DatasetVersionDeleteResponse = TaskIdentity;\n\n/**\n * The Metadata type (an object)\n\n */\nexport type DatasetSchemaGetResponseType =\n (typeof DatasetSchemaGetResponseType)[keyof typeof DatasetSchemaGetResponseType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetSchemaGetResponseType = {\n object: \"object\",\n} as const;\n\nexport interface DatasetSchemaGetResponse {\n /** The Metadata title\n */\n title: string;\n /** The Metadata description\n */\n description: string;\n /** The Metadata type (an object)\n */\n type: DatasetSchemaGetResponseType;\n /** Required properties\n */\n required: string[];\n [key: string]: any;\n}\n\nexport interface DatasetMetaGetResponse {\n /** The Metadata title\n */\n dataset_name: string;\n /** The Metadata description\n */\n dataset_id: string;\n /** The Metadata type (an object)\n */\n description: string;\n /** The date and time of creation\n */\n created: string;\n /** The date and time it was last updated\n */\n last_updated: string;\n /** The user who created the Metadata\n */\n created_by: string;\n /** The Metadata version\n */\n metadata_version: string;\n /** The Metadata's annotations\n */\n annotations: unknown[];\n /** The Metadata's labels\n */\n labels: unknown[];\n [key: string]: any;\n}\n\nexport interface DatasetDigestGetResponse {\n /** The Dataset matching the digest\n */\n dataset_id: string;\n /** The Dataset version\n */\n dataset_version: number;\n}\n\nexport interface DatasetPutPostResponse {\n /** The dataset identity (not its name). Assigned automatically when uploaded\n */\n dataset_id: string;\n /** The dataset identity version. Assigned automatically when uploaded\n */\n dataset_version: number;\n /** The dataset task identity. The task assigned to process the dataset\n */\n task_id: string;\n}\n\nexport interface ApplicationsGetResponse {\n /** A list of installed applications, which are application-compliant Kubernetes \"operators\"\n */\n applications: ApplicationSummary[];\n}\n\nexport interface ApplicationGetResponse {\n /** The Application's unique ID\n */\n id: string;\n /** The Application COST exchange rate\n */\n exchange_rate: string;\n /** The application specification template. 
Used when creating application instances\n */\n template: string;\n /** A list of instances of the application\n */\n instances: string[];\n /** The application group\n */\n group: string;\n /** The name (kind) of the application\n */\n kind: string;\n /** The list of available versions\n */\n versions: string[];\n image_variants?: ApplicationImageVariants;\n}\n\nexport type AdminUserPutResponse = UserAccountDetail;\n\nexport interface AdminJobManifestLoadPutResponse {\n /** True if successful, false otherwise\n */\n status: boolean;\n /** The number of Job Manifests inspected\n */\n manifests_inspected: number;\n /** The number of Job Definitions inspected\n */\n job_definitions_inspected: number;\n /** The number of Jobs inspected\n */\n jobs_inspected: number;\n /** The number of Jobs purged\n */\n jobs_purged?: number;\n}\n\nexport interface AdminJobManifestGetResponse {\n /** The list of known Job manifests\n */\n job_manifests: JobManifestDetail[];\n}\n\nexport interface AccountServerGetNamespaceResponse {\n /** The configured Account Server namespace, which will be an empty string if one is not configured. The AS API is expected as the service `as-api` in this namespace.\n */\n namespace: string;\n}\n\nexport interface AccountServerGetRegistrationResponse {\n merchant_id: number;\n name: string;\n registered: string;\n}\n\nexport interface DmError {\n /** Brief error text that can be presented to the user\n */\n error: string;\n}\n"],"mappings":";;;;;;;;AAquBO,IAAM,kCAAkC;AAAA,EAC7C,QAAQ;AACV;AA6CO,IAAM,6BAA6B;AAAA,EACxC,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AACR;AA2CO,IAAM,iBAAiB;AAAA,EAC5B,SAAS;AAAA,EACT,SAAS;AAAA,EACT,OAAO;AAAA,EACP,SAAS;AAAA,EACT,SAAS;AACX;AA0BO,IAAM,iBAAiB;AAAA,EAC5B,UAAU;AAAA,EACV,OAAO;AAAA,EACP,SAAS;AAAA,EACT,MAAM;AAAA,EACN,OAAO;AACT;AAqBO,IAAM,8BAA8B;AAAA,EACzC,UAAU;AAAA,EACV,OAAO;AAAA,EACP,SAAS;AACX;AAqHO,IAAM,sBAAsB;AAAA,EACjC,QAAQ;AAAA,EACR,UAAU;AACZ;AAkGO,IAAM,8BAA8B;AAAA,EACzC,QAAQ;AAAA,EACR,UAAU;AACZ;AAWO,IAAM,uBAAuB;AAAA,EAClC,WAAW;AAAA,EACX,oBAAoB;AAAA,EACpB,QAAQ;AAAA,EACR,oBAAoB;AAAA,EACpB,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AACX;AAUO,IAAM,iCAAiC;AAAA,EAC5C,aAAa;AAAA,EACb,KAAK;AACP;AAyJO,IAAM,uCAAuC;AAAA,EAClD,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AACR;AAsEO,IAAM,sCAAsC;AAAA,EACjD,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AACR;AAgHO,IAAM,qBAAqB;AAAA,EAChC,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,MAAM;AAAA,EACN,KAAK;AACP;AAqEO,IAAM,yBAAyB;AAAA,EACpC,SAAS;AAAA,EACT,MAAM;AAAA,EACN,UAAU;AAAA,EACV,SAAS;AACX;AAsEO,IAAM,0BAA0B;AAAA,EACrC,QAAQ;AAAA,EACR,UAAU;AACZ;AA+EO,IAAM,sBAAsB;AAAA,EACjC,QAAQ;AAAA,EACR,QAAQ;AACV;AAmBO,IAAM,kCAAkC;AAAA,EAC7C,QAAQ;AAAA,EACR,UAAU;AACZ;AAWO,IAAM,2BAA2B;AAAA,EACtC,WAAW;AAAA,EACX,oBAAoB;AAAA,EACpB,QAAQ;AAAA,EACR,oBAAoB;AAAA,EACpB,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AACX;AAUO,IAAM,qCAAqC;AAAA,EAChD,aAAa;AAAA,EACb,KAAK;AACP;AA2LO,IAAM,+BAA+B;AAAA,EAC1C,QAAQ;AACV;","names":[]}
|
|
1
|
+
{"version":3,"sources":["../src/data-manager-api.schemas.ts"],"sourcesContent":["/**\n * Generated by orval v6.15.0 🍺\n * Do not edit manually.\n * Dataset Manager API\n * The Dataset Manager API service.\n\nA service that allows *registered* users to make **Datasets** and associated **Metadata** available to **Applications** and **Jobs** using **Projects** and **Files**.\n\n * OpenAPI spec version: 1.0\n */\nexport type AdminGetServiceErrorsParams = {\n /**\n * Set to include acknowledged items\n\n */\n include_acknowleged?: QIncludeAcknowlegedParameter;\n};\n\nexport type GetUserApiLogParams = {\n /**\n * A from (inclusive) date-time. If provided no API calls prior to this will be returned. UTC is assumed if no timezone is provided\n\n */\n from?: QFromParameter;\n /**\n * An until (exclusive) date-time. If provided only API calls made before this will be returned. UTC is assumed if no timezone is provided\n\n */\n until?: QUntilParameter;\n};\n\nexport type GetUserAccountParams = {\n /**\n * Set, if you're an admin, to call the endpoint without impersonation\n\n */\n do_not_impersonate?: QDoNotImpersonateParameter;\n};\n\nexport type PatchInstanceParams = {\n /**\n * True to archive the instance\n */\n archive?: QInstanceArchiveParameter;\n};\n\nexport type GetInstancesParams = {\n /**\n * A Project identity\n */\n project_id?: QProjectIdParameter;\n};\n\nexport type GetJobExchangeRatesParams = {\n /**\n * Set to get current\n\n */\n current?: QCurrentParameter;\n};\n\nexport type GetAllJobExchangeRatesParams = {\n /**\n * Only return records where the exchange rate is undefined\n\n */\n only_undefined?: QOnlyUndefinedParameter;\n};\n\nexport type GetJobParams = {\n /**\n * A Project identity\n */\n project_id?: QProjectIdParameter;\n};\n\nexport type GetJobByVersionParams = {\n /**\n * The Collection for a Job, i.e. \"im-test\"\n\n */\n collection: QJobCollectionParameter;\n /**\n * The Job, i.e. \"nop\"\n\n */\n job: QJobJobParameter;\n /**\n * The version of a Job, i.e. \"1.0.0\"\n\n */\n version: QJobVersionParameter;\n /**\n * A Project identity\n */\n project_id?: QProjectIdParameter;\n};\n\nexport type GetJobsParams = {\n /**\n * A Project identity\n */\n project_id?: QProjectIdParameter;\n};\n\nexport type GetApplicationExchangeRatesParams = {\n /**\n * Set to get current\n\n */\n current?: QCurrentParameter;\n};\n\nexport type GetAllApplicationExchangeRatesParams = {\n /**\n * Only return records where the exchange rate is undefined\n\n */\n only_undefined?: QOnlyUndefinedParameter;\n};\n\nexport type GetTaskParams = {\n /**\n * Maximum number of events to return. If provided, can be 1 or more.\n\n */\n event_limit?: QEventLimitParameter;\n /**\n * The ordinal of a previously received event. If set, only events subsequent to the ordinal provided will be returned. Providing a value of 0 will result in retrieving the first and subsequent events.\n\n */\n event_prior_ordinal?: QEventPriorOrdinalParameter;\n};\n\nexport type GetTasksParams = {\n /**\n * Set true if you want to exclude 'done' tasks, i.e. just see those that are still running.\n\n */\n exclude_done?: QExcludeDoneParameter;\n /**\n * Set true if you want to exclude Tasks related to object removal.\n\n */\n exclude_removal?: QExcludeRemovalParameter;\n /**\n * Set to a dot-separated string of purpose enumerations, i.e. `DATASET`, `FILE`, `INSTANCE`, or `PROJECT`. 
To exclude file and dataset tasks set this field to `FILE.DATASET`\n\n */\n exclude_purpose?: QExcludePurposeParameter;\n /**\n * A Project identity\n */\n project_id?: QProjectIdParameter;\n /**\n * An instance callback context string\n */\n instance_callback_context?: QInstanceCallbackContextParameter;\n};\n\nexport type DeleteUnmanagedFileParams = {\n /**\n * A project file.\n\n */\n file: QFileParameter;\n /**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\n path?: QFilePathParameter;\n /**\n * The Project identity\n */\n project_id: QFileProjectIdParameter;\n};\n\nexport type GetFilesParams = {\n /**\n * The Project identity\n */\n project_id: QFileProjectIdParameter;\n /**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\n path?: QFilePathParameter;\n /**\n * Whether to include hidden files and directories\n */\n include_hidden?: QIncludeHiddenParameter;\n};\n\nexport type DeleteDatasetParams = {\n /**\n * Whether to convert Project managed file instances to unmanaged files\n\n */\n keep_project_files?: QKeepProjectFilesParameter;\n};\n\nexport type GetVersionsParams = {\n /**\n * Whether to include records that are deleted\n */\n include_deleted?: QIncludeDeletedParameter;\n};\n\nexport type GetDatasetsParams = {\n /**\n * Whether to include records that are deleted\n */\n include_deleted?: QIncludeDeletedParameter;\n /**\n * Filter the datasets by username\n\n */\n username?: QUsernameParameter;\n /**\n * Filter the datasets by the supplied mime_type.\n\n */\n dataset_mime_type?: QDatasetMimeTypeParameter;\n /**\n * Filter the datasets by a comma separated list of owners\n\n */\n owners?: QOwnersParameter;\n /**\n * Filter the datasets by a comma separated list of editors\n\n */\n editors?: QEditorsParameter;\n /**\n * JSON string containing a list of label/value pairs for the datasets to be filtered by. If the value is set to null, then only the label is matched. If both the label and value are set, then both are matched. e.g. '{\"label1\": null, \"label2\": \"value2\"}'\n\n */\n labels?: QLabelsParameter;\n};\n\nexport type GetProjectFileWithTokenParams = {\n /**\n * A token\n */\n token?: QTokenParameter;\n /**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\n path?: QFilePathParameter;\n /**\n * A project file.\n\n */\n file: QFileParameter;\n};\n\nexport type GetProjectFileParams = {\n /**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\n path?: QFilePathParameter;\n /**\n * A project file.\n\n */\n file: QFileParameter;\n};\n\nexport type GetProjectsParams = {\n /**\n * A Project name\n */\n project_name?: QProjectNameParameter;\n};\n\n/**\n * Filter the datasets by a comma separated list of editors\n\n */\nexport type QEditorsParameter = string;\n\n/**\n * Filter the datasets by a comma separated list of owners\n\n */\nexport type QOwnersParameter = string;\n\n/**\n * Filter the datasets by username\n\n */\nexport type QUsernameParameter = string;\n\n/**\n * An until (exclusive) date-time. If provided only API calls made before this will be returned. 
UTC is assumed if no timezone is provided\n\n */\nexport type QUntilParameter = string;\n\n/**\n * A token\n */\nexport type QTokenParameter = string;\n\n/**\n * An instance callback context string\n */\nexport type QInstanceCallbackContextParameter = string;\n\n/**\n * A Project name\n */\nexport type QProjectNameParameter = string;\n\n/**\n * A Project identity\n */\nexport type QProjectIdParameter = string;\n\n/**\n * JSON string containing a list of label/value pairs for the datasets to be filtered by. If the value is set to null, then only the label is matched. If both the label and value are set, then both are matched. e.g. '{\"label1\": null, \"label2\": \"value2\"}'\n\n */\nexport type QLabelsParameter = string;\n\n/**\n * Only return records where the exchange rate is undefined\n\n */\nexport type QOnlyUndefinedParameter = boolean;\n\n/**\n * Whether to convert Project managed file instances to unmanaged files\n\n */\nexport type QKeepProjectFilesParameter = boolean;\n\n/**\n * The version of a Job, i.e. \"1.0.0\"\n\n */\nexport type QJobVersionParameter = string;\n\n/**\n * The Job, i.e. \"nop\"\n\n */\nexport type QJobJobParameter = string;\n\n/**\n * A Job identity\n\n */\nexport type QJobIdParameter = number;\n\n/**\n * The Collection for a Job, i.e. \"im-test\"\n\n */\nexport type QJobCollectionParameter = string;\n\n/**\n * True to archive the instance\n */\nexport type QInstanceArchiveParameter = boolean;\n\n/**\n * Whether to include hidden files and directories\n */\nexport type QIncludeHiddenParameter = boolean;\n\n/**\n * Whether to include records that are deleted\n */\nexport type QIncludeDeletedParameter = boolean;\n\n/**\n * Set to include acknowledged items\n\n */\nexport type QIncludeAcknowlegedParameter = boolean;\n\n/**\n * A from (inclusive) date-time. If provided no API calls prior to this will be returned. UTC is assumed if no timezone is provided\n\n */\nexport type QFromParameter = string;\n\n/**\n * The Project identity\n */\nexport type QFileProjectIdParameter = string;\n\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\nexport type QFilePathParameter = string;\n\n/**\n * A project file.\n\n */\nexport type QFileParameter = string;\n\n/**\n * Set to a dot-separated string of purpose enumerations, i.e. `DATASET`, `FILE`, `INSTANCE`, or `PROJECT`. To exclude file and dataset tasks set this field to `FILE.DATASET`\n\n */\nexport type QExcludePurposeParameter = string;\n\n/**\n * Set true if you want to exclude Tasks related to object removal.\n\n */\nexport type QExcludeRemovalParameter = boolean;\n\n/**\n * Set true if you want to exclude 'done' tasks, i.e. just see those that are still running.\n\n */\nexport type QExcludeDoneParameter = boolean;\n\n/**\n * The ordinal of a previously received event. If set, only events subsequent to the ordinal provided will be returned. Providing a value of 0 will result in retrieving the first and subsequent events.\n\n */\nexport type QEventPriorOrdinalParameter = number;\n\n/**\n * Maximum number of events to return. 
If provided, can be 1 or more.\n\n */\nexport type QEventLimitParameter = number;\n\n/**\n * Filter the datasets by the supplied mime_type.\n\n */\nexport type QDatasetMimeTypeParameter = string;\n\n/**\n * Set, if you're an admin, to call the endpoint without impersonation\n\n */\nexport type QDoNotImpersonateParameter = boolean;\n\n/**\n * Set to get current\n\n */\nexport type QCurrentParameter = boolean;\n\n/**\n * An Application identity\n\n */\nexport type QApplicationIdParameter = string;\n\nexport type UserPatchBodyBody = {\n /** If set to a message the user account is suspended, with the user receiving this message when they try and use the API. A suspended user account can be restored by setting the message to `/restore`\n */\n suspend_message?: string;\n};\n\nexport type UserAccountPatchBodyBody = {\n /** If set the user account becomes private, if provided but false the user account becomes public. Public Users show up in user searches\n */\n private?: boolean;\n /** For **admin** accounts, if set the user account is able to read anything, i.e. `GET` API calls (i.e. endpoints that do not change the Data Manager state) behave as though the caller is acting as *everyone*. An **admin** user would set ths parameter in order to browse the system, and then switch to `impersonate` mode in order to change things as the chosen user\n */\n become_admin?: boolean;\n /** For **admin** accounts, if set API calls behave as though the caller is the user being impersonated. To stop impersonating set this to an empty string. To set impersonation to anything other than an empty string you must also set `become_admin`\n */\n impersonate?: string;\n /** For **admin** accounts, if this is set the account for the user being impersonated is patched, rather then the user's own account. To use this you must have a value for `impersonate`\n */\n use_impersonation?: boolean;\n};\n\nexport type ProjectPostBodyBody = {\n name: string;\n /** Whether the project is private. You may not be permitted to make the project private, that will depend on the project product you're using to create the project\n */\n private?: boolean;\n /** The Data Manager *Tier Product ID* you're using to create the Project\n */\n tier_product_id: string;\n};\n\nexport type ProjectPatchBodyBody = {\n private?: boolean;\n /** The new name of the ptojct\n */\n name?: string;\n};\n\nexport type ProjectFilePutBodyBody = {\n file: Blob;\n /** An alternative filename to use for the uploaded File\n */\n as_filename?: string;\n /** The Project path of the file.\n */\n path?: string;\n};\n\nexport type ExchangeRatePutBodyBody = {\n /** A decimal value used as the new Exchange Rate. Application _raw_ **costs** are multiplied by this value to covert costs to **coins**. A string is used to avoid rounding errors. Internally the value is treated as a Python Decimal.\n */\n rate: string;\n /** A brief comment relating to the new rate\n */\n comment?: string;\n};\n\nexport type JobManifestPutBodyBody = {\n /** The URL of the Job Manifest */\n url: string;\n /** Optional URL header values (a JSON string) */\n header?: string;\n /** Optional URL parameter values (a JSON string) */\n params?: string;\n};\n\nexport type JobManifestLoadPutBodyBody = {\n /** Set to remove all pre-existing Job Definitions that are not present in the existing manifests after the load is complete.\n\nJobs in the collection `im-test` are not removed */\n purge?: boolean;\n};\n\nexport type InstancePostBodyBody = {\n /** A supported application. 
Applications instances are managed using pre-deployed Kubernetes **Operators**. The application ID is a combination of the operator _plural_ and _group_.\n\nApplication IDs currently supported include `datamanagerjobs.squonk.it` and `jupyternotebooks.squonk.it`\n */\n application_id: string;\n /** A supported application version to launch.\n\nThis property is **Deprecated**. It is currently ignored will be removed in a future release\n */\n application_version?: string;\n /** The project to attach\n */\n project_id: string;\n /** The name to use for the instance\n */\n as_name: string;\n /** A URL the DM will use to PUT job progress messages as the requested instance runs. Used, at the moment, for Job execution\n */\n callback_url?: string;\n /** Used in conjunction with the `callback_url` any value provided here will be passed back in the message payload that's delivered to the callback URL. It can be used by the recipient to provide a context that's meaningful\n */\n callback_context?: string;\n /** If set a callback token will be provided in the response. The token allows files to be downloaded from the instance project and expires after a pre-configured amount of time after the instance is complete or if the user revokes the token.\n\nCaution should be taken using this feature. A 3rd party can access the Project's files without authentication, they just need the token and the project identity.\n\nTokens should therefore be revoked when they're no longer required\n */\n generate_callback_token?: boolean;\n /** An optional 22-character **sortuuid** callback token that is supplied by the remote service. If not provided the user can use `generate_callback_token` to have one generated and returned in the response.\n\nSee the Python module's `shortuuid.get_alphabet()` for the full list of permitted characters\n */\n callback_token?: string;\n /** A debug value that may be used by the instance.\n\nFor Data Manager **Job** applications setting this to anything other zero ('0') prevents the Job's Pod from being deleted automatically, allowing a developer to inspect the Pod's log for example.\n\nThe behaviour of **Application** instances using this property is undefined. It will depend on whether the application **CRD** handles the Data Manager debug field.\n */\n debug?: string;\n /** The instance specification. A JSON string that's application-specific and controls the application's behaviour.\n\nWhen launching a Data Manager **Job** Application you must identify the Job using the properties `collection`, `job` and `version`, e.g. `{\"collection\":\"im-test\",\"job\":\"nop\",\"version\":\"1.0.0\"}`\n\nJobs that offer commands will often advertise a series of **inputs** and **options** where the values can be provided using a **variables** map in the specification. Something like `\"variables\":{\"x\":7}`.\n\nJobs start in a Job-specific **working directory** but the starting directory for any Job can be adjusted by defining a `sub_path` to the root specification. For example, if you want the Job to start in the path `foo/bar` (inside the Job's built-in working directory) you can add `\"sub_path\":\"foo/bar\"` to the specification. You can only use a sub-path for a Job if the Job defines a working directory and `sub-path` cannot begin or end with a path separator (`/`).\n\nApplications also support `variables`. The Jupyter notebook application supports the definition of the notebook `\"image\"`, `\"cpu\"` and `\"memory\"`. 
A full Jupyter notebook specification might be `{\"variables\":{\"image\":\"jupyter/tensorflow-notebook:tensorflow-2.9.1\",\"cpu\":2,\"memory\":\"4Gi\"}}`, where `memory` is limited to `Gi` as a suffix.\n */\n specification?: string;\n};\n\nexport type FilePostBodyBody = {\n /** The Dataset UUID for the File that you intend to attach\n */\n dataset_id: string;\n /** The Dataset version to attach\n */\n dataset_version: number;\n /** The Project UUID you're attaching to\n */\n project_id: string;\n /** The desired Dataset file type (a MIME type). Whether or not the chosen fileType is supported will depend on the Dataset\n */\n as_type: string;\n /** A path within the Project to add the File, default is the project root ('/'), the mount-point within the application container. Paths must begin '/'\n */\n path?: string;\n /** Whether to compress the Dataset File as it's attached. Compression is achieved using gzip, resulting in a File ending `.gz`. By default the file will be compressed\n */\n compress?: boolean;\n /** Whether the Dataset File can be modified while in the Project. By default the File cannot be modified\n */\n immutable?: boolean;\n};\n\nexport type DatasetPutBodyBody = {\n /** The MIME type of the Dataset\n */\n dataset_type: string;\n /** Extra variables (text) presented to the Dataset format-support container, which occurs during the upload and database processing stage. The content of the text is Dataset type specific.\n */\n format_extra_variables?: string;\n /** If set the post-format database load action is skipped. This means the dataset is uploaded but its molecules are not entered into the Data Manager database. This can save significant time if you're just experimenting with the dataset and do not need the molecules in the database\n */\n skip_molecule_load?: boolean;\n /** The Project the file belongs to\n */\n project_id: string;\n /** The Project path of the file.\n */\n path: string;\n /** The file name of the file in the Project path to load as a new Dataset.\n */\n file_name: string;\n /** If provided the File becomes a new version of the Dataset named. If not provided this File becomes the first version of a new Dataset, whose ID is returned to you on success.\n */\n dataset_id?: string;\n /** The Organisational Unit you want the Dataset to belong to. If not supplied the Project Unit is used\n */\n unit_id?: string;\n};\n\nexport type DatasetVersionMetaPostBodyBody = {\n /** JSON string containing a list of parameter changes to the metadata. Only the description is currently allowed.\n */\n meta_properties?: string;\n /** JSON string containing a list of annotations. The format of the labels should match either the Fields Descriptor or Service Execution annotation formats described in the data-manager-metadata library.\n */\n annotations?: string;\n};\n\nexport type DatasetMetaPostBodyBody = {\n /** JSON string containing a list of parameter changes to the metadata. Only the description is currently allowed.\n */\n meta_properties?: string;\n /** JSON string containing a list of labels. The format of the labels should match the label annotation format described in the data-manager-metadata library.\n */\n labels?: string;\n};\n\nexport type DatasetPostBodyBody = {\n dataset_file: Blob;\n /** The MIME type of the Dataset. Values like `chemical/x-mdl-sdfile`, `chemical/x-mdl-molfile`, and `chemical/x-pdb` are permitted. 
See the **\\/type** endpoint for a full list of types.\n */\n dataset_type: string;\n /** Extra variables (text) presented to the Dataset format-support container, which occurs during the upload and database processing stage. The content of the text is Dataset type specific.\n */\n format_extra_variables?: string;\n /** If set the post-format database load action is skipped. This means the dataset is uploaded but its molecules are not entered into the Data Manager database. This can save significant time if you're just experimenting with the dataset and do not need the molecules in the database\n */\n skip_molecule_load?: boolean;\n /** An optional new filename to use for the uploaded Dataset. The Dataset will be stored using this name.\n */\n as_filename?: string;\n /** If provided the Dataset becomes a new version of the Dataset named. If not provided this Dataset becomes the first version of a new Dataset, whose ID is returned to you on success.\n */\n dataset_id?: string;\n /** The Organisational Unit you want the Dataset to belong to\n */\n unit_id: string;\n};\n\nexport interface AsAdditionalDataProcessingCharge {\n cost: string;\n cost_to_coins_er: string;\n cost_scale_factor: string;\n instance_id: string;\n instance_name: string;\n started: string;\n stopped?: string;\n run_time?: string;\n error_message?: string;\n job_collection?: string;\n job_job?: string;\n job_version?: string;\n collateral_pod_count?: number;\n collateral_cpu_hours?: string;\n}\n\nexport interface UserSummary {\n /** The user's preferred username\n */\n username: string;\n}\n\nexport interface UserDetail {\n /** The user's preferred username\n */\n username: string;\n /** The date and time the user was first seen (an ISO-8601 formatted string in UTC)\n */\n first_seen?: string;\n /** Set if the user's account is marked as private. Private accounts do not show up against general queries.\n */\n private: boolean;\n /** True if the account is suspended\n */\n suspended?: boolean;\n /** If the account is suspended this typically displays a reason for suspension\n */\n suspension_message?: string;\n /** For admin accounts, whether the user is acting in an administrative capacity, i.e. acting as everyone\n */\n become_admin?: boolean;\n /** For admin accounts, whether the user is impersonating another user\n */\n impersonate?: string;\n}\n\nexport interface UserAccountDetail {\n user: UserDetail;\n /** Whether the caller has admin privilege */\n caller_has_admin_privilege: boolean;\n}\n\n/**\n * The Schema type (an object)\n\n */\nexport type TypeSummaryFormatterOptionsType =\n (typeof TypeSummaryFormatterOptionsType)[keyof typeof TypeSummaryFormatterOptionsType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TypeSummaryFormatterOptionsType = {\n object: \"object\",\n} as const;\n\n/**\n * If present, contains the formatter_options that can be entered in the format_extra_variables field in the POST /dataset api.\n\n */\nexport type TypeSummaryFormatterOptions = {\n /** The title of the Formatter object\n */\n title: string;\n /** The Schema type (an object)\n */\n type: TypeSummaryFormatterOptionsType;\n /** Required properties\n */\n required: string[];\n [key: string]: any;\n};\n\nexport interface TypeSummary {\n /** The File Type MIME\n */\n mime: string;\n /** The file's supported file extensions. Each type is limited to a limited number of extensions. 
For example, SDF files must have the extension `.sdf` (or `.sdf.gz`).\n */\n file_extensions: string[];\n /** The file's type's format-support container image (if set). Types without a format support image cannot be uploaded, but they might be available for use as destination type when a Dataset is added to a Project.\n */\n formatter_image?: string;\n /** True if Datasets uploaded using this type's support loading of data into the Data Manager data-base.\n */\n formatter_supports_db_load?: boolean;\n /** If present, contains the formatter_options that can be entered in the format_extra_variables field in the POST /dataset api.\n */\n formatter_options?: TypeSummaryFormatterOptions;\n}\n\n/**\n * The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type TaskSummaryProcessingStage =\n (typeof TaskSummaryProcessingStage)[keyof typeof TaskSummaryProcessingStage];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskSummaryProcessingStage = {\n COPYING: \"COPYING\",\n FAILED: \"FAILED\",\n FORMATTING: \"FORMATTING\",\n LOADING: \"LOADING\",\n DELETING: \"DELETING\",\n DONE: \"DONE\",\n} as const;\n\nexport interface TaskSummary {\n /** The Task UUID\n */\n id: string;\n /** If a container image is launched by the task the image name is available here\n */\n image?: string;\n /** The date and time the task was created\n */\n created: string;\n /** True if the task has run to completion. If the task finished successfully the `exit_code` will be zero.\n */\n done: boolean;\n /** True if the Task relates to an object removal, i.e. a DELETE\n */\n removal?: boolean;\n /** Present when `done` and zero if the task finished successfully.\n */\n exit_code?: number;\n /** The purpose for the Task. Tasks are responsible for _uploading_ a Dataset, _attaching_ a Dataset as a File to a project or launching an Applications _instance_.\n */\n purpose: string;\n /** The related object ID for the Task purpose. If the purpose relates to a **Dataset** upload the ID will be a Dataset ID.\n */\n purpose_id: string;\n /** The related object version for the Task purpose. This field will only be set if the **purpose** is `DATASET`.\n */\n purpose_version?: number;\n /** The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage?: TaskSummaryProcessingStage;\n}\n\n/**\n * The task state. The typical state sequence is `PENDING`, then `STARTED` and finally `SUCCESS`\n\n */\nexport type TaskStateState =\n (typeof TaskStateState)[keyof typeof TaskStateState];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskStateState = {\n PENDING: \"PENDING\",\n STARTED: \"STARTED\",\n RETRY: \"RETRY\",\n SUCCESS: \"SUCCESS\",\n FAILURE: \"FAILURE\",\n} as const;\n\nexport interface TaskState {\n /** The task state. 
The typical state sequence is `PENDING`, then `STARTED` and finally `SUCCESS`\n */\n state: TaskStateState;\n /** A short message accompanying the state, generally only found when the state is `FAILURE`\n */\n message?: string;\n /** The date and time of the state change\n */\n time: string;\n}\n\nexport interface TaskIdentity {\n task_id: string;\n}\n\n/**\n * The level of the message, a typical logging framework value\n\n */\nexport type TaskEventLevel =\n (typeof TaskEventLevel)[keyof typeof TaskEventLevel];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskEventLevel = {\n CRITICAL: \"CRITICAL\",\n ERROR: \"ERROR\",\n WARNING: \"WARNING\",\n INFO: \"INFO\",\n DEBUG: \"DEBUG\",\n} as const;\n\nexport interface TaskEvent {\n /** The event sequence number. The first event is always '1'.\n */\n ordinal: number;\n /** A short message.\n */\n message: string;\n /** The level of the message, a typical logging framework value\n */\n level: TaskEventLevel;\n /** The date and time the event was generated\n */\n time: string;\n}\n\nexport type ServiceErrorSummarySeverity =\n (typeof ServiceErrorSummarySeverity)[keyof typeof ServiceErrorSummarySeverity];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const ServiceErrorSummarySeverity = {\n CRITICAL: \"CRITICAL\",\n ERROR: \"ERROR\",\n WARNING: \"WARNING\",\n} as const;\n\nexport interface ServiceErrorSummary {\n id: number;\n created: string;\n summary: string;\n severity: ServiceErrorSummarySeverity;\n hostname: string;\n error_code?: number;\n stack_trace: string;\n acknowledged: boolean;\n acknowledged_at?: string;\n acknowledging_user?: string;\n}\n\nexport interface ProjectFileDetail {\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The ProjectFile's Dataset origin\n */\n dataset_id?: string;\n /** The ProjectFile's Dataset origin version\n */\n dataset_version?: number;\n /** The ProjectFile's unique ID\n */\n file_id: string;\n /** The ProjectFile's filename within the Project\n */\n file_name: string;\n /** The ProjectFile's path within the Project volume\n */\n file_path: string;\n /** True if the ProjectFile cannot be modified while in the Project\n */\n immutable: boolean;\n /** The owner of the ProjectFile. This is the user that added the Dataset (as this file) to the Project\n */\n owner: string;\n /** The Project the ProjectFile belongs to\n */\n project_id?: string;\n /** The ProjectFile MIME type\n */\n mime_type: string;\n}\n\nexport interface ProjectDetail {\n /** The project name\n */\n name: string;\n /** The project unique reference\n */\n project_id: string;\n created: string;\n /** The Account Server Product the Project belongs to\n */\n product_id?: string;\n /** The Account Server Unit the Project Product belongs to\n */\n unit_id?: string;\n /** The project (owner) creator\n */\n owner: string;\n /** True if the project is private. Private projects are only visible to the owner and its editors.\n */\n private: boolean;\n /** An editor (user_id) of the project */\n editors: string[];\n /** An observer (user_id) of the project */\n observers: string[];\n /** The approximate size of all the files in the Project volume. This is updated regularly throughout the day and its current size may differ from what is reported here. The smallest billable unit is 1GiB (1,073,741,824 bytes). 
Therefore a project that contains 32KiB of files is recorded as 1GiB in size */\n size: number;\n /** A list of managed files in the Project\n */\n files?: ProjectFileDetail[];\n}\n\nexport interface JobReplacement {\n collection: string;\n job: string;\n}\n\n/**\n * A list of Jobs, collection and job that are either replacing or being replaced\n\n */\nexport type JobReplacements = JobReplacement[];\n\nexport interface JobOrderDetail {\n options: string[];\n}\n\n/**\n * The Job command's options. The JSONSchema for the command's options, essentially the **variables/options** block of the Job's JobDefinition.\n\n */\nexport type JobVariablesOptions = { [key: string]: any };\n\n/**\n * The Job command's outputs. The JSONSchema for the command's inputs, essentially the **variables/outputs** block of the Job's JobDefinition.\n\n */\nexport type JobVariablesOutputs = { [key: string]: any };\n\n/**\n * The Job command's inputs. The JSONSchema for the command's inputs, essentially the **variables/inputs** block of the Job's JobDefinition.\n\n */\nexport type JobVariablesInputs = { [key: string]: any };\n\nexport interface JobVariables {\n /** The Job command's variable ordering declaration. Contains a list of `options` in an ordered list, the order defines the order of presentation of the Job's variables.\n */\n order?: JobOrderDetail;\n /** The Job command's inputs. The JSONSchema for the command's inputs, essentially the **variables/inputs** block of the Job's JobDefinition.\n */\n inputs?: JobVariablesInputs;\n /** The Job command's outputs. The JSONSchema for the command's inputs, essentially the **variables/outputs** block of the Job's JobDefinition.\n */\n outputs?: JobVariablesOutputs;\n /** The Job command's options. The JSONSchema for the command's options, essentially the **variables/options** block of the Job's JobDefinition.\n */\n options?: JobVariablesOptions;\n}\n\n/**\n * The optional container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type JobSummaryImageType =\n (typeof JobSummaryImageType)[keyof typeof JobSummaryImageType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const JobSummaryImageType = {\n SIMPLE: \"SIMPLE\",\n NEXTFLOW: \"NEXTFLOW\",\n} as const;\n\nexport interface JobSummary {\n /** The Job's unique ID\n */\n id: number;\n /** The Job namespace\n */\n collection: string;\n /** The Job name, unique within a given namespace\n */\n job: string;\n /** The Job version\n */\n version: string;\n /** The optional container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n image_type: JobSummaryImageType;\n /** The Job's category\n */\n category?: string;\n /** The list of keywords assigned to the Job\n */\n keywords?: string[];\n /** The name of the job in English\n */\n name: string;\n /** The description of the job in English\n */\n description?: string;\n /** A URL linking to the Job documentation\n */\n doc_url?: string;\n /** A list of Account Server assets names required to run the Job. You need access to these assets in order to run the Job\n */\n required_assets: string[];\n /** A list of Account Server \"required_assets\" that cannot be found. 
If assets cannot be found the user will receive a \"disabled_reason\" that should explain the problem.\n */\n missing_assets?: string[];\n /** True if disabled. Disabled Jobs cannot be executed. If disabled a reason will be found in `disabled_reason`\n */\n disabled: boolean;\n /** A reason why the Job has been disabled.\n */\n disabled_reason?: string;\n /** A suggested remedy that accompanies the \"disabled_reason\". This provides the client with advice on how to avoid the reason that the Job's been disabled, for reasons that can be avoided (not all are).\n\nWhere there is no remedy for a given reason the remedy will be \"There is no remedy\".\n */\n disabled_remedy?: string;\n replaces?: JobReplacements;\n replaced_by?: JobReplacements;\n}\n\nexport interface JobManifestDetail {\n /** The Job Manifest record ID */\n id: number;\n url: string;\n header?: string;\n params?: string;\n /** The Date (and time) the manifest was created\n */\n created: string;\n /** The (admin) user who created the manifest\n */\n creator?: string;\n /** The number of job definition files loaded during the most recent successful load\n */\n job_definition_files_loaded?: number;\n /** The number of job definitions loaded during the most recent successful load\n */\n job_definitions_loaded?: number;\n /** The Date (and time) the manifest was last loaded successfully\n */\n last_successful_load_time?: string;\n /** The Date (and time) the manifest was last loaded, successfully or otherwise. If the manifest (or any of the Job definition files it refers to) fails to load the `load_status` should provide some diagnostic feedback\n */\n last_load_time?: string;\n /** The status of the time the manifest was last loaded. If the load was successful this will be `SUCCESS` and `last_successful_load` will be the same as `last_load`\n */\n last_load_status: string;\n}\n\nexport interface JobApplication {\n /** The Job's operator ID */\n id: string;\n /** The Job's operator version */\n version: string;\n}\n\n/**\n * The Job container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type InstanceSummaryJobImageType =\n (typeof InstanceSummaryJobImageType)[keyof typeof InstanceSummaryJobImageType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryJobImageType = {\n SIMPLE: \"SIMPLE\",\n NEXTFLOW: \"NEXTFLOW\",\n} as const;\n\n/**\n * The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparing to run, is `PENDING`. The instance is running when the phase is `RUNNING`. 
`COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccessfully.\n\n */\nexport type InstanceSummaryPhase =\n (typeof InstanceSummaryPhase)[keyof typeof InstanceSummaryPhase];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryPhase = {\n COMPLETED: \"COMPLETED\",\n CRASH_LOOP_BACKOFF: \"CRASH_LOOP_BACKOFF\",\n FAILED: \"FAILED\",\n IMAGE_PULL_BACKOFF: \"IMAGE_PULL_BACKOFF\",\n PENDING: \"PENDING\",\n RUNNING: \"RUNNING\",\n SUCCEEDED: \"SUCCEEDED\",\n UNKNOWN: \"UNKNOWN\",\n} as const;\n\n/**\n * The type of Application, which can be a `job` or an `application`\n\n */\nexport type InstanceSummaryApplicationType =\n (typeof InstanceSummaryApplicationType)[keyof typeof InstanceSummaryApplicationType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryApplicationType = {\n APPLICATION: \"APPLICATION\",\n JOB: \"JOB\",\n} as const;\n\nexport interface InstanceSummary {\n /** The application instance ID\n */\n id: string;\n /** True if the instance is archived (protected from automatic deletion)\n */\n archived: boolean;\n /** The application ID\n */\n application_id: string;\n /** The application version\n */\n application_version: string;\n /** The User's specification, provided when the application was launched\n */\n application_specification?: string;\n /** The type of Application, which can be a `job` or an `application`\n */\n application_type: InstanceSummaryApplicationType;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparing to run, is `PENDING`. The instance is running when the phase is `RUNNING`. `COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccessfully.\n */\n phase: InstanceSummaryPhase;\n /** The data and time (UTC) the instance was launched\n */\n launched: string;\n /** The data and time (UTC) the instance started running\n */\n started?: string;\n /** The data and time (UTC) the instance stopped running\n */\n stopped?: string;\n /** The time the instance has spent running in the cluster. It's a string representation of a Python timedelta object, e.g. `0:12:32` for a run-time of 12 minutes and 32 seconds. The run-time must be considered as an _estimate_ until the instance has stopped and the instance is only considered to be running once `started` has been set.\n */\n run_time: string;\n /** The application instance owner, the person who launched the application and is the only user than can stop it.\n */\n owner: string;\n /** The Project the instances is running in.\n */\n project_id: string;\n /** The Instance name\n */\n name: string;\n /** For applications (not Jobs) this is the URL the application has exposed. 
It will ponly be available when the application has started.\n */\n url?: string;\n /** If the instance relates to a job, this will be the job collection, as defined in the original collection's job definition.\n */\n job_collection?: string;\n /** If the instance relates to a job, this will be the job, as defined in the original collection's job definition.\n */\n job_job?: string;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job definition.\n */\n job_name?: string;\n /** The Job definition's unique ID\n */\n job_id?: number;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job definition.\n */\n job_version?: string;\n /** The Job container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n job_image_type?: InstanceSummaryJobImageType;\n /** The instance's known outputs, a JSON string defining a map of all the outputs. Typically applied only to JOB application types\n */\n outputs?: string;\n /** Set if the instance output file permissions are expected to be fixed by the Data Manager when the Instance completes.\n */\n fix_permissions?: boolean;\n /** The current running coin cost of the instance.\n */\n coins?: string;\n}\n\nexport interface FileStat {\n /** The size of the file in bytes\n */\n size: number;\n /** The date and time (UTC) of the last modification\n */\n modified: string;\n}\n\nexport interface FilePathFile {\n /** The file's Dataset ID (if the file belongs to a Dataset)\n */\n dataset_id?: string;\n /** The file's Dataset version (if the file belongs to a Dataset)\n */\n dataset_version?: number;\n /** The file name\n */\n file_name: string;\n /** The ID of the file (if the file belongs to a Dataset)\n */\n file_id?: string;\n /** Whether the file is immutable (read-only)\n */\n immutable?: boolean;\n /** The file's MIME type\n */\n mime_type?: string;\n /** The file's owner\n */\n owner: string;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n stat: FileStat;\n}\n\nexport interface JobExchangeRateSummary {\n id: number;\n rate?: string;\n collection: string;\n job: string;\n version: string;\n}\n\nexport interface ApplicationExchangeRateSummary {\n id: string;\n rate?: string;\n}\n\nexport interface ExchangeRateDetail {\n id: number;\n rate: string;\n created: string;\n user_id: string;\n comment?: string;\n}\n\n/**\n * The dictionary of label/value pairs\n\n */\nexport type DatasetVersionSummaryLabels = { [key: string]: any };\n\n/**\n * The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). 
A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type DatasetVersionSummaryProcessingStage =\n (typeof DatasetVersionSummaryProcessingStage)[keyof typeof DatasetVersionSummaryProcessingStage];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetVersionSummaryProcessingStage = {\n COPYING: \"COPYING\",\n FAILED: \"FAILED\",\n FORMATTING: \"FORMATTING\",\n LOADING: \"LOADING\",\n DELETING: \"DELETING\",\n DONE: \"DONE\",\n} as const;\n\nexport interface DatasetVersionSummary {\n /** The owner of the Dataset version\n */\n owner?: string;\n /** The source of the Dataset - typically the name of the Dataset that was uploaded or a URL reference\n */\n source_ref: string;\n /** The date and time the Dataset was uploaded (an ISO-8601 formatted string in UTC)\n */\n published: string;\n /** The Dataset MIME type\n */\n type: string;\n /** The filename of the Dataset\n */\n file_name: string;\n /** The list of Project's the Dataset is attached to\n */\n projects: string[];\n /** Any extra variables passed in during the upload and handed to the format-support container.\n */\n format_extra_variables?: string;\n /** The version of the dataset\n */\n version: number;\n /** The name of the Pod handling the creation of the Dataset\n */\n creator_pod_name?: string;\n /** The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage: DatasetVersionSummaryProcessingStage;\n /** If this is a Deleted dataset this is the date and time the dataset was deleted (an ISO-8601 formatted string in UTC)\n */\n deleted?: string;\n /** If this is a Deleted dataset this is the username of the user that deleted the Dataset\n */\n deleting_user?: string;\n /** The dictionary of label/value pairs\n */\n labels?: DatasetVersionSummaryLabels;\n /** The size, in bytes, of the formatted Dataset\n */\n size?: number;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n}\n\nexport interface DatasetVersionProjectFile {\n project_name: string;\n project: string;\n files: string[];\n}\n\n/**\n * The dictionary of label/value pairs\n\n */\nexport type DatasetVersionDetailLabels = { [key: string]: any };\n\n/**\n * The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). 
A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type DatasetVersionDetailProcessingStage =\n (typeof DatasetVersionDetailProcessingStage)[keyof typeof DatasetVersionDetailProcessingStage];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetVersionDetailProcessingStage = {\n COPYING: \"COPYING\",\n FAILED: \"FAILED\",\n FORMATTING: \"FORMATTING\",\n LOADING: \"LOADING\",\n DELETING: \"DELETING\",\n DONE: \"DONE\",\n} as const;\n\nexport interface DatasetVersionDetail {\n /** The owner of the Dataset version\n */\n owner: string;\n /** The source of the Dataset - typically the name of the Dataset that was uploaded or a URL reference\n */\n source_ref: string;\n /** The date and time the Dataset was uploaded (an ISO-8601 formatted string in UTC)\n */\n published: string;\n /** The Dataset MIME type\n */\n type: string;\n /** The filename of the Dataset\n */\n file_name: string;\n /** The list of\n */\n project_files: DatasetVersionProjectFile[];\n /** The list of Project's the Dataset is attached to\n */\n projects: string[];\n /** Any extra variables passed in during the upload and handed to the format-support container.\n */\n format_extra_variables?: string;\n /** The version of the dataset\n */\n version: number;\n /** The name of the Pod handling the creation of the Dataset\n */\n creator_pod_name?: string;\n /** The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage: DatasetVersionDetailProcessingStage;\n /** If this is a Deleted dataset this is the date and time the dataset was deleted (an ISO-8601 formatted string in UTC)\n */\n deleted?: string;\n /** If this is a Deleted dataset this is the username of the user that deleted the Dataset\n */\n deleting_user?: string;\n /** The dictionary of label/value pairs\n */\n labels?: DatasetVersionDetailLabels;\n /** The size, in bytes, of the formatted Dataset\n */\n size?: number;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n}\n\nexport interface DatasetSummary {\n /** The Dataset ID\n */\n dataset_id: string;\n /** The list of editors\n */\n editors: string[];\n /** The set of separate versions of the Dataset\n */\n versions: DatasetVersionSummary[];\n}\n\nexport interface DatasetDetail {\n /** The Dataset ID\n */\n dataset_id: string;\n /** The list of editors\n */\n editors: string[];\n /** The owner of the Dataset\n */\n owner: string;\n /** The set of separate versions of the Dataset\n */\n versions: DatasetVersionDetail[];\n}\n\nexport interface ApplicationSummary {\n /** The application name, the value of the Kubernetes **Custom Resource Definition** `spec.names.kind` property\n */\n kind: string;\n /** The application group\n */\n group?: string;\n /** The application unique reference, the value of the Kubernetes **Custom Resource Definition** `metadata.name` property\n */\n application_id: string;\n /** The application's latest version as declared in The **Custom Resource Definition**\n */\n latest_version: string;\n}\n\nexport interface ApplicationImageVariant {\n name: string;\n image: string;\n}\n\nexport interface ApplicationImageVariants {\n public?: ApplicationImageVariant[];\n}\n\n/**\n * The REST method used. 
GET methods are not logged\n\n */\nexport type ApiLogDetailMethod =\n (typeof ApiLogDetailMethod)[keyof typeof ApiLogDetailMethod];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const ApiLogDetailMethod = {\n DELETE: \"DELETE\",\n PATCH: \"PATCH\",\n POST: \"POST\",\n PUT: \"PUT\",\n} as const;\n\nexport interface ApiLogDetail {\n /** The REST method used. GET methods are not logged\n */\n method: ApiLogDetailMethod;\n /** The API path used, e.g. `/dataset`\n */\n path: string;\n /** The date/time the API call began\n */\n began: string;\n /** The REST method approximate execution time (nS) in a humanised form, where 7969400 is presented as 7,969,400\n */\n duration_ns?: string;\n /** The HTTP response status code\n */\n status_code?: number;\n /** A JSON string representing the in-query properties used in the call\n */\n params?: string;\n /** A JSON string representing the in-body properties used in the call\n */\n body?: string;\n /** Not all API calls using your user ID may have been executed by you, In cases where an administrator has executed a REST API call on your behalf, their user ID will be revealed using this property\n */\n impersonator?: string;\n}\n\nexport interface VersionGetResponse {\n /** The Data Manager version. This is guaranteed to be a valid semantic version for official (tagged) images. The version value format for unofficial images is a string but otherwise undefined\n */\n version: string;\n}\n\nexport interface UsersGetResponse {\n /** A list of Users that have used the Data Manager\n */\n users: UserSummary[];\n}\n\nexport interface UserApiLogGetResponse {\n /** A list of API loc call records, with the oldest record first in the list\n */\n api_log: ApiLogDetail[];\n}\n\nexport type UserAccountGetResponse = UserAccountDetail;\n\nexport interface TypesGetResponse {\n /** A list of available MIME types\n */\n types: TypeSummary[];\n}\n\nexport interface TasksGetResponse {\n /** A list of Tasks\n */\n tasks: TaskSummary[];\n}\n\n/**\n * The purpose of the task. Tasks are typically executed for the purpose of Dataset processing, attaching as Project Files or Application Instance execution. Other fields in this object are only valid if the purpose is known (i.e. is not `UNKNOWN`)\n\n */\nexport type TaskGetResponsePurpose =\n (typeof TaskGetResponsePurpose)[keyof typeof TaskGetResponsePurpose];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskGetResponsePurpose = {\n DATASET: \"DATASET\",\n FILE: \"FILE\",\n INSTANCE: \"INSTANCE\",\n PROJECT: \"PROJECT\",\n} as const;\n\nexport interface TaskGetResponse {\n /** The purpose of the task. Tasks are typically executed for the purpose of Dataset processing, attaching as Project Files or Application Instance execution. Other fields in this object are only valid if the purpose is known (i.e. is not `UNKNOWN`)\n */\n purpose: TaskGetResponsePurpose;\n /** The identity of the purpose, where available. This will be the Dataset UUID if the purpose of the task is/was for Dataset processing.\n */\n purpose_id: string;\n /** The version number, relating to the object under control. For Datasets this will be the Dataset version.\n */\n purpose_version?: number;\n /** Is the Task Purpose is `INSTANCE`, and the instance was given a `specification` the specification can be found here. 
For **Applications** the specification is returned verbatim.\n */\n instance_specification?: string;\n /** If a container image is launched by the task the image name is available here\n */\n image?: string;\n /** The date and time the task was created\n */\n created: string;\n /** True if the task has run to completion. If the task finished successfully the `exit_code` will be zero.\n */\n done: boolean;\n /** True if the Task relates to an object removal, i.e. a DELETE\n */\n removal?: boolean;\n /** Present when `done` and zero if the task finished successfully.\n */\n exit_code?: number;\n /** A (possibly empty) list of application states, the oldest state occupies the first position in the list.\n */\n states?: TaskState[];\n /** A (possibly empty) list of application events. The oldest event occupies the first position in the list.\n */\n events?: TaskEvent[];\n}\n\nexport interface ServiceErrorsGetResponse {\n /** A list of service errors\n */\n service_errors: ServiceErrorSummary[];\n}\n\nexport interface ProjectsGetResponse {\n projects: ProjectDetail[];\n}\n\nexport interface ProjectPostResponse {\n /** The project identity\n */\n project_id: string;\n}\n\nexport type ProjectGetResponse = ProjectDetail;\n\nexport type ProjectDeleteResponse = TaskIdentity;\n\nexport interface JobsGetResponse {\n jobs: JobSummary[];\n}\n\n/**\n * The optional container image type. Typically a single-container `SIMPLE` (where only one container runs) or a workflow where multiple container images can be spawned (like `NEXTFLOW`)\n\n */\nexport type JobGetResponseImageType =\n (typeof JobGetResponseImageType)[keyof typeof JobGetResponseImageType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const JobGetResponseImageType = {\n SIMPLE: \"SIMPLE\",\n NEXTFLOW: \"NEXTFLOW\",\n} as const;\n\nexport interface JobGetResponse {\n /** The Job's unique ID\n */\n id: number;\n application: ApplicationSummary;\n /** The Job collection\n */\n collection: string;\n /** The Job name, unique within a given collection\n */\n job: string;\n /** The Job version\n */\n version: string;\n /** The Job command (usually encoded)\n */\n command: string;\n /** The Job command encoding\n */\n command_encoding: string;\n /** The name of the container image that houses the job\n */\n image_name: string;\n /** The container image tag\n */\n image_tag: string;\n /** The directory where the project volume will be mounted. The root path to the project files\n */\n image_project_directory: string;\n /** The directory used as the container image working directory (if defined)\n */\n image_working_directory?: unknown;\n /** The optional container image type. Typically a single-container `SIMPLE` (where only one container runs) or a workflow where multiple container images can be spawned (like `NEXTFLOW`)\n */\n image_type?: JobGetResponseImageType;\n /** The Job's descriptive name\n */\n name: string;\n /** English description of the job\n */\n description?: string;\n /** A URL linking to the Job documentation\n */\n doc_url?: string;\n /** The Job's category\n */\n category?: string;\n /** The list of keywords assigned to the Job\n */\n keywords?: string[];\n variables?: JobVariables;\n exchange_rate: string;\n /** A list of Account Server assets names required to run the Job. You need access to these assets in order to run the Job\n */\n required_assets: string[];\n /** A list of Account Server \"required_assets\" that cannot be found. 
If assets cannot be found the user will receive a \"disabled_reason\" that should explain the problem.\n */\n missing_assets?: string[];\n /** True if disabled. Disabled Jobs cannot be executed. If disabled a reason will be found in `disabled_reason`\n */\n disabled: boolean;\n /** A reason why the Job has been disabled.\n */\n disabled_reason?: string;\n /** A suggested remedy that accompanies the \"disabled_reason\". This provides the client with advice on how to avoid the reason that the Job's been disabled, for reasons that can be avoided (not all are).\n\nWhere there is no remedy for a given reason the remedy will be \"There is no remedy\".\n */\n disabled_remedy?: string;\n replaces?: JobReplacements;\n replaced_by?: JobReplacements;\n}\n\nexport type InstanceTaskPurpose =\n (typeof InstanceTaskPurpose)[keyof typeof InstanceTaskPurpose];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceTaskPurpose = {\n CREATE: \"CREATE\",\n DELETE: \"DELETE\",\n} as const;\n\nexport interface InstanceTask {\n id: string;\n purpose: InstanceTaskPurpose;\n}\n\nexport interface InstancesGetResponse {\n instances: InstanceSummary[];\n}\n\n/**\n * The Job container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type InstanceGetResponseJobImageType =\n (typeof InstanceGetResponseJobImageType)[keyof typeof InstanceGetResponseJobImageType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceGetResponseJobImageType = {\n SIMPLE: \"SIMPLE\",\n NEXTFLOW: \"NEXTFLOW\",\n} as const;\n\n/**\n * The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparing to run, is `PENDING`. The instance is running when the phase is `RUNNING`. `COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccessfully.\n\n */\nexport type InstanceGetResponsePhase =\n (typeof InstanceGetResponsePhase)[keyof typeof InstanceGetResponsePhase];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceGetResponsePhase = {\n COMPLETED: \"COMPLETED\",\n CRASH_LOOP_BACKOFF: \"CRASH_LOOP_BACKOFF\",\n FAILED: \"FAILED\",\n IMAGE_PULL_BACKOFF: \"IMAGE_PULL_BACKOFF\",\n PENDING: \"PENDING\",\n RUNNING: \"RUNNING\",\n SUCCEEDED: \"SUCCEEDED\",\n UNKNOWN: \"UNKNOWN\",\n} as const;\n\n/**\n * The type of Application, which can be a `job` or an `application`\n\n */\nexport type InstanceGetResponseApplicationType =\n (typeof InstanceGetResponseApplicationType)[keyof typeof InstanceGetResponseApplicationType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceGetResponseApplicationType = {\n APPLICATION: \"APPLICATION\",\n JOB: \"JOB\",\n} as const;\n\nexport interface InstanceGetResponse {\n /** The Project the Instance is running in\n */\n project_id: string;\n /** The application name\n */\n name: string;\n /** True if the instance is archived (protected from automatic deletion)\n */\n archived: boolean;\n /** The type of Application, which can be a `job` or an `application`\n */\n application_type: InstanceGetResponseApplicationType;\n /** The application ID\n */\n application_id: string;\n /** The application version\n */\n application_version: string;\n /** The optional application specification. 
For **Applications** this is returned verbatim. For **Jobs** additional material will be found.\n */\n application_specification?: string;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The application owner, the person who launched the application and is the only user than can stop it.\n */\n owner: string;\n /** The date and time the instance was started, an ISO-8601 format string.\n */\n launched: string;\n /** The date and time the instance started running in the cluster. This is typically close to the launch time but contention may mean the instance starts only when resources are available.\n */\n started?: string;\n /** The date and time the instance stopped, an ISO-8601 format string.\n */\n stopped?: string;\n /** The time the instance has spent running in the cluster. It's a string representation of a Python timedelta object, e.g. `0:12:32` for a run-time of 12 minutes and 32 seconds. The run-time must be considered as an _estimate_ until the instance has stopped and the instance is only considered to be running once `started` has been set.\n */\n run_time: string;\n /** The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparing to run, is `PENDING`. The instance is running when the phase is `RUNNING`. `COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccessfully.\n */\n phase: InstanceGetResponsePhase;\n /** The application endpoint\n */\n url?: string;\n /** If the instance relates to a job, this will be the job collection, as defined in the original collection's job definition.\n */\n job_collection?: string;\n /** If the instance relates to a job, this will be the job, as defined in the original collection's job definition.\n */\n job_job?: string;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job definition.\n */\n job_name?: string;\n /** The Job definition's unique ID\n */\n job_id?: number;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job definition.\n */\n job_version?: string;\n /** The Job container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n job_image_type?: InstanceGetResponseJobImageType;\n /** The JSON string representation of the JobDefinition's outputs\n */\n outputs?: string;\n /** Set if the instance output file permissions are expected to be fixed by the Data Manager when the Instance completes.\n */\n fix_permissions?: boolean;\n /** A list of Tasks related to the application. Tasks are responsible for creating and deleting the instance. You should fins at least one, assuming the corresponding task has not been deleted.\n */\n tasks: InstanceTask[];\n /** Set if the instance has a valid callback token. Instances with a valid token allow users to read files form the project without authentication\n */\n has_valid_callback_token: boolean;\n /** The cost exchange rate that applies to the Job at the time of launch. This is a string representation of a Decimal, e.g. `'0.5'`\n */\n launch_exchange_rate?: string;\n /** The accumulated cost accrued by the running Job. Depending on the the Job, this is either known when the Job completes or may change as the Job runs. 
This is a string representation of a Decimal value, e.g. `'32.8'`\n */\n cost?: string;\n /** The accumulated coins accrued by the running Job. Coins are calculated based on the `cost` multiplied by the `launch_exchange_rate`. This is a string representation of a Decimal value, e.g. `'16.4'`\n */\n coins?: string;\n /** Where available, this is the number of hours that the JOb would take if the collateral Pods had access to only one CPU core. Collateral Pods are all the pods created by the Job that are in addition to the main (controlling) Pod. A value of `'0.5'` implies the job would have run in 30 minutes on 1 core. The minimum time resolution is 0.001 (3.6 seconds).\n */\n collateral_cpu_hours?: string;\n /** Where available, the number of collateral Pods spawned by the instance, typically used by Job instances. Collateral Pods are all the pods created by the Job that are in addition to the main (controlling) Pod. For `NEXTFLOW` jobs this is a count of the number of Task pods spawned.\n */\n collateral_pod_count?: number;\n /** An optional error message, used to report underlying problems.\n */\n error_message?: string;\n}\n\nexport type GetExchangeRatesResponseId = number | string;\n\nexport interface GetExchangeRatesResponse {\n id: GetExchangeRatesResponseId;\n exchange_rates: ExchangeRateDetail[];\n}\n\nexport type GetAllExchangeRatesResponseExchangeRatesItem =\n | ApplicationExchangeRateSummary\n | JobExchangeRateSummary;\n\nexport interface GetAllExchangeRatesResponse {\n only_undefined: boolean;\n exchange_rates: GetAllExchangeRatesResponseExchangeRatesItem[];\n}\n\nexport interface FilesGetResponse {\n /** The project\n */\n project_id: string;\n /** The project path\n */\n path: string;\n /** The dataset identity (not its name). A unique reference assigned automatically when uploaded\n */\n files: FilePathFile[];\n /** Sub-directories in the current path\n */\n paths: string[];\n}\n\nexport interface FilePostResponse {\n /** The Project File identity, assigned automatically when a Dataset is added to a Project\n */\n file_id: string;\n /** The name of the File that will appear in the Project\n */\n file_name: string;\n /** The path to the file in the Project, relative to the volume root (mount point). Files in the root of the project will have a path value of '/'\n */\n file_path: string;\n /** The File task identity. The task assigned to convert and attach the Dataset File to the Project\n */\n task_id: string;\n}\n\nexport interface InstancePostResponse {\n /** The application instance identity (not its name). Assigned automatically when created\n */\n instance_id: string;\n /** A token that can be used to access data in the project without further authentication. The token expires automatically or if the user revokes it. The token is only provided if asked for when the instance is launched.\n */\n callback_token?: string;\n /** The instance task identity. The task assigned to process the instance\n */\n task_id: string;\n /** The instance expanded command. Applies only to Job instances.\n */\n command?: string;\n}\n\nexport interface InstanceDryRunPostResponse {\n /** A token that can be used to access data in the project without further authentication. The token expires automatically or if the user revokes it.\n */\n callback_token?: string;\n /** The instance expanded command. 
Applies only to Job instances.\n */\n command?: string;\n}\n\nexport type InstanceDeleteResponse = TaskIdentity;\n\nexport interface DatasetsGetResponse {\n datasets: DatasetSummary[];\n}\n\nexport type DatasetVersionsGetResponse = DatasetDetail;\n\nexport type DatasetVersionDeleteResponse = TaskIdentity;\n\n/**\n * The Metadata type (an object)\n\n */\nexport type DatasetSchemaGetResponseType =\n (typeof DatasetSchemaGetResponseType)[keyof typeof DatasetSchemaGetResponseType];\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetSchemaGetResponseType = {\n object: \"object\",\n} as const;\n\nexport interface DatasetSchemaGetResponse {\n /** The Metadata title\n */\n title: string;\n /** The Metadata description\n */\n description: string;\n /** The Metadata type (an object)\n */\n type: DatasetSchemaGetResponseType;\n /** Required properties\n */\n required: string[];\n [key: string]: any;\n}\n\nexport interface DatasetMetaGetResponse {\n /** The Metadata title\n */\n dataset_name: string;\n /** The Metadata description\n */\n dataset_id: string;\n /** The Metadata type (an object)\n */\n description: string;\n /** The date and time of creation\n */\n created: string;\n /** The date and time it was last updated\n */\n last_updated: string;\n /** The user who created the Metadata\n */\n created_by: string;\n /** The Metadata version\n */\n metadata_version: string;\n /** The Metadata's annotations\n */\n annotations: unknown[];\n /** The Metadata's labels\n */\n labels: unknown[];\n [key: string]: any;\n}\n\nexport interface DatasetDigestGetResponse {\n /** The Dataset matching the digest\n */\n dataset_id: string;\n /** The Dataset version\n */\n dataset_version: number;\n}\n\nexport interface DatasetPutPostResponse {\n /** The dataset identity (not its name). Assigned automatically when uploaded\n */\n dataset_id: string;\n /** The dataset identity version. Assigned automatically when uploaded\n */\n dataset_version: number;\n /** The dataset task identity. The task assigned to process the dataset\n */\n task_id: string;\n}\n\nexport interface ApplicationsGetResponse {\n /** A list of installed applications, which are application-compliant Kubernetes \"operators\"\n */\n applications: ApplicationSummary[];\n}\n\nexport interface ApplicationGetResponse {\n /** The Application's unique ID\n */\n id: string;\n /** The Application COST exchange rate\n */\n exchange_rate: string;\n /** The application specification template. 
Used when creating application instances\n */\n template: string;\n /** A list of instances of the application\n */\n instances: string[];\n /** The application group\n */\n group: string;\n /** The name (kind) of the application\n */\n kind: string;\n /** The list of available versions\n */\n versions: string[];\n image_variants?: ApplicationImageVariants;\n}\n\nexport type AdminUserPutResponse = UserAccountDetail;\n\nexport interface AdminJobManifestLoadPutResponse {\n /** True if successful, false otherwise\n */\n status: boolean;\n /** The number of Job Manifests inspected\n */\n manifests_inspected: number;\n /** The number of Job Definitions inspected\n */\n job_definitions_inspected: number;\n /** The number of Jobs inspected\n */\n jobs_inspected: number;\n /** The number of Jobs purged\n */\n jobs_purged?: number;\n}\n\nexport interface AdminJobManifestGetResponse {\n /** The list of known Job manifests\n */\n job_manifests: JobManifestDetail[];\n}\n\nexport interface AccountServerGetNamespaceResponse {\n /** The configured Account Server namespace, which will be an empty string if one is not configured. The AS API is expected as the service `as-api` in this namespace.\n */\n namespace: string;\n}\n\nexport interface AccountServerGetRegistrationResponse {\n merchant_id: number;\n name: string;\n registered: string;\n}\n\nexport interface DmError {\n /** Brief error text that can be presented to the user\n */\n error: string;\n}\n"],"mappings":";;;;;;;;AAquBO,IAAM,kCAAkC;AAAA,EAC7C,QAAQ;AACV;AA6CO,IAAM,6BAA6B;AAAA,EACxC,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AACR;AA2CO,IAAM,iBAAiB;AAAA,EAC5B,SAAS;AAAA,EACT,SAAS;AAAA,EACT,OAAO;AAAA,EACP,SAAS;AAAA,EACT,SAAS;AACX;AA0BO,IAAM,iBAAiB;AAAA,EAC5B,UAAU;AAAA,EACV,OAAO;AAAA,EACP,SAAS;AAAA,EACT,MAAM;AAAA,EACN,OAAO;AACT;AAqBO,IAAM,8BAA8B;AAAA,EACzC,UAAU;AAAA,EACV,OAAO;AAAA,EACP,SAAS;AACX;AAuIO,IAAM,sBAAsB;AAAA,EACjC,QAAQ;AAAA,EACR,UAAU;AACZ;AAkGO,IAAM,8BAA8B;AAAA,EACzC,QAAQ;AAAA,EACR,UAAU;AACZ;AAWO,IAAM,uBAAuB;AAAA,EAClC,WAAW;AAAA,EACX,oBAAoB;AAAA,EACpB,QAAQ;AAAA,EACR,oBAAoB;AAAA,EACpB,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AACX;AAUO,IAAM,iCAAiC;AAAA,EAC5C,aAAa;AAAA,EACb,KAAK;AACP;AAyJO,IAAM,uCAAuC;AAAA,EAClD,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AACR;AAsEO,IAAM,sCAAsC;AAAA,EACjD,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AACR;AAgHO,IAAM,qBAAqB;AAAA,EAChC,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,MAAM;AAAA,EACN,KAAK;AACP;AAqEO,IAAM,yBAAyB;AAAA,EACpC,SAAS;AAAA,EACT,MAAM;AAAA,EACN,UAAU;AAAA,EACV,SAAS;AACX;AAsEO,IAAM,0BAA0B;AAAA,EACrC,QAAQ;AAAA,EACR,UAAU;AACZ;AA+EO,IAAM,sBAAsB;AAAA,EACjC,QAAQ;AAAA,EACR,QAAQ;AACV;AAmBO,IAAM,kCAAkC;AAAA,EAC7C,QAAQ;AAAA,EACR,UAAU;AACZ;AAWO,IAAM,2BAA2B;AAAA,EACtC,WAAW;AAAA,EACX,oBAAoB;AAAA,EACpB,QAAQ;AAAA,EACR,oBAAoB;AAAA,EACpB,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AACX;AAUO,IAAM,qCAAqC;AAAA,EAChD,aAAa;AAAA,EACb,KAAK;AACP;AA2LO,IAAM,+BAA+B;AAAA,EAC1C,QAAQ;AACV;","names":[]}
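The schema comments embedded in the source map above describe an instance's launch_exchange_rate, cost and coins as string representations of Decimal values, with coins derived from the cost multiplied by the launch exchange rate. A minimal TypeScript sketch of that relationship follows; the field names mirror the InstanceGetResponse schema, while the estimateCoins helper and its use of plain Number parsing are illustrative assumptions (a real client would likely prefer a proper decimal library).

// Field names mirror the InstanceGetResponse schema documented above;
// the helper itself is an illustrative assumption, not part of the package.
interface InstanceCostFields {
  launch_exchange_rate?: string; // Decimal as a string, e.g. "0.5"
  cost?: string;                 // Decimal as a string, e.g. "32.8"
  coins?: string;                // Decimal as a string, e.g. "16.4"
}

// Re-derives coins as cost * launch_exchange_rate, as the schema comment describes.
function estimateCoins(instance: InstanceCostFields): number | undefined {
  if (instance.cost === undefined || instance.launch_exchange_rate === undefined) {
    return undefined;
  }
  return Number(instance.cost) * Number(instance.launch_exchange_rate);
}

// e.g. estimateCoins({ cost: "32.8", launch_exchange_rate: "0.5" }) === 16.4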
|
package/instance/instance.d.ts
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { a8 as InstancePostBodyBody,
|
|
1
|
+
import { a8 as InstancePostBodyBody, bR as customInstance, bv as InstancePostResponse, bS as ErrorType, bN as DmError, b as GetInstancesParams, bk as InstancesGetResponse, bw as InstanceDryRunPostResponse, bo as InstanceGetResponse, ap as TaskIdentity, P as PatchInstanceParams } from '../custom-instance-b01bd8f0.js';
|
|
2
2
|
import * as _tanstack_react_query from '@tanstack/react-query';
|
|
3
3
|
import { UseMutationOptions, UseQueryOptions, QueryKey, UseQueryResult } from '@tanstack/react-query';
|
|
4
4
|
import 'axios';
|
package/job/job.d.ts
CHANGED
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import { UseQueryOptions, QueryKey, UseQueryResult } from '@tanstack/react-query';
|
|
2
|
-
import { g as GetJobsParams,
|
|
2
|
+
import { g as GetJobsParams, bR as customInstance, bf as JobsGetResponse, bS as ErrorType, bN as DmError, f as GetJobByVersionParams, bh as JobGetResponse, e as GetJobParams } from '../custom-instance-b01bd8f0.js';
|
|
3
3
|
import 'axios';
|
|
4
4
|
|
|
5
5
|
type SecondParameter<T extends (...args: any) => any> = T extends (config: any, args: infer P) => any ? P : never;
|
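Each of these generated modules re-declares the same SecondParameter helper (visible at the end of this hunk and in the hunks that follow). It is a conditional type that extracts the second parameter of a two-argument function, presumably used to type the request options forwarded to the imported customInstance. A small self-contained illustration; the fetchWidget declaration is purely hypothetical:

// Copied verbatim from the generated declarations above.
type SecondParameter<T extends (...args: any) => any> = T extends (config: any, args: infer P) => any ? P : never;

// Hypothetical two-argument function, used only to show the inference.
declare function fetchWidget(
  config: { url: string },
  options?: { signal?: AbortSignal },
): Promise<unknown>;

// Resolves to the second parameter's type: { signal?: AbortSignal } | undefined
type FetchWidgetOptions = SecondParameter<typeof fetchWidget>;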
package/metadata/metadata.d.ts
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
import * as _tanstack_react_query from '@tanstack/react-query';
|
|
2
2
|
import { UseMutationOptions, UseQueryOptions, QueryKey, UseQueryResult } from '@tanstack/react-query';
|
|
3
|
-
import { ab as DatasetVersionMetaPostBodyBody,
|
|
3
|
+
import { ab as DatasetVersionMetaPostBodyBody, bR as customInstance, bD as DatasetMetaGetResponse, bS as ErrorType, bN as DmError, ac as DatasetMetaPostBodyBody } from '../custom-instance-b01bd8f0.js';
|
|
4
4
|
import 'axios';
|
|
5
5
|
|
|
6
6
|
type SecondParameter<T extends (...args: any) => any> = T extends (config: any, args: infer P) => any ? P : never;
|
package/package.json
CHANGED
package/project/project.d.ts
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { r as GetProjectsParams,
|
|
1
|
+
import { r as GetProjectsParams, bR as customInstance, bb as ProjectsGetResponse, bS as ErrorType, bN as DmError, a2 as ProjectPostBodyBody, bc as ProjectPostResponse, av as ProjectDetail, a3 as ProjectPatchBodyBody, ap as TaskIdentity, q as GetProjectFileParams, a4 as ProjectFilePutBodyBody, p as GetProjectFileWithTokenParams } from '../custom-instance-b01bd8f0.js';
|
|
2
2
|
import * as _tanstack_react_query from '@tanstack/react-query';
|
|
3
3
|
import { UseQueryOptions, QueryKey, UseQueryResult, UseMutationOptions } from '@tanstack/react-query';
|
|
4
4
|
import 'axios';
|
|
@@ -993,19 +993,37 @@ export interface JobOrderDetail {
|
|
|
993
993
|
options: string[];
|
|
994
994
|
}
|
|
995
995
|
|
|
996
|
+
/**
|
|
997
|
+
* The Job command's options. The JSONSchema for the command's options, essentially the **variables/options** block of the Job's JobDefinition.
|
|
998
|
+
|
|
999
|
+
*/
|
|
1000
|
+
export type JobVariablesOptions = { [key: string]: any };
|
|
1001
|
+
|
|
1002
|
+
/**
|
|
1003
|
+
* The Job command's outputs. The JSONSchema for the command's inputs, essentially the **variables/outputs** block of the Job's JobDefinition.
|
|
1004
|
+
|
|
1005
|
+
*/
|
|
1006
|
+
export type JobVariablesOutputs = { [key: string]: any };
|
|
1007
|
+
|
|
1008
|
+
/**
|
|
1009
|
+
* The Job command's inputs. The JSONSchema for the command's inputs, essentially the **variables/inputs** block of the Job's JobDefinition.
|
|
1010
|
+
|
|
1011
|
+
*/
|
|
1012
|
+
export type JobVariablesInputs = { [key: string]: any };
|
|
1013
|
+
|
|
996
1014
|
export interface JobVariables {
|
|
997
1015
|
/** The Job command's variable ordering declaration. Contains a list of `options` in an ordered list, the order defines the order of presentation of the Job's variables.
|
|
998
1016
|
*/
|
|
999
1017
|
order?: JobOrderDetail;
|
|
1000
|
-
/** The Job command's inputs.
|
|
1018
|
+
/** The Job command's inputs. The JSONSchema for the command's inputs, essentially the **variables/inputs** block of the Job's JobDefinition.
|
|
1001
1019
|
*/
|
|
1002
|
-
inputs?:
|
|
1003
|
-
/** The Job command's outputs.
|
|
1020
|
+
inputs?: JobVariablesInputs;
|
|
1021
|
+
/** The Job command's outputs. The JSONSchema for the command's inputs, essentially the **variables/outputs** block of the Job's JobDefinition.
|
|
1004
1022
|
*/
|
|
1005
|
-
outputs?:
|
|
1006
|
-
/** The Job command's options.
|
|
1023
|
+
outputs?: JobVariablesOutputs;
|
|
1024
|
+
/** The Job command's options. The JSONSchema for the command's options, essentially the **variables/options** block of the Job's JobDefinition.
|
|
1007
1025
|
*/
|
|
1008
|
-
options?:
|
|
1026
|
+
options?: JobVariablesOptions;
|
|
1009
1027
|
}
|
|
1010
1028
|
|
|
1011
1029
|
/**
|
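The substantive change in this hunk is the introduction of the JobVariablesInputs, JobVariablesOutputs and JobVariablesOptions index-signature types, which JobVariables.inputs, outputs and options now reference; each carries the corresponding variables block of the Job's JobDefinition as a JSONSchema object. A sketch of a value satisfying the updated interface; the type shapes mirror the declarations above, while the example schema fragments are invented placeholders rather than content from any real JobDefinition:

// Shapes mirror the declarations added in the hunk above.
type JobVariablesInputs = { [key: string]: any };
type JobVariablesOutputs = { [key: string]: any };
type JobVariablesOptions = { [key: string]: any };

interface JobOrderDetail {
  options: string[];
}

interface JobVariables {
  order?: JobOrderDetail;
  inputs?: JobVariablesInputs;
  outputs?: JobVariablesOutputs;
  options?: JobVariablesOptions;
}

// Hypothetical example value; the JSONSchema fragments are invented for illustration.
const exampleVariables: JobVariables = {
  order: { options: ["count"] },
  inputs: { type: "object", properties: { inputFile: { type: "string" } } },
  outputs: { type: "object", properties: { outputFile: { type: "string" } } },
  options: { type: "object", properties: { count: { type: "integer" } } },
};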
package/task/task.d.ts
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
import * as _tanstack_react_query from '@tanstack/react-query';
|
|
2
2
|
import { UseQueryOptions, QueryKey, UseQueryResult, UseMutationOptions } from '@tanstack/react-query';
|
|
3
|
-
import { k as GetTasksParams,
|
|
3
|
+
import { k as GetTasksParams, bR as customInstance, b7 as TasksGetResponse, bS as ErrorType, bN as DmError, j as GetTaskParams, b9 as TaskGetResponse } from '../custom-instance-b01bd8f0.js';
|
|
4
4
|
import 'axios';
|
|
5
5
|
|
|
6
6
|
type SecondParameter<T extends (...args: any) => any> = T extends (config: any, args: infer P) => any ? P : never;
|
package/type/type.d.ts
CHANGED
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import { UseQueryOptions, QueryKey, UseQueryResult } from '@tanstack/react-query';
|
|
2
|
-
import {
|
|
2
|
+
import { bR as customInstance, b6 as TypesGetResponse, bS as ErrorType, bN as DmError } from '../custom-instance-b01bd8f0.js';
|
|
3
3
|
import 'axios';
|
|
4
4
|
|
|
5
5
|
type SecondParameter<T extends (...args: any) => any> = T extends (config: any, args: infer P) => any ? P : never;
|
package/user/user.d.ts
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
import * as _tanstack_react_query from '@tanstack/react-query';
|
|
2
2
|
import { UseQueryOptions, QueryKey, UseQueryResult, UseMutationOptions } from '@tanstack/react-query';
|
|
3
|
-
import {
|
|
3
|
+
import { bR as customInstance, b3 as UsersGetResponse, bS as ErrorType, bN as DmError, a as GetUserAccountParams, ah as UserAccountDetail, a1 as UserAccountPatchBodyBody, G as GetUserApiLogParams, b4 as UserApiLogGetResponse } from '../custom-instance-b01bd8f0.js';
|
|
4
4
|
import 'axios';
|
|
5
5
|
|
|
6
6
|
type SecondParameter<T extends (...args: any) => any> = T extends (config: any, args: infer P) => any ? P : never;
|
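Every import line above also pulls the shared ErrorType and DmError aliases from the renamed custom-instance-b01bd8f0 chunk. The DmError schema shown earlier in this diff is simply an object carrying a brief, user-presentable error string. A hedged sketch of narrowing an unknown failure to that shape; the local interface mirrors the published schema, and the isDmError guard is defined here purely for illustration rather than exported by the package:

// Mirrors the DmError interface from the generated schemas: a brief error
// string that can be presented to the user.
interface DmError {
  error: string;
}

// Illustrative type guard; not part of the published package.
function isDmError(value: unknown): value is DmError {
  return (
    typeof value === "object" &&
    value !== null &&
    typeof (value as { error?: unknown }).error === "string"
  );
}

// Usage: surface the brief error text when a request fails.
function describeFailure(err: unknown): string {
  return isDmError(err) ? err.error : "Unexpected error";
}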