@squonk/data-manager-client 0.7.8 → 0.7.9-rc.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,5 +1,5 @@
  import { UseQueryOptions, QueryKey, UseQueryResult } from 'react-query';
- import { bb as customInstance, b6 as AccountServerGetNamespaceResponse, bc as ErrorType, b7 as DmError, aw as VersionGetResponse } from '../custom-instance-f51d6877.js';
+ import { bb as customInstance, b6 as AccountServerGetNamespaceResponse, bc as ErrorType, b7 as DmError, aw as VersionGetResponse } from '../custom-instance-6e6b0801.js';
  import 'axios';
 
  /**
package/admin/admin.d.ts CHANGED
@@ -1,4 +1,4 @@
- import { A as AdminGetServiceErrorsParams, bb as customInstance, aE as ServiceErrorsGetResponse, bc as ErrorType, b7 as DmError, W as UserAccountDetail, U as UserPatchBodyBody, K as JobManifestPutBodyBody, b5 as AdminJobManifestLoadPutResponse } from '../custom-instance-f51d6877.js';
+ import { A as AdminGetServiceErrorsParams, bb as customInstance, aE as ServiceErrorsGetResponse, bc as ErrorType, b7 as DmError, W as UserAccountDetail, U as UserPatchBodyBody, K as JobManifestPutBodyBody, b5 as AdminJobManifestLoadPutResponse } from '../custom-instance-6e6b0801.js';
  import * as react_query from 'react-query';
  import { UseQueryOptions, QueryKey, UseQueryResult, UseMutationOptions } from 'react-query';
  import 'axios';
@@ -1,5 +1,5 @@
  import { UseQueryOptions, QueryKey, UseQueryResult } from 'react-query';
- import { bb as customInstance, b2 as ApplicationsGetResponse, bc as ErrorType, b7 as DmError, b3 as ApplicationGetResponse } from '../custom-instance-f51d6877.js';
+ import { bb as customInstance, b2 as ApplicationsGetResponse, bc as ErrorType, b7 as DmError, b3 as ApplicationGetResponse } from '../custom-instance-6e6b0801.js';
  import 'axios';
 
  /**
@@ -589,6 +589,9 @@ interface ProjectDetail {
  /** The project (owner) creator
  */
  owner: string;
+ /** True if the project is private. Private projects are only visible to the owner and its editors.
+ */
+ private: boolean;
  /** An editor (user_id) of the project */
  editors: string[];
  /** The approximate size of all the files in the Project volume. This is updated regaularly throughout the day and its current size may differ from what is reported here. The size resolution is 1MiB (the smallest billable unit). Therefore a project that contains 32KiB of files is recorded as 1MiB in size */
@@ -1,4 +1,4 @@
- import { N as DatasetPutBodyBody, bb as customInstance, b1 as DatasetPutPostResponse, bc as ErrorType, b7 as DmError, S as DatasetPostBodyBody, m as GetDatasetsParams, aW as DatasetsGetResponse, f as GetVersionsParams, as as DatasetDetail, e as DeleteDatasetParams, a2 as TaskIdentity, b0 as DatasetDigestGetResponse, a_ as DatasetSchemaGetResponse } from '../custom-instance-f51d6877.js';
+ import { N as DatasetPutBodyBody, bb as customInstance, b1 as DatasetPutPostResponse, bc as ErrorType, b7 as DmError, S as DatasetPostBodyBody, m as GetDatasetsParams, aW as DatasetsGetResponse, f as GetVersionsParams, as as DatasetDetail, e as DeleteDatasetParams, a2 as TaskIdentity, b0 as DatasetDigestGetResponse, a_ as DatasetSchemaGetResponse } from '../custom-instance-6e6b0801.js';
  import * as react_query from 'react-query';
  import { UseMutationOptions, UseQueryOptions, QueryKey, UseQueryResult } from 'react-query';
  import 'axios';
package/file/file.d.ts CHANGED
@@ -1,6 +1,6 @@
  import * as react_query from 'react-query';
  import { UseQueryOptions, QueryKey, UseQueryResult, UseMutationOptions } from 'react-query';
- import { d as GetFilesParams, bb as customInstance, aS as FilesGetResponse, bc as ErrorType, b7 as DmError, M as FilePostBodyBody, aT as FilePostResponse, D as DeleteUnmanagedFileParams } from '../custom-instance-f51d6877.js';
+ import { d as GetFilesParams, bb as customInstance, aS as FilesGetResponse, bc as ErrorType, b7 as DmError, M as FilePostBodyBody, aT as FilePostResponse, D as DeleteUnmanagedFileParams } from '../custom-instance-6e6b0801.js';
  import 'axios';
 
  declare type AsyncReturnType<T extends (...args: any) => Promise<any>> = T extends (...args: any) => Promise<infer R> ? R : any;
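
The `AsyncReturnType` helper declared in this file unwraps the resolved value type of a promise-returning function. A small standalone illustration of the same pattern; the `fetchFiles` function is hypothetical and used only to demonstrate the inference:

```ts
// Same shape as the helper declared in file.d.ts: extract the resolved type
// of an async function's return value.
type AsyncReturnType<T extends (...args: any) => Promise<any>> =
  T extends (...args: any) => Promise<infer R> ? R : any;

// Hypothetical async function, declared only to demonstrate the helper.
declare function fetchFiles(projectId: string): Promise<{ files: string[] }>;

// Resolves to { files: string[] } rather than Promise<{ files: string[] }>.
type FilesResult = AsyncReturnType<typeof fetchFiles>;
```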
package/index.cjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/data-manager-api.schemas.ts"],"names":[],"mappings":";;;;;;;;AA+aO,IAAM,kCAAkC;AAAA,EAC7C,QAAQ;AACV;AAkDO,IAAM,6BAA6B;AAAA,EACxC,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AACR;AA+CO,IAAM,iBAAiB;AAAA,EAC5B,SAAS;AAAA,EACT,SAAS;AAAA,EACT,OAAO;AAAA,EACP,SAAS;AAAA,EACT,SAAS;AACX;AA8BO,IAAM,iBAAiB;AAAA,EAC5B,UAAU;AAAA,EACV,OAAO;AAAA,EACP,SAAS;AAAA,EACT,MAAM;AAAA,EACN,OAAO;AACT;AAoBO,IAAM,8BAA8B;AAAA,EACzC,UAAU;AAAA,EACV,OAAO;AAAA,EACP,SAAS;AACX;AAsGO,IAAM,sBAAsB;AAAA,EACjC,QAAQ;AAAA,EACR,UAAU;AACZ;AAiDO,IAAM,8BAA8B;AAAA,EACzC,QAAQ;AAAA,EACR,UAAU;AACZ;AAkBO,IAAM,uBAAuB;AAAA,EAClC,WAAW;AAAA,EACX,oBAAoB;AAAA,EACpB,QAAQ;AAAA,EACR,oBAAoB;AAAA,EACpB,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AACX;AASO,IAAM,iCAAiC;AAAA,EAC5C,aAAa;AAAA,EACb,KAAK;AACP;AA0HO,IAAM,uCAAuC;AAAA,EAClD,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AACR;AAwEO,IAAM,sCAAsC;AAAA,EACjD,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AACR;AAgGO,IAAM,qBAAqB;AAAA,EAChC,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,MAAM;AAAA,EACN,KAAK;AACP;AAwEO,IAAM,yBAAyB;AAAA,EACpC,SAAS;AAAA,EACT,MAAM;AAAA,EACN,UAAU;AAAA,EACV,SAAS;AACX;AAqEO,IAAM,0BAA0B;AAAA,EACrC,QAAQ;AAAA,EACR,UAAU;AACZ;AA0DO,IAAM,sBAAsB;AAAA,EACjC,QAAQ;AAAA,EACR,QAAQ;AACV;AA2BO,IAAM,2BAA2B;AAAA,EACtC,WAAW;AAAA,EACX,oBAAoB;AAAA,EACpB,QAAQ;AAAA,EACR,oBAAoB;AAAA,EACpB,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AACX;AASO,IAAM,qCAAqC;AAAA,EAChD,aAAa;AAAA,EACb,KAAK;AACP;AA2GO,IAAM,+BAA+B;AAAA,EAC1C,QAAQ;AACV","sourcesContent":["/**\n * Generated by orval v6.7.1 🍺\n * Do not edit manually.\n * Dataset Manager API\n * The Dataset Manager API service.\n\nA service that allows *registered* users to make **Datasets** and associated **Metadata** available to **Applications** and **Jobs** using **Projects** and **Files**.\n\n * OpenAPI spec version: 0.7\n */\nexport type AdminGetServiceErrorsParams = {\n include_acknowleged?: QIncludeAcknowlegedParameter;\n};\n\nexport type GetUserApiLogParams = {\n from?: QFromParameter;\n until?: QUntilParameter;\n};\n\nexport type PatchInstanceParams = { archive?: QInstanceArchiveParameter };\n\nexport type GetInstancesParams = { project_id?: QProjectIdParameter };\n\nexport type GetTaskParams = {\n event_limit?: QEventLimitParameter;\n event_prior_ordinal?: QEventPriorOrdinalParameter;\n};\n\nexport type GetTasksParams = {\n exclude_done?: QExcludeDoneParameter;\n exclude_purpose?: QExcludePurposeParameter;\n project_id?: QProjectIdParameter;\n};\n\nexport type DeleteUnmanagedFileParams = {\n file: QFileParameter;\n path?: QFilePathParameter;\n project_id: QFileProjectIdParameter;\n};\n\nexport type GetFilesParams = {\n project_id: QFileProjectIdParameter;\n path?: QFilePathParameter;\n include_hidden?: QIncludeHiddenParameter;\n};\n\nexport type DeleteDatasetParams = {\n keep_project_files?: QKeepProjectFilesParameter;\n};\n\nexport type GetVersionsParams = { include_deleted?: QIncludeDeletedParameter };\n\nexport type GetProjectFileParams = {\n path?: QFilePathParameter;\n file: QFileParameter;\n};\n\n/**\n * Filter the datasets by a comma separated list of editors\n\n */\nexport type QEditorsParameter = string;\n\n/**\n * Filter the datasets by a comma separated list of owners\n\n */\nexport type QOwnersParameter = string;\n\n/**\n * Filter the datasets by username\n\n */\nexport type QUsernameParameter = string;\n\n/**\n * An until (exclusive) date-time. If provided only API calls made before this will be returned. 
UTC is assumed if no timezone is provided\n\n */\nexport type QUntilParameter = string;\n\n/**\n * A Project identity\n */\nexport type QProjectIdParameter = string;\n\n/**\n * JSON string containing a list of label/value pairs for the datasets to be filtered by. If the value is set to null, then only the label is matched. If both the label and value are set, then both are matched. e.g. '{\"label1\": null, \"labe2\": \"value2\"}'\n\n */\nexport type QLabelsParameter = string;\n\nexport type GetDatasetsParams = {\n include_deleted?: QIncludeDeletedParameter;\n username?: QUsernameParameter;\n dataset_mime_type?: QDatasetMimeTypeParameter;\n owners?: QOwnersParameter;\n editors?: QEditorsParameter;\n labels?: QLabelsParameter;\n};\n\n/**\n * Whether to convert Project managed file instances to unmanged files\n\n */\nexport type QKeepProjectFilesParameter = boolean;\n\n/**\n * True to archive the instance\n */\nexport type QInstanceArchiveParameter = boolean;\n\n/**\n * Whether to include hidden files and directories\n */\nexport type QIncludeHiddenParameter = boolean;\n\n/**\n * Whether to include records that are deleted\n */\nexport type QIncludeDeletedParameter = boolean;\n\n/**\n * Set to include acknowledged items\n\n */\nexport type QIncludeAcknowlegedParameter = boolean;\n\n/**\n * A from (inclusive) date-time. If provided no API calls prior to this will be returned. UTC is assumed if no timezone is provided\n\n */\nexport type QFromParameter = string;\n\n/**\n * The Project identity\n */\nexport type QFileProjectIdParameter = string;\n\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\nexport type QFilePathParameter = string;\n\n/**\n * A project file.\n\n */\nexport type QFileParameter = string;\n\n/**\n * Set to a dot-separated string of purpose enumerations, i.e. `INSTANCE`, `FILE` or `DATASET`. To exclude file and dataset tasks set to `FILE.DATASET`\n\n */\nexport type QExcludePurposeParameter = string;\n\n/**\n * Set true if you want to exclude 'done' tasks, i.e. just see those that are still running.\n\n */\nexport type QExcludeDoneParameter = boolean;\n\n/**\n * The ordinal of a previously received event. If set, only events subsequent to the ordinal provided will be returned. Providing a value of 0 will result in retrieving the first and subsequent events.\n\n */\nexport type QEventPriorOrdinalParameter = number;\n\n/**\n * Maximum number of events to return. If provided, can be 1 or more.\n\n */\nexport type QEventLimitParameter = number;\n\n/**\n * Filter the datasets by the supplied mime_type.\n\n */\nexport type QDatasetMimeTypeParameter = string;\n\n/**\n * Set, if you're an admin, to call the endpoint without impersonation\n\n */\nexport type QDoNotImpersonateParameter = boolean;\n\nexport type GetUserAccountParams = {\n do_not_impersonate?: QDoNotImpersonateParameter;\n};\n\nexport type UserPatchBodyBody = {\n /** If set to a message the user account is suspended, with the user receiving this message when they try and use the API. A suspended user accont can be restored by setting the message to `/restore`\n */\n suspend_message?: string;\n};\n\nexport type UserAccountPatchBodyBody = {\n /** If set the user account becomes private, if provided but false the user account becomes public. Public Users show up in user searches\n */\n private?: boolean;\n /** For **admin** accounts, if set the user account is able to read anything, i.e. `GET` API calls (i.e. 
endpoints that do not change the Data Manager state) behave as though the caller is acting as *everyone*. An **admin** user would set ths parameter in order to browse the system, and then switch to `impersonate` mode in order to chnage things as the chosen user\n */\n become_admin?: boolean;\n /** For **admin** accounts, if set API calls behave as though the caller is the user being impersonated. To stop impersonating set this to an empty string. To set impersonation to anything other than an empty string you must also set `become_admin`\n */\n impersonate?: string;\n /** For **admin** accounts, if this is set the account for the user being impersonated is patched, rather then the user's own account. To use this you must have a value for `impersonate`\n */\n use_impersonation?: boolean;\n};\n\nexport type ProjectPostBodyBody = {\n name: string;\n /** Whether the project is private. You may not be permitted to make the project private, that will depend on the project product you're using to create the project\n */\n private?: boolean;\n /** The Organisation the Project belongs to\n */\n organisation_id: string;\n /** The Organisational Unit the Project belongs to\n */\n unit_id: string;\n /** The Data Manager *Tier Product ID* you're using to create the Project\n */\n tier_product_id: string;\n};\n\nexport type ProjectPatchBodyBody = {\n private?: boolean;\n /** The new name of the ptojct\n */\n name?: string;\n};\n\nexport type ProjectFilePutBodyBody = {\n file: Blob;\n /** An alternative filename to use for the uploaded File\n */\n as_filename?: string;\n /** The Project path of the file.\n */\n path?: string;\n};\n\nexport type JobManifestPutBodyBody = {\n /** The URL of the Job Manifest */\n url: string;\n /** Optional URL header values (a JSON string) */\n header?: string;\n /** Optional URL parameter values (a JSON string) */\n params?: string;\n};\n\nexport type InstancePostBodyBody = {\n /** A supported application. Applications instances are managed using pre-deployed Kubernetes **Operators**. The application ID is a combination of the operator _plural_ and _group_.\n */\n application_id: string;\n /** A supported application version to launch\n */\n application_version: string;\n /** The project to attach\n */\n project_id: string;\n /** The name to use for the instance\n */\n as_name: string;\n /** A URL the DM will use to PUT job progress messages as the requested instance runs. Used, at the moment, for Job execution\n */\n callback_url?: string;\n /** Used in conjunction with the `callback_url` any value provided here will be passed back in the message payload that's delivered to the callback URL. It can be used by the recipient to provide a context that's meaningful\n */\n callback_context?: string;\n /** A debug value that may be used by the instance.\n\nFor Data Manager **Job** applications setting this to anything other zero ('0') prevents the Job's Pod from being deleted automatically, allowing a developer to inspect the Pod's log for example.\n\nThe behaviour of **Application** instances using this property is undefined. It will depend on whether the application **CRD** handles the Data Manager debug field.\n */\n debug?: string;\n /** The instance specification. A JSON string that's application-specific and controls the application's behaviour.\n\nWhen laucnhing a Data Manager **Job** Application you must identify the Job using the properties `collection`, `job` and `version`, e.g. 
`{\"collection\":\"im-test\",\"job\":\"nop\",\"version\":\"1.0.0\"}`\n\nJobs that offer commands will often advertise a series of **inputs** and **options** where the values can be provided using a **variables** map in the specification. Something like `\"variables\":{\"x\":7}`.\n\nJobs start in a Job-specific **working directory** but the starting directory for any Job can be adjusted by defining a `sub_path` to the root specification. For example, if you want the Job to start in the path `foo/bar` (inside the Job's built-in working directory) you can add `\"sub_path\":\"foo/bar\"` to the specification. You can only use a sub-path for a Job if the Job defines a working directory and `sub-path` cannot begin or end with a path separator (`/`).\n */\n specification?: string;\n};\n\nexport type FilePostBodyBody = {\n /** The Dataset UUID for the File that you intend to attach\n */\n dataset_id: string;\n /** The Dataset version to attach\n */\n dataset_version: number;\n /** The Project UUID you're attaching to\n */\n project_id: string;\n /** The desired Dataset file type (a MIME type). Whether or not the chosen fileType is supported will depend on the Dataset\n */\n as_type: string;\n /** A path within the Project to add the File, default is the project root ('/'), the mount-point within the application container. Paths must begin '/'\n */\n path?: string;\n /** Whether to compress the Dataset File as it's attached. Compression is achieved using gzip, resulting in a File ending `.gz`. By default the file will be compressed\n */\n compress?: boolean;\n /** Whether the Dataset File can be modified while in the Project. By default the File cannot be modified\n */\n immutable?: boolean;\n};\n\nexport type DatasetPutBodyBody = {\n /** The MIME type of the Dataset\n */\n dataset_type: string;\n /** Extra variables (text) presented to the Dataset format-support container, which occurs during the upload and database processing stage. The content of the text is Dataset type specific.\n */\n format_extra_variables?: string;\n /** If set the post-format database load action is skipped. This means the dataset is uploaded but its molecules are not entered into the Data Manager database. This can save significant time if you're just experimenting with the dataset and do not need the molecules in the database\n */\n skip_molecule_load?: boolean;\n /** The Project the file belongs to\n */\n project_id: string;\n /** The Project path of the file.\n */\n path: string;\n /** The file name of the file in the Project path to load as a new Dataset.\n */\n file_name: string;\n /** If provided the File becomes a new version of the Dataset named. If not provided this File becomes the first version of a new Dataset, whose ID is returned to you on success.\n */\n dataset_id?: string;\n /** The Organisation you want the Dataset to belong to. If not supplied the Project Organisation is used\n */\n organisation_id?: string;\n /** The Organisational Unit you want the Dataset to belong to. If not supplied the Project Unit is used\n */\n unit_id?: string;\n};\n\nexport type DatasetVersionMetaPostBodyBody = {\n /** JSON string containing a list of parameter changes to the metadata. Only the description is currently allowed.\n */\n meta_properties?: string;\n /** JSON string containing a list of annotations. 
The format of the labels should match either the Fields Descriptor or Service Execution annotation formats described in the data-manager-metadata library.\n */\n annotations?: string;\n};\n\nexport type DatasetMetaPostBodyBody = {\n /** JSON string containing a list of parameter changes to the metadata. Only the description is currently allowed.\n */\n meta_properties?: string;\n /** JSON string containing a list of labels. The format of the labels should match the label annotation format described in the data-manager-metadata library.\n */\n labels?: string;\n};\n\nexport type DatasetPostBodyBody = {\n dataset_file: Blob;\n /** The MIME type of the Dataset\n */\n dataset_type: string;\n /** Extra variables (text) presented to the Dataset format-support container, which occurs during the upload and database processing stage. The content of the text is Dataset type specific.\n */\n format_extra_variables?: string;\n /** If set the post-format database load action is skipped. This means the dataset is uploaded but its molecules are not entered into the Data Manager database. This can save significant time if you're just experimenting with the dataset and do not need the molecules in the database\n */\n skip_molecule_load?: boolean;\n /** An optional new filename to use for the uploaded Dataset. The Dataset will be stored using this name.\n */\n as_filename?: string;\n /** If provided the Dataset becomes a new version of the Dataset named. If not provided this Dataset becomes the first version of a new Dataset, whose ID is returned to you on success.\n */\n dataset_id?: string;\n /** The Organisation you want the Dataset to belong to\n */\n organisation_id: string;\n /** The Organisational Unit you want the Dataset to belong to\n */\n unit_id: string;\n};\n\nexport interface UserSummary {\n /** The user's preferred username\n */\n username: string;\n}\n\nexport interface UserDetail {\n /** The user's preferred username\n */\n username: string;\n /** The date and time the user was first seen (an ISO-8601 formatted string in UTC)\n */\n first_seen?: string;\n /** Set if the user's account is marked as private. Provate accounts do not show up against general queries.\n */\n private: boolean;\n /** True if the account is suspended\n */\n suspended?: boolean;\n /** If the account is suspended this typically displays a reason for suspension\n */\n suspension_message?: string;\n /** For admin accounts, whether the user is acting in an administartive capacity, i.e. 
acting as everyone\n */\n become_admin?: boolean;\n /** For admin accounts, whether the user is impersonating another user\n */\n impersonate?: string;\n}\n\nexport interface UserAccountDetail {\n user: UserDetail;\n /** Whether the caller has admin privilege */\n caller_has_admin_privilege: boolean;\n}\n\n/**\n * The Schema type (an object)\n\n */\nexport type TypeSummaryFormatterOptionsType = \"object\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TypeSummaryFormatterOptionsType = {\n object: \"object\" as TypeSummaryFormatterOptionsType,\n};\n\n/**\n * If present, contains the formatter_options that can be entered in the format_extra_variables field in the POST /dataset api.\n\n */\nexport type TypeSummaryFormatterOptions = {\n /** The title of the Formatter object\n */\n title: string;\n /** The Schema type (an object)\n */\n type: TypeSummaryFormatterOptionsType;\n /** Required properties\n */\n required: string[];\n [key: string]: any;\n};\n\nexport interface TypeSummary {\n /** The File Type MIME\n */\n mime: string;\n /** The file's supported file extensions. Each type is limited to a limited number of extensions. For example, SDF files must have the extension `.sdf` (or `.sdf.gz`).\n */\n file_extensions: string[];\n /** The file's type's format-support container image (if set). Types without a format support image cannot be uploaded, but they might be available for use as destination type when a Dataset is added to a Project.\n */\n formatter_image?: string;\n /** True if Datasets uploaded using this type's support loading of data into the Data Manager data-base.\n */\n formatter_supports_db_load?: boolean;\n /** If present, contains the formatter_options that can be entered in the format_extra_variables field in the POST /dataset api.\n */\n formatter_options?: TypeSummaryFormatterOptions;\n}\n\n/**\n * The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type TaskSummaryProcessingStage =\n | \"COPYING\"\n | \"FAILED\"\n | \"FORMATTING\"\n | \"LOADING\"\n | \"DELETING\"\n | \"DONE\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskSummaryProcessingStage = {\n COPYING: \"COPYING\" as TaskSummaryProcessingStage,\n FAILED: \"FAILED\" as TaskSummaryProcessingStage,\n FORMATTING: \"FORMATTING\" as TaskSummaryProcessingStage,\n LOADING: \"LOADING\" as TaskSummaryProcessingStage,\n DELETING: \"DELETING\" as TaskSummaryProcessingStage,\n DONE: \"DONE\" as TaskSummaryProcessingStage,\n};\n\nexport interface TaskSummary {\n /** The Task UUID\n */\n id: string;\n /** If a container image is launched by the task the image name is available here\n */\n image?: string;\n /** The date and time the task was created\n */\n created: string;\n /** True if the task has run to completion. If the task finished successfully the `exit_code` will be zero.\n */\n done: boolean;\n /** True if the Task relates to an object removal, i.e. a DELETE\n */\n removal?: boolean;\n /** Present when `done` and zero if the task finished successfully.\n */\n exit_code?: number;\n /** The purpose for the Task. Tasks are responsible for _uploading_ a Dataset, _attaching_ a Dataset as a File to a project or launching an Applications _instance_.\n */\n purpose: string;\n /** The related object ID for the Task purpose. 
If the purpose relates to a **Dataset** upload the ID will be a Dataset ID.\n */\n purpose_id: string;\n /** The related object version for the Task purpose. This field will only be set if the **purpose** is `DATASET`.\n */\n purpose_version?: number;\n /** The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage?: TaskSummaryProcessingStage;\n}\n\n/**\n * The task state. The typical state sequence is `PENDING`, then `STARTED` and finally `SUCCESS`\n\n */\nexport type TaskStateState =\n | \"PENDING\"\n | \"STARTED\"\n | \"RETRY\"\n | \"SUCCESS\"\n | \"FAILURE\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskStateState = {\n PENDING: \"PENDING\" as TaskStateState,\n STARTED: \"STARTED\" as TaskStateState,\n RETRY: \"RETRY\" as TaskStateState,\n SUCCESS: \"SUCCESS\" as TaskStateState,\n FAILURE: \"FAILURE\" as TaskStateState,\n};\n\nexport interface TaskState {\n /** The task state. The typical state sequence is `PENDING`, then `STARTED` and finally `SUCCESS`\n */\n state: TaskStateState;\n /** A short message accompanying the state, generally only found when the state is `FAILURE`\n */\n message?: string;\n /** The date and time of the state change\n */\n time: string;\n}\n\nexport interface TaskIdentity {\n task_id: string;\n}\n\n/**\n * The level of the message, a typical logging framework value\n\n */\nexport type TaskEventLevel =\n | \"CRITICAL\"\n | \"ERROR\"\n | \"WARNING\"\n | \"INFO\"\n | \"DEBUG\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskEventLevel = {\n CRITICAL: \"CRITICAL\" as TaskEventLevel,\n ERROR: \"ERROR\" as TaskEventLevel,\n WARNING: \"WARNING\" as TaskEventLevel,\n INFO: \"INFO\" as TaskEventLevel,\n DEBUG: \"DEBUG\" as TaskEventLevel,\n};\n\nexport interface TaskEvent {\n /** The event sequence number. 
The first event is always '1'.\n */\n ordinal: number;\n /** A short message.\n */\n message: string;\n /** The level of the message, a typical logging framework value\n */\n level: TaskEventLevel;\n /** The date and time the event was generated\n */\n time: string;\n}\n\nexport type ServiceErrorSummarySeverity = \"CRITICAL\" | \"ERROR\" | \"WARNING\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const ServiceErrorSummarySeverity = {\n CRITICAL: \"CRITICAL\" as ServiceErrorSummarySeverity,\n ERROR: \"ERROR\" as ServiceErrorSummarySeverity,\n WARNING: \"WARNING\" as ServiceErrorSummarySeverity,\n};\n\nexport interface ServiceErrorSummary {\n id: number;\n created: string;\n summary: string;\n severity: ServiceErrorSummarySeverity;\n hostname: string;\n error_code?: number;\n stack_trace: string;\n acknowledged: boolean;\n acknowledged_at?: string;\n acknowledging_user?: string;\n}\n\nexport interface ProjectFileDetail {\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The ProjectFile's Dataset origin\n */\n dataset_id?: string;\n /** The ProjectFile's Dataset origin version\n */\n dataset_version?: number;\n /** The ProjectFile's unique ID\n */\n file_id: string;\n /** The ProjectFile's filename within the Project\n */\n file_name: string;\n /** The ProjectFile's path within the Project volume\n */\n file_path: string;\n /** True if the ProjectFile cannot be modified while in the Project\n */\n immutable: boolean;\n /** The owner of the ProjectFile. This is the user that added the Dataset (as this file) to the Project\n */\n owner: string;\n /** The Project the ProjectFile belongs to\n */\n project_id?: string;\n /** The ProjectFile MIME type\n */\n mime_type: string;\n}\n\nexport interface ProjectDetail {\n /** The project name\n */\n name: string;\n /** The project unique reference\n */\n project_id: string;\n /** The Account Server Product the Project belongs to\n */\n product_id?: string;\n /** The Account Server Organisation the Project Product belongs to\n */\n organisation_id?: string;\n /** The Account Server Unit the Project Product belongs to\n */\n unit_id?: string;\n /** The project (owner) creator\n */\n owner: string;\n /** An editor (user_id) of the project */\n editors: string[];\n /** The approximate size of all the files in the Project volume. This is updated regaularly throughout the day and its current size may differ from what is reported here. The size resolution is 1MiB (the smallest billable unit). Therefore a project that contains 32KiB of files is recorded as 1MiB in size */\n size: number;\n /** A list of managed files in the Project\n */\n files?: ProjectFileDetail[];\n}\n\nexport interface JobOrderDetail {\n options: string[];\n}\n\nexport interface JobVariables {\n /** The Job command's variable ordering declaration. Contains a list of `options` in an ordered list, the order defines the order of presentation of the Job's variables.\n */\n order?: JobOrderDetail;\n /** The Job command's inputs. A string that represents a JSONSchema for the command's inputs, essentially the **variables/inputs** block of the Job's JobDefinition.\n */\n inputs?: string;\n /** The Job command's outputs. A string that represents a JSONSchema for the command's inputs, essentially the **variables/outputs** block of the Job's JobDefinition.\n */\n outputs?: string;\n /** The Job command's options. 
A string that represents a JSONSchema for the command's options, essentially the **variables/options** block of the Job's JobDefinition.\n */\n options?: string;\n}\n\n/**\n * The optional container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type JobSummaryImageType = \"SIMPLE\" | \"NEXTFLOW\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const JobSummaryImageType = {\n SIMPLE: \"SIMPLE\" as JobSummaryImageType,\n NEXTFLOW: \"NEXTFLOW\" as JobSummaryImageType,\n};\n\nexport interface JobSummary {\n /** The Job's unique ID\n */\n id: number;\n /** The Job namespace\n */\n collection: string;\n /** The Job name, unique within a given namespace\n */\n job: string;\n /** The Job version\n */\n version: string;\n /** The optional container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n image_type: JobSummaryImageType;\n /** The Job's category\n */\n category?: string;\n /** The list of keywords assigned to the Job\n */\n keywords?: string[];\n /** The name of the job in English\n */\n name: string;\n /** The description of the job in English\n */\n description?: string;\n /** A URL linking to the Job documentation\n */\n doc_url?: string;\n}\n\nexport interface JobApplication {\n /** The Job's operator ID */\n id: string;\n /** The Job's operator version */\n version: string;\n}\n\n/**\n * The Job container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type InstanceSummaryJobImageType = \"SIMPLE\" | \"NEXTFLOW\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryJobImageType = {\n SIMPLE: \"SIMPLE\" as InstanceSummaryJobImageType,\n NEXTFLOW: \"NEXTFLOW\" as InstanceSummaryJobImageType,\n};\n\n/**\n * The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparring to run, is `PENDING`. The instance is running when the phase is `RUNNING`. 
`COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccesfully.\n\n */\nexport type InstanceSummaryPhase =\n | \"COMPLETED\"\n | \"CRASH_LOOP_BACKOFF\"\n | \"FAILED\"\n | \"IMAGE_PULL_BACKOFF\"\n | \"PENDING\"\n | \"RUNNING\"\n | \"SUCCEEDED\"\n | \"UNKNOWN\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryPhase = {\n COMPLETED: \"COMPLETED\" as InstanceSummaryPhase,\n CRASH_LOOP_BACKOFF: \"CRASH_LOOP_BACKOFF\" as InstanceSummaryPhase,\n FAILED: \"FAILED\" as InstanceSummaryPhase,\n IMAGE_PULL_BACKOFF: \"IMAGE_PULL_BACKOFF\" as InstanceSummaryPhase,\n PENDING: \"PENDING\" as InstanceSummaryPhase,\n RUNNING: \"RUNNING\" as InstanceSummaryPhase,\n SUCCEEDED: \"SUCCEEDED\" as InstanceSummaryPhase,\n UNKNOWN: \"UNKNOWN\" as InstanceSummaryPhase,\n};\n\n/**\n * The type of Application, which can be a `job` or an `application`\n\n */\nexport type InstanceSummaryApplicationType = \"APPLICATION\" | \"JOB\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryApplicationType = {\n APPLICATION: \"APPLICATION\" as InstanceSummaryApplicationType,\n JOB: \"JOB\" as InstanceSummaryApplicationType,\n};\n\nexport interface InstanceSummary {\n /** The application instance ID\n */\n id: string;\n /** The application ID\n */\n application_id: string;\n /** The application version\n */\n application_version: string;\n /** The User's specification, provided when the application was laucnhed\n */\n application_specification?: string;\n /** The type of Application, which can be a `job` or an `application`\n */\n application_type: InstanceSummaryApplicationType;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparring to run, is `PENDING`. The instance is running when the phase is `RUNNING`. `COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccesfully.\n */\n phase: InstanceSummaryPhase;\n /** The data and time (UTC) the instance was laucnhed\n */\n launched: string;\n /** The application instance owner, the person who launched tha application and is the only user than can stop it.\n */\n owner: string;\n /** The Project the instances is running in.\n */\n project_id: string;\n /** The Instance name\n */\n name: string;\n /** For applications (not Jobs) this is the URL the application has exposed. It will ponly be available when the application has started.\n */\n url?: string;\n /** If the instance relates to a job, this will be the job collection, as defined in the original collection's job defintion.\n */\n job_collection?: string;\n /** If the instance relates to a job, this will be the job, as defined in the original collection's job defintion.\n */\n job_job?: string;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job defintion.\n */\n job_name?: string;\n /** The Job defintion's unique ID\n */\n job_id?: number;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job defintion.\n */\n job_version?: string;\n /** The Job container image type. 
Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n job_image_type?: InstanceSummaryJobImageType;\n /** The instance's knwon outputs, a JSON string defining a map of all the outputs. Typcially applied only to JOB applcation types\n */\n outputs?: string;\n /** Set if the instance output file permissions are expected to be fixed by the Data Manager when the Instance completes.\n */\n fix_permissions?: boolean;\n}\n\nexport interface FileStat {\n /** The size of the file in bytes\n */\n size: number;\n /** The date and time (UTC) of the last modification\n */\n modified: string;\n}\n\nexport interface FilePathFile {\n /** The file's Dataset ID (if the file belongs to a Dataset)\n */\n dataset_id?: string;\n /** The file's Dataset version (if the file belongs to a Dataset)\n */\n dataset_version?: number;\n /** The file name\n */\n file_name: string;\n /** The ID of the file (if the file belongs to a Dataset)\n */\n file_id?: string;\n /** Whether the file is immutable (read-only)\n */\n immutable?: boolean;\n /** The file's MIME type\n */\n mime_type?: string;\n /** The file's owner\n */\n owner: string;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n stat: FileStat;\n}\n\n/**\n * The dictionary of label/value pairs\n\n */\nexport type DatasetVersionSummaryLabels = { [key: string]: any };\n\n/**\n * The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type DatasetVersionSummaryProcessingStage =\n | \"COPYING\"\n | \"FAILED\"\n | \"FORMATTING\"\n | \"LOADING\"\n | \"DELETING\"\n | \"DONE\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetVersionSummaryProcessingStage = {\n COPYING: \"COPYING\" as DatasetVersionSummaryProcessingStage,\n FAILED: \"FAILED\" as DatasetVersionSummaryProcessingStage,\n FORMATTING: \"FORMATTING\" as DatasetVersionSummaryProcessingStage,\n LOADING: \"LOADING\" as DatasetVersionSummaryProcessingStage,\n DELETING: \"DELETING\" as DatasetVersionSummaryProcessingStage,\n DONE: \"DONE\" as DatasetVersionSummaryProcessingStage,\n};\n\nexport interface DatasetVersionSummary {\n /** The owner of the Dataset version\n */\n owner?: string;\n /** The source of the Dataset - typically the name of the Dataset that was uploaded or a URL reference\n */\n source_ref: string;\n /** The date and time the Dataset was uploaded (an ISO-8601 formatted string in UTC)\n */\n published: string;\n /** The Dataset MIME type\n */\n type: string;\n /** The filename of the Dataset\n */\n file_name: string;\n /** The list of Project's the Dataset is attached to\n */\n projects: string[];\n /** Any extra variables passed in during the upload and handed to the format-support container.\n */\n format_extra_variables?: string;\n /** The version of the dataset\n */\n version: number;\n /** The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). 
A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage: DatasetVersionSummaryProcessingStage;\n /** If this is a Deleted dataset this is the date and time the dataset was deleted (an ISO-8601 formatted string in UTC)\n */\n deleted?: string;\n /** If this is a Deleted dataset this is the username of the user that deleted the Dataset\n */\n deleting_user?: string;\n /** The dictionary of label/value pairs\n */\n labels?: DatasetVersionSummaryLabels;\n /** The size, in bytes, of the formatted Dataset\n */\n size?: number;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n}\n\nexport interface DatasetVersionProjectFile {\n project_name: string;\n project: string;\n files: string[];\n}\n\n/**\n * The dictionary of label/value pairs\n\n */\nexport type DatasetVersionDetailLabels = { [key: string]: any };\n\n/**\n * The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type DatasetVersionDetailProcessingStage =\n | \"COPYING\"\n | \"FAILED\"\n | \"FORMATTING\"\n | \"LOADING\"\n | \"DELETING\"\n | \"DONE\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetVersionDetailProcessingStage = {\n COPYING: \"COPYING\" as DatasetVersionDetailProcessingStage,\n FAILED: \"FAILED\" as DatasetVersionDetailProcessingStage,\n FORMATTING: \"FORMATTING\" as DatasetVersionDetailProcessingStage,\n LOADING: \"LOADING\" as DatasetVersionDetailProcessingStage,\n DELETING: \"DELETING\" as DatasetVersionDetailProcessingStage,\n DONE: \"DONE\" as DatasetVersionDetailProcessingStage,\n};\n\nexport interface DatasetVersionDetail {\n /** The owner of the Dataset version\n */\n owner: string;\n /** The source of the Dataset - typically the name of the Dataset that was uploaded or a URL reference\n */\n source_ref: string;\n /** The date and time the Dataset was uploaded (an ISO-8601 formatted string in UTC)\n */\n published: string;\n /** The Dataset MIME type\n */\n type: string;\n /** The filename of the Dataset\n */\n file_name: string;\n /** The list of\n */\n project_files: DatasetVersionProjectFile[];\n /** The list of Project's the Dataset is attached to\n */\n projects: string[];\n /** Any extra variables passed in during the upload and handed to the format-support container.\n */\n format_extra_variables?: string;\n /** The version of the dataset\n */\n version: number;\n /** The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). 
A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage: DatasetVersionDetailProcessingStage;\n /** If this is a Deleted dataset this is the date and time the dataset was deleted (an ISO-8601 formatted string in UTC)\n */\n deleted?: string;\n /** If this is a Deleted dataset this is the username of the user that deleted the Dataset\n */\n deleting_user?: string;\n /** The dictionary of label/value pairs\n */\n labels?: DatasetVersionDetailLabels;\n /** The size, in bytes, of the formatted Dataset\n */\n size?: number;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n}\n\nexport interface DatasetSummary {\n /** The Dataset ID\n */\n dataset_id: string;\n /** The list of editors\n */\n editors: string[];\n /** The set of separate versions of the Dataset\n */\n versions: DatasetVersionSummary[];\n}\n\nexport interface DatasetDetail {\n /** The Dataset ID\n */\n dataset_id: string;\n /** The list of editors\n */\n editors: string[];\n /** The owner of the Dataset\n */\n owner: string;\n /** The set of separate versions of the Dataset\n */\n versions: DatasetVersionDetail[];\n}\n\nexport interface ApplicationSummary {\n /** The application name, the value of the Kubernetes **Custom Resource Definition** `spec.names.kind` property\n */\n kind: string;\n /** The application group\n */\n group?: string;\n /** The application unique reference, the value of the Kubernetes **Custom Resource Definition** `metadata.name` property\n */\n application_id: string;\n}\n\n/**\n * The REST method used. GET methods are not logged\n\n */\nexport type ApiLogDetailMethod = \"DELETE\" | \"PATCH\" | \"POST\" | \"PUT\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const ApiLogDetailMethod = {\n DELETE: \"DELETE\" as ApiLogDetailMethod,\n PATCH: \"PATCH\" as ApiLogDetailMethod,\n POST: \"POST\" as ApiLogDetailMethod,\n PUT: \"PUT\" as ApiLogDetailMethod,\n};\n\nexport interface ApiLogDetail {\n /** The REST method used. GET methods are not logged\n */\n method: ApiLogDetailMethod;\n /** The API path used, e.g. `/dataset`\n */\n path: string;\n /** The date/time the API call began\n */\n began: string;\n /** The REST method approximate execution time (nS) in a humanised form, where 7969400 is presented as 7,969,400\n */\n duration_ns?: string;\n /** The HTTP response status code\n */\n status_code?: number;\n /** A JSON string representign the in-query properties used in the call\n */\n params?: string;\n /** A JSON string representing the in-body properties used in the call\n */\n body?: string;\n /** Not all API calls using your user ID may have been executed by you, In cases where an administrator has executed a REST API call on your behalf, their user ID will be revealed using this property\n */\n impersonator?: string;\n}\n\nexport interface VersionGetResponse {\n /** The Data Manager version. This is guaranteed to be a valid semantic version for official (tagged) images. 
The version value format for unofficial images is a string but otherwise undefined\n */\n version: string;\n}\n\nexport interface UsersGetResponse {\n /** A list of Users that have used the Data Manager\n */\n users: UserSummary[];\n}\n\nexport interface UserApiLogGetResponse {\n /** A list of API loc call records, with the oldest record first in the list\n */\n api_log: ApiLogDetail[];\n}\n\nexport type UserAccountGetResponse = UserAccountDetail;\n\nexport interface TypesGetResponse {\n /** A list of availabel MIME types\n */\n types: TypeSummary[];\n}\n\nexport interface TasksGetResponse {\n /** A list of Tasks\n */\n tasks: TaskSummary[];\n}\n\n/**\n * The purpose of the task. Tasks are typically executed for the purpose of Dataset processing, attaching as Project Files or Application Instance execution. Other fields in this object are only valid if the purpose is known (i.e. is not `UNKNOWN`)\n\n */\nexport type TaskGetResponsePurpose =\n | \"DATASET\"\n | \"FILE\"\n | \"INSTANCE\"\n | \"PROJECT\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskGetResponsePurpose = {\n DATASET: \"DATASET\" as TaskGetResponsePurpose,\n FILE: \"FILE\" as TaskGetResponsePurpose,\n INSTANCE: \"INSTANCE\" as TaskGetResponsePurpose,\n PROJECT: \"PROJECT\" as TaskGetResponsePurpose,\n};\n\nexport interface TaskGetResponse {\n /** The purpose of the task. Tasks are typically executed for the purpose of Dataset processing, attaching as Project Files or Application Instance execution. Other fields in this object are only valid if the purpose is known (i.e. is not `UNKNOWN`)\n */\n purpose: TaskGetResponsePurpose;\n /** The identity of the purpose, where available. This will be the Dataset UUID if the purpose of the task is/was for Dataset processing.\n */\n purpose_id: string;\n /** The version number, relating to the object under control. For Datasets this will be the Dataset version.\n */\n purpose_version?: number;\n /** Is the Task Purpose is `INSTANCE`, and the instance was given a `specification` the specification can be found here. For **Applications** the specification is returned verbatim.\n */\n instance_specification?: string;\n /** If a container image is launched by the task the image name is available here\n */\n image?: string;\n /** The date and time the task was created\n */\n created: string;\n /** True if the task has run to completion. If the task finished successfully the `exit_code` will be zero.\n */\n done: boolean;\n /** True if the Task relates to an object removal, i.e. a DELETE\n */\n removal?: boolean;\n /** Present when `done` and zero if the task finished successfully.\n */\n exit_code?: number;\n /** A (possibly empty) list of application states, the oldest state occupies the first position in the list.\n */\n states?: TaskState[];\n /** A (possibly empty) list of application events. The oldest event occupies the first position in the list.\n */\n events?: TaskEvent[];\n}\n\nexport interface ServiceErrorsGetResponse {\n /** A list of service errors\n */\n service_errors: ServiceErrorSummary[];\n}\n\nexport interface ProjectsGetResponse {\n projects: ProjectDetail[];\n}\n\nexport interface ProjectPostResponse {\n /** The project identity\n */\n project_id: string;\n}\n\nexport type ProjectGetResponse = ProjectDetail;\n\nexport type ProjectDeleteResponse = TaskIdentity;\n\nexport interface JobsGetResponse {\n jobs: JobSummary[];\n}\n\n/**\n * The optional container image type. 
Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type JobGetResponseImageType = \"SIMPLE\" | \"NEXTFLOW\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const JobGetResponseImageType = {\n SIMPLE: \"SIMPLE\" as JobGetResponseImageType,\n NEXTFLOW: \"NEXTFLOW\" as JobGetResponseImageType,\n};\n\nexport interface JobGetResponse {\n /** The Job's unique ID\n */\n id: number;\n application: ApplicationSummary;\n /** The Job collection\n */\n collection: string;\n /** The Job name, unique within a given collection\n */\n job: string;\n /** The Job version\n */\n version: string;\n /** The Job command (usually encoded)\n */\n command: string;\n /** The Job command encoding\n */\n command_encoding: string;\n /** The name of the container image that houses the job\n */\n image_name: string;\n /** The container image tag\n */\n image_tag: string;\n /** The directory where the project volume will be mounted. The root path to the project files\n */\n image_project_directory: string;\n /** The directory used as the container image working directory (if defined)\n */\n image_working_directory?: unknown;\n /** The optional container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n image_type?: JobGetResponseImageType;\n /** The Job's descriptive name\n */\n name: string;\n /** English description of the job\n */\n description?: string;\n /** A URL linking to the Job documentation\n */\n doc_url?: string;\n /** The Job's category\n */\n category?: string;\n /** The list of keywords assigned to the Job\n */\n keywords?: string[];\n variables?: JobVariables;\n}\n\nexport type InstanceTaskPurpose = \"CREATE\" | \"DELETE\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceTaskPurpose = {\n CREATE: \"CREATE\" as InstanceTaskPurpose,\n DELETE: \"DELETE\" as InstanceTaskPurpose,\n};\n\nexport interface InstanceTask {\n id: string;\n purpose: InstanceTaskPurpose;\n}\n\nexport interface InstancesGetResponse {\n instances: InstanceSummary[];\n}\n\n/**\n * The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparring to run, is `PENDING`. The instance is running when the phase is `RUNNING`. 
`COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccesfully.\n\n */\nexport type InstanceGetResponsePhase =\n | \"COMPLETED\"\n | \"CRASH_LOOP_BACKOFF\"\n | \"FAILED\"\n | \"IMAGE_PULL_BACKOFF\"\n | \"PENDING\"\n | \"RUNNING\"\n | \"SUCCEEDED\"\n | \"UNKNOWN\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceGetResponsePhase = {\n COMPLETED: \"COMPLETED\" as InstanceGetResponsePhase,\n CRASH_LOOP_BACKOFF: \"CRASH_LOOP_BACKOFF\" as InstanceGetResponsePhase,\n FAILED: \"FAILED\" as InstanceGetResponsePhase,\n IMAGE_PULL_BACKOFF: \"IMAGE_PULL_BACKOFF\" as InstanceGetResponsePhase,\n PENDING: \"PENDING\" as InstanceGetResponsePhase,\n RUNNING: \"RUNNING\" as InstanceGetResponsePhase,\n SUCCEEDED: \"SUCCEEDED\" as InstanceGetResponsePhase,\n UNKNOWN: \"UNKNOWN\" as InstanceGetResponsePhase,\n};\n\n/**\n * The type of Application, which can be a `job` or an `application`\n\n */\nexport type InstanceGetResponseApplicationType = \"APPLICATION\" | \"JOB\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceGetResponseApplicationType = {\n APPLICATION: \"APPLICATION\" as InstanceGetResponseApplicationType,\n JOB: \"JOB\" as InstanceGetResponseApplicationType,\n};\n\nexport interface InstanceGetResponse {\n /** The Project the Instance is running in\n */\n project_id: string;\n /** The application name\n */\n name: string;\n /** The type of Application, which can be a `job` or an `application`\n */\n application_type: InstanceGetResponseApplicationType;\n /** The application ID\n */\n application_id: string;\n /** The application version\n */\n application_version: string;\n /** The optional application specification. For **Applications** this is returned verbatim. For **Jobs** additional material will be found.\n */\n application_specification?: string;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The application owner, the person who launched tha apllication and is the only user than can stop it.\n */\n owner: string;\n /** The application unique reference\n */\n launched: string;\n /** The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparring to run, is `PENDING`. The instance is running when the phase is `RUNNING`. `COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccesfully.\n */\n phase: InstanceGetResponsePhase;\n /** The application endpoint\n */\n url?: string;\n /** The JSON string representation of the JobDefintion's outputs\n */\n outputs?: string;\n /** Set if the instance output file permissions are expected to be fixed by the Data Manager when the Instance completes.\n */\n fix_permissions?: boolean;\n /** A list of Tasks related to the application. Tasks are responsible for creating and deleteing the instance. You should fins at least one, assuming the corresponding task has not been deleted.\n */\n tasks: InstanceTask[];\n}\n\nexport interface FilesGetResponse {\n /** The project\n */\n project_id: string;\n /** The project path\n */\n path: string;\n /** The dataset identity (not its name). 
A unique reference assigned automatically when uploaded\n */\n files: FilePathFile[];\n /** Sub-directories in the current path\n */\n paths: string[];\n}\n\nexport interface FilePostResponse {\n /** The Project File identity, assigned automatically when a Dataset is added to a Project\n */\n file_id: string;\n /** The name of the File that will appear in the Project\n */\n file_name: string;\n /** The path to the file in the Project, relative to the volume root (mount point). Files in the root of the project will have a path value of '/'\n */\n file_path: string;\n /** The File task identity. The task assigned to convert and attach the Dataset File to the Project\n */\n task_id: string;\n}\n\nexport interface InstancePostResponse {\n /** The application instance identity (not its name). Assigned automatically when created\n */\n instance_id: string;\n /** The instance task identity. The task assigned to process the instance\n */\n task_id: string;\n /** The instance expanded command. Applies only to Job instances.\n */\n command?: string;\n}\n\nexport type InstanceDeleteResponse = TaskIdentity;\n\nexport interface DatasetsGetResponse {\n datasets: DatasetSummary[];\n}\n\nexport type DatasetVersionsGetResponse = DatasetDetail;\n\nexport type DatasetVersionDeleteResponse = TaskIdentity;\n\n/**\n * The Metadata type (an object)\n\n */\nexport type DatasetSchemaGetResponseType = \"object\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetSchemaGetResponseType = {\n object: \"object\" as DatasetSchemaGetResponseType,\n};\n\nexport interface DatasetSchemaGetResponse {\n /** The Metadata title\n */\n title: string;\n /** The Metadata description\n */\n description: string;\n /** The Metadata type (an object)\n */\n type: DatasetSchemaGetResponseType;\n /** Required properties\n */\n required: string[];\n [key: string]: any;\n}\n\nexport interface DatasetMetaGetResponse {\n /** The Metadata title\n */\n dataset_name: string;\n /** The Metadata description\n */\n dataset_id: string;\n /** The Metadata type (an object)\n */\n description: string;\n /** The date and time of creation\n */\n created: string;\n /** The date and time it was last updated\n */\n last_updated: string;\n /** The user who created the Metadata\n */\n created_by: string;\n /** The Metadata version\n */\n metadata_version: string;\n /** The Metadata's annotations\n */\n annotations: unknown[];\n /** The Metadata's labels\n */\n labels: unknown[];\n [key: string]: any;\n}\n\nexport interface DatasetDigestGetResponse {\n /** The Dataset matching the digest\n */\n dataset_id: string;\n /** The Dataset version\n */\n dataset_version: number;\n}\n\nexport interface DatasetPutPostResponse {\n /** The dataset identity (not its name). Assigned automatically when uploaded\n */\n dataset_id: string;\n /** The dataset identity version. Assigned automatically when uploaded\n */\n dataset_version: number;\n /** The dataset task identity. The task assigned to process the dataset\n */\n task_id: string;\n}\n\nexport interface ApplicationsGetResponse {\n /** A list of installed appications, which are application-compliant Kubernetes \"operators\"\n */\n applications: ApplicationSummary[];\n}\n\nexport interface ApplicationGetResponse {\n /** The Application's unique ID\n */\n id: string;\n /** The application specification template. 
Used when creating application instances\n */\n template: string;\n /** A list of instances of the application\n */\n instances: string[];\n /** The application group\n */\n group: string;\n /** The name (kind) of the application\n */\n kind: string;\n /** The list of available versions\n */\n versions: string[];\n}\n\nexport type AdminUserPutResponse = UserAccountDetail;\n\nexport interface AdminJobManifestLoadPutResponse {\n /** True if successful, false otherwise\n */\n status: boolean;\n /** The number of Job Manifests inspected\n */\n manifests_inspected: number;\n /** The number of Job Definitions inspected\n */\n job_definitions_inspected: number;\n /** The number of Jobs inspected\n */\n jobs_inspected: number;\n}\n\nexport interface AccountServerGetNamespaceResponse {\n /** The configured Account Server namespace, which will be an empty string if one is not configured. The AS API is expected as the service `as-api` in this namespace.\n */\n namespace: string;\n /** The name of this Data Manager instance, used to distinguish itself on calls to the Account Server. Every Data Manager is deployed with a unique name, regardless of the Account Server that's being used.\n */\n data_manager_name: string;\n}\n\nexport interface DmError {\n /** Brief error text that can be presented to the user\n */\n error: string;\n}\n"]}
1
+ {"version":3,"sources":["../src/data-manager-api.schemas.ts"],"names":[],"mappings":";;;;;;;;AA+aO,IAAM,kCAAkC;AAAA,EAC7C,QAAQ;AACV;AAkDO,IAAM,6BAA6B;AAAA,EACxC,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AACR;AA+CO,IAAM,iBAAiB;AAAA,EAC5B,SAAS;AAAA,EACT,SAAS;AAAA,EACT,OAAO;AAAA,EACP,SAAS;AAAA,EACT,SAAS;AACX;AA8BO,IAAM,iBAAiB;AAAA,EAC5B,UAAU;AAAA,EACV,OAAO;AAAA,EACP,SAAS;AAAA,EACT,MAAM;AAAA,EACN,OAAO;AACT;AAoBO,IAAM,8BAA8B;AAAA,EACzC,UAAU;AAAA,EACV,OAAO;AAAA,EACP,SAAS;AACX;AAyGO,IAAM,sBAAsB;AAAA,EACjC,QAAQ;AAAA,EACR,UAAU;AACZ;AAiDO,IAAM,8BAA8B;AAAA,EACzC,QAAQ;AAAA,EACR,UAAU;AACZ;AAkBO,IAAM,uBAAuB;AAAA,EAClC,WAAW;AAAA,EACX,oBAAoB;AAAA,EACpB,QAAQ;AAAA,EACR,oBAAoB;AAAA,EACpB,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AACX;AASO,IAAM,iCAAiC;AAAA,EAC5C,aAAa;AAAA,EACb,KAAK;AACP;AA0HO,IAAM,uCAAuC;AAAA,EAClD,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AACR;AAwEO,IAAM,sCAAsC;AAAA,EACjD,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AACR;AAgGO,IAAM,qBAAqB;AAAA,EAChC,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,MAAM;AAAA,EACN,KAAK;AACP;AAwEO,IAAM,yBAAyB;AAAA,EACpC,SAAS;AAAA,EACT,MAAM;AAAA,EACN,UAAU;AAAA,EACV,SAAS;AACX;AAqEO,IAAM,0BAA0B;AAAA,EACrC,QAAQ;AAAA,EACR,UAAU;AACZ;AA0DO,IAAM,sBAAsB;AAAA,EACjC,QAAQ;AAAA,EACR,QAAQ;AACV;AA2BO,IAAM,2BAA2B;AAAA,EACtC,WAAW;AAAA,EACX,oBAAoB;AAAA,EACpB,QAAQ;AAAA,EACR,oBAAoB;AAAA,EACpB,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AACX;AASO,IAAM,qCAAqC;AAAA,EAChD,aAAa;AAAA,EACb,KAAK;AACP;AA2GO,IAAM,+BAA+B;AAAA,EAC1C,QAAQ;AACV","sourcesContent":["/**\n * Generated by orval v6.7.1 🍺\n * Do not edit manually.\n * Dataset Manager API\n * The Dataset Manager API service.\n\nA service that allows *registered* users to make **Datasets** and associated **Metadata** available to **Applications** and **Jobs** using **Projects** and **Files**.\n\n * OpenAPI spec version: 0.7\n */\nexport type AdminGetServiceErrorsParams = {\n include_acknowleged?: QIncludeAcknowlegedParameter;\n};\n\nexport type GetUserApiLogParams = {\n from?: QFromParameter;\n until?: QUntilParameter;\n};\n\nexport type PatchInstanceParams = { archive?: QInstanceArchiveParameter };\n\nexport type GetInstancesParams = { project_id?: QProjectIdParameter };\n\nexport type GetTaskParams = {\n event_limit?: QEventLimitParameter;\n event_prior_ordinal?: QEventPriorOrdinalParameter;\n};\n\nexport type GetTasksParams = {\n exclude_done?: QExcludeDoneParameter;\n exclude_purpose?: QExcludePurposeParameter;\n project_id?: QProjectIdParameter;\n};\n\nexport type DeleteUnmanagedFileParams = {\n file: QFileParameter;\n path?: QFilePathParameter;\n project_id: QFileProjectIdParameter;\n};\n\nexport type GetFilesParams = {\n project_id: QFileProjectIdParameter;\n path?: QFilePathParameter;\n include_hidden?: QIncludeHiddenParameter;\n};\n\nexport type DeleteDatasetParams = {\n keep_project_files?: QKeepProjectFilesParameter;\n};\n\nexport type GetVersionsParams = { include_deleted?: QIncludeDeletedParameter };\n\nexport type GetProjectFileParams = {\n path?: QFilePathParameter;\n file: QFileParameter;\n};\n\n/**\n * Filter the datasets by a comma separated list of editors\n\n */\nexport type QEditorsParameter = string;\n\n/**\n * Filter the datasets by a comma separated list of owners\n\n */\nexport type QOwnersParameter = string;\n\n/**\n * Filter the datasets by username\n\n */\nexport type QUsernameParameter = string;\n\n/**\n * An until (exclusive) date-time. If provided only API calls made before this will be returned. 
UTC is assumed if no timezone is provided\n\n */\nexport type QUntilParameter = string;\n\n/**\n * A Project identity\n */\nexport type QProjectIdParameter = string;\n\n/**\n * JSON string containing a list of label/value pairs for the datasets to be filtered by. If the value is set to null, then only the label is matched. If both the label and value are set, then both are matched. e.g. '{\"label1\": null, \"labe2\": \"value2\"}'\n\n */\nexport type QLabelsParameter = string;\n\nexport type GetDatasetsParams = {\n include_deleted?: QIncludeDeletedParameter;\n username?: QUsernameParameter;\n dataset_mime_type?: QDatasetMimeTypeParameter;\n owners?: QOwnersParameter;\n editors?: QEditorsParameter;\n labels?: QLabelsParameter;\n};\n\n/**\n * Whether to convert Project managed file instances to unmanged files\n\n */\nexport type QKeepProjectFilesParameter = boolean;\n\n/**\n * True to archive the instance\n */\nexport type QInstanceArchiveParameter = boolean;\n\n/**\n * Whether to include hidden files and directories\n */\nexport type QIncludeHiddenParameter = boolean;\n\n/**\n * Whether to include records that are deleted\n */\nexport type QIncludeDeletedParameter = boolean;\n\n/**\n * Set to include acknowledged items\n\n */\nexport type QIncludeAcknowlegedParameter = boolean;\n\n/**\n * A from (inclusive) date-time. If provided no API calls prior to this will be returned. UTC is assumed if no timezone is provided\n\n */\nexport type QFromParameter = string;\n\n/**\n * The Project identity\n */\nexport type QFileProjectIdParameter = string;\n\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\nexport type QFilePathParameter = string;\n\n/**\n * A project file.\n\n */\nexport type QFileParameter = string;\n\n/**\n * Set to a dot-separated string of purpose enumerations, i.e. `INSTANCE`, `FILE` or `DATASET`. To exclude file and dataset tasks set to `FILE.DATASET`\n\n */\nexport type QExcludePurposeParameter = string;\n\n/**\n * Set true if you want to exclude 'done' tasks, i.e. just see those that are still running.\n\n */\nexport type QExcludeDoneParameter = boolean;\n\n/**\n * The ordinal of a previously received event. If set, only events subsequent to the ordinal provided will be returned. Providing a value of 0 will result in retrieving the first and subsequent events.\n\n */\nexport type QEventPriorOrdinalParameter = number;\n\n/**\n * Maximum number of events to return. If provided, can be 1 or more.\n\n */\nexport type QEventLimitParameter = number;\n\n/**\n * Filter the datasets by the supplied mime_type.\n\n */\nexport type QDatasetMimeTypeParameter = string;\n\n/**\n * Set, if you're an admin, to call the endpoint without impersonation\n\n */\nexport type QDoNotImpersonateParameter = boolean;\n\nexport type GetUserAccountParams = {\n do_not_impersonate?: QDoNotImpersonateParameter;\n};\n\nexport type UserPatchBodyBody = {\n /** If set to a message the user account is suspended, with the user receiving this message when they try and use the API. A suspended user accont can be restored by setting the message to `/restore`\n */\n suspend_message?: string;\n};\n\nexport type UserAccountPatchBodyBody = {\n /** If set the user account becomes private, if provided but false the user account becomes public. Public Users show up in user searches\n */\n private?: boolean;\n /** For **admin** accounts, if set the user account is able to read anything, i.e. `GET` API calls (i.e. 
endpoints that do not change the Data Manager state) behave as though the caller is acting as *everyone*. An **admin** user would set ths parameter in order to browse the system, and then switch to `impersonate` mode in order to chnage things as the chosen user\n */\n become_admin?: boolean;\n /** For **admin** accounts, if set API calls behave as though the caller is the user being impersonated. To stop impersonating set this to an empty string. To set impersonation to anything other than an empty string you must also set `become_admin`\n */\n impersonate?: string;\n /** For **admin** accounts, if this is set the account for the user being impersonated is patched, rather then the user's own account. To use this you must have a value for `impersonate`\n */\n use_impersonation?: boolean;\n};\n\nexport type ProjectPostBodyBody = {\n name: string;\n /** Whether the project is private. You may not be permitted to make the project private, that will depend on the project product you're using to create the project\n */\n private?: boolean;\n /** The Organisation the Project belongs to\n */\n organisation_id: string;\n /** The Organisational Unit the Project belongs to\n */\n unit_id: string;\n /** The Data Manager *Tier Product ID* you're using to create the Project\n */\n tier_product_id: string;\n};\n\nexport type ProjectPatchBodyBody = {\n private?: boolean;\n /** The new name of the ptojct\n */\n name?: string;\n};\n\nexport type ProjectFilePutBodyBody = {\n file: Blob;\n /** An alternative filename to use for the uploaded File\n */\n as_filename?: string;\n /** The Project path of the file.\n */\n path?: string;\n};\n\nexport type JobManifestPutBodyBody = {\n /** The URL of the Job Manifest */\n url: string;\n /** Optional URL header values (a JSON string) */\n header?: string;\n /** Optional URL parameter values (a JSON string) */\n params?: string;\n};\n\nexport type InstancePostBodyBody = {\n /** A supported application. Applications instances are managed using pre-deployed Kubernetes **Operators**. The application ID is a combination of the operator _plural_ and _group_.\n */\n application_id: string;\n /** A supported application version to launch\n */\n application_version: string;\n /** The project to attach\n */\n project_id: string;\n /** The name to use for the instance\n */\n as_name: string;\n /** A URL the DM will use to PUT job progress messages as the requested instance runs. Used, at the moment, for Job execution\n */\n callback_url?: string;\n /** Used in conjunction with the `callback_url` any value provided here will be passed back in the message payload that's delivered to the callback URL. It can be used by the recipient to provide a context that's meaningful\n */\n callback_context?: string;\n /** A debug value that may be used by the instance.\n\nFor Data Manager **Job** applications setting this to anything other zero ('0') prevents the Job's Pod from being deleted automatically, allowing a developer to inspect the Pod's log for example.\n\nThe behaviour of **Application** instances using this property is undefined. It will depend on whether the application **CRD** handles the Data Manager debug field.\n */\n debug?: string;\n /** The instance specification. A JSON string that's application-specific and controls the application's behaviour.\n\nWhen laucnhing a Data Manager **Job** Application you must identify the Job using the properties `collection`, `job` and `version`, e.g. 
`{\"collection\":\"im-test\",\"job\":\"nop\",\"version\":\"1.0.0\"}`\n\nJobs that offer commands will often advertise a series of **inputs** and **options** where the values can be provided using a **variables** map in the specification. Something like `\"variables\":{\"x\":7}`.\n\nJobs start in a Job-specific **working directory** but the starting directory for any Job can be adjusted by defining a `sub_path` to the root specification. For example, if you want the Job to start in the path `foo/bar` (inside the Job's built-in working directory) you can add `\"sub_path\":\"foo/bar\"` to the specification. You can only use a sub-path for a Job if the Job defines a working directory and `sub-path` cannot begin or end with a path separator (`/`).\n */\n specification?: string;\n};\n\nexport type FilePostBodyBody = {\n /** The Dataset UUID for the File that you intend to attach\n */\n dataset_id: string;\n /** The Dataset version to attach\n */\n dataset_version: number;\n /** The Project UUID you're attaching to\n */\n project_id: string;\n /** The desired Dataset file type (a MIME type). Whether or not the chosen fileType is supported will depend on the Dataset\n */\n as_type: string;\n /** A path within the Project to add the File, default is the project root ('/'), the mount-point within the application container. Paths must begin '/'\n */\n path?: string;\n /** Whether to compress the Dataset File as it's attached. Compression is achieved using gzip, resulting in a File ending `.gz`. By default the file will be compressed\n */\n compress?: boolean;\n /** Whether the Dataset File can be modified while in the Project. By default the File cannot be modified\n */\n immutable?: boolean;\n};\n\nexport type DatasetPutBodyBody = {\n /** The MIME type of the Dataset\n */\n dataset_type: string;\n /** Extra variables (text) presented to the Dataset format-support container, which occurs during the upload and database processing stage. The content of the text is Dataset type specific.\n */\n format_extra_variables?: string;\n /** If set the post-format database load action is skipped. This means the dataset is uploaded but its molecules are not entered into the Data Manager database. This can save significant time if you're just experimenting with the dataset and do not need the molecules in the database\n */\n skip_molecule_load?: boolean;\n /** The Project the file belongs to\n */\n project_id: string;\n /** The Project path of the file.\n */\n path: string;\n /** The file name of the file in the Project path to load as a new Dataset.\n */\n file_name: string;\n /** If provided the File becomes a new version of the Dataset named. If not provided this File becomes the first version of a new Dataset, whose ID is returned to you on success.\n */\n dataset_id?: string;\n /** The Organisation you want the Dataset to belong to. If not supplied the Project Organisation is used\n */\n organisation_id?: string;\n /** The Organisational Unit you want the Dataset to belong to. If not supplied the Project Unit is used\n */\n unit_id?: string;\n};\n\nexport type DatasetVersionMetaPostBodyBody = {\n /** JSON string containing a list of parameter changes to the metadata. Only the description is currently allowed.\n */\n meta_properties?: string;\n /** JSON string containing a list of annotations. 
The format of the labels should match either the Fields Descriptor or Service Execution annotation formats described in the data-manager-metadata library.\n */\n annotations?: string;\n};\n\nexport type DatasetMetaPostBodyBody = {\n /** JSON string containing a list of parameter changes to the metadata. Only the description is currently allowed.\n */\n meta_properties?: string;\n /** JSON string containing a list of labels. The format of the labels should match the label annotation format described in the data-manager-metadata library.\n */\n labels?: string;\n};\n\nexport type DatasetPostBodyBody = {\n dataset_file: Blob;\n /** The MIME type of the Dataset\n */\n dataset_type: string;\n /** Extra variables (text) presented to the Dataset format-support container, which occurs during the upload and database processing stage. The content of the text is Dataset type specific.\n */\n format_extra_variables?: string;\n /** If set the post-format database load action is skipped. This means the dataset is uploaded but its molecules are not entered into the Data Manager database. This can save significant time if you're just experimenting with the dataset and do not need the molecules in the database\n */\n skip_molecule_load?: boolean;\n /** An optional new filename to use for the uploaded Dataset. The Dataset will be stored using this name.\n */\n as_filename?: string;\n /** If provided the Dataset becomes a new version of the Dataset named. If not provided this Dataset becomes the first version of a new Dataset, whose ID is returned to you on success.\n */\n dataset_id?: string;\n /** The Organisation you want the Dataset to belong to\n */\n organisation_id: string;\n /** The Organisational Unit you want the Dataset to belong to\n */\n unit_id: string;\n};\n\nexport interface UserSummary {\n /** The user's preferred username\n */\n username: string;\n}\n\nexport interface UserDetail {\n /** The user's preferred username\n */\n username: string;\n /** The date and time the user was first seen (an ISO-8601 formatted string in UTC)\n */\n first_seen?: string;\n /** Set if the user's account is marked as private. Provate accounts do not show up against general queries.\n */\n private: boolean;\n /** True if the account is suspended\n */\n suspended?: boolean;\n /** If the account is suspended this typically displays a reason for suspension\n */\n suspension_message?: string;\n /** For admin accounts, whether the user is acting in an administartive capacity, i.e. 
acting as everyone\n */\n become_admin?: boolean;\n /** For admin accounts, whether the user is impersonating another user\n */\n impersonate?: string;\n}\n\nexport interface UserAccountDetail {\n user: UserDetail;\n /** Whether the caller has admin privilege */\n caller_has_admin_privilege: boolean;\n}\n\n/**\n * The Schema type (an object)\n\n */\nexport type TypeSummaryFormatterOptionsType = \"object\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TypeSummaryFormatterOptionsType = {\n object: \"object\" as TypeSummaryFormatterOptionsType,\n};\n\n/**\n * If present, contains the formatter_options that can be entered in the format_extra_variables field in the POST /dataset api.\n\n */\nexport type TypeSummaryFormatterOptions = {\n /** The title of the Formatter object\n */\n title: string;\n /** The Schema type (an object)\n */\n type: TypeSummaryFormatterOptionsType;\n /** Required properties\n */\n required: string[];\n [key: string]: any;\n};\n\nexport interface TypeSummary {\n /** The File Type MIME\n */\n mime: string;\n /** The file's supported file extensions. Each type is limited to a limited number of extensions. For example, SDF files must have the extension `.sdf` (or `.sdf.gz`).\n */\n file_extensions: string[];\n /** The file's type's format-support container image (if set). Types without a format support image cannot be uploaded, but they might be available for use as destination type when a Dataset is added to a Project.\n */\n formatter_image?: string;\n /** True if Datasets uploaded using this type's support loading of data into the Data Manager data-base.\n */\n formatter_supports_db_load?: boolean;\n /** If present, contains the formatter_options that can be entered in the format_extra_variables field in the POST /dataset api.\n */\n formatter_options?: TypeSummaryFormatterOptions;\n}\n\n/**\n * The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type TaskSummaryProcessingStage =\n | \"COPYING\"\n | \"FAILED\"\n | \"FORMATTING\"\n | \"LOADING\"\n | \"DELETING\"\n | \"DONE\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskSummaryProcessingStage = {\n COPYING: \"COPYING\" as TaskSummaryProcessingStage,\n FAILED: \"FAILED\" as TaskSummaryProcessingStage,\n FORMATTING: \"FORMATTING\" as TaskSummaryProcessingStage,\n LOADING: \"LOADING\" as TaskSummaryProcessingStage,\n DELETING: \"DELETING\" as TaskSummaryProcessingStage,\n DONE: \"DONE\" as TaskSummaryProcessingStage,\n};\n\nexport interface TaskSummary {\n /** The Task UUID\n */\n id: string;\n /** If a container image is launched by the task the image name is available here\n */\n image?: string;\n /** The date and time the task was created\n */\n created: string;\n /** True if the task has run to completion. If the task finished successfully the `exit_code` will be zero.\n */\n done: boolean;\n /** True if the Task relates to an object removal, i.e. a DELETE\n */\n removal?: boolean;\n /** Present when `done` and zero if the task finished successfully.\n */\n exit_code?: number;\n /** The purpose for the Task. Tasks are responsible for _uploading_ a Dataset, _attaching_ a Dataset as a File to a project or launching an Applications _instance_.\n */\n purpose: string;\n /** The related object ID for the Task purpose. 
If the purpose relates to a **Dataset** upload the ID will be a Dataset ID.\n */\n purpose_id: string;\n /** The related object version for the Task purpose. This field will only be set if the **purpose** is `DATASET`.\n */\n purpose_version?: number;\n /** The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage?: TaskSummaryProcessingStage;\n}\n\n/**\n * The task state. The typical state sequence is `PENDING`, then `STARTED` and finally `SUCCESS`\n\n */\nexport type TaskStateState =\n | \"PENDING\"\n | \"STARTED\"\n | \"RETRY\"\n | \"SUCCESS\"\n | \"FAILURE\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskStateState = {\n PENDING: \"PENDING\" as TaskStateState,\n STARTED: \"STARTED\" as TaskStateState,\n RETRY: \"RETRY\" as TaskStateState,\n SUCCESS: \"SUCCESS\" as TaskStateState,\n FAILURE: \"FAILURE\" as TaskStateState,\n};\n\nexport interface TaskState {\n /** The task state. The typical state sequence is `PENDING`, then `STARTED` and finally `SUCCESS`\n */\n state: TaskStateState;\n /** A short message accompanying the state, generally only found when the state is `FAILURE`\n */\n message?: string;\n /** The date and time of the state change\n */\n time: string;\n}\n\nexport interface TaskIdentity {\n task_id: string;\n}\n\n/**\n * The level of the message, a typical logging framework value\n\n */\nexport type TaskEventLevel =\n | \"CRITICAL\"\n | \"ERROR\"\n | \"WARNING\"\n | \"INFO\"\n | \"DEBUG\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskEventLevel = {\n CRITICAL: \"CRITICAL\" as TaskEventLevel,\n ERROR: \"ERROR\" as TaskEventLevel,\n WARNING: \"WARNING\" as TaskEventLevel,\n INFO: \"INFO\" as TaskEventLevel,\n DEBUG: \"DEBUG\" as TaskEventLevel,\n};\n\nexport interface TaskEvent {\n /** The event sequence number. 
The first event is always '1'.\n */\n ordinal: number;\n /** A short message.\n */\n message: string;\n /** The level of the message, a typical logging framework value\n */\n level: TaskEventLevel;\n /** The date and time the event was generated\n */\n time: string;\n}\n\nexport type ServiceErrorSummarySeverity = \"CRITICAL\" | \"ERROR\" | \"WARNING\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const ServiceErrorSummarySeverity = {\n CRITICAL: \"CRITICAL\" as ServiceErrorSummarySeverity,\n ERROR: \"ERROR\" as ServiceErrorSummarySeverity,\n WARNING: \"WARNING\" as ServiceErrorSummarySeverity,\n};\n\nexport interface ServiceErrorSummary {\n id: number;\n created: string;\n summary: string;\n severity: ServiceErrorSummarySeverity;\n hostname: string;\n error_code?: number;\n stack_trace: string;\n acknowledged: boolean;\n acknowledged_at?: string;\n acknowledging_user?: string;\n}\n\nexport interface ProjectFileDetail {\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The ProjectFile's Dataset origin\n */\n dataset_id?: string;\n /** The ProjectFile's Dataset origin version\n */\n dataset_version?: number;\n /** The ProjectFile's unique ID\n */\n file_id: string;\n /** The ProjectFile's filename within the Project\n */\n file_name: string;\n /** The ProjectFile's path within the Project volume\n */\n file_path: string;\n /** True if the ProjectFile cannot be modified while in the Project\n */\n immutable: boolean;\n /** The owner of the ProjectFile. This is the user that added the Dataset (as this file) to the Project\n */\n owner: string;\n /** The Project the ProjectFile belongs to\n */\n project_id?: string;\n /** The ProjectFile MIME type\n */\n mime_type: string;\n}\n\nexport interface ProjectDetail {\n /** The project name\n */\n name: string;\n /** The project unique reference\n */\n project_id: string;\n /** The Account Server Product the Project belongs to\n */\n product_id?: string;\n /** The Account Server Organisation the Project Product belongs to\n */\n organisation_id?: string;\n /** The Account Server Unit the Project Product belongs to\n */\n unit_id?: string;\n /** The project (owner) creator\n */\n owner: string;\n /** True if the project is private. Private projects are only visible to the owner and its editors.\n */\n private: boolean;\n /** An editor (user_id) of the project */\n editors: string[];\n /** The approximate size of all the files in the Project volume. This is updated regaularly throughout the day and its current size may differ from what is reported here. The size resolution is 1MiB (the smallest billable unit). Therefore a project that contains 32KiB of files is recorded as 1MiB in size */\n size: number;\n /** A list of managed files in the Project\n */\n files?: ProjectFileDetail[];\n}\n\nexport interface JobOrderDetail {\n options: string[];\n}\n\nexport interface JobVariables {\n /** The Job command's variable ordering declaration. Contains a list of `options` in an ordered list, the order defines the order of presentation of the Job's variables.\n */\n order?: JobOrderDetail;\n /** The Job command's inputs. A string that represents a JSONSchema for the command's inputs, essentially the **variables/inputs** block of the Job's JobDefinition.\n */\n inputs?: string;\n /** The Job command's outputs. 
A string that represents a JSONSchema for the command's inputs, essentially the **variables/outputs** block of the Job's JobDefinition.\n */\n outputs?: string;\n /** The Job command's options. A string that represents a JSONSchema for the command's options, essentially the **variables/options** block of the Job's JobDefinition.\n */\n options?: string;\n}\n\n/**\n * The optional container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type JobSummaryImageType = \"SIMPLE\" | \"NEXTFLOW\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const JobSummaryImageType = {\n SIMPLE: \"SIMPLE\" as JobSummaryImageType,\n NEXTFLOW: \"NEXTFLOW\" as JobSummaryImageType,\n};\n\nexport interface JobSummary {\n /** The Job's unique ID\n */\n id: number;\n /** The Job namespace\n */\n collection: string;\n /** The Job name, unique within a given namespace\n */\n job: string;\n /** The Job version\n */\n version: string;\n /** The optional container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n image_type: JobSummaryImageType;\n /** The Job's category\n */\n category?: string;\n /** The list of keywords assigned to the Job\n */\n keywords?: string[];\n /** The name of the job in English\n */\n name: string;\n /** The description of the job in English\n */\n description?: string;\n /** A URL linking to the Job documentation\n */\n doc_url?: string;\n}\n\nexport interface JobApplication {\n /** The Job's operator ID */\n id: string;\n /** The Job's operator version */\n version: string;\n}\n\n/**\n * The Job container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type InstanceSummaryJobImageType = \"SIMPLE\" | \"NEXTFLOW\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryJobImageType = {\n SIMPLE: \"SIMPLE\" as InstanceSummaryJobImageType,\n NEXTFLOW: \"NEXTFLOW\" as InstanceSummaryJobImageType,\n};\n\n/**\n * The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparring to run, is `PENDING`. The instance is running when the phase is `RUNNING`. 
`COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccesfully.\n\n */\nexport type InstanceSummaryPhase =\n | \"COMPLETED\"\n | \"CRASH_LOOP_BACKOFF\"\n | \"FAILED\"\n | \"IMAGE_PULL_BACKOFF\"\n | \"PENDING\"\n | \"RUNNING\"\n | \"SUCCEEDED\"\n | \"UNKNOWN\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryPhase = {\n COMPLETED: \"COMPLETED\" as InstanceSummaryPhase,\n CRASH_LOOP_BACKOFF: \"CRASH_LOOP_BACKOFF\" as InstanceSummaryPhase,\n FAILED: \"FAILED\" as InstanceSummaryPhase,\n IMAGE_PULL_BACKOFF: \"IMAGE_PULL_BACKOFF\" as InstanceSummaryPhase,\n PENDING: \"PENDING\" as InstanceSummaryPhase,\n RUNNING: \"RUNNING\" as InstanceSummaryPhase,\n SUCCEEDED: \"SUCCEEDED\" as InstanceSummaryPhase,\n UNKNOWN: \"UNKNOWN\" as InstanceSummaryPhase,\n};\n\n/**\n * The type of Application, which can be a `job` or an `application`\n\n */\nexport type InstanceSummaryApplicationType = \"APPLICATION\" | \"JOB\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryApplicationType = {\n APPLICATION: \"APPLICATION\" as InstanceSummaryApplicationType,\n JOB: \"JOB\" as InstanceSummaryApplicationType,\n};\n\nexport interface InstanceSummary {\n /** The application instance ID\n */\n id: string;\n /** The application ID\n */\n application_id: string;\n /** The application version\n */\n application_version: string;\n /** The User's specification, provided when the application was laucnhed\n */\n application_specification?: string;\n /** The type of Application, which can be a `job` or an `application`\n */\n application_type: InstanceSummaryApplicationType;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparring to run, is `PENDING`. The instance is running when the phase is `RUNNING`. `COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccesfully.\n */\n phase: InstanceSummaryPhase;\n /** The data and time (UTC) the instance was laucnhed\n */\n launched: string;\n /** The application instance owner, the person who launched tha application and is the only user than can stop it.\n */\n owner: string;\n /** The Project the instances is running in.\n */\n project_id: string;\n /** The Instance name\n */\n name: string;\n /** For applications (not Jobs) this is the URL the application has exposed. It will ponly be available when the application has started.\n */\n url?: string;\n /** If the instance relates to a job, this will be the job collection, as defined in the original collection's job defintion.\n */\n job_collection?: string;\n /** If the instance relates to a job, this will be the job, as defined in the original collection's job defintion.\n */\n job_job?: string;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job defintion.\n */\n job_name?: string;\n /** The Job defintion's unique ID\n */\n job_id?: number;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job defintion.\n */\n job_version?: string;\n /** The Job container image type. 
Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n job_image_type?: InstanceSummaryJobImageType;\n /** The instance's knwon outputs, a JSON string defining a map of all the outputs. Typcially applied only to JOB applcation types\n */\n outputs?: string;\n /** Set if the instance output file permissions are expected to be fixed by the Data Manager when the Instance completes.\n */\n fix_permissions?: boolean;\n}\n\nexport interface FileStat {\n /** The size of the file in bytes\n */\n size: number;\n /** The date and time (UTC) of the last modification\n */\n modified: string;\n}\n\nexport interface FilePathFile {\n /** The file's Dataset ID (if the file belongs to a Dataset)\n */\n dataset_id?: string;\n /** The file's Dataset version (if the file belongs to a Dataset)\n */\n dataset_version?: number;\n /** The file name\n */\n file_name: string;\n /** The ID of the file (if the file belongs to a Dataset)\n */\n file_id?: string;\n /** Whether the file is immutable (read-only)\n */\n immutable?: boolean;\n /** The file's MIME type\n */\n mime_type?: string;\n /** The file's owner\n */\n owner: string;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n stat: FileStat;\n}\n\n/**\n * The dictionary of label/value pairs\n\n */\nexport type DatasetVersionSummaryLabels = { [key: string]: any };\n\n/**\n * The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type DatasetVersionSummaryProcessingStage =\n | \"COPYING\"\n | \"FAILED\"\n | \"FORMATTING\"\n | \"LOADING\"\n | \"DELETING\"\n | \"DONE\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetVersionSummaryProcessingStage = {\n COPYING: \"COPYING\" as DatasetVersionSummaryProcessingStage,\n FAILED: \"FAILED\" as DatasetVersionSummaryProcessingStage,\n FORMATTING: \"FORMATTING\" as DatasetVersionSummaryProcessingStage,\n LOADING: \"LOADING\" as DatasetVersionSummaryProcessingStage,\n DELETING: \"DELETING\" as DatasetVersionSummaryProcessingStage,\n DONE: \"DONE\" as DatasetVersionSummaryProcessingStage,\n};\n\nexport interface DatasetVersionSummary {\n /** The owner of the Dataset version\n */\n owner?: string;\n /** The source of the Dataset - typically the name of the Dataset that was uploaded or a URL reference\n */\n source_ref: string;\n /** The date and time the Dataset was uploaded (an ISO-8601 formatted string in UTC)\n */\n published: string;\n /** The Dataset MIME type\n */\n type: string;\n /** The filename of the Dataset\n */\n file_name: string;\n /** The list of Project's the Dataset is attached to\n */\n projects: string[];\n /** Any extra variables passed in during the upload and handed to the format-support container.\n */\n format_extra_variables?: string;\n /** The version of the dataset\n */\n version: number;\n /** The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). 
A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage: DatasetVersionSummaryProcessingStage;\n /** If this is a Deleted dataset this is the date and time the dataset was deleted (an ISO-8601 formatted string in UTC)\n */\n deleted?: string;\n /** If this is a Deleted dataset this is the username of the user that deleted the Dataset\n */\n deleting_user?: string;\n /** The dictionary of label/value pairs\n */\n labels?: DatasetVersionSummaryLabels;\n /** The size, in bytes, of the formatted Dataset\n */\n size?: number;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n}\n\nexport interface DatasetVersionProjectFile {\n project_name: string;\n project: string;\n files: string[];\n}\n\n/**\n * The dictionary of label/value pairs\n\n */\nexport type DatasetVersionDetailLabels = { [key: string]: any };\n\n/**\n * The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type DatasetVersionDetailProcessingStage =\n | \"COPYING\"\n | \"FAILED\"\n | \"FORMATTING\"\n | \"LOADING\"\n | \"DELETING\"\n | \"DONE\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetVersionDetailProcessingStage = {\n COPYING: \"COPYING\" as DatasetVersionDetailProcessingStage,\n FAILED: \"FAILED\" as DatasetVersionDetailProcessingStage,\n FORMATTING: \"FORMATTING\" as DatasetVersionDetailProcessingStage,\n LOADING: \"LOADING\" as DatasetVersionDetailProcessingStage,\n DELETING: \"DELETING\" as DatasetVersionDetailProcessingStage,\n DONE: \"DONE\" as DatasetVersionDetailProcessingStage,\n};\n\nexport interface DatasetVersionDetail {\n /** The owner of the Dataset version\n */\n owner: string;\n /** The source of the Dataset - typically the name of the Dataset that was uploaded or a URL reference\n */\n source_ref: string;\n /** The date and time the Dataset was uploaded (an ISO-8601 formatted string in UTC)\n */\n published: string;\n /** The Dataset MIME type\n */\n type: string;\n /** The filename of the Dataset\n */\n file_name: string;\n /** The list of\n */\n project_files: DatasetVersionProjectFile[];\n /** The list of Project's the Dataset is attached to\n */\n projects: string[];\n /** Any extra variables passed in during the upload and handed to the format-support container.\n */\n format_extra_variables?: string;\n /** The version of the dataset\n */\n version: number;\n /** The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). 
A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage: DatasetVersionDetailProcessingStage;\n /** If this is a Deleted dataset this is the date and time the dataset was deleted (an ISO-8601 formatted string in UTC)\n */\n deleted?: string;\n /** If this is a Deleted dataset this is the username of the user that deleted the Dataset\n */\n deleting_user?: string;\n /** The dictionary of label/value pairs\n */\n labels?: DatasetVersionDetailLabels;\n /** The size, in bytes, of the formatted Dataset\n */\n size?: number;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n}\n\nexport interface DatasetSummary {\n /** The Dataset ID\n */\n dataset_id: string;\n /** The list of editors\n */\n editors: string[];\n /** The set of separate versions of the Dataset\n */\n versions: DatasetVersionSummary[];\n}\n\nexport interface DatasetDetail {\n /** The Dataset ID\n */\n dataset_id: string;\n /** The list of editors\n */\n editors: string[];\n /** The owner of the Dataset\n */\n owner: string;\n /** The set of separate versions of the Dataset\n */\n versions: DatasetVersionDetail[];\n}\n\nexport interface ApplicationSummary {\n /** The application name, the value of the Kubernetes **Custom Resource Definition** `spec.names.kind` property\n */\n kind: string;\n /** The application group\n */\n group?: string;\n /** The application unique reference, the value of the Kubernetes **Custom Resource Definition** `metadata.name` property\n */\n application_id: string;\n}\n\n/**\n * The REST method used. GET methods are not logged\n\n */\nexport type ApiLogDetailMethod = \"DELETE\" | \"PATCH\" | \"POST\" | \"PUT\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const ApiLogDetailMethod = {\n DELETE: \"DELETE\" as ApiLogDetailMethod,\n PATCH: \"PATCH\" as ApiLogDetailMethod,\n POST: \"POST\" as ApiLogDetailMethod,\n PUT: \"PUT\" as ApiLogDetailMethod,\n};\n\nexport interface ApiLogDetail {\n /** The REST method used. GET methods are not logged\n */\n method: ApiLogDetailMethod;\n /** The API path used, e.g. `/dataset`\n */\n path: string;\n /** The date/time the API call began\n */\n began: string;\n /** The REST method approximate execution time (nS) in a humanised form, where 7969400 is presented as 7,969,400\n */\n duration_ns?: string;\n /** The HTTP response status code\n */\n status_code?: number;\n /** A JSON string representign the in-query properties used in the call\n */\n params?: string;\n /** A JSON string representing the in-body properties used in the call\n */\n body?: string;\n /** Not all API calls using your user ID may have been executed by you, In cases where an administrator has executed a REST API call on your behalf, their user ID will be revealed using this property\n */\n impersonator?: string;\n}\n\nexport interface VersionGetResponse {\n /** The Data Manager version. This is guaranteed to be a valid semantic version for official (tagged) images. 
The version value format for unofficial images is a string but otherwise undefined\n */\n version: string;\n}\n\nexport interface UsersGetResponse {\n /** A list of Users that have used the Data Manager\n */\n users: UserSummary[];\n}\n\nexport interface UserApiLogGetResponse {\n /** A list of API loc call records, with the oldest record first in the list\n */\n api_log: ApiLogDetail[];\n}\n\nexport type UserAccountGetResponse = UserAccountDetail;\n\nexport interface TypesGetResponse {\n /** A list of availabel MIME types\n */\n types: TypeSummary[];\n}\n\nexport interface TasksGetResponse {\n /** A list of Tasks\n */\n tasks: TaskSummary[];\n}\n\n/**\n * The purpose of the task. Tasks are typically executed for the purpose of Dataset processing, attaching as Project Files or Application Instance execution. Other fields in this object are only valid if the purpose is known (i.e. is not `UNKNOWN`)\n\n */\nexport type TaskGetResponsePurpose =\n | \"DATASET\"\n | \"FILE\"\n | \"INSTANCE\"\n | \"PROJECT\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskGetResponsePurpose = {\n DATASET: \"DATASET\" as TaskGetResponsePurpose,\n FILE: \"FILE\" as TaskGetResponsePurpose,\n INSTANCE: \"INSTANCE\" as TaskGetResponsePurpose,\n PROJECT: \"PROJECT\" as TaskGetResponsePurpose,\n};\n\nexport interface TaskGetResponse {\n /** The purpose of the task. Tasks are typically executed for the purpose of Dataset processing, attaching as Project Files or Application Instance execution. Other fields in this object are only valid if the purpose is known (i.e. is not `UNKNOWN`)\n */\n purpose: TaskGetResponsePurpose;\n /** The identity of the purpose, where available. This will be the Dataset UUID if the purpose of the task is/was for Dataset processing.\n */\n purpose_id: string;\n /** The version number, relating to the object under control. For Datasets this will be the Dataset version.\n */\n purpose_version?: number;\n /** Is the Task Purpose is `INSTANCE`, and the instance was given a `specification` the specification can be found here. For **Applications** the specification is returned verbatim.\n */\n instance_specification?: string;\n /** If a container image is launched by the task the image name is available here\n */\n image?: string;\n /** The date and time the task was created\n */\n created: string;\n /** True if the task has run to completion. If the task finished successfully the `exit_code` will be zero.\n */\n done: boolean;\n /** True if the Task relates to an object removal, i.e. a DELETE\n */\n removal?: boolean;\n /** Present when `done` and zero if the task finished successfully.\n */\n exit_code?: number;\n /** A (possibly empty) list of application states, the oldest state occupies the first position in the list.\n */\n states?: TaskState[];\n /** A (possibly empty) list of application events. The oldest event occupies the first position in the list.\n */\n events?: TaskEvent[];\n}\n\nexport interface ServiceErrorsGetResponse {\n /** A list of service errors\n */\n service_errors: ServiceErrorSummary[];\n}\n\nexport interface ProjectsGetResponse {\n projects: ProjectDetail[];\n}\n\nexport interface ProjectPostResponse {\n /** The project identity\n */\n project_id: string;\n}\n\nexport type ProjectGetResponse = ProjectDetail;\n\nexport type ProjectDeleteResponse = TaskIdentity;\n\nexport interface JobsGetResponse {\n jobs: JobSummary[];\n}\n\n/**\n * The optional container image type. 
Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type JobGetResponseImageType = \"SIMPLE\" | \"NEXTFLOW\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const JobGetResponseImageType = {\n SIMPLE: \"SIMPLE\" as JobGetResponseImageType,\n NEXTFLOW: \"NEXTFLOW\" as JobGetResponseImageType,\n};\n\nexport interface JobGetResponse {\n /** The Job's unique ID\n */\n id: number;\n application: ApplicationSummary;\n /** The Job collection\n */\n collection: string;\n /** The Job name, unique within a given collection\n */\n job: string;\n /** The Job version\n */\n version: string;\n /** The Job command (usually encoded)\n */\n command: string;\n /** The Job command encoding\n */\n command_encoding: string;\n /** The name of the container image that houses the job\n */\n image_name: string;\n /** The container image tag\n */\n image_tag: string;\n /** The directory where the project volume will be mounted. The root path to the project files\n */\n image_project_directory: string;\n /** The directory used as the container image working directory (if defined)\n */\n image_working_directory?: unknown;\n /** The optional container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n image_type?: JobGetResponseImageType;\n /** The Job's descriptive name\n */\n name: string;\n /** English description of the job\n */\n description?: string;\n /** A URL linking to the Job documentation\n */\n doc_url?: string;\n /** The Job's category\n */\n category?: string;\n /** The list of keywords assigned to the Job\n */\n keywords?: string[];\n variables?: JobVariables;\n}\n\nexport type InstanceTaskPurpose = \"CREATE\" | \"DELETE\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceTaskPurpose = {\n CREATE: \"CREATE\" as InstanceTaskPurpose,\n DELETE: \"DELETE\" as InstanceTaskPurpose,\n};\n\nexport interface InstanceTask {\n id: string;\n purpose: InstanceTaskPurpose;\n}\n\nexport interface InstancesGetResponse {\n instances: InstanceSummary[];\n}\n\n/**\n * The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparring to run, is `PENDING`. The instance is running when the phase is `RUNNING`. 
`COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccesfully.\n\n */\nexport type InstanceGetResponsePhase =\n | \"COMPLETED\"\n | \"CRASH_LOOP_BACKOFF\"\n | \"FAILED\"\n | \"IMAGE_PULL_BACKOFF\"\n | \"PENDING\"\n | \"RUNNING\"\n | \"SUCCEEDED\"\n | \"UNKNOWN\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceGetResponsePhase = {\n COMPLETED: \"COMPLETED\" as InstanceGetResponsePhase,\n CRASH_LOOP_BACKOFF: \"CRASH_LOOP_BACKOFF\" as InstanceGetResponsePhase,\n FAILED: \"FAILED\" as InstanceGetResponsePhase,\n IMAGE_PULL_BACKOFF: \"IMAGE_PULL_BACKOFF\" as InstanceGetResponsePhase,\n PENDING: \"PENDING\" as InstanceGetResponsePhase,\n RUNNING: \"RUNNING\" as InstanceGetResponsePhase,\n SUCCEEDED: \"SUCCEEDED\" as InstanceGetResponsePhase,\n UNKNOWN: \"UNKNOWN\" as InstanceGetResponsePhase,\n};\n\n/**\n * The type of Application, which can be a `job` or an `application`\n\n */\nexport type InstanceGetResponseApplicationType = \"APPLICATION\" | \"JOB\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceGetResponseApplicationType = {\n APPLICATION: \"APPLICATION\" as InstanceGetResponseApplicationType,\n JOB: \"JOB\" as InstanceGetResponseApplicationType,\n};\n\nexport interface InstanceGetResponse {\n /** The Project the Instance is running in\n */\n project_id: string;\n /** The application name\n */\n name: string;\n /** The type of Application, which can be a `job` or an `application`\n */\n application_type: InstanceGetResponseApplicationType;\n /** The application ID\n */\n application_id: string;\n /** The application version\n */\n application_version: string;\n /** The optional application specification. For **Applications** this is returned verbatim. For **Jobs** additional material will be found.\n */\n application_specification?: string;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The application owner, the person who launched tha apllication and is the only user than can stop it.\n */\n owner: string;\n /** The application unique reference\n */\n launched: string;\n /** The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparring to run, is `PENDING`. The instance is running when the phase is `RUNNING`. `COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccesfully.\n */\n phase: InstanceGetResponsePhase;\n /** The application endpoint\n */\n url?: string;\n /** The JSON string representation of the JobDefintion's outputs\n */\n outputs?: string;\n /** Set if the instance output file permissions are expected to be fixed by the Data Manager when the Instance completes.\n */\n fix_permissions?: boolean;\n /** A list of Tasks related to the application. Tasks are responsible for creating and deleteing the instance. You should fins at least one, assuming the corresponding task has not been deleted.\n */\n tasks: InstanceTask[];\n}\n\nexport interface FilesGetResponse {\n /** The project\n */\n project_id: string;\n /** The project path\n */\n path: string;\n /** The dataset identity (not its name). 
A unique reference assigned automatically when uploaded\n */\n files: FilePathFile[];\n /** Sub-directories in the current path\n */\n paths: string[];\n}\n\nexport interface FilePostResponse {\n /** The Project File identity, assigned automatically when a Dataset is added to a Project\n */\n file_id: string;\n /** The name of the File that will appear in the Project\n */\n file_name: string;\n /** The path to the file in the Project, relative to the volume root (mount point). Files in the root of the project will have a path value of '/'\n */\n file_path: string;\n /** The File task identity. The task assigned to convert and attach the Dataset File to the Project\n */\n task_id: string;\n}\n\nexport interface InstancePostResponse {\n /** The application instance identity (not its name). Assigned automatically when created\n */\n instance_id: string;\n /** The instance task identity. The task assigned to process the instance\n */\n task_id: string;\n /** The instance expanded command. Applies only to Job instances.\n */\n command?: string;\n}\n\nexport type InstanceDeleteResponse = TaskIdentity;\n\nexport interface DatasetsGetResponse {\n datasets: DatasetSummary[];\n}\n\nexport type DatasetVersionsGetResponse = DatasetDetail;\n\nexport type DatasetVersionDeleteResponse = TaskIdentity;\n\n/**\n * The Metadata type (an object)\n\n */\nexport type DatasetSchemaGetResponseType = \"object\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetSchemaGetResponseType = {\n object: \"object\" as DatasetSchemaGetResponseType,\n};\n\nexport interface DatasetSchemaGetResponse {\n /** The Metadata title\n */\n title: string;\n /** The Metadata description\n */\n description: string;\n /** The Metadata type (an object)\n */\n type: DatasetSchemaGetResponseType;\n /** Required properties\n */\n required: string[];\n [key: string]: any;\n}\n\nexport interface DatasetMetaGetResponse {\n /** The Metadata title\n */\n dataset_name: string;\n /** The Metadata description\n */\n dataset_id: string;\n /** The Metadata type (an object)\n */\n description: string;\n /** The date and time of creation\n */\n created: string;\n /** The date and time it was last updated\n */\n last_updated: string;\n /** The user who created the Metadata\n */\n created_by: string;\n /** The Metadata version\n */\n metadata_version: string;\n /** The Metadata's annotations\n */\n annotations: unknown[];\n /** The Metadata's labels\n */\n labels: unknown[];\n [key: string]: any;\n}\n\nexport interface DatasetDigestGetResponse {\n /** The Dataset matching the digest\n */\n dataset_id: string;\n /** The Dataset version\n */\n dataset_version: number;\n}\n\nexport interface DatasetPutPostResponse {\n /** The dataset identity (not its name). Assigned automatically when uploaded\n */\n dataset_id: string;\n /** The dataset identity version. Assigned automatically when uploaded\n */\n dataset_version: number;\n /** The dataset task identity. The task assigned to process the dataset\n */\n task_id: string;\n}\n\nexport interface ApplicationsGetResponse {\n /** A list of installed appications, which are application-compliant Kubernetes \"operators\"\n */\n applications: ApplicationSummary[];\n}\n\nexport interface ApplicationGetResponse {\n /** The Application's unique ID\n */\n id: string;\n /** The application specification template. 
Used when creating application instances\n */\n template: string;\n /** A list of instances of the application\n */\n instances: string[];\n /** The application group\n */\n group: string;\n /** The name (kind) of the application\n */\n kind: string;\n /** The list of available versions\n */\n versions: string[];\n}\n\nexport type AdminUserPutResponse = UserAccountDetail;\n\nexport interface AdminJobManifestLoadPutResponse {\n /** True if successful, false otherwise\n */\n status: boolean;\n /** The number of Job Manifests inspected\n */\n manifests_inspected: number;\n /** The number of Job Definitions inspected\n */\n job_definitions_inspected: number;\n /** The number of Jobs inspected\n */\n jobs_inspected: number;\n}\n\nexport interface AccountServerGetNamespaceResponse {\n /** The configured Account Server namespace, which will be an empty string if one is not configured. The AS API is expected as the service `as-api` in this namespace.\n */\n namespace: string;\n /** The name of this Data Manager instance, used to distinguish itself on calls to the Account Server. Every Data Manager is deployed with a unique name, regardless of the Account Server that's being used.\n */\n data_manager_name: string;\n}\n\nexport interface DmError {\n /** Brief error text that can be presented to the user\n */\n error: string;\n}\n"]}
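
Among the schemas re-emitted in the sourcemap above, ProjectDetail carries a `private` flag whose description says a private project is only visible to its owner and its editors. The following is a minimal consumer-side sketch of that rule, not part of the package: it assumes the type is imported from the package root (which re-exports ProjectDetail), and it is only useful for filtering a list of projects already returned by the API, since the server enforces visibility itself.

import type { ProjectDetail } from "@squonk/data-manager-client";

/**
 * Sketch of the visibility rule documented on ProjectDetail.private:
 * a private project is shown only to its owner or one of its editors.
 * Purely illustrative - the Data Manager applies this rule server-side.
 */
export const isProjectVisibleTo = (
  project: ProjectDetail,
  username: string,
): boolean =>
  !project.private ||
  project.owner === username ||
  project.editors.includes(username);
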
package/index.d.ts CHANGED
@@ -1,2 +1,2 @@
1
- export { b8 as AXIOS_INSTANCE, b6 as AccountServerGetNamespaceResponse, A as AdminGetServiceErrorsParams, b5 as AdminJobManifestLoadPutResponse, b4 as AdminUserPutResponse, av as ApiLogDetail, au as ApiLogDetailMethod, b3 as ApplicationGetResponse, at as ApplicationSummary, b2 as ApplicationsGetResponse, as as DatasetDetail, b0 as DatasetDigestGetResponse, a$ as DatasetMetaGetResponse, R as DatasetMetaPostBodyBody, S as DatasetPostBodyBody, N as DatasetPutBodyBody, b1 as DatasetPutPostResponse, a_ as DatasetSchemaGetResponse, aZ as DatasetSchemaGetResponseType, ar as DatasetSummary, aY as DatasetVersionDeleteResponse, aq as DatasetVersionDetail, ao as DatasetVersionDetailLabels, ap as DatasetVersionDetailProcessingStage, O as DatasetVersionMetaPostBodyBody, an as DatasetVersionProjectFile, am as DatasetVersionSummary, ak as DatasetVersionSummaryLabels, al as DatasetVersionSummaryProcessingStage, aX as DatasetVersionsGetResponse, aW as DatasetsGetResponse, e as DeleteDatasetParams, D as DeleteUnmanagedFileParams, b7 as DmError, bc as ErrorType, aj as FilePathFile, M as FilePostBodyBody, aT as FilePostResponse, ai as FileStat, aS as FilesGetResponse, m as GetDatasetsParams, d as GetFilesParams, a as GetInstancesParams, g as GetProjectFileParams, b as GetTaskParams, c as GetTasksParams, E as GetUserAccountParams, G as GetUserApiLogParams, f as GetVersionsParams, aV as InstanceDeleteResponse, aR as InstanceGetResponse, aQ as InstanceGetResponseApplicationType, aP as InstanceGetResponsePhase, L as InstancePostBodyBody, aU as InstancePostResponse, ah as InstanceSummary, ag as InstanceSummaryApplicationType, ae as InstanceSummaryJobImageType, af as InstanceSummaryPhase, aN as InstanceTask, aM as InstanceTaskPurpose, aO as InstancesGetResponse, ad as JobApplication, aL as JobGetResponse, aK as JobGetResponseImageType, K as JobManifestPutBodyBody, a9 as JobOrderDetail, ac as JobSummary, ab as JobSummaryImageType, aa as JobVariables, aJ as JobsGetResponse, P as PatchInstanceParams, aI as ProjectDeleteResponse, a8 as ProjectDetail, a7 as ProjectFileDetail, J as ProjectFilePutBodyBody, aH as ProjectGetResponse, I as ProjectPatchBodyBody, H as ProjectPostBodyBody, aG as ProjectPostResponse, aF as ProjectsGetResponse, B as QDatasetMimeTypeParameter, C as QDoNotImpersonateParameter, Q as QEditorsParameter, z as QEventLimitParameter, y as QEventPriorOrdinalParameter, x as QExcludeDoneParameter, w as QExcludePurposeParameter, v as QFileParameter, u as QFilePathParameter, t as QFileProjectIdParameter, s as QFromParameter, r as QIncludeAcknowlegedParameter, q as QIncludeDeletedParameter, p as QIncludeHiddenParameter, o as QInstanceArchiveParameter, n as QKeepProjectFilesParameter, l as QLabelsParameter, h as QOwnersParameter, k as QProjectIdParameter, j as QUntilParameter, i as QUsernameParameter, a6 as ServiceErrorSummary, a5 as ServiceErrorSummarySeverity, aE as ServiceErrorsGetResponse, a4 as TaskEvent, a3 as TaskEventLevel, aD as TaskGetResponse, aC as TaskGetResponsePurpose, a2 as TaskIdentity, a1 as TaskState, a0 as TaskStateState, $ as TaskSummary, _ as TaskSummaryProcessingStage, aB as TasksGetResponse, Z as TypeSummary, Y as TypeSummaryFormatterOptions, X as TypeSummaryFormatterOptionsType, aA as TypesGetResponse, W as UserAccountDetail, az as UserAccountGetResponse, F as UserAccountPatchBodyBody, ay as UserApiLogGetResponse, V as UserDetail, U as UserPatchBodyBody, T as UserSummary, ax as UsersGetResponse, aw as VersionGetResponse, bb as customInstance, b9 as setAuthToken, ba as setBaseUrl } from './custom-instance-f51d6877.js';
1
+ export { b8 as AXIOS_INSTANCE, b6 as AccountServerGetNamespaceResponse, A as AdminGetServiceErrorsParams, b5 as AdminJobManifestLoadPutResponse, b4 as AdminUserPutResponse, av as ApiLogDetail, au as ApiLogDetailMethod, b3 as ApplicationGetResponse, at as ApplicationSummary, b2 as ApplicationsGetResponse, as as DatasetDetail, b0 as DatasetDigestGetResponse, a$ as DatasetMetaGetResponse, R as DatasetMetaPostBodyBody, S as DatasetPostBodyBody, N as DatasetPutBodyBody, b1 as DatasetPutPostResponse, a_ as DatasetSchemaGetResponse, aZ as DatasetSchemaGetResponseType, ar as DatasetSummary, aY as DatasetVersionDeleteResponse, aq as DatasetVersionDetail, ao as DatasetVersionDetailLabels, ap as DatasetVersionDetailProcessingStage, O as DatasetVersionMetaPostBodyBody, an as DatasetVersionProjectFile, am as DatasetVersionSummary, ak as DatasetVersionSummaryLabels, al as DatasetVersionSummaryProcessingStage, aX as DatasetVersionsGetResponse, aW as DatasetsGetResponse, e as DeleteDatasetParams, D as DeleteUnmanagedFileParams, b7 as DmError, bc as ErrorType, aj as FilePathFile, M as FilePostBodyBody, aT as FilePostResponse, ai as FileStat, aS as FilesGetResponse, m as GetDatasetsParams, d as GetFilesParams, a as GetInstancesParams, g as GetProjectFileParams, b as GetTaskParams, c as GetTasksParams, E as GetUserAccountParams, G as GetUserApiLogParams, f as GetVersionsParams, aV as InstanceDeleteResponse, aR as InstanceGetResponse, aQ as InstanceGetResponseApplicationType, aP as InstanceGetResponsePhase, L as InstancePostBodyBody, aU as InstancePostResponse, ah as InstanceSummary, ag as InstanceSummaryApplicationType, ae as InstanceSummaryJobImageType, af as InstanceSummaryPhase, aN as InstanceTask, aM as InstanceTaskPurpose, aO as InstancesGetResponse, ad as JobApplication, aL as JobGetResponse, aK as JobGetResponseImageType, K as JobManifestPutBodyBody, a9 as JobOrderDetail, ac as JobSummary, ab as JobSummaryImageType, aa as JobVariables, aJ as JobsGetResponse, P as PatchInstanceParams, aI as ProjectDeleteResponse, a8 as ProjectDetail, a7 as ProjectFileDetail, J as ProjectFilePutBodyBody, aH as ProjectGetResponse, I as ProjectPatchBodyBody, H as ProjectPostBodyBody, aG as ProjectPostResponse, aF as ProjectsGetResponse, B as QDatasetMimeTypeParameter, C as QDoNotImpersonateParameter, Q as QEditorsParameter, z as QEventLimitParameter, y as QEventPriorOrdinalParameter, x as QExcludeDoneParameter, w as QExcludePurposeParameter, v as QFileParameter, u as QFilePathParameter, t as QFileProjectIdParameter, s as QFromParameter, r as QIncludeAcknowlegedParameter, q as QIncludeDeletedParameter, p as QIncludeHiddenParameter, o as QInstanceArchiveParameter, n as QKeepProjectFilesParameter, l as QLabelsParameter, h as QOwnersParameter, k as QProjectIdParameter, j as QUntilParameter, i as QUsernameParameter, a6 as ServiceErrorSummary, a5 as ServiceErrorSummarySeverity, aE as ServiceErrorsGetResponse, a4 as TaskEvent, a3 as TaskEventLevel, aD as TaskGetResponse, aC as TaskGetResponsePurpose, a2 as TaskIdentity, a1 as TaskState, a0 as TaskStateState, $ as TaskSummary, _ as TaskSummaryProcessingStage, aB as TasksGetResponse, Z as TypeSummary, Y as TypeSummaryFormatterOptions, X as TypeSummaryFormatterOptionsType, aA as TypesGetResponse, W as UserAccountDetail, az as UserAccountGetResponse, F as UserAccountPatchBodyBody, ay as UserApiLogGetResponse, V as UserDetail, U as UserPatchBodyBody, T as UserSummary, ax as UsersGetResponse, aw as VersionGetResponse, bb as customInstance, b9 as setAuthToken, ba as setBaseUrl } from './custom-instance-6e6b0801.js';
2
2
  import 'axios';
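
Note that the re-export list is identical on both sides of this hunk; only the internal chunk filename (its content hash) changed, so code importing from the package root is unaffected. For orientation only, the sketch below shows one way a consumer might use a few of these re-exports (setBaseUrl, setAuthToken and the ProjectDetail type). It is a minimal illustration under stated assumptions: the helper signatures are not visible in this diff, so the single-string arguments and the endpoint URL are assumed, not documented API.

// Hypothetical usage sketch -- argument shapes for setBaseUrl/setAuthToken are assumed.
import { setAuthToken, setBaseUrl } from '@squonk/data-manager-client';
import type { ProjectDetail } from '@squonk/data-manager-client';

// Assumption: both helpers accept a plain string (base URL / bearer token).
setBaseUrl('https://example.org/data-manager-api'); // hypothetical endpoint
setAuthToken('eyJhbGciOi...'); // token obtained elsewhere

// The re-exported generated types can then describe API payloads.
const summariseProject = (project: ProjectDetail): string =>
  `${project.name} (${project.project_id}) owned by ${project.owner}, ` +
  `${project.editors.length} editor(s)`;
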
package/index.js.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"sources":["../src/data-manager-api.schemas.ts"],"sourcesContent":["/**\n * Generated by orval v6.7.1 🍺\n * Do not edit manually.\n * Dataset Manager API\n * The Dataset Manager API service.\n\nA service that allows *registered* users to make **Datasets** and associated **Metadata** available to **Applications** and **Jobs** using **Projects** and **Files**.\n\n * OpenAPI spec version: 0.7\n */\nexport type AdminGetServiceErrorsParams = {\n include_acknowleged?: QIncludeAcknowlegedParameter;\n};\n\nexport type GetUserApiLogParams = {\n from?: QFromParameter;\n until?: QUntilParameter;\n};\n\nexport type PatchInstanceParams = { archive?: QInstanceArchiveParameter };\n\nexport type GetInstancesParams = { project_id?: QProjectIdParameter };\n\nexport type GetTaskParams = {\n event_limit?: QEventLimitParameter;\n event_prior_ordinal?: QEventPriorOrdinalParameter;\n};\n\nexport type GetTasksParams = {\n exclude_done?: QExcludeDoneParameter;\n exclude_purpose?: QExcludePurposeParameter;\n project_id?: QProjectIdParameter;\n};\n\nexport type DeleteUnmanagedFileParams = {\n file: QFileParameter;\n path?: QFilePathParameter;\n project_id: QFileProjectIdParameter;\n};\n\nexport type GetFilesParams = {\n project_id: QFileProjectIdParameter;\n path?: QFilePathParameter;\n include_hidden?: QIncludeHiddenParameter;\n};\n\nexport type DeleteDatasetParams = {\n keep_project_files?: QKeepProjectFilesParameter;\n};\n\nexport type GetVersionsParams = { include_deleted?: QIncludeDeletedParameter };\n\nexport type GetProjectFileParams = {\n path?: QFilePathParameter;\n file: QFileParameter;\n};\n\n/**\n * Filter the datasets by a comma separated list of editors\n\n */\nexport type QEditorsParameter = string;\n\n/**\n * Filter the datasets by a comma separated list of owners\n\n */\nexport type QOwnersParameter = string;\n\n/**\n * Filter the datasets by username\n\n */\nexport type QUsernameParameter = string;\n\n/**\n * An until (exclusive) date-time. If provided only API calls made before this will be returned. UTC is assumed if no timezone is provided\n\n */\nexport type QUntilParameter = string;\n\n/**\n * A Project identity\n */\nexport type QProjectIdParameter = string;\n\n/**\n * JSON string containing a list of label/value pairs for the datasets to be filtered by. If the value is set to null, then only the label is matched. If both the label and value are set, then both are matched. e.g. '{\"label1\": null, \"labe2\": \"value2\"}'\n\n */\nexport type QLabelsParameter = string;\n\nexport type GetDatasetsParams = {\n include_deleted?: QIncludeDeletedParameter;\n username?: QUsernameParameter;\n dataset_mime_type?: QDatasetMimeTypeParameter;\n owners?: QOwnersParameter;\n editors?: QEditorsParameter;\n labels?: QLabelsParameter;\n};\n\n/**\n * Whether to convert Project managed file instances to unmanged files\n\n */\nexport type QKeepProjectFilesParameter = boolean;\n\n/**\n * True to archive the instance\n */\nexport type QInstanceArchiveParameter = boolean;\n\n/**\n * Whether to include hidden files and directories\n */\nexport type QIncludeHiddenParameter = boolean;\n\n/**\n * Whether to include records that are deleted\n */\nexport type QIncludeDeletedParameter = boolean;\n\n/**\n * Set to include acknowledged items\n\n */\nexport type QIncludeAcknowlegedParameter = boolean;\n\n/**\n * A from (inclusive) date-time. If provided no API calls prior to this will be returned. 
UTC is assumed if no timezone is provided\n\n */\nexport type QFromParameter = string;\n\n/**\n * The Project identity\n */\nexport type QFileProjectIdParameter = string;\n\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\nexport type QFilePathParameter = string;\n\n/**\n * A project file.\n\n */\nexport type QFileParameter = string;\n\n/**\n * Set to a dot-separated string of purpose enumerations, i.e. `INSTANCE`, `FILE` or `DATASET`. To exclude file and dataset tasks set to `FILE.DATASET`\n\n */\nexport type QExcludePurposeParameter = string;\n\n/**\n * Set true if you want to exclude 'done' tasks, i.e. just see those that are still running.\n\n */\nexport type QExcludeDoneParameter = boolean;\n\n/**\n * The ordinal of a previously received event. If set, only events subsequent to the ordinal provided will be returned. Providing a value of 0 will result in retrieving the first and subsequent events.\n\n */\nexport type QEventPriorOrdinalParameter = number;\n\n/**\n * Maximum number of events to return. If provided, can be 1 or more.\n\n */\nexport type QEventLimitParameter = number;\n\n/**\n * Filter the datasets by the supplied mime_type.\n\n */\nexport type QDatasetMimeTypeParameter = string;\n\n/**\n * Set, if you're an admin, to call the endpoint without impersonation\n\n */\nexport type QDoNotImpersonateParameter = boolean;\n\nexport type GetUserAccountParams = {\n do_not_impersonate?: QDoNotImpersonateParameter;\n};\n\nexport type UserPatchBodyBody = {\n /** If set to a message the user account is suspended, with the user receiving this message when they try and use the API. A suspended user accont can be restored by setting the message to `/restore`\n */\n suspend_message?: string;\n};\n\nexport type UserAccountPatchBodyBody = {\n /** If set the user account becomes private, if provided but false the user account becomes public. Public Users show up in user searches\n */\n private?: boolean;\n /** For **admin** accounts, if set the user account is able to read anything, i.e. `GET` API calls (i.e. endpoints that do not change the Data Manager state) behave as though the caller is acting as *everyone*. An **admin** user would set ths parameter in order to browse the system, and then switch to `impersonate` mode in order to chnage things as the chosen user\n */\n become_admin?: boolean;\n /** For **admin** accounts, if set API calls behave as though the caller is the user being impersonated. To stop impersonating set this to an empty string. To set impersonation to anything other than an empty string you must also set `become_admin`\n */\n impersonate?: string;\n /** For **admin** accounts, if this is set the account for the user being impersonated is patched, rather then the user's own account. To use this you must have a value for `impersonate`\n */\n use_impersonation?: boolean;\n};\n\nexport type ProjectPostBodyBody = {\n name: string;\n /** Whether the project is private. 
You may not be permitted to make the project private, that will depend on the project product you're using to create the project\n */\n private?: boolean;\n /** The Organisation the Project belongs to\n */\n organisation_id: string;\n /** The Organisational Unit the Project belongs to\n */\n unit_id: string;\n /** The Data Manager *Tier Product ID* you're using to create the Project\n */\n tier_product_id: string;\n};\n\nexport type ProjectPatchBodyBody = {\n private?: boolean;\n /** The new name of the ptojct\n */\n name?: string;\n};\n\nexport type ProjectFilePutBodyBody = {\n file: Blob;\n /** An alternative filename to use for the uploaded File\n */\n as_filename?: string;\n /** The Project path of the file.\n */\n path?: string;\n};\n\nexport type JobManifestPutBodyBody = {\n /** The URL of the Job Manifest */\n url: string;\n /** Optional URL header values (a JSON string) */\n header?: string;\n /** Optional URL parameter values (a JSON string) */\n params?: string;\n};\n\nexport type InstancePostBodyBody = {\n /** A supported application. Applications instances are managed using pre-deployed Kubernetes **Operators**. The application ID is a combination of the operator _plural_ and _group_.\n */\n application_id: string;\n /** A supported application version to launch\n */\n application_version: string;\n /** The project to attach\n */\n project_id: string;\n /** The name to use for the instance\n */\n as_name: string;\n /** A URL the DM will use to PUT job progress messages as the requested instance runs. Used, at the moment, for Job execution\n */\n callback_url?: string;\n /** Used in conjunction with the `callback_url` any value provided here will be passed back in the message payload that's delivered to the callback URL. It can be used by the recipient to provide a context that's meaningful\n */\n callback_context?: string;\n /** A debug value that may be used by the instance.\n\nFor Data Manager **Job** applications setting this to anything other zero ('0') prevents the Job's Pod from being deleted automatically, allowing a developer to inspect the Pod's log for example.\n\nThe behaviour of **Application** instances using this property is undefined. It will depend on whether the application **CRD** handles the Data Manager debug field.\n */\n debug?: string;\n /** The instance specification. A JSON string that's application-specific and controls the application's behaviour.\n\nWhen laucnhing a Data Manager **Job** Application you must identify the Job using the properties `collection`, `job` and `version`, e.g. `{\"collection\":\"im-test\",\"job\":\"nop\",\"version\":\"1.0.0\"}`\n\nJobs that offer commands will often advertise a series of **inputs** and **options** where the values can be provided using a **variables** map in the specification. Something like `\"variables\":{\"x\":7}`.\n\nJobs start in a Job-specific **working directory** but the starting directory for any Job can be adjusted by defining a `sub_path` to the root specification. For example, if you want the Job to start in the path `foo/bar` (inside the Job's built-in working directory) you can add `\"sub_path\":\"foo/bar\"` to the specification. 
You can only use a sub-path for a Job if the Job defines a working directory and `sub-path` cannot begin or end with a path separator (`/`).\n */\n specification?: string;\n};\n\nexport type FilePostBodyBody = {\n /** The Dataset UUID for the File that you intend to attach\n */\n dataset_id: string;\n /** The Dataset version to attach\n */\n dataset_version: number;\n /** The Project UUID you're attaching to\n */\n project_id: string;\n /** The desired Dataset file type (a MIME type). Whether or not the chosen fileType is supported will depend on the Dataset\n */\n as_type: string;\n /** A path within the Project to add the File, default is the project root ('/'), the mount-point within the application container. Paths must begin '/'\n */\n path?: string;\n /** Whether to compress the Dataset File as it's attached. Compression is achieved using gzip, resulting in a File ending `.gz`. By default the file will be compressed\n */\n compress?: boolean;\n /** Whether the Dataset File can be modified while in the Project. By default the File cannot be modified\n */\n immutable?: boolean;\n};\n\nexport type DatasetPutBodyBody = {\n /** The MIME type of the Dataset\n */\n dataset_type: string;\n /** Extra variables (text) presented to the Dataset format-support container, which occurs during the upload and database processing stage. The content of the text is Dataset type specific.\n */\n format_extra_variables?: string;\n /** If set the post-format database load action is skipped. This means the dataset is uploaded but its molecules are not entered into the Data Manager database. This can save significant time if you're just experimenting with the dataset and do not need the molecules in the database\n */\n skip_molecule_load?: boolean;\n /** The Project the file belongs to\n */\n project_id: string;\n /** The Project path of the file.\n */\n path: string;\n /** The file name of the file in the Project path to load as a new Dataset.\n */\n file_name: string;\n /** If provided the File becomes a new version of the Dataset named. If not provided this File becomes the first version of a new Dataset, whose ID is returned to you on success.\n */\n dataset_id?: string;\n /** The Organisation you want the Dataset to belong to. If not supplied the Project Organisation is used\n */\n organisation_id?: string;\n /** The Organisational Unit you want the Dataset to belong to. If not supplied the Project Unit is used\n */\n unit_id?: string;\n};\n\nexport type DatasetVersionMetaPostBodyBody = {\n /** JSON string containing a list of parameter changes to the metadata. Only the description is currently allowed.\n */\n meta_properties?: string;\n /** JSON string containing a list of annotations. The format of the labels should match either the Fields Descriptor or Service Execution annotation formats described in the data-manager-metadata library.\n */\n annotations?: string;\n};\n\nexport type DatasetMetaPostBodyBody = {\n /** JSON string containing a list of parameter changes to the metadata. Only the description is currently allowed.\n */\n meta_properties?: string;\n /** JSON string containing a list of labels. 
The format of the labels should match the label annotation format described in the data-manager-metadata library.\n */\n labels?: string;\n};\n\nexport type DatasetPostBodyBody = {\n dataset_file: Blob;\n /** The MIME type of the Dataset\n */\n dataset_type: string;\n /** Extra variables (text) presented to the Dataset format-support container, which occurs during the upload and database processing stage. The content of the text is Dataset type specific.\n */\n format_extra_variables?: string;\n /** If set the post-format database load action is skipped. This means the dataset is uploaded but its molecules are not entered into the Data Manager database. This can save significant time if you're just experimenting with the dataset and do not need the molecules in the database\n */\n skip_molecule_load?: boolean;\n /** An optional new filename to use for the uploaded Dataset. The Dataset will be stored using this name.\n */\n as_filename?: string;\n /** If provided the Dataset becomes a new version of the Dataset named. If not provided this Dataset becomes the first version of a new Dataset, whose ID is returned to you on success.\n */\n dataset_id?: string;\n /** The Organisation you want the Dataset to belong to\n */\n organisation_id: string;\n /** The Organisational Unit you want the Dataset to belong to\n */\n unit_id: string;\n};\n\nexport interface UserSummary {\n /** The user's preferred username\n */\n username: string;\n}\n\nexport interface UserDetail {\n /** The user's preferred username\n */\n username: string;\n /** The date and time the user was first seen (an ISO-8601 formatted string in UTC)\n */\n first_seen?: string;\n /** Set if the user's account is marked as private. Provate accounts do not show up against general queries.\n */\n private: boolean;\n /** True if the account is suspended\n */\n suspended?: boolean;\n /** If the account is suspended this typically displays a reason for suspension\n */\n suspension_message?: string;\n /** For admin accounts, whether the user is acting in an administartive capacity, i.e. acting as everyone\n */\n become_admin?: boolean;\n /** For admin accounts, whether the user is impersonating another user\n */\n impersonate?: string;\n}\n\nexport interface UserAccountDetail {\n user: UserDetail;\n /** Whether the caller has admin privilege */\n caller_has_admin_privilege: boolean;\n}\n\n/**\n * The Schema type (an object)\n\n */\nexport type TypeSummaryFormatterOptionsType = \"object\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TypeSummaryFormatterOptionsType = {\n object: \"object\" as TypeSummaryFormatterOptionsType,\n};\n\n/**\n * If present, contains the formatter_options that can be entered in the format_extra_variables field in the POST /dataset api.\n\n */\nexport type TypeSummaryFormatterOptions = {\n /** The title of the Formatter object\n */\n title: string;\n /** The Schema type (an object)\n */\n type: TypeSummaryFormatterOptionsType;\n /** Required properties\n */\n required: string[];\n [key: string]: any;\n};\n\nexport interface TypeSummary {\n /** The File Type MIME\n */\n mime: string;\n /** The file's supported file extensions. Each type is limited to a limited number of extensions. For example, SDF files must have the extension `.sdf` (or `.sdf.gz`).\n */\n file_extensions: string[];\n /** The file's type's format-support container image (if set). 
Types without a format support image cannot be uploaded, but they might be available for use as destination type when a Dataset is added to a Project.\n */\n formatter_image?: string;\n /** True if Datasets uploaded using this type's support loading of data into the Data Manager data-base.\n */\n formatter_supports_db_load?: boolean;\n /** If present, contains the formatter_options that can be entered in the format_extra_variables field in the POST /dataset api.\n */\n formatter_options?: TypeSummaryFormatterOptions;\n}\n\n/**\n * The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type TaskSummaryProcessingStage =\n | \"COPYING\"\n | \"FAILED\"\n | \"FORMATTING\"\n | \"LOADING\"\n | \"DELETING\"\n | \"DONE\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskSummaryProcessingStage = {\n COPYING: \"COPYING\" as TaskSummaryProcessingStage,\n FAILED: \"FAILED\" as TaskSummaryProcessingStage,\n FORMATTING: \"FORMATTING\" as TaskSummaryProcessingStage,\n LOADING: \"LOADING\" as TaskSummaryProcessingStage,\n DELETING: \"DELETING\" as TaskSummaryProcessingStage,\n DONE: \"DONE\" as TaskSummaryProcessingStage,\n};\n\nexport interface TaskSummary {\n /** The Task UUID\n */\n id: string;\n /** If a container image is launched by the task the image name is available here\n */\n image?: string;\n /** The date and time the task was created\n */\n created: string;\n /** True if the task has run to completion. If the task finished successfully the `exit_code` will be zero.\n */\n done: boolean;\n /** True if the Task relates to an object removal, i.e. a DELETE\n */\n removal?: boolean;\n /** Present when `done` and zero if the task finished successfully.\n */\n exit_code?: number;\n /** The purpose for the Task. Tasks are responsible for _uploading_ a Dataset, _attaching_ a Dataset as a File to a project or launching an Applications _instance_.\n */\n purpose: string;\n /** The related object ID for the Task purpose. If the purpose relates to a **Dataset** upload the ID will be a Dataset ID.\n */\n purpose_id: string;\n /** The related object version for the Task purpose. This field will only be set if the **purpose** is `DATASET`.\n */\n purpose_version?: number;\n /** The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage?: TaskSummaryProcessingStage;\n}\n\n/**\n * The task state. The typical state sequence is `PENDING`, then `STARTED` and finally `SUCCESS`\n\n */\nexport type TaskStateState =\n | \"PENDING\"\n | \"STARTED\"\n | \"RETRY\"\n | \"SUCCESS\"\n | \"FAILURE\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskStateState = {\n PENDING: \"PENDING\" as TaskStateState,\n STARTED: \"STARTED\" as TaskStateState,\n RETRY: \"RETRY\" as TaskStateState,\n SUCCESS: \"SUCCESS\" as TaskStateState,\n FAILURE: \"FAILURE\" as TaskStateState,\n};\n\nexport interface TaskState {\n /** The task state. 
The typical state sequence is `PENDING`, then `STARTED` and finally `SUCCESS`\n */\n state: TaskStateState;\n /** A short message accompanying the state, generally only found when the state is `FAILURE`\n */\n message?: string;\n /** The date and time of the state change\n */\n time: string;\n}\n\nexport interface TaskIdentity {\n task_id: string;\n}\n\n/**\n * The level of the message, a typical logging framework value\n\n */\nexport type TaskEventLevel =\n | \"CRITICAL\"\n | \"ERROR\"\n | \"WARNING\"\n | \"INFO\"\n | \"DEBUG\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskEventLevel = {\n CRITICAL: \"CRITICAL\" as TaskEventLevel,\n ERROR: \"ERROR\" as TaskEventLevel,\n WARNING: \"WARNING\" as TaskEventLevel,\n INFO: \"INFO\" as TaskEventLevel,\n DEBUG: \"DEBUG\" as TaskEventLevel,\n};\n\nexport interface TaskEvent {\n /** The event sequence number. The first event is always '1'.\n */\n ordinal: number;\n /** A short message.\n */\n message: string;\n /** The level of the message, a typical logging framework value\n */\n level: TaskEventLevel;\n /** The date and time the event was generated\n */\n time: string;\n}\n\nexport type ServiceErrorSummarySeverity = \"CRITICAL\" | \"ERROR\" | \"WARNING\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const ServiceErrorSummarySeverity = {\n CRITICAL: \"CRITICAL\" as ServiceErrorSummarySeverity,\n ERROR: \"ERROR\" as ServiceErrorSummarySeverity,\n WARNING: \"WARNING\" as ServiceErrorSummarySeverity,\n};\n\nexport interface ServiceErrorSummary {\n id: number;\n created: string;\n summary: string;\n severity: ServiceErrorSummarySeverity;\n hostname: string;\n error_code?: number;\n stack_trace: string;\n acknowledged: boolean;\n acknowledged_at?: string;\n acknowledging_user?: string;\n}\n\nexport interface ProjectFileDetail {\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The ProjectFile's Dataset origin\n */\n dataset_id?: string;\n /** The ProjectFile's Dataset origin version\n */\n dataset_version?: number;\n /** The ProjectFile's unique ID\n */\n file_id: string;\n /** The ProjectFile's filename within the Project\n */\n file_name: string;\n /** The ProjectFile's path within the Project volume\n */\n file_path: string;\n /** True if the ProjectFile cannot be modified while in the Project\n */\n immutable: boolean;\n /** The owner of the ProjectFile. This is the user that added the Dataset (as this file) to the Project\n */\n owner: string;\n /** The Project the ProjectFile belongs to\n */\n project_id?: string;\n /** The ProjectFile MIME type\n */\n mime_type: string;\n}\n\nexport interface ProjectDetail {\n /** The project name\n */\n name: string;\n /** The project unique reference\n */\n project_id: string;\n /** The Account Server Product the Project belongs to\n */\n product_id?: string;\n /** The Account Server Organisation the Project Product belongs to\n */\n organisation_id?: string;\n /** The Account Server Unit the Project Product belongs to\n */\n unit_id?: string;\n /** The project (owner) creator\n */\n owner: string;\n /** An editor (user_id) of the project */\n editors: string[];\n /** The approximate size of all the files in the Project volume. This is updated regaularly throughout the day and its current size may differ from what is reported here. The size resolution is 1MiB (the smallest billable unit). 
Therefore a project that contains 32KiB of files is recorded as 1MiB in size */\n size: number;\n /** A list of managed files in the Project\n */\n files?: ProjectFileDetail[];\n}\n\nexport interface JobOrderDetail {\n options: string[];\n}\n\nexport interface JobVariables {\n /** The Job command's variable ordering declaration. Contains a list of `options` in an ordered list, the order defines the order of presentation of the Job's variables.\n */\n order?: JobOrderDetail;\n /** The Job command's inputs. A string that represents a JSONSchema for the command's inputs, essentially the **variables/inputs** block of the Job's JobDefinition.\n */\n inputs?: string;\n /** The Job command's outputs. A string that represents a JSONSchema for the command's inputs, essentially the **variables/outputs** block of the Job's JobDefinition.\n */\n outputs?: string;\n /** The Job command's options. A string that represents a JSONSchema for the command's options, essentially the **variables/options** block of the Job's JobDefinition.\n */\n options?: string;\n}\n\n/**\n * The optional container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type JobSummaryImageType = \"SIMPLE\" | \"NEXTFLOW\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const JobSummaryImageType = {\n SIMPLE: \"SIMPLE\" as JobSummaryImageType,\n NEXTFLOW: \"NEXTFLOW\" as JobSummaryImageType,\n};\n\nexport interface JobSummary {\n /** The Job's unique ID\n */\n id: number;\n /** The Job namespace\n */\n collection: string;\n /** The Job name, unique within a given namespace\n */\n job: string;\n /** The Job version\n */\n version: string;\n /** The optional container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n image_type: JobSummaryImageType;\n /** The Job's category\n */\n category?: string;\n /** The list of keywords assigned to the Job\n */\n keywords?: string[];\n /** The name of the job in English\n */\n name: string;\n /** The description of the job in English\n */\n description?: string;\n /** A URL linking to the Job documentation\n */\n doc_url?: string;\n}\n\nexport interface JobApplication {\n /** The Job's operator ID */\n id: string;\n /** The Job's operator version */\n version: string;\n}\n\n/**\n * The Job container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type InstanceSummaryJobImageType = \"SIMPLE\" | \"NEXTFLOW\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryJobImageType = {\n SIMPLE: \"SIMPLE\" as InstanceSummaryJobImageType,\n NEXTFLOW: \"NEXTFLOW\" as InstanceSummaryJobImageType,\n};\n\n/**\n * The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparring to run, is `PENDING`. The instance is running when the phase is `RUNNING`. 
`COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccesfully.\n\n */\nexport type InstanceSummaryPhase =\n | \"COMPLETED\"\n | \"CRASH_LOOP_BACKOFF\"\n | \"FAILED\"\n | \"IMAGE_PULL_BACKOFF\"\n | \"PENDING\"\n | \"RUNNING\"\n | \"SUCCEEDED\"\n | \"UNKNOWN\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryPhase = {\n COMPLETED: \"COMPLETED\" as InstanceSummaryPhase,\n CRASH_LOOP_BACKOFF: \"CRASH_LOOP_BACKOFF\" as InstanceSummaryPhase,\n FAILED: \"FAILED\" as InstanceSummaryPhase,\n IMAGE_PULL_BACKOFF: \"IMAGE_PULL_BACKOFF\" as InstanceSummaryPhase,\n PENDING: \"PENDING\" as InstanceSummaryPhase,\n RUNNING: \"RUNNING\" as InstanceSummaryPhase,\n SUCCEEDED: \"SUCCEEDED\" as InstanceSummaryPhase,\n UNKNOWN: \"UNKNOWN\" as InstanceSummaryPhase,\n};\n\n/**\n * The type of Application, which can be a `job` or an `application`\n\n */\nexport type InstanceSummaryApplicationType = \"APPLICATION\" | \"JOB\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryApplicationType = {\n APPLICATION: \"APPLICATION\" as InstanceSummaryApplicationType,\n JOB: \"JOB\" as InstanceSummaryApplicationType,\n};\n\nexport interface InstanceSummary {\n /** The application instance ID\n */\n id: string;\n /** The application ID\n */\n application_id: string;\n /** The application version\n */\n application_version: string;\n /** The User's specification, provided when the application was laucnhed\n */\n application_specification?: string;\n /** The type of Application, which can be a `job` or an `application`\n */\n application_type: InstanceSummaryApplicationType;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparring to run, is `PENDING`. The instance is running when the phase is `RUNNING`. `COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccesfully.\n */\n phase: InstanceSummaryPhase;\n /** The data and time (UTC) the instance was laucnhed\n */\n launched: string;\n /** The application instance owner, the person who launched tha application and is the only user than can stop it.\n */\n owner: string;\n /** The Project the instances is running in.\n */\n project_id: string;\n /** The Instance name\n */\n name: string;\n /** For applications (not Jobs) this is the URL the application has exposed. It will ponly be available when the application has started.\n */\n url?: string;\n /** If the instance relates to a job, this will be the job collection, as defined in the original collection's job defintion.\n */\n job_collection?: string;\n /** If the instance relates to a job, this will be the job, as defined in the original collection's job defintion.\n */\n job_job?: string;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job defintion.\n */\n job_name?: string;\n /** The Job defintion's unique ID\n */\n job_id?: number;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job defintion.\n */\n job_version?: string;\n /** The Job container image type. 
Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n job_image_type?: InstanceSummaryJobImageType;\n /** The instance's knwon outputs, a JSON string defining a map of all the outputs. Typcially applied only to JOB applcation types\n */\n outputs?: string;\n /** Set if the instance output file permissions are expected to be fixed by the Data Manager when the Instance completes.\n */\n fix_permissions?: boolean;\n}\n\nexport interface FileStat {\n /** The size of the file in bytes\n */\n size: number;\n /** The date and time (UTC) of the last modification\n */\n modified: string;\n}\n\nexport interface FilePathFile {\n /** The file's Dataset ID (if the file belongs to a Dataset)\n */\n dataset_id?: string;\n /** The file's Dataset version (if the file belongs to a Dataset)\n */\n dataset_version?: number;\n /** The file name\n */\n file_name: string;\n /** The ID of the file (if the file belongs to a Dataset)\n */\n file_id?: string;\n /** Whether the file is immutable (read-only)\n */\n immutable?: boolean;\n /** The file's MIME type\n */\n mime_type?: string;\n /** The file's owner\n */\n owner: string;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n stat: FileStat;\n}\n\n/**\n * The dictionary of label/value pairs\n\n */\nexport type DatasetVersionSummaryLabels = { [key: string]: any };\n\n/**\n * The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type DatasetVersionSummaryProcessingStage =\n | \"COPYING\"\n | \"FAILED\"\n | \"FORMATTING\"\n | \"LOADING\"\n | \"DELETING\"\n | \"DONE\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetVersionSummaryProcessingStage = {\n COPYING: \"COPYING\" as DatasetVersionSummaryProcessingStage,\n FAILED: \"FAILED\" as DatasetVersionSummaryProcessingStage,\n FORMATTING: \"FORMATTING\" as DatasetVersionSummaryProcessingStage,\n LOADING: \"LOADING\" as DatasetVersionSummaryProcessingStage,\n DELETING: \"DELETING\" as DatasetVersionSummaryProcessingStage,\n DONE: \"DONE\" as DatasetVersionSummaryProcessingStage,\n};\n\nexport interface DatasetVersionSummary {\n /** The owner of the Dataset version\n */\n owner?: string;\n /** The source of the Dataset - typically the name of the Dataset that was uploaded or a URL reference\n */\n source_ref: string;\n /** The date and time the Dataset was uploaded (an ISO-8601 formatted string in UTC)\n */\n published: string;\n /** The Dataset MIME type\n */\n type: string;\n /** The filename of the Dataset\n */\n file_name: string;\n /** The list of Project's the Dataset is attached to\n */\n projects: string[];\n /** Any extra variables passed in during the upload and handed to the format-support container.\n */\n format_extra_variables?: string;\n /** The version of the dataset\n */\n version: number;\n /** The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). 
A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage: DatasetVersionSummaryProcessingStage;\n /** If this is a Deleted dataset this is the date and time the dataset was deleted (an ISO-8601 formatted string in UTC)\n */\n deleted?: string;\n /** If this is a Deleted dataset this is the username of the user that deleted the Dataset\n */\n deleting_user?: string;\n /** The dictionary of label/value pairs\n */\n labels?: DatasetVersionSummaryLabels;\n /** The size, in bytes, of the formatted Dataset\n */\n size?: number;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n}\n\nexport interface DatasetVersionProjectFile {\n project_name: string;\n project: string;\n files: string[];\n}\n\n/**\n * The dictionary of label/value pairs\n\n */\nexport type DatasetVersionDetailLabels = { [key: string]: any };\n\n/**\n * The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type DatasetVersionDetailProcessingStage =\n | \"COPYING\"\n | \"FAILED\"\n | \"FORMATTING\"\n | \"LOADING\"\n | \"DELETING\"\n | \"DONE\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetVersionDetailProcessingStage = {\n COPYING: \"COPYING\" as DatasetVersionDetailProcessingStage,\n FAILED: \"FAILED\" as DatasetVersionDetailProcessingStage,\n FORMATTING: \"FORMATTING\" as DatasetVersionDetailProcessingStage,\n LOADING: \"LOADING\" as DatasetVersionDetailProcessingStage,\n DELETING: \"DELETING\" as DatasetVersionDetailProcessingStage,\n DONE: \"DONE\" as DatasetVersionDetailProcessingStage,\n};\n\nexport interface DatasetVersionDetail {\n /** The owner of the Dataset version\n */\n owner: string;\n /** The source of the Dataset - typically the name of the Dataset that was uploaded or a URL reference\n */\n source_ref: string;\n /** The date and time the Dataset was uploaded (an ISO-8601 formatted string in UTC)\n */\n published: string;\n /** The Dataset MIME type\n */\n type: string;\n /** The filename of the Dataset\n */\n file_name: string;\n /** The list of\n */\n project_files: DatasetVersionProjectFile[];\n /** The list of Project's the Dataset is attached to\n */\n projects: string[];\n /** Any extra variables passed in during the upload and handed to the format-support container.\n */\n format_extra_variables?: string;\n /** The version of the dataset\n */\n version: number;\n /** The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). 
A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage: DatasetVersionDetailProcessingStage;\n /** If this is a Deleted dataset this is the date and time the dataset was deleted (an ISO-8601 formatted string in UTC)\n */\n deleted?: string;\n /** If this is a Deleted dataset this is the username of the user that deleted the Dataset\n */\n deleting_user?: string;\n /** The dictionary of label/value pairs\n */\n labels?: DatasetVersionDetailLabels;\n /** The size, in bytes, of the formatted Dataset\n */\n size?: number;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n}\n\nexport interface DatasetSummary {\n /** The Dataset ID\n */\n dataset_id: string;\n /** The list of editors\n */\n editors: string[];\n /** The set of separate versions of the Dataset\n */\n versions: DatasetVersionSummary[];\n}\n\nexport interface DatasetDetail {\n /** The Dataset ID\n */\n dataset_id: string;\n /** The list of editors\n */\n editors: string[];\n /** The owner of the Dataset\n */\n owner: string;\n /** The set of separate versions of the Dataset\n */\n versions: DatasetVersionDetail[];\n}\n\nexport interface ApplicationSummary {\n /** The application name, the value of the Kubernetes **Custom Resource Definition** `spec.names.kind` property\n */\n kind: string;\n /** The application group\n */\n group?: string;\n /** The application unique reference, the value of the Kubernetes **Custom Resource Definition** `metadata.name` property\n */\n application_id: string;\n}\n\n/**\n * The REST method used. GET methods are not logged\n\n */\nexport type ApiLogDetailMethod = \"DELETE\" | \"PATCH\" | \"POST\" | \"PUT\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const ApiLogDetailMethod = {\n DELETE: \"DELETE\" as ApiLogDetailMethod,\n PATCH: \"PATCH\" as ApiLogDetailMethod,\n POST: \"POST\" as ApiLogDetailMethod,\n PUT: \"PUT\" as ApiLogDetailMethod,\n};\n\nexport interface ApiLogDetail {\n /** The REST method used. GET methods are not logged\n */\n method: ApiLogDetailMethod;\n /** The API path used, e.g. `/dataset`\n */\n path: string;\n /** The date/time the API call began\n */\n began: string;\n /** The REST method approximate execution time (nS) in a humanised form, where 7969400 is presented as 7,969,400\n */\n duration_ns?: string;\n /** The HTTP response status code\n */\n status_code?: number;\n /** A JSON string representign the in-query properties used in the call\n */\n params?: string;\n /** A JSON string representing the in-body properties used in the call\n */\n body?: string;\n /** Not all API calls using your user ID may have been executed by you, In cases where an administrator has executed a REST API call on your behalf, their user ID will be revealed using this property\n */\n impersonator?: string;\n}\n\nexport interface VersionGetResponse {\n /** The Data Manager version. This is guaranteed to be a valid semantic version for official (tagged) images. 
The version value format for unofficial images is a string but otherwise undefined\n */\n version: string;\n}\n\nexport interface UsersGetResponse {\n /** A list of Users that have used the Data Manager\n */\n users: UserSummary[];\n}\n\nexport interface UserApiLogGetResponse {\n /** A list of API loc call records, with the oldest record first in the list\n */\n api_log: ApiLogDetail[];\n}\n\nexport type UserAccountGetResponse = UserAccountDetail;\n\nexport interface TypesGetResponse {\n /** A list of availabel MIME types\n */\n types: TypeSummary[];\n}\n\nexport interface TasksGetResponse {\n /** A list of Tasks\n */\n tasks: TaskSummary[];\n}\n\n/**\n * The purpose of the task. Tasks are typically executed for the purpose of Dataset processing, attaching as Project Files or Application Instance execution. Other fields in this object are only valid if the purpose is known (i.e. is not `UNKNOWN`)\n\n */\nexport type TaskGetResponsePurpose =\n | \"DATASET\"\n | \"FILE\"\n | \"INSTANCE\"\n | \"PROJECT\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskGetResponsePurpose = {\n DATASET: \"DATASET\" as TaskGetResponsePurpose,\n FILE: \"FILE\" as TaskGetResponsePurpose,\n INSTANCE: \"INSTANCE\" as TaskGetResponsePurpose,\n PROJECT: \"PROJECT\" as TaskGetResponsePurpose,\n};\n\nexport interface TaskGetResponse {\n /** The purpose of the task. Tasks are typically executed for the purpose of Dataset processing, attaching as Project Files or Application Instance execution. Other fields in this object are only valid if the purpose is known (i.e. is not `UNKNOWN`)\n */\n purpose: TaskGetResponsePurpose;\n /** The identity of the purpose, where available. This will be the Dataset UUID if the purpose of the task is/was for Dataset processing.\n */\n purpose_id: string;\n /** The version number, relating to the object under control. For Datasets this will be the Dataset version.\n */\n purpose_version?: number;\n /** Is the Task Purpose is `INSTANCE`, and the instance was given a `specification` the specification can be found here. For **Applications** the specification is returned verbatim.\n */\n instance_specification?: string;\n /** If a container image is launched by the task the image name is available here\n */\n image?: string;\n /** The date and time the task was created\n */\n created: string;\n /** True if the task has run to completion. If the task finished successfully the `exit_code` will be zero.\n */\n done: boolean;\n /** True if the Task relates to an object removal, i.e. a DELETE\n */\n removal?: boolean;\n /** Present when `done` and zero if the task finished successfully.\n */\n exit_code?: number;\n /** A (possibly empty) list of application states, the oldest state occupies the first position in the list.\n */\n states?: TaskState[];\n /** A (possibly empty) list of application events. The oldest event occupies the first position in the list.\n */\n events?: TaskEvent[];\n}\n\nexport interface ServiceErrorsGetResponse {\n /** A list of service errors\n */\n service_errors: ServiceErrorSummary[];\n}\n\nexport interface ProjectsGetResponse {\n projects: ProjectDetail[];\n}\n\nexport interface ProjectPostResponse {\n /** The project identity\n */\n project_id: string;\n}\n\nexport type ProjectGetResponse = ProjectDetail;\n\nexport type ProjectDeleteResponse = TaskIdentity;\n\nexport interface JobsGetResponse {\n jobs: JobSummary[];\n}\n\n/**\n * The optional container image type. 
Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type JobGetResponseImageType = \"SIMPLE\" | \"NEXTFLOW\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const JobGetResponseImageType = {\n SIMPLE: \"SIMPLE\" as JobGetResponseImageType,\n NEXTFLOW: \"NEXTFLOW\" as JobGetResponseImageType,\n};\n\nexport interface JobGetResponse {\n /** The Job's unique ID\n */\n id: number;\n application: ApplicationSummary;\n /** The Job collection\n */\n collection: string;\n /** The Job name, unique within a given collection\n */\n job: string;\n /** The Job version\n */\n version: string;\n /** The Job command (usually encoded)\n */\n command: string;\n /** The Job command encoding\n */\n command_encoding: string;\n /** The name of the container image that houses the job\n */\n image_name: string;\n /** The container image tag\n */\n image_tag: string;\n /** The directory where the project volume will be mounted. The root path to the project files\n */\n image_project_directory: string;\n /** The directory used as the container image working directory (if defined)\n */\n image_working_directory?: unknown;\n /** The optional container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n image_type?: JobGetResponseImageType;\n /** The Job's descriptive name\n */\n name: string;\n /** English description of the job\n */\n description?: string;\n /** A URL linking to the Job documentation\n */\n doc_url?: string;\n /** The Job's category\n */\n category?: string;\n /** The list of keywords assigned to the Job\n */\n keywords?: string[];\n variables?: JobVariables;\n}\n\nexport type InstanceTaskPurpose = \"CREATE\" | \"DELETE\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceTaskPurpose = {\n CREATE: \"CREATE\" as InstanceTaskPurpose,\n DELETE: \"DELETE\" as InstanceTaskPurpose,\n};\n\nexport interface InstanceTask {\n id: string;\n purpose: InstanceTaskPurpose;\n}\n\nexport interface InstancesGetResponse {\n instances: InstanceSummary[];\n}\n\n/**\n * The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparring to run, is `PENDING`. The instance is running when the phase is `RUNNING`. 
`COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccesfully.\n\n */\nexport type InstanceGetResponsePhase =\n | \"COMPLETED\"\n | \"CRASH_LOOP_BACKOFF\"\n | \"FAILED\"\n | \"IMAGE_PULL_BACKOFF\"\n | \"PENDING\"\n | \"RUNNING\"\n | \"SUCCEEDED\"\n | \"UNKNOWN\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceGetResponsePhase = {\n COMPLETED: \"COMPLETED\" as InstanceGetResponsePhase,\n CRASH_LOOP_BACKOFF: \"CRASH_LOOP_BACKOFF\" as InstanceGetResponsePhase,\n FAILED: \"FAILED\" as InstanceGetResponsePhase,\n IMAGE_PULL_BACKOFF: \"IMAGE_PULL_BACKOFF\" as InstanceGetResponsePhase,\n PENDING: \"PENDING\" as InstanceGetResponsePhase,\n RUNNING: \"RUNNING\" as InstanceGetResponsePhase,\n SUCCEEDED: \"SUCCEEDED\" as InstanceGetResponsePhase,\n UNKNOWN: \"UNKNOWN\" as InstanceGetResponsePhase,\n};\n\n/**\n * The type of Application, which can be a `job` or an `application`\n\n */\nexport type InstanceGetResponseApplicationType = \"APPLICATION\" | \"JOB\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceGetResponseApplicationType = {\n APPLICATION: \"APPLICATION\" as InstanceGetResponseApplicationType,\n JOB: \"JOB\" as InstanceGetResponseApplicationType,\n};\n\nexport interface InstanceGetResponse {\n /** The Project the Instance is running in\n */\n project_id: string;\n /** The application name\n */\n name: string;\n /** The type of Application, which can be a `job` or an `application`\n */\n application_type: InstanceGetResponseApplicationType;\n /** The application ID\n */\n application_id: string;\n /** The application version\n */\n application_version: string;\n /** The optional application specification. For **Applications** this is returned verbatim. For **Jobs** additional material will be found.\n */\n application_specification?: string;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The application owner, the person who launched tha apllication and is the only user than can stop it.\n */\n owner: string;\n /** The application unique reference\n */\n launched: string;\n /** The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparring to run, is `PENDING`. The instance is running when the phase is `RUNNING`. `COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccesfully.\n */\n phase: InstanceGetResponsePhase;\n /** The application endpoint\n */\n url?: string;\n /** The JSON string representation of the JobDefintion's outputs\n */\n outputs?: string;\n /** Set if the instance output file permissions are expected to be fixed by the Data Manager when the Instance completes.\n */\n fix_permissions?: boolean;\n /** A list of Tasks related to the application. Tasks are responsible for creating and deleteing the instance. You should fins at least one, assuming the corresponding task has not been deleted.\n */\n tasks: InstanceTask[];\n}\n\nexport interface FilesGetResponse {\n /** The project\n */\n project_id: string;\n /** The project path\n */\n path: string;\n /** The dataset identity (not its name). 
A unique reference assigned automatically when uploaded\n */\n files: FilePathFile[];\n /** Sub-directories in the current path\n */\n paths: string[];\n}\n\nexport interface FilePostResponse {\n /** The Project File identity, assigned automatically when a Dataset is added to a Project\n */\n file_id: string;\n /** The name of the File that will appear in the Project\n */\n file_name: string;\n /** The path to the file in the Project, relative to the volume root (mount point). Files in the root of the project will have a path value of '/'\n */\n file_path: string;\n /** The File task identity. The task assigned to convert and attach the Dataset File to the Project\n */\n task_id: string;\n}\n\nexport interface InstancePostResponse {\n /** The application instance identity (not its name). Assigned automatically when created\n */\n instance_id: string;\n /** The instance task identity. The task assigned to process the instance\n */\n task_id: string;\n /** The instance expanded command. Applies only to Job instances.\n */\n command?: string;\n}\n\nexport type InstanceDeleteResponse = TaskIdentity;\n\nexport interface DatasetsGetResponse {\n datasets: DatasetSummary[];\n}\n\nexport type DatasetVersionsGetResponse = DatasetDetail;\n\nexport type DatasetVersionDeleteResponse = TaskIdentity;\n\n/**\n * The Metadata type (an object)\n\n */\nexport type DatasetSchemaGetResponseType = \"object\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetSchemaGetResponseType = {\n object: \"object\" as DatasetSchemaGetResponseType,\n};\n\nexport interface DatasetSchemaGetResponse {\n /** The Metadata title\n */\n title: string;\n /** The Metadata description\n */\n description: string;\n /** The Metadata type (an object)\n */\n type: DatasetSchemaGetResponseType;\n /** Required properties\n */\n required: string[];\n [key: string]: any;\n}\n\nexport interface DatasetMetaGetResponse {\n /** The Metadata title\n */\n dataset_name: string;\n /** The Metadata description\n */\n dataset_id: string;\n /** The Metadata type (an object)\n */\n description: string;\n /** The date and time of creation\n */\n created: string;\n /** The date and time it was last updated\n */\n last_updated: string;\n /** The user who created the Metadata\n */\n created_by: string;\n /** The Metadata version\n */\n metadata_version: string;\n /** The Metadata's annotations\n */\n annotations: unknown[];\n /** The Metadata's labels\n */\n labels: unknown[];\n [key: string]: any;\n}\n\nexport interface DatasetDigestGetResponse {\n /** The Dataset matching the digest\n */\n dataset_id: string;\n /** The Dataset version\n */\n dataset_version: number;\n}\n\nexport interface DatasetPutPostResponse {\n /** The dataset identity (not its name). Assigned automatically when uploaded\n */\n dataset_id: string;\n /** The dataset identity version. Assigned automatically when uploaded\n */\n dataset_version: number;\n /** The dataset task identity. The task assigned to process the dataset\n */\n task_id: string;\n}\n\nexport interface ApplicationsGetResponse {\n /** A list of installed appications, which are application-compliant Kubernetes \"operators\"\n */\n applications: ApplicationSummary[];\n}\n\nexport interface ApplicationGetResponse {\n /** The Application's unique ID\n */\n id: string;\n /** The application specification template. 
Used when creating application instances\n */\n template: string;\n /** A list of instances of the application\n */\n instances: string[];\n /** The application group\n */\n group: string;\n /** The name (kind) of the application\n */\n kind: string;\n /** The list of available versions\n */\n versions: string[];\n}\n\nexport type AdminUserPutResponse = UserAccountDetail;\n\nexport interface AdminJobManifestLoadPutResponse {\n /** True if successful, false otherwise\n */\n status: boolean;\n /** The number of Job Manifests inspected\n */\n manifests_inspected: number;\n /** The number of Job Definitions inspected\n */\n job_definitions_inspected: number;\n /** The number of Jobs inspected\n */\n jobs_inspected: number;\n}\n\nexport interface AccountServerGetNamespaceResponse {\n /** The configured Account Server namespace, which will be an empty string if one is not configured. The AS API is expected as the service `as-api` in this namespace.\n */\n namespace: string;\n /** The name of this Data Manager instance, used to distinguish itself on calls to the Account Server. Every Data Manager is deployed with a unique name, regardless of the Account Server that's being used.\n */\n data_manager_name: string;\n}\n\nexport interface DmError {\n /** Brief error text that can be presented to the user\n */\n error: string;\n}\n"],"mappings":";;;;;;;;AA+aO,IAAM,kCAAkC;AAAA,EAC7C,QAAQ;AACV;AAkDO,IAAM,6BAA6B;AAAA,EACxC,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AACR;AA+CO,IAAM,iBAAiB;AAAA,EAC5B,SAAS;AAAA,EACT,SAAS;AAAA,EACT,OAAO;AAAA,EACP,SAAS;AAAA,EACT,SAAS;AACX;AA8BO,IAAM,iBAAiB;AAAA,EAC5B,UAAU;AAAA,EACV,OAAO;AAAA,EACP,SAAS;AAAA,EACT,MAAM;AAAA,EACN,OAAO;AACT;AAoBO,IAAM,8BAA8B;AAAA,EACzC,UAAU;AAAA,EACV,OAAO;AAAA,EACP,SAAS;AACX;AAsGO,IAAM,sBAAsB;AAAA,EACjC,QAAQ;AAAA,EACR,UAAU;AACZ;AAiDO,IAAM,8BAA8B;AAAA,EACzC,QAAQ;AAAA,EACR,UAAU;AACZ;AAkBO,IAAM,uBAAuB;AAAA,EAClC,WAAW;AAAA,EACX,oBAAoB;AAAA,EACpB,QAAQ;AAAA,EACR,oBAAoB;AAAA,EACpB,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AACX;AASO,IAAM,iCAAiC;AAAA,EAC5C,aAAa;AAAA,EACb,KAAK;AACP;AA0HO,IAAM,uCAAuC;AAAA,EAClD,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AACR;AAwEO,IAAM,sCAAsC;AAAA,EACjD,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AACR;AAgGO,IAAM,qBAAqB;AAAA,EAChC,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,MAAM;AAAA,EACN,KAAK;AACP;AAwEO,IAAM,yBAAyB;AAAA,EACpC,SAAS;AAAA,EACT,MAAM;AAAA,EACN,UAAU;AAAA,EACV,SAAS;AACX;AAqEO,IAAM,0BAA0B;AAAA,EACrC,QAAQ;AAAA,EACR,UAAU;AACZ;AA0DO,IAAM,sBAAsB;AAAA,EACjC,QAAQ;AAAA,EACR,QAAQ;AACV;AA2BO,IAAM,2BAA2B;AAAA,EACtC,WAAW;AAAA,EACX,oBAAoB;AAAA,EACpB,QAAQ;AAAA,EACR,oBAAoB;AAAA,EACpB,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AACX;AASO,IAAM,qCAAqC;AAAA,EAChD,aAAa;AAAA,EACb,KAAK;AACP;AA2GO,IAAM,+BAA+B;AAAA,EAC1C,QAAQ;AACV;","names":[]}
1
+ {"version":3,"sources":["../src/data-manager-api.schemas.ts"],"sourcesContent":["/**\n * Generated by orval v6.7.1 🍺\n * Do not edit manually.\n * Dataset Manager API\n * The Dataset Manager API service.\n\nA service that allows *registered* users to make **Datasets** and associated **Metadata** available to **Applications** and **Jobs** using **Projects** and **Files**.\n\n * OpenAPI spec version: 0.7\n */\nexport type AdminGetServiceErrorsParams = {\n include_acknowleged?: QIncludeAcknowlegedParameter;\n};\n\nexport type GetUserApiLogParams = {\n from?: QFromParameter;\n until?: QUntilParameter;\n};\n\nexport type PatchInstanceParams = { archive?: QInstanceArchiveParameter };\n\nexport type GetInstancesParams = { project_id?: QProjectIdParameter };\n\nexport type GetTaskParams = {\n event_limit?: QEventLimitParameter;\n event_prior_ordinal?: QEventPriorOrdinalParameter;\n};\n\nexport type GetTasksParams = {\n exclude_done?: QExcludeDoneParameter;\n exclude_purpose?: QExcludePurposeParameter;\n project_id?: QProjectIdParameter;\n};\n\nexport type DeleteUnmanagedFileParams = {\n file: QFileParameter;\n path?: QFilePathParameter;\n project_id: QFileProjectIdParameter;\n};\n\nexport type GetFilesParams = {\n project_id: QFileProjectIdParameter;\n path?: QFilePathParameter;\n include_hidden?: QIncludeHiddenParameter;\n};\n\nexport type DeleteDatasetParams = {\n keep_project_files?: QKeepProjectFilesParameter;\n};\n\nexport type GetVersionsParams = { include_deleted?: QIncludeDeletedParameter };\n\nexport type GetProjectFileParams = {\n path?: QFilePathParameter;\n file: QFileParameter;\n};\n\n/**\n * Filter the datasets by a comma separated list of editors\n\n */\nexport type QEditorsParameter = string;\n\n/**\n * Filter the datasets by a comma separated list of owners\n\n */\nexport type QOwnersParameter = string;\n\n/**\n * Filter the datasets by username\n\n */\nexport type QUsernameParameter = string;\n\n/**\n * An until (exclusive) date-time. If provided only API calls made before this will be returned. UTC is assumed if no timezone is provided\n\n */\nexport type QUntilParameter = string;\n\n/**\n * A Project identity\n */\nexport type QProjectIdParameter = string;\n\n/**\n * JSON string containing a list of label/value pairs for the datasets to be filtered by. If the value is set to null, then only the label is matched. If both the label and value are set, then both are matched. e.g. '{\"label1\": null, \"labe2\": \"value2\"}'\n\n */\nexport type QLabelsParameter = string;\n\nexport type GetDatasetsParams = {\n include_deleted?: QIncludeDeletedParameter;\n username?: QUsernameParameter;\n dataset_mime_type?: QDatasetMimeTypeParameter;\n owners?: QOwnersParameter;\n editors?: QEditorsParameter;\n labels?: QLabelsParameter;\n};\n\n/**\n * Whether to convert Project managed file instances to unmanged files\n\n */\nexport type QKeepProjectFilesParameter = boolean;\n\n/**\n * True to archive the instance\n */\nexport type QInstanceArchiveParameter = boolean;\n\n/**\n * Whether to include hidden files and directories\n */\nexport type QIncludeHiddenParameter = boolean;\n\n/**\n * Whether to include records that are deleted\n */\nexport type QIncludeDeletedParameter = boolean;\n\n/**\n * Set to include acknowledged items\n\n */\nexport type QIncludeAcknowlegedParameter = boolean;\n\n/**\n * A from (inclusive) date-time. If provided no API calls prior to this will be returned. 
UTC is assumed if no timezone is provided\n\n */\nexport type QFromParameter = string;\n\n/**\n * The Project identity\n */\nexport type QFileProjectIdParameter = string;\n\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\nexport type QFilePathParameter = string;\n\n/**\n * A project file.\n\n */\nexport type QFileParameter = string;\n\n/**\n * Set to a dot-separated string of purpose enumerations, i.e. `INSTANCE`, `FILE` or `DATASET`. To exclude file and dataset tasks set to `FILE.DATASET`\n\n */\nexport type QExcludePurposeParameter = string;\n\n/**\n * Set true if you want to exclude 'done' tasks, i.e. just see those that are still running.\n\n */\nexport type QExcludeDoneParameter = boolean;\n\n/**\n * The ordinal of a previously received event. If set, only events subsequent to the ordinal provided will be returned. Providing a value of 0 will result in retrieving the first and subsequent events.\n\n */\nexport type QEventPriorOrdinalParameter = number;\n\n/**\n * Maximum number of events to return. If provided, can be 1 or more.\n\n */\nexport type QEventLimitParameter = number;\n\n/**\n * Filter the datasets by the supplied mime_type.\n\n */\nexport type QDatasetMimeTypeParameter = string;\n\n/**\n * Set, if you're an admin, to call the endpoint without impersonation\n\n */\nexport type QDoNotImpersonateParameter = boolean;\n\nexport type GetUserAccountParams = {\n do_not_impersonate?: QDoNotImpersonateParameter;\n};\n\nexport type UserPatchBodyBody = {\n /** If set to a message the user account is suspended, with the user receiving this message when they try and use the API. A suspended user accont can be restored by setting the message to `/restore`\n */\n suspend_message?: string;\n};\n\nexport type UserAccountPatchBodyBody = {\n /** If set the user account becomes private, if provided but false the user account becomes public. Public Users show up in user searches\n */\n private?: boolean;\n /** For **admin** accounts, if set the user account is able to read anything, i.e. `GET` API calls (i.e. endpoints that do not change the Data Manager state) behave as though the caller is acting as *everyone*. An **admin** user would set ths parameter in order to browse the system, and then switch to `impersonate` mode in order to chnage things as the chosen user\n */\n become_admin?: boolean;\n /** For **admin** accounts, if set API calls behave as though the caller is the user being impersonated. To stop impersonating set this to an empty string. To set impersonation to anything other than an empty string you must also set `become_admin`\n */\n impersonate?: string;\n /** For **admin** accounts, if this is set the account for the user being impersonated is patched, rather then the user's own account. To use this you must have a value for `impersonate`\n */\n use_impersonation?: boolean;\n};\n\nexport type ProjectPostBodyBody = {\n name: string;\n /** Whether the project is private. 
You may not be permitted to make the project private, that will depend on the project product you're using to create the project\n */\n private?: boolean;\n /** The Organisation the Project belongs to\n */\n organisation_id: string;\n /** The Organisational Unit the Project belongs to\n */\n unit_id: string;\n /** The Data Manager *Tier Product ID* you're using to create the Project\n */\n tier_product_id: string;\n};\n\nexport type ProjectPatchBodyBody = {\n private?: boolean;\n /** The new name of the ptojct\n */\n name?: string;\n};\n\nexport type ProjectFilePutBodyBody = {\n file: Blob;\n /** An alternative filename to use for the uploaded File\n */\n as_filename?: string;\n /** The Project path of the file.\n */\n path?: string;\n};\n\nexport type JobManifestPutBodyBody = {\n /** The URL of the Job Manifest */\n url: string;\n /** Optional URL header values (a JSON string) */\n header?: string;\n /** Optional URL parameter values (a JSON string) */\n params?: string;\n};\n\nexport type InstancePostBodyBody = {\n /** A supported application. Applications instances are managed using pre-deployed Kubernetes **Operators**. The application ID is a combination of the operator _plural_ and _group_.\n */\n application_id: string;\n /** A supported application version to launch\n */\n application_version: string;\n /** The project to attach\n */\n project_id: string;\n /** The name to use for the instance\n */\n as_name: string;\n /** A URL the DM will use to PUT job progress messages as the requested instance runs. Used, at the moment, for Job execution\n */\n callback_url?: string;\n /** Used in conjunction with the `callback_url` any value provided here will be passed back in the message payload that's delivered to the callback URL. It can be used by the recipient to provide a context that's meaningful\n */\n callback_context?: string;\n /** A debug value that may be used by the instance.\n\nFor Data Manager **Job** applications setting this to anything other zero ('0') prevents the Job's Pod from being deleted automatically, allowing a developer to inspect the Pod's log for example.\n\nThe behaviour of **Application** instances using this property is undefined. It will depend on whether the application **CRD** handles the Data Manager debug field.\n */\n debug?: string;\n /** The instance specification. A JSON string that's application-specific and controls the application's behaviour.\n\nWhen laucnhing a Data Manager **Job** Application you must identify the Job using the properties `collection`, `job` and `version`, e.g. `{\"collection\":\"im-test\",\"job\":\"nop\",\"version\":\"1.0.0\"}`\n\nJobs that offer commands will often advertise a series of **inputs** and **options** where the values can be provided using a **variables** map in the specification. Something like `\"variables\":{\"x\":7}`.\n\nJobs start in a Job-specific **working directory** but the starting directory for any Job can be adjusted by defining a `sub_path` to the root specification. For example, if you want the Job to start in the path `foo/bar` (inside the Job's built-in working directory) you can add `\"sub_path\":\"foo/bar\"` to the specification. 
You can only use a sub-path for a Job if the Job defines a working directory and `sub-path` cannot begin or end with a path separator (`/`).\n */\n specification?: string;\n};\n\nexport type FilePostBodyBody = {\n /** The Dataset UUID for the File that you intend to attach\n */\n dataset_id: string;\n /** The Dataset version to attach\n */\n dataset_version: number;\n /** The Project UUID you're attaching to\n */\n project_id: string;\n /** The desired Dataset file type (a MIME type). Whether or not the chosen fileType is supported will depend on the Dataset\n */\n as_type: string;\n /** A path within the Project to add the File, default is the project root ('/'), the mount-point within the application container. Paths must begin '/'\n */\n path?: string;\n /** Whether to compress the Dataset File as it's attached. Compression is achieved using gzip, resulting in a File ending `.gz`. By default the file will be compressed\n */\n compress?: boolean;\n /** Whether the Dataset File can be modified while in the Project. By default the File cannot be modified\n */\n immutable?: boolean;\n};\n\nexport type DatasetPutBodyBody = {\n /** The MIME type of the Dataset\n */\n dataset_type: string;\n /** Extra variables (text) presented to the Dataset format-support container, which occurs during the upload and database processing stage. The content of the text is Dataset type specific.\n */\n format_extra_variables?: string;\n /** If set the post-format database load action is skipped. This means the dataset is uploaded but its molecules are not entered into the Data Manager database. This can save significant time if you're just experimenting with the dataset and do not need the molecules in the database\n */\n skip_molecule_load?: boolean;\n /** The Project the file belongs to\n */\n project_id: string;\n /** The Project path of the file.\n */\n path: string;\n /** The file name of the file in the Project path to load as a new Dataset.\n */\n file_name: string;\n /** If provided the File becomes a new version of the Dataset named. If not provided this File becomes the first version of a new Dataset, whose ID is returned to you on success.\n */\n dataset_id?: string;\n /** The Organisation you want the Dataset to belong to. If not supplied the Project Organisation is used\n */\n organisation_id?: string;\n /** The Organisational Unit you want the Dataset to belong to. If not supplied the Project Unit is used\n */\n unit_id?: string;\n};\n\nexport type DatasetVersionMetaPostBodyBody = {\n /** JSON string containing a list of parameter changes to the metadata. Only the description is currently allowed.\n */\n meta_properties?: string;\n /** JSON string containing a list of annotations. The format of the labels should match either the Fields Descriptor or Service Execution annotation formats described in the data-manager-metadata library.\n */\n annotations?: string;\n};\n\nexport type DatasetMetaPostBodyBody = {\n /** JSON string containing a list of parameter changes to the metadata. Only the description is currently allowed.\n */\n meta_properties?: string;\n /** JSON string containing a list of labels. 
The format of the labels should match the label annotation format described in the data-manager-metadata library.\n */\n labels?: string;\n};\n\nexport type DatasetPostBodyBody = {\n dataset_file: Blob;\n /** The MIME type of the Dataset\n */\n dataset_type: string;\n /** Extra variables (text) presented to the Dataset format-support container, which occurs during the upload and database processing stage. The content of the text is Dataset type specific.\n */\n format_extra_variables?: string;\n /** If set the post-format database load action is skipped. This means the dataset is uploaded but its molecules are not entered into the Data Manager database. This can save significant time if you're just experimenting with the dataset and do not need the molecules in the database\n */\n skip_molecule_load?: boolean;\n /** An optional new filename to use for the uploaded Dataset. The Dataset will be stored using this name.\n */\n as_filename?: string;\n /** If provided the Dataset becomes a new version of the Dataset named. If not provided this Dataset becomes the first version of a new Dataset, whose ID is returned to you on success.\n */\n dataset_id?: string;\n /** The Organisation you want the Dataset to belong to\n */\n organisation_id: string;\n /** The Organisational Unit you want the Dataset to belong to\n */\n unit_id: string;\n};\n\nexport interface UserSummary {\n /** The user's preferred username\n */\n username: string;\n}\n\nexport interface UserDetail {\n /** The user's preferred username\n */\n username: string;\n /** The date and time the user was first seen (an ISO-8601 formatted string in UTC)\n */\n first_seen?: string;\n /** Set if the user's account is marked as private. Provate accounts do not show up against general queries.\n */\n private: boolean;\n /** True if the account is suspended\n */\n suspended?: boolean;\n /** If the account is suspended this typically displays a reason for suspension\n */\n suspension_message?: string;\n /** For admin accounts, whether the user is acting in an administartive capacity, i.e. acting as everyone\n */\n become_admin?: boolean;\n /** For admin accounts, whether the user is impersonating another user\n */\n impersonate?: string;\n}\n\nexport interface UserAccountDetail {\n user: UserDetail;\n /** Whether the caller has admin privilege */\n caller_has_admin_privilege: boolean;\n}\n\n/**\n * The Schema type (an object)\n\n */\nexport type TypeSummaryFormatterOptionsType = \"object\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TypeSummaryFormatterOptionsType = {\n object: \"object\" as TypeSummaryFormatterOptionsType,\n};\n\n/**\n * If present, contains the formatter_options that can be entered in the format_extra_variables field in the POST /dataset api.\n\n */\nexport type TypeSummaryFormatterOptions = {\n /** The title of the Formatter object\n */\n title: string;\n /** The Schema type (an object)\n */\n type: TypeSummaryFormatterOptionsType;\n /** Required properties\n */\n required: string[];\n [key: string]: any;\n};\n\nexport interface TypeSummary {\n /** The File Type MIME\n */\n mime: string;\n /** The file's supported file extensions. Each type is limited to a limited number of extensions. For example, SDF files must have the extension `.sdf` (or `.sdf.gz`).\n */\n file_extensions: string[];\n /** The file's type's format-support container image (if set). 
Types without a format support image cannot be uploaded, but they might be available for use as destination type when a Dataset is added to a Project.\n */\n formatter_image?: string;\n /** True if Datasets uploaded using this type's support loading of data into the Data Manager data-base.\n */\n formatter_supports_db_load?: boolean;\n /** If present, contains the formatter_options that can be entered in the format_extra_variables field in the POST /dataset api.\n */\n formatter_options?: TypeSummaryFormatterOptions;\n}\n\n/**\n * The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type TaskSummaryProcessingStage =\n | \"COPYING\"\n | \"FAILED\"\n | \"FORMATTING\"\n | \"LOADING\"\n | \"DELETING\"\n | \"DONE\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskSummaryProcessingStage = {\n COPYING: \"COPYING\" as TaskSummaryProcessingStage,\n FAILED: \"FAILED\" as TaskSummaryProcessingStage,\n FORMATTING: \"FORMATTING\" as TaskSummaryProcessingStage,\n LOADING: \"LOADING\" as TaskSummaryProcessingStage,\n DELETING: \"DELETING\" as TaskSummaryProcessingStage,\n DONE: \"DONE\" as TaskSummaryProcessingStage,\n};\n\nexport interface TaskSummary {\n /** The Task UUID\n */\n id: string;\n /** If a container image is launched by the task the image name is available here\n */\n image?: string;\n /** The date and time the task was created\n */\n created: string;\n /** True if the task has run to completion. If the task finished successfully the `exit_code` will be zero.\n */\n done: boolean;\n /** True if the Task relates to an object removal, i.e. a DELETE\n */\n removal?: boolean;\n /** Present when `done` and zero if the task finished successfully.\n */\n exit_code?: number;\n /** The purpose for the Task. Tasks are responsible for _uploading_ a Dataset, _attaching_ a Dataset as a File to a project or launching an Applications _instance_.\n */\n purpose: string;\n /** The related object ID for the Task purpose. If the purpose relates to a **Dataset** upload the ID will be a Dataset ID.\n */\n purpose_id: string;\n /** The related object version for the Task purpose. This field will only be set if the **purpose** is `DATASET`.\n */\n purpose_version?: number;\n /** The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage?: TaskSummaryProcessingStage;\n}\n\n/**\n * The task state. The typical state sequence is `PENDING`, then `STARTED` and finally `SUCCESS`\n\n */\nexport type TaskStateState =\n | \"PENDING\"\n | \"STARTED\"\n | \"RETRY\"\n | \"SUCCESS\"\n | \"FAILURE\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskStateState = {\n PENDING: \"PENDING\" as TaskStateState,\n STARTED: \"STARTED\" as TaskStateState,\n RETRY: \"RETRY\" as TaskStateState,\n SUCCESS: \"SUCCESS\" as TaskStateState,\n FAILURE: \"FAILURE\" as TaskStateState,\n};\n\nexport interface TaskState {\n /** The task state. 
The typical state sequence is `PENDING`, then `STARTED` and finally `SUCCESS`\n */\n state: TaskStateState;\n /** A short message accompanying the state, generally only found when the state is `FAILURE`\n */\n message?: string;\n /** The date and time of the state change\n */\n time: string;\n}\n\nexport interface TaskIdentity {\n task_id: string;\n}\n\n/**\n * The level of the message, a typical logging framework value\n\n */\nexport type TaskEventLevel =\n | \"CRITICAL\"\n | \"ERROR\"\n | \"WARNING\"\n | \"INFO\"\n | \"DEBUG\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskEventLevel = {\n CRITICAL: \"CRITICAL\" as TaskEventLevel,\n ERROR: \"ERROR\" as TaskEventLevel,\n WARNING: \"WARNING\" as TaskEventLevel,\n INFO: \"INFO\" as TaskEventLevel,\n DEBUG: \"DEBUG\" as TaskEventLevel,\n};\n\nexport interface TaskEvent {\n /** The event sequence number. The first event is always '1'.\n */\n ordinal: number;\n /** A short message.\n */\n message: string;\n /** The level of the message, a typical logging framework value\n */\n level: TaskEventLevel;\n /** The date and time the event was generated\n */\n time: string;\n}\n\nexport type ServiceErrorSummarySeverity = \"CRITICAL\" | \"ERROR\" | \"WARNING\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const ServiceErrorSummarySeverity = {\n CRITICAL: \"CRITICAL\" as ServiceErrorSummarySeverity,\n ERROR: \"ERROR\" as ServiceErrorSummarySeverity,\n WARNING: \"WARNING\" as ServiceErrorSummarySeverity,\n};\n\nexport interface ServiceErrorSummary {\n id: number;\n created: string;\n summary: string;\n severity: ServiceErrorSummarySeverity;\n hostname: string;\n error_code?: number;\n stack_trace: string;\n acknowledged: boolean;\n acknowledged_at?: string;\n acknowledging_user?: string;\n}\n\nexport interface ProjectFileDetail {\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The ProjectFile's Dataset origin\n */\n dataset_id?: string;\n /** The ProjectFile's Dataset origin version\n */\n dataset_version?: number;\n /** The ProjectFile's unique ID\n */\n file_id: string;\n /** The ProjectFile's filename within the Project\n */\n file_name: string;\n /** The ProjectFile's path within the Project volume\n */\n file_path: string;\n /** True if the ProjectFile cannot be modified while in the Project\n */\n immutable: boolean;\n /** The owner of the ProjectFile. This is the user that added the Dataset (as this file) to the Project\n */\n owner: string;\n /** The Project the ProjectFile belongs to\n */\n project_id?: string;\n /** The ProjectFile MIME type\n */\n mime_type: string;\n}\n\nexport interface ProjectDetail {\n /** The project name\n */\n name: string;\n /** The project unique reference\n */\n project_id: string;\n /** The Account Server Product the Project belongs to\n */\n product_id?: string;\n /** The Account Server Organisation the Project Product belongs to\n */\n organisation_id?: string;\n /** The Account Server Unit the Project Product belongs to\n */\n unit_id?: string;\n /** The project (owner) creator\n */\n owner: string;\n /** True if the project is private. Private projects are only visible to the owner and its editors.\n */\n private: boolean;\n /** An editor (user_id) of the project */\n editors: string[];\n /** The approximate size of all the files in the Project volume. This is updated regaularly throughout the day and its current size may differ from what is reported here. 
The size resolution is 1MiB (the smallest billable unit). Therefore a project that contains 32KiB of files is recorded as 1MiB in size */\n size: number;\n /** A list of managed files in the Project\n */\n files?: ProjectFileDetail[];\n}\n\nexport interface JobOrderDetail {\n options: string[];\n}\n\nexport interface JobVariables {\n /** The Job command's variable ordering declaration. Contains a list of `options` in an ordered list, the order defines the order of presentation of the Job's variables.\n */\n order?: JobOrderDetail;\n /** The Job command's inputs. A string that represents a JSONSchema for the command's inputs, essentially the **variables/inputs** block of the Job's JobDefinition.\n */\n inputs?: string;\n /** The Job command's outputs. A string that represents a JSONSchema for the command's inputs, essentially the **variables/outputs** block of the Job's JobDefinition.\n */\n outputs?: string;\n /** The Job command's options. A string that represents a JSONSchema for the command's options, essentially the **variables/options** block of the Job's JobDefinition.\n */\n options?: string;\n}\n\n/**\n * The optional container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type JobSummaryImageType = \"SIMPLE\" | \"NEXTFLOW\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const JobSummaryImageType = {\n SIMPLE: \"SIMPLE\" as JobSummaryImageType,\n NEXTFLOW: \"NEXTFLOW\" as JobSummaryImageType,\n};\n\nexport interface JobSummary {\n /** The Job's unique ID\n */\n id: number;\n /** The Job namespace\n */\n collection: string;\n /** The Job name, unique within a given namespace\n */\n job: string;\n /** The Job version\n */\n version: string;\n /** The optional container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n image_type: JobSummaryImageType;\n /** The Job's category\n */\n category?: string;\n /** The list of keywords assigned to the Job\n */\n keywords?: string[];\n /** The name of the job in English\n */\n name: string;\n /** The description of the job in English\n */\n description?: string;\n /** A URL linking to the Job documentation\n */\n doc_url?: string;\n}\n\nexport interface JobApplication {\n /** The Job's operator ID */\n id: string;\n /** The Job's operator version */\n version: string;\n}\n\n/**\n * The Job container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type InstanceSummaryJobImageType = \"SIMPLE\" | \"NEXTFLOW\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryJobImageType = {\n SIMPLE: \"SIMPLE\" as InstanceSummaryJobImageType,\n NEXTFLOW: \"NEXTFLOW\" as InstanceSummaryJobImageType,\n};\n\n/**\n * The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparring to run, is `PENDING`. The instance is running when the phase is `RUNNING`. 
`COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccesfully.\n\n */\nexport type InstanceSummaryPhase =\n | \"COMPLETED\"\n | \"CRASH_LOOP_BACKOFF\"\n | \"FAILED\"\n | \"IMAGE_PULL_BACKOFF\"\n | \"PENDING\"\n | \"RUNNING\"\n | \"SUCCEEDED\"\n | \"UNKNOWN\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryPhase = {\n COMPLETED: \"COMPLETED\" as InstanceSummaryPhase,\n CRASH_LOOP_BACKOFF: \"CRASH_LOOP_BACKOFF\" as InstanceSummaryPhase,\n FAILED: \"FAILED\" as InstanceSummaryPhase,\n IMAGE_PULL_BACKOFF: \"IMAGE_PULL_BACKOFF\" as InstanceSummaryPhase,\n PENDING: \"PENDING\" as InstanceSummaryPhase,\n RUNNING: \"RUNNING\" as InstanceSummaryPhase,\n SUCCEEDED: \"SUCCEEDED\" as InstanceSummaryPhase,\n UNKNOWN: \"UNKNOWN\" as InstanceSummaryPhase,\n};\n\n/**\n * The type of Application, which can be a `job` or an `application`\n\n */\nexport type InstanceSummaryApplicationType = \"APPLICATION\" | \"JOB\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryApplicationType = {\n APPLICATION: \"APPLICATION\" as InstanceSummaryApplicationType,\n JOB: \"JOB\" as InstanceSummaryApplicationType,\n};\n\nexport interface InstanceSummary {\n /** The application instance ID\n */\n id: string;\n /** The application ID\n */\n application_id: string;\n /** The application version\n */\n application_version: string;\n /** The User's specification, provided when the application was laucnhed\n */\n application_specification?: string;\n /** The type of Application, which can be a `job` or an `application`\n */\n application_type: InstanceSummaryApplicationType;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparring to run, is `PENDING`. The instance is running when the phase is `RUNNING`. `COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccesfully.\n */\n phase: InstanceSummaryPhase;\n /** The data and time (UTC) the instance was laucnhed\n */\n launched: string;\n /** The application instance owner, the person who launched tha application and is the only user than can stop it.\n */\n owner: string;\n /** The Project the instances is running in.\n */\n project_id: string;\n /** The Instance name\n */\n name: string;\n /** For applications (not Jobs) this is the URL the application has exposed. It will ponly be available when the application has started.\n */\n url?: string;\n /** If the instance relates to a job, this will be the job collection, as defined in the original collection's job defintion.\n */\n job_collection?: string;\n /** If the instance relates to a job, this will be the job, as defined in the original collection's job defintion.\n */\n job_job?: string;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job defintion.\n */\n job_name?: string;\n /** The Job defintion's unique ID\n */\n job_id?: number;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job defintion.\n */\n job_version?: string;\n /** The Job container image type. 
Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n job_image_type?: InstanceSummaryJobImageType;\n /** The instance's knwon outputs, a JSON string defining a map of all the outputs. Typcially applied only to JOB applcation types\n */\n outputs?: string;\n /** Set if the instance output file permissions are expected to be fixed by the Data Manager when the Instance completes.\n */\n fix_permissions?: boolean;\n}\n\nexport interface FileStat {\n /** The size of the file in bytes\n */\n size: number;\n /** The date and time (UTC) of the last modification\n */\n modified: string;\n}\n\nexport interface FilePathFile {\n /** The file's Dataset ID (if the file belongs to a Dataset)\n */\n dataset_id?: string;\n /** The file's Dataset version (if the file belongs to a Dataset)\n */\n dataset_version?: number;\n /** The file name\n */\n file_name: string;\n /** The ID of the file (if the file belongs to a Dataset)\n */\n file_id?: string;\n /** Whether the file is immutable (read-only)\n */\n immutable?: boolean;\n /** The file's MIME type\n */\n mime_type?: string;\n /** The file's owner\n */\n owner: string;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n stat: FileStat;\n}\n\n/**\n * The dictionary of label/value pairs\n\n */\nexport type DatasetVersionSummaryLabels = { [key: string]: any };\n\n/**\n * The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type DatasetVersionSummaryProcessingStage =\n | \"COPYING\"\n | \"FAILED\"\n | \"FORMATTING\"\n | \"LOADING\"\n | \"DELETING\"\n | \"DONE\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetVersionSummaryProcessingStage = {\n COPYING: \"COPYING\" as DatasetVersionSummaryProcessingStage,\n FAILED: \"FAILED\" as DatasetVersionSummaryProcessingStage,\n FORMATTING: \"FORMATTING\" as DatasetVersionSummaryProcessingStage,\n LOADING: \"LOADING\" as DatasetVersionSummaryProcessingStage,\n DELETING: \"DELETING\" as DatasetVersionSummaryProcessingStage,\n DONE: \"DONE\" as DatasetVersionSummaryProcessingStage,\n};\n\nexport interface DatasetVersionSummary {\n /** The owner of the Dataset version\n */\n owner?: string;\n /** The source of the Dataset - typically the name of the Dataset that was uploaded or a URL reference\n */\n source_ref: string;\n /** The date and time the Dataset was uploaded (an ISO-8601 formatted string in UTC)\n */\n published: string;\n /** The Dataset MIME type\n */\n type: string;\n /** The filename of the Dataset\n */\n file_name: string;\n /** The list of Project's the Dataset is attached to\n */\n projects: string[];\n /** Any extra variables passed in during the upload and handed to the format-support container.\n */\n format_extra_variables?: string;\n /** The version of the dataset\n */\n version: number;\n /** The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). 
A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage: DatasetVersionSummaryProcessingStage;\n /** If this is a Deleted dataset this is the date and time the dataset was deleted (an ISO-8601 formatted string in UTC)\n */\n deleted?: string;\n /** If this is a Deleted dataset this is the username of the user that deleted the Dataset\n */\n deleting_user?: string;\n /** The dictionary of label/value pairs\n */\n labels?: DatasetVersionSummaryLabels;\n /** The size, in bytes, of the formatted Dataset\n */\n size?: number;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n}\n\nexport interface DatasetVersionProjectFile {\n project_name: string;\n project: string;\n files: string[];\n}\n\n/**\n * The dictionary of label/value pairs\n\n */\nexport type DatasetVersionDetailLabels = { [key: string]: any };\n\n/**\n * The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type DatasetVersionDetailProcessingStage =\n | \"COPYING\"\n | \"FAILED\"\n | \"FORMATTING\"\n | \"LOADING\"\n | \"DELETING\"\n | \"DONE\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetVersionDetailProcessingStage = {\n COPYING: \"COPYING\" as DatasetVersionDetailProcessingStage,\n FAILED: \"FAILED\" as DatasetVersionDetailProcessingStage,\n FORMATTING: \"FORMATTING\" as DatasetVersionDetailProcessingStage,\n LOADING: \"LOADING\" as DatasetVersionDetailProcessingStage,\n DELETING: \"DELETING\" as DatasetVersionDetailProcessingStage,\n DONE: \"DONE\" as DatasetVersionDetailProcessingStage,\n};\n\nexport interface DatasetVersionDetail {\n /** The owner of the Dataset version\n */\n owner: string;\n /** The source of the Dataset - typically the name of the Dataset that was uploaded or a URL reference\n */\n source_ref: string;\n /** The date and time the Dataset was uploaded (an ISO-8601 formatted string in UTC)\n */\n published: string;\n /** The Dataset MIME type\n */\n type: string;\n /** The filename of the Dataset\n */\n file_name: string;\n /** The list of\n */\n project_files: DatasetVersionProjectFile[];\n /** The list of Project's the Dataset is attached to\n */\n projects: string[];\n /** Any extra variables passed in during the upload and handed to the format-support container.\n */\n format_extra_variables?: string;\n /** The version of the dataset\n */\n version: number;\n /** The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). 
A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage: DatasetVersionDetailProcessingStage;\n /** If this is a Deleted dataset this is the date and time the dataset was deleted (an ISO-8601 formatted string in UTC)\n */\n deleted?: string;\n /** If this is a Deleted dataset this is the username of the user that deleted the Dataset\n */\n deleting_user?: string;\n /** The dictionary of label/value pairs\n */\n labels?: DatasetVersionDetailLabels;\n /** The size, in bytes, of the formatted Dataset\n */\n size?: number;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n}\n\nexport interface DatasetSummary {\n /** The Dataset ID\n */\n dataset_id: string;\n /** The list of editors\n */\n editors: string[];\n /** The set of separate versions of the Dataset\n */\n versions: DatasetVersionSummary[];\n}\n\nexport interface DatasetDetail {\n /** The Dataset ID\n */\n dataset_id: string;\n /** The list of editors\n */\n editors: string[];\n /** The owner of the Dataset\n */\n owner: string;\n /** The set of separate versions of the Dataset\n */\n versions: DatasetVersionDetail[];\n}\n\nexport interface ApplicationSummary {\n /** The application name, the value of the Kubernetes **Custom Resource Definition** `spec.names.kind` property\n */\n kind: string;\n /** The application group\n */\n group?: string;\n /** The application unique reference, the value of the Kubernetes **Custom Resource Definition** `metadata.name` property\n */\n application_id: string;\n}\n\n/**\n * The REST method used. GET methods are not logged\n\n */\nexport type ApiLogDetailMethod = \"DELETE\" | \"PATCH\" | \"POST\" | \"PUT\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const ApiLogDetailMethod = {\n DELETE: \"DELETE\" as ApiLogDetailMethod,\n PATCH: \"PATCH\" as ApiLogDetailMethod,\n POST: \"POST\" as ApiLogDetailMethod,\n PUT: \"PUT\" as ApiLogDetailMethod,\n};\n\nexport interface ApiLogDetail {\n /** The REST method used. GET methods are not logged\n */\n method: ApiLogDetailMethod;\n /** The API path used, e.g. `/dataset`\n */\n path: string;\n /** The date/time the API call began\n */\n began: string;\n /** The REST method approximate execution time (nS) in a humanised form, where 7969400 is presented as 7,969,400\n */\n duration_ns?: string;\n /** The HTTP response status code\n */\n status_code?: number;\n /** A JSON string representign the in-query properties used in the call\n */\n params?: string;\n /** A JSON string representing the in-body properties used in the call\n */\n body?: string;\n /** Not all API calls using your user ID may have been executed by you, In cases where an administrator has executed a REST API call on your behalf, their user ID will be revealed using this property\n */\n impersonator?: string;\n}\n\nexport interface VersionGetResponse {\n /** The Data Manager version. This is guaranteed to be a valid semantic version for official (tagged) images. 
The version value format for unofficial images is a string but otherwise undefined\n */\n version: string;\n}\n\nexport interface UsersGetResponse {\n /** A list of Users that have used the Data Manager\n */\n users: UserSummary[];\n}\n\nexport interface UserApiLogGetResponse {\n /** A list of API loc call records, with the oldest record first in the list\n */\n api_log: ApiLogDetail[];\n}\n\nexport type UserAccountGetResponse = UserAccountDetail;\n\nexport interface TypesGetResponse {\n /** A list of availabel MIME types\n */\n types: TypeSummary[];\n}\n\nexport interface TasksGetResponse {\n /** A list of Tasks\n */\n tasks: TaskSummary[];\n}\n\n/**\n * The purpose of the task. Tasks are typically executed for the purpose of Dataset processing, attaching as Project Files or Application Instance execution. Other fields in this object are only valid if the purpose is known (i.e. is not `UNKNOWN`)\n\n */\nexport type TaskGetResponsePurpose =\n | \"DATASET\"\n | \"FILE\"\n | \"INSTANCE\"\n | \"PROJECT\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskGetResponsePurpose = {\n DATASET: \"DATASET\" as TaskGetResponsePurpose,\n FILE: \"FILE\" as TaskGetResponsePurpose,\n INSTANCE: \"INSTANCE\" as TaskGetResponsePurpose,\n PROJECT: \"PROJECT\" as TaskGetResponsePurpose,\n};\n\nexport interface TaskGetResponse {\n /** The purpose of the task. Tasks are typically executed for the purpose of Dataset processing, attaching as Project Files or Application Instance execution. Other fields in this object are only valid if the purpose is known (i.e. is not `UNKNOWN`)\n */\n purpose: TaskGetResponsePurpose;\n /** The identity of the purpose, where available. This will be the Dataset UUID if the purpose of the task is/was for Dataset processing.\n */\n purpose_id: string;\n /** The version number, relating to the object under control. For Datasets this will be the Dataset version.\n */\n purpose_version?: number;\n /** Is the Task Purpose is `INSTANCE`, and the instance was given a `specification` the specification can be found here. For **Applications** the specification is returned verbatim.\n */\n instance_specification?: string;\n /** If a container image is launched by the task the image name is available here\n */\n image?: string;\n /** The date and time the task was created\n */\n created: string;\n /** True if the task has run to completion. If the task finished successfully the `exit_code` will be zero.\n */\n done: boolean;\n /** True if the Task relates to an object removal, i.e. a DELETE\n */\n removal?: boolean;\n /** Present when `done` and zero if the task finished successfully.\n */\n exit_code?: number;\n /** A (possibly empty) list of application states, the oldest state occupies the first position in the list.\n */\n states?: TaskState[];\n /** A (possibly empty) list of application events. The oldest event occupies the first position in the list.\n */\n events?: TaskEvent[];\n}\n\nexport interface ServiceErrorsGetResponse {\n /** A list of service errors\n */\n service_errors: ServiceErrorSummary[];\n}\n\nexport interface ProjectsGetResponse {\n projects: ProjectDetail[];\n}\n\nexport interface ProjectPostResponse {\n /** The project identity\n */\n project_id: string;\n}\n\nexport type ProjectGetResponse = ProjectDetail;\n\nexport type ProjectDeleteResponse = TaskIdentity;\n\nexport interface JobsGetResponse {\n jobs: JobSummary[];\n}\n\n/**\n * The optional container image type. 
Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type JobGetResponseImageType = \"SIMPLE\" | \"NEXTFLOW\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const JobGetResponseImageType = {\n SIMPLE: \"SIMPLE\" as JobGetResponseImageType,\n NEXTFLOW: \"NEXTFLOW\" as JobGetResponseImageType,\n};\n\nexport interface JobGetResponse {\n /** The Job's unique ID\n */\n id: number;\n application: ApplicationSummary;\n /** The Job collection\n */\n collection: string;\n /** The Job name, unique within a given collection\n */\n job: string;\n /** The Job version\n */\n version: string;\n /** The Job command (usually encoded)\n */\n command: string;\n /** The Job command encoding\n */\n command_encoding: string;\n /** The name of the container image that houses the job\n */\n image_name: string;\n /** The container image tag\n */\n image_tag: string;\n /** The directory where the project volume will be mounted. The root path to the project files\n */\n image_project_directory: string;\n /** The directory used as the container image working directory (if defined)\n */\n image_working_directory?: unknown;\n /** The optional container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n image_type?: JobGetResponseImageType;\n /** The Job's descriptive name\n */\n name: string;\n /** English description of the job\n */\n description?: string;\n /** A URL linking to the Job documentation\n */\n doc_url?: string;\n /** The Job's category\n */\n category?: string;\n /** The list of keywords assigned to the Job\n */\n keywords?: string[];\n variables?: JobVariables;\n}\n\nexport type InstanceTaskPurpose = \"CREATE\" | \"DELETE\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceTaskPurpose = {\n CREATE: \"CREATE\" as InstanceTaskPurpose,\n DELETE: \"DELETE\" as InstanceTaskPurpose,\n};\n\nexport interface InstanceTask {\n id: string;\n purpose: InstanceTaskPurpose;\n}\n\nexport interface InstancesGetResponse {\n instances: InstanceSummary[];\n}\n\n/**\n * The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparring to run, is `PENDING`. The instance is running when the phase is `RUNNING`. 
`COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccesfully.\n\n */\nexport type InstanceGetResponsePhase =\n | \"COMPLETED\"\n | \"CRASH_LOOP_BACKOFF\"\n | \"FAILED\"\n | \"IMAGE_PULL_BACKOFF\"\n | \"PENDING\"\n | \"RUNNING\"\n | \"SUCCEEDED\"\n | \"UNKNOWN\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceGetResponsePhase = {\n COMPLETED: \"COMPLETED\" as InstanceGetResponsePhase,\n CRASH_LOOP_BACKOFF: \"CRASH_LOOP_BACKOFF\" as InstanceGetResponsePhase,\n FAILED: \"FAILED\" as InstanceGetResponsePhase,\n IMAGE_PULL_BACKOFF: \"IMAGE_PULL_BACKOFF\" as InstanceGetResponsePhase,\n PENDING: \"PENDING\" as InstanceGetResponsePhase,\n RUNNING: \"RUNNING\" as InstanceGetResponsePhase,\n SUCCEEDED: \"SUCCEEDED\" as InstanceGetResponsePhase,\n UNKNOWN: \"UNKNOWN\" as InstanceGetResponsePhase,\n};\n\n/**\n * The type of Application, which can be a `job` or an `application`\n\n */\nexport type InstanceGetResponseApplicationType = \"APPLICATION\" | \"JOB\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceGetResponseApplicationType = {\n APPLICATION: \"APPLICATION\" as InstanceGetResponseApplicationType,\n JOB: \"JOB\" as InstanceGetResponseApplicationType,\n};\n\nexport interface InstanceGetResponse {\n /** The Project the Instance is running in\n */\n project_id: string;\n /** The application name\n */\n name: string;\n /** The type of Application, which can be a `job` or an `application`\n */\n application_type: InstanceGetResponseApplicationType;\n /** The application ID\n */\n application_id: string;\n /** The application version\n */\n application_version: string;\n /** The optional application specification. For **Applications** this is returned verbatim. For **Jobs** additional material will be found.\n */\n application_specification?: string;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The application owner, the person who launched tha apllication and is the only user than can stop it.\n */\n owner: string;\n /** The application unique reference\n */\n launched: string;\n /** The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparring to run, is `PENDING`. The instance is running when the phase is `RUNNING`. `COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccesfully.\n */\n phase: InstanceGetResponsePhase;\n /** The application endpoint\n */\n url?: string;\n /** The JSON string representation of the JobDefintion's outputs\n */\n outputs?: string;\n /** Set if the instance output file permissions are expected to be fixed by the Data Manager when the Instance completes.\n */\n fix_permissions?: boolean;\n /** A list of Tasks related to the application. Tasks are responsible for creating and deleteing the instance. You should fins at least one, assuming the corresponding task has not been deleted.\n */\n tasks: InstanceTask[];\n}\n\nexport interface FilesGetResponse {\n /** The project\n */\n project_id: string;\n /** The project path\n */\n path: string;\n /** The dataset identity (not its name). 
A unique reference assigned automatically when uploaded\n */\n files: FilePathFile[];\n /** Sub-directories in the current path\n */\n paths: string[];\n}\n\nexport interface FilePostResponse {\n /** The Project File identity, assigned automatically when a Dataset is added to a Project\n */\n file_id: string;\n /** The name of the File that will appear in the Project\n */\n file_name: string;\n /** The path to the file in the Project, relative to the volume root (mount point). Files in the root of the project will have a path value of '/'\n */\n file_path: string;\n /** The File task identity. The task assigned to convert and attach the Dataset File to the Project\n */\n task_id: string;\n}\n\nexport interface InstancePostResponse {\n /** The application instance identity (not its name). Assigned automatically when created\n */\n instance_id: string;\n /** The instance task identity. The task assigned to process the instance\n */\n task_id: string;\n /** The instance expanded command. Applies only to Job instances.\n */\n command?: string;\n}\n\nexport type InstanceDeleteResponse = TaskIdentity;\n\nexport interface DatasetsGetResponse {\n datasets: DatasetSummary[];\n}\n\nexport type DatasetVersionsGetResponse = DatasetDetail;\n\nexport type DatasetVersionDeleteResponse = TaskIdentity;\n\n/**\n * The Metadata type (an object)\n\n */\nexport type DatasetSchemaGetResponseType = \"object\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetSchemaGetResponseType = {\n object: \"object\" as DatasetSchemaGetResponseType,\n};\n\nexport interface DatasetSchemaGetResponse {\n /** The Metadata title\n */\n title: string;\n /** The Metadata description\n */\n description: string;\n /** The Metadata type (an object)\n */\n type: DatasetSchemaGetResponseType;\n /** Required properties\n */\n required: string[];\n [key: string]: any;\n}\n\nexport interface DatasetMetaGetResponse {\n /** The Metadata title\n */\n dataset_name: string;\n /** The Metadata description\n */\n dataset_id: string;\n /** The Metadata type (an object)\n */\n description: string;\n /** The date and time of creation\n */\n created: string;\n /** The date and time it was last updated\n */\n last_updated: string;\n /** The user who created the Metadata\n */\n created_by: string;\n /** The Metadata version\n */\n metadata_version: string;\n /** The Metadata's annotations\n */\n annotations: unknown[];\n /** The Metadata's labels\n */\n labels: unknown[];\n [key: string]: any;\n}\n\nexport interface DatasetDigestGetResponse {\n /** The Dataset matching the digest\n */\n dataset_id: string;\n /** The Dataset version\n */\n dataset_version: number;\n}\n\nexport interface DatasetPutPostResponse {\n /** The dataset identity (not its name). Assigned automatically when uploaded\n */\n dataset_id: string;\n /** The dataset identity version. Assigned automatically when uploaded\n */\n dataset_version: number;\n /** The dataset task identity. The task assigned to process the dataset\n */\n task_id: string;\n}\n\nexport interface ApplicationsGetResponse {\n /** A list of installed appications, which are application-compliant Kubernetes \"operators\"\n */\n applications: ApplicationSummary[];\n}\n\nexport interface ApplicationGetResponse {\n /** The Application's unique ID\n */\n id: string;\n /** The application specification template. 
Used when creating application instances\n */\n template: string;\n /** A list of instances of the application\n */\n instances: string[];\n /** The application group\n */\n group: string;\n /** The name (kind) of the application\n */\n kind: string;\n /** The list of available versions\n */\n versions: string[];\n}\n\nexport type AdminUserPutResponse = UserAccountDetail;\n\nexport interface AdminJobManifestLoadPutResponse {\n /** True if successful, false otherwise\n */\n status: boolean;\n /** The number of Job Manifests inspected\n */\n manifests_inspected: number;\n /** The number of Job Definitions inspected\n */\n job_definitions_inspected: number;\n /** The number of Jobs inspected\n */\n jobs_inspected: number;\n}\n\nexport interface AccountServerGetNamespaceResponse {\n /** The configured Account Server namespace, which will be an empty string if one is not configured. The AS API is expected as the service `as-api` in this namespace.\n */\n namespace: string;\n /** The name of this Data Manager instance, used to distinguish itself on calls to the Account Server. Every Data Manager is deployed with a unique name, regardless of the Account Server that's being used.\n */\n data_manager_name: string;\n}\n\nexport interface DmError {\n /** Brief error text that can be presented to the user\n */\n error: string;\n}\n"],"mappings":";;;;;;;;AA+aO,IAAM,kCAAkC;AAAA,EAC7C,QAAQ;AACV;AAkDO,IAAM,6BAA6B;AAAA,EACxC,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AACR;AA+CO,IAAM,iBAAiB;AAAA,EAC5B,SAAS;AAAA,EACT,SAAS;AAAA,EACT,OAAO;AAAA,EACP,SAAS;AAAA,EACT,SAAS;AACX;AA8BO,IAAM,iBAAiB;AAAA,EAC5B,UAAU;AAAA,EACV,OAAO;AAAA,EACP,SAAS;AAAA,EACT,MAAM;AAAA,EACN,OAAO;AACT;AAoBO,IAAM,8BAA8B;AAAA,EACzC,UAAU;AAAA,EACV,OAAO;AAAA,EACP,SAAS;AACX;AAyGO,IAAM,sBAAsB;AAAA,EACjC,QAAQ;AAAA,EACR,UAAU;AACZ;AAiDO,IAAM,8BAA8B;AAAA,EACzC,QAAQ;AAAA,EACR,UAAU;AACZ;AAkBO,IAAM,uBAAuB;AAAA,EAClC,WAAW;AAAA,EACX,oBAAoB;AAAA,EACpB,QAAQ;AAAA,EACR,oBAAoB;AAAA,EACpB,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AACX;AASO,IAAM,iCAAiC;AAAA,EAC5C,aAAa;AAAA,EACb,KAAK;AACP;AA0HO,IAAM,uCAAuC;AAAA,EAClD,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AACR;AAwEO,IAAM,sCAAsC;AAAA,EACjD,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AACR;AAgGO,IAAM,qBAAqB;AAAA,EAChC,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,MAAM;AAAA,EACN,KAAK;AACP;AAwEO,IAAM,yBAAyB;AAAA,EACpC,SAAS;AAAA,EACT,MAAM;AAAA,EACN,UAAU;AAAA,EACV,SAAS;AACX;AAqEO,IAAM,0BAA0B;AAAA,EACrC,QAAQ;AAAA,EACR,UAAU;AACZ;AA0DO,IAAM,sBAAsB;AAAA,EACjC,QAAQ;AAAA,EACR,QAAQ;AACV;AA2BO,IAAM,2BAA2B;AAAA,EACtC,WAAW;AAAA,EACX,oBAAoB;AAAA,EACpB,QAAQ;AAAA,EACR,oBAAoB;AAAA,EACpB,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AACX;AASO,IAAM,qCAAqC;AAAA,EAChD,aAAa;AAAA,EACb,KAAK;AACP;AA2GO,IAAM,+BAA+B;AAAA,EAC1C,QAAQ;AACV;","names":[]}
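The schema embedded in the source map above explains how a Job instance is launched: the specification property of InstancePostBodyBody is a JSON string that identifies the Job by collection, job and version, passes values through a variables map, and may set a sub_path that must not begin or end with "/". A minimal TypeScript sketch of such a payload, reusing the example values quoted in the schema comments; the placeholder IDs are assumptions, and the body is built as a plain object because the generated client's public entry points are not shown in this diff.

// Sketch of a Job-instance launch body matching the documented
// InstancePostBodyBody shape. Note that "specification" is a JSON
// string, not a nested object.
const specification = {
  collection: "im-test", // example Job identity from the schema comments
  job: "nop",
  version: "1.0.0",
  variables: { x: 7 },   // values for the Job's advertised inputs/options
  sub_path: "foo/bar",   // optional starting directory; no leading/trailing "/"
};

const instancePostBody = {
  application_id: "<application id>",           // placeholder: operator plural + group
  application_version: "<application version>", // placeholder
  project_id: "<project uuid>",                 // placeholder
  as_name: "nop-example-run",
  specification: JSON.stringify(specification),
};

console.log(instancePostBody);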
@@ -1,4 +1,4 @@
1
- import { L as InstancePostBodyBody, bb as customInstance, aU as InstancePostResponse, bc as ErrorType, b7 as DmError, a as GetInstancesParams, aO as InstancesGetResponse, aR as InstanceGetResponse, a2 as TaskIdentity, P as PatchInstanceParams } from '../custom-instance-f51d6877.js';
1
+ import { L as InstancePostBodyBody, bb as customInstance, aU as InstancePostResponse, bc as ErrorType, b7 as DmError, a as GetInstancesParams, aO as InstancesGetResponse, aR as InstanceGetResponse, a2 as TaskIdentity, P as PatchInstanceParams } from '../custom-instance-6e6b0801.js';
2
2
  import * as react_query from 'react-query';
3
3
  import { UseMutationOptions, UseQueryOptions, QueryKey, UseQueryResult } from 'react-query';
4
4
  import 'axios';
package/job/job.d.ts CHANGED
@@ -1,5 +1,5 @@
1
1
  import { UseQueryOptions, QueryKey, UseQueryResult } from 'react-query';
2
- import { bb as customInstance, aJ as JobsGetResponse, bc as ErrorType, b7 as DmError, aL as JobGetResponse } from '../custom-instance-f51d6877.js';
2
+ import { bb as customInstance, aJ as JobsGetResponse, bc as ErrorType, b7 as DmError, aL as JobGetResponse } from '../custom-instance-6e6b0801.js';
3
3
  import 'axios';
4
4
 
5
5
  /**
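The job hunk above only reflects the renamed shared chunk, but the JobGetResponse type it re-imports carries the Job's variables (inputs, options, outputs) as JSONSchema strings rather than parsed objects. A short sketch of parsing one of those strings; jobDetail is a hand-written stand-in for a fetched response and its schema string is a generic illustration, not a real Job definition.

// JobGetResponse.variables holds JSONSchema strings; consumers parse
// them before rendering a form or validating option values.
const jobDetail = {
  name: "Example job",
  variables: {
    // generic JSONSchema string standing in for the Job's real options block
    options: '{"type":"object","properties":{"count":{"type":"integer"}}}',
  },
};

const optionsSchema = jobDetail.variables.options
  ? (JSON.parse(jobDetail.variables.options) as Record<string, unknown>)
  : undefined;

console.log(jobDetail.name, optionsSchema);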
@@ -1,6 +1,6 @@
1
1
  import * as react_query from 'react-query';
2
2
  import { UseMutationOptions, UseQueryOptions, QueryKey, UseQueryResult } from 'react-query';
3
- import { O as DatasetVersionMetaPostBodyBody, bb as customInstance, a$ as DatasetMetaGetResponse, bc as ErrorType, b7 as DmError, R as DatasetMetaPostBodyBody } from '../custom-instance-f51d6877.js';
3
+ import { O as DatasetVersionMetaPostBodyBody, bb as customInstance, a$ as DatasetMetaGetResponse, bc as ErrorType, b7 as DmError, R as DatasetMetaPostBodyBody } from '../custom-instance-6e6b0801.js';
4
4
  import 'axios';
5
5
 
6
6
  declare type AsyncReturnType<T extends (...args: any) => Promise<any>> = T extends (...args: any) => Promise<infer R> ? R : any;
package/package.json CHANGED
@@ -1,5 +1,5 @@
1
1
  {
2
- "version": "0.7.8",
2
+ "version": "0.7.9-rc.1",
3
3
  "author": "Oliver Dudgeon",
4
4
  "name": "@squonk/data-manager-client",
5
5
  "private": false,
@@ -1,4 +1,4 @@
1
- import { bb as customInstance, aF as ProjectsGetResponse, bc as ErrorType, b7 as DmError, H as ProjectPostBodyBody, aG as ProjectPostResponse, a8 as ProjectDetail, I as ProjectPatchBodyBody, a2 as TaskIdentity, g as GetProjectFileParams, J as ProjectFilePutBodyBody } from '../custom-instance-f51d6877.js';
1
+ import { bb as customInstance, aF as ProjectsGetResponse, bc as ErrorType, b7 as DmError, H as ProjectPostBodyBody, aG as ProjectPostResponse, a8 as ProjectDetail, I as ProjectPatchBodyBody, a2 as TaskIdentity, g as GetProjectFileParams, J as ProjectFilePutBodyBody } from '../custom-instance-6e6b0801.js';
2
2
  import * as react_query from 'react-query';
3
3
  import { UseQueryOptions, QueryKey, UseQueryResult, UseMutationOptions } from 'react-query';
4
4
  import 'axios';
@@ -668,6 +668,9 @@ export interface ProjectDetail {
668
668
  /** The project (owner) creator
669
669
  */
670
670
  owner: string;
671
+ /** True if the project is private. Private projects are only visible to the owner and its editors.
672
+ */
673
+ private: boolean;
671
674
  /** An editor (user_id) of the project */
672
675
  editors: string[];
673
676
  /** The approximate size of all the files in the Project volume. This is updated regaularly throughout the day and its current size may differ from what is reported here. The size resolution is 1MiB (the smallest billable unit). Therefore a project that contains 32KiB of files is recorded as 1MiB in size */
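The only field-level change in this release is the new private flag on ProjectDetail, documented above as making the project visible only to its owner and editors. A minimal sketch of a client-side visibility check that mirrors that comment; the type is written structurally so the example does not depend on the package's (unshown) public import paths, and the check is an illustration rather than the server's authorisation logic.

// Structural subset of ProjectDetail as it appears in the diff above.
interface ProjectVisibility {
  project_id: string;
  owner: string;
  editors: string[];
  private: boolean; // new in 0.7.9-rc.1
}

// Mirrors the schema comment: private projects are only visible to the
// owner and its editors.
const canView = (project: ProjectVisibility, username: string): boolean =>
  !project.private || project.owner === username || project.editors.includes(username);

// Hypothetical data for illustration only.
const projects: ProjectVisibility[] = [
  { project_id: "p-1", owner: "alice", editors: ["bob"], private: true },
  { project_id: "p-2", owner: "carol", editors: [], private: false },
];

console.log(projects.filter((p) => canView(p, "bob")).map((p) => p.project_id)); // ["p-1", "p-2"]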
package/task/task.d.ts CHANGED
@@ -1,6 +1,6 @@
1
1
  import * as react_query from 'react-query';
2
2
  import { UseQueryOptions, QueryKey, UseQueryResult, UseMutationOptions } from 'react-query';
3
- import { c as GetTasksParams, bb as customInstance, aB as TasksGetResponse, bc as ErrorType, b7 as DmError, b as GetTaskParams, aD as TaskGetResponse } from '../custom-instance-f51d6877.js';
3
+ import { c as GetTasksParams, bb as customInstance, aB as TasksGetResponse, bc as ErrorType, b7 as DmError, b as GetTaskParams, aD as TaskGetResponse } from '../custom-instance-6e6b0801.js';
4
4
  import 'axios';
5
5
 
6
6
  declare type AsyncReturnType<T extends (...args: any) => Promise<any>> = T extends (...args: any) => Promise<infer R> ? R : any;
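The task types re-imported above include TaskEvent, whose ordinal starts at 1, and GetTaskParams, whose event_prior_ordinal asks the API to return only events after a previously seen ordinal (0 requests everything). A small sketch of computing the next polling parameter from the events already received; the event records themselves are made up for illustration.

// TaskEvent shape from the schema: ordinal, message, level and time.
interface TaskEventLike {
  ordinal: number;
  message: string;
  level: string;
  time: string;
}

// The highest ordinal seen so far is the event_prior_ordinal to send on
// the next poll; 0 means "from the beginning".
const nextPriorOrdinal = (events: TaskEventLike[]): number =>
  events.reduce((max, e) => Math.max(max, e.ordinal), 0);

const seen: TaskEventLike[] = [
  { ordinal: 1, message: "Task created", level: "INFO", time: "2022-01-01T00:00:00Z" },
  { ordinal: 2, message: "Copying", level: "INFO", time: "2022-01-01T00:00:01Z" },
];

console.log(nextPriorOrdinal(seen)); // 2; pass as event_prior_ordinal next time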
package/type/type.d.ts CHANGED
@@ -1,5 +1,5 @@
1
1
  import { UseQueryOptions, QueryKey, UseQueryResult } from 'react-query';
2
- import { bb as customInstance, aA as TypesGetResponse, bc as ErrorType, b7 as DmError } from '../custom-instance-f51d6877.js';
2
+ import { bb as customInstance, aA as TypesGetResponse, bc as ErrorType, b7 as DmError } from '../custom-instance-6e6b0801.js';
3
3
  import 'axios';
4
4
 
5
5
  /**
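TypesGetResponse, re-imported above, lists TypeSummary records, and the schema notes that only types carrying a format-support image (formatter_image) can be uploaded as new Datasets. A short sketch of filtering for uploadable MIME types; the sample records, including the MIME string and image name, are hypothetical values for illustration.

// Structural subset of TypeSummary as documented in the schema above.
interface TypeSummaryLike {
  mime: string;
  file_extensions: string[];
  formatter_image?: string; // absent: the type cannot be uploaded
}

// Per the schema comment, a type without a format-support image cannot
// be uploaded (it may still be usable as a destination type).
const uploadableMimeTypes = (types: TypeSummaryLike[]): string[] =>
  types.filter((t) => t.formatter_image !== undefined).map((t) => t.mime);

// Hypothetical response values.
const types: TypeSummaryLike[] = [
  { mime: "chemical/x-mdl-sdfile", file_extensions: [".sdf", ".sdf.gz"], formatter_image: "example/sdf-formatter:1.0" },
  { mime: "application/octet-stream", file_extensions: [".bin"] },
];

console.log(uploadableMimeTypes(types)); // ["chemical/x-mdl-sdfile"]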
package/user/user.d.ts CHANGED
@@ -1,6 +1,6 @@
1
1
  import * as react_query from 'react-query';
2
2
  import { UseQueryOptions, QueryKey, UseQueryResult, UseMutationOptions } from 'react-query';
3
- import { bb as customInstance, ax as UsersGetResponse, bc as ErrorType, b7 as DmError, E as GetUserAccountParams, W as UserAccountDetail, F as UserAccountPatchBodyBody, G as GetUserApiLogParams, ay as UserApiLogGetResponse } from '../custom-instance-f51d6877.js';
3
+ import { bb as customInstance, ax as UsersGetResponse, bc as ErrorType, b7 as DmError, E as GetUserAccountParams, W as UserAccountDetail, F as UserAccountPatchBodyBody, G as GetUserApiLogParams, ay as UserApiLogGetResponse } from '../custom-instance-6e6b0801.js';
4
4
  import 'axios';
5
5
 
6
6
  declare type AsyncReturnType<T extends (...args: any) => Promise<any>> = T extends (...args: any) => Promise<infer R> ? R : any;
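UserAccountPatchBodyBody, re-imported above, is the body used to toggle account privacy and admin impersonation, and the schema states that setting impersonate to a non-empty username also requires become_admin, while an empty string stops impersonating. A minimal sketch of building those bodies; the type is written structurally because the package's public import paths are not shown in this diff, and sending the body is left to whichever generated mutation hook the consumer uses.

// Structural copy of the documented UserAccountPatchBodyBody fields.
interface UserAccountPatchLike {
  private?: boolean;
  become_admin?: boolean;
  impersonate?: string;
  use_impersonation?: boolean;
}

// Start impersonating: a non-empty "impersonate" value must be
// accompanied by "become_admin" (per the schema comment).
const startImpersonation = (username: string): UserAccountPatchLike => ({
  become_admin: true,
  impersonate: username,
});

// Stop impersonating: an empty string clears the impersonation.
const stopImpersonation: UserAccountPatchLike = { impersonate: "" };

console.log(startImpersonation("some.user"), stopImpersonation);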