@squonk/data-manager-client 3.1.0 → 3.3.0

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these versions as they appear in their public registries.
Files changed (54)
  1. package/accounting/accounting.cjs.map +1 -1
  2. package/accounting/accounting.js.map +1 -1
  3. package/admin/admin.cjs.map +1 -1
  4. package/admin/admin.js.map +1 -1
  5. package/application/application.cjs.map +1 -1
  6. package/application/application.js.map +1 -1
  7. package/configuration/configuration.cjs.map +1 -1
  8. package/configuration/configuration.js.map +1 -1
  9. package/dataset/dataset.cjs.map +1 -1
  10. package/dataset/dataset.js.map +1 -1
  11. package/digest/digest.cjs.map +1 -1
  12. package/digest/digest.js.map +1 -1
  13. package/exchange-rate/exchange-rate.cjs.map +1 -1
  14. package/exchange-rate/exchange-rate.js.map +1 -1
  15. package/file-and-path/file-and-path.cjs.map +1 -1
  16. package/file-and-path/file-and-path.js.map +1 -1
  17. package/index.cjs.map +1 -1
  18. package/index.d.cts +4 -1
  19. package/index.d.ts +4 -1
  20. package/index.js.map +1 -1
  21. package/instance/instance.cjs.map +1 -1
  22. package/instance/instance.js.map +1 -1
  23. package/inventory/inventory.cjs.map +1 -1
  24. package/inventory/inventory.js.map +1 -1
  25. package/job/job.cjs.map +1 -1
  26. package/job/job.js.map +1 -1
  27. package/metadata/metadata.cjs.map +1 -1
  28. package/metadata/metadata.js.map +1 -1
  29. package/package.json +1 -1
  30. package/project/project.cjs.map +1 -1
  31. package/project/project.js.map +1 -1
  32. package/src/accounting/accounting.ts +1 -1
  33. package/src/admin/admin.ts +1 -1
  34. package/src/application/application.ts +1 -1
  35. package/src/configuration/configuration.ts +1 -1
  36. package/src/data-manager-api.schemas.ts +4 -1
  37. package/src/dataset/dataset.ts +1 -1
  38. package/src/digest/digest.ts +1 -1
  39. package/src/exchange-rate/exchange-rate.ts +1 -1
  40. package/src/file-and-path/file-and-path.ts +1 -1
  41. package/src/instance/instance.ts +1 -1
  42. package/src/inventory/inventory.ts +1 -1
  43. package/src/job/job.ts +1 -1
  44. package/src/metadata/metadata.ts +1 -1
  45. package/src/project/project.ts +1 -1
  46. package/src/task/task.ts +1 -1
  47. package/src/type/type.ts +1 -1
  48. package/src/user/user.ts +1 -1
  49. package/task/task.cjs.map +1 -1
  50. package/task/task.js.map +1 -1
  51. package/type/type.cjs.map +1 -1
  52. package/type/type.js.map +1 -1
  53. package/user/user.cjs.map +1 -1
  54. package/user/user.js.map +1 -1
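Apart from the version bump in package.json, the only multi-line change in this release range is the orval-generated schema module, package/src/data-manager-api.schemas.ts (+4 -1), together with the bundled declaration files index.d.ts and index.d.cts (+4 -1); every other file changes by a single line (the source maps and the other generated source files). As a minimal sketch of how the generated types are consumed (importing them from the package root is an assumption, suggested but not confirmed by the bundled index.d.ts and the index.js source map shown below):

// Sketch only: GetJobsParams and ProjectDetail are defined in the generated
// data-manager-api.schemas.ts; the root re-export is an assumption.
import type { GetJobsParams, ProjectDetail } from "@squonk/data-manager-client";

// Query parameters for listing Jobs scoped to a single (hypothetical) Project.
const params: GetJobsParams = {
  project_id: "project-0123abcd-0123-abcd-0123-0123456789ab",
};

// Keep only the private Projects from a projects response.
const privateProjects = (projects: ProjectDetail[]): ProjectDetail[] =>
  projects.filter((project) => project.private);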
package/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/data-manager-api.schemas.ts"],"sourcesContent":["// @ts-nocheck\n/**\n * Generated by orval v7.2.0 🍺\n * Do not edit manually.\n * Dataset Manager API\n * The Dataset Manager API service.\n\nA service that allows *registered* users to make **Datasets** and associated **Metadata** available to **Applications** and **Jobs** using **Projects** and **Files**.\n\n * OpenAPI spec version: 3.1\n */\nexport type GetUserInventoryParams = {\n/**\n * An Organisation identity\n */\norg_id?: QOrgIdParameter;\n/**\n * A Unit identity\n */\nunit_id?: QUnitIdParameter;\n/**\n * A comma-separated list of usernames\n\n */\nusernames?: QUsernamesParameter;\n};\n\nexport type AdminDeleteJobManifestParams = {\n/**\n * Purge unreferenced Jobs\n */\npurge?: QPurgeParameter;\n};\n\nexport type AdminGetUsersParams = {\n/**\n * Maximum days a user has been idle (has not used the API). If you specify `2` and it's Monday then users who have not used the API since Saturday will be returned.\n\n */\nidle_days?: QIdleDaysParameter;\n/**\n * Minimum days a user has been active (has used the API). If you specify `2` and it's Monday then users who have used the API on Saturday or later will be returned.\n\n */\nactive_days?: QActiveDaysParameter;\n};\n\nexport type AdminGetServiceErrorsParams = {\n/**\n * Set to include acknowledged items\n\n */\ninclude_acknowledged?: QIncludeAcknowledgedParameter;\n};\n\nexport type GetUserApiLogParams = {\n/**\n * A from (inclusive) date-time. If provided no API calls prior to this will be returned. UTC is assumed if no timezone is provided\n\n */\nfrom?: QFromParameter;\n/**\n * An until (exclusive) date-time. If provided only API calls made before this will be returned. UTC is assumed if no timezone is provided\n\n */\nuntil?: QUntilParameter;\n};\n\nexport type GetUserAccountParams = {\n/**\n * Set, if you're an admin, to call the endpoint without impersonation\n\n */\ndo_not_impersonate?: QDoNotImpersonateParameter;\n};\n\nexport type PatchInstanceParams = {\n/**\n * True to archive the instance\n */\narchive?: QInstanceArchiveParameter;\n};\n\nexport type GetInstancesParams = {\n/**\n * A Project identity\n */\nproject_id?: QProjectIdParameter;\n};\n\nexport type GetJobExchangeRatesParams = {\n/**\n * Set to get current\n\n */\ncurrent?: QCurrentParameter;\n};\n\nexport type GetAllJobExchangeRatesParams = {\n/**\n * Only return records where the exchange rate is undefined\n\n */\nonly_undefined?: QOnlyUndefinedParameter;\n};\n\nexport type GetJobParams = {\n/**\n * A Project identity\n */\nproject_id?: QProjectIdParameter;\n};\n\nexport type GetJobByVersionParams = {\n/**\n * The Collection for a Job, i.e. \"im-test\"\n\n */\ncollection: QJobCollectionParameter;\n/**\n * The Job, i.e. \"coin-test\"\n\n */\njob: QJobJobParameter;\n/**\n * The version of a Job, i.e. \"1.0.0\"\n\n */\nversion: QJobVersionParameter;\n/**\n * A Project identity\n */\nproject_id?: QProjectIdParameter;\n};\n\nexport type GetJobsParams = {\n/**\n * A Project identity\n */\nproject_id?: QProjectIdParameter;\n};\n\nexport type GetApplicationExchangeRatesParams = {\n/**\n * Set to get current\n\n */\ncurrent?: QCurrentParameter;\n};\n\nexport type GetAllApplicationExchangeRatesParams = {\n/**\n * Only return records where the exchange rate is undefined\n\n */\nonly_undefined?: QOnlyUndefinedParameter;\n};\n\nexport type GetTaskParams = {\n/**\n * Maximum number of events to return. 
If provided, can be 1 or more.\n\n */\nevent_limit?: QEventLimitParameter;\n/**\n * The ordinal of a previously received event. If set, only events subsequent to the ordinal provided will be returned. Providing a value of 0 will result in retrieving the first and subsequent events.\n\n */\nevent_prior_ordinal?: QEventPriorOrdinalParameter;\n};\n\nexport type GetTasksParams = {\n/**\n * Set to limit the response to objects relating to the named purpose.\n\n */\npurpose?: QPurposeParameter;\n/**\n * Set true if you want to exclude 'done' tasks, i.e. just see those that are still running.\n\n */\nexclude_done?: QExcludeDoneParameter;\n/**\n * Set true if you want to exclude Tasks related to object removal.\n\n */\nexclude_removal?: QExcludeRemovalParameter;\n/**\n * Set to a dot-separated string of purpose enumerations, i.e. `DATASET`, `FILE`, `INSTANCE`, or `PROJECT`. To exclude file and dataset tasks set this field to `FILE.DATASET`\n\n */\nexclude_purpose?: QExcludePurposeParameter;\n/**\n * A Project identity\n */\nproject_id?: QProjectIdParameter;\n/**\n * An instance callback context string\n */\ninstance_callback_context?: QInstanceCallbackContextParameter;\n};\n\nexport type MoveFileInProjectParams = {\n/**\n * A project file.\n\n */\nfile: QFileParameter;\n/**\n * A project file.\n\n */\ndst_file?: QDstFileParameter;\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\nsrc_path?: QFileSrcPathParameter;\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\ndst_path?: QFileDstPathParameter;\n/**\n * The Project identity\n */\nproject_id: QFileProjectIdParameter;\n};\n\nexport type DeleteUnmanagedFileParams = {\n/**\n * A project file.\n\n */\nfile: QFileParameter;\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\npath?: QFilePathParameter;\n/**\n * The Project identity\n */\nproject_id: QFileProjectIdParameter;\n};\n\nexport type GetFilesParams = {\n/**\n * The Project identity\n */\nproject_id: QFileProjectIdParameter;\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\npath?: QFilePathParameter;\n/**\n * Whether to include hidden files and directories\n */\ninclude_hidden?: QIncludeHiddenParameter;\n};\n\nexport type DeleteDatasetParams = {\n/**\n * Whether to convert Project managed file instances to unmanaged files\n\n */\nkeep_project_files?: QKeepProjectFilesParameter;\n};\n\nexport type GetVersionsParams = {\n/**\n * Whether to include records that are deleted\n */\ninclude_deleted?: QIncludeDeletedParameter;\n};\n\nexport type GetDatasetsParams = {\n/**\n * Whether to include records that are deleted\n */\ninclude_deleted?: QIncludeDeletedParameter;\n/**\n * Filter the datasets by username\n\n */\nusername?: QUsernameParameter;\n/**\n * Filter the datasets by the supplied mime_type.\n\n */\ndataset_mime_type?: QDatasetMimeTypeParameter;\n/**\n * A comma-separated list of owners\n\n */\nowners?: QOwnersParameter;\n/**\n * A comma-separated list of editors\n\n */\neditors?: QEditorsParameter;\n/**\n * JSON string containing a list of label/value pairs for the datasets to be filtered by. If the value is set to null, then only the label is matched. If both the label and value are set, then both are matched. e.g. 
'{\"label1\": null, \"label2\": \"value2\"}'\n\n */\nlabels?: QLabelsParameter;\n};\n\nexport type GetProjectFileWithTokenParams = {\n/**\n * A token\n */\ntoken?: QTokenParameter;\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\npath?: QFilePathParameter;\n/**\n * A project file.\n\n */\nfile: QFileParameter;\n};\n\nexport type GetProjectFileParams = {\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\npath?: QFilePathParameter;\n/**\n * A project file.\n\n */\nfile: QFileParameter;\n};\n\nexport type GetProjectsParams = {\n/**\n * A Project name\n */\nproject_name?: QProjectNameParameter;\n};\n\nexport type MovePathParams = {\n/**\n * The Project identity\n */\nproject_id: QFileProjectIdParameter;\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\nsrc_path?: QFileSrcPathParameter;\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\ndst_path?: QFileDstPathParameter;\n};\n\nexport type DeletePathParams = {\n/**\n * The Project identity\n */\nproject_id: QFileProjectIdParameter;\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\npath?: QFilePathParameter;\n};\n\nexport type CreatePathParams = {\n/**\n * The Project identity\n */\nproject_id: QFileProjectIdParameter;\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\npath?: QFilePathParameter;\n};\n\n/**\n * A comma-separated list of editors\n\n */\nexport type QEditorsParameter = string;\n\n/**\n * Purge unreferenced Jobs\n */\nexport type QPurgeParameter = boolean;\n\n/**\n * A comma-separated list of owners\n\n */\nexport type QOwnersParameter = string;\n\n/**\n * A comma-separated list of usernames\n\n */\nexport type QUsernamesParameter = string;\n\n/**\n * Filter the datasets by username\n\n */\nexport type QUsernameParameter = string;\n\n/**\n * An until (exclusive) date-time. If provided only API calls made before this will be returned. UTC is assumed if no timezone is provided\n\n */\nexport type QUntilParameter = string;\n\n/**\n * A Unit identity\n */\nexport type QUnitIdParameter = string;\n\n/**\n * A token\n */\nexport type QTokenParameter = string;\n\n/**\n * An instance callback context string\n */\nexport type QInstanceCallbackContextParameter = string;\n\n/**\n * A Project name\n */\nexport type QProjectNameParameter = string;\n\n/**\n * A Project identity\n */\nexport type QProjectIdParameter = string;\n\n/**\n * An Organisation identity\n */\nexport type QOrgIdParameter = string;\n\n/**\n * JSON string containing a list of label/value pairs for the datasets to be filtered by. If the value is set to null, then only the label is matched. If both the label and value are set, then both are matched. e.g. '{\"label1\": null, \"label2\": \"value2\"}'\n\n */\nexport type QLabelsParameter = string;\n\n/**\n * Only return records where the exchange rate is undefined\n\n */\nexport type QOnlyUndefinedParameter = boolean;\n\n/**\n * Whether to convert Project managed file instances to unmanaged files\n\n */\nexport type QKeepProjectFilesParameter = boolean;\n\n/**\n * The version of a Job, i.e. 
\"1.0.0\"\n\n */\nexport type QJobVersionParameter = string;\n\n/**\n * The Job, i.e. \"coin-test\"\n\n */\nexport type QJobJobParameter = string;\n\n/**\n * A Job identity\n\n */\nexport type QJobIdParameter = number;\n\n/**\n * The Collection for a Job, i.e. \"im-test\"\n\n */\nexport type QJobCollectionParameter = string;\n\n/**\n * True to archive the instance\n */\nexport type QInstanceArchiveParameter = boolean;\n\n/**\n * Whether to include hidden files and directories\n */\nexport type QIncludeHiddenParameter = boolean;\n\n/**\n * Whether to include records that are deleted\n */\nexport type QIncludeDeletedParameter = boolean;\n\n/**\n * Set to include acknowledged items\n\n */\nexport type QIncludeAcknowledgedParameter = boolean;\n\n/**\n * A from (inclusive) date-time. If provided no API calls prior to this will be returned. UTC is assumed if no timezone is provided\n\n */\nexport type QFromParameter = string;\n\n/**\n * The Project identity\n */\nexport type QFileProjectIdParameter = string;\n\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\nexport type QFileSrcPathParameter = string;\n\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\nexport type QFileDstPathParameter = string;\n\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\nexport type QFilePathParameter = string;\n\n/**\n * A project file.\n\n */\nexport type QDstFileParameter = string;\n\n/**\n * A project file.\n\n */\nexport type QFileParameter = string;\n\n/**\n * Minimum days a user has been active (has used the API). If you specify `2` and it's Monday then users who have used the API on Saturday or later will be returned.\n\n */\nexport type QActiveDaysParameter = number;\n\n/**\n * Maximum days a user has been idle (has not used the API). If you specify `2` and it's Monday then users who have not used the API since Saturday will be returned.\n\n */\nexport type QIdleDaysParameter = number;\n\nexport type QPurposeParameter = typeof QPurposeParameter[keyof typeof QPurposeParameter];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const QPurposeParameter = {\n DATASET: 'DATASET',\n FILE: 'FILE',\n INSTANCE: 'INSTANCE',\n PROJECT: 'PROJECT',\n} as const;\n\n/**\n * Set to a dot-separated string of purpose enumerations, i.e. `DATASET`, `FILE`, `INSTANCE`, or `PROJECT`. To exclude file and dataset tasks set this field to `FILE.DATASET`\n\n */\nexport type QExcludePurposeParameter = string;\n\n/**\n * Set true if you want to exclude Tasks related to object removal.\n\n */\nexport type QExcludeRemovalParameter = boolean;\n\n/**\n * Set true if you want to exclude 'done' tasks, i.e. just see those that are still running.\n\n */\nexport type QExcludeDoneParameter = boolean;\n\n/**\n * The ordinal of a previously received event. If set, only events subsequent to the ordinal provided will be returned. Providing a value of 0 will result in retrieving the first and subsequent events.\n\n */\nexport type QEventPriorOrdinalParameter = number;\n\n/**\n * Maximum number of events to return. 
If provided, can be 1 or more.\n\n */\nexport type QEventLimitParameter = number;\n\n/**\n * Filter the datasets by the supplied mime_type.\n\n */\nexport type QDatasetMimeTypeParameter = string;\n\n/**\n * Set, if you're an admin, to call the endpoint without impersonation\n\n */\nexport type QDoNotImpersonateParameter = boolean;\n\n/**\n * Set to get current\n\n */\nexport type QCurrentParameter = boolean;\n\n/**\n * An Application identity\n\n */\nexport type QApplicationIdParameter = string;\n\nexport type UserPatchBodyBody = {\n /** If set to a message the user account is suspended, with the user receiving this message when they try and use the API. A suspended user account can be restored by setting the message to `/restore`\n */\n suspend_message?: string;\n};\n\nexport type UserAccountPatchBodyBody = {\n /** For `admin` accounts, if set the user account is able to read anything, i.e. `GET` API calls (i.e. endpoints that do not change the Data Manager state) behave as though the caller is acting as *everyone*. An `admin` user would set ths parameter in order to browse the system, and then switch to `impersonate` mode in order to change things as the chosen user\n */\n become_admin?: boolean;\n /**\n * For `admin` accounts, if set API calls behave as though the caller is the user being impersonated. To stop impersonating set this to an empty string. To set impersonation to anything other than an empty string you must also set `become_admin`\n\n * @maxLength 80\n * @pattern ^(\\w(?:\\w*(?:[@.-]\\w+)?)*|)$\n */\n impersonate?: string;\n /** If set the user account becomes private, if provided but false the user account becomes public. Public Users show up in user searches\n */\n private?: boolean;\n /** For `admin` accounts, if this is set the account for the user being impersonated is patched, rather then the user's own account. To use this you must have a value for `impersonate`\n */\n use_impersonation?: boolean;\n};\n\nexport type ProjectPostBodyBody = {\n /**\n * @minLength 2\n * @maxLength 80\n */\n name: string;\n /** Whether the project is private. You may not be permitted to make the project private, that will depend on the project product you're using to create the project\n */\n private?: boolean;\n /**\n * The Data Manager *Tier Product ID* you're using to create the Project\n\n * @pattern ^product-[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\n */\n tier_product_id: string;\n};\n\nexport type ProjectPatchBodyBody = {\n /** The new name of the ptojct\n */\n name?: string;\n private?: boolean;\n};\n\nexport type ProjectFilePutBodyBody = {\n /**\n * An alternative filename to use for the uploaded File\n\n * @minLength 1\n * @maxLength 128\n */\n as_filename?: string;\n file: Blob;\n /**\n * The Project path of the file.\n\n * @minLength 1\n * @maxLength 260\n * @pattern ^/.+$|^/$\n */\n path?: string;\n};\n\nexport type ExchangeRatePutBodyBody = {\n /**\n * A brief comment relating to the new rate\n\n * @maxLength 80\n */\n comment?: string;\n /** A decimal value used as the new Exchange Rate. Application _raw_ **costs** are multiplied by this value to covert costs to **coins**. A string is used to avoid rounding errors. 
Internally the value is treated as a Python Decimal.\n */\n rate: string;\n};\n\nexport type JobManifestPutBodyBody = {\n /**\n * Optional URL header values (a JSON string)\n * @pattern ^|{.*}$\n */\n header?: string;\n /**\n * Optional URL parameter values (a JSON string)\n * @pattern ^|{.*}$\n */\n params?: string;\n /** The URL of the Job Manifest */\n url: string;\n};\n\nexport type InstancePostBodyBody = {\n /**\n * A supported application. Applications instances are managed using pre-deployed Kubernetes **Operators**. The application ID is a combination of the operator _plural_ and _group_.\n\nApplication IDs currently supported include `datamanagerjobs.squonk.it` and `jupyternotebooks.squonk.it`\n\n * @minLength 1\n * @maxLength 80\n */\n application_id: string;\n /**\n * The name to use for the instance\n\n * @minLength 2\n * @maxLength 80\n * @pattern ^[A-Za-z0-9]+[A-Za-z0-9-_. ]*[A-Za-z0-9]+$\n */\n as_name: string;\n /**\n * Used in conjunction with the `callback_url` any value provided here will be passed back in the message payload that's delivered to the callback URL. It can be used by the recipient to provide a context that's meaningful\n\n * @minLength 1\n * @maxLength 256\n */\n callback_context?: string;\n /**\n * An optional 22-character **sortuuid** callback token that is supplied by the remote service. If not provided the user can use `generate_callback_token` to have one generated and returned in the response.\n\nSee the Python module's `shortuuid.get_alphabet()` for the full list of permitted characters\n\n * @minLength 22\n * @maxLength 22\n * @pattern ^[2-9A-HJ-NP-Za-km-z]{22}$\n */\n callback_token?: string;\n /** A URL the DM will use to PUT job progress messages as the requested instance runs. Used, at the moment, for Job execution\n */\n callback_url?: string;\n /** A debug value that may be used by the instance.\n\nFor Data Manager **Job** applications setting this to anything other zero ('0') prevents the Job's Pod from being deleted automatically, allowing a developer to inspect the Pod's log for example.\n\nThe behaviour of **Application** instances using this property is undefined. It will depend on whether the application **CRD** handles the Data Manager debug field.\n */\n debug?: string;\n /** If set a callback token will be provided in the response. The token allows files to be downloaded from the instance project and expires after a pre-configured amount of time after the instance is complete or if the user revokes the token.\n\nCaution should be taken using this feature. A 3rd party can access the Project's files without authentication, they just need the token and the project identity.\n\nTokens should therefore be revoked when they're no longer required\n */\n generate_callback_token?: boolean;\n /**\n * The project to attach\n\n * @pattern ^project-[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\n */\n project_id: string;\n /** The instance specification. A JSON string that's application-specific and controls the application's behaviour.\n\nWhen launching a Data Manager **Job** Application you must identify the Job using the properties `collection`, `job` and `version`, e.g. `{\"collection\":\"im-test\",\"job\":\"coin-test\",\"version\":\"1.0.0\"}`\n\nJobs that offer commands will often advertise a series of **inputs** and **options** where the values can be provided using a **variables** map in the specification. 
Something like `\"variables\":{\"x\":7}`.\n\nJobs start in a Job-specific **working directory** but the starting directory for any Job can be adjusted by defining a `sub_path` to the root specification. For example, if you want the Job to start in the path `foo/bar` (inside the Job's built-in working directory) you can add `\"sub_path\":\"foo/bar\"` to the specification. You can only use a sub-path for a Job if the Job defines a working directory and `sub-path` cannot begin or end with a path separator (`/`).\n\nApplications also support `variables`. The Jupyter notebook application supports the definition of the notebook `\"image\"`, `\"cpu\"` and `\"memory\"`. A full Jupyter notebook specification might be `{\"variables\":{\"image\":\"Tensorflow 2.9\",\"cpu\":2,\"memory\":\"4Gi\"}}`, where `memory` is limited to `Gi` as a suffix.\n */\n specification?: string;\n};\n\nexport type FilePostBodyBody = {\n /** The desired Dataset file type (a MIME type). Whether or not the chosen fileType is supported will depend on the Dataset\n */\n as_type: string;\n /** Whether to compress the Dataset File as it's attached. Compression is achieved using gzip, resulting in a File ending `.gz`. By default the file will be compressed\n */\n compress?: boolean;\n /**\n * The Dataset UUID for the File that you intend to attach\n\n * @pattern ^dataset-[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\n */\n dataset_id: string;\n /**\n * The Dataset version to attach\n\n * @minimum 1\n */\n dataset_version: number;\n /** Whether the Dataset File can be modified while in the Project. By default the File cannot be modified\n */\n immutable?: boolean;\n /**\n * A path within the Project to add the File, default is the project root ('/'), the mount-point within the application container. Paths must begin '/'\n\n * @maxLength 260\n * @pattern ^/.+$|^/$\n */\n path?: string;\n /**\n * The Project UUID you're attaching to\n\n * @pattern ^project-[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\n */\n project_id: string;\n};\n\nexport type DatasetPutBodyBody = {\n /**\n * If provided the File becomes a new version of the Dataset named. If not provided this File becomes the first version of a new Dataset, whose ID is returned to you on success.\n\n * @pattern ^(dataset-[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}|)$\n */\n dataset_id?: string;\n /** The MIME type of the Dataset\n */\n dataset_type: string;\n /**\n * The file name of the file in the Project path to load as a new Dataset.\n\n * @minLength 1\n * @maxLength 128\n */\n file_name: string;\n /** Extra variables (text) presented to the Dataset format-support container, which occurs during the upload and database processing stage. The content of the text is Dataset type specific.\n */\n format_extra_variables?: string;\n /**\n * The Project path of the file.\n\n * @minLength 1\n * @maxLength 260\n * @pattern ^/.+$|^/$\n */\n path: string;\n /**\n * The Project the file belongs to\n\n * @pattern ^project-[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\n */\n project_id: string;\n /** If set the post-format database load action is skipped. This means the dataset is uploaded but its molecules are not entered into the Data Manager database. This can save significant time if you're just experimenting with the dataset and do not need the molecules in the database\n */\n skip_molecule_load?: boolean;\n /**\n * The Organisational Unit you want the Dataset to belong to. 
If not supplied the Project Unit is used\n\n * @pattern ^unit-[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\n */\n unit_id?: string;\n};\n\nexport type DatasetVersionMetaPostBodyBody = {\n /** JSON string containing a list of annotations. The format of the labels should match either the Fields Descriptor or Service Execution annotation formats described in the data-manager-metadata library.\n */\n annotations?: string;\n /** JSON string containing a list of parameter changes to the metadata. Only the description is currently allowed.\n */\n meta_properties?: string;\n};\n\nexport type DatasetMetaPostBodyBody = {\n /** JSON string containing a list of labels. The format of the labels should match the label annotation format described in the data-manager-metadata library.\n */\n labels?: string;\n /** JSON string containing a list of parameter changes to the metadata. Only the description is currently allowed.\n */\n meta_properties?: string;\n};\n\nexport type DatasetPostBodyBody = {\n /**\n * An optional new filename to use for the uploaded Dataset. The Dataset will be stored using this name.\n\n * @minLength 1\n * @maxLength 128\n */\n as_filename?: string;\n dataset_file: Blob;\n /**\n * If provided the Dataset becomes a new version of the Dataset named. If not provided this Dataset becomes the first version of a new Dataset, whose ID is returned to you on success.\n\n * @pattern ^(dataset-[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}|)$\n */\n dataset_id?: string;\n /** The MIME type of the Dataset. Values like `chemical/x-mdl-sdfile`, `chemical/x-mdl-molfile`, and `chemical/x-pdb` are permitted. See the **\\/type** endpoint for a full list of types.\n */\n dataset_type: string;\n /** Extra variables (text) presented to the Dataset format-support container, which occurs during the upload and database processing stage. The content of the text is Dataset type specific.\n */\n format_extra_variables?: string;\n /** If set the post-format database load action is skipped. This means the dataset is uploaded but its molecules are not entered into the Data Manager database. This can save significant time if you're just experimenting with the dataset and do not need the molecules in the database\n */\n skip_molecule_load?: boolean;\n /**\n * The Organisational Unit you want the Dataset to belong to\n\n * @pattern ^unit-[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\n */\n unit_id: string;\n};\n\nexport interface AsAdditionalDataProcessingCharge {\n collateral_cpu_hours?: string;\n collateral_pod_count?: number;\n cost: string;\n cost_scale_factor: string;\n cost_to_coins_er: string;\n error_message?: string;\n instance_id: string;\n instance_name: string;\n job_collection?: string;\n job_job?: string;\n job_version?: string;\n run_time?: string;\n started: string;\n stopped?: string;\n}\n\nexport interface UserSummary {\n /** The user's preferred username\n */\n username: string;\n}\n\nexport interface UserDetail {\n /** For admin accounts, whether the user is acting in an administrative capacity, i.e. acting as everyone\n */\n become_admin?: boolean;\n /** The user's filesystem user uid\n */\n f_uid?: number;\n /** The date and time the user was first seen (an ISO-8601 formatted string in UTC)\n */\n first_seen?: string;\n /** For admin accounts, whether the user is impersonating another user\n */\n impersonate?: string;\n /** The date the user was last seen\n */\n last_seen_date?: string;\n /** Set if the user's account is marked as private. 
Private accounts do not show up against general queries.\n */\n private: boolean;\n /** True if the account is suspended\n */\n suspended?: boolean;\n /** If the account is suspended this typically displays a reason for suspension\n */\n suspension_message?: string;\n /** The user's preferred username\n */\n username: string;\n}\n\nexport interface UserAccountDetail {\n /** Whether the caller has admin privilege */\n caller_has_admin_privilege: boolean;\n /** The Data Manager roles the user has */\n data_manager_roles: string[];\n user: UserDetail;\n}\n\n/**\n * The Schema type (an object)\n\n */\nexport type TypeSummaryFormatterOptionsType = typeof TypeSummaryFormatterOptionsType[keyof typeof TypeSummaryFormatterOptionsType];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TypeSummaryFormatterOptionsType = {\n object: 'object',\n} as const;\n\n/**\n * If present, contains the formatter_options that can be entered in the format_extra_variables field in the POST /dataset api.\n\n */\nexport type TypeSummaryFormatterOptions = {\n /** Required properties\n */\n required: string[];\n /** The title of the Formatter object\n */\n title: string;\n /** The Schema type (an object)\n */\n type: TypeSummaryFormatterOptionsType;\n [key: string]: unknown;\n };\n\nexport interface TypeSummary {\n /** The file's supported file extensions. Each type is limited to a limited number of extensions. For example, SDF files must have the extension `.sdf` (or `.sdf.gz`).\n */\n file_extensions: string[];\n /** The file's type's format-support container image (if set). Types without a format support image cannot be uploaded, but they might be available for use as destination type when a Dataset is added to a Project.\n */\n formatter_image?: string;\n /** If present, contains the formatter_options that can be entered in the format_extra_variables field in the POST /dataset api.\n */\n formatter_options?: TypeSummaryFormatterOptions;\n /** True if Datasets uploaded using this type's support loading of data into the Data Manager data-base.\n */\n formatter_supports_db_load?: boolean;\n /** The File Type MIME\n */\n mime: string;\n}\n\n/**\n * The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type TaskSummaryProcessingStage = typeof TaskSummaryProcessingStage[keyof typeof TaskSummaryProcessingStage];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskSummaryProcessingStage = {\n COPYING: 'COPYING',\n FAILED: 'FAILED',\n FORMATTING: 'FORMATTING',\n LOADING: 'LOADING',\n DELETING: 'DELETING',\n DONE: 'DONE',\n} as const;\n\nexport interface TaskSummary {\n /** The date and time the task was created\n */\n created: string;\n /** True if the task has run to completion. If the task finished successfully the `exit_code` will be zero.\n */\n done: boolean;\n /** Present when `done` and zero if the task finished successfully.\n */\n exit_code?: number;\n /** The Task UUID\n */\n id: string;\n /** If a container image is launched by the task the image name is available here\n */\n image?: string;\n /** The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). 
A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage?: TaskSummaryProcessingStage;\n /** The purpose for the Task. Tasks are responsible for _uploading_ a Dataset, _attaching_ a Dataset as a File to a project or launching an Applications _instance_.\n */\n purpose: string;\n /** The related object ID for the Task purpose. If the purpose relates to a **Dataset** upload the ID will be a Dataset ID.\n */\n purpose_id: string;\n /** The related object version for the Task purpose. This field will only be set if the **purpose** is `DATASET`.\n */\n purpose_version?: number;\n /** True if the Task relates to an object removal, i.e. a DELETE\n */\n removal?: boolean;\n}\n\n/**\n * The task state. The typical state sequence is `PENDING`, then `STARTED` and finally `SUCCESS`\n\n */\nexport type TaskStateState = typeof TaskStateState[keyof typeof TaskStateState];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskStateState = {\n PENDING: 'PENDING',\n STARTED: 'STARTED',\n RETRY: 'RETRY',\n SUCCESS: 'SUCCESS',\n FAILURE: 'FAILURE',\n} as const;\n\nexport interface TaskState {\n /** A short message accompanying the state, generally only found when the state is `FAILURE`\n */\n message?: string;\n /** The task state. The typical state sequence is `PENDING`, then `STARTED` and finally `SUCCESS`\n */\n state: TaskStateState;\n /** The date and time of the state change\n */\n time: string;\n}\n\nexport interface TaskIdentity {\n task_id: string;\n}\n\n/**\n * The level of the message, a typical logging framework value\n\n */\nexport type TaskEventLevel = typeof TaskEventLevel[keyof typeof TaskEventLevel];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskEventLevel = {\n CRITICAL: 'CRITICAL',\n ERROR: 'ERROR',\n WARNING: 'WARNING',\n INFO: 'INFO',\n DEBUG: 'DEBUG',\n} as const;\n\nexport interface TaskEvent {\n /** The level of the message, a typical logging framework value\n */\n level: TaskEventLevel;\n /** A short message.\n */\n message: string;\n /** The event sequence number. The first event is always '1'.\n */\n ordinal: number;\n /** The date and time the event was generated\n */\n time: string;\n}\n\nexport type ServiceErrorSummarySeverity = typeof ServiceErrorSummarySeverity[keyof typeof ServiceErrorSummarySeverity];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const ServiceErrorSummarySeverity = {\n CRITICAL: 'CRITICAL',\n ERROR: 'ERROR',\n WARNING: 'WARNING',\n} as const;\n\nexport interface ServiceErrorSummary {\n acknowledged: boolean;\n acknowledged_at?: string;\n acknowledging_user?: string;\n created: string;\n error_code?: number;\n hostname: string;\n id: number;\n severity: ServiceErrorSummarySeverity;\n stack_trace: string;\n summary: string;\n}\n\nexport interface ProjectFileDetail {\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The ProjectFile's Dataset origin\n */\n dataset_id?: string;\n /** The ProjectFile's Dataset origin version\n */\n dataset_version?: number;\n /** The ProjectFile's unique ID\n */\n file_id: string;\n /** The ProjectFile's filename within the Project\n */\n file_name: string;\n /** The ProjectFile's path within the Project volume\n */\n file_path: string;\n /** True if the ProjectFile cannot be modified while in the Project\n */\n immutable: boolean;\n /** The ProjectFile MIME type\n */\n mime_type: string;\n /** The owner of the ProjectFile. 
This is the user that added the Dataset (as this file) to the Project\n */\n owner: string;\n /** The Project the ProjectFile belongs to\n */\n project_id?: string;\n}\n\nexport interface ProjectDetail {\n /** An administrator (user_id) of the project */\n administrators: string[];\n created: string;\n /** The user who created the project\n */\n creator: string;\n /** An editor (user_id) of the project */\n editors: string[];\n /** A list of managed files in the Project\n */\n files?: ProjectFileDetail[];\n /** The project name\n */\n name: string;\n /** An observer (user_id) of the project */\n observers: string[];\n /** The Account Server Organisation the Project Product Unit belongs to\n */\n organisation_id?: string;\n /** True if the project is private. Private projects are only visible to editors.\n */\n private: boolean;\n /** The Account Server Product the Project belongs to\n */\n product_id?: string;\n /** The project unique reference\n */\n project_id: string;\n /** The approximate size of all the files in the Project volume. This is updated regularly throughout the day and its current size may differ from what is reported here. The smallest billable unit is 1GiB (1,073,741,824 bytes). Therefore a project that contains 32KiB of files is recorded as 1GiB in size */\n size: number;\n /** The Account Server Unit the Project Product belongs to\n */\n unit_id?: string;\n}\n\nexport interface JobReplacement {\n collection: string;\n job: string;\n}\n\n/**\n * A list of Jobs, collection and job that are either replacing or being replaced\n\n */\nexport type JobReplacements = JobReplacement[];\n\nexport interface JobOrderDetail {\n options: string[];\n}\n\n/**\n * The Job command's outputs. The JSONSchema for the command's inputs, essentially the **variables/outputs** block of the Job's JobDefinition.\n\n */\nexport type JobVariablesOutputs = { [key: string]: unknown };\n\n/**\n * The Job command's options. The JSONSchema for the command's options, essentially the **variables/options** block of the Job's JobDefinition.\n\n */\nexport type JobVariablesOptions = { [key: string]: unknown };\n\n/**\n * The Job command's inputs. The JSONSchema for the command's inputs, essentially the **variables/inputs** block of the Job's JobDefinition.\n\n */\nexport type JobVariablesInputs = { [key: string]: unknown };\n\nexport interface JobVariables {\n /** The Job command's inputs. The JSONSchema for the command's inputs, essentially the **variables/inputs** block of the Job's JobDefinition.\n */\n inputs?: JobVariablesInputs;\n /** The Job command's options. The JSONSchema for the command's options, essentially the **variables/options** block of the Job's JobDefinition.\n */\n options?: JobVariablesOptions;\n order?: JobOrderDetail;\n /** The Job command's outputs. The JSONSchema for the command's inputs, essentially the **variables/outputs** block of the Job's JobDefinition.\n */\n outputs?: JobVariablesOutputs;\n}\n\n/**\n * The optional container image type. 
Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type JobSummaryImageType = typeof JobSummaryImageType[keyof typeof JobSummaryImageType];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const JobSummaryImageType = {\n SIMPLE: 'SIMPLE',\n NEXTFLOW: 'NEXTFLOW',\n} as const;\n\nexport interface JobSummary {\n /** The Job's category\n */\n category?: string;\n /** The Job namespace\n */\n collection: string;\n /** The description of the job in English\n */\n description?: string;\n /** True if disabled. Disabled Jobs cannot be executed. If disabled a reason will be found in `disabled_reason`\n */\n disabled: boolean;\n /** A reason why the Job has been disabled.\n */\n disabled_reason?: string;\n /** A suggested remedy that accompanies the \"disabled_reason\". This provides the client with advice on how to avoid the reason that the Job's been disabled, for reasons that can be avoided (not all are).\n\nWhere there is no remedy for a given reason the remedy will be \"There is no remedy\".\n */\n disabled_remedy?: string;\n /** A URL linking to the Job documentation\n */\n doc_url?: string;\n /** The Job's unique ID\n */\n id: number;\n /** The optional container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n image_type: JobSummaryImageType;\n /** The Job name, unique within a given namespace\n */\n job: string;\n /** The list of keywords assigned to the Job\n */\n keywords?: string[];\n /** A list of Account Server \"required_assets\" that cannot be found. If assets cannot be found the user will receive a \"disabled_reason\" that should explain the problem.\n */\n missing_assets?: string[];\n /** The name of the job in English\n */\n name: string;\n replaced_by?: JobReplacements;\n replaces?: JobReplacements;\n /** A list of Account Server assets names required to run the Job. You need access to these assets in order to run the Job\n */\n required_assets: string[];\n /** The Job version\n */\n version: string;\n}\n\nexport interface JobManifestDetail {\n /** The Date (and time) the manifest was created\n */\n created: string;\n /** The (admin) user who created the manifest\n */\n creator?: string;\n header?: string;\n /** The Job Manifest record ID */\n id: number;\n /** The number of job definition files loaded during the most recent successful load\n */\n job_definition_files_loaded?: number;\n /** The number of job definitions loaded during the most recent successful load\n */\n job_definitions_loaded?: number;\n /** The status of the time the manifest was last loaded. If the load was successful this will be `SUCCESS` and `last_successful_load` will be the same as `last_load`\n */\n last_load_status: string;\n /** The Date (and time) the manifest was last loaded, successfully or otherwise. If the manifest (or any of the Job definition files it refers to) fails to load the `load_status` should provide some diagnostic feedback\n */\n last_load_time?: string;\n /** The Date (and time) the manifest was last loaded successfully\n */\n last_successful_load_time?: string;\n params?: string;\n url: string;\n}\n\nexport interface JobApplication {\n /** The Job's operator ID */\n id: string;\n /** The Job's operator version */\n version: string;\n}\n\n/**\n * The phase of the application. 
This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparing to run, is `PENDING`. The instance is running when the phase is `RUNNING`. `COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccessfully.\n\n */\nexport type InstanceSummaryPhase = typeof InstanceSummaryPhase[keyof typeof InstanceSummaryPhase];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryPhase = {\n COMPLETED: 'COMPLETED',\n CRASH_LOOP_BACKOFF: 'CRASH_LOOP_BACKOFF',\n FAILED: 'FAILED',\n IMAGE_PULL_BACKOFF: 'IMAGE_PULL_BACKOFF',\n PENDING: 'PENDING',\n RUNNING: 'RUNNING',\n SUCCEEDED: 'SUCCEEDED',\n UNKNOWN: 'UNKNOWN',\n} as const;\n\n/**\n * The Job container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type InstanceSummaryJobImageType = typeof InstanceSummaryJobImageType[keyof typeof InstanceSummaryJobImageType];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryJobImageType = {\n SIMPLE: 'SIMPLE',\n NEXTFLOW: 'NEXTFLOW',\n} as const;\n\n/**\n * The type of Application, which can be a `job` or an `application`\n\n */\nexport type InstanceSummaryApplicationType = typeof InstanceSummaryApplicationType[keyof typeof InstanceSummaryApplicationType];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryApplicationType = {\n APPLICATION: 'APPLICATION',\n JOB: 'JOB',\n} as const;\n\nexport interface InstanceSummary {\n /** The application ID\n */\n application_id: string;\n /** The User's specification, provided when the application was launched\n */\n application_specification?: string;\n /** The type of Application, which can be a `job` or an `application`\n */\n application_type: InstanceSummaryApplicationType;\n /** The application version\n */\n application_version: string;\n /** True if the instance is archived (protected from automatic deletion)\n */\n archived: boolean;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The current running coin cost of the instance.\n */\n coins?: string;\n /** If the instance has failed, this will be the error message\n */\n error_message?: string;\n /** Set if the instance output file permissions are expected to be fixed by the Data Manager when the Instance completes.\n */\n fix_permissions?: boolean;\n /** The application instance ID\n */\n id: string;\n /** If the instance relates to a job, this will be the job collection, as defined in the original collection's job definition.\n */\n job_collection?: string;\n /** The Job definition's unique ID\n */\n job_id?: number;\n /** The Job container image type. 
Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n job_image_type?: InstanceSummaryJobImageType;\n /** If the instance relates to a job, this will be the job, as defined in the original collection's job definition.\n */\n job_job?: string;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job definition.\n */\n job_name?: string;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job definition.\n */\n job_version?: string;\n /** The data and time (UTC) the instance was launched\n */\n launched: string;\n /** The Instance name\n */\n name: string;\n /** The instance's known outputs, a JSON string defining a map of all the outputs. Typically applied only to JOB application types\n */\n outputs?: string;\n /** The application instance owner, the person who launched the application and is the only user than can stop it.\n */\n owner: string;\n /** The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparing to run, is `PENDING`. The instance is running when the phase is `RUNNING`. `COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccessfully.\n */\n phase: InstanceSummaryPhase;\n /** The Project the instances is running in.\n */\n project_id: string;\n /** The time the instance has spent running in the cluster. It's a string representation of a Python `timedelta`` object, e.g. `0:12:32` for a run-time of 12 minutes and 32 seconds. The run-time must be considered as an _estimate_ until the instance has stopped and the instance is only considered to be running once `started` has been set.\n */\n run_time: string;\n /** The data and time (UTC) the instance started running\n */\n started?: string;\n /** The data and time (UTC) the instance stopped running\n */\n stopped?: string;\n /** For applications (not Jobs) this is the URL the application has exposed. It will only be available when the application has started.\n */\n url?: string;\n}\n\nexport interface FileStat {\n /** The date and time (UTC) of the last modification\n */\n modified: string;\n /** The size of the file in bytes\n */\n size: number;\n}\n\nexport interface FilePathFile {\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The file's Dataset ID (if the file belongs to a Dataset)\n */\n dataset_id?: string;\n /** The file's Dataset version (if the file belongs to a Dataset)\n */\n dataset_version?: number;\n /** The ID of the file (if the file belongs to a Dataset)\n */\n file_id?: string;\n /** The file name\n */\n file_name: string;\n /** Whether the file is immutable (read-only)\n */\n immutable?: boolean;\n /** The file's MIME type\n */\n mime_type?: string;\n /** The file's owner\n */\n owner: string;\n stat: FileStat;\n}\n\nexport interface JobExchangeRateSummary {\n collection: string;\n id: number;\n job: string;\n rate?: string;\n version: string;\n}\n\nexport interface ApplicationExchangeRateSummary {\n id: string;\n rate?: string;\n}\n\nexport interface ExchangeRateDetail {\n comment?: string;\n created: string;\n id: number;\n rate: string;\n user_id: string;\n}\n\n/**\n * The processing stage. 
When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type DatasetVersionSummaryProcessingStage = typeof DatasetVersionSummaryProcessingStage[keyof typeof DatasetVersionSummaryProcessingStage];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetVersionSummaryProcessingStage = {\n COPYING: 'COPYING',\n FAILED: 'FAILED',\n FORMATTING: 'FORMATTING',\n LOADING: 'LOADING',\n DELETING: 'DELETING',\n DONE: 'DONE',\n} as const;\n\n/**\n * The dictionary of label/value pairs\n\n */\nexport type DatasetVersionSummaryLabels = { [key: string]: unknown };\n\nexport interface DatasetVersionSummary {\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The name of the Pod handling the creation of the Dataset\n */\n creator_pod_name?: string;\n /** If this is a Deleted dataset this is the date and time the dataset was deleted (an ISO-8601 formatted string in UTC)\n */\n deleted?: string;\n /** If this is a Deleted dataset this is the username of the user that deleted the Dataset\n */\n deleting_user?: string;\n /** The filename of the Dataset\n */\n file_name: string;\n /** Any extra variables passed in during the upload and handed to the format-support container.\n */\n format_extra_variables?: string;\n /** The dictionary of label/value pairs\n */\n labels?: DatasetVersionSummaryLabels;\n /** The owner of the Dataset version\n */\n owner?: string;\n /** The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage: DatasetVersionSummaryProcessingStage;\n /** The list of Project's the Dataset is attached to\n */\n projects: string[];\n /** The date and time the Dataset was uploaded (an ISO-8601 formatted string in UTC)\n */\n published: string;\n /** The size, in bytes, of the formatted Dataset\n */\n size?: number;\n /** The source of the Dataset - typically the name of the Dataset that was uploaded or a URL reference\n */\n source_ref: string;\n /** The Dataset MIME type\n */\n type: string;\n /** The version of the dataset\n */\n version: number;\n}\n\nexport interface DatasetVersionProjectFile {\n files: string[];\n project: string;\n project_name: string;\n}\n\n/**\n * The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). 
A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type DatasetVersionDetailProcessingStage = typeof DatasetVersionDetailProcessingStage[keyof typeof DatasetVersionDetailProcessingStage];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetVersionDetailProcessingStage = {\n COPYING: 'COPYING',\n FAILED: 'FAILED',\n FORMATTING: 'FORMATTING',\n LOADING: 'LOADING',\n DELETING: 'DELETING',\n DONE: 'DONE',\n} as const;\n\n/**\n * The dictionary of label/value pairs\n\n */\nexport type DatasetVersionDetailLabels = { [key: string]: unknown };\n\nexport interface DatasetVersionDetail {\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The name of the Pod handling the creation of the Dataset\n */\n creator_pod_name?: string;\n /** If this is a Deleted dataset this is the date and time the dataset was deleted (an ISO-8601 formatted string in UTC)\n */\n deleted?: string;\n /** If this is a Deleted dataset this is the username of the user that deleted the Dataset\n */\n deleting_user?: string;\n /** The filename of the Dataset\n */\n file_name: string;\n /** Any extra variables passed in during the upload and handed to the format-support container.\n */\n format_extra_variables?: string;\n /** The dictionary of label/value pairs\n */\n labels?: DatasetVersionDetailLabels;\n /** The owner of the Dataset version\n */\n owner: string;\n /** The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage: DatasetVersionDetailProcessingStage;\n /** The list of\n */\n project_files: DatasetVersionProjectFile[];\n /** The list of Project's the Dataset is attached to\n */\n projects: string[];\n /** The date and time the Dataset was uploaded (an ISO-8601 formatted string in UTC)\n */\n published: string;\n /** The size, in bytes, of the formatted Dataset\n */\n size?: number;\n /** The source of the Dataset - typically the name of the Dataset that was uploaded or a URL reference\n */\n source_ref: string;\n /** The Dataset MIME type\n */\n type: string;\n /** The version of the dataset\n */\n version: number;\n}\n\nexport interface DatasetSummary {\n /** The Dataset ID\n */\n dataset_id: string;\n /** The list of editors\n */\n editors: string[];\n /** The set of separate versions of the Dataset\n */\n versions: DatasetVersionSummary[];\n}\n\nexport interface DatasetDetail {\n /** The number of datasets\n */\n count: number;\n /** The Dataset ID\n */\n dataset_id: string;\n /** The list of editors\n */\n editors: string[];\n /** The owner of the Dataset\n */\n owner: string;\n /** The set of separate versions of the Dataset\n */\n versions: DatasetVersionDetail[];\n}\n\nexport interface ApplicationSummary {\n /** The application unique reference, the value of the Kubernetes **Custom Resource Definition** `metadata.name` property\n */\n application_id: string;\n /** The application group\n */\n group?: string;\n /** The application name, the value of the Kubernetes **Custom Resource Definition** `spec.names.kind` property\n */\n kind: string;\n /** The application's latest version as declared in The **Custom Resource Definition**\n */\n latest_version: string;\n}\n\nexport interface ApplicationImageVariant {\n image: string;\n name: string;\n}\n\nexport interface ApplicationImageVariants {\n 
public?: ApplicationImageVariant[];\n}\n\n/**\n * The REST method used. GET methods are not logged\n\n */\nexport type ApiLogDetailMethod = typeof ApiLogDetailMethod[keyof typeof ApiLogDetailMethod];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const ApiLogDetailMethod = {\n DELETE: 'DELETE',\n PATCH: 'PATCH',\n POST: 'POST',\n PUT: 'PUT',\n} as const;\n\nexport interface ApiLogDetail {\n /** The date/time the API call began\n */\n began: string;\n /** A JSON string representing the in-body properties used in the call\n */\n body?: string;\n /** The REST method approximate execution time (nS) in a humanised form, where 7969400 is presented as 7,969,400\n */\n duration_ns?: string;\n /** Not all API calls using your user ID may have been executed by you, In cases where an administrator has executed a REST API call on your behalf, their user ID will be revealed using this property\n */\n impersonator?: string;\n /** The REST method used. GET methods are not logged\n */\n method: ApiLogDetailMethod;\n /** A JSON string representing the in-query properties used in the call\n */\n params?: string;\n /** The API path used, e.g. `/dataset`\n */\n path: string;\n /** The HTTP response status code\n */\n status_code?: number;\n}\n\nexport interface ModeGetResponse {\n /** The Data Manager mode. The mode determines what features are available through the API. There are two modes, DEVELOPMENT, and PRODUCTION. In DEVELOPMENT mode some destructive operation are permitted (for example the removal of Job Definitions)\n */\n mode: string;\n}\n\nexport interface VersionGetResponse {\n /** The Data Manager version. This is guaranteed to be a valid semantic version for official (tagged) images. The version value format for unofficial images is a string but otherwise undefined\n */\n version: string;\n}\n\nexport interface UsersGetResponse {\n count: number;\n /** A list of Users that have used the Data Manager\n */\n users: UserSummary[];\n}\n\nexport interface UserApiLogGetResponse {\n /** A list of API loc call records, with the oldest record first in the list\n */\n api_log: ApiLogDetail[];\n count: number;\n}\n\nexport interface UserActivityDetailPeriod {\n /** A list of dates where the API has been used during the monitoring period. Dates are returned if when the activity is not 100% and active dates are present. Dates are listed in reverse chronological order (i.e. 
the most recent first)\n */\n active_dates?: string[];\n /** The number of days the API has been used\n */\n active_days: number;\n /** Active days, as a percentage, over the monitoring period.\n */\n activity: string;\n /** The number of days the API has not been used\n */\n inactive_days: number;\n /** The period over which the activity is monitored\n */\n monitoring_period: string;\n}\n\nexport interface UserActivityDetail {\n period_a: UserActivityDetailPeriod;\n period_b?: UserActivityDetailPeriod;\n /** The total percentage activity since first seen\n */\n total_activity: string;\n /** The total number of days active since first seen\n */\n total_days_active: number;\n /** The total number of days inactive since first seen\n */\n total_days_inactive: number;\n /** The total number of days since the user was first seen, including the day the user was first seen\n */\n total_days_since_first_seen: number;\n}\n\nexport type UserAccountGetResponse = UserAccountDetail;\n\nexport interface TypesGetResponse {\n count: number;\n /** A list of available MIME types\n */\n types: TypeSummary[];\n}\n\nexport interface TasksGetResponse {\n count: number;\n /** A list of Tasks\n */\n tasks: TaskSummary[];\n}\n\n/**\n * The purpose of the task. Tasks are typically executed for the purpose of Dataset processing, attaching as Project Files or Application Instance execution. Other fields in this object are only valid if the purpose is known (i.e. is not `UNKNOWN`)\n\n */\nexport type TaskGetResponsePurpose = typeof TaskGetResponsePurpose[keyof typeof TaskGetResponsePurpose];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskGetResponsePurpose = {\n DATASET: 'DATASET',\n FILE: 'FILE',\n INSTANCE: 'INSTANCE',\n PROJECT: 'PROJECT',\n} as const;\n\nexport interface TaskGetResponse {\n /** The date and time the task was created\n */\n created: string;\n /** True if the task has run to completion. If the task finished successfully the `exit_code` will be zero.\n */\n done: boolean;\n /** A (possibly empty) list of application events. The oldest event occupies the first position in the list.\n */\n events?: TaskEvent[];\n /** Present when `done` and zero if the task finished successfully.\n */\n exit_code?: number;\n /** If a container image is launched by the task the image name is available here\n */\n image?: string;\n /** Is the Task Purpose is `INSTANCE`, and the instance was given a `specification` the specification can be found here. For **Applications** the specification is returned verbatim.\n */\n instance_specification?: string;\n /** The purpose of the task. Tasks are typically executed for the purpose of Dataset processing, attaching as Project Files or Application Instance execution. Other fields in this object are only valid if the purpose is known (i.e. is not `UNKNOWN`)\n */\n purpose: TaskGetResponsePurpose;\n /** The identity of the purpose, where available. This will be the Dataset UUID if the purpose of the task is/was for Dataset processing.\n */\n purpose_id: string;\n /** The version number, relating to the object under control. For Datasets this will be the Dataset version.\n */\n purpose_version?: number;\n /** True if the Task relates to an object removal, i.e. 
a DELETE\n */\n removal?: boolean;\n /** A (possibly empty) list of application states, the oldest state occupies the first position in the list.\n */\n states?: TaskState[];\n}\n\nexport interface ServiceErrorsGetResponse {\n /** The number of service errors\n */\n count: number;\n /** A list of service errors\n */\n service_errors: ServiceErrorSummary[];\n}\n\nexport interface ProjectsGetResponse {\n count: number;\n projects: ProjectDetail[];\n}\n\nexport interface ProjectPostResponse {\n /** The project identity\n */\n project_id: string;\n}\n\nexport type ProjectGetResponse = ProjectDetail;\n\nexport type ProjectDeleteResponse = TaskIdentity;\n\nexport interface JobsGetResponse {\n count: number;\n jobs: JobSummary[];\n}\n\n/**\n * The optional container image type. Typically a single-container `SIMPLE` (where only one container runs) or a workflow where multiple container images can be spawned (like `NEXTFLOW`)\n\n */\nexport type JobGetResponseImageType = typeof JobGetResponseImageType[keyof typeof JobGetResponseImageType];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const JobGetResponseImageType = {\n SIMPLE: 'SIMPLE',\n NEXTFLOW: 'NEXTFLOW',\n} as const;\n\nexport interface JobGetResponse {\n application: ApplicationSummary;\n /** The Job's category\n */\n category?: string;\n /** The Job collection\n */\n collection: string;\n /** The Job command (usually encoded)\n */\n command: string;\n /** The Job command encoding\n */\n command_encoding: string;\n /** English description of the job\n */\n description?: string;\n /** True if disabled. Disabled Jobs cannot be executed. If disabled a reason will be found in `disabled_reason`\n */\n disabled: boolean;\n /** A reason why the Job has been disabled.\n */\n disabled_reason?: string;\n /** A suggested remedy that accompanies the \"disabled_reason\". This provides the client with advice on how to avoid the reason that the Job's been disabled, for reasons that can be avoided (not all are).\n\nWhere there is no remedy for a given reason the remedy will be \"There is no remedy\".\n */\n disabled_remedy?: string;\n /** A URL linking to the Job documentation\n */\n doc_url?: string;\n exchange_rate: string;\n /** The Job's unique ID\n */\n id: number;\n /** The name of the container image that houses the job\n */\n image_name: string;\n /** The directory where the project volume will be mounted. The root path to the project files\n */\n image_project_directory: string;\n /** The container image tag\n */\n image_tag: string;\n /** The optional container image type. Typically a single-container `SIMPLE` (where only one container runs) or a workflow where multiple container images can be spawned (like `NEXTFLOW`)\n */\n image_type?: JobGetResponseImageType;\n /** The directory used as the container image working directory (if defined)\n */\n image_working_directory?: unknown;\n /** The Job name, unique within a given collection\n */\n job: string;\n /** The list of keywords assigned to the Job\n */\n keywords?: string[];\n /** A list of Account Server \"required_assets\" that cannot be found. If assets cannot be found the user will receive a \"disabled_reason\" that should explain the problem.\n */\n missing_assets?: string[];\n /** The Job's descriptive name\n */\n name: string;\n replaced_by?: JobReplacements;\n replaces?: JobReplacements;\n /** A list of Account Server assets names required to run the Job. 
You need access to these assets in order to run the Job\n */\n required_assets: string[];\n variables?: JobVariables;\n /** The Job version\n */\n version: string;\n}\n\nexport interface InventoryProjectDetail {\n id: string;\n name: string;\n unit_id: string;\n}\n\nexport interface InventoryDatasetDetail {\n filename: string;\n id: string;\n unit_id: string;\n version: number;\n}\n\nexport type InventoryUserDetailProjects = {\n administrator: InventoryProjectDetail[];\n editor: InventoryProjectDetail[];\n observer: InventoryProjectDetail[];\n};\n\nexport type InventoryUserDetailDatasets = {\n editor?: InventoryDatasetDetail[];\n owner?: InventoryDatasetDetail[];\n};\n\nexport interface InventoryUserDetail {\n activity: UserActivityDetail;\n datasets: InventoryUserDetailDatasets;\n f_uid: number;\n first_seen: string;\n last_seen_date: string;\n projects: InventoryUserDetailProjects;\n username: string;\n}\n\nexport type InstanceTaskPurpose = typeof InstanceTaskPurpose[keyof typeof InstanceTaskPurpose];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceTaskPurpose = {\n CREATE: 'CREATE',\n DELETE: 'DELETE',\n} as const;\n\nexport interface InstanceTask {\n id: string;\n purpose: InstanceTaskPurpose;\n}\n\nexport interface InstancesGetResponse {\n count: number;\n instances: InstanceSummary[];\n}\n\n/**\n * The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparing to run, is `PENDING`. The instance is running when the phase is `RUNNING`. `COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccessfully.\n\n */\nexport type InstanceGetResponsePhase = typeof InstanceGetResponsePhase[keyof typeof InstanceGetResponsePhase];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceGetResponsePhase = {\n COMPLETED: 'COMPLETED',\n CRASH_LOOP_BACKOFF: 'CRASH_LOOP_BACKOFF',\n FAILED: 'FAILED',\n IMAGE_PULL_BACKOFF: 'IMAGE_PULL_BACKOFF',\n PENDING: 'PENDING',\n RUNNING: 'RUNNING',\n SUCCEEDED: 'SUCCEEDED',\n UNKNOWN: 'UNKNOWN',\n} as const;\n\n/**\n * The Job container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type InstanceGetResponseJobImageType = typeof InstanceGetResponseJobImageType[keyof typeof InstanceGetResponseJobImageType];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceGetResponseJobImageType = {\n SIMPLE: 'SIMPLE',\n NEXTFLOW: 'NEXTFLOW',\n} as const;\n\n/**\n * The type of Application, which can be a `job` or an `application`\n\n */\nexport type InstanceGetResponseApplicationType = typeof InstanceGetResponseApplicationType[keyof typeof InstanceGetResponseApplicationType];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceGetResponseApplicationType = {\n APPLICATION: 'APPLICATION',\n JOB: 'JOB',\n} as const;\n\nexport interface InstanceGetResponse {\n /** The application ID\n */\n application_id: string;\n /** The optional application specification. For **Applications** this is returned verbatim. 
For **Jobs** additional material will be found.\n */\n application_specification?: string;\n /** The type of Application, which can be a `job` or an `application`\n */\n application_type: InstanceGetResponseApplicationType;\n /** The application version\n */\n application_version: string;\n /** True if the instance is archived (protected from automatic deletion)\n */\n archived: boolean;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The accumulated coins accrued by the running Job. Coins are calculated based on the `cost` multiplied by the `launch_exchange_rate`. This is a string representation of a Decimal value, e.g. `'16.4'`\n */\n coins?: string;\n /** Where available, this is the number of hours that the JOb would take if the collateral Pods had access to only one CPU core. Collateral Pods are all the pods created by the Job that are in addition to the main (controlling) Pod. A value of `'0.5'` implies the job would have run in 30 minutes on 1 core. The minimum time resolution is 0.001 (3.6 seconds).\n */\n collateral_cpu_hours?: string;\n /** Where available, the number of collateral Pods spawned by the instance, typically used by Job instances. Collateral Pods are all the pods created by the Job that are in addition to the main (controlling) Pod. For `NEXTFLOW` jobs this is a count of the number of Task pods spawned.\n */\n collateral_pod_count?: number;\n /** The accumulated cost accrued by the running Job. Depending on the the Job, this is either known when the Job completes or may change as the Job runs. This is a string representation of a Decimal value, e.g. `'32.8'`\n */\n cost?: string;\n /** An optional error message, used to report underlying problems.\n */\n error_message?: string;\n /** Set if the instance output file permissions are expected to be fixed by the Data Manager when the Instance completes.\n */\n fix_permissions?: boolean;\n /** Set if the instance has a valid callback token. Instances with a valid token allow users to read files form the project without authentication\n */\n has_valid_callback_token: boolean;\n /** If the instance relates to a job, this will be the job collection, as defined in the original collection's job definition.\n */\n job_collection?: string;\n /** The Job definition's unique ID\n */\n job_id?: number;\n /** The Job container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n job_image_type?: InstanceGetResponseJobImageType;\n /** If the instance relates to a job, this will be the job, as defined in the original collection's job definition.\n */\n job_job?: string;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job definition.\n */\n job_name?: string;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job definition.\n */\n job_version?: string;\n /** The cost exchange rate that applies to the Job at the time of launch. This is a string representation of a Decimal, e.g. 
`'0.5'`\n */\n launch_exchange_rate?: string;\n /** The date and time the instance was started, an ISO-8601 format string.\n */\n launched: string;\n /** The application name\n */\n name: string;\n /** The JSON string representation of the JobDefinition's outputs\n */\n outputs?: string;\n /** The application owner, the person who launched the application and is the only user than can stop it.\n */\n owner: string;\n /** The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparing to run, is `PENDING`. The instance is running when the phase is `RUNNING`. `COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccessfully.\n */\n phase: InstanceGetResponsePhase;\n /** The Project the Instance is running in\n */\n project_id: string;\n /** The time the instance has spent running in the cluster. It's a string representation of a Python timedelta object, e.g. `0:12:32` for a run-time of 12 minutes and 32 seconds. The run-time must be considered as an _estimate_ until the instance has stopped and the instance is only considered to be running once `started` has been set.\n */\n run_time: string;\n /** The date and time the instance started running in the cluster. This is typically close to the launch time but contention may mean the instance starts only when resources are available.\n */\n started?: string;\n /** The date and time the instance stopped, an ISO-8601 format string.\n */\n stopped?: string;\n /** A list of Tasks related to the application. Tasks are responsible for creating and deleting the instance. You should fins at least one, assuming the corresponding task has not been deleted.\n */\n tasks: InstanceTask[];\n /** The application endpoint\n */\n url?: string;\n}\n\nexport type GetExchangeRatesResponseId = number | string;\n\nexport interface GetExchangeRatesResponse {\n count: number;\n exchange_rates: ExchangeRateDetail[];\n id: GetExchangeRatesResponseId;\n}\n\nexport type GetAllExchangeRatesResponseExchangeRatesItem = ApplicationExchangeRateSummary | JobExchangeRateSummary;\n\nexport interface GetAllExchangeRatesResponse {\n count: number;\n exchange_rates: GetAllExchangeRatesResponseExchangeRatesItem[];\n only_undefined: boolean;\n}\n\nexport interface FilesGetResponse {\n /** The number of files in the Project path\n */\n count: number;\n /** The dataset identity (not its name). A unique reference assigned automatically when uploaded\n */\n files: FilePathFile[];\n /** The project path\n */\n path: string;\n /** Sub-directories in the current path\n */\n paths: string[];\n /** The project\n */\n project_id: string;\n}\n\nexport interface FilePostResponse {\n /** The Project File identity, assigned automatically when a Dataset is added to a Project\n */\n file_id: string;\n /** The name of the File that will appear in the Project\n */\n file_name: string;\n /** The path to the file in the Project, relative to the volume root (mount point). Files in the root of the project will have a path value of '/'\n */\n file_path: string;\n /** The File task identity. The task assigned to convert and attach the Dataset File to the Project\n */\n task_id: string;\n}\n\nexport interface InstancePostResponse {\n /** A token that can be used to access data in the project without further authentication. The token expires automatically or if the user revokes it. 
The token is only provided if asked for when the instance is launched.\n */\n callback_token?: string;\n /** The instance expanded command. Applies only to Job instances.\n */\n command?: string;\n /** The application instance identity (not its name). Assigned automatically when created\n */\n instance_id: string;\n /** The instance task identity. The task assigned to process the instance\n */\n task_id: string;\n}\n\nexport interface InstanceDryRunPostResponse {\n /** A token that can be used to access data in the project without further authentication. The token expires automatically or if the user revokes it.\n */\n callback_token?: string;\n /** The instance expanded command. Applies only to Job instances.\n */\n command?: string;\n}\n\nexport type InstanceDeleteResponse = TaskIdentity;\n\nexport interface DatasetsGetResponse {\n count: number;\n datasets: DatasetSummary[];\n}\n\nexport type DatasetVersionsGetResponse = DatasetDetail;\n\nexport type DatasetVersionDeleteResponse = TaskIdentity;\n\n/**\n * The Metadata type (an object)\n\n */\nexport type DatasetSchemaGetResponseType = typeof DatasetSchemaGetResponseType[keyof typeof DatasetSchemaGetResponseType];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetSchemaGetResponseType = {\n object: 'object',\n} as const;\n\nexport interface DatasetSchemaGetResponse {\n /** The Metadata description\n */\n description: string;\n /** Required properties\n */\n required: string[];\n /** The Metadata title\n */\n title: string;\n /** The Metadata type (an object)\n */\n type: DatasetSchemaGetResponseType;\n [key: string]: unknown;\n }\n\nexport interface DatasetMetaGetResponse {\n /** The Metadata's annotations\n */\n annotations: unknown[];\n /** The date and time of creation\n */\n created: string;\n /** The user who created the Metadata\n */\n created_by: string;\n /** The Metadata description\n */\n dataset_id: string;\n /** The Metadata title\n */\n dataset_name: string;\n /** The Metadata type (an object)\n */\n description: string;\n /** The Metadata's labels\n */\n labels: unknown[];\n /** The date and time it was last updated\n */\n last_updated: string;\n /** The Metadata version\n */\n metadata_version: string;\n [key: string]: unknown;\n }\n\nexport interface DatasetDigestGetResponse {\n /** The Dataset matching the digest\n */\n dataset_id: string;\n /**\n * The Dataset version\n\n * @minimum 1\n */\n dataset_version: number;\n}\n\nexport interface DatasetPutPostResponse {\n /** The dataset identity (not its name). Assigned automatically when uploaded\n */\n dataset_id: string;\n /** The dataset identity version. Assigned automatically when uploaded\n */\n dataset_version: number;\n /** The dataset task identity. The task assigned to process the dataset\n */\n task_id: string;\n}\n\nexport interface ApplicationsGetResponse {\n /** A list of installed applications, which are application-compliant Kubernetes \"operators\"\n */\n applications: ApplicationSummary[];\n /** The number of known Applications\n */\n count: number;\n}\n\nexport interface ApplicationGetResponse {\n /** The Application COST exchange rate\n */\n exchange_rate: string;\n /** The application group\n */\n group: string;\n /** The Application's unique ID\n */\n id: string;\n image_variants?: ApplicationImageVariants;\n /** A list of instances of the application\n */\n instances: string[];\n /** The name (kind) of the application\n */\n kind: string;\n /** The application specification template. 
Used when creating application instances\n */\n template: string;\n /** The list of available versions\n */\n versions: string[];\n}\n\nexport interface AdminUsersGetResponse {\n /** The number of Users that have used the Data Manager\n */\n count: number;\n /** A list of Users that have used the Data Manager\n */\n users: UserDetail[];\n}\n\nexport type AdminUserPutResponse = UserAccountDetail;\n\nexport interface AdminJobManifestLoadPutResponse {\n /** Errors raised during the processing of individual Job Definitions\n */\n job_definition_failures?: string[];\n /** Errors raised during Job Definition file processing\n */\n job_definition_file_failures?: string[];\n /** Errors raised during Manifests file processing\n */\n manifest_file_failures?: string[];\n /** The number of Job Definitions inspected\n */\n num_job_definition_files_inspected: number;\n /** The number of Jobs inspected\n */\n num_jobs_inspected: number;\n /** The number of Jobs loaded or changed\n */\n num_jobs_loaded: number;\n /** The number of Jobs removed\n */\n num_jobs_purged?: number;\n /** The number of Job Manifests inspected\n */\n num_manifest_files_inspected: number;\n /** True if there are no errors, false otherwise\n */\n status: boolean;\n}\n\nexport interface InventoryUserGetResponse {\n today: string;\n /** The list of known Users\n */\n users: InventoryUserDetail[];\n}\n\nexport interface AdminJobManifestGetResponse {\n /** The number of known Job Manifests\n */\n count: number;\n /** The list of known Job manifests\n */\n job_manifests: JobManifestDetail[];\n}\n\nexport interface AccountServerGetNamespaceResponse {\n /** The configured Account Server namespace, which will be an empty string if one is not configured. The AS API is expected as the service `as-api` in this namespace.\n */\n namespace: string;\n}\n\nexport interface AccountServerGetRegistrationResponse {\n /** @minimum 1 */\n merchant_id: number;\n name: string;\n registered: string;\n}\n\nexport interface DmError {\n /** Brief error text that can be presented to the user\n */\n error: 
string;\n}\n\n"],"mappings":";;;;;;;;AA4jBO,IAAM,oBAAoB;AAAA,EAC/B,SAAS;AAAA,EACT,MAAM;AAAA,EACN,UAAU;AAAA,EACV,SAAS;AACX;AAuaO,IAAM,kCAAkC;AAAA,EAC7C,QAAQ;AACV;AA6CO,IAAM,6BAA6B;AAAA,EACxC,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AACR;AA2CO,IAAM,iBAAiB;AAAA,EAC5B,SAAS;AAAA,EACT,SAAS;AAAA,EACT,OAAO;AAAA,EACP,SAAS;AAAA,EACT,SAAS;AACX;AA0BO,IAAM,iBAAiB;AAAA,EAC5B,UAAU;AAAA,EACV,OAAO;AAAA,EACP,SAAS;AAAA,EACT,MAAM;AAAA,EACN,OAAO;AACT;AAqBO,IAAM,8BAA8B;AAAA,EACzC,UAAU;AAAA,EACV,OAAO;AAAA,EACP,SAAS;AACX;AA0IO,IAAM,sBAAsB;AAAA,EACjC,QAAQ;AAAA,EACR,UAAU;AACZ;AAmGO,IAAM,uBAAuB;AAAA,EAClC,WAAW;AAAA,EACX,oBAAoB;AAAA,EACpB,QAAQ;AAAA,EACR,oBAAoB;AAAA,EACpB,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AACX;AAUO,IAAM,8BAA8B;AAAA,EACzC,QAAQ;AAAA,EACR,UAAU;AACZ;AAUO,IAAM,iCAAiC;AAAA,EAC5C,aAAa;AAAA,EACb,KAAK;AACP;AAsJO,IAAM,uCAAuC;AAAA,EAClD,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AACR;AAsEO,IAAM,sCAAsC;AAAA,EACjD,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AACR;AAyHO,IAAM,qBAAqB;AAAA,EAChC,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,MAAM;AAAA,EACN,KAAK;AACP;AAkHO,IAAM,yBAAyB;AAAA,EACpC,SAAS;AAAA,EACT,MAAM;AAAA,EACN,UAAU;AAAA,EACV,SAAS;AACX;AA2EO,IAAM,0BAA0B;AAAA,EACrC,QAAQ;AAAA,EACR,UAAU;AACZ;AAiHO,IAAM,sBAAsB;AAAA,EACjC,QAAQ;AAAA,EACR,QAAQ;AACV;AAqBO,IAAM,2BAA2B;AAAA,EACtC,WAAW;AAAA,EACX,oBAAoB;AAAA,EACpB,QAAQ;AAAA,EACR,oBAAoB;AAAA,EACpB,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AACX;AAUO,IAAM,kCAAkC;AAAA,EAC7C,QAAQ;AAAA,EACR,UAAU;AACZ;AAUO,IAAM,qCAAqC;AAAA,EAChD,aAAa;AAAA,EACb,KAAK;AACP;AA+LO,IAAM,+BAA+B;AAAA,EAC1C,QAAQ;AACV;","names":[]}
1
+ {"version":3,"sources":["../src/data-manager-api.schemas.ts"],"sourcesContent":["// @ts-nocheck\n/**\n * Generated by orval v7.2.0 🍺\n * Do not edit manually.\n * Dataset Manager API\n * The Dataset Manager API service.\n\nA service that allows *registered* users to make **Datasets** and associated **Metadata** available to **Applications** and **Jobs** using **Projects** and **Files**.\n\n * OpenAPI spec version: 3.3\n */\nexport type GetUserInventoryParams = {\n/**\n * An Organisation identity\n */\norg_id?: QOrgIdParameter;\n/**\n * A Unit identity\n */\nunit_id?: QUnitIdParameter;\n/**\n * A comma-separated list of usernames\n\n */\nusernames?: QUsernamesParameter;\n};\n\nexport type AdminDeleteJobManifestParams = {\n/**\n * Purge unreferenced Jobs\n */\npurge?: QPurgeParameter;\n};\n\nexport type AdminGetUsersParams = {\n/**\n * Maximum days a user has been idle (has not used the API). If you specify `2` and it's Monday then users who have not used the API since Saturday will be returned.\n\n */\nidle_days?: QIdleDaysParameter;\n/**\n * Minimum days a user has been active (has used the API). If you specify `2` and it's Monday then users who have used the API on Saturday or later will be returned.\n\n */\nactive_days?: QActiveDaysParameter;\n};\n\nexport type AdminGetServiceErrorsParams = {\n/**\n * Set to include acknowledged items\n\n */\ninclude_acknowledged?: QIncludeAcknowledgedParameter;\n};\n\nexport type GetUserApiLogParams = {\n/**\n * A from (inclusive) date-time. If provided no API calls prior to this will be returned. UTC is assumed if no timezone is provided\n\n */\nfrom?: QFromParameter;\n/**\n * An until (exclusive) date-time. If provided only API calls made before this will be returned. UTC is assumed if no timezone is provided\n\n */\nuntil?: QUntilParameter;\n};\n\nexport type GetUserAccountParams = {\n/**\n * Set, if you're an admin, to call the endpoint without impersonation\n\n */\ndo_not_impersonate?: QDoNotImpersonateParameter;\n};\n\nexport type PatchInstanceParams = {\n/**\n * True to archive the instance\n */\narchive?: QInstanceArchiveParameter;\n};\n\nexport type GetInstancesParams = {\n/**\n * A Project identity\n */\nproject_id?: QProjectIdParameter;\n};\n\nexport type GetJobExchangeRatesParams = {\n/**\n * Set to get current\n\n */\ncurrent?: QCurrentParameter;\n};\n\nexport type GetAllJobExchangeRatesParams = {\n/**\n * Only return records where the exchange rate is undefined\n\n */\nonly_undefined?: QOnlyUndefinedParameter;\n};\n\nexport type GetJobParams = {\n/**\n * A Project identity\n */\nproject_id?: QProjectIdParameter;\n};\n\nexport type GetJobByVersionParams = {\n/**\n * The Collection for a Job, i.e. \"im-test\"\n\n */\ncollection: QJobCollectionParameter;\n/**\n * The Job, i.e. \"coin-test\"\n\n */\njob: QJobJobParameter;\n/**\n * The version of a Job, i.e. \"1.0.0\"\n\n */\nversion: QJobVersionParameter;\n/**\n * A Project identity\n */\nproject_id?: QProjectIdParameter;\n};\n\nexport type GetJobsParams = {\n/**\n * A Project identity\n */\nproject_id?: QProjectIdParameter;\n};\n\nexport type GetApplicationExchangeRatesParams = {\n/**\n * Set to get current\n\n */\ncurrent?: QCurrentParameter;\n};\n\nexport type GetAllApplicationExchangeRatesParams = {\n/**\n * Only return records where the exchange rate is undefined\n\n */\nonly_undefined?: QOnlyUndefinedParameter;\n};\n\nexport type GetTaskParams = {\n/**\n * Maximum number of events to return. 
If provided, can be 1 or more.\n\n */\nevent_limit?: QEventLimitParameter;\n/**\n * The ordinal of a previously received event. If set, only events subsequent to the ordinal provided will be returned. Providing a value of 0 will result in retrieving the first and subsequent events.\n\n */\nevent_prior_ordinal?: QEventPriorOrdinalParameter;\n};\n\nexport type GetTasksParams = {\n/**\n * Set to limit the response to objects relating to the named purpose.\n\n */\npurpose?: QPurposeParameter;\n/**\n * Set true if you want to exclude 'done' tasks, i.e. just see those that are still running.\n\n */\nexclude_done?: QExcludeDoneParameter;\n/**\n * Set true if you want to exclude Tasks related to object removal.\n\n */\nexclude_removal?: QExcludeRemovalParameter;\n/**\n * Set to a dot-separated string of purpose enumerations, i.e. `DATASET`, `FILE`, `INSTANCE`, or `PROJECT`. To exclude file and dataset tasks set this field to `FILE.DATASET`\n\n */\nexclude_purpose?: QExcludePurposeParameter;\n/**\n * A Project identity\n */\nproject_id?: QProjectIdParameter;\n/**\n * An instance callback context string\n */\ninstance_callback_context?: QInstanceCallbackContextParameter;\n};\n\nexport type MoveFileInProjectParams = {\n/**\n * A project file.\n\n */\nfile: QFileParameter;\n/**\n * A project file.\n\n */\ndst_file?: QDstFileParameter;\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\nsrc_path?: QFileSrcPathParameter;\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\ndst_path?: QFileDstPathParameter;\n/**\n * The Project identity\n */\nproject_id: QFileProjectIdParameter;\n};\n\nexport type DeleteUnmanagedFileParams = {\n/**\n * A project file.\n\n */\nfile: QFileParameter;\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\npath?: QFilePathParameter;\n/**\n * The Project identity\n */\nproject_id: QFileProjectIdParameter;\n};\n\nexport type GetFilesParams = {\n/**\n * The Project identity\n */\nproject_id: QFileProjectIdParameter;\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\npath?: QFilePathParameter;\n/**\n * Whether to include hidden files and directories\n */\ninclude_hidden?: QIncludeHiddenParameter;\n};\n\nexport type DeleteDatasetParams = {\n/**\n * Whether to convert Project managed file instances to unmanaged files\n\n */\nkeep_project_files?: QKeepProjectFilesParameter;\n};\n\nexport type GetVersionsParams = {\n/**\n * Whether to include records that are deleted\n */\ninclude_deleted?: QIncludeDeletedParameter;\n};\n\nexport type GetDatasetsParams = {\n/**\n * Whether to include records that are deleted\n */\ninclude_deleted?: QIncludeDeletedParameter;\n/**\n * Filter the datasets by username\n\n */\nusername?: QUsernameParameter;\n/**\n * Filter the datasets by the supplied mime_type.\n\n */\ndataset_mime_type?: QDatasetMimeTypeParameter;\n/**\n * A comma-separated list of owners\n\n */\nowners?: QOwnersParameter;\n/**\n * A comma-separated list of editors\n\n */\neditors?: QEditorsParameter;\n/**\n * JSON string containing a list of label/value pairs for the datasets to be filtered by. If the value is set to null, then only the label is matched. If both the label and value are set, then both are matched. e.g. 
'{\"label1\": null, \"label2\": \"value2\"}'\n\n */\nlabels?: QLabelsParameter;\n};\n\nexport type GetProjectFileWithTokenParams = {\n/**\n * A token\n */\ntoken?: QTokenParameter;\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\npath?: QFilePathParameter;\n/**\n * A project file.\n\n */\nfile: QFileParameter;\n};\n\nexport type GetProjectFileParams = {\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\npath?: QFilePathParameter;\n/**\n * A project file.\n\n */\nfile: QFileParameter;\n};\n\nexport type GetProjectsParams = {\n/**\n * A Project name\n */\nproject_name?: QProjectNameParameter;\n};\n\nexport type MovePathParams = {\n/**\n * The Project identity\n */\nproject_id: QFileProjectIdParameter;\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\nsrc_path?: QFileSrcPathParameter;\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\ndst_path?: QFileDstPathParameter;\n};\n\nexport type DeletePathParams = {\n/**\n * The Project identity\n */\nproject_id: QFileProjectIdParameter;\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\npath?: QFilePathParameter;\n};\n\nexport type CreatePathParams = {\n/**\n * The Project identity\n */\nproject_id: QFileProjectIdParameter;\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\npath?: QFilePathParameter;\n};\n\n/**\n * A comma-separated list of editors\n\n */\nexport type QEditorsParameter = string;\n\n/**\n * Purge unreferenced Jobs\n */\nexport type QPurgeParameter = boolean;\n\n/**\n * A comma-separated list of owners\n\n */\nexport type QOwnersParameter = string;\n\n/**\n * A comma-separated list of usernames\n\n */\nexport type QUsernamesParameter = string;\n\n/**\n * Filter the datasets by username\n\n */\nexport type QUsernameParameter = string;\n\n/**\n * An until (exclusive) date-time. If provided only API calls made before this will be returned. UTC is assumed if no timezone is provided\n\n */\nexport type QUntilParameter = string;\n\n/**\n * A Unit identity\n */\nexport type QUnitIdParameter = string;\n\n/**\n * A token\n */\nexport type QTokenParameter = string;\n\n/**\n * An instance callback context string\n */\nexport type QInstanceCallbackContextParameter = string;\n\n/**\n * A Project name\n */\nexport type QProjectNameParameter = string;\n\n/**\n * A Project identity\n */\nexport type QProjectIdParameter = string;\n\n/**\n * An Organisation identity\n */\nexport type QOrgIdParameter = string;\n\n/**\n * JSON string containing a list of label/value pairs for the datasets to be filtered by. If the value is set to null, then only the label is matched. If both the label and value are set, then both are matched. e.g. '{\"label1\": null, \"label2\": \"value2\"}'\n\n */\nexport type QLabelsParameter = string;\n\n/**\n * Only return records where the exchange rate is undefined\n\n */\nexport type QOnlyUndefinedParameter = boolean;\n\n/**\n * Whether to convert Project managed file instances to unmanaged files\n\n */\nexport type QKeepProjectFilesParameter = boolean;\n\n/**\n * The version of a Job, i.e. 
\"1.0.0\"\n\n */\nexport type QJobVersionParameter = string;\n\n/**\n * The Job, i.e. \"coin-test\"\n\n */\nexport type QJobJobParameter = string;\n\n/**\n * A Job identity\n\n */\nexport type QJobIdParameter = number;\n\n/**\n * The Collection for a Job, i.e. \"im-test\"\n\n */\nexport type QJobCollectionParameter = string;\n\n/**\n * True to archive the instance\n */\nexport type QInstanceArchiveParameter = boolean;\n\n/**\n * Whether to include hidden files and directories\n */\nexport type QIncludeHiddenParameter = boolean;\n\n/**\n * Whether to include records that are deleted\n */\nexport type QIncludeDeletedParameter = boolean;\n\n/**\n * Set to include acknowledged items\n\n */\nexport type QIncludeAcknowledgedParameter = boolean;\n\n/**\n * A from (inclusive) date-time. If provided no API calls prior to this will be returned. UTC is assumed if no timezone is provided\n\n */\nexport type QFromParameter = string;\n\n/**\n * The Project identity\n */\nexport type QFileProjectIdParameter = string;\n\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\nexport type QFileSrcPathParameter = string;\n\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\nexport type QFileDstPathParameter = string;\n\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\nexport type QFilePathParameter = string;\n\n/**\n * A project file.\n\n */\nexport type QDstFileParameter = string;\n\n/**\n * A project file.\n\n */\nexport type QFileParameter = string;\n\n/**\n * Minimum days a user has been active (has used the API). If you specify `2` and it's Monday then users who have used the API on Saturday or later will be returned.\n\n */\nexport type QActiveDaysParameter = number;\n\n/**\n * Maximum days a user has been idle (has not used the API). If you specify `2` and it's Monday then users who have not used the API since Saturday will be returned.\n\n */\nexport type QIdleDaysParameter = number;\n\nexport type QPurposeParameter = typeof QPurposeParameter[keyof typeof QPurposeParameter];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const QPurposeParameter = {\n DATASET: 'DATASET',\n FILE: 'FILE',\n INSTANCE: 'INSTANCE',\n PROJECT: 'PROJECT',\n} as const;\n\n/**\n * Set to a dot-separated string of purpose enumerations, i.e. `DATASET`, `FILE`, `INSTANCE`, or `PROJECT`. To exclude file and dataset tasks set this field to `FILE.DATASET`\n\n */\nexport type QExcludePurposeParameter = string;\n\n/**\n * Set true if you want to exclude Tasks related to object removal.\n\n */\nexport type QExcludeRemovalParameter = boolean;\n\n/**\n * Set true if you want to exclude 'done' tasks, i.e. just see those that are still running.\n\n */\nexport type QExcludeDoneParameter = boolean;\n\n/**\n * The ordinal of a previously received event. If set, only events subsequent to the ordinal provided will be returned. Providing a value of 0 will result in retrieving the first and subsequent events.\n\n */\nexport type QEventPriorOrdinalParameter = number;\n\n/**\n * Maximum number of events to return. 
If provided, can be 1 or more.\n\n */\nexport type QEventLimitParameter = number;\n\n/**\n * Filter the datasets by the supplied mime_type.\n\n */\nexport type QDatasetMimeTypeParameter = string;\n\n/**\n * Set, if you're an admin, to call the endpoint without impersonation\n\n */\nexport type QDoNotImpersonateParameter = boolean;\n\n/**\n * Set to get current\n\n */\nexport type QCurrentParameter = boolean;\n\n/**\n * An Application identity\n\n */\nexport type QApplicationIdParameter = string;\n\nexport type UserPatchBodyBody = {\n /** If set to a message the user account is suspended, with the user receiving this message when they try and use the API. A suspended user account can be restored by setting the message to `/restore`\n */\n suspend_message?: string;\n};\n\nexport type UserAccountPatchBodyBody = {\n /** For `admin` accounts, if set the user account is able to read anything, i.e. `GET` API calls (i.e. endpoints that do not change the Data Manager state) behave as though the caller is acting as *everyone*. An `admin` user would set ths parameter in order to browse the system, and then switch to `impersonate` mode in order to change things as the chosen user\n */\n become_admin?: boolean;\n /**\n * For `admin` accounts, if set API calls behave as though the caller is the user being impersonated. To stop impersonating set this to an empty string. To set impersonation to anything other than an empty string you must also set `become_admin`\n\n * @maxLength 80\n * @pattern ^(\\w(?:\\w*(?:[@.-]\\w+)?)*|)$\n */\n impersonate?: string;\n /** If set the user account becomes private, if provided but false the user account becomes public. Public Users show up in user searches\n */\n private?: boolean;\n /** For `admin` accounts, if this is set the account for the user being impersonated is patched, rather then the user's own account. To use this you must have a value for `impersonate`\n */\n use_impersonation?: boolean;\n};\n\nexport type ProjectPostBodyBody = {\n /**\n * @minLength 2\n * @maxLength 80\n */\n name: string;\n /** Whether the project is private. You may not be permitted to make the project private, that will depend on the project product you're using to create the project\n */\n private?: boolean;\n /**\n * The Data Manager *Tier Product ID* you're using to create the Project\n\n * @pattern ^product-[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\n */\n tier_product_id: string;\n};\n\nexport type ProjectPatchBodyBody = {\n /** The new name of the ptojct\n */\n name?: string;\n private?: boolean;\n};\n\nexport type ProjectFilePutBodyBody = {\n /**\n * An alternative filename to use for the uploaded File\n\n * @minLength 1\n * @maxLength 128\n */\n as_filename?: string;\n file: Blob;\n /**\n * The Project path of the file.\n\n * @minLength 1\n * @maxLength 260\n * @pattern ^/.+$|^/$\n */\n path?: string;\n};\n\nexport type ExchangeRatePutBodyBody = {\n /**\n * A brief comment relating to the new rate\n\n * @maxLength 80\n */\n comment?: string;\n /** A decimal value used as the new Exchange Rate. Application _raw_ **costs** are multiplied by this value to covert costs to **coins**. A string is used to avoid rounding errors. 
Internally the value is treated as a Python Decimal.\n */\n rate: string;\n};\n\nexport type JobManifestPutBodyBody = {\n /**\n * Optional URL header values (a JSON string)\n * @pattern ^|{.*}$\n */\n header?: string;\n /**\n * Optional URL parameter values (a JSON string)\n * @pattern ^|{.*}$\n */\n params?: string;\n /** The URL of the Job Manifest */\n url: string;\n};\n\nexport type InstancePostBodyBody = {\n /**\n * A supported application. Applications instances are managed using pre-deployed Kubernetes **Operators**. The application ID is a combination of the operator _plural_ and _group_.\n\nApplication IDs currently supported include `datamanagerjobs.squonk.it` and `jupyternotebooks.squonk.it`\n\n * @minLength 1\n * @maxLength 80\n */\n application_id: string;\n /**\n * The name to use for the instance\n\n * @minLength 2\n * @maxLength 80\n * @pattern ^[A-Za-z0-9]+[A-Za-z0-9-_. ]*[A-Za-z0-9]+$\n */\n as_name: string;\n /**\n * Used in conjunction with the `callback_url` any value provided here will be passed back in the message payload that's delivered to the callback URL. It can be used by the recipient to provide a context that's meaningful\n\n * @minLength 1\n * @maxLength 256\n */\n callback_context?: string;\n /**\n * An optional 22-character **sortuuid** callback token that is supplied by the remote service. If not provided the user can use `generate_callback_token` to have one generated and returned in the response.\n\nSee the Python module's `shortuuid.get_alphabet()` for the full list of permitted characters\n\n * @minLength 22\n * @maxLength 22\n * @pattern ^[2-9A-HJ-NP-Za-km-z]{22}$\n */\n callback_token?: string;\n /** A URL the DM will use to PUT job progress messages as the requested instance runs. Used, at the moment, for Job execution\n */\n callback_url?: string;\n /** A debug value that may be used by the instance.\n\nFor Data Manager **Job** applications setting this to anything other zero ('0') prevents the Job's Pod from being deleted automatically, allowing a developer to inspect the Pod's log for example.\n\nThe behaviour of **Application** instances using this property is undefined. It will depend on whether the application **CRD** handles the Data Manager debug field.\n */\n debug?: string;\n /** If set a callback token will be provided in the response. The token allows files to be downloaded from the instance project and expires after a pre-configured amount of time after the instance is complete or if the user revokes the token.\n\nCaution should be taken using this feature. A 3rd party can access the Project's files without authentication, they just need the token and the project identity.\n\nTokens should therefore be revoked when they're no longer required\n */\n generate_callback_token?: boolean;\n /**\n * The project to attach\n\n * @pattern ^project-[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\n */\n project_id: string;\n /** The instance specification. A JSON string that's application-specific and controls the application's behaviour.\n\nWhen launching a Data Manager **Job** Application you must identify the Job using the properties `collection`, `job` and `version`, e.g. `{\"collection\":\"im-test\",\"job\":\"coin-test\",\"version\":\"1.0.0\"}`\n\nJobs that offer commands will often advertise a series of **inputs** and **options** where the values can be provided using a **variables** map in the specification. 
Something like `\"variables\":{\"x\":7}`.\n\nJobs start in a Job-specific **working directory** but the starting directory for any Job can be adjusted by defining a `sub_path` to the root specification. For example, if you want the Job to start in the path `foo/bar` (inside the Job's built-in working directory) you can add `\"sub_path\":\"foo/bar\"` to the specification. You can only use a sub-path for a Job if the Job defines a working directory and `sub-path` cannot begin or end with a path separator (`/`).\n\nApplications also support `variables`. The Jupyter notebook application supports the definition of the notebook `\"image\"`, `\"cpu\"` and `\"memory\"`. A full Jupyter notebook specification might be `{\"variables\":{\"image\":\"Tensorflow 2.9\",\"cpu\":2,\"memory\":\"4Gi\"}}`, where `memory` is limited to `Gi` as a suffix.\n */\n specification?: string;\n};\n\nexport type FilePostBodyBody = {\n /** The desired Dataset file type (a MIME type). Whether or not the chosen fileType is supported will depend on the Dataset\n */\n as_type: string;\n /** Whether to compress the Dataset File as it's attached. Compression is achieved using gzip, resulting in a File ending `.gz`. By default the file will be compressed\n */\n compress?: boolean;\n /**\n * The Dataset UUID for the File that you intend to attach\n\n * @pattern ^dataset-[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\n */\n dataset_id: string;\n /**\n * The Dataset version to attach\n\n * @minimum 1\n */\n dataset_version: number;\n /** Whether the Dataset File can be modified while in the Project. By default the File cannot be modified\n */\n immutable?: boolean;\n /**\n * A path within the Project to add the File, default is the project root ('/'), the mount-point within the application container. Paths must begin '/'\n\n * @maxLength 260\n * @pattern ^/.+$|^/$\n */\n path?: string;\n /**\n * The Project UUID you're attaching to\n\n * @pattern ^project-[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\n */\n project_id: string;\n};\n\nexport type DatasetPutBodyBody = {\n /**\n * If provided the File becomes a new version of the Dataset named. If not provided this File becomes the first version of a new Dataset, whose ID is returned to you on success.\n\n * @pattern ^(dataset-[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}|)$\n */\n dataset_id?: string;\n /** The MIME type of the Dataset\n */\n dataset_type: string;\n /**\n * The file name of the file in the Project path to load as a new Dataset.\n\n * @minLength 1\n * @maxLength 128\n */\n file_name: string;\n /** Extra variables (text) presented to the Dataset format-support container, which occurs during the upload and database processing stage. The content of the text is Dataset type specific.\n */\n format_extra_variables?: string;\n /**\n * The Project path of the file.\n\n * @minLength 1\n * @maxLength 260\n * @pattern ^/.+$|^/$\n */\n path: string;\n /**\n * The Project the file belongs to\n\n * @pattern ^project-[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\n */\n project_id: string;\n /** If set the post-format database load action is skipped. This means the dataset is uploaded but its molecules are not entered into the Data Manager database. This can save significant time if you're just experimenting with the dataset and do not need the molecules in the database\n */\n skip_molecule_load?: boolean;\n /**\n * The Organisational Unit you want the Dataset to belong to. 
If not supplied the Project Unit is used\n\n * @pattern ^unit-[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\n */\n unit_id?: string;\n};\n\nexport type DatasetVersionMetaPostBodyBody = {\n /** JSON string containing a list of annotations. The format of the labels should match either the Fields Descriptor or Service Execution annotation formats described in the data-manager-metadata library.\n */\n annotations?: string;\n /** JSON string containing a list of parameter changes to the metadata. Only the description is currently allowed.\n */\n meta_properties?: string;\n};\n\nexport type DatasetMetaPostBodyBody = {\n /** JSON string containing a list of labels. The format of the labels should match the label annotation format described in the data-manager-metadata library.\n */\n labels?: string;\n /** JSON string containing a list of parameter changes to the metadata. Only the description is currently allowed.\n */\n meta_properties?: string;\n};\n\nexport type DatasetPostBodyBody = {\n /**\n * An optional new filename to use for the uploaded Dataset. The Dataset will be stored using this name.\n\n * @minLength 1\n * @maxLength 128\n */\n as_filename?: string;\n dataset_file: Blob;\n /**\n * If provided the Dataset becomes a new version of the Dataset named. If not provided this Dataset becomes the first version of a new Dataset, whose ID is returned to you on success.\n\n * @pattern ^(dataset-[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}|)$\n */\n dataset_id?: string;\n /** The MIME type of the Dataset. Values like `chemical/x-mdl-sdfile`, `chemical/x-mdl-molfile`, and `chemical/x-pdb` are permitted. See the **\\/type** endpoint for a full list of types.\n */\n dataset_type: string;\n /** Extra variables (text) presented to the Dataset format-support container, which occurs during the upload and database processing stage. The content of the text is Dataset type specific.\n */\n format_extra_variables?: string;\n /** If set the post-format database load action is skipped. This means the dataset is uploaded but its molecules are not entered into the Data Manager database. This can save significant time if you're just experimenting with the dataset and do not need the molecules in the database\n */\n skip_molecule_load?: boolean;\n /**\n * The Organisational Unit you want the Dataset to belong to\n\n * @pattern ^unit-[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$\n */\n unit_id: string;\n};\n\nexport interface AsAdditionalDataProcessingCharge {\n collateral_cpu_hours?: string;\n collateral_pod_count?: number;\n cost: string;\n cost_scale_factor: string;\n cost_to_coins_er: string;\n error_message?: string;\n instance_id: string;\n instance_name: string;\n job_collection?: string;\n job_job?: string;\n job_version?: string;\n run_time?: string;\n started: string;\n stopped?: string;\n}\n\nexport interface UserSummary {\n /** The user's preferred username\n */\n username: string;\n}\n\nexport interface UserDetail {\n /** For admin accounts, whether the user is acting in an administrative capacity, i.e. acting as everyone\n */\n become_admin?: boolean;\n /** The user's filesystem user uid\n */\n f_uid?: number;\n /** The date and time the user was first seen (an ISO-8601 formatted string in UTC)\n */\n first_seen?: string;\n /** For admin accounts, whether the user is impersonating another user\n */\n impersonate?: string;\n /** The date the user was last seen\n */\n last_seen_date?: string;\n /** Set if the user's account is marked as private. 
Private accounts do not show up against general queries.\n */\n private: boolean;\n /** True if the account is suspended\n */\n suspended?: boolean;\n /** If the account is suspended this typically displays a reason for suspension\n */\n suspension_message?: string;\n /** The user's preferred username\n */\n username: string;\n}\n\nexport interface UserAccountDetail {\n /** Whether the caller has admin privilege */\n caller_has_admin_privilege: boolean;\n /** The Data Manager roles the user has */\n data_manager_roles: string[];\n user: UserDetail;\n}\n\n/**\n * The Schema type (an object)\n\n */\nexport type TypeSummaryFormatterOptionsType = typeof TypeSummaryFormatterOptionsType[keyof typeof TypeSummaryFormatterOptionsType];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TypeSummaryFormatterOptionsType = {\n object: 'object',\n} as const;\n\n/**\n * If present, contains the formatter_options that can be entered in the format_extra_variables field in the POST /dataset api.\n\n */\nexport type TypeSummaryFormatterOptions = {\n /** Required properties\n */\n required: string[];\n /** The title of the Formatter object\n */\n title: string;\n /** The Schema type (an object)\n */\n type: TypeSummaryFormatterOptionsType;\n [key: string]: unknown;\n };\n\nexport interface TypeSummary {\n /** The file's supported file extensions. Each type is limited to a limited number of extensions. For example, SDF files must have the extension `.sdf` (or `.sdf.gz`).\n */\n file_extensions: string[];\n /** The file's type's format-support container image (if set). Types without a format support image cannot be uploaded, but they might be available for use as destination type when a Dataset is added to a Project.\n */\n formatter_image?: string;\n /** If present, contains the formatter_options that can be entered in the format_extra_variables field in the POST /dataset api.\n */\n formatter_options?: TypeSummaryFormatterOptions;\n /** True if Datasets uploaded using this type's support loading of data into the Data Manager data-base.\n */\n formatter_supports_db_load?: boolean;\n /** The File Type MIME\n */\n mime: string;\n}\n\n/**\n * The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type TaskSummaryProcessingStage = typeof TaskSummaryProcessingStage[keyof typeof TaskSummaryProcessingStage];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskSummaryProcessingStage = {\n COPYING: 'COPYING',\n FAILED: 'FAILED',\n FORMATTING: 'FORMATTING',\n LOADING: 'LOADING',\n DELETING: 'DELETING',\n DONE: 'DONE',\n} as const;\n\nexport interface TaskSummary {\n /** The date and time the task was created\n */\n created: string;\n /** True if the task has run to completion. If the task finished successfully the `exit_code` will be zero.\n */\n done: boolean;\n /** Present when `done` and zero if the task finished successfully.\n */\n exit_code?: number;\n /** The Task UUID\n */\n id: string;\n /** If a container image is launched by the task the image name is available here\n */\n image?: string;\n /** The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). 
A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage?: TaskSummaryProcessingStage;\n /** The purpose for the Task. Tasks are responsible for _uploading_ a Dataset, _attaching_ a Dataset as a File to a project or launching an Applications _instance_.\n */\n purpose: string;\n /** The related object ID for the Task purpose. If the purpose relates to a **Dataset** upload the ID will be a Dataset ID.\n */\n purpose_id: string;\n /** The related object version for the Task purpose. This field will only be set if the **purpose** is `DATASET`.\n */\n purpose_version?: number;\n /** True if the Task relates to an object removal, i.e. a DELETE\n */\n removal?: boolean;\n}\n\n/**\n * The task state. The typical state sequence is `PENDING`, then `STARTED` and finally `SUCCESS`\n\n */\nexport type TaskStateState = typeof TaskStateState[keyof typeof TaskStateState];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskStateState = {\n PENDING: 'PENDING',\n STARTED: 'STARTED',\n RETRY: 'RETRY',\n SUCCESS: 'SUCCESS',\n FAILURE: 'FAILURE',\n} as const;\n\nexport interface TaskState {\n /** A short message accompanying the state, generally only found when the state is `FAILURE`\n */\n message?: string;\n /** The task state. The typical state sequence is `PENDING`, then `STARTED` and finally `SUCCESS`\n */\n state: TaskStateState;\n /** The date and time of the state change\n */\n time: string;\n}\n\nexport interface TaskIdentity {\n task_id: string;\n}\n\n/**\n * The level of the message, a typical logging framework value\n\n */\nexport type TaskEventLevel = typeof TaskEventLevel[keyof typeof TaskEventLevel];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskEventLevel = {\n CRITICAL: 'CRITICAL',\n ERROR: 'ERROR',\n WARNING: 'WARNING',\n INFO: 'INFO',\n DEBUG: 'DEBUG',\n} as const;\n\nexport interface TaskEvent {\n /** The level of the message, a typical logging framework value\n */\n level: TaskEventLevel;\n /** A short message.\n */\n message: string;\n /** The event sequence number. The first event is always '1'.\n */\n ordinal: number;\n /** The date and time the event was generated\n */\n time: string;\n}\n\nexport type ServiceErrorSummarySeverity = typeof ServiceErrorSummarySeverity[keyof typeof ServiceErrorSummarySeverity];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const ServiceErrorSummarySeverity = {\n CRITICAL: 'CRITICAL',\n ERROR: 'ERROR',\n WARNING: 'WARNING',\n} as const;\n\nexport interface ServiceErrorSummary {\n acknowledged: boolean;\n acknowledged_at?: string;\n acknowledging_user?: string;\n created: string;\n error_code?: number;\n hostname: string;\n id: number;\n severity: ServiceErrorSummarySeverity;\n stack_trace: string;\n summary: string;\n}\n\nexport interface ProjectFileDetail {\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The ProjectFile's Dataset origin\n */\n dataset_id?: string;\n /** The ProjectFile's Dataset origin version\n */\n dataset_version?: number;\n /** The ProjectFile's unique ID\n */\n file_id: string;\n /** The ProjectFile's filename within the Project\n */\n file_name: string;\n /** The ProjectFile's path within the Project volume\n */\n file_path: string;\n /** True if the ProjectFile cannot be modified while in the Project\n */\n immutable: boolean;\n /** The ProjectFile MIME type\n */\n mime_type: string;\n /** The owner of the ProjectFile. 
This is the user that added the Dataset (as this file) to the Project\n */\n owner: string;\n /** The Project the ProjectFile belongs to\n */\n project_id?: string;\n}\n\nexport interface ProjectDetail {\n /** An administrator (user_id) of the project */\n administrators: string[];\n created: string;\n /** The user who created the project\n */\n creator: string;\n /** An editor (user_id) of the project */\n editors: string[];\n /** A list of managed files in the Project\n */\n files?: ProjectFileDetail[];\n /** The project name\n */\n name: string;\n /** An observer (user_id) of the project */\n observers: string[];\n /** The Account Server Organisation the Project Product Unit belongs to\n */\n organisation_id?: string;\n /** True if the project is private. Private projects are only visible to editors.\n */\n private: boolean;\n /** The Account Server Product the Project belongs to\n */\n product_id?: string;\n /** The project unique reference\n */\n project_id: string;\n /** The approximate size of all the files in the Project volume. This is updated regularly throughout the day and its current size may differ from what is reported here. The smallest billable unit is 1GiB (1,073,741,824 bytes). Therefore a project that contains 32KiB of files is recorded as 1GiB in size */\n size: number;\n /** The Account Server Unit the Project Product belongs to\n */\n unit_id?: string;\n}\n\nexport interface JobReplacement {\n collection: string;\n job: string;\n}\n\n/**\n * A list of Jobs, collection and job that are either replacing or being replaced\n\n */\nexport type JobReplacements = JobReplacement[];\n\nexport interface JobOrderDetail {\n options: string[];\n}\n\n/**\n * The Job command's outputs. The JSONSchema for the command's inputs, essentially the **variables/outputs** block of the Job's JobDefinition.\n\n */\nexport type JobVariablesOutputs = { [key: string]: unknown };\n\n/**\n * The Job command's options. The JSONSchema for the command's options, essentially the **variables/options** block of the Job's JobDefinition.\n\n */\nexport type JobVariablesOptions = { [key: string]: unknown };\n\n/**\n * The Job command's inputs. The JSONSchema for the command's inputs, essentially the **variables/inputs** block of the Job's JobDefinition.\n\n */\nexport type JobVariablesInputs = { [key: string]: unknown };\n\nexport interface JobVariables {\n /** The Job command's inputs. The JSONSchema for the command's inputs, essentially the **variables/inputs** block of the Job's JobDefinition.\n */\n inputs?: JobVariablesInputs;\n /** The Job command's options. The JSONSchema for the command's options, essentially the **variables/options** block of the Job's JobDefinition.\n */\n options?: JobVariablesOptions;\n order?: JobOrderDetail;\n /** The Job command's outputs. The JSONSchema for the command's inputs, essentially the **variables/outputs** block of the Job's JobDefinition.\n */\n outputs?: JobVariablesOutputs;\n}\n\n/**\n * The optional container image type. 
Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type JobSummaryImageType = typeof JobSummaryImageType[keyof typeof JobSummaryImageType];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const JobSummaryImageType = {\n SIMPLE: 'SIMPLE',\n NEXTFLOW: 'NEXTFLOW',\n} as const;\n\nexport interface JobSummary {\n /** The Job's category\n */\n category?: string;\n /** The Job namespace\n */\n collection: string;\n /** The description of the job in English\n */\n description?: string;\n /** True if disabled. Disabled Jobs cannot be executed. If disabled a reason will be found in `disabled_reason`\n */\n disabled: boolean;\n /** A reason why the Job has been disabled.\n */\n disabled_reason?: string;\n /** A suggested remedy that accompanies the \"disabled_reason\". This provides the client with advice on how to avoid the reason that the Job's been disabled, for reasons that can be avoided (not all are).\n\nWhere there is no remedy for a given reason the remedy will be \"There is no remedy\".\n */\n disabled_remedy?: string;\n /** A URL linking to the Job documentation\n */\n doc_url?: string;\n /** The Job's unique ID\n */\n id: number;\n /** The optional container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n image_type: JobSummaryImageType;\n /** The Job name, unique within a given namespace\n */\n job: string;\n /** The list of keywords assigned to the Job\n */\n keywords?: string[];\n /** A list of Account Server \"required_assets\" that cannot be found. If assets cannot be found the user will receive a \"disabled_reason\" that should explain the problem.\n */\n missing_assets?: string[];\n /** The name of the job in English\n */\n name: string;\n replaced_by?: JobReplacements;\n replaces?: JobReplacements;\n /** A list of Account Server assets names required to run the Job. You need access to these assets in order to run the Job\n */\n required_assets: string[];\n /** The Job version\n */\n version: string;\n}\n\nexport interface JobManifestDetail {\n /** The Date (and time) the manifest was created\n */\n created: string;\n /** The (admin) user who created the manifest\n */\n creator?: string;\n header?: string;\n /** The Job Manifest record ID */\n id: number;\n /** The number of job definition files loaded during the most recent successful load\n */\n job_definition_files_loaded?: number;\n /** The number of job definitions loaded during the most recent successful load\n */\n job_definitions_loaded?: number;\n /** The status of the time the manifest was last loaded. If the load was successful this will be `SUCCESS` and `last_successful_load` will be the same as `last_load`\n */\n last_load_status: string;\n /** The Date (and time) the manifest was last loaded, successfully or otherwise. If the manifest (or any of the Job definition files it refers to) fails to load the `load_status` should provide some diagnostic feedback\n */\n last_load_time?: string;\n /** The Date (and time) the manifest was last loaded successfully\n */\n last_successful_load_time?: string;\n params?: string;\n url: string;\n}\n\nexport interface JobApplication {\n /** The Job's operator ID */\n id: string;\n /** The Job's operator version */\n version: string;\n}\n\n/**\n * The phase of the application. 
This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparing to run, is `PENDING`. The instance is running when the phase is `RUNNING`. `COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccessfully.\n\n */\nexport type InstanceSummaryPhase = typeof InstanceSummaryPhase[keyof typeof InstanceSummaryPhase];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryPhase = {\n COMPLETED: 'COMPLETED',\n CRASH_LOOP_BACKOFF: 'CRASH_LOOP_BACKOFF',\n FAILED: 'FAILED',\n IMAGE_PULL_BACKOFF: 'IMAGE_PULL_BACKOFF',\n PENDING: 'PENDING',\n RUNNING: 'RUNNING',\n SUCCEEDED: 'SUCCEEDED',\n UNKNOWN: 'UNKNOWN',\n} as const;\n\n/**\n * The Job container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type InstanceSummaryJobImageType = typeof InstanceSummaryJobImageType[keyof typeof InstanceSummaryJobImageType];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryJobImageType = {\n SIMPLE: 'SIMPLE',\n NEXTFLOW: 'NEXTFLOW',\n} as const;\n\n/**\n * The type of Application, which can be a `job` or an `application`\n\n */\nexport type InstanceSummaryApplicationType = typeof InstanceSummaryApplicationType[keyof typeof InstanceSummaryApplicationType];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryApplicationType = {\n APPLICATION: 'APPLICATION',\n JOB: 'JOB',\n} as const;\n\nexport interface InstanceSummary {\n /** The application ID\n */\n application_id: string;\n /** The User's specification, provided when the application was launched\n */\n application_specification?: string;\n /** The type of Application, which can be a `job` or an `application`\n */\n application_type: InstanceSummaryApplicationType;\n /** The application version\n */\n application_version: string;\n /** True if the instance is archived (protected from automatic deletion)\n */\n archived: boolean;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The current running coin cost of the instance.\n */\n coins?: string;\n /** If the instance has failed, this will be the error message\n */\n error_message?: string;\n /** Set if the instance output file permissions are expected to be fixed by the Data Manager when the Instance completes.\n */\n fix_permissions?: boolean;\n /** The application instance ID\n */\n id: string;\n /** If the instance relates to a job, this will be the job collection, as defined in the original collection's job definition.\n */\n job_collection?: string;\n /** The Job definition's unique ID\n */\n job_id?: number;\n /** The Job container image type. 
Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n job_image_type?: InstanceSummaryJobImageType;\n /** If the instance relates to a job, this will be the job, as defined in the original collection's job definition.\n */\n job_job?: string;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job definition.\n */\n job_name?: string;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job definition.\n */\n job_version?: string;\n /** The data and time (UTC) the instance was launched\n */\n launched: string;\n /** The Instance name\n */\n name: string;\n /** The instance's known outputs, a JSON string defining a map of all the outputs. Typically applied only to JOB application types\n */\n outputs?: string;\n /** The application instance owner, the person who launched the application and is the only user than can stop it.\n */\n owner: string;\n /** The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparing to run, is `PENDING`. The instance is running when the phase is `RUNNING`. `COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccessfully.\n */\n phase: InstanceSummaryPhase;\n /** The Project the instances is running in.\n */\n project_id: string;\n /** The time the instance has spent running in the cluster. It's a string representation of a Python `timedelta`` object, e.g. `0:12:32` for a run-time of 12 minutes and 32 seconds. The run-time must be considered as an _estimate_ until the instance has stopped and the instance is only considered to be running once `started` has been set.\n */\n run_time: string;\n /** The data and time (UTC) the instance started running\n */\n started?: string;\n /** The data and time (UTC) the instance stopped running\n */\n stopped?: string;\n /** For applications (not Jobs) this is the URL the application has exposed. It will only be available when the application has started.\n */\n url?: string;\n}\n\nexport interface FileStat {\n /** The date and time (UTC) of the last modification\n */\n modified: string;\n /** The size of the file in bytes\n */\n size: number;\n}\n\nexport interface FilePathFile {\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The file's Dataset ID (if the file belongs to a Dataset)\n */\n dataset_id?: string;\n /** The file's Dataset version (if the file belongs to a Dataset)\n */\n dataset_version?: number;\n /** The ID of the file (if the file belongs to a Dataset)\n */\n file_id?: string;\n /** The file name\n */\n file_name: string;\n /** Whether the file is immutable (read-only)\n */\n immutable?: boolean;\n /** The file's MIME type\n */\n mime_type?: string;\n /** The file's owner\n */\n owner: string;\n stat: FileStat;\n}\n\nexport interface JobExchangeRateSummary {\n collection: string;\n id: number;\n job: string;\n rate?: string;\n version: string;\n}\n\nexport interface ApplicationExchangeRateSummary {\n id: string;\n rate?: string;\n}\n\nexport interface ExchangeRateDetail {\n comment?: string;\n created: string;\n id: number;\n rate: string;\n user_id: string;\n}\n\n/**\n * The processing stage. 
When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type DatasetVersionSummaryProcessingStage = typeof DatasetVersionSummaryProcessingStage[keyof typeof DatasetVersionSummaryProcessingStage];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetVersionSummaryProcessingStage = {\n COPYING: 'COPYING',\n FAILED: 'FAILED',\n FORMATTING: 'FORMATTING',\n LOADING: 'LOADING',\n DELETING: 'DELETING',\n DONE: 'DONE',\n} as const;\n\n/**\n * The dictionary of label/value pairs\n\n */\nexport type DatasetVersionSummaryLabels = { [key: string]: unknown };\n\nexport interface DatasetVersionSummary {\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The name of the Pod handling the creation of the Dataset\n */\n creator_pod_name?: string;\n /** If this is a Deleted dataset this is the date and time the dataset was deleted (an ISO-8601 formatted string in UTC)\n */\n deleted?: string;\n /** If this is a Deleted dataset this is the username of the user that deleted the Dataset\n */\n deleting_user?: string;\n /** The filename of the Dataset\n */\n file_name: string;\n /** Any extra variables passed in during the upload and handed to the format-support container.\n */\n format_extra_variables?: string;\n /** The dictionary of label/value pairs\n */\n labels?: DatasetVersionSummaryLabels;\n /** The owner of the Dataset version\n */\n owner?: string;\n /** The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage: DatasetVersionSummaryProcessingStage;\n /** The list of Project's the Dataset is attached to\n */\n projects: string[];\n /** The date and time the Dataset was uploaded (an ISO-8601 formatted string in UTC)\n */\n published: string;\n /** The size, in bytes, of the formatted Dataset\n */\n size?: number;\n /** The source of the Dataset - typically the name of the Dataset that was uploaded or a URL reference\n */\n source_ref: string;\n /** The Dataset MIME type\n */\n type: string;\n /** The version of the dataset\n */\n version: number;\n}\n\nexport interface DatasetVersionProjectFile {\n files: string[];\n project: string;\n project_name: string;\n}\n\n/**\n * The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). 
A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type DatasetVersionDetailProcessingStage = typeof DatasetVersionDetailProcessingStage[keyof typeof DatasetVersionDetailProcessingStage];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetVersionDetailProcessingStage = {\n COPYING: 'COPYING',\n FAILED: 'FAILED',\n FORMATTING: 'FORMATTING',\n LOADING: 'LOADING',\n DELETING: 'DELETING',\n DONE: 'DONE',\n} as const;\n\n/**\n * The dictionary of label/value pairs\n\n */\nexport type DatasetVersionDetailLabels = { [key: string]: unknown };\n\nexport interface DatasetVersionDetail {\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The name of the Pod handling the creation of the Dataset\n */\n creator_pod_name?: string;\n /** If this is a Deleted dataset this is the date and time the dataset was deleted (an ISO-8601 formatted string in UTC)\n */\n deleted?: string;\n /** If this is a Deleted dataset this is the username of the user that deleted the Dataset\n */\n deleting_user?: string;\n /** The filename of the Dataset\n */\n file_name: string;\n /** Any extra variables passed in during the upload and handed to the format-support container.\n */\n format_extra_variables?: string;\n /** The dictionary of label/value pairs\n */\n labels?: DatasetVersionDetailLabels;\n /** The owner of the Dataset version\n */\n owner: string;\n /** The processing stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage: DatasetVersionDetailProcessingStage;\n /** The list of\n */\n project_files: DatasetVersionProjectFile[];\n /** The list of Project's the Dataset is attached to\n */\n projects: string[];\n /** The date and time the Dataset was uploaded (an ISO-8601 formatted string in UTC)\n */\n published: string;\n /** The size, in bytes, of the formatted Dataset\n */\n size?: number;\n /** The source of the Dataset - typically the name of the Dataset that was uploaded or a URL reference\n */\n source_ref: string;\n /** The Dataset MIME type\n */\n type: string;\n /** The version of the dataset\n */\n version: number;\n}\n\nexport interface DatasetSummary {\n /** The Dataset ID\n */\n dataset_id: string;\n /** The list of editors\n */\n editors: string[];\n /** The set of separate versions of the Dataset\n */\n versions: DatasetVersionSummary[];\n}\n\nexport interface DatasetDetail {\n /** The number of datasets\n */\n count: number;\n /** The Dataset ID\n */\n dataset_id: string;\n /** The list of editors\n */\n editors: string[];\n /** The owner of the Dataset\n */\n owner: string;\n /** The set of separate versions of the Dataset\n */\n versions: DatasetVersionDetail[];\n}\n\nexport interface ApplicationSummary {\n /** The application unique reference, the value of the Kubernetes **Custom Resource Definition** `metadata.name` property\n */\n application_id: string;\n /** The application group\n */\n group?: string;\n /** The application name, the value of the Kubernetes **Custom Resource Definition** `spec.names.kind` property\n */\n kind: string;\n /** The application's latest version as declared in The **Custom Resource Definition**\n */\n latest_version: string;\n}\n\nexport interface ApplicationImageVariant {\n image: string;\n name: string;\n}\n\nexport interface ApplicationImageVariants {\n 
public?: ApplicationImageVariant[];\n}\n\n/**\n * The REST method used. GET methods are not logged\n\n */\nexport type ApiLogDetailMethod = typeof ApiLogDetailMethod[keyof typeof ApiLogDetailMethod];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const ApiLogDetailMethod = {\n DELETE: 'DELETE',\n PATCH: 'PATCH',\n POST: 'POST',\n PUT: 'PUT',\n} as const;\n\nexport interface ApiLogDetail {\n /** The date/time the API call began\n */\n began: string;\n /** A JSON string representing the in-body properties used in the call\n */\n body?: string;\n /** The REST method approximate execution time (nS) in a humanised form, where 7969400 is presented as 7,969,400\n */\n duration_ns?: string;\n /** Not all API calls using your user ID may have been executed by you, In cases where an administrator has executed a REST API call on your behalf, their user ID will be revealed using this property\n */\n impersonator?: string;\n /** The REST method used. GET methods are not logged\n */\n method: ApiLogDetailMethod;\n /** A JSON string representing the in-query properties used in the call\n */\n params?: string;\n /** The API path used, e.g. `/dataset`\n */\n path: string;\n /** The HTTP response status code\n */\n status_code?: number;\n}\n\nexport interface ModeGetResponse {\n /** The Data Manager mode. The mode determines what features are available through the API. There are two modes, DEVELOPMENT, and PRODUCTION. In DEVELOPMENT mode some destructive operation are permitted (for example the removal of Job Definitions)\n */\n mode: string;\n}\n\nexport interface VersionGetResponse {\n /** The Data Manager version. This is guaranteed to be a valid semantic version for official (tagged) images. The version value format for unofficial images is a string but otherwise undefined\n */\n version: string;\n}\n\nexport interface UsersGetResponse {\n count: number;\n /** A list of Users that have used the Data Manager\n */\n users: UserSummary[];\n}\n\nexport interface UserApiLogGetResponse {\n /** A list of API loc call records, with the oldest record first in the list\n */\n api_log: ApiLogDetail[];\n count: number;\n}\n\nexport interface UserActivityDetailPeriod {\n /** A list of dates where the API has been used during the monitoring period. Dates are returned if when the activity is not 100% and active dates are present. Dates are listed in reverse chronological order (i.e. 
the most recent first)\n */\n active_dates?: string[];\n /** The number of days the API has been used\n */\n active_days: number;\n /** Active days, as a percentage, over the monitoring period.\n */\n activity: string;\n /** The number of days the API has not been used\n */\n inactive_days: number;\n /** The period over which the activity is monitored\n */\n monitoring_period: string;\n}\n\nexport interface UserActivityDetail {\n period_a: UserActivityDetailPeriod;\n period_b?: UserActivityDetailPeriod;\n /** The total percentage activity since first seen\n */\n total_activity: string;\n /** The total number of days active since first seen\n */\n total_days_active: number;\n /** The total number of days inactive since first seen\n */\n total_days_inactive: number;\n /** The total number of days since the user was first seen, including the day the user was first seen\n */\n total_days_since_first_seen: number;\n}\n\nexport type UserAccountGetResponse = UserAccountDetail;\n\nexport interface TypesGetResponse {\n count: number;\n /** A list of available MIME types\n */\n types: TypeSummary[];\n}\n\nexport interface TasksGetResponse {\n count: number;\n /** A list of Tasks\n */\n tasks: TaskSummary[];\n}\n\n/**\n * The purpose of the task. Tasks are typically executed for the purpose of Dataset processing, attaching as Project Files or Application Instance execution. Other fields in this object are only valid if the purpose is known (i.e. is not `UNKNOWN`)\n\n */\nexport type TaskGetResponsePurpose = typeof TaskGetResponsePurpose[keyof typeof TaskGetResponsePurpose];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskGetResponsePurpose = {\n DATASET: 'DATASET',\n FILE: 'FILE',\n INSTANCE: 'INSTANCE',\n PROJECT: 'PROJECT',\n} as const;\n\nexport interface TaskGetResponse {\n /** The date and time the task was created\n */\n created: string;\n /** True if the task has run to completion. If the task finished successfully the `exit_code` will be zero.\n */\n done: boolean;\n /** A (possibly empty) list of application events. The oldest event occupies the first position in the list.\n */\n events?: TaskEvent[];\n /** Present when `done` and zero if the task finished successfully.\n */\n exit_code?: number;\n /** If a container image is launched by the task the image name is available here\n */\n image?: string;\n /** Is the Task Purpose is `INSTANCE`, and the instance was given a `specification` the specification can be found here. For **Applications** the specification is returned verbatim.\n */\n instance_specification?: string;\n /** The purpose of the task. Tasks are typically executed for the purpose of Dataset processing, attaching as Project Files or Application Instance execution. Other fields in this object are only valid if the purpose is known (i.e. is not `UNKNOWN`)\n */\n purpose: TaskGetResponsePurpose;\n /** The identity of the purpose, where available. This will be the Dataset UUID if the purpose of the task is/was for Dataset processing.\n */\n purpose_id: string;\n /** The version number, relating to the object under control. For Datasets this will be the Dataset version.\n */\n purpose_version?: number;\n /** True if the Task relates to an object removal, i.e. 
a DELETE\n */\n removal?: boolean;\n /** A (possibly empty) list of application states, the oldest state occupies the first position in the list.\n */\n states?: TaskState[];\n}\n\nexport interface ServiceErrorsGetResponse {\n /** The number of service errors\n */\n count: number;\n /** A list of service errors\n */\n service_errors: ServiceErrorSummary[];\n}\n\nexport interface ProjectsGetResponse {\n count: number;\n projects: ProjectDetail[];\n}\n\nexport interface ProjectPostResponse {\n /** The project identity\n */\n project_id: string;\n}\n\nexport type ProjectGetResponse = ProjectDetail;\n\nexport type ProjectDeleteResponse = TaskIdentity;\n\nexport interface JobsGetResponse {\n count: number;\n jobs: JobSummary[];\n}\n\n/**\n * The optional container image type. Typically a single-container `SIMPLE` (where only one container runs) or a workflow where multiple container images can be spawned (like `NEXTFLOW`)\n\n */\nexport type JobGetResponseImageType = typeof JobGetResponseImageType[keyof typeof JobGetResponseImageType];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const JobGetResponseImageType = {\n SIMPLE: 'SIMPLE',\n NEXTFLOW: 'NEXTFLOW',\n} as const;\n\nexport interface JobGetResponse {\n application: ApplicationSummary;\n /** The Job's category\n */\n category?: string;\n /** The Job collection\n */\n collection: string;\n /** The Job command (usually encoded)\n */\n command: string;\n /** The Job command encoding\n */\n command_encoding: string;\n /** English description of the job\n */\n description?: string;\n /** True if disabled. Disabled Jobs cannot be executed. If disabled a reason will be found in `disabled_reason`\n */\n disabled: boolean;\n /** A reason why the Job has been disabled.\n */\n disabled_reason?: string;\n /** A suggested remedy that accompanies the \"disabled_reason\". This provides the client with advice on how to avoid the reason that the Job's been disabled, for reasons that can be avoided (not all are).\n\nWhere there is no remedy for a given reason the remedy will be \"There is no remedy\".\n */\n disabled_remedy?: string;\n /** A URL linking to the Job documentation\n */\n doc_url?: string;\n exchange_rate: string;\n /** The Job's unique ID\n */\n id: number;\n /** The name of the container image that houses the job\n */\n image_name: string;\n /** The directory where the project volume will be mounted. The root path to the project files\n */\n image_project_directory: string;\n /** The container image tag\n */\n image_tag: string;\n /** The optional container image type. Typically a single-container `SIMPLE` (where only one container runs) or a workflow where multiple container images can be spawned (like `NEXTFLOW`)\n */\n image_type?: JobGetResponseImageType;\n /** The directory used as the container image working directory (if defined)\n */\n image_working_directory?: unknown;\n /** The Job name, unique within a given collection\n */\n job: string;\n /** The list of keywords assigned to the Job\n */\n keywords?: string[];\n /** A list of Account Server \"required_assets\" that cannot be found. If assets cannot be found the user will receive a \"disabled_reason\" that should explain the problem.\n */\n missing_assets?: string[];\n /** The Job's descriptive name\n */\n name: string;\n replaced_by?: JobReplacements;\n replaces?: JobReplacements;\n /** A list of Account Server assets names required to run the Job. 
You need access to these assets in order to run the Job\n */\n required_assets: string[];\n variables?: JobVariables;\n /** The Job version\n */\n version: string;\n}\n\nexport interface InventoryProjectDetail {\n id: string;\n name: string;\n unit_id: string;\n}\n\nexport interface InventoryDatasetDetail {\n filename: string;\n id: string;\n unit_id: string;\n version: number;\n}\n\nexport type InventoryUserDetailProjects = {\n administrator: InventoryProjectDetail[];\n editor: InventoryProjectDetail[];\n observer: InventoryProjectDetail[];\n};\n\nexport type InventoryUserDetailDatasets = {\n editor?: InventoryDatasetDetail[];\n owner?: InventoryDatasetDetail[];\n};\n\nexport interface InventoryUserDetail {\n activity: UserActivityDetail;\n datasets: InventoryUserDetailDatasets;\n f_uid: number;\n first_seen: string;\n last_seen_date: string;\n projects: InventoryUserDetailProjects;\n username: string;\n}\n\nexport type InstanceTaskPurpose = typeof InstanceTaskPurpose[keyof typeof InstanceTaskPurpose];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceTaskPurpose = {\n CREATE: 'CREATE',\n DELETE: 'DELETE',\n} as const;\n\nexport interface InstanceTask {\n id: string;\n purpose: InstanceTaskPurpose;\n}\n\nexport interface InstancesGetResponse {\n count: number;\n instances: InstanceSummary[];\n}\n\n/**\n * The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparing to run, is `PENDING`. The instance is running when the phase is `RUNNING`. `COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccessfully.\n\n */\nexport type InstanceGetResponsePhase = typeof InstanceGetResponsePhase[keyof typeof InstanceGetResponsePhase];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceGetResponsePhase = {\n COMPLETED: 'COMPLETED',\n CRASH_LOOP_BACKOFF: 'CRASH_LOOP_BACKOFF',\n FAILED: 'FAILED',\n IMAGE_PULL_BACKOFF: 'IMAGE_PULL_BACKOFF',\n PENDING: 'PENDING',\n RUNNING: 'RUNNING',\n SUCCEEDED: 'SUCCEEDED',\n UNKNOWN: 'UNKNOWN',\n} as const;\n\n/**\n * The Job container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type InstanceGetResponseJobImageType = typeof InstanceGetResponseJobImageType[keyof typeof InstanceGetResponseJobImageType];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceGetResponseJobImageType = {\n SIMPLE: 'SIMPLE',\n NEXTFLOW: 'NEXTFLOW',\n} as const;\n\n/**\n * The type of Application, which can be a `job` or an `application`\n\n */\nexport type InstanceGetResponseApplicationType = typeof InstanceGetResponseApplicationType[keyof typeof InstanceGetResponseApplicationType];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceGetResponseApplicationType = {\n APPLICATION: 'APPLICATION',\n JOB: 'JOB',\n} as const;\n\nexport interface InstanceGetResponse {\n /** The application ID\n */\n application_id: string;\n /** The optional application specification. For **Applications** this is returned verbatim. 
For **Jobs** additional material will be found.\n */\n application_specification?: string;\n /** The type of Application, which can be a `job` or an `application`\n */\n application_type: InstanceGetResponseApplicationType;\n /** The application version\n */\n application_version: string;\n /** True if the instance is archived (protected from automatic deletion)\n */\n archived: boolean;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The accumulated coins accrued by the running Job. Coins are calculated based on the `cost` multiplied by the `launch_exchange_rate`. This is a string representation of a Decimal value, e.g. `'16.4'`\n */\n coins?: string;\n /** Where available, this is the number of hours that the JOb would take if the collateral Pods had access to only one CPU core. Collateral Pods are all the pods created by the Job that are in addition to the main (controlling) Pod. A value of `'0.5'` implies the job would have run in 30 minutes on 1 core. The minimum time resolution is 0.001 (3.6 seconds).\n */\n collateral_cpu_hours?: string;\n /** Where available, the number of collateral Pods spawned by the instance, typically used by Job instances. Collateral Pods are all the pods created by the Job that are in addition to the main (controlling) Pod. For `NEXTFLOW` jobs this is a count of the number of Task pods spawned.\n */\n collateral_pod_count?: number;\n /** The accumulated cost accrued by the running Job. Depending on the the Job, this is either known when the Job completes or may change as the Job runs. This is a string representation of a Decimal value, e.g. `'32.8'`\n */\n cost?: string;\n /** An optional error message, used to report underlying problems.\n */\n error_message?: string;\n /** Set if the instance output file permissions are expected to be fixed by the Data Manager when the Instance completes.\n */\n fix_permissions?: boolean;\n /** Set if the instance has a valid callback token. Instances with a valid token allow users to read files form the project without authentication\n */\n has_valid_callback_token: boolean;\n /** If the instance relates to a job, this will be the job collection, as defined in the original collection's job definition.\n */\n job_collection?: string;\n /** The Job definition's unique ID\n */\n job_id?: number;\n /** The Job container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n job_image_type?: InstanceGetResponseJobImageType;\n /** If the instance relates to a job, this will be the job, as defined in the original collection's job definition.\n */\n job_job?: string;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job definition.\n */\n job_name?: string;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job definition.\n */\n job_version?: string;\n /** The cost exchange rate that applies to the Job at the time of launch. This is a string representation of a Decimal, e.g. 
`'0.5'`\n */\n launch_exchange_rate?: string;\n /** The date and time the instance was started, an ISO-8601 format string.\n */\n launched: string;\n /** The application name\n */\n name: string;\n /** The JSON string representation of the JobDefinition's outputs\n */\n outputs?: string;\n /** The application owner, the person who launched the application and is the only user than can stop it.\n */\n owner: string;\n /** The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparing to run, is `PENDING`. The instance is running when the phase is `RUNNING`. `COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccessfully.\n */\n phase: InstanceGetResponsePhase;\n /** The Project the Instance is running in\n */\n project_id: string;\n /** The time the instance has spent running in the cluster. It's a string representation of a Python timedelta object, e.g. `0:12:32` for a run-time of 12 minutes and 32 seconds. The run-time must be considered as an _estimate_ until the instance has stopped and the instance is only considered to be running once `started` has been set.\n */\n run_time: string;\n /** The date and time the instance started running in the cluster. This is typically close to the launch time but contention may mean the instance starts only when resources are available.\n */\n started?: string;\n /** The date and time the instance stopped, an ISO-8601 format string.\n */\n stopped?: string;\n /** A list of Tasks related to the application. Tasks are responsible for creating and deleting the instance. You should fins at least one, assuming the corresponding task has not been deleted.\n */\n tasks: InstanceTask[];\n /** The application endpoint\n */\n url?: string;\n /** A list of Project-relative annotation files.\n */\n written_job_annotation_files?: string[];\n}\n\nexport type GetExchangeRatesResponseId = number | string;\n\nexport interface GetExchangeRatesResponse {\n count: number;\n exchange_rates: ExchangeRateDetail[];\n id: GetExchangeRatesResponseId;\n}\n\nexport type GetAllExchangeRatesResponseExchangeRatesItem = ApplicationExchangeRateSummary | JobExchangeRateSummary;\n\nexport interface GetAllExchangeRatesResponse {\n count: number;\n exchange_rates: GetAllExchangeRatesResponseExchangeRatesItem[];\n only_undefined: boolean;\n}\n\nexport interface FilesGetResponse {\n /** The number of files in the Project path\n */\n count: number;\n /** The dataset identity (not its name). A unique reference assigned automatically when uploaded\n */\n files: FilePathFile[];\n /** The project path\n */\n path: string;\n /** Sub-directories in the current path\n */\n paths: string[];\n /** The project\n */\n project_id: string;\n}\n\nexport interface FilePostResponse {\n /** The Project File identity, assigned automatically when a Dataset is added to a Project\n */\n file_id: string;\n /** The name of the File that will appear in the Project\n */\n file_name: string;\n /** The path to the file in the Project, relative to the volume root (mount point). Files in the root of the project will have a path value of '/'\n */\n file_path: string;\n /** The File task identity. The task assigned to convert and attach the Dataset File to the Project\n */\n task_id: string;\n}\n\nexport interface InstancePostResponse {\n /** A token that can be used to access data in the project without further authentication. 
The token expires automatically or if the user revokes it. The token is only provided if asked for when the instance is launched.\n */\n callback_token?: string;\n /** The instance expanded command. Applies only to Job instances.\n */\n command?: string;\n /** The application instance identity (not its name). Assigned automatically when created\n */\n instance_id: string;\n /** The instance task identity. The task assigned to process the instance\n */\n task_id: string;\n}\n\nexport interface InstanceDryRunPostResponse {\n /** A token that can be used to access data in the project without further authentication. The token expires automatically or if the user revokes it.\n */\n callback_token?: string;\n /** The instance expanded command. Applies only to Job instances.\n */\n command?: string;\n}\n\nexport type InstanceDeleteResponse = TaskIdentity;\n\nexport interface DatasetsGetResponse {\n count: number;\n datasets: DatasetSummary[];\n}\n\nexport type DatasetVersionsGetResponse = DatasetDetail;\n\nexport type DatasetVersionDeleteResponse = TaskIdentity;\n\n/**\n * The Metadata type (an object)\n\n */\nexport type DatasetSchemaGetResponseType = typeof DatasetSchemaGetResponseType[keyof typeof DatasetSchemaGetResponseType];\n\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetSchemaGetResponseType = {\n object: 'object',\n} as const;\n\nexport interface DatasetSchemaGetResponse {\n /** The Metadata description\n */\n description: string;\n /** Required properties\n */\n required: string[];\n /** The Metadata title\n */\n title: string;\n /** The Metadata type (an object)\n */\n type: DatasetSchemaGetResponseType;\n [key: string]: unknown;\n }\n\nexport interface DatasetMetaGetResponse {\n /** The Metadata's annotations\n */\n annotations: unknown[];\n /** The date and time of creation\n */\n created: string;\n /** The user who created the Metadata\n */\n created_by: string;\n /** The Metadata description\n */\n dataset_id: string;\n /** The Metadata title\n */\n dataset_name: string;\n /** The Metadata type (an object)\n */\n description: string;\n /** The Metadata's labels\n */\n labels: unknown[];\n /** The date and time it was last updated\n */\n last_updated: string;\n /** The Metadata version\n */\n metadata_version: string;\n [key: string]: unknown;\n }\n\nexport interface DatasetDigestGetResponse {\n /** The Dataset matching the digest\n */\n dataset_id: string;\n /**\n * The Dataset version\n\n * @minimum 1\n */\n dataset_version: number;\n}\n\nexport interface DatasetPutPostResponse {\n /** The dataset identity (not its name). Assigned automatically when uploaded\n */\n dataset_id: string;\n /** The dataset identity version. Assigned automatically when uploaded\n */\n dataset_version: number;\n /** The dataset task identity. 
The task assigned to process the dataset\n */\n task_id: string;\n}\n\nexport interface ApplicationsGetResponse {\n /** A list of installed applications, which are application-compliant Kubernetes \"operators\"\n */\n applications: ApplicationSummary[];\n /** The number of known Applications\n */\n count: number;\n}\n\nexport interface ApplicationGetResponse {\n /** The Application COST exchange rate\n */\n exchange_rate: string;\n /** The application group\n */\n group: string;\n /** The Application's unique ID\n */\n id: string;\n image_variants?: ApplicationImageVariants;\n /** A list of instances of the application\n */\n instances: string[];\n /** The name (kind) of the application\n */\n kind: string;\n /** The application specification template. Used when creating application instances\n */\n template: string;\n /** The list of available versions\n */\n versions: string[];\n}\n\nexport interface AdminUsersGetResponse {\n /** The number of Users that have used the Data Manager\n */\n count: number;\n /** A list of Users that have used the Data Manager\n */\n users: UserDetail[];\n}\n\nexport type AdminUserPutResponse = UserAccountDetail;\n\nexport interface AdminJobManifestLoadPutResponse {\n /** Errors raised during the processing of individual Job Definitions\n */\n job_definition_failures?: string[];\n /** Errors raised during Job Definition file processing\n */\n job_definition_file_failures?: string[];\n /** Errors raised during Manifests file processing\n */\n manifest_file_failures?: string[];\n /** The number of Job Definitions inspected\n */\n num_job_definition_files_inspected: number;\n /** The number of Jobs inspected\n */\n num_jobs_inspected: number;\n /** The number of Jobs loaded or changed\n */\n num_jobs_loaded: number;\n /** The number of Jobs removed\n */\n num_jobs_purged?: number;\n /** The number of Job Manifests inspected\n */\n num_manifest_files_inspected: number;\n /** True if there are no errors, false otherwise\n */\n status: boolean;\n}\n\nexport interface InventoryUserGetResponse {\n today: string;\n /** The list of known Users\n */\n users: InventoryUserDetail[];\n}\n\nexport interface AdminJobManifestGetResponse {\n /** The number of known Job Manifests\n */\n count: number;\n /** The list of known Job manifests\n */\n job_manifests: JobManifestDetail[];\n}\n\nexport interface AccountServerGetNamespaceResponse {\n /** The configured Account Server namespace, which will be an empty string if one is not configured. 
The AS API is expected as the service `as-api` in this namespace.\n */\n namespace: string;\n}\n\nexport interface AccountServerGetRegistrationResponse {\n /** @minimum 1 */\n merchant_id: number;\n name: string;\n registered: string;\n}\n\nexport interface DmError {\n /** Brief error text that can be presented to the user\n */\n error: string;\n}\n\n"],"mappings":";;;;;;;;AA4jBO,IAAM,oBAAoB;AAAA,EAC/B,SAAS;AAAA,EACT,MAAM;AAAA,EACN,UAAU;AAAA,EACV,SAAS;AACX;AAuaO,IAAM,kCAAkC;AAAA,EAC7C,QAAQ;AACV;AA6CO,IAAM,6BAA6B;AAAA,EACxC,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AACR;AA2CO,IAAM,iBAAiB;AAAA,EAC5B,SAAS;AAAA,EACT,SAAS;AAAA,EACT,OAAO;AAAA,EACP,SAAS;AAAA,EACT,SAAS;AACX;AA0BO,IAAM,iBAAiB;AAAA,EAC5B,UAAU;AAAA,EACV,OAAO;AAAA,EACP,SAAS;AAAA,EACT,MAAM;AAAA,EACN,OAAO;AACT;AAqBO,IAAM,8BAA8B;AAAA,EACzC,UAAU;AAAA,EACV,OAAO;AAAA,EACP,SAAS;AACX;AA0IO,IAAM,sBAAsB;AAAA,EACjC,QAAQ;AAAA,EACR,UAAU;AACZ;AAmGO,IAAM,uBAAuB;AAAA,EAClC,WAAW;AAAA,EACX,oBAAoB;AAAA,EACpB,QAAQ;AAAA,EACR,oBAAoB;AAAA,EACpB,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AACX;AAUO,IAAM,8BAA8B;AAAA,EACzC,QAAQ;AAAA,EACR,UAAU;AACZ;AAUO,IAAM,iCAAiC;AAAA,EAC5C,aAAa;AAAA,EACb,KAAK;AACP;AAsJO,IAAM,uCAAuC;AAAA,EAClD,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AACR;AAsEO,IAAM,sCAAsC;AAAA,EACjD,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AACR;AAyHO,IAAM,qBAAqB;AAAA,EAChC,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,MAAM;AAAA,EACN,KAAK;AACP;AAkHO,IAAM,yBAAyB;AAAA,EACpC,SAAS;AAAA,EACT,MAAM;AAAA,EACN,UAAU;AAAA,EACV,SAAS;AACX;AA2EO,IAAM,0BAA0B;AAAA,EACrC,QAAQ;AAAA,EACR,UAAU;AACZ;AAiHO,IAAM,sBAAsB;AAAA,EACjC,QAAQ;AAAA,EACR,QAAQ;AACV;AAqBO,IAAM,2BAA2B;AAAA,EACtC,WAAW;AAAA,EACX,oBAAoB;AAAA,EACpB,QAAQ;AAAA,EACR,oBAAoB;AAAA,EACpB,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AACX;AAUO,IAAM,kCAAkC;AAAA,EAC7C,QAAQ;AAAA,EACR,UAAU;AACZ;AAUO,IAAM,qCAAqC;AAAA,EAChD,aAAa;AAAA,EACb,KAAK;AACP;AAkMO,IAAM,+BAA+B;AAAA,EAC1C,QAAQ;AACV;","names":[]}
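Note: the schema module embedded above emits every enumeration as a readonly `const` object plus a derived string-literal union type (the `TaskSummaryProcessingStage` / `InstanceSummaryPhase` pattern). The following is a minimal consumer-side sketch of that pattern, not part of the package; the import path and the `isTaskSettled` helper are illustrative and assume the schema types are re-exported from the package root.

import {
  TaskSummaryProcessingStage,
  type TaskSummary,
} from '@squonk/data-manager-client';

// `TaskSummaryProcessingStage` is declared `as const`, so its members are string
// literals and a plain equality check is enough to narrow `processing_stage`.
const isTaskSettled = (task: TaskSummary): boolean =>
  task.processing_stage === TaskSummaryProcessingStage.DONE ||
  task.processing_stage === TaskSummaryProcessingStage.FAILED;

// e.g. keep only the settled entries of a TasksGetResponse `tasks` array
const settledTasks = (tasks: TaskSummary[]): TaskSummary[] => tasks.filter(isTaskSettled);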
package/instance/instance.cjs.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"sources":["/home/runner/work/squonk2-data-manager-js-client/squonk2-data-manager-js-client/dist/instance/instance.cjs","../../src/instance/instance.ts"],"names":[],"mappings":"AAAA;AACE;AACF,yDAA8B;AAC9B;AACA;ACOA;AACE;AACA;AACA;AAAA,mDACK;AA4CA,IAAM,eAAA,EAAiB,CAC1B,oBAAA,EACH,OAAA,EAAA,GAAsD;AAEjD,EAAA,MAAM,SAAA,EAAW,IAAI,QAAA,CAAS,CAAA;AACpC,EAAA,QAAA,CAAS,MAAA,CAAO,gBAAA,EAAkB,oBAAA,CAAqB,cAAc,CAAA;AACrE,EAAA,QAAA,CAAS,MAAA,CAAO,YAAA,EAAc,oBAAA,CAAqB,UAAU,CAAA;AAC7D,EAAA,QAAA,CAAS,MAAA,CAAO,SAAA,EAAW,oBAAA,CAAqB,OAAO,CAAA;AACvD,EAAA,GAAA,CAAG,oBAAA,CAAqB,aAAA,IAAiB,KAAA,CAAA,EAAW;AACnD,IAAA,QAAA,CAAS,MAAA,CAAO,cAAA,EAAgB,oBAAA,CAAqB,YAAY,CAAA;AAAA,EACjE;AACD,EAAA,GAAA,CAAG,oBAAA,CAAqB,iBAAA,IAAqB,KAAA,CAAA,EAAW;AACvD,IAAA,QAAA,CAAS,MAAA,CAAO,kBAAA,EAAoB,oBAAA,CAAqB,gBAAgB,CAAA;AAAA,EACzE;AACD,EAAA,GAAA,CAAG,oBAAA,CAAqB,wBAAA,IAA4B,KAAA,CAAA,EAAW;AAC9D,IAAA,QAAA,CAAS,MAAA,CAAO,yBAAA,EAA2B,oBAAA,CAAqB,uBAAA,CAAwB,QAAA,CAAS,CAAC,CAAA;AAAA,EAClG;AACD,EAAA,GAAA,CAAG,oBAAA,CAAqB,eAAA,IAAmB,KAAA,CAAA,EAAW;AACrD,IAAA,QAAA,CAAS,MAAA,CAAO,gBAAA,EAAkB,oBAAA,CAAqB,cAAc,CAAA;AAAA,EACrE;AACD,EAAA,GAAA,CAAG,oBAAA,CAAqB,MAAA,IAAU,KAAA,CAAA,EAAW;AAC5C,IAAA,QAAA,CAAS,MAAA,CAAO,OAAA,EAAS,oBAAA,CAAqB,KAAK,CAAA;AAAA,EACnD;AACD,EAAA,GAAA,CAAG,oBAAA,CAAqB,cAAA,IAAkB,KAAA,CAAA,EAAW;AACpD,IAAA,QAAA,CAAS,MAAA,CAAO,eAAA,EAAiB,oBAAA,CAAqB,aAAa,CAAA;AAAA,EACnE;AAEK,EAAA,OAAO,8CAAA;AAAA,IACP;AAAA,MAAC,GAAA,EAAK,CAAA,SAAA,CAAA;AAAA,MAAa,MAAA,EAAQ,MAAA;AAAA,MAC3B,OAAA,EAAS,EAAC,cAAA,EAAgB,sBAAuB,CAAA;AAAA,MAChD,IAAA,EAAM;AAAA,IACT,CAAA;AAAA,IACE;AAAA,EAAO,CAAA;AACT,CAAA;AAIG,IAAM,iCAAA,EAAmC,CACxB,OAAA,EAAA,GAC4F;AACpH,EAAA,MAAM,EAAC,QAAA,EAAU,eAAA,EAAiB,OAAA,EAAS,eAAc,EAAA,mBAAI,OAAA,UAAW,CAAC,GAAA;AAKnE,EAAA,MAAM,WAAA,EAAyG,CAAC,KAAA,EAAA,GAAU;AACtH,IAAA,MAAM,EAAC,KAAI,EAAA,mBAAI,KAAA,UAAS,CAAC,GAAA;AAEzB,IAAA,OAAQ,cAAA,CAAe,IAAA,EAAK,cAAc,CAAA;AAAA,EAC5C,CAAA;AAKN,EAAA,OAAQ,EAAE,UAAA,EAAY,GAAG,gBAAgB,CAAA;AAAC,CAAA;AASrC,IAAM,kBAAA,EAAoB,CACT,OAAA,EAAA,GAMb;AAEL,EAAA,MAAM,gBAAA,EAAkB,gCAAA,CAAiC,OAAO,CAAA;AAEhE,EAAA,OAAO,qCAAA,eAA2B,CAAA;AACpC,CAAA;AAMG,IAAM,aAAA,EAAe,CACxB,MAAA,EACH,OAAA,EAAiD,MAAA,EAAA,GAC7C;AAGC,EAAA,OAAO,8CAAA;AAAA,IACP;AAAA,MAAC,GAAA,EAAK,CAAA,SAAA,CAAA;AAAA,MAAa,MAAA,EAAQ,KAAA;AAAA,MACzB,MAAA;AAAA,MAAQ;AAAA,IACZ,CAAA;AAAA,IACE;AAAA,EAAO,CAAA;AACT,CAAA;AAGG,IAAM,wBAAA,EAA0B,CAAC,MAAA,EAAA,GAAiC;AACrE,EAAA,OAAO,CAAC,kBAAA,EAAoB,CAAA,SAAA,CAAA,EAAa,GAAI,OAAA,EAAS,CAAC,MAAM,EAAA,EAAG,CAAC,CAAE,CAAA;AACnE,CAAA;AAGG,IAAM,4BAAA,EAA8B,CAAuF,MAAA,EAA6B,OAAA,EAAA,GAC1J;AAEL,EAAA,MAAM,EAAC,KAAA,EAAO,YAAA,EAAc,OAAA,EAAS,eAAc,EAAA,mBAAI,OAAA,UAAW,CAAC,GAAA;AAEjE,EAAA,MAAM,SAAA,mBAAA,CAAY,aAAA,GAAA,KAAA,EAAA,KAAA,EAAA,EAAA,YAAA,CAAc,QAAA,CAAA,UAAY,uBAAA,CAAwB,MAAM,GAAA;AAIxE,EAAA,MAAM,QAAA,EAAmE,CAAC,EAAE,OAAO,CAAA,EAAA,GAAM,YAAA,CAAa,MAAA,EAAQ,cAAA,EAAgB,MAAM,CAAA;AAMrI,EAAA,OAAQ,EAAE,QAAA,EAAU,OAAA,EAAS,GAAG,aAAY,CAAA;AAC/C,CAAA;AAkCO,SAAS,eAAA,CACf,MAAA,EAA6B,OAAA,EAE+B;AAE3D,EAAA,MAAM,aAAA,EAAe,2BAAA,CAA4B,MAAA,EAAO,OAAO,CAAA;AAE/D,EAAA,MAAM,MAAA,EAAQ,kCAAA,YAAqB,CAAA;AAEnC,EAAA,KAAA,CAAM,SAAA,EAAW,YAAA,CAAa,QAAA;AAE9B,EAAA,OAAO,KAAA;AACT;AAIO,IAAM,oCAAA,EAAsC,CAAuF,MAAA,EAA6B,OAAA,EAAA,GAClK;AAEL,EAAA,MAAM,EAAC,KAAA,EAAO,YAAA,EAAc,OAAA,EAAS,eAAc,EAAA,mBAAI,OAAA,UAAW,CAAC,GAAA;AAEjE,EAAA,MAAM,SAAA,mBAAA,CAAY,aAAA,GAAA,KAAA,EAAA,KAAA,EAAA,EAAA,YAAA,CAAc,QAAA,CAAA,UAAY,uBAAA,CAAwB,MAAM,GAAA;AAIxE,EAAA,MAAM,QAAA,EAAmE,CAAC,EAAE,OAAO,CAAA,EAAA,GAAM,YAAA,CAAa,MAAA,EAAQ,cAAA,EAAgB,MAAM,CAAA;AAMrI,EAAA,OAAQ,EAAE,QAAA,EAAU,OAAA,EAAS,GAAG,aAAY,CAAA;AAC/C,CAAA;AAsBO,SAAS,uBAAA,CACf,MAAA,EAA6B,OAA
A,EAEuC;AAEnE,EAAA,MAAM,aAAA,EAAe,mCAAA,CAAoC,MAAA,EAAO,OAAO,CAAA;AAEvE,EAAA,MAAM,MAAA,EAAQ,0CAAA,YAA6B,CAAA;AAE3C,EAAA,KAAA,CAAM,SAAA,EAAW,YAAA,CAAa,QAAA;AAE9B,EAAA,OAAO,KAAA;AACT;AAWO,IAAM,eAAA,EAAiB,CAC1B,oBAAA,EACH,OAAA,EAAA,GAAsD;AAEjD,EAAA,MAAM,SAAA,EAAW,IAAI,QAAA,CAAS,CAAA;AACpC,EAAA,QAAA,CAAS,MAAA,CAAO,gBAAA,EAAkB,oBAAA,CAAqB,cAAc,CAAA;AACrE,EAAA,QAAA,CAAS,MAAA,CAAO,YAAA,EAAc,oBAAA,CAAqB,UAAU,CAAA;AAC7D,EAAA,QAAA,CAAS,MAAA,CAAO,SAAA,EAAW,oBAAA,CAAqB,OAAO,CAAA;AACvD,EAAA,GAAA,CAAG,oBAAA,CAAqB,aAAA,IAAiB,KAAA,CAAA,EAAW;AACnD,IAAA,QAAA,CAAS,MAAA,CAAO,cAAA,EAAgB,oBAAA,CAAqB,YAAY,CAAA;AAAA,EACjE;AACD,EAAA,GAAA,CAAG,oBAAA,CAAqB,iBAAA,IAAqB,KAAA,CAAA,EAAW;AACvD,IAAA,QAAA,CAAS,MAAA,CAAO,kBAAA,EAAoB,oBAAA,CAAqB,gBAAgB,CAAA;AAAA,EACzE;AACD,EAAA,GAAA,CAAG,oBAAA,CAAqB,wBAAA,IAA4B,KAAA,CAAA,EAAW;AAC9D,IAAA,QAAA,CAAS,MAAA,CAAO,yBAAA,EAA2B,oBAAA,CAAqB,uBAAA,CAAwB,QAAA,CAAS,CAAC,CAAA;AAAA,EAClG;AACD,EAAA,GAAA,CAAG,oBAAA,CAAqB,eAAA,IAAmB,KAAA,CAAA,EAAW;AACrD,IAAA,QAAA,CAAS,MAAA,CAAO,gBAAA,EAAkB,oBAAA,CAAqB,cAAc,CAAA;AAAA,EACrE;AACD,EAAA,GAAA,CAAG,oBAAA,CAAqB,MAAA,IAAU,KAAA,CAAA,EAAW;AAC5C,IAAA,QAAA,CAAS,MAAA,CAAO,OAAA,EAAS,oBAAA,CAAqB,KAAK,CAAA;AAAA,EACnD;AACD,EAAA,GAAA,CAAG,oBAAA,CAAqB,cAAA,IAAkB,KAAA,CAAA,EAAW;AACpD,IAAA,QAAA,CAAS,MAAA,CAAO,eAAA,EAAiB,oBAAA,CAAqB,aAAa,CAAA;AAAA,EACnE;AAEK,EAAA,OAAO,8CAAA;AAAA,IACP;AAAA,MAAC,GAAA,EAAK,CAAA,iBAAA,CAAA;AAAA,MAAqB,MAAA,EAAQ,MAAA;AAAA,MACnC,OAAA,EAAS,EAAC,cAAA,EAAgB,sBAAuB,CAAA;AAAA,MAChD,IAAA,EAAM;AAAA,IACT,CAAA;AAAA,IACE;AAAA,EAAO,CAAA;AACT,CAAA;AAIG,IAAM,iCAAA,EAAmC,CACxB,OAAA,EAAA,GAC4F;AACpH,EAAA,MAAM,EAAC,QAAA,EAAU,eAAA,EAAiB,OAAA,EAAS,eAAc,EAAA,mBAAI,OAAA,UAAW,CAAC,GAAA;AAKnE,EAAA,MAAM,WAAA,EAAyG,CAAC,KAAA,EAAA,GAAU;AACtH,IAAA,MAAM,EAAC,KAAI,EAAA,mBAAI,KAAA,UAAS,CAAC,GAAA;AAEzB,IAAA,OAAQ,cAAA,CAAe,IAAA,EAAK,cAAc,CAAA;AAAA,EAC5C,CAAA;AAKN,EAAA,OAAQ,EAAE,UAAA,EAAY,GAAG,gBAAgB,CAAA;AAAC,CAAA;AASrC,IAAM,kBAAA,EAAoB,CACT,OAAA,EAAA,GAMb;AAEL,EAAA,MAAM,gBAAA,EAAkB,gCAAA,CAAiC,OAAO,CAAA;AAEhE,EAAA,OAAO,qCAAA,eAA2B,CAAA;AACpC,CAAA;AAIG,IAAM,YAAA,EAAc,CACvB,UAAA,EACH,OAAA,EAAiD,MAAA,EAAA,GAC7C;AAGC,EAAA,OAAO,8CAAA;AAAA,IACP;AAAA,MAAC,GAAA,EAAK,CAAA,UAAA,EAAa,UAAU,CAAA,CAAA;AAAY,MAAA;AAAO,MAAA;AAClD,IAAA;AACE,IAAA;AAAO,EAAA;AACT;AAGmC;AACP,EAAA;AAC5B;AAG4H;AAGpG,EAAA;AAER,EAAA;AAIwE,EAAA;AAM5D,EAAA;AAChC;AAmCC;AAIsB,EAAA;AAEE,EAAA;AAEO,EAAA;AAEvB,EAAA;AACT;AAIa;AAGe,EAAA;AAER,EAAA;AAIwE,EAAA;AAM5D,EAAA;AAChC;AAuBC;AAIsB,EAAA;AAEU,EAAA;AAED,EAAA;AAEvB,EAAA;AACT;AAYI;AAIS,EAAA;AACP,IAAA;AAA6B,MAAA;AAAY,MAAA;AAC3C,IAAA;AACE,IAAA;AAAO,EAAA;AACT;AAIS;AAGI,EAAA;AAK0G,EAAA;AAC5F,IAAA;AAEK,IAAA;AAC5B,EAAA;AAKmB,EAAA;AAAiB;AAUpB;AAQM,EAAA;AAEL,EAAA;AACrB;AAUA;AAIS,EAAA;AACP,IAAA;AAA6B,MAAA;AAAY,MAAA;AACvC,MAAA;AACJ,IAAA;AACE,IAAA;AAAO,EAAA;AACT;AAIS;AAGI,EAAA;AAKmI,EAAA;AAC9G,IAAA;AAEN,IAAA;AACxB,EAAA;AAKmB,EAAA;AAAiB;AAUpB;AAQM,EAAA;AAEL,EAAA;AACrB;AAWA;AAKS,EAAA;AACP,IAAA;AAA6B,MAAA;AAA2B,MAAA;AAC1D,IAAA;AACE,IAAA;AAAO,EAAA;AACT;AAIS;AAGI,EAAA;AAK0H,EAAA;AACtG,IAAA;AAEC,IAAA;AAC9B,EAAA;AAKmB,EAAA;AAAiB;AAUpB;AAQM,EAAA;AAEL,EAAA;AACrB;AD7b+B;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA","file":"/home/runner/work/squonk2-data-manager-js-client/squonk2-data-manager-js-client/dist/instance/instance.cjs","sourcesContent":[null,"// @ts-nocheck\n/**\n * Generated by orval v7.2.0 🍺\n * Do not edit manually.\n * Dataset Manager API\n * The Dataset Manager API service.\n\nA service that allows *registered* users to make **Datasets** and associated **Metadata** available to 
**Applications** and **Jobs** using **Projects** and **Files**.\n\n * OpenAPI spec version: 3.1\n */\nimport {\n useMutation,\n useQuery,\n useSuspenseQuery\n} from '@tanstack/react-query'\nimport type {\n DefinedInitialDataOptions,\n DefinedUseQueryResult,\n MutationFunction,\n QueryFunction,\n QueryKey,\n UndefinedInitialDataOptions,\n UseMutationOptions,\n UseMutationResult,\n UseQueryOptions,\n UseQueryResult,\n UseSuspenseQueryOptions,\n UseSuspenseQueryResult\n} from '@tanstack/react-query'\nimport type {\n DmError,\n GetInstancesParams,\n InstanceDryRunPostResponse,\n InstanceGetResponse,\n InstancePostBodyBody,\n InstancePostResponse,\n InstancesGetResponse,\n PatchInstanceParams,\n TaskIdentity\n} from '../data-manager-api.schemas'\nimport { customInstance } from '.././custom-instance';\nimport type { ErrorType } from '.././custom-instance';\n\n\ntype SecondParameter<T extends (...args: any) => any> = Parameters<T>[1];\n\n\n/**\n * Launches a new Application or Job instance, returning an Instance and Task ID. The Task ID should be used against the `/task` endpoint to determine the availability of the the running instance.\n\nInstance behaviour is controlled using the `specification`. You will need to consult individual applications to determine what can be placed in the specification. Applications typically provide a `template` describing its **options**.\n\nAn Application instance is not Ready for use until the corresponding **TaskState** is _STARTED_.\n\nA Job instance typically runs to completion, reaching the **TaskState** _SUCCESS_ when successful and _FAILURE_ is unsuccessful.\n\n * @summary Creates a new Job or Application instance\n */\nexport const createInstance = (\n instancePostBodyBody: InstancePostBodyBody,\n options?: SecondParameter<typeof customInstance>,) => {\n \n const formData = new FormData();\nformData.append('application_id', instancePostBodyBody.application_id)\nformData.append('project_id', instancePostBodyBody.project_id)\nformData.append('as_name', instancePostBodyBody.as_name)\nif(instancePostBodyBody.callback_url !== undefined) {\n formData.append('callback_url', instancePostBodyBody.callback_url)\n }\nif(instancePostBodyBody.callback_context !== undefined) {\n formData.append('callback_context', instancePostBodyBody.callback_context)\n }\nif(instancePostBodyBody.generate_callback_token !== undefined) {\n formData.append('generate_callback_token', instancePostBodyBody.generate_callback_token.toString())\n }\nif(instancePostBodyBody.callback_token !== undefined) {\n formData.append('callback_token', instancePostBodyBody.callback_token)\n }\nif(instancePostBodyBody.debug !== undefined) {\n formData.append('debug', instancePostBodyBody.debug)\n }\nif(instancePostBodyBody.specification !== undefined) {\n formData.append('specification', instancePostBodyBody.specification)\n }\n\n return customInstance<InstancePostResponse>(\n {url: `/instance`, method: 'POST',\n headers: {'Content-Type': 'multipart/form-data', },\n data: formData\n },\n options);\n }\n \n\n\nexport const getCreateInstanceMutationOptions = <TError = ErrorType<void | DmError>,\n TContext = unknown>(options?: { mutation?:UseMutationOptions<Awaited<ReturnType<typeof createInstance>>, TError,{data: InstancePostBodyBody}, TContext>, request?: SecondParameter<typeof customInstance>}\n): UseMutationOptions<Awaited<ReturnType<typeof createInstance>>, TError,{data: InstancePostBodyBody}, TContext> => {\nconst {mutation: mutationOptions, request: requestOptions} = options ?? 
{};\n\n \n\n\n const mutationFn: MutationFunction<Awaited<ReturnType<typeof createInstance>>, {data: InstancePostBodyBody}> = (props) => {\n const {data} = props ?? {};\n\n return createInstance(data,requestOptions)\n }\n\n \n\n\n return { mutationFn, ...mutationOptions }}\n\n export type CreateInstanceMutationResult = NonNullable<Awaited<ReturnType<typeof createInstance>>>\n export type CreateInstanceMutationBody = InstancePostBodyBody\n export type CreateInstanceMutationError = ErrorType<void | DmError>\n\n /**\n * @summary Creates a new Job or Application instance\n */\nexport const useCreateInstance = <TError = ErrorType<void | DmError>,\n TContext = unknown>(options?: { mutation?:UseMutationOptions<Awaited<ReturnType<typeof createInstance>>, TError,{data: InstancePostBodyBody}, TContext>, request?: SecondParameter<typeof customInstance>}\n): UseMutationResult<\n Awaited<ReturnType<typeof createInstance>>,\n TError,\n {data: InstancePostBodyBody},\n TContext\n > => {\n\n const mutationOptions = getCreateInstanceMutationOptions(options);\n\n return useMutation(mutationOptions);\n }\n /**\n * Returns a summary of all running instances. Instances can be running as an Application or as a Job. The response will contain an `application_type` field that is either `job` or `application`\n\n * @summary Get summary information about all Job and Application instances\n */\nexport const getInstances = (\n params?: GetInstancesParams,\n options?: SecondParameter<typeof customInstance>,signal?: AbortSignal\n) => {\n \n \n return customInstance<InstancesGetResponse>(\n {url: `/instance`, method: 'GET',\n params, signal\n },\n options);\n }\n \n\nexport const getGetInstancesQueryKey = (params?: GetInstancesParams,) => {\n return [\"data-manager-api\", `/instance`, ...(params ? [params]: [])] as const;\n }\n\n \nexport const getGetInstancesQueryOptions = <TData = Awaited<ReturnType<typeof getInstances>>, TError = ErrorType<void | DmError>>(params?: GetInstancesParams, options?: { query?:Partial<UseQueryOptions<Awaited<ReturnType<typeof getInstances>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n) => {\n\nconst {query: queryOptions, request: requestOptions} = options ?? {};\n\n const queryKey = queryOptions?.queryKey ?? 
getGetInstancesQueryKey(params);\n\n \n\n const queryFn: QueryFunction<Awaited<ReturnType<typeof getInstances>>> = ({ signal }) => getInstances(params, requestOptions, signal);\n\n \n\n \n\n return { queryKey, queryFn, ...queryOptions} as UseQueryOptions<Awaited<ReturnType<typeof getInstances>>, TError, TData> & { queryKey: QueryKey }\n}\n\nexport type GetInstancesQueryResult = NonNullable<Awaited<ReturnType<typeof getInstances>>>\nexport type GetInstancesQueryError = ErrorType<void | DmError>\n\n\nexport function useGetInstances<TData = Awaited<ReturnType<typeof getInstances>>, TError = ErrorType<void | DmError>>(\n params: undefined | GetInstancesParams, options: { query:Partial<UseQueryOptions<Awaited<ReturnType<typeof getInstances>>, TError, TData>> & Pick<\n DefinedInitialDataOptions<\n Awaited<ReturnType<typeof getInstances>>,\n TError,\n TData\n > , 'initialData'\n >, request?: SecondParameter<typeof customInstance>}\n\n ): DefinedUseQueryResult<TData, TError> & { queryKey: QueryKey }\nexport function useGetInstances<TData = Awaited<ReturnType<typeof getInstances>>, TError = ErrorType<void | DmError>>(\n params?: GetInstancesParams, options?: { query?:Partial<UseQueryOptions<Awaited<ReturnType<typeof getInstances>>, TError, TData>> & Pick<\n UndefinedInitialDataOptions<\n Awaited<ReturnType<typeof getInstances>>,\n TError,\n TData\n > , 'initialData'\n >, request?: SecondParameter<typeof customInstance>}\n\n ): UseQueryResult<TData, TError> & { queryKey: QueryKey }\nexport function useGetInstances<TData = Awaited<ReturnType<typeof getInstances>>, TError = ErrorType<void | DmError>>(\n params?: GetInstancesParams, options?: { query?:Partial<UseQueryOptions<Awaited<ReturnType<typeof getInstances>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n\n ): UseQueryResult<TData, TError> & { queryKey: QueryKey }\n/**\n * @summary Get summary information about all Job and Application instances\n */\n\nexport function useGetInstances<TData = Awaited<ReturnType<typeof getInstances>>, TError = ErrorType<void | DmError>>(\n params?: GetInstancesParams, options?: { query?:Partial<UseQueryOptions<Awaited<ReturnType<typeof getInstances>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n\n ): UseQueryResult<TData, TError> & { queryKey: QueryKey } {\n\n const queryOptions = getGetInstancesQueryOptions(params,options)\n\n const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & { queryKey: QueryKey };\n\n query.queryKey = queryOptions.queryKey ;\n\n return query;\n}\n\n\n\nexport const getGetInstancesSuspenseQueryOptions = <TData = Awaited<ReturnType<typeof getInstances>>, TError = ErrorType<void | DmError>>(params?: GetInstancesParams, options?: { query?:Partial<UseSuspenseQueryOptions<Awaited<ReturnType<typeof getInstances>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n) => {\n\nconst {query: queryOptions, request: requestOptions} = options ?? {};\n\n const queryKey = queryOptions?.queryKey ?? 
getGetInstancesQueryKey(params);\n\n \n\n const queryFn: QueryFunction<Awaited<ReturnType<typeof getInstances>>> = ({ signal }) => getInstances(params, requestOptions, signal);\n\n \n\n \n\n return { queryKey, queryFn, ...queryOptions} as UseSuspenseQueryOptions<Awaited<ReturnType<typeof getInstances>>, TError, TData> & { queryKey: QueryKey }\n}\n\nexport type GetInstancesSuspenseQueryResult = NonNullable<Awaited<ReturnType<typeof getInstances>>>\nexport type GetInstancesSuspenseQueryError = ErrorType<void | DmError>\n\n\nexport function useGetInstancesSuspense<TData = Awaited<ReturnType<typeof getInstances>>, TError = ErrorType<void | DmError>>(\n params: undefined | GetInstancesParams, options: { query:Partial<UseSuspenseQueryOptions<Awaited<ReturnType<typeof getInstances>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n\n ): UseSuspenseQueryResult<TData, TError> & { queryKey: QueryKey }\nexport function useGetInstancesSuspense<TData = Awaited<ReturnType<typeof getInstances>>, TError = ErrorType<void | DmError>>(\n params?: GetInstancesParams, options?: { query?:Partial<UseSuspenseQueryOptions<Awaited<ReturnType<typeof getInstances>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n\n ): UseSuspenseQueryResult<TData, TError> & { queryKey: QueryKey }\nexport function useGetInstancesSuspense<TData = Awaited<ReturnType<typeof getInstances>>, TError = ErrorType<void | DmError>>(\n params?: GetInstancesParams, options?: { query?:Partial<UseSuspenseQueryOptions<Awaited<ReturnType<typeof getInstances>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n\n ): UseSuspenseQueryResult<TData, TError> & { queryKey: QueryKey }\n/**\n * @summary Get summary information about all Job and Application instances\n */\n\nexport function useGetInstancesSuspense<TData = Awaited<ReturnType<typeof getInstances>>, TError = ErrorType<void | DmError>>(\n params?: GetInstancesParams, options?: { query?:Partial<UseSuspenseQueryOptions<Awaited<ReturnType<typeof getInstances>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n\n ): UseSuspenseQueryResult<TData, TError> & { queryKey: QueryKey } {\n\n const queryOptions = getGetInstancesSuspenseQueryOptions(params,options)\n\n const query = useSuspenseQuery(queryOptions) as UseSuspenseQueryResult<TData, TError> & { queryKey: QueryKey };\n\n query.queryKey = queryOptions.queryKey ;\n\n return query;\n}\n\n\n\n/**\n * Similar to the `/instance [POST]` endpoint this one is used to check whether a new **Application** or **Job** instance can be launched. 
Rather than returning an **Instance** (or **Task**) ID this endpoint is simply used to ensure that the Job/Application is runnable while also returning the compiled `command` (if the Instance is a Job).\n\nThe test result is only valid at the time of the call, whether an actual instance would start or not will require an identical call to `/instance POST`.\n\n * @summary Used to check the execution of new Job or Application instance\n */\nexport const dryRunInstance = (\n instancePostBodyBody: InstancePostBodyBody,\n options?: SecondParameter<typeof customInstance>,) => {\n \n const formData = new FormData();\nformData.append('application_id', instancePostBodyBody.application_id)\nformData.append('project_id', instancePostBodyBody.project_id)\nformData.append('as_name', instancePostBodyBody.as_name)\nif(instancePostBodyBody.callback_url !== undefined) {\n formData.append('callback_url', instancePostBodyBody.callback_url)\n }\nif(instancePostBodyBody.callback_context !== undefined) {\n formData.append('callback_context', instancePostBodyBody.callback_context)\n }\nif(instancePostBodyBody.generate_callback_token !== undefined) {\n formData.append('generate_callback_token', instancePostBodyBody.generate_callback_token.toString())\n }\nif(instancePostBodyBody.callback_token !== undefined) {\n formData.append('callback_token', instancePostBodyBody.callback_token)\n }\nif(instancePostBodyBody.debug !== undefined) {\n formData.append('debug', instancePostBodyBody.debug)\n }\nif(instancePostBodyBody.specification !== undefined) {\n formData.append('specification', instancePostBodyBody.specification)\n }\n\n return customInstance<InstanceDryRunPostResponse>(\n {url: `/instance/dry-run`, method: 'POST',\n headers: {'Content-Type': 'multipart/form-data', },\n data: formData\n },\n options);\n }\n \n\n\nexport const getDryRunInstanceMutationOptions = <TError = ErrorType<void | DmError>,\n TContext = unknown>(options?: { mutation?:UseMutationOptions<Awaited<ReturnType<typeof dryRunInstance>>, TError,{data: InstancePostBodyBody}, TContext>, request?: SecondParameter<typeof customInstance>}\n): UseMutationOptions<Awaited<ReturnType<typeof dryRunInstance>>, TError,{data: InstancePostBodyBody}, TContext> => {\nconst {mutation: mutationOptions, request: requestOptions} = options ?? {};\n\n \n\n\n const mutationFn: MutationFunction<Awaited<ReturnType<typeof dryRunInstance>>, {data: InstancePostBodyBody}> = (props) => {\n const {data} = props ?? 
{};\n\n return dryRunInstance(data,requestOptions)\n }\n\n \n\n\n return { mutationFn, ...mutationOptions }}\n\n export type DryRunInstanceMutationResult = NonNullable<Awaited<ReturnType<typeof dryRunInstance>>>\n export type DryRunInstanceMutationBody = InstancePostBodyBody\n export type DryRunInstanceMutationError = ErrorType<void | DmError>\n\n /**\n * @summary Used to check the execution of new Job or Application instance\n */\nexport const useDryRunInstance = <TError = ErrorType<void | DmError>,\n TContext = unknown>(options?: { mutation?:UseMutationOptions<Awaited<ReturnType<typeof dryRunInstance>>, TError,{data: InstancePostBodyBody}, TContext>, request?: SecondParameter<typeof customInstance>}\n): UseMutationResult<\n Awaited<ReturnType<typeof dryRunInstance>>,\n TError,\n {data: InstancePostBodyBody},\n TContext\n > => {\n\n const mutationOptions = getDryRunInstanceMutationOptions(options);\n\n return useMutation(mutationOptions);\n }\n /**\n * @summary Get detailed information about an Instance\n */\nexport const getInstance = (\n instanceId: string,\n options?: SecondParameter<typeof customInstance>,signal?: AbortSignal\n) => {\n \n \n return customInstance<InstanceGetResponse>(\n {url: `/instance/${instanceId}`, method: 'GET', signal\n },\n options);\n }\n \n\nexport const getGetInstanceQueryKey = (instanceId: string,) => {\n return [\"data-manager-api\", `/instance/${instanceId}`] as const;\n }\n\n \nexport const getGetInstanceQueryOptions = <TData = Awaited<ReturnType<typeof getInstance>>, TError = ErrorType<void | DmError>>(instanceId: string, options?: { query?:Partial<UseQueryOptions<Awaited<ReturnType<typeof getInstance>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n) => {\n\nconst {query: queryOptions, request: requestOptions} = options ?? {};\n\n const queryKey = queryOptions?.queryKey ?? 
getGetInstanceQueryKey(instanceId);\n\n \n\n const queryFn: QueryFunction<Awaited<ReturnType<typeof getInstance>>> = ({ signal }) => getInstance(instanceId, requestOptions, signal);\n\n \n\n \n\n return { queryKey, queryFn, enabled: !!(instanceId), ...queryOptions} as UseQueryOptions<Awaited<ReturnType<typeof getInstance>>, TError, TData> & { queryKey: QueryKey }\n}\n\nexport type GetInstanceQueryResult = NonNullable<Awaited<ReturnType<typeof getInstance>>>\nexport type GetInstanceQueryError = ErrorType<void | DmError>\n\n\nexport function useGetInstance<TData = Awaited<ReturnType<typeof getInstance>>, TError = ErrorType<void | DmError>>(\n instanceId: string, options: { query:Partial<UseQueryOptions<Awaited<ReturnType<typeof getInstance>>, TError, TData>> & Pick<\n DefinedInitialDataOptions<\n Awaited<ReturnType<typeof getInstance>>,\n TError,\n TData\n > , 'initialData'\n >, request?: SecondParameter<typeof customInstance>}\n\n ): DefinedUseQueryResult<TData, TError> & { queryKey: QueryKey }\nexport function useGetInstance<TData = Awaited<ReturnType<typeof getInstance>>, TError = ErrorType<void | DmError>>(\n instanceId: string, options?: { query?:Partial<UseQueryOptions<Awaited<ReturnType<typeof getInstance>>, TError, TData>> & Pick<\n UndefinedInitialDataOptions<\n Awaited<ReturnType<typeof getInstance>>,\n TError,\n TData\n > , 'initialData'\n >, request?: SecondParameter<typeof customInstance>}\n\n ): UseQueryResult<TData, TError> & { queryKey: QueryKey }\nexport function useGetInstance<TData = Awaited<ReturnType<typeof getInstance>>, TError = ErrorType<void | DmError>>(\n instanceId: string, options?: { query?:Partial<UseQueryOptions<Awaited<ReturnType<typeof getInstance>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n\n ): UseQueryResult<TData, TError> & { queryKey: QueryKey }\n/**\n * @summary Get detailed information about an Instance\n */\n\nexport function useGetInstance<TData = Awaited<ReturnType<typeof getInstance>>, TError = ErrorType<void | DmError>>(\n instanceId: string, options?: { query?:Partial<UseQueryOptions<Awaited<ReturnType<typeof getInstance>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n\n ): UseQueryResult<TData, TError> & { queryKey: QueryKey } {\n\n const queryOptions = getGetInstanceQueryOptions(instanceId,options)\n\n const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & { queryKey: QueryKey };\n\n query.queryKey = queryOptions.queryKey ;\n\n return query;\n}\n\n\n\nexport const getGetInstanceSuspenseQueryOptions = <TData = Awaited<ReturnType<typeof getInstance>>, TError = ErrorType<void | DmError>>(instanceId: string, options?: { query?:Partial<UseSuspenseQueryOptions<Awaited<ReturnType<typeof getInstance>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n) => {\n\nconst {query: queryOptions, request: requestOptions} = options ?? {};\n\n const queryKey = queryOptions?.queryKey ?? 
getGetInstanceQueryKey(instanceId);\n\n \n\n const queryFn: QueryFunction<Awaited<ReturnType<typeof getInstance>>> = ({ signal }) => getInstance(instanceId, requestOptions, signal);\n\n \n\n \n\n return { queryKey, queryFn, enabled: !!(instanceId), ...queryOptions} as UseSuspenseQueryOptions<Awaited<ReturnType<typeof getInstance>>, TError, TData> & { queryKey: QueryKey }\n}\n\nexport type GetInstanceSuspenseQueryResult = NonNullable<Awaited<ReturnType<typeof getInstance>>>\nexport type GetInstanceSuspenseQueryError = ErrorType<void | DmError>\n\n\nexport function useGetInstanceSuspense<TData = Awaited<ReturnType<typeof getInstance>>, TError = ErrorType<void | DmError>>(\n instanceId: string, options: { query:Partial<UseSuspenseQueryOptions<Awaited<ReturnType<typeof getInstance>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n\n ): UseSuspenseQueryResult<TData, TError> & { queryKey: QueryKey }\nexport function useGetInstanceSuspense<TData = Awaited<ReturnType<typeof getInstance>>, TError = ErrorType<void | DmError>>(\n instanceId: string, options?: { query?:Partial<UseSuspenseQueryOptions<Awaited<ReturnType<typeof getInstance>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n\n ): UseSuspenseQueryResult<TData, TError> & { queryKey: QueryKey }\nexport function useGetInstanceSuspense<TData = Awaited<ReturnType<typeof getInstance>>, TError = ErrorType<void | DmError>>(\n instanceId: string, options?: { query?:Partial<UseSuspenseQueryOptions<Awaited<ReturnType<typeof getInstance>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n\n ): UseSuspenseQueryResult<TData, TError> & { queryKey: QueryKey }\n/**\n * @summary Get detailed information about an Instance\n */\n\nexport function useGetInstanceSuspense<TData = Awaited<ReturnType<typeof getInstance>>, TError = ErrorType<void | DmError>>(\n instanceId: string, options?: { query?:Partial<UseSuspenseQueryOptions<Awaited<ReturnType<typeof getInstance>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n\n ): UseSuspenseQueryResult<TData, TError> & { queryKey: QueryKey } {\n\n const queryOptions = getGetInstanceSuspenseQueryOptions(instanceId,options)\n\n const query = useSuspenseQuery(queryOptions) as UseSuspenseQueryResult<TData, TError> & { queryKey: QueryKey };\n\n query.queryKey = queryOptions.queryKey ;\n\n return query;\n}\n\n\n\n/**\n * The Application or Job Instance is terminated.\n\nYou must be the `owner` or an `editor` of the Instance to delete it\n\n * @summary Delete a Job or Application Instance\n */\nexport const terminateInstance = (\n instanceId: string,\n options?: SecondParameter<typeof customInstance>,) => {\n \n \n return customInstance<TaskIdentity>(\n {url: `/instance/${instanceId}`, method: 'DELETE'\n },\n options);\n }\n \n\n\nexport const getTerminateInstanceMutationOptions = <TError = ErrorType<void | DmError>,\n TContext = unknown>(options?: { mutation?:UseMutationOptions<Awaited<ReturnType<typeof terminateInstance>>, TError,{instanceId: string}, TContext>, request?: SecondParameter<typeof customInstance>}\n): UseMutationOptions<Awaited<ReturnType<typeof terminateInstance>>, TError,{instanceId: string}, TContext> => {\nconst {mutation: mutationOptions, request: requestOptions} = options ?? {};\n\n \n\n\n const mutationFn: MutationFunction<Awaited<ReturnType<typeof terminateInstance>>, {instanceId: string}> = (props) => {\n const {instanceId} = props ?? 
{};\n\n return terminateInstance(instanceId,requestOptions)\n }\n\n \n\n\n return { mutationFn, ...mutationOptions }}\n\n export type TerminateInstanceMutationResult = NonNullable<Awaited<ReturnType<typeof terminateInstance>>>\n \n export type TerminateInstanceMutationError = ErrorType<void | DmError>\n\n /**\n * @summary Delete a Job or Application Instance\n */\nexport const useTerminateInstance = <TError = ErrorType<void | DmError>,\n TContext = unknown>(options?: { mutation?:UseMutationOptions<Awaited<ReturnType<typeof terminateInstance>>, TError,{instanceId: string}, TContext>, request?: SecondParameter<typeof customInstance>}\n): UseMutationResult<\n Awaited<ReturnType<typeof terminateInstance>>,\n TError,\n {instanceId: string},\n TContext\n > => {\n\n const mutationOptions = getTerminateInstanceMutationOptions(options);\n\n return useMutation(mutationOptions);\n }\n /**\n * The Application or Job Instance is updated according to the patch parameters.\n\nYou must be the `owner` or an `editor` of the Instance to patch it\n\n * @summary Update a Job or Application Instance\n */\nexport const patchInstance = (\n instanceId: string,\n params?: PatchInstanceParams,\n options?: SecondParameter<typeof customInstance>,) => {\n \n \n return customInstance<void>(\n {url: `/instance/${instanceId}`, method: 'PATCH',\n params\n },\n options);\n }\n \n\n\nexport const getPatchInstanceMutationOptions = <TError = ErrorType<void | DmError>,\n TContext = unknown>(options?: { mutation?:UseMutationOptions<Awaited<ReturnType<typeof patchInstance>>, TError,{instanceId: string;params?: PatchInstanceParams}, TContext>, request?: SecondParameter<typeof customInstance>}\n): UseMutationOptions<Awaited<ReturnType<typeof patchInstance>>, TError,{instanceId: string;params?: PatchInstanceParams}, TContext> => {\nconst {mutation: mutationOptions, request: requestOptions} = options ?? {};\n\n \n\n\n const mutationFn: MutationFunction<Awaited<ReturnType<typeof patchInstance>>, {instanceId: string;params?: PatchInstanceParams}> = (props) => {\n const {instanceId,params} = props ?? {};\n\n return patchInstance(instanceId,params,requestOptions)\n }\n\n \n\n\n return { mutationFn, ...mutationOptions }}\n\n export type PatchInstanceMutationResult = NonNullable<Awaited<ReturnType<typeof patchInstance>>>\n \n export type PatchInstanceMutationError = ErrorType<void | DmError>\n\n /**\n * @summary Update a Job or Application Instance\n */\nexport const usePatchInstance = <TError = ErrorType<void | DmError>,\n TContext = unknown>(options?: { mutation?:UseMutationOptions<Awaited<ReturnType<typeof patchInstance>>, TError,{instanceId: string;params?: PatchInstanceParams}, TContext>, request?: SecondParameter<typeof customInstance>}\n): UseMutationResult<\n Awaited<ReturnType<typeof patchInstance>>,\n TError,\n {instanceId: string;params?: PatchInstanceParams},\n TContext\n > => {\n\n const mutationOptions = getPatchInstanceMutationOptions(options);\n\n return useMutation(mutationOptions);\n }\n /**\n * This revokes the instance **Token**, which was optionally generated when the instance was launched. 
No authentication is required to use this endpoint, which is typically used by a remote system driven by instance callbacks.\n\nTokens automatically expire after a period of time but can be revoked instantly with this endpoint.\n\nThe remote system will revoke the token when it's finished with it\n\n * @summary Delete (revoke) the Instance Token\n */\nexport const deleteInstanceToken = (\n instanceId: string,\n token: string,\n options?: SecondParameter<typeof customInstance>,) => {\n \n \n return customInstance<void>(\n {url: `/instance/${instanceId}/token/${token}`, method: 'DELETE'\n },\n options);\n }\n \n\n\nexport const getDeleteInstanceTokenMutationOptions = <TError = ErrorType<DmError>,\n TContext = unknown>(options?: { mutation?:UseMutationOptions<Awaited<ReturnType<typeof deleteInstanceToken>>, TError,{instanceId: string;token: string}, TContext>, request?: SecondParameter<typeof customInstance>}\n): UseMutationOptions<Awaited<ReturnType<typeof deleteInstanceToken>>, TError,{instanceId: string;token: string}, TContext> => {\nconst {mutation: mutationOptions, request: requestOptions} = options ?? {};\n\n \n\n\n const mutationFn: MutationFunction<Awaited<ReturnType<typeof deleteInstanceToken>>, {instanceId: string;token: string}> = (props) => {\n const {instanceId,token} = props ?? {};\n\n return deleteInstanceToken(instanceId,token,requestOptions)\n }\n\n \n\n\n return { mutationFn, ...mutationOptions }}\n\n export type DeleteInstanceTokenMutationResult = NonNullable<Awaited<ReturnType<typeof deleteInstanceToken>>>\n \n export type DeleteInstanceTokenMutationError = ErrorType<DmError>\n\n /**\n * @summary Delete (revoke) the Instance Token\n */\nexport const useDeleteInstanceToken = <TError = ErrorType<DmError>,\n TContext = unknown>(options?: { mutation?:UseMutationOptions<Awaited<ReturnType<typeof deleteInstanceToken>>, TError,{instanceId: string;token: string}, TContext>, request?: SecondParameter<typeof customInstance>}\n): UseMutationResult<\n Awaited<ReturnType<typeof deleteInstanceToken>>,\n TError,\n {instanceId: string;token: string},\n TContext\n > => {\n\n const mutationOptions = getDeleteInstanceTokenMutationOptions(options);\n\n return useMutation(mutationOptions);\n }\n "]}
1
+ {"version":3,"sources":["/home/runner/work/squonk2-data-manager-js-client/squonk2-data-manager-js-client/dist/instance/instance.cjs","../../src/instance/instance.ts"],"names":[],"mappings":"AAAA;AACE;AACF,yDAA8B;AAC9B;AACA;ACOA;AACE;AACA;AACA;AAAA,mDACK;AA4CA,IAAM,eAAA,EAAiB,CAC1B,oBAAA,EACH,OAAA,EAAA,GAAsD;AAEjD,EAAA,MAAM,SAAA,EAAW,IAAI,QAAA,CAAS,CAAA;AACpC,EAAA,QAAA,CAAS,MAAA,CAAO,gBAAA,EAAkB,oBAAA,CAAqB,cAAc,CAAA;AACrE,EAAA,QAAA,CAAS,MAAA,CAAO,YAAA,EAAc,oBAAA,CAAqB,UAAU,CAAA;AAC7D,EAAA,QAAA,CAAS,MAAA,CAAO,SAAA,EAAW,oBAAA,CAAqB,OAAO,CAAA;AACvD,EAAA,GAAA,CAAG,oBAAA,CAAqB,aAAA,IAAiB,KAAA,CAAA,EAAW;AACnD,IAAA,QAAA,CAAS,MAAA,CAAO,cAAA,EAAgB,oBAAA,CAAqB,YAAY,CAAA;AAAA,EACjE;AACD,EAAA,GAAA,CAAG,oBAAA,CAAqB,iBAAA,IAAqB,KAAA,CAAA,EAAW;AACvD,IAAA,QAAA,CAAS,MAAA,CAAO,kBAAA,EAAoB,oBAAA,CAAqB,gBAAgB,CAAA;AAAA,EACzE;AACD,EAAA,GAAA,CAAG,oBAAA,CAAqB,wBAAA,IAA4B,KAAA,CAAA,EAAW;AAC9D,IAAA,QAAA,CAAS,MAAA,CAAO,yBAAA,EAA2B,oBAAA,CAAqB,uBAAA,CAAwB,QAAA,CAAS,CAAC,CAAA;AAAA,EAClG;AACD,EAAA,GAAA,CAAG,oBAAA,CAAqB,eAAA,IAAmB,KAAA,CAAA,EAAW;AACrD,IAAA,QAAA,CAAS,MAAA,CAAO,gBAAA,EAAkB,oBAAA,CAAqB,cAAc,CAAA;AAAA,EACrE;AACD,EAAA,GAAA,CAAG,oBAAA,CAAqB,MAAA,IAAU,KAAA,CAAA,EAAW;AAC5C,IAAA,QAAA,CAAS,MAAA,CAAO,OAAA,EAAS,oBAAA,CAAqB,KAAK,CAAA;AAAA,EACnD;AACD,EAAA,GAAA,CAAG,oBAAA,CAAqB,cAAA,IAAkB,KAAA,CAAA,EAAW;AACpD,IAAA,QAAA,CAAS,MAAA,CAAO,eAAA,EAAiB,oBAAA,CAAqB,aAAa,CAAA;AAAA,EACnE;AAEK,EAAA,OAAO,8CAAA;AAAA,IACP;AAAA,MAAC,GAAA,EAAK,CAAA,SAAA,CAAA;AAAA,MAAa,MAAA,EAAQ,MAAA;AAAA,MAC3B,OAAA,EAAS,EAAC,cAAA,EAAgB,sBAAuB,CAAA;AAAA,MAChD,IAAA,EAAM;AAAA,IACT,CAAA;AAAA,IACE;AAAA,EAAO,CAAA;AACT,CAAA;AAIG,IAAM,iCAAA,EAAmC,CACxB,OAAA,EAAA,GAC4F;AACpH,EAAA,MAAM,EAAC,QAAA,EAAU,eAAA,EAAiB,OAAA,EAAS,eAAc,EAAA,mBAAI,OAAA,UAAW,CAAC,GAAA;AAKnE,EAAA,MAAM,WAAA,EAAyG,CAAC,KAAA,EAAA,GAAU;AACtH,IAAA,MAAM,EAAC,KAAI,EAAA,mBAAI,KAAA,UAAS,CAAC,GAAA;AAEzB,IAAA,OAAQ,cAAA,CAAe,IAAA,EAAK,cAAc,CAAA;AAAA,EAC5C,CAAA;AAKN,EAAA,OAAQ,EAAE,UAAA,EAAY,GAAG,gBAAgB,CAAA;AAAC,CAAA;AASrC,IAAM,kBAAA,EAAoB,CACT,OAAA,EAAA,GAMb;AAEL,EAAA,MAAM,gBAAA,EAAkB,gCAAA,CAAiC,OAAO,CAAA;AAEhE,EAAA,OAAO,qCAAA,eAA2B,CAAA;AACpC,CAAA;AAMG,IAAM,aAAA,EAAe,CACxB,MAAA,EACH,OAAA,EAAiD,MAAA,EAAA,GAC7C;AAGC,EAAA,OAAO,8CAAA;AAAA,IACP;AAAA,MAAC,GAAA,EAAK,CAAA,SAAA,CAAA;AAAA,MAAa,MAAA,EAAQ,KAAA;AAAA,MACzB,MAAA;AAAA,MAAQ;AAAA,IACZ,CAAA;AAAA,IACE;AAAA,EAAO,CAAA;AACT,CAAA;AAGG,IAAM,wBAAA,EAA0B,CAAC,MAAA,EAAA,GAAiC;AACrE,EAAA,OAAO,CAAC,kBAAA,EAAoB,CAAA,SAAA,CAAA,EAAa,GAAI,OAAA,EAAS,CAAC,MAAM,EAAA,EAAG,CAAC,CAAE,CAAA;AACnE,CAAA;AAGG,IAAM,4BAAA,EAA8B,CAAuF,MAAA,EAA6B,OAAA,EAAA,GAC1J;AAEL,EAAA,MAAM,EAAC,KAAA,EAAO,YAAA,EAAc,OAAA,EAAS,eAAc,EAAA,mBAAI,OAAA,UAAW,CAAC,GAAA;AAEjE,EAAA,MAAM,SAAA,mBAAA,CAAY,aAAA,GAAA,KAAA,EAAA,KAAA,EAAA,EAAA,YAAA,CAAc,QAAA,CAAA,UAAY,uBAAA,CAAwB,MAAM,GAAA;AAIxE,EAAA,MAAM,QAAA,EAAmE,CAAC,EAAE,OAAO,CAAA,EAAA,GAAM,YAAA,CAAa,MAAA,EAAQ,cAAA,EAAgB,MAAM,CAAA;AAMrI,EAAA,OAAQ,EAAE,QAAA,EAAU,OAAA,EAAS,GAAG,aAAY,CAAA;AAC/C,CAAA;AAkCO,SAAS,eAAA,CACf,MAAA,EAA6B,OAAA,EAE+B;AAE3D,EAAA,MAAM,aAAA,EAAe,2BAAA,CAA4B,MAAA,EAAO,OAAO,CAAA;AAE/D,EAAA,MAAM,MAAA,EAAQ,kCAAA,YAAqB,CAAA;AAEnC,EAAA,KAAA,CAAM,SAAA,EAAW,YAAA,CAAa,QAAA;AAE9B,EAAA,OAAO,KAAA;AACT;AAIO,IAAM,oCAAA,EAAsC,CAAuF,MAAA,EAA6B,OAAA,EAAA,GAClK;AAEL,EAAA,MAAM,EAAC,KAAA,EAAO,YAAA,EAAc,OAAA,EAAS,eAAc,EAAA,mBAAI,OAAA,UAAW,CAAC,GAAA;AAEjE,EAAA,MAAM,SAAA,mBAAA,CAAY,aAAA,GAAA,KAAA,EAAA,KAAA,EAAA,EAAA,YAAA,CAAc,QAAA,CAAA,UAAY,uBAAA,CAAwB,MAAM,GAAA;AAIxE,EAAA,MAAM,QAAA,EAAmE,CAAC,EAAE,OAAO,CAAA,EAAA,GAAM,YAAA,CAAa,MAAA,EAAQ,cAAA,EAAgB,MAAM,CAAA;AAMrI,EAAA,OAAQ,EAAE,QAAA,EAAU,OAAA,EAAS,GAAG,aAAY,CAAA;AAC/C,CAAA;AAsBO,SAAS,uBAAA,CACf,MAAA,EAA6B,OAA
A,EAEuC;AAEnE,EAAA,MAAM,aAAA,EAAe,mCAAA,CAAoC,MAAA,EAAO,OAAO,CAAA;AAEvE,EAAA,MAAM,MAAA,EAAQ,0CAAA,YAA6B,CAAA;AAE3C,EAAA,KAAA,CAAM,SAAA,EAAW,YAAA,CAAa,QAAA;AAE9B,EAAA,OAAO,KAAA;AACT;AAWO,IAAM,eAAA,EAAiB,CAC1B,oBAAA,EACH,OAAA,EAAA,GAAsD;AAEjD,EAAA,MAAM,SAAA,EAAW,IAAI,QAAA,CAAS,CAAA;AACpC,EAAA,QAAA,CAAS,MAAA,CAAO,gBAAA,EAAkB,oBAAA,CAAqB,cAAc,CAAA;AACrE,EAAA,QAAA,CAAS,MAAA,CAAO,YAAA,EAAc,oBAAA,CAAqB,UAAU,CAAA;AAC7D,EAAA,QAAA,CAAS,MAAA,CAAO,SAAA,EAAW,oBAAA,CAAqB,OAAO,CAAA;AACvD,EAAA,GAAA,CAAG,oBAAA,CAAqB,aAAA,IAAiB,KAAA,CAAA,EAAW;AACnD,IAAA,QAAA,CAAS,MAAA,CAAO,cAAA,EAAgB,oBAAA,CAAqB,YAAY,CAAA;AAAA,EACjE;AACD,EAAA,GAAA,CAAG,oBAAA,CAAqB,iBAAA,IAAqB,KAAA,CAAA,EAAW;AACvD,IAAA,QAAA,CAAS,MAAA,CAAO,kBAAA,EAAoB,oBAAA,CAAqB,gBAAgB,CAAA;AAAA,EACzE;AACD,EAAA,GAAA,CAAG,oBAAA,CAAqB,wBAAA,IAA4B,KAAA,CAAA,EAAW;AAC9D,IAAA,QAAA,CAAS,MAAA,CAAO,yBAAA,EAA2B,oBAAA,CAAqB,uBAAA,CAAwB,QAAA,CAAS,CAAC,CAAA;AAAA,EAClG;AACD,EAAA,GAAA,CAAG,oBAAA,CAAqB,eAAA,IAAmB,KAAA,CAAA,EAAW;AACrD,IAAA,QAAA,CAAS,MAAA,CAAO,gBAAA,EAAkB,oBAAA,CAAqB,cAAc,CAAA;AAAA,EACrE;AACD,EAAA,GAAA,CAAG,oBAAA,CAAqB,MAAA,IAAU,KAAA,CAAA,EAAW;AAC5C,IAAA,QAAA,CAAS,MAAA,CAAO,OAAA,EAAS,oBAAA,CAAqB,KAAK,CAAA;AAAA,EACnD;AACD,EAAA,GAAA,CAAG,oBAAA,CAAqB,cAAA,IAAkB,KAAA,CAAA,EAAW;AACpD,IAAA,QAAA,CAAS,MAAA,CAAO,eAAA,EAAiB,oBAAA,CAAqB,aAAa,CAAA;AAAA,EACnE;AAEK,EAAA,OAAO,8CAAA;AAAA,IACP;AAAA,MAAC,GAAA,EAAK,CAAA,iBAAA,CAAA;AAAA,MAAqB,MAAA,EAAQ,MAAA;AAAA,MACnC,OAAA,EAAS,EAAC,cAAA,EAAgB,sBAAuB,CAAA;AAAA,MAChD,IAAA,EAAM;AAAA,IACT,CAAA;AAAA,IACE;AAAA,EAAO,CAAA;AACT,CAAA;AAIG,IAAM,iCAAA,EAAmC,CACxB,OAAA,EAAA,GAC4F;AACpH,EAAA,MAAM,EAAC,QAAA,EAAU,eAAA,EAAiB,OAAA,EAAS,eAAc,EAAA,mBAAI,OAAA,UAAW,CAAC,GAAA;AAKnE,EAAA,MAAM,WAAA,EAAyG,CAAC,KAAA,EAAA,GAAU;AACtH,IAAA,MAAM,EAAC,KAAI,EAAA,mBAAI,KAAA,UAAS,CAAC,GAAA;AAEzB,IAAA,OAAQ,cAAA,CAAe,IAAA,EAAK,cAAc,CAAA;AAAA,EAC5C,CAAA;AAKN,EAAA,OAAQ,EAAE,UAAA,EAAY,GAAG,gBAAgB,CAAA;AAAC,CAAA;AASrC,IAAM,kBAAA,EAAoB,CACT,OAAA,EAAA,GAMb;AAEL,EAAA,MAAM,gBAAA,EAAkB,gCAAA,CAAiC,OAAO,CAAA;AAEhE,EAAA,OAAO,qCAAA,eAA2B,CAAA;AACpC,CAAA;AAIG,IAAM,YAAA,EAAc,CACvB,UAAA,EACH,OAAA,EAAiD,MAAA,EAAA,GAC7C;AAGC,EAAA,OAAO,8CAAA;AAAA,IACP;AAAA,MAAC,GAAA,EAAK,CAAA,UAAA,EAAa,UAAU,CAAA,CAAA;AAAY,MAAA;AAAO,MAAA;AAClD,IAAA;AACE,IAAA;AAAO,EAAA;AACT;AAGmC;AACP,EAAA;AAC5B;AAG4H;AAGpG,EAAA;AAER,EAAA;AAIwE,EAAA;AAM5D,EAAA;AAChC;AAmCC;AAIsB,EAAA;AAEE,EAAA;AAEO,EAAA;AAEvB,EAAA;AACT;AAIa;AAGe,EAAA;AAER,EAAA;AAIwE,EAAA;AAM5D,EAAA;AAChC;AAuBC;AAIsB,EAAA;AAEU,EAAA;AAED,EAAA;AAEvB,EAAA;AACT;AAYI;AAIS,EAAA;AACP,IAAA;AAA6B,MAAA;AAAY,MAAA;AAC3C,IAAA;AACE,IAAA;AAAO,EAAA;AACT;AAIS;AAGI,EAAA;AAK0G,EAAA;AAC5F,IAAA;AAEK,IAAA;AAC5B,EAAA;AAKmB,EAAA;AAAiB;AAUpB;AAQM,EAAA;AAEL,EAAA;AACrB;AAUA;AAIS,EAAA;AACP,IAAA;AAA6B,MAAA;AAAY,MAAA;AACvC,MAAA;AACJ,IAAA;AACE,IAAA;AAAO,EAAA;AACT;AAIS;AAGI,EAAA;AAKmI,EAAA;AAC9G,IAAA;AAEN,IAAA;AACxB,EAAA;AAKmB,EAAA;AAAiB;AAUpB;AAQM,EAAA;AAEL,EAAA;AACrB;AAWA;AAKS,EAAA;AACP,IAAA;AAA6B,MAAA;AAA2B,MAAA;AAC1D,IAAA;AACE,IAAA;AAAO,EAAA;AACT;AAIS;AAGI,EAAA;AAK0H,EAAA;AACtG,IAAA;AAEC,IAAA;AAC9B,EAAA;AAKmB,EAAA;AAAiB;AAUpB;AAQM,EAAA;AAEL,EAAA;AACrB;AD7b+B;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA","file":"/home/runner/work/squonk2-data-manager-js-client/squonk2-data-manager-js-client/dist/instance/instance.cjs","sourcesContent":[null,"// @ts-nocheck\n/**\n * Generated by orval v7.2.0 🍺\n * Do not edit manually.\n * Dataset Manager API\n * The Dataset Manager API service.\n\nA service that allows *registered* users to make **Datasets** and associated **Metadata** available to 
**Applications** and **Jobs** using **Projects** and **Files**.\n\n * OpenAPI spec version: 3.3\n */\nimport {\n useMutation,\n useQuery,\n useSuspenseQuery\n} from '@tanstack/react-query'\nimport type {\n DefinedInitialDataOptions,\n DefinedUseQueryResult,\n MutationFunction,\n QueryFunction,\n QueryKey,\n UndefinedInitialDataOptions,\n UseMutationOptions,\n UseMutationResult,\n UseQueryOptions,\n UseQueryResult,\n UseSuspenseQueryOptions,\n UseSuspenseQueryResult\n} from '@tanstack/react-query'\nimport type {\n DmError,\n GetInstancesParams,\n InstanceDryRunPostResponse,\n InstanceGetResponse,\n InstancePostBodyBody,\n InstancePostResponse,\n InstancesGetResponse,\n PatchInstanceParams,\n TaskIdentity\n} from '../data-manager-api.schemas'\nimport { customInstance } from '.././custom-instance';\nimport type { ErrorType } from '.././custom-instance';\n\n\ntype SecondParameter<T extends (...args: any) => any> = Parameters<T>[1];\n\n\n/**\n * Launches a new Application or Job instance, returning an Instance and Task ID. The Task ID should be used against the `/task` endpoint to determine the availability of the the running instance.\n\nInstance behaviour is controlled using the `specification`. You will need to consult individual applications to determine what can be placed in the specification. Applications typically provide a `template` describing its **options**.\n\nAn Application instance is not Ready for use until the corresponding **TaskState** is _STARTED_.\n\nA Job instance typically runs to completion, reaching the **TaskState** _SUCCESS_ when successful and _FAILURE_ is unsuccessful.\n\n * @summary Creates a new Job or Application instance\n */\nexport const createInstance = (\n instancePostBodyBody: InstancePostBodyBody,\n options?: SecondParameter<typeof customInstance>,) => {\n \n const formData = new FormData();\nformData.append('application_id', instancePostBodyBody.application_id)\nformData.append('project_id', instancePostBodyBody.project_id)\nformData.append('as_name', instancePostBodyBody.as_name)\nif(instancePostBodyBody.callback_url !== undefined) {\n formData.append('callback_url', instancePostBodyBody.callback_url)\n }\nif(instancePostBodyBody.callback_context !== undefined) {\n formData.append('callback_context', instancePostBodyBody.callback_context)\n }\nif(instancePostBodyBody.generate_callback_token !== undefined) {\n formData.append('generate_callback_token', instancePostBodyBody.generate_callback_token.toString())\n }\nif(instancePostBodyBody.callback_token !== undefined) {\n formData.append('callback_token', instancePostBodyBody.callback_token)\n }\nif(instancePostBodyBody.debug !== undefined) {\n formData.append('debug', instancePostBodyBody.debug)\n }\nif(instancePostBodyBody.specification !== undefined) {\n formData.append('specification', instancePostBodyBody.specification)\n }\n\n return customInstance<InstancePostResponse>(\n {url: `/instance`, method: 'POST',\n headers: {'Content-Type': 'multipart/form-data', },\n data: formData\n },\n options);\n }\n \n\n\nexport const getCreateInstanceMutationOptions = <TError = ErrorType<void | DmError>,\n TContext = unknown>(options?: { mutation?:UseMutationOptions<Awaited<ReturnType<typeof createInstance>>, TError,{data: InstancePostBodyBody}, TContext>, request?: SecondParameter<typeof customInstance>}\n): UseMutationOptions<Awaited<ReturnType<typeof createInstance>>, TError,{data: InstancePostBodyBody}, TContext> => {\nconst {mutation: mutationOptions, request: requestOptions} = options ?? 
{};\n\n \n\n\n const mutationFn: MutationFunction<Awaited<ReturnType<typeof createInstance>>, {data: InstancePostBodyBody}> = (props) => {\n const {data} = props ?? {};\n\n return createInstance(data,requestOptions)\n }\n\n \n\n\n return { mutationFn, ...mutationOptions }}\n\n export type CreateInstanceMutationResult = NonNullable<Awaited<ReturnType<typeof createInstance>>>\n export type CreateInstanceMutationBody = InstancePostBodyBody\n export type CreateInstanceMutationError = ErrorType<void | DmError>\n\n /**\n * @summary Creates a new Job or Application instance\n */\nexport const useCreateInstance = <TError = ErrorType<void | DmError>,\n TContext = unknown>(options?: { mutation?:UseMutationOptions<Awaited<ReturnType<typeof createInstance>>, TError,{data: InstancePostBodyBody}, TContext>, request?: SecondParameter<typeof customInstance>}\n): UseMutationResult<\n Awaited<ReturnType<typeof createInstance>>,\n TError,\n {data: InstancePostBodyBody},\n TContext\n > => {\n\n const mutationOptions = getCreateInstanceMutationOptions(options);\n\n return useMutation(mutationOptions);\n }\n /**\n * Returns a summary of all running instances. Instances can be running as an Application or as a Job. The response will contain an `application_type` field that is either `job` or `application`\n\n * @summary Get summary information about all Job and Application instances\n */\nexport const getInstances = (\n params?: GetInstancesParams,\n options?: SecondParameter<typeof customInstance>,signal?: AbortSignal\n) => {\n \n \n return customInstance<InstancesGetResponse>(\n {url: `/instance`, method: 'GET',\n params, signal\n },\n options);\n }\n \n\nexport const getGetInstancesQueryKey = (params?: GetInstancesParams,) => {\n return [\"data-manager-api\", `/instance`, ...(params ? [params]: [])] as const;\n }\n\n \nexport const getGetInstancesQueryOptions = <TData = Awaited<ReturnType<typeof getInstances>>, TError = ErrorType<void | DmError>>(params?: GetInstancesParams, options?: { query?:Partial<UseQueryOptions<Awaited<ReturnType<typeof getInstances>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n) => {\n\nconst {query: queryOptions, request: requestOptions} = options ?? {};\n\n const queryKey = queryOptions?.queryKey ?? 
getGetInstancesQueryKey(params);\n\n \n\n const queryFn: QueryFunction<Awaited<ReturnType<typeof getInstances>>> = ({ signal }) => getInstances(params, requestOptions, signal);\n\n \n\n \n\n return { queryKey, queryFn, ...queryOptions} as UseQueryOptions<Awaited<ReturnType<typeof getInstances>>, TError, TData> & { queryKey: QueryKey }\n}\n\nexport type GetInstancesQueryResult = NonNullable<Awaited<ReturnType<typeof getInstances>>>\nexport type GetInstancesQueryError = ErrorType<void | DmError>\n\n\nexport function useGetInstances<TData = Awaited<ReturnType<typeof getInstances>>, TError = ErrorType<void | DmError>>(\n params: undefined | GetInstancesParams, options: { query:Partial<UseQueryOptions<Awaited<ReturnType<typeof getInstances>>, TError, TData>> & Pick<\n DefinedInitialDataOptions<\n Awaited<ReturnType<typeof getInstances>>,\n TError,\n TData\n > , 'initialData'\n >, request?: SecondParameter<typeof customInstance>}\n\n ): DefinedUseQueryResult<TData, TError> & { queryKey: QueryKey }\nexport function useGetInstances<TData = Awaited<ReturnType<typeof getInstances>>, TError = ErrorType<void | DmError>>(\n params?: GetInstancesParams, options?: { query?:Partial<UseQueryOptions<Awaited<ReturnType<typeof getInstances>>, TError, TData>> & Pick<\n UndefinedInitialDataOptions<\n Awaited<ReturnType<typeof getInstances>>,\n TError,\n TData\n > , 'initialData'\n >, request?: SecondParameter<typeof customInstance>}\n\n ): UseQueryResult<TData, TError> & { queryKey: QueryKey }\nexport function useGetInstances<TData = Awaited<ReturnType<typeof getInstances>>, TError = ErrorType<void | DmError>>(\n params?: GetInstancesParams, options?: { query?:Partial<UseQueryOptions<Awaited<ReturnType<typeof getInstances>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n\n ): UseQueryResult<TData, TError> & { queryKey: QueryKey }\n/**\n * @summary Get summary information about all Job and Application instances\n */\n\nexport function useGetInstances<TData = Awaited<ReturnType<typeof getInstances>>, TError = ErrorType<void | DmError>>(\n params?: GetInstancesParams, options?: { query?:Partial<UseQueryOptions<Awaited<ReturnType<typeof getInstances>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n\n ): UseQueryResult<TData, TError> & { queryKey: QueryKey } {\n\n const queryOptions = getGetInstancesQueryOptions(params,options)\n\n const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & { queryKey: QueryKey };\n\n query.queryKey = queryOptions.queryKey ;\n\n return query;\n}\n\n\n\nexport const getGetInstancesSuspenseQueryOptions = <TData = Awaited<ReturnType<typeof getInstances>>, TError = ErrorType<void | DmError>>(params?: GetInstancesParams, options?: { query?:Partial<UseSuspenseQueryOptions<Awaited<ReturnType<typeof getInstances>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n) => {\n\nconst {query: queryOptions, request: requestOptions} = options ?? {};\n\n const queryKey = queryOptions?.queryKey ?? 
getGetInstancesQueryKey(params);\n\n \n\n const queryFn: QueryFunction<Awaited<ReturnType<typeof getInstances>>> = ({ signal }) => getInstances(params, requestOptions, signal);\n\n \n\n \n\n return { queryKey, queryFn, ...queryOptions} as UseSuspenseQueryOptions<Awaited<ReturnType<typeof getInstances>>, TError, TData> & { queryKey: QueryKey }\n}\n\nexport type GetInstancesSuspenseQueryResult = NonNullable<Awaited<ReturnType<typeof getInstances>>>\nexport type GetInstancesSuspenseQueryError = ErrorType<void | DmError>\n\n\nexport function useGetInstancesSuspense<TData = Awaited<ReturnType<typeof getInstances>>, TError = ErrorType<void | DmError>>(\n params: undefined | GetInstancesParams, options: { query:Partial<UseSuspenseQueryOptions<Awaited<ReturnType<typeof getInstances>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n\n ): UseSuspenseQueryResult<TData, TError> & { queryKey: QueryKey }\nexport function useGetInstancesSuspense<TData = Awaited<ReturnType<typeof getInstances>>, TError = ErrorType<void | DmError>>(\n params?: GetInstancesParams, options?: { query?:Partial<UseSuspenseQueryOptions<Awaited<ReturnType<typeof getInstances>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n\n ): UseSuspenseQueryResult<TData, TError> & { queryKey: QueryKey }\nexport function useGetInstancesSuspense<TData = Awaited<ReturnType<typeof getInstances>>, TError = ErrorType<void | DmError>>(\n params?: GetInstancesParams, options?: { query?:Partial<UseSuspenseQueryOptions<Awaited<ReturnType<typeof getInstances>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n\n ): UseSuspenseQueryResult<TData, TError> & { queryKey: QueryKey }\n/**\n * @summary Get summary information about all Job and Application instances\n */\n\nexport function useGetInstancesSuspense<TData = Awaited<ReturnType<typeof getInstances>>, TError = ErrorType<void | DmError>>(\n params?: GetInstancesParams, options?: { query?:Partial<UseSuspenseQueryOptions<Awaited<ReturnType<typeof getInstances>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n\n ): UseSuspenseQueryResult<TData, TError> & { queryKey: QueryKey } {\n\n const queryOptions = getGetInstancesSuspenseQueryOptions(params,options)\n\n const query = useSuspenseQuery(queryOptions) as UseSuspenseQueryResult<TData, TError> & { queryKey: QueryKey };\n\n query.queryKey = queryOptions.queryKey ;\n\n return query;\n}\n\n\n\n/**\n * Similar to the `/instance [POST]` endpoint this one is used to check whether a new **Application** or **Job** instance can be launched. 
Rather than returning an **Instance** (or **Task**) ID this endpoint is simply used to ensure that the Job/Application is runnable while also returning the compiled `command` (if the Instance is a Job).\n\nThe test result is only valid at the time of the call, whether an actual instance would start or not will require an identical call to `/instance POST`.\n\n * @summary Used to check the execution of new Job or Application instance\n */\nexport const dryRunInstance = (\n instancePostBodyBody: InstancePostBodyBody,\n options?: SecondParameter<typeof customInstance>,) => {\n \n const formData = new FormData();\nformData.append('application_id', instancePostBodyBody.application_id)\nformData.append('project_id', instancePostBodyBody.project_id)\nformData.append('as_name', instancePostBodyBody.as_name)\nif(instancePostBodyBody.callback_url !== undefined) {\n formData.append('callback_url', instancePostBodyBody.callback_url)\n }\nif(instancePostBodyBody.callback_context !== undefined) {\n formData.append('callback_context', instancePostBodyBody.callback_context)\n }\nif(instancePostBodyBody.generate_callback_token !== undefined) {\n formData.append('generate_callback_token', instancePostBodyBody.generate_callback_token.toString())\n }\nif(instancePostBodyBody.callback_token !== undefined) {\n formData.append('callback_token', instancePostBodyBody.callback_token)\n }\nif(instancePostBodyBody.debug !== undefined) {\n formData.append('debug', instancePostBodyBody.debug)\n }\nif(instancePostBodyBody.specification !== undefined) {\n formData.append('specification', instancePostBodyBody.specification)\n }\n\n return customInstance<InstanceDryRunPostResponse>(\n {url: `/instance/dry-run`, method: 'POST',\n headers: {'Content-Type': 'multipart/form-data', },\n data: formData\n },\n options);\n }\n \n\n\nexport const getDryRunInstanceMutationOptions = <TError = ErrorType<void | DmError>,\n TContext = unknown>(options?: { mutation?:UseMutationOptions<Awaited<ReturnType<typeof dryRunInstance>>, TError,{data: InstancePostBodyBody}, TContext>, request?: SecondParameter<typeof customInstance>}\n): UseMutationOptions<Awaited<ReturnType<typeof dryRunInstance>>, TError,{data: InstancePostBodyBody}, TContext> => {\nconst {mutation: mutationOptions, request: requestOptions} = options ?? {};\n\n \n\n\n const mutationFn: MutationFunction<Awaited<ReturnType<typeof dryRunInstance>>, {data: InstancePostBodyBody}> = (props) => {\n const {data} = props ?? 
{};\n\n return dryRunInstance(data,requestOptions)\n }\n\n \n\n\n return { mutationFn, ...mutationOptions }}\n\n export type DryRunInstanceMutationResult = NonNullable<Awaited<ReturnType<typeof dryRunInstance>>>\n export type DryRunInstanceMutationBody = InstancePostBodyBody\n export type DryRunInstanceMutationError = ErrorType<void | DmError>\n\n /**\n * @summary Used to check the execution of new Job or Application instance\n */\nexport const useDryRunInstance = <TError = ErrorType<void | DmError>,\n TContext = unknown>(options?: { mutation?:UseMutationOptions<Awaited<ReturnType<typeof dryRunInstance>>, TError,{data: InstancePostBodyBody}, TContext>, request?: SecondParameter<typeof customInstance>}\n): UseMutationResult<\n Awaited<ReturnType<typeof dryRunInstance>>,\n TError,\n {data: InstancePostBodyBody},\n TContext\n > => {\n\n const mutationOptions = getDryRunInstanceMutationOptions(options);\n\n return useMutation(mutationOptions);\n }\n /**\n * @summary Get detailed information about an Instance\n */\nexport const getInstance = (\n instanceId: string,\n options?: SecondParameter<typeof customInstance>,signal?: AbortSignal\n) => {\n \n \n return customInstance<InstanceGetResponse>(\n {url: `/instance/${instanceId}`, method: 'GET', signal\n },\n options);\n }\n \n\nexport const getGetInstanceQueryKey = (instanceId: string,) => {\n return [\"data-manager-api\", `/instance/${instanceId}`] as const;\n }\n\n \nexport const getGetInstanceQueryOptions = <TData = Awaited<ReturnType<typeof getInstance>>, TError = ErrorType<void | DmError>>(instanceId: string, options?: { query?:Partial<UseQueryOptions<Awaited<ReturnType<typeof getInstance>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n) => {\n\nconst {query: queryOptions, request: requestOptions} = options ?? {};\n\n const queryKey = queryOptions?.queryKey ?? 
getGetInstanceQueryKey(instanceId);\n\n \n\n const queryFn: QueryFunction<Awaited<ReturnType<typeof getInstance>>> = ({ signal }) => getInstance(instanceId, requestOptions, signal);\n\n \n\n \n\n return { queryKey, queryFn, enabled: !!(instanceId), ...queryOptions} as UseQueryOptions<Awaited<ReturnType<typeof getInstance>>, TError, TData> & { queryKey: QueryKey }\n}\n\nexport type GetInstanceQueryResult = NonNullable<Awaited<ReturnType<typeof getInstance>>>\nexport type GetInstanceQueryError = ErrorType<void | DmError>\n\n\nexport function useGetInstance<TData = Awaited<ReturnType<typeof getInstance>>, TError = ErrorType<void | DmError>>(\n instanceId: string, options: { query:Partial<UseQueryOptions<Awaited<ReturnType<typeof getInstance>>, TError, TData>> & Pick<\n DefinedInitialDataOptions<\n Awaited<ReturnType<typeof getInstance>>,\n TError,\n TData\n > , 'initialData'\n >, request?: SecondParameter<typeof customInstance>}\n\n ): DefinedUseQueryResult<TData, TError> & { queryKey: QueryKey }\nexport function useGetInstance<TData = Awaited<ReturnType<typeof getInstance>>, TError = ErrorType<void | DmError>>(\n instanceId: string, options?: { query?:Partial<UseQueryOptions<Awaited<ReturnType<typeof getInstance>>, TError, TData>> & Pick<\n UndefinedInitialDataOptions<\n Awaited<ReturnType<typeof getInstance>>,\n TError,\n TData\n > , 'initialData'\n >, request?: SecondParameter<typeof customInstance>}\n\n ): UseQueryResult<TData, TError> & { queryKey: QueryKey }\nexport function useGetInstance<TData = Awaited<ReturnType<typeof getInstance>>, TError = ErrorType<void | DmError>>(\n instanceId: string, options?: { query?:Partial<UseQueryOptions<Awaited<ReturnType<typeof getInstance>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n\n ): UseQueryResult<TData, TError> & { queryKey: QueryKey }\n/**\n * @summary Get detailed information about an Instance\n */\n\nexport function useGetInstance<TData = Awaited<ReturnType<typeof getInstance>>, TError = ErrorType<void | DmError>>(\n instanceId: string, options?: { query?:Partial<UseQueryOptions<Awaited<ReturnType<typeof getInstance>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n\n ): UseQueryResult<TData, TError> & { queryKey: QueryKey } {\n\n const queryOptions = getGetInstanceQueryOptions(instanceId,options)\n\n const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & { queryKey: QueryKey };\n\n query.queryKey = queryOptions.queryKey ;\n\n return query;\n}\n\n\n\nexport const getGetInstanceSuspenseQueryOptions = <TData = Awaited<ReturnType<typeof getInstance>>, TError = ErrorType<void | DmError>>(instanceId: string, options?: { query?:Partial<UseSuspenseQueryOptions<Awaited<ReturnType<typeof getInstance>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n) => {\n\nconst {query: queryOptions, request: requestOptions} = options ?? {};\n\n const queryKey = queryOptions?.queryKey ?? 
getGetInstanceQueryKey(instanceId);\n\n \n\n const queryFn: QueryFunction<Awaited<ReturnType<typeof getInstance>>> = ({ signal }) => getInstance(instanceId, requestOptions, signal);\n\n \n\n \n\n return { queryKey, queryFn, enabled: !!(instanceId), ...queryOptions} as UseSuspenseQueryOptions<Awaited<ReturnType<typeof getInstance>>, TError, TData> & { queryKey: QueryKey }\n}\n\nexport type GetInstanceSuspenseQueryResult = NonNullable<Awaited<ReturnType<typeof getInstance>>>\nexport type GetInstanceSuspenseQueryError = ErrorType<void | DmError>\n\n\nexport function useGetInstanceSuspense<TData = Awaited<ReturnType<typeof getInstance>>, TError = ErrorType<void | DmError>>(\n instanceId: string, options: { query:Partial<UseSuspenseQueryOptions<Awaited<ReturnType<typeof getInstance>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n\n ): UseSuspenseQueryResult<TData, TError> & { queryKey: QueryKey }\nexport function useGetInstanceSuspense<TData = Awaited<ReturnType<typeof getInstance>>, TError = ErrorType<void | DmError>>(\n instanceId: string, options?: { query?:Partial<UseSuspenseQueryOptions<Awaited<ReturnType<typeof getInstance>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n\n ): UseSuspenseQueryResult<TData, TError> & { queryKey: QueryKey }\nexport function useGetInstanceSuspense<TData = Awaited<ReturnType<typeof getInstance>>, TError = ErrorType<void | DmError>>(\n instanceId: string, options?: { query?:Partial<UseSuspenseQueryOptions<Awaited<ReturnType<typeof getInstance>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n\n ): UseSuspenseQueryResult<TData, TError> & { queryKey: QueryKey }\n/**\n * @summary Get detailed information about an Instance\n */\n\nexport function useGetInstanceSuspense<TData = Awaited<ReturnType<typeof getInstance>>, TError = ErrorType<void | DmError>>(\n instanceId: string, options?: { query?:Partial<UseSuspenseQueryOptions<Awaited<ReturnType<typeof getInstance>>, TError, TData>>, request?: SecondParameter<typeof customInstance>}\n\n ): UseSuspenseQueryResult<TData, TError> & { queryKey: QueryKey } {\n\n const queryOptions = getGetInstanceSuspenseQueryOptions(instanceId,options)\n\n const query = useSuspenseQuery(queryOptions) as UseSuspenseQueryResult<TData, TError> & { queryKey: QueryKey };\n\n query.queryKey = queryOptions.queryKey ;\n\n return query;\n}\n\n\n\n/**\n * The Application or Job Instance is terminated.\n\nYou must be the `owner` or an `editor` of the Instance to delete it\n\n * @summary Delete a Job or Application Instance\n */\nexport const terminateInstance = (\n instanceId: string,\n options?: SecondParameter<typeof customInstance>,) => {\n \n \n return customInstance<TaskIdentity>(\n {url: `/instance/${instanceId}`, method: 'DELETE'\n },\n options);\n }\n \n\n\nexport const getTerminateInstanceMutationOptions = <TError = ErrorType<void | DmError>,\n TContext = unknown>(options?: { mutation?:UseMutationOptions<Awaited<ReturnType<typeof terminateInstance>>, TError,{instanceId: string}, TContext>, request?: SecondParameter<typeof customInstance>}\n): UseMutationOptions<Awaited<ReturnType<typeof terminateInstance>>, TError,{instanceId: string}, TContext> => {\nconst {mutation: mutationOptions, request: requestOptions} = options ?? {};\n\n \n\n\n const mutationFn: MutationFunction<Awaited<ReturnType<typeof terminateInstance>>, {instanceId: string}> = (props) => {\n const {instanceId} = props ?? 
{};\n\n return terminateInstance(instanceId,requestOptions)\n }\n\n \n\n\n return { mutationFn, ...mutationOptions }}\n\n export type TerminateInstanceMutationResult = NonNullable<Awaited<ReturnType<typeof terminateInstance>>>\n \n export type TerminateInstanceMutationError = ErrorType<void | DmError>\n\n /**\n * @summary Delete a Job or Application Instance\n */\nexport const useTerminateInstance = <TError = ErrorType<void | DmError>,\n TContext = unknown>(options?: { mutation?:UseMutationOptions<Awaited<ReturnType<typeof terminateInstance>>, TError,{instanceId: string}, TContext>, request?: SecondParameter<typeof customInstance>}\n): UseMutationResult<\n Awaited<ReturnType<typeof terminateInstance>>,\n TError,\n {instanceId: string},\n TContext\n > => {\n\n const mutationOptions = getTerminateInstanceMutationOptions(options);\n\n return useMutation(mutationOptions);\n }\n /**\n * The Application or Job Instance is updated according to the patch parameters.\n\nYou must be the `owner` or an `editor` of the Instance to patch it\n\n * @summary Update a Job or Application Instance\n */\nexport const patchInstance = (\n instanceId: string,\n params?: PatchInstanceParams,\n options?: SecondParameter<typeof customInstance>,) => {\n \n \n return customInstance<void>(\n {url: `/instance/${instanceId}`, method: 'PATCH',\n params\n },\n options);\n }\n \n\n\nexport const getPatchInstanceMutationOptions = <TError = ErrorType<void | DmError>,\n TContext = unknown>(options?: { mutation?:UseMutationOptions<Awaited<ReturnType<typeof patchInstance>>, TError,{instanceId: string;params?: PatchInstanceParams}, TContext>, request?: SecondParameter<typeof customInstance>}\n): UseMutationOptions<Awaited<ReturnType<typeof patchInstance>>, TError,{instanceId: string;params?: PatchInstanceParams}, TContext> => {\nconst {mutation: mutationOptions, request: requestOptions} = options ?? {};\n\n \n\n\n const mutationFn: MutationFunction<Awaited<ReturnType<typeof patchInstance>>, {instanceId: string;params?: PatchInstanceParams}> = (props) => {\n const {instanceId,params} = props ?? {};\n\n return patchInstance(instanceId,params,requestOptions)\n }\n\n \n\n\n return { mutationFn, ...mutationOptions }}\n\n export type PatchInstanceMutationResult = NonNullable<Awaited<ReturnType<typeof patchInstance>>>\n \n export type PatchInstanceMutationError = ErrorType<void | DmError>\n\n /**\n * @summary Update a Job or Application Instance\n */\nexport const usePatchInstance = <TError = ErrorType<void | DmError>,\n TContext = unknown>(options?: { mutation?:UseMutationOptions<Awaited<ReturnType<typeof patchInstance>>, TError,{instanceId: string;params?: PatchInstanceParams}, TContext>, request?: SecondParameter<typeof customInstance>}\n): UseMutationResult<\n Awaited<ReturnType<typeof patchInstance>>,\n TError,\n {instanceId: string;params?: PatchInstanceParams},\n TContext\n > => {\n\n const mutationOptions = getPatchInstanceMutationOptions(options);\n\n return useMutation(mutationOptions);\n }\n /**\n * This revokes the instance **Token**, which was optionally generated when the instance was launched. 
No authentication is required to use this endpoint, which is typically used by a remote system driven by instance callbacks.\n\nTokens automatically expire after a period of time but can be revoked instantly with this endpoint.\n\nThe remote system will revoke the token when it's finished with it\n\n * @summary Delete (revoke) the Instance Token\n */\nexport const deleteInstanceToken = (\n instanceId: string,\n token: string,\n options?: SecondParameter<typeof customInstance>,) => {\n \n \n return customInstance<void>(\n {url: `/instance/${instanceId}/token/${token}`, method: 'DELETE'\n },\n options);\n }\n \n\n\nexport const getDeleteInstanceTokenMutationOptions = <TError = ErrorType<DmError>,\n TContext = unknown>(options?: { mutation?:UseMutationOptions<Awaited<ReturnType<typeof deleteInstanceToken>>, TError,{instanceId: string;token: string}, TContext>, request?: SecondParameter<typeof customInstance>}\n): UseMutationOptions<Awaited<ReturnType<typeof deleteInstanceToken>>, TError,{instanceId: string;token: string}, TContext> => {\nconst {mutation: mutationOptions, request: requestOptions} = options ?? {};\n\n \n\n\n const mutationFn: MutationFunction<Awaited<ReturnType<typeof deleteInstanceToken>>, {instanceId: string;token: string}> = (props) => {\n const {instanceId,token} = props ?? {};\n\n return deleteInstanceToken(instanceId,token,requestOptions)\n }\n\n \n\n\n return { mutationFn, ...mutationOptions }}\n\n export type DeleteInstanceTokenMutationResult = NonNullable<Awaited<ReturnType<typeof deleteInstanceToken>>>\n \n export type DeleteInstanceTokenMutationError = ErrorType<DmError>\n\n /**\n * @summary Delete (revoke) the Instance Token\n */\nexport const useDeleteInstanceToken = <TError = ErrorType<DmError>,\n TContext = unknown>(options?: { mutation?:UseMutationOptions<Awaited<ReturnType<typeof deleteInstanceToken>>, TError,{instanceId: string;token: string}, TContext>, request?: SecondParameter<typeof customInstance>}\n): UseMutationResult<\n Awaited<ReturnType<typeof deleteInstanceToken>>,\n TError,\n {instanceId: string;token: string},\n TContext\n > => {\n\n const mutationOptions = getDeleteInstanceTokenMutationOptions(options);\n\n return useMutation(mutationOptions);\n }\n "]}
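For orientation, the sourcemap lines above embed the orval-generated TanStack Query client for the `/instance` endpoints (create, list, dry-run, get, patch, terminate, and token revocation). The sketch below is not part of the published package; it only illustrates how the generated hooks might be consumed. The deep-import path is inferred from the `instance/instance.js` file layout in this diff (the hooks may equally be re-exported from the package root), and the project ID, application ID, instance ID, and `specification` payload are placeholders, not values taken from the package or any deployment.

```tsx
// Sketch only: a hypothetical consumer of the generated instance hooks.
// Import path and all identifiers below are assumptions for illustration.
import {
  useCreateInstance,
  useGetInstances,
  useTerminateInstance,
} from "@squonk/data-manager-client/instance";

// Hypothetical project identity; a real one comes from the /project endpoints.
const PROJECT_ID = "project-00000000-0000-0000-0000-000000000000";

export const InstancesPanel = () => {
  // GET /instance, optionally filtered by project
  const { data, isLoading, refetch } = useGetInstances({ project_id: PROJECT_ID });

  // POST /instance - the generated createInstance builds the multipart form body
  const { mutate: launch, data: launchResponse } = useCreateInstance({
    mutation: { onSuccess: () => refetch() },
  });

  // DELETE /instance/{instanceId}
  const { mutate: terminate } = useTerminateInstance({
    mutation: { onSuccess: () => refetch() },
  });

  if (isLoading) {
    return <p>Loading instances…</p>;
  }

  return (
    <div>
      <button
        onClick={() =>
          launch({
            data: {
              application_id: "application-id-here", // hypothetical
              project_id: PROJECT_ID,
              as_name: "example-instance",
              // The specification is application-defined; this JSON is a placeholder.
              specification: JSON.stringify({ variables: {} }),
            },
          })
        }
      >
        Launch instance
      </button>
      <button
        onClick={() =>
          // hypothetical instance UUID; in practice taken from the list or launch response
          terminate({ instanceId: "instance-id-here" })
        }
      >
        Terminate instance
      </button>
      {/* Response shapes are defined in data-manager-api.schemas; dumped here for brevity. */}
      <pre>{JSON.stringify({ instances: data, launched: launchResponse }, null, 2)}</pre>
    </div>
  );
};
```

One point worth noting from the generated code itself: the mutation hooks take their arguments as a single wrapped object (`{ data }` for `useCreateInstance`/`useDryRunInstance`, `{ instanceId }` for `useTerminateInstance`, `{ instanceId, params }` for `usePatchInstance`), matching the `MutationFunction` variable types visible in the diff above.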