@squonk/data-manager-client 0.7.0 → 0.7.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/accounting/accounting.cjs +24 -8
- package/accounting/accounting.cjs.map +1 -1
- package/accounting/accounting.d.ts +19 -8
- package/accounting/accounting.js +26 -10
- package/accounting/accounting.js.map +1 -1
- package/admin/admin.cjs +22 -1
- package/admin/admin.cjs.map +1 -1
- package/admin/admin.d.ts +20 -2
- package/admin/admin.js +21 -0
- package/admin/admin.js.map +1 -1
- package/application/application.d.ts +1 -1
- package/{custom-instance-9873bed3.d.ts → custom-instance-eb1ebf45.d.ts} +83 -19
- package/dataset/dataset.d.ts +1 -1
- package/file/file.d.ts +1 -1
- package/index.cjs +23 -1
- package/index.cjs.map +1 -1
- package/index.d.ts +1 -1
- package/index.js +22 -0
- package/index.js.map +1 -1
- package/instance/instance.cjs +6 -0
- package/instance/instance.cjs.map +1 -1
- package/instance/instance.d.ts +1 -1
- package/instance/instance.js +6 -0
- package/instance/instance.js.map +1 -1
- package/job/job.d.ts +1 -1
- package/metadata/metadata.cjs +93 -0
- package/metadata/metadata.cjs.map +1 -0
- package/metadata/metadata.d.ts +72 -0
- package/metadata/metadata.js +93 -0
- package/metadata/metadata.js.map +1 -0
- package/metadata/package.json +7 -0
- package/package.json +1 -1
- package/project/project.d.ts +1 -1
- package/src/accounting/accounting.ts +56 -15
- package/src/admin/admin.ts +59 -0
- package/src/data-manager-api.schemas.ts +108 -20
- package/src/instance/instance.ts +6 -0
- package/src/metadata/metadata.ts +283 -0
- package/task/task.d.ts +1 -1
- package/type/type.d.ts +1 -1
- package/user/user.d.ts +1 -1
- package/annotation/annotation.cjs +0 -64
- package/annotation/annotation.cjs.map +0 -1
- package/annotation/annotation.d.ts +0 -58
- package/annotation/annotation.js +0 -64
- package/annotation/annotation.js.map +0 -1
- package/annotation/package.json +0 -7
- package/src/annotation/annotation.ts +0 -217
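The most visible structural change in this list is the new `package/metadata/*` entry point (shipping its own `package.json`) alongside the removal of the `package/annotation/*` entry point. A minimal consumer-side sketch of the import change, assuming the metadata module is published as a sub-path export in the same way the removed annotation module was (the namespace import is illustrative; the concrete export names live in `src/metadata/metadata.ts`, which this diff does not show):

```ts
// 0.7.0 (now removed):
// import * as annotation from "@squonk/data-manager-client/annotation";

// 0.7.1 (assumed sub-path, inferred from the added package/metadata/package.json):
import * as metadata from "@squonk/data-manager-client/metadata";

// Inspect what the generated metadata module actually exposes.
console.log(Object.keys(metadata));
```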
package/index.js
CHANGED
@@ -44,6 +44,16 @@ var InstanceSummaryJobImageType = {
   SIMPLE: "SIMPLE",
   NEXTFLOW: "NEXTFLOW"
 };
+var InstanceSummaryPhase = {
+  COMPLETED: "COMPLETED",
+  CRASH_LOOP_BACKOFF: "CRASH_LOOP_BACKOFF",
+  FAILED: "FAILED",
+  IMAGE_PULL_BACKOFF: "IMAGE_PULL_BACKOFF",
+  PENDING: "PENDING",
+  RUNNING: "RUNNING",
+  SUCCEEDED: "SUCCEEDED",
+  UNKNOWN: "UNKNOWN"
+};
 var InstanceSummaryApplicationType = {
   APPLICATION: "APPLICATION",
   JOB: "JOB"
@@ -84,6 +94,16 @@ var InstanceTaskPurpose = {
   CREATE: "CREATE",
   DELETE: "DELETE"
 };
+var InstanceGetResponsePhase = {
+  COMPLETED: "COMPLETED",
+  CRASH_LOOP_BACKOFF: "CRASH_LOOP_BACKOFF",
+  FAILED: "FAILED",
+  IMAGE_PULL_BACKOFF: "IMAGE_PULL_BACKOFF",
+  PENDING: "PENDING",
+  RUNNING: "RUNNING",
+  SUCCEEDED: "SUCCEEDED",
+  UNKNOWN: "UNKNOWN"
+};
 var InstanceGetResponseApplicationType = {
   APPLICATION: "APPLICATION",
   JOB: "JOB"
@@ -98,8 +118,10 @@ export {
   DatasetVersionDetailProcessingStage,
   DatasetVersionSummaryProcessingStage,
   InstanceGetResponseApplicationType,
+  InstanceGetResponsePhase,
   InstanceSummaryApplicationType,
   InstanceSummaryJobImageType,
+  InstanceSummaryPhase,
   InstanceTaskPurpose,
   JobGetResponseImageType,
   JobSummaryImageType,
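The practical effect of the `index.js` changes above is that the instance `phase` values are now exported from the package root as enum-like constant objects. A minimal consumer sketch using the new exports (the `isSettled` helper is hypothetical, not part of the package):

```ts
import {
  InstanceGetResponsePhase,
  InstanceSummaryPhase,
} from "@squonk/data-manager-client";

// Hypothetical helper: true once an instance has reached a terminal phase.
const isSettled = (phase: string): boolean =>
  phase === InstanceSummaryPhase.COMPLETED ||
  phase === InstanceSummaryPhase.SUCCEEDED ||
  phase === InstanceSummaryPhase.FAILED;

console.log(isSettled(InstanceGetResponsePhase.RUNNING)); // false
```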
package/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/data-manager-api.schemas.ts"],"sourcesContent":["/**\n * Generated by orval v6.6.0 🍺\n * Do not edit manually.\n * Dataset Manager API\n * The Dataset Manager API service.\n\nA service that allows *registered* users to make **Datasets** and associated **Metadata** available to **Applications** and **Jobs** using **Projects** and **Files**.\n\n * OpenAPI spec version: 0.7\n */\nexport type AdminGetServiceErrorsParams = {\n include_acknowleged?: QIncludeAcknowlegedParameter;\n};\n\nexport type GetUserApiLogParams = {\n from?: QFromParameter;\n until?: QUntilParameter;\n};\n\nexport type GetUserAccountParams = {\n do_not_impersonate?: QDoNotImpersonateParameter;\n};\n\nexport type PatchInstanceParams = { archive?: QInstanceArchiveParameter };\n\nexport type GetInstancesParams = { project_id?: QProjectIdParameter };\n\nexport type GetTaskParams = {\n event_limit?: QEventLimitParameter;\n event_prior_ordinal?: QEventPriorOrdinalParameter;\n};\n\nexport type GetTasksParams = {\n exclude_done?: QExcludeDoneParameter;\n exclude_purpose?: QExcludePurposeParameter;\n project_id?: QProjectIdParameter;\n};\n\nexport type DeleteUnmanagedFileParams = {\n file: QFileParameter;\n path?: QFilePathParameter;\n project_id: QFileProjectIdParameter;\n};\n\nexport type GetFilesParams = {\n project_id: QFileProjectIdParameter;\n path?: QFilePathParameter;\n include_hidden?: QIncludeHiddenParameter;\n};\n\nexport type DeleteDatasetParams = {\n keep_project_files?: QKeepProjectFilesParameter;\n};\n\nexport type GetVersionsParams = { include_deleted?: QIncludeDeletedParameter };\n\nexport type GetProjectFileParams = {\n path?: QFilePathParameter;\n file: QFileParameter;\n};\n\n/**\n * Filter the datasets by a comma separated list of editors\n\n */\nexport type QEditorsParameter = string;\n\n/**\n * Filter the datasets by a comma separated list of owners\n\n */\nexport type QOwnersParameter = string;\n\n/**\n * Filter the datasets by username\n\n */\nexport type QUsernameParameter = string;\n\n/**\n * An until (exclusive) date-time. If provided only API calls made before this will be returned. UTC is assumed if no timezone is provided\n\n */\nexport type QUntilParameter = string;\n\n/**\n * A Project identity\n */\nexport type QProjectIdParameter = string;\n\n/**\n * JSON string containing a list of label/value pairs for the datasets to be filtered by. If the value is set to null, then only the label is matched. If both the label and value are set, then both are matched. e.g. '{\"label1\": null, \"labe2\": \"value2\"}'\n\n */\nexport type QLabelsParameter = string;\n\nexport type GetDatasetsParams = {\n include_deleted?: QIncludeDeletedParameter;\n username?: QUsernameParameter;\n dataset_mime_type?: QDatasetMimeTypeParameter;\n owners?: QOwnersParameter;\n editors?: QEditorsParameter;\n labels?: QLabelsParameter;\n};\n\n/**\n * Whether to convert Project managed file instances to unmanged files\n\n */\nexport type QKeepProjectFilesParameter = boolean;\n\n/**\n * True to archive the instance\n */\nexport type QInstanceArchiveParameter = boolean;\n\n/**\n * Whether to include hidden files and directories\n */\nexport type QIncludeHiddenParameter = boolean;\n\n/**\n * Whether to include records that are deleted\n */\nexport type QIncludeDeletedParameter = boolean;\n\n/**\n * Set to include acknowledged items\n\n */\nexport type QIncludeAcknowlegedParameter = boolean;\n\n/**\n * A from (inclusive) date-time. If provided no API calls prior to this will be returned. 
UTC is assumed if no timezone is provided\n\n */\nexport type QFromParameter = string;\n\n/**\n * The Project identity\n */\nexport type QFileProjectIdParameter = string;\n\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\nexport type QFilePathParameter = string;\n\n/**\n * A project file.\n\n */\nexport type QFileParameter = string;\n\n/**\n * Set to a dot-separated string of purpose enumerations, i.e. `INSTANCE`, `FILE` or `DATASET`. To exclude file and dataset tasks set to `FILE.DATASET`\n\n */\nexport type QExcludePurposeParameter = string;\n\n/**\n * Set true if you want to exclude 'done' tasks, i.e. just see those that are still running.\n\n */\nexport type QExcludeDoneParameter = boolean;\n\n/**\n * The ordinal of a previously received event. If set, only events subsequent to the ordinal provided will be returned. Providing a value of 0 will result in retrieving the first and subsequent events.\n\n */\nexport type QEventPriorOrdinalParameter = number;\n\n/**\n * Maximum number of events to return. If provided, can be 1 or more.\n\n */\nexport type QEventLimitParameter = number;\n\n/**\n * Filter the datasets by the supplied mime_type.\n\n */\nexport type QDatasetMimeTypeParameter = string;\n\n/**\n * Set, if you're an admin, to call the endpoint without impersonation\n\n */\nexport type QDoNotImpersonateParameter = boolean;\n\nexport type UserPatchBodyBody = {\n /** If set to a message the user account is suspended, with the user receiving this message when they try and use the API. A suspended user accont can be restored by setting the message to `/restore`\n */\n suspend_message?: string;\n};\n\nexport type UserAccountPatchBodyBody = {\n /** If set the user account becomes private, if provided but false the user account becomes public. Public Users show up in user searches\n */\n private?: boolean;\n /** For **admin** accounts, if set the user account is able to read anything, i.e. `GET` API calls (i.e. endpoints that do not change the Data Manager state) behave as though the caller is acting as *everyone*. An **admin** user would set ths parameter in order to browse the system, and then switch to `impersonate` mode in order to chnage things as the chosen user\n */\n become_admin?: boolean;\n /** For **admin** accounts, if set API calls behave as though the caller is the user being impersonated. To stop impersonating set this to an empty string. To set impersonation to anything other than an empty string you must also set `become_admin`\n */\n impersonate?: string;\n /** For **admin** accounts, if this is set the account for the user being impersonated is patched, rather then the user's own account. To use this you must have a value for `impersonate`\n */\n use_impersonation?: boolean;\n};\n\nexport type ProjectPostBodyBody = {\n name: string;\n /** Whether the project is private. 
You may not be permitted to make the project private, that will depend on the project product you're using to create the project\n */\n private?: boolean;\n /** The Organisation the Project belongs to\n */\n organisation_id: string;\n /** The Organisational Unit the Project belongs to\n */\n unit_id: string;\n /** The Data Manager *Tier Product ID* you're using to create the Project\n */\n tier_product_id: string;\n};\n\nexport type ProjectPatchBodyBody = {\n private?: boolean;\n /** The new name of the ptojct\n */\n name?: string;\n};\n\nexport type ProjectFilePutBodyBody = {\n file: Blob;\n /** An alternative filename to use for the uploaded File\n */\n as_filename?: string;\n /** The Project path of the file.\n */\n path?: string;\n};\n\nexport type InstancePostBodyBody = {\n /** A supported application. Applications instances are managed using pre-deployed Kubernetes **Operators**. The application ID is a combination of the operator _plural_ and _group_.\n */\n application_id: string;\n /** A supported application version to launch\n */\n application_version: string;\n /** The project to attach\n */\n project_id: string;\n /** The name to use for the instance\n */\n as_name: string;\n /** A debug value that may be used by the instance.\n\nFor Data Manager **Job** applications setting this to anything other zero ('0') prevents the Job's Pod from being deleted automatically, allowing a developer to inspect the Pod's log for example.\n\nThe behaviour of **Application** instances using this property is undefined. It will depend on whether the application **CRD** handles the Data Manager debug field.\n */\n debug?: string;\n /** The instance specification. A JSON string that's application-specific and controls the application's behaviour.\n\nWhen laucnhing a Data Manager **Job** Application you must identify the Job using the properties `collection`, `job` and `version`, e.g. `{\"collection\":\"blob\",\"job\":\"filter\",\"version\":\"1.0.0\"}`\n\nJobs that offer commands will often advertise a series of **inputs** and **options** where the values can be provided using a **variables** map in the specification. Something like `\"variables\":{\"x\":7}`.\n\nJobs start in a Job-specific **working directory** but the starting directory for any Job can be adjusted by defining a `sub_path`. For example, if you want the Job to start in the path `foo/bar` (inside the Job's built-in working directory) you can provide `\"sub_path\":\"foo/bar\"` in the specification. In this instance the Job will start in the directory `/data/foo/bar` with the Data Manager creating the directory if it does not exist. You can only use a sub-path for a Job if the Job defines a working directory.\n */\n specification?: string;\n};\n\nexport type FilePostBodyBody = {\n /** The Dataset UUID for the File that you intend to attach\n */\n dataset_id: string;\n /** The Dataset version to attach\n */\n dataset_version: number;\n /** The Project UUID you're attaching to\n */\n project_id: string;\n /** The desired Dataset file type (a MIME type). Whether or not the chosen fileType is supported will depend on the Dataset\n */\n as_type: string;\n /** A path within the Project to add the File, default is the project root ('/'), the mount-point within the application container. Paths must begin '/'\n */\n path?: string;\n /** Whether to compress the Dataset File as it's attached. Compression is achieved using gzip, resulting in a File ending `.gz`. 
By default the file will be compressed\n */\n compress?: boolean;\n /** Whether the Dataset File can be modified while in the Project. By default the File cannot be modified\n */\n immutable?: boolean;\n};\n\nexport type DatasetPutBodyBody = {\n /** The MIME type of the Dataset\n */\n dataset_type: string;\n /** Extra variables (text) presented to the Dataset format-support container, which occurs during the upload and database processing stage. The content of the text is Dataset type specific.\n */\n format_extra_variables?: string;\n /** If set the post-format database load action is skipped. This means the dataset is uploaded but its molecules are not entered into the Data Manager database. This can save significant time if you're just experimenting with the dataset and do not need the molecules in the database\n */\n skip_molecule_load?: boolean;\n /** The Project the file belongs to\n */\n project_id: string;\n /** The Project path of the file.\n */\n path: string;\n /** The file name of the file in the Project path to load as a new Dataset.\n */\n file_name: string;\n /** If provided the File becomes a new version of the Dataset named. If not provided this File becomes the first version of a new Dataset, whose ID is returned to you on success.\n */\n dataset_id?: string;\n /** The Organisation you want the Dataset to belong to. If not supplied the Project Organisation is used\n */\n organisation_id?: string;\n /** The Organisational Unit you want the Dataset to belong to. If not supplied the Project Unit is used\n */\n unit_id?: string;\n};\n\nexport type DatasetAnnotationsPostBodyBody = {\n /** JSON string containing a list of annotations. The format of these annotations is expected to have been created using the data-manager-metadata library. The same library will be used to created the annotation object(s) that is added to the metadata for the dataset, so the formats should match.\n */\n annotations: string;\n};\n\nexport type DatasetPostBodyBody = {\n dataset_file: Blob;\n /** The MIME type of the Dataset\n */\n dataset_type: string;\n /** Extra variables (text) presented to the Dataset format-support container, which occurs during the upload and database processing stage. The content of the text is Dataset type specific.\n */\n format_extra_variables?: string;\n /** If set the post-format database load action is skipped. This means the dataset is uploaded but its molecules are not entered into the Data Manager database. This can save significant time if you're just experimenting with the dataset and do not need the molecules in the database\n */\n skip_molecule_load?: boolean;\n /** An optional new filename to use for the uploaded Dataset. The Dataset will be stored using this name.\n */\n as_filename?: string;\n /** If provided the Dataset becomes a new version of the Dataset named. If not provided this Dataset becomes the first version of a new Dataset, whose ID is returned to you on success.\n */\n dataset_id?: string;\n /** The Organisation you want the Dataset to belong to\n */\n organisation_id: string;\n /** The Organisational Unit you want the Dataset to belong to\n */\n unit_id: string;\n};\n\nexport interface UserSummary {\n /** The user's preferred username\n */\n username: string;\n}\n\nexport interface UserDetail {\n /** The user's preferred username\n */\n username: string;\n /** The date and time the user was first seen (an ISO-8601 formatted string in UTC)\n */\n first_seen?: string;\n /** Set if the user's account is marked as private. 
Provate accounts do not show up against general queries.\n */\n private: boolean;\n /** True if the account is suspended\n */\n suspended?: boolean;\n /** If the account is suspended this typically displays a reason for suspension\n */\n suspension_message?: string;\n /** For admin accounts, whether the user is acting in an administartive capacity, i.e. acting as everyone\n */\n become_admin?: boolean;\n /** For admin accounts, whether the user is impersonating another user\n */\n impersonate?: string;\n}\n\nexport interface UserAccountDetail {\n user: UserDetail;\n}\n\n/**\n * The Schema type (an object)\n\n */\nexport type TypeSummaryFormatterOptionsType = \"object\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TypeSummaryFormatterOptionsType = {\n object: \"object\" as TypeSummaryFormatterOptionsType,\n};\n\n/**\n * If present, contains the formatter_options that can be entered in the format_extra_variables field in the POST /dataset api.\n\n */\nexport type TypeSummaryFormatterOptions = {\n /** The title of the Formatter object\n */\n title: string;\n /** The Schema type (an object)\n */\n type: TypeSummaryFormatterOptionsType;\n /** Required properties\n */\n required: string[];\n};\n\nexport interface TypeSummary {\n /** The File Type MIME\n */\n mime: string;\n /** The file's supported file extensions. Each type is limited to a limited number of extensions. For example, SDF files must have the extension `.sdf` (or `.sdf.gz`).\n */\n file_extensions: string[];\n /** The file's type's format-support container image (if set). Types without a format support image cannot be uploaded, but they might be available for use as destination type when a Dataset is added to a Project.\n */\n formatter_image?: string;\n /** True if Datasets uploaded using this type's support loading of data into the Data Manager data-base.\n */\n formatter_supports_db_load?: boolean;\n /** If present, contains the formatter_options that can be entered in the format_extra_variables field in the POST /dataset api.\n */\n formatter_options?: TypeSummaryFormatterOptions;\n}\n\n/**\n * The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type TaskSummaryProcessingStage =\n | \"COPYING\"\n | \"FAILED\"\n | \"FORMATTING\"\n | \"LOADING\"\n | \"DELETING\"\n | \"DONE\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskSummaryProcessingStage = {\n COPYING: \"COPYING\" as TaskSummaryProcessingStage,\n FAILED: \"FAILED\" as TaskSummaryProcessingStage,\n FORMATTING: \"FORMATTING\" as TaskSummaryProcessingStage,\n LOADING: \"LOADING\" as TaskSummaryProcessingStage,\n DELETING: \"DELETING\" as TaskSummaryProcessingStage,\n DONE: \"DONE\" as TaskSummaryProcessingStage,\n};\n\nexport interface TaskSummary {\n /** The Task UUID\n */\n id: string;\n /** If a container image is launched by the task the image name is available here\n */\n image?: string;\n /** The date and time the task was created\n */\n created: string;\n /** True if the task has run to completion. If the task finished successfully the `exit_code` will be zero.\n */\n done: boolean;\n /** True if the Task relates to an object removal, i.e. a DELETE\n */\n removal?: boolean;\n /** Present when `done` and zero if the task finished successfully.\n */\n exit_code?: number;\n /** The purpose for the Task. 
Tasks are responsible for _uploading_ a Dataset, _attaching_ a Dataset as a File to a project or launching an Applications _instance_.\n */\n purpose: string;\n /** The related object ID for the Task purpose. If the purpose relates to a **Dataset** upload the ID will be a Dataset ID.\n */\n purpose_id: string;\n /** The related object version for the Task purpose. This field will only be set if the **purpose** is `DATASET`.\n */\n purpose_version?: number;\n /** The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage?: TaskSummaryProcessingStage;\n}\n\n/**\n * The task state. The typical state sequence is `PENDING`, then `STARTED` and finally `SUCCESS`\n\n */\nexport type TaskStateState =\n | \"PENDING\"\n | \"STARTED\"\n | \"RETRY\"\n | \"SUCCESS\"\n | \"FAILURE\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskStateState = {\n PENDING: \"PENDING\" as TaskStateState,\n STARTED: \"STARTED\" as TaskStateState,\n RETRY: \"RETRY\" as TaskStateState,\n SUCCESS: \"SUCCESS\" as TaskStateState,\n FAILURE: \"FAILURE\" as TaskStateState,\n};\n\nexport interface TaskState {\n /** The task state. The typical state sequence is `PENDING`, then `STARTED` and finally `SUCCESS`\n */\n state: TaskStateState;\n /** A short message accompanying the state, generally only found when the state is `FAILURE`\n */\n message?: string;\n /** The date and time of the state change\n */\n time: string;\n}\n\nexport interface TaskIdentity {\n task_id: string;\n}\n\n/**\n * The level of the message, a typical logging framework value\n\n */\nexport type TaskEventLevel =\n | \"CRITICAL\"\n | \"ERROR\"\n | \"WARNING\"\n | \"INFO\"\n | \"DEBUG\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskEventLevel = {\n CRITICAL: \"CRITICAL\" as TaskEventLevel,\n ERROR: \"ERROR\" as TaskEventLevel,\n WARNING: \"WARNING\" as TaskEventLevel,\n INFO: \"INFO\" as TaskEventLevel,\n DEBUG: \"DEBUG\" as TaskEventLevel,\n};\n\nexport interface TaskEvent {\n /** The event sequence number. 
The first event is always '1'.\n */\n ordinal: number;\n /** A short message.\n */\n message: string;\n /** The level of the message, a typical logging framework value\n */\n level: TaskEventLevel;\n /** The date and time the event was generated\n */\n time: string;\n}\n\nexport type ServiceErrorSummarySeverity = \"CRITICAL\" | \"ERROR\" | \"WARNING\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const ServiceErrorSummarySeverity = {\n CRITICAL: \"CRITICAL\" as ServiceErrorSummarySeverity,\n ERROR: \"ERROR\" as ServiceErrorSummarySeverity,\n WARNING: \"WARNING\" as ServiceErrorSummarySeverity,\n};\n\nexport interface ServiceErrorSummary {\n id: number;\n created: string;\n summary: string;\n severity: ServiceErrorSummarySeverity;\n hostname: string;\n error_code?: number;\n stack_trace: string;\n acknowledged: boolean;\n acknowledged_at?: string;\n acknowledging_user?: string;\n}\n\nexport interface ProjectFileDetail {\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The ProjectFile's Dataset origin\n */\n dataset_id?: string;\n /** The ProjectFile's Dataset origin version\n */\n dataset_version?: number;\n /** The ProjectFile's unique ID\n */\n file_id: string;\n /** The ProjectFile's filename within the Project\n */\n file_name: string;\n /** The ProjectFile's path within the Project volume\n */\n file_path: string;\n /** True if the ProjectFile cannot be modified while in the Project\n */\n immutable: boolean;\n /** The owner of the ProjectFile. This is the user that added the Dataset (as this file) to the Project\n */\n owner: string;\n /** The Project the ProjectFile belongs to\n */\n project_id?: string;\n /** The ProjectFile MIME type\n */\n mime_type: string;\n}\n\nexport interface ProjectDetail {\n /** The project name\n */\n name: string;\n /** The project unique reference\n */\n project_id: string;\n /** The Account Server Product the Project belongs to\n */\n product_id?: string;\n /** The Account Server Organisation the Project Product belongs to\n */\n organisation_id?: string;\n /** The Account Server Unit the Project Product belongs to\n */\n unit_id?: string;\n /** The project (owner) creator\n */\n owner: string;\n /** An editor (user_id) of the project */\n editors: string[];\n /** The approximate size of all the files in the Project volume. This is updated regaularly throughout the day and its current size may differ from what is reported here. The size resolution is 1MiB (the smallest billable unit). Therefore a project that contains 32KiB of files is recorded as 1MiB in size */\n size: number;\n /** A list of managed files in the Project\n */\n files?: ProjectFileDetail[];\n}\n\nexport interface JobOrderDetail {\n options: string[];\n}\n\nexport interface JobVariables {\n /** The Job command's variable ordering declaration. Contains a list of `options` in an ordered list, the order defines the order of presentation of the Job's variables.\n */\n order?: JobOrderDetail;\n /** The Job command's inputs. A string that represents a JSONSchema for the command's inputs, essentially the **variables/inputs** block of the Job's JobDefinition.\n */\n inputs?: string;\n /** The Job command's outputs. A string that represents a JSONSchema for the command's inputs, essentially the **variables/outputs** block of the Job's JobDefinition.\n */\n outputs?: string;\n /** The Job command's options. 
A string that represents a JSONSchema for the command's options, essentially the **variables/options** block of the Job's JobDefinition.\n */\n options?: string;\n}\n\n/**\n * The optional container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type JobSummaryImageType = \"SIMPLE\" | \"NEXTFLOW\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const JobSummaryImageType = {\n SIMPLE: \"SIMPLE\" as JobSummaryImageType,\n NEXTFLOW: \"NEXTFLOW\" as JobSummaryImageType,\n};\n\nexport interface JobSummary {\n /** The Job's unique ID\n */\n id: number;\n /** The Job namespace\n */\n collection: string;\n /** The Job name, unique within a given namespace\n */\n job: string;\n /** The Job version\n */\n version: string;\n /** The optional container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n image_type: JobSummaryImageType;\n /** The Job's category\n */\n category?: string;\n /** The list of keywords assigned to the Job\n */\n keywords?: string[];\n /** The name of the job in English\n */\n name: string;\n /** The description of the job in English\n */\n description?: string;\n /** A URL linking to the Job documentation\n */\n doc_url?: string;\n}\n\nexport interface JobApplication {\n /** The Job's operator ID */\n id: string;\n /** The Job's operator version */\n version: string;\n}\n\n/**\n * The Job container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type InstanceSummaryJobImageType = \"SIMPLE\" | \"NEXTFLOW\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryJobImageType = {\n SIMPLE: \"SIMPLE\" as InstanceSummaryJobImageType,\n NEXTFLOW: \"NEXTFLOW\" as InstanceSummaryJobImageType,\n};\n\n/**\n * The type of Application, which can be a `job` or an `application`\n\n */\nexport type InstanceSummaryApplicationType = \"APPLICATION\" | \"JOB\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryApplicationType = {\n APPLICATION: \"APPLICATION\" as InstanceSummaryApplicationType,\n JOB: \"JOB\" as InstanceSummaryApplicationType,\n};\n\nexport interface InstanceSummary {\n /** The application instance ID\n */\n id: string;\n /** The application ID\n */\n application_id: string;\n /** The application version\n */\n application_version: string;\n /** The User's specification, provided when the application was laucnhed\n */\n application_specification?: string;\n /** The type of Application, which can be a `job` or an `application`\n */\n application_type: InstanceSummaryApplicationType;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The phase of the application. 
This is a string, one of a limited number of values that are defined internally within the Data Manager.\n */\n phase: string;\n /** The data and time (UTC) the instance was laucnhed\n */\n launched: string;\n /** The application instance owner, the person who launched tha application and is the only user than can stop it.\n */\n owner: string;\n /** The Project the instances is running in.\n */\n project_id: string;\n /** The Instance name\n */\n name: string;\n /** For applications (not Jobs) this is the URL the application has exposed. It will ponly be available when the application has started.\n */\n url?: string;\n /** If the instance relates to a job, this will be the job collection, as defined in the original collection's job defintion.\n */\n job_collection?: string;\n /** If the instance relates to a job, this will be the job, as defined in the original collection's job defintion.\n */\n job_job?: string;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job defintion.\n */\n job_name?: string;\n /** The Job defintion's unique ID\n */\n job_id?: number;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job defintion.\n */\n job_version?: string;\n /** The Job container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n job_image_type?: InstanceSummaryJobImageType;\n /** The instance's knwon outputs, a JSON string defining a map of all the outputs. Typcially applied only to JOB applcation types\n */\n outputs?: string;\n}\n\nexport interface FileStat {\n /** The size of the file in bytes\n */\n size: number;\n /** The date and time (UTC) of the last modification\n */\n modified: string;\n}\n\nexport interface FilePathFile {\n /** The file's Dataset ID (if the file belongs to a Dataset)\n */\n dataset_id?: string;\n /** The file's Dataset version (if the file belongs to a Dataset)\n */\n dataset_version?: number;\n /** The file name\n */\n file_name: string;\n /** The ID of the file (if the file belongs to a Dataset)\n */\n file_id?: string;\n /** Whether the file is immutable (read-only)\n */\n immutable?: boolean;\n /** The file's MIME type\n */\n mime_type?: string;\n /** The file's owner\n */\n owner: string;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n stat: FileStat;\n}\n\n/**\n * The dictionary of label/value pairs\n\n */\nexport type DatasetVersionSummaryLabels = {};\n\n/**\n * The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). 
A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type DatasetVersionSummaryProcessingStage =\n | \"COPYING\"\n | \"FAILED\"\n | \"FORMATTING\"\n | \"LOADING\"\n | \"DELETING\"\n | \"DONE\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetVersionSummaryProcessingStage = {\n COPYING: \"COPYING\" as DatasetVersionSummaryProcessingStage,\n FAILED: \"FAILED\" as DatasetVersionSummaryProcessingStage,\n FORMATTING: \"FORMATTING\" as DatasetVersionSummaryProcessingStage,\n LOADING: \"LOADING\" as DatasetVersionSummaryProcessingStage,\n DELETING: \"DELETING\" as DatasetVersionSummaryProcessingStage,\n DONE: \"DONE\" as DatasetVersionSummaryProcessingStage,\n};\n\nexport interface DatasetVersionSummary {\n /** The owner of the Dataset version\n */\n owner?: string;\n /** The source of the Dataset - typically the name of the Dataset that was uploaded or a URL reference\n */\n source_ref: string;\n /** The date and time the Dataset was uploaded (an ISO-8601 formatted string in UTC)\n */\n published: string;\n /** The Dataset MIME type\n */\n type: string;\n /** The filename of the Dataset\n */\n file_name: string;\n /** The list of Project's the Dataset is attached to\n */\n projects: string[];\n /** Any extra variables passed in during the upload and handed to the format-support container.\n */\n format_extra_variables?: string;\n /** The version of the dataset\n */\n version: number;\n /** The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage: DatasetVersionSummaryProcessingStage;\n /** If this is a Deleted dataset this is the date and time the dataset was deleted (an ISO-8601 formatted string in UTC)\n */\n deleted?: string;\n /** If this is a Deleted dataset this is the username of the user that deleted the Dataset\n */\n deleting_user?: string;\n /** The dictionary of label/value pairs\n */\n labels?: DatasetVersionSummaryLabels;\n /** The size, in bytes, of the formatted Dataset\n */\n size?: number;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n}\n\nexport interface DatasetVersionProjectFile {\n project_name: string;\n project: string;\n files: string[];\n}\n\n/**\n * The dictionary of label/value pairs\n\n */\nexport type DatasetVersionDetailLabels = {};\n\n/**\n * The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). 
A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type DatasetVersionDetailProcessingStage =\n | \"COPYING\"\n | \"FAILED\"\n | \"FORMATTING\"\n | \"LOADING\"\n | \"DELETING\"\n | \"DONE\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetVersionDetailProcessingStage = {\n COPYING: \"COPYING\" as DatasetVersionDetailProcessingStage,\n FAILED: \"FAILED\" as DatasetVersionDetailProcessingStage,\n FORMATTING: \"FORMATTING\" as DatasetVersionDetailProcessingStage,\n LOADING: \"LOADING\" as DatasetVersionDetailProcessingStage,\n DELETING: \"DELETING\" as DatasetVersionDetailProcessingStage,\n DONE: \"DONE\" as DatasetVersionDetailProcessingStage,\n};\n\nexport interface DatasetVersionDetail {\n /** The owner of the Dataset version\n */\n owner: string;\n /** The source of the Dataset - typically the name of the Dataset that was uploaded or a URL reference\n */\n source_ref: string;\n /** The date and time the Dataset was uploaded (an ISO-8601 formatted string in UTC)\n */\n published: string;\n /** The Dataset MIME type\n */\n type: string;\n /** The filename of the Dataset\n */\n file_name: string;\n /** The list of\n */\n project_files: DatasetVersionProjectFile[];\n /** The list of Project's the Dataset is attached to\n */\n projects: string[];\n /** Any extra variables passed in during the upload and handed to the format-support container.\n */\n format_extra_variables?: string;\n /** The version of the dataset\n */\n version: number;\n /** The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage: DatasetVersionDetailProcessingStage;\n /** If this is a Deleted dataset this is the date and time the dataset was deleted (an ISO-8601 formatted string in UTC)\n */\n deleted?: string;\n /** If this is a Deleted dataset this is the username of the user that deleted the Dataset\n */\n deleting_user?: string;\n /** The dictionary of label/value pairs\n */\n labels?: DatasetVersionDetailLabels;\n /** The size, in bytes, of the formatted Dataset\n */\n size?: number;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n}\n\nexport interface DatasetSummary {\n /** The Dataset ID\n */\n dataset_id: string;\n /** The list of editors\n */\n editors: string[];\n /** The set of separate versions of the Dataset\n */\n versions: DatasetVersionSummary[];\n}\n\nexport interface DatasetDetail {\n /** The Dataset ID\n */\n dataset_id: string;\n /** The list of editors\n */\n editors: string[];\n /** The owner of the Dataset\n */\n owner: string;\n /** The set of separate versions of the Dataset\n */\n versions: DatasetVersionDetail[];\n}\n\nexport interface ApplicationSummary {\n /** The application name, the value of the Kubernetes **Custom Resource Definition** `spec.names.kind` property\n */\n kind: string;\n /** The application group\n */\n group?: string;\n /** The application unique reference, the value of the Kubernetes **Custom Resource Definition** `metadata.name` property\n */\n application_id: string;\n}\n\n/**\n * The REST method used. 
GET methods are not logged\n\n */\nexport type ApiLogDetailMethod = \"DELETE\" | \"PATCH\" | \"POST\" | \"PUT\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const ApiLogDetailMethod = {\n DELETE: \"DELETE\" as ApiLogDetailMethod,\n PATCH: \"PATCH\" as ApiLogDetailMethod,\n POST: \"POST\" as ApiLogDetailMethod,\n PUT: \"PUT\" as ApiLogDetailMethod,\n};\n\nexport interface ApiLogDetail {\n /** The REST method used. GET methods are not logged\n */\n method: ApiLogDetailMethod;\n /** The API path used, e.g. `/dataset`\n */\n path: string;\n /** The date/time the API call began\n */\n began: string;\n /** The REST method approximate execution time (nS) in a humanised form, where 7969400 is presented as 7,969,400\n */\n duration_ns?: string;\n /** The HTTP response status code\n */\n status_code?: number;\n /** A JSON string representign the in-query properties used in the call\n */\n params?: string;\n /** A JSON string representing the in-body properties used in the call\n */\n body?: string;\n /** Not all API calls using your user ID may have been executed by you, In cases where an administrator has executed a REST API call on your behalf, their user ID will be revealed using this property\n */\n impersonator?: string;\n}\n\nexport interface UsersGetResponse {\n /** A list of Users that have used the Data Manager\n */\n users: UserSummary[];\n}\n\nexport interface UserApiLogGetResponse {\n /** A list of API loc call records, with the oldest record first in the list\n */\n api_log: ApiLogDetail[];\n}\n\nexport type UserAccountGetResponse = UserAccountDetail;\n\nexport interface TypesGetResponse {\n /** A list of availabel MIME types\n */\n types: TypeSummary[];\n}\n\nexport interface TasksGetResponse {\n /** A list of Tasks\n */\n tasks: TaskSummary[];\n}\n\n/**\n * The purpose of the task. Tasks are typically executed for the purpose of Dataset processing, attaching as Project Files or Application Instance execution. Other fields in this object are only valid if the purpose is known (i.e. is not `UNKNOWN`)\n\n */\nexport type TaskGetResponsePurpose =\n | \"DATASET\"\n | \"FILE\"\n | \"INSTANCE\"\n | \"PROJECT\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskGetResponsePurpose = {\n DATASET: \"DATASET\" as TaskGetResponsePurpose,\n FILE: \"FILE\" as TaskGetResponsePurpose,\n INSTANCE: \"INSTANCE\" as TaskGetResponsePurpose,\n PROJECT: \"PROJECT\" as TaskGetResponsePurpose,\n};\n\nexport interface TaskGetResponse {\n /** The purpose of the task. Tasks are typically executed for the purpose of Dataset processing, attaching as Project Files or Application Instance execution. Other fields in this object are only valid if the purpose is known (i.e. is not `UNKNOWN`)\n */\n purpose: TaskGetResponsePurpose;\n /** The identity of the purpose, where available. This will be the Dataset UUID if the purpose of the task is/was for Dataset processing.\n */\n purpose_id: string;\n /** The version number, relating to the object under control. For Datasets this will be the Dataset version.\n */\n purpose_version?: number;\n /** Is the Task Purpose is `INSTANCE`, and the instance was given a `specification` the specification can be found here. 
For **Applications** the specification is returned verbatim.\n */\n instance_specification?: string;\n /** If a container image is launched by the task the image name is available here\n */\n image?: string;\n /** The date and time the task was created\n */\n created: string;\n /** True if the task has run to completion. If the task finished successfully the `exit_code` will be zero.\n */\n done: boolean;\n /** True if the Task relates to an object removal, i.e. a DELETE\n */\n removal?: boolean;\n /** Present when `done` and zero if the task finished successfully.\n */\n exit_code?: number;\n /** A (possibly empty) list of application states, the oldest state occupies the first position in the list.\n */\n states?: TaskState[];\n /** A (possibly empty) list of application events. The oldest event occupies the first position in the list.\n */\n events?: TaskEvent[];\n}\n\nexport interface ServiceErrorsGetResponse {\n /** A list of service errors\n */\n service_errors: ServiceErrorSummary[];\n}\n\nexport interface ProjectsGetResponse {\n projects: ProjectDetail[];\n}\n\nexport interface ProjectPostResponse {\n /** The project identity\n */\n project_id: string;\n}\n\nexport type ProjectGetResponse = ProjectDetail;\n\nexport type ProjectDeleteResponse = TaskIdentity;\n\nexport interface JobsGetResponse {\n jobs: JobSummary[];\n}\n\n/**\n * The optional container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type JobGetResponseImageType = \"SIMPLE\" | \"NEXTFLOW\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const JobGetResponseImageType = {\n SIMPLE: \"SIMPLE\" as JobGetResponseImageType,\n NEXTFLOW: \"NEXTFLOW\" as JobGetResponseImageType,\n};\n\nexport interface JobGetResponse {\n /** The Job's unique ID\n */\n id: number;\n application: ApplicationSummary;\n /** The Job collection\n */\n collection: string;\n /** The Job name, unique within a given collection\n */\n job: string;\n /** The Job version\n */\n version: string;\n /** The Job command (usually encoded)\n */\n command: string;\n /** The Job command encoding\n */\n command_encoding: string;\n /** The name of the container image that houses the job\n */\n image_name: string;\n /** The container image tag\n */\n image_tag: string;\n /** The directory where the project volume will be mounted. The root path to the project files\n */\n image_project_directory: string;\n /** The directory used as the container image working directory (if defined)\n */\n image_working_directory?: unknown;\n /** The optional container image type. 
Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n image_type?: JobGetResponseImageType;\n /** The Job's descriptive name\n */\n name: string;\n /** English description of the job\n */\n description?: string;\n /** A URL linking to the Job documentation\n */\n doc_url?: string;\n /** The Job's category\n */\n category?: string;\n /** The list of keywords assigned to the Job\n */\n keywords?: string[];\n variables?: JobVariables;\n}\n\nexport type InstanceTaskPurpose = \"CREATE\" | \"DELETE\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceTaskPurpose = {\n CREATE: \"CREATE\" as InstanceTaskPurpose,\n DELETE: \"DELETE\" as InstanceTaskPurpose,\n};\n\nexport interface InstanceTask {\n id: string;\n purpose: InstanceTaskPurpose;\n}\n\nexport interface InstancesGetResponse {\n instances: InstanceSummary[];\n}\n\n/**\n * The type of Application, which can be a `job` or an `application`\n\n */\nexport type InstanceGetResponseApplicationType = \"APPLICATION\" | \"JOB\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceGetResponseApplicationType = {\n APPLICATION: \"APPLICATION\" as InstanceGetResponseApplicationType,\n JOB: \"JOB\" as InstanceGetResponseApplicationType,\n};\n\nexport interface InstanceGetResponse {\n /** The Project the Instance is running in\n */\n project_id: string;\n /** The application name\n */\n name: string;\n /** The type of Application, which can be a `job` or an `application`\n */\n application_type: InstanceGetResponseApplicationType;\n /** The application ID\n */\n application_id: string;\n /** The application version\n */\n application_version: string;\n /** The optional application specification. For **Applications** this is returned verbatim. For **Jobs** additional material will be found.\n */\n application_specification?: string;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The application owner, the person who launched tha apllication and is the only user than can stop it.\n */\n owner: string;\n /** The application unique reference\n */\n launched: string;\n /** The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\n */\n phase: string;\n /** The application endpoint\n */\n url?: string;\n /** The JSON string representation of the JobDefintion's outputs\n */\n outputs?: string;\n /** A list of Tasks related to the application. Tasks are responsible for creating and deleteing the instance. You should fins at least one, assuming the corresponding task has not been deleted.\n */\n tasks: InstanceTask[];\n}\n\nexport interface FilesGetResponse {\n /** The project\n */\n project_id: string;\n /** The project path\n */\n path: string;\n /** The dataset identity (not its name). A unique reference assigned automatically when uploaded\n */\n files: FilePathFile[];\n /** Sub-directories in the current path\n */\n paths: string[];\n}\n\nexport interface FilePostResponse {\n /** The Project File identity, assigned automatically when a Dataset is added to a Project\n */\n file_id: string;\n /** The name of the File that will appear in the Project\n */\n file_name: string;\n /** The path to the file in the Project, relative to the volume root (mount point). Files in the root of the project will have a path value of '/'\n */\n file_path: string;\n /** The File task identity. 
The task assigned to convert and attach the Dataset File to the Project\n */\n task_id: string;\n}\n\nexport interface InstancePostResponse {\n /** The application instance identity (not its name). Assigned automatically when created\n */\n instance_id: string;\n /** The instance task identity. The task assigned to process the instance\n */\n task_id: string;\n /** The instance expanded command. Applies only to Job instances.\n */\n command?: string;\n}\n\nexport type InstanceDeleteResponse = TaskIdentity;\n\nexport interface DatasetsGetResponse {\n datasets: DatasetSummary[];\n}\n\nexport type DatasetVersionsGetResponse = DatasetDetail;\n\nexport type DatasetVersionDeleteResponse = TaskIdentity;\n\n/**\n * The Metadata type (an object)\n\n */\nexport type DatasetSchemaGetResponseType = \"object\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetSchemaGetResponseType = {\n object: \"object\" as DatasetSchemaGetResponseType,\n};\n\nexport interface DatasetSchemaGetResponse {\n /** The Metadata title\n */\n title: string;\n /** The Metadata description\n */\n description: string;\n /** The Metadata type (an object)\n */\n type: DatasetSchemaGetResponseType;\n /** Required properties\n */\n required: string[];\n}\n\nexport interface DatasetMetaGetResponse {\n /** The Metadata title\n */\n dataset_name: string;\n /** The Metadata description\n */\n dataset_id: string;\n /** The Metadata type (an object)\n */\n description: string;\n /** The date and time of creation\n */\n created: string;\n /** The date and time it was last updated\n */\n last_updated: string;\n /** The user who created the Metadata\n */\n created_by: string;\n /** The Metadata version\n */\n metadata_version: string;\n /** The Metadata's annotations\n */\n annotations: unknown[];\n}\n\nexport interface DatasetDigestGetResponse {\n /** The Dataset matching the digest\n */\n dataset_id: string;\n /** The Dataset version\n */\n dataset_version: number;\n}\n\nexport interface DatasetPutPostResponse {\n /** The dataset identity (not its name). Assigned automatically when uploaded\n */\n dataset_id: string;\n /** The dataset identity version. Assigned automatically when uploaded\n */\n dataset_version: number;\n /** The dataset task identity. The task assigned to process the dataset\n */\n task_id: string;\n}\n\nexport interface DatasetAnnotationsPostResponse {\n annotations: unknown[];\n}\n\nexport interface ApplicationsGetResponse {\n /** A list of installed appications, which are application-compliant Kubernetes \"operators\"\n */\n applications: ApplicationSummary[];\n}\n\nexport interface ApplicationGetResponse {\n /** The Application's unique ID\n */\n id: string;\n /** The application specification template. 
Used when creating application instances\n */\n template: string;\n /** A list of instances of the application\n */\n instances: string[];\n /** The application group\n */\n group: string;\n /** The name (kind) of the application\n */\n kind: string;\n /** The list of available versions\n */\n versions: string[];\n}\n\nexport type AdminUserPutResponse = UserAccountDetail;\n\nexport interface AdminJobManifestLoadPutResponse {\n /** True if successful, false otherwise\n */\n status: boolean;\n /** The number of Job Manifests inspected\n */\n manifests_inspected: number;\n /** The number of Job Definitions inspected\n */\n job_definitions_inspected: number;\n /** The number of Jobs inspected\n */\n jobs_inspected: number;\n}\n\nexport interface AccountServerGetHostanmeResponse {\n /** The configured Account Server hostname, which will be an empty string if one is not configured\n */\n hostname: string;\n /** The name of this Data Manager instance, used to distinguish itself on calls to the Account Server. Every Data Manager is deployed with a unique name, regardless of the Account Server that's being used.\n */\n data_manager_name: string;\n}\n\nexport interface DmError {\n /** Brief error text that can be presented to the user\n */\n error: string;\n}\n"],"mappings":";;;;;;;;AAkZO,IAAM,kCAAkC;AAAA,EAC7C,QAAQ;AAAA;AAkDH,IAAM,6BAA6B;AAAA,EACxC,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AAAA;AAgDD,IAAM,iBAAiB;AAAA,EAC5B,SAAS;AAAA,EACT,SAAS;AAAA,EACT,OAAO;AAAA,EACP,SAAS;AAAA,EACT,SAAS;AAAA;AA+BJ,IAAM,iBAAiB;AAAA,EAC5B,UAAU;AAAA,EACV,OAAO;AAAA,EACP,SAAS;AAAA,EACT,MAAM;AAAA,EACN,OAAO;AAAA;AAqBF,IAAM,8BAA8B;AAAA,EACzC,UAAU;AAAA,EACV,OAAO;AAAA,EACP,SAAS;AAAA;AAuGJ,IAAM,sBAAsB;AAAA,EACjC,QAAQ;AAAA,EACR,UAAU;AAAA;AAkDL,IAAM,8BAA8B;AAAA,EACzC,QAAQ;AAAA,EACR,UAAU;AAAA;AAUL,IAAM,iCAAiC;AAAA,EAC5C,aAAa;AAAA,EACb,KAAK;AAAA;AAuHA,IAAM,uCAAuC;AAAA,EAClD,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AAAA;AAyED,IAAM,sCAAsC;AAAA,EACjD,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AAAA;AAiGD,IAAM,qBAAqB;AAAA,EAChC,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,MAAM;AAAA,EACN,KAAK;AAAA;AAmEA,IAAM,yBAAyB;AAAA,EACpC,SAAS;AAAA,EACT,MAAM;AAAA,EACN,UAAU;AAAA,EACV,SAAS;AAAA;AAsEJ,IAAM,0BAA0B;AAAA,EACrC,QAAQ;AAAA,EACR,UAAU;AAAA;AA2DL,IAAM,sBAAsB;AAAA,EACjC,QAAQ;AAAA,EACR,QAAQ;AAAA;AAmBH,IAAM,qCAAqC;AAAA,EAChD,aAAa;AAAA,EACb,KAAK;AAAA;AAwGA,IAAM,+BAA+B;AAAA,EAC1C,QAAQ;AAAA;","names":[]}
+
{"version":3,"sources":["../src/data-manager-api.schemas.ts"],"sourcesContent":["/**\n * Generated by orval v6.6.0 🍺\n * Do not edit manually.\n * Dataset Manager API\n * The Dataset Manager API service.\n\nA service that allows *registered* users to make **Datasets** and associated **Metadata** available to **Applications** and **Jobs** using **Projects** and **Files**.\n\n * OpenAPI spec version: 0.7\n */\nexport type AdminGetServiceErrorsParams = {\n include_acknowleged?: QIncludeAcknowlegedParameter;\n};\n\nexport type GetUserApiLogParams = {\n from?: QFromParameter;\n until?: QUntilParameter;\n};\n\nexport type PatchInstanceParams = { archive?: QInstanceArchiveParameter };\n\nexport type GetInstancesParams = { project_id?: QProjectIdParameter };\n\nexport type GetTaskParams = {\n event_limit?: QEventLimitParameter;\n event_prior_ordinal?: QEventPriorOrdinalParameter;\n};\n\nexport type GetTasksParams = {\n exclude_done?: QExcludeDoneParameter;\n exclude_purpose?: QExcludePurposeParameter;\n project_id?: QProjectIdParameter;\n};\n\nexport type DeleteUnmanagedFileParams = {\n file: QFileParameter;\n path?: QFilePathParameter;\n project_id: QFileProjectIdParameter;\n};\n\nexport type GetFilesParams = {\n project_id: QFileProjectIdParameter;\n path?: QFilePathParameter;\n include_hidden?: QIncludeHiddenParameter;\n};\n\nexport type DeleteDatasetParams = {\n keep_project_files?: QKeepProjectFilesParameter;\n};\n\nexport type GetVersionsParams = { include_deleted?: QIncludeDeletedParameter };\n\nexport type GetProjectFileParams = {\n path?: QFilePathParameter;\n file: QFileParameter;\n};\n\n/**\n * Filter the datasets by a comma separated list of editors\n\n */\nexport type QEditorsParameter = string;\n\n/**\n * Filter the datasets by a comma separated list of owners\n\n */\nexport type QOwnersParameter = string;\n\n/**\n * Filter the datasets by username\n\n */\nexport type QUsernameParameter = string;\n\n/**\n * An until (exclusive) date-time. If provided only API calls made before this will be returned. UTC is assumed if no timezone is provided\n\n */\nexport type QUntilParameter = string;\n\n/**\n * A Project identity\n */\nexport type QProjectIdParameter = string;\n\n/**\n * JSON string containing a list of label/value pairs for the datasets to be filtered by. If the value is set to null, then only the label is matched. If both the label and value are set, then both are matched. e.g. '{\"label1\": null, \"labe2\": \"value2\"}'\n\n */\nexport type QLabelsParameter = string;\n\nexport type GetDatasetsParams = {\n include_deleted?: QIncludeDeletedParameter;\n username?: QUsernameParameter;\n dataset_mime_type?: QDatasetMimeTypeParameter;\n owners?: QOwnersParameter;\n editors?: QEditorsParameter;\n labels?: QLabelsParameter;\n};\n\n/**\n * Whether to convert Project managed file instances to unmanged files\n\n */\nexport type QKeepProjectFilesParameter = boolean;\n\n/**\n * True to archive the instance\n */\nexport type QInstanceArchiveParameter = boolean;\n\n/**\n * Whether to include hidden files and directories\n */\nexport type QIncludeHiddenParameter = boolean;\n\n/**\n * Whether to include records that are deleted\n */\nexport type QIncludeDeletedParameter = boolean;\n\n/**\n * Set to include acknowledged items\n\n */\nexport type QIncludeAcknowlegedParameter = boolean;\n\n/**\n * A from (inclusive) date-time. If provided no API calls prior to this will be returned. 
UTC is assumed if no timezone is provided\n\n */\nexport type QFromParameter = string;\n\n/**\n * The Project identity\n */\nexport type QFileProjectIdParameter = string;\n\n/**\n * A project path. If provided it must begin `/` and refers to a path where `/` represents the project's root directory\n\n */\nexport type QFilePathParameter = string;\n\n/**\n * A project file.\n\n */\nexport type QFileParameter = string;\n\n/**\n * Set to a dot-separated string of purpose enumerations, i.e. `INSTANCE`, `FILE` or `DATASET`. To exclude file and dataset tasks set to `FILE.DATASET`\n\n */\nexport type QExcludePurposeParameter = string;\n\n/**\n * Set true if you want to exclude 'done' tasks, i.e. just see those that are still running.\n\n */\nexport type QExcludeDoneParameter = boolean;\n\n/**\n * The ordinal of a previously received event. If set, only events subsequent to the ordinal provided will be returned. Providing a value of 0 will result in retrieving the first and subsequent events.\n\n */\nexport type QEventPriorOrdinalParameter = number;\n\n/**\n * Maximum number of events to return. If provided, can be 1 or more.\n\n */\nexport type QEventLimitParameter = number;\n\n/**\n * Filter the datasets by the supplied mime_type.\n\n */\nexport type QDatasetMimeTypeParameter = string;\n\n/**\n * Set, if you're an admin, to call the endpoint without impersonation\n\n */\nexport type QDoNotImpersonateParameter = boolean;\n\nexport type GetUserAccountParams = {\n do_not_impersonate?: QDoNotImpersonateParameter;\n};\n\nexport type UserPatchBodyBody = {\n /** If set to a message the user account is suspended, with the user receiving this message when they try and use the API. A suspended user accont can be restored by setting the message to `/restore`\n */\n suspend_message?: string;\n};\n\nexport type UserAccountPatchBodyBody = {\n /** If set the user account becomes private, if provided but false the user account becomes public. Public Users show up in user searches\n */\n private?: boolean;\n /** For **admin** accounts, if set the user account is able to read anything, i.e. `GET` API calls (i.e. endpoints that do not change the Data Manager state) behave as though the caller is acting as *everyone*. An **admin** user would set ths parameter in order to browse the system, and then switch to `impersonate` mode in order to chnage things as the chosen user\n */\n become_admin?: boolean;\n /** For **admin** accounts, if set API calls behave as though the caller is the user being impersonated. To stop impersonating set this to an empty string. To set impersonation to anything other than an empty string you must also set `become_admin`\n */\n impersonate?: string;\n /** For **admin** accounts, if this is set the account for the user being impersonated is patched, rather then the user's own account. To use this you must have a value for `impersonate`\n */\n use_impersonation?: boolean;\n};\n\nexport type ProjectPostBodyBody = {\n name: string;\n /** Whether the project is private. 
You may not be permitted to make the project private, that will depend on the project product you're using to create the project\n */\n private?: boolean;\n /** The Organisation the Project belongs to\n */\n organisation_id: string;\n /** The Organisational Unit the Project belongs to\n */\n unit_id: string;\n /** The Data Manager *Tier Product ID* you're using to create the Project\n */\n tier_product_id: string;\n};\n\nexport type ProjectPatchBodyBody = {\n private?: boolean;\n /** The new name of the ptojct\n */\n name?: string;\n};\n\nexport type ProjectFilePutBodyBody = {\n file: Blob;\n /** An alternative filename to use for the uploaded File\n */\n as_filename?: string;\n /** The Project path of the file.\n */\n path?: string;\n};\n\nexport type JobManifestPutBodyBody = {\n /** The URL of the Job Manifest */\n url: string;\n /** Optional URL header values (a JSON string) */\n header?: string;\n /** Optional URL parameter values (a JSON string) */\n params?: string;\n};\n\nexport type InstancePostBodyBody = {\n /** A supported application. Applications instances are managed using pre-deployed Kubernetes **Operators**. The application ID is a combination of the operator _plural_ and _group_.\n */\n application_id: string;\n /** A supported application version to launch\n */\n application_version: string;\n /** The project to attach\n */\n project_id: string;\n /** The name to use for the instance\n */\n as_name: string;\n /** A URL the DM will use to PUT job progress messages as the requested instance runs. Used, at the moment, for Job execution\n */\n callback_url?: string;\n /** Used in conjunction with the `callback_url` any value provided here will be passed back in the message payload that's delivered to the callback URL. It can be used by the recipient to provide a context that's meaningful\n */\n callback_context?: string;\n /** A debug value that may be used by the instance.\n\nFor Data Manager **Job** applications setting this to anything other zero ('0') prevents the Job's Pod from being deleted automatically, allowing a developer to inspect the Pod's log for example.\n\nThe behaviour of **Application** instances using this property is undefined. It will depend on whether the application **CRD** handles the Data Manager debug field.\n */\n debug?: string;\n /** The instance specification. A JSON string that's application-specific and controls the application's behaviour.\n\nWhen laucnhing a Data Manager **Job** Application you must identify the Job using the properties `collection`, `job` and `version`, e.g. `{\"collection\":\"im-test\",\"job\":\"nop\",\"version\":\"1.0.0\"}`\n\nJobs that offer commands will often advertise a series of **inputs** and **options** where the values can be provided using a **variables** map in the specification. Something like `\"variables\":{\"x\":7}`.\n\nJobs start in a Job-specific **working directory** but the starting directory for any Job can be adjusted by defining a `sub_path` to the root specification. For example, if you want the Job to start in the path `foo/bar` (inside the Job's built-in working directory) you can add `\"sub_path\":\"foo/bar\"` to the specification. 
You can only use a sub-path for a Job if the Job defines a working directory and `sub-path` cannot begin or end with a path separator (`/`).\n */\n specification?: string;\n};\n\nexport type FilePostBodyBody = {\n /** The Dataset UUID for the File that you intend to attach\n */\n dataset_id: string;\n /** The Dataset version to attach\n */\n dataset_version: number;\n /** The Project UUID you're attaching to\n */\n project_id: string;\n /** The desired Dataset file type (a MIME type). Whether or not the chosen fileType is supported will depend on the Dataset\n */\n as_type: string;\n /** A path within the Project to add the File, default is the project root ('/'), the mount-point within the application container. Paths must begin '/'\n */\n path?: string;\n /** Whether to compress the Dataset File as it's attached. Compression is achieved using gzip, resulting in a File ending `.gz`. By default the file will be compressed\n */\n compress?: boolean;\n /** Whether the Dataset File can be modified while in the Project. By default the File cannot be modified\n */\n immutable?: boolean;\n};\n\nexport type DatasetPutBodyBody = {\n /** The MIME type of the Dataset\n */\n dataset_type: string;\n /** Extra variables (text) presented to the Dataset format-support container, which occurs during the upload and database processing stage. The content of the text is Dataset type specific.\n */\n format_extra_variables?: string;\n /** If set the post-format database load action is skipped. This means the dataset is uploaded but its molecules are not entered into the Data Manager database. This can save significant time if you're just experimenting with the dataset and do not need the molecules in the database\n */\n skip_molecule_load?: boolean;\n /** The Project the file belongs to\n */\n project_id: string;\n /** The Project path of the file.\n */\n path: string;\n /** The file name of the file in the Project path to load as a new Dataset.\n */\n file_name: string;\n /** If provided the File becomes a new version of the Dataset named. If not provided this File becomes the first version of a new Dataset, whose ID is returned to you on success.\n */\n dataset_id?: string;\n /** The Organisation you want the Dataset to belong to. If not supplied the Project Organisation is used\n */\n organisation_id?: string;\n /** The Organisational Unit you want the Dataset to belong to. If not supplied the Project Unit is used\n */\n unit_id?: string;\n};\n\nexport type DatasetVersionMetaPostBodyBody = {\n /** JSON string containing a list of parameter changes to the metadata. Only the description is currently allowed.\n */\n meta_properties?: string;\n /** JSON string containing a list of annotations. The format of the labels should match either the Fields Descriptor or Service Execution annotation formats described in the data-manager-metadata library.\n */\n annotations?: string;\n};\n\nexport type DatasetMetaPostBodyBody = {\n /** JSON string containing a list of parameter changes to the metadata. Only the description is currently allowed.\n */\n meta_properties?: string;\n /** JSON string containing a list of labels. 
The format of the labels should match the label annotation format described in the data-manager-metadata library.\n */\n labels?: string;\n};\n\nexport type DatasetPostBodyBody = {\n dataset_file: Blob;\n /** The MIME type of the Dataset\n */\n dataset_type: string;\n /** Extra variables (text) presented to the Dataset format-support container, which occurs during the upload and database processing stage. The content of the text is Dataset type specific.\n */\n format_extra_variables?: string;\n /** If set the post-format database load action is skipped. This means the dataset is uploaded but its molecules are not entered into the Data Manager database. This can save significant time if you're just experimenting with the dataset and do not need the molecules in the database\n */\n skip_molecule_load?: boolean;\n /** An optional new filename to use for the uploaded Dataset. The Dataset will be stored using this name.\n */\n as_filename?: string;\n /** If provided the Dataset becomes a new version of the Dataset named. If not provided this Dataset becomes the first version of a new Dataset, whose ID is returned to you on success.\n */\n dataset_id?: string;\n /** The Organisation you want the Dataset to belong to\n */\n organisation_id: string;\n /** The Organisational Unit you want the Dataset to belong to\n */\n unit_id: string;\n};\n\nexport interface UserSummary {\n /** The user's preferred username\n */\n username: string;\n}\n\nexport interface UserDetail {\n /** The user's preferred username\n */\n username: string;\n /** The date and time the user was first seen (an ISO-8601 formatted string in UTC)\n */\n first_seen?: string;\n /** Set if the user's account is marked as private. Provate accounts do not show up against general queries.\n */\n private: boolean;\n /** True if the account is suspended\n */\n suspended?: boolean;\n /** If the account is suspended this typically displays a reason for suspension\n */\n suspension_message?: string;\n /** For admin accounts, whether the user is acting in an administartive capacity, i.e. acting as everyone\n */\n become_admin?: boolean;\n /** For admin accounts, whether the user is impersonating another user\n */\n impersonate?: string;\n}\n\nexport interface UserAccountDetail {\n user: UserDetail;\n}\n\n/**\n * The Schema type (an object)\n\n */\nexport type TypeSummaryFormatterOptionsType = \"object\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TypeSummaryFormatterOptionsType = {\n object: \"object\" as TypeSummaryFormatterOptionsType,\n};\n\n/**\n * If present, contains the formatter_options that can be entered in the format_extra_variables field in the POST /dataset api.\n\n */\nexport type TypeSummaryFormatterOptions = {\n /** The title of the Formatter object\n */\n title: string;\n /** The Schema type (an object)\n */\n type: TypeSummaryFormatterOptionsType;\n /** Required properties\n */\n required: string[];\n};\n\nexport interface TypeSummary {\n /** The File Type MIME\n */\n mime: string;\n /** The file's supported file extensions. Each type is limited to a limited number of extensions. For example, SDF files must have the extension `.sdf` (or `.sdf.gz`).\n */\n file_extensions: string[];\n /** The file's type's format-support container image (if set). 
Types without a format support image cannot be uploaded, but they might be available for use as destination type when a Dataset is added to a Project.\n */\n formatter_image?: string;\n /** True if Datasets uploaded using this type's support loading of data into the Data Manager data-base.\n */\n formatter_supports_db_load?: boolean;\n /** If present, contains the formatter_options that can be entered in the format_extra_variables field in the POST /dataset api.\n */\n formatter_options?: TypeSummaryFormatterOptions;\n}\n\n/**\n * The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type TaskSummaryProcessingStage =\n | \"COPYING\"\n | \"FAILED\"\n | \"FORMATTING\"\n | \"LOADING\"\n | \"DELETING\"\n | \"DONE\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskSummaryProcessingStage = {\n COPYING: \"COPYING\" as TaskSummaryProcessingStage,\n FAILED: \"FAILED\" as TaskSummaryProcessingStage,\n FORMATTING: \"FORMATTING\" as TaskSummaryProcessingStage,\n LOADING: \"LOADING\" as TaskSummaryProcessingStage,\n DELETING: \"DELETING\" as TaskSummaryProcessingStage,\n DONE: \"DONE\" as TaskSummaryProcessingStage,\n};\n\nexport interface TaskSummary {\n /** The Task UUID\n */\n id: string;\n /** If a container image is launched by the task the image name is available here\n */\n image?: string;\n /** The date and time the task was created\n */\n created: string;\n /** True if the task has run to completion. If the task finished successfully the `exit_code` will be zero.\n */\n done: boolean;\n /** True if the Task relates to an object removal, i.e. a DELETE\n */\n removal?: boolean;\n /** Present when `done` and zero if the task finished successfully.\n */\n exit_code?: number;\n /** The purpose for the Task. Tasks are responsible for _uploading_ a Dataset, _attaching_ a Dataset as a File to a project or launching an Applications _instance_.\n */\n purpose: string;\n /** The related object ID for the Task purpose. If the purpose relates to a **Dataset** upload the ID will be a Dataset ID.\n */\n purpose_id: string;\n /** The related object version for the Task purpose. This field will only be set if the **purpose** is `DATASET`.\n */\n purpose_version?: number;\n /** The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage?: TaskSummaryProcessingStage;\n}\n\n/**\n * The task state. The typical state sequence is `PENDING`, then `STARTED` and finally `SUCCESS`\n\n */\nexport type TaskStateState =\n | \"PENDING\"\n | \"STARTED\"\n | \"RETRY\"\n | \"SUCCESS\"\n | \"FAILURE\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskStateState = {\n PENDING: \"PENDING\" as TaskStateState,\n STARTED: \"STARTED\" as TaskStateState,\n RETRY: \"RETRY\" as TaskStateState,\n SUCCESS: \"SUCCESS\" as TaskStateState,\n FAILURE: \"FAILURE\" as TaskStateState,\n};\n\nexport interface TaskState {\n /** The task state. 
The typical state sequence is `PENDING`, then `STARTED` and finally `SUCCESS`\n */\n state: TaskStateState;\n /** A short message accompanying the state, generally only found when the state is `FAILURE`\n */\n message?: string;\n /** The date and time of the state change\n */\n time: string;\n}\n\nexport interface TaskIdentity {\n task_id: string;\n}\n\n/**\n * The level of the message, a typical logging framework value\n\n */\nexport type TaskEventLevel =\n | \"CRITICAL\"\n | \"ERROR\"\n | \"WARNING\"\n | \"INFO\"\n | \"DEBUG\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskEventLevel = {\n CRITICAL: \"CRITICAL\" as TaskEventLevel,\n ERROR: \"ERROR\" as TaskEventLevel,\n WARNING: \"WARNING\" as TaskEventLevel,\n INFO: \"INFO\" as TaskEventLevel,\n DEBUG: \"DEBUG\" as TaskEventLevel,\n};\n\nexport interface TaskEvent {\n /** The event sequence number. The first event is always '1'.\n */\n ordinal: number;\n /** A short message.\n */\n message: string;\n /** The level of the message, a typical logging framework value\n */\n level: TaskEventLevel;\n /** The date and time the event was generated\n */\n time: string;\n}\n\nexport type ServiceErrorSummarySeverity = \"CRITICAL\" | \"ERROR\" | \"WARNING\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const ServiceErrorSummarySeverity = {\n CRITICAL: \"CRITICAL\" as ServiceErrorSummarySeverity,\n ERROR: \"ERROR\" as ServiceErrorSummarySeverity,\n WARNING: \"WARNING\" as ServiceErrorSummarySeverity,\n};\n\nexport interface ServiceErrorSummary {\n id: number;\n created: string;\n summary: string;\n severity: ServiceErrorSummarySeverity;\n hostname: string;\n error_code?: number;\n stack_trace: string;\n acknowledged: boolean;\n acknowledged_at?: string;\n acknowledging_user?: string;\n}\n\nexport interface ProjectFileDetail {\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The ProjectFile's Dataset origin\n */\n dataset_id?: string;\n /** The ProjectFile's Dataset origin version\n */\n dataset_version?: number;\n /** The ProjectFile's unique ID\n */\n file_id: string;\n /** The ProjectFile's filename within the Project\n */\n file_name: string;\n /** The ProjectFile's path within the Project volume\n */\n file_path: string;\n /** True if the ProjectFile cannot be modified while in the Project\n */\n immutable: boolean;\n /** The owner of the ProjectFile. This is the user that added the Dataset (as this file) to the Project\n */\n owner: string;\n /** The Project the ProjectFile belongs to\n */\n project_id?: string;\n /** The ProjectFile MIME type\n */\n mime_type: string;\n}\n\nexport interface ProjectDetail {\n /** The project name\n */\n name: string;\n /** The project unique reference\n */\n project_id: string;\n /** The Account Server Product the Project belongs to\n */\n product_id?: string;\n /** The Account Server Organisation the Project Product belongs to\n */\n organisation_id?: string;\n /** The Account Server Unit the Project Product belongs to\n */\n unit_id?: string;\n /** The project (owner) creator\n */\n owner: string;\n /** An editor (user_id) of the project */\n editors: string[];\n /** The approximate size of all the files in the Project volume. This is updated regaularly throughout the day and its current size may differ from what is reported here. The size resolution is 1MiB (the smallest billable unit). 
Therefore a project that contains 32KiB of files is recorded as 1MiB in size */\n size: number;\n /** A list of managed files in the Project\n */\n files?: ProjectFileDetail[];\n}\n\nexport interface JobOrderDetail {\n options: string[];\n}\n\nexport interface JobVariables {\n /** The Job command's variable ordering declaration. Contains a list of `options` in an ordered list, the order defines the order of presentation of the Job's variables.\n */\n order?: JobOrderDetail;\n /** The Job command's inputs. A string that represents a JSONSchema for the command's inputs, essentially the **variables/inputs** block of the Job's JobDefinition.\n */\n inputs?: string;\n /** The Job command's outputs. A string that represents a JSONSchema for the command's inputs, essentially the **variables/outputs** block of the Job's JobDefinition.\n */\n outputs?: string;\n /** The Job command's options. A string that represents a JSONSchema for the command's options, essentially the **variables/options** block of the Job's JobDefinition.\n */\n options?: string;\n}\n\n/**\n * The optional container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type JobSummaryImageType = \"SIMPLE\" | \"NEXTFLOW\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const JobSummaryImageType = {\n SIMPLE: \"SIMPLE\" as JobSummaryImageType,\n NEXTFLOW: \"NEXTFLOW\" as JobSummaryImageType,\n};\n\nexport interface JobSummary {\n /** The Job's unique ID\n */\n id: number;\n /** The Job namespace\n */\n collection: string;\n /** The Job name, unique within a given namespace\n */\n job: string;\n /** The Job version\n */\n version: string;\n /** The optional container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n image_type: JobSummaryImageType;\n /** The Job's category\n */\n category?: string;\n /** The list of keywords assigned to the Job\n */\n keywords?: string[];\n /** The name of the job in English\n */\n name: string;\n /** The description of the job in English\n */\n description?: string;\n /** A URL linking to the Job documentation\n */\n doc_url?: string;\n}\n\nexport interface JobApplication {\n /** The Job's operator ID */\n id: string;\n /** The Job's operator version */\n version: string;\n}\n\n/**\n * The Job container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type InstanceSummaryJobImageType = \"SIMPLE\" | \"NEXTFLOW\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryJobImageType = {\n SIMPLE: \"SIMPLE\" as InstanceSummaryJobImageType,\n NEXTFLOW: \"NEXTFLOW\" as InstanceSummaryJobImageType,\n};\n\n/**\n * The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparring to run, is `PENDING`. The instance is running when the phase is `RUNNING`. 
`COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccesfully.\n\n */\nexport type InstanceSummaryPhase =\n | \"COMPLETED\"\n | \"CRASH_LOOP_BACKOFF\"\n | \"FAILED\"\n | \"IMAGE_PULL_BACKOFF\"\n | \"PENDING\"\n | \"RUNNING\"\n | \"SUCCEEDED\"\n | \"UNKNOWN\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryPhase = {\n COMPLETED: \"COMPLETED\" as InstanceSummaryPhase,\n CRASH_LOOP_BACKOFF: \"CRASH_LOOP_BACKOFF\" as InstanceSummaryPhase,\n FAILED: \"FAILED\" as InstanceSummaryPhase,\n IMAGE_PULL_BACKOFF: \"IMAGE_PULL_BACKOFF\" as InstanceSummaryPhase,\n PENDING: \"PENDING\" as InstanceSummaryPhase,\n RUNNING: \"RUNNING\" as InstanceSummaryPhase,\n SUCCEEDED: \"SUCCEEDED\" as InstanceSummaryPhase,\n UNKNOWN: \"UNKNOWN\" as InstanceSummaryPhase,\n};\n\n/**\n * The type of Application, which can be a `job` or an `application`\n\n */\nexport type InstanceSummaryApplicationType = \"APPLICATION\" | \"JOB\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceSummaryApplicationType = {\n APPLICATION: \"APPLICATION\" as InstanceSummaryApplicationType,\n JOB: \"JOB\" as InstanceSummaryApplicationType,\n};\n\nexport interface InstanceSummary {\n /** The application instance ID\n */\n id: string;\n /** The application ID\n */\n application_id: string;\n /** The application version\n */\n application_version: string;\n /** The User's specification, provided when the application was laucnhed\n */\n application_specification?: string;\n /** The type of Application, which can be a `job` or an `application`\n */\n application_type: InstanceSummaryApplicationType;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparring to run, is `PENDING`. The instance is running when the phase is `RUNNING`. `COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccesfully.\n */\n phase: InstanceSummaryPhase;\n /** The data and time (UTC) the instance was laucnhed\n */\n launched: string;\n /** The application instance owner, the person who launched tha application and is the only user than can stop it.\n */\n owner: string;\n /** The Project the instances is running in.\n */\n project_id: string;\n /** The Instance name\n */\n name: string;\n /** For applications (not Jobs) this is the URL the application has exposed. It will ponly be available when the application has started.\n */\n url?: string;\n /** If the instance relates to a job, this will be the job collection, as defined in the original collection's job defintion.\n */\n job_collection?: string;\n /** If the instance relates to a job, this will be the job, as defined in the original collection's job defintion.\n */\n job_job?: string;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job defintion.\n */\n job_name?: string;\n /** The Job defintion's unique ID\n */\n job_id?: number;\n /** If the instance relates to a job, this will be the job's name, as defined in the original collection's job defintion.\n */\n job_version?: string;\n /** The Job container image type. 
Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n job_image_type?: InstanceSummaryJobImageType;\n /** The instance's knwon outputs, a JSON string defining a map of all the outputs. Typcially applied only to JOB applcation types\n */\n outputs?: string;\n}\n\nexport interface FileStat {\n /** The size of the file in bytes\n */\n size: number;\n /** The date and time (UTC) of the last modification\n */\n modified: string;\n}\n\nexport interface FilePathFile {\n /** The file's Dataset ID (if the file belongs to a Dataset)\n */\n dataset_id?: string;\n /** The file's Dataset version (if the file belongs to a Dataset)\n */\n dataset_version?: number;\n /** The file name\n */\n file_name: string;\n /** The ID of the file (if the file belongs to a Dataset)\n */\n file_id?: string;\n /** Whether the file is immutable (read-only)\n */\n immutable?: boolean;\n /** The file's MIME type\n */\n mime_type?: string;\n /** The file's owner\n */\n owner: string;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n stat: FileStat;\n}\n\n/**\n * The dictionary of label/value pairs\n\n */\nexport type DatasetVersionSummaryLabels = {};\n\n/**\n * The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type DatasetVersionSummaryProcessingStage =\n | \"COPYING\"\n | \"FAILED\"\n | \"FORMATTING\"\n | \"LOADING\"\n | \"DELETING\"\n | \"DONE\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetVersionSummaryProcessingStage = {\n COPYING: \"COPYING\" as DatasetVersionSummaryProcessingStage,\n FAILED: \"FAILED\" as DatasetVersionSummaryProcessingStage,\n FORMATTING: \"FORMATTING\" as DatasetVersionSummaryProcessingStage,\n LOADING: \"LOADING\" as DatasetVersionSummaryProcessingStage,\n DELETING: \"DELETING\" as DatasetVersionSummaryProcessingStage,\n DONE: \"DONE\" as DatasetVersionSummaryProcessingStage,\n};\n\nexport interface DatasetVersionSummary {\n /** The owner of the Dataset version\n */\n owner?: string;\n /** The source of the Dataset - typically the name of the Dataset that was uploaded or a URL reference\n */\n source_ref: string;\n /** The date and time the Dataset was uploaded (an ISO-8601 formatted string in UTC)\n */\n published: string;\n /** The Dataset MIME type\n */\n type: string;\n /** The filename of the Dataset\n */\n file_name: string;\n /** The list of Project's the Dataset is attached to\n */\n projects: string[];\n /** Any extra variables passed in during the upload and handed to the format-support container.\n */\n format_extra_variables?: string;\n /** The version of the dataset\n */\n version: number;\n /** The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). 
A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage: DatasetVersionSummaryProcessingStage;\n /** If this is a Deleted dataset this is the date and time the dataset was deleted (an ISO-8601 formatted string in UTC)\n */\n deleted?: string;\n /** If this is a Deleted dataset this is the username of the user that deleted the Dataset\n */\n deleting_user?: string;\n /** The dictionary of label/value pairs\n */\n labels?: DatasetVersionSummaryLabels;\n /** The size, in bytes, of the formatted Dataset\n */\n size?: number;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n}\n\nexport interface DatasetVersionProjectFile {\n project_name: string;\n project: string;\n files: string[];\n}\n\n/**\n * The dictionary of label/value pairs\n\n */\nexport type DatasetVersionDetailLabels = {};\n\n/**\n * The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n\n */\nexport type DatasetVersionDetailProcessingStage =\n | \"COPYING\"\n | \"FAILED\"\n | \"FORMATTING\"\n | \"LOADING\"\n | \"DELETING\"\n | \"DONE\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetVersionDetailProcessingStage = {\n COPYING: \"COPYING\" as DatasetVersionDetailProcessingStage,\n FAILED: \"FAILED\" as DatasetVersionDetailProcessingStage,\n FORMATTING: \"FORMATTING\" as DatasetVersionDetailProcessingStage,\n LOADING: \"LOADING\" as DatasetVersionDetailProcessingStage,\n DELETING: \"DELETING\" as DatasetVersionDetailProcessingStage,\n DONE: \"DONE\" as DatasetVersionDetailProcessingStage,\n};\n\nexport interface DatasetVersionDetail {\n /** The owner of the Dataset version\n */\n owner: string;\n /** The source of the Dataset - typically the name of the Dataset that was uploaded or a URL reference\n */\n source_ref: string;\n /** The date and time the Dataset was uploaded (an ISO-8601 formatted string in UTC)\n */\n published: string;\n /** The Dataset MIME type\n */\n type: string;\n /** The filename of the Dataset\n */\n file_name: string;\n /** The list of\n */\n project_files: DatasetVersionProjectFile[];\n /** The list of Project's the Dataset is attached to\n */\n projects: string[];\n /** Any extra variables passed in during the upload and handed to the format-support container.\n */\n format_extra_variables?: string;\n /** The version of the dataset\n */\n version: number;\n /** The processign stage. When loading a Dataset it typically passes through `COPYING`, `FORMATTING` and `LOADING` stages before reaching `DONE` (or `FAILED`). 
A Dataset can be used (and deleted) as long as it's passed the `FORMATTING` stage\n */\n processing_stage: DatasetVersionDetailProcessingStage;\n /** If this is a Deleted dataset this is the date and time the dataset was deleted (an ISO-8601 formatted string in UTC)\n */\n deleted?: string;\n /** If this is a Deleted dataset this is the username of the user that deleted the Dataset\n */\n deleting_user?: string;\n /** The dictionary of label/value pairs\n */\n labels?: DatasetVersionDetailLabels;\n /** The size, in bytes, of the formatted Dataset\n */\n size?: number;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n}\n\nexport interface DatasetSummary {\n /** The Dataset ID\n */\n dataset_id: string;\n /** The list of editors\n */\n editors: string[];\n /** The set of separate versions of the Dataset\n */\n versions: DatasetVersionSummary[];\n}\n\nexport interface DatasetDetail {\n /** The Dataset ID\n */\n dataset_id: string;\n /** The list of editors\n */\n editors: string[];\n /** The owner of the Dataset\n */\n owner: string;\n /** The set of separate versions of the Dataset\n */\n versions: DatasetVersionDetail[];\n}\n\nexport interface ApplicationSummary {\n /** The application name, the value of the Kubernetes **Custom Resource Definition** `spec.names.kind` property\n */\n kind: string;\n /** The application group\n */\n group?: string;\n /** The application unique reference, the value of the Kubernetes **Custom Resource Definition** `metadata.name` property\n */\n application_id: string;\n}\n\n/**\n * The REST method used. GET methods are not logged\n\n */\nexport type ApiLogDetailMethod = \"DELETE\" | \"PATCH\" | \"POST\" | \"PUT\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const ApiLogDetailMethod = {\n DELETE: \"DELETE\" as ApiLogDetailMethod,\n PATCH: \"PATCH\" as ApiLogDetailMethod,\n POST: \"POST\" as ApiLogDetailMethod,\n PUT: \"PUT\" as ApiLogDetailMethod,\n};\n\nexport interface ApiLogDetail {\n /** The REST method used. GET methods are not logged\n */\n method: ApiLogDetailMethod;\n /** The API path used, e.g. `/dataset`\n */\n path: string;\n /** The date/time the API call began\n */\n began: string;\n /** The REST method approximate execution time (nS) in a humanised form, where 7969400 is presented as 7,969,400\n */\n duration_ns?: string;\n /** The HTTP response status code\n */\n status_code?: number;\n /** A JSON string representign the in-query properties used in the call\n */\n params?: string;\n /** A JSON string representing the in-body properties used in the call\n */\n body?: string;\n /** Not all API calls using your user ID may have been executed by you, In cases where an administrator has executed a REST API call on your behalf, their user ID will be revealed using this property\n */\n impersonator?: string;\n}\n\nexport interface VersionGetResponse {\n /** The Data Manager version. This is guaranteed to be a valid semantic version for official (tagged) images. 
The version value format for unofficial images is a string but otherwise undefined\n */\n version: string;\n}\n\nexport interface UsersGetResponse {\n /** A list of Users that have used the Data Manager\n */\n users: UserSummary[];\n}\n\nexport interface UserApiLogGetResponse {\n /** A list of API loc call records, with the oldest record first in the list\n */\n api_log: ApiLogDetail[];\n}\n\nexport type UserAccountGetResponse = UserAccountDetail;\n\nexport interface TypesGetResponse {\n /** A list of availabel MIME types\n */\n types: TypeSummary[];\n}\n\nexport interface TasksGetResponse {\n /** A list of Tasks\n */\n tasks: TaskSummary[];\n}\n\n/**\n * The purpose of the task. Tasks are typically executed for the purpose of Dataset processing, attaching as Project Files or Application Instance execution. Other fields in this object are only valid if the purpose is known (i.e. is not `UNKNOWN`)\n\n */\nexport type TaskGetResponsePurpose =\n | \"DATASET\"\n | \"FILE\"\n | \"INSTANCE\"\n | \"PROJECT\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TaskGetResponsePurpose = {\n DATASET: \"DATASET\" as TaskGetResponsePurpose,\n FILE: \"FILE\" as TaskGetResponsePurpose,\n INSTANCE: \"INSTANCE\" as TaskGetResponsePurpose,\n PROJECT: \"PROJECT\" as TaskGetResponsePurpose,\n};\n\nexport interface TaskGetResponse {\n /** The purpose of the task. Tasks are typically executed for the purpose of Dataset processing, attaching as Project Files or Application Instance execution. Other fields in this object are only valid if the purpose is known (i.e. is not `UNKNOWN`)\n */\n purpose: TaskGetResponsePurpose;\n /** The identity of the purpose, where available. This will be the Dataset UUID if the purpose of the task is/was for Dataset processing.\n */\n purpose_id: string;\n /** The version number, relating to the object under control. For Datasets this will be the Dataset version.\n */\n purpose_version?: number;\n /** Is the Task Purpose is `INSTANCE`, and the instance was given a `specification` the specification can be found here. For **Applications** the specification is returned verbatim.\n */\n instance_specification?: string;\n /** If a container image is launched by the task the image name is available here\n */\n image?: string;\n /** The date and time the task was created\n */\n created: string;\n /** True if the task has run to completion. If the task finished successfully the `exit_code` will be zero.\n */\n done: boolean;\n /** True if the Task relates to an object removal, i.e. a DELETE\n */\n removal?: boolean;\n /** Present when `done` and zero if the task finished successfully.\n */\n exit_code?: number;\n /** A (possibly empty) list of application states, the oldest state occupies the first position in the list.\n */\n states?: TaskState[];\n /** A (possibly empty) list of application events. The oldest event occupies the first position in the list.\n */\n events?: TaskEvent[];\n}\n\nexport interface ServiceErrorsGetResponse {\n /** A list of service errors\n */\n service_errors: ServiceErrorSummary[];\n}\n\nexport interface ProjectsGetResponse {\n projects: ProjectDetail[];\n}\n\nexport interface ProjectPostResponse {\n /** The project identity\n */\n project_id: string;\n}\n\nexport type ProjectGetResponse = ProjectDetail;\n\nexport type ProjectDeleteResponse = TaskIdentity;\n\nexport interface JobsGetResponse {\n jobs: JobSummary[];\n}\n\n/**\n * The optional container image type. 
Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n\n */\nexport type JobGetResponseImageType = \"SIMPLE\" | \"NEXTFLOW\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const JobGetResponseImageType = {\n SIMPLE: \"SIMPLE\" as JobGetResponseImageType,\n NEXTFLOW: \"NEXTFLOW\" as JobGetResponseImageType,\n};\n\nexport interface JobGetResponse {\n /** The Job's unique ID\n */\n id: number;\n application: ApplicationSummary;\n /** The Job collection\n */\n collection: string;\n /** The Job name, unique within a given collection\n */\n job: string;\n /** The Job version\n */\n version: string;\n /** The Job command (usually encoded)\n */\n command: string;\n /** The Job command encoding\n */\n command_encoding: string;\n /** The name of the container image that houses the job\n */\n image_name: string;\n /** The container image tag\n */\n image_tag: string;\n /** The directory where the project volume will be mounted. The root path to the project files\n */\n image_project_directory: string;\n /** The directory used as the container image working directory (if defined)\n */\n image_working_directory?: unknown;\n /** The optional container image type. Typically a single-container 'SIMPLE' (where only one container runs) or a workflow where multiple container images can be spawned (like NEXTFLOW)\n */\n image_type?: JobGetResponseImageType;\n /** The Job's descriptive name\n */\n name: string;\n /** English description of the job\n */\n description?: string;\n /** A URL linking to the Job documentation\n */\n doc_url?: string;\n /** The Job's category\n */\n category?: string;\n /** The list of keywords assigned to the Job\n */\n keywords?: string[];\n variables?: JobVariables;\n}\n\nexport type InstanceTaskPurpose = \"CREATE\" | \"DELETE\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceTaskPurpose = {\n CREATE: \"CREATE\" as InstanceTaskPurpose,\n DELETE: \"DELETE\" as InstanceTaskPurpose,\n};\n\nexport interface InstanceTask {\n id: string;\n purpose: InstanceTaskPurpose;\n}\n\nexport interface InstancesGetResponse {\n instances: InstanceSummary[];\n}\n\n/**\n * The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparring to run, is `PENDING`. The instance is running when the phase is `RUNNING`. 
`COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccesfully.\n\n */\nexport type InstanceGetResponsePhase =\n | \"COMPLETED\"\n | \"CRASH_LOOP_BACKOFF\"\n | \"FAILED\"\n | \"IMAGE_PULL_BACKOFF\"\n | \"PENDING\"\n | \"RUNNING\"\n | \"SUCCEEDED\"\n | \"UNKNOWN\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceGetResponsePhase = {\n COMPLETED: \"COMPLETED\" as InstanceGetResponsePhase,\n CRASH_LOOP_BACKOFF: \"CRASH_LOOP_BACKOFF\" as InstanceGetResponsePhase,\n FAILED: \"FAILED\" as InstanceGetResponsePhase,\n IMAGE_PULL_BACKOFF: \"IMAGE_PULL_BACKOFF\" as InstanceGetResponsePhase,\n PENDING: \"PENDING\" as InstanceGetResponsePhase,\n RUNNING: \"RUNNING\" as InstanceGetResponsePhase,\n SUCCEEDED: \"SUCCEEDED\" as InstanceGetResponsePhase,\n UNKNOWN: \"UNKNOWN\" as InstanceGetResponsePhase,\n};\n\n/**\n * The type of Application, which can be a `job` or an `application`\n\n */\nexport type InstanceGetResponseApplicationType = \"APPLICATION\" | \"JOB\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const InstanceGetResponseApplicationType = {\n APPLICATION: \"APPLICATION\" as InstanceGetResponseApplicationType,\n JOB: \"JOB\" as InstanceGetResponseApplicationType,\n};\n\nexport interface InstanceGetResponse {\n /** The Project the Instance is running in\n */\n project_id: string;\n /** The application name\n */\n name: string;\n /** The type of Application, which can be a `job` or an `application`\n */\n application_type: InstanceGetResponseApplicationType;\n /** The application ID\n */\n application_id: string;\n /** The application version\n */\n application_version: string;\n /** The optional application specification. For **Applications** this is returned verbatim. For **Jobs** additional material will be found.\n */\n application_specification?: string;\n /** The code obtained from the Account Server\n */\n authorisation_code?: number;\n /** The application owner, the person who launched tha apllication and is the only user than can stop it.\n */\n owner: string;\n /** The application unique reference\n */\n launched: string;\n /** The phase of the application. This is a string, one of a limited number of values that are defined internally within the Data Manager.\nThe initial phase, indicating that the Instance is preparring to run, is `PENDING`. The instance is running when the phase is `RUNNING`. `COMPLETED` indicates the Instance has finished successfully and `FAILED` when it's finished but unsuccesfully.\n */\n phase: InstanceGetResponsePhase;\n /** The application endpoint\n */\n url?: string;\n /** The JSON string representation of the JobDefintion's outputs\n */\n outputs?: string;\n /** A list of Tasks related to the application. Tasks are responsible for creating and deleteing the instance. You should fins at least one, assuming the corresponding task has not been deleted.\n */\n tasks: InstanceTask[];\n}\n\nexport interface FilesGetResponse {\n /** The project\n */\n project_id: string;\n /** The project path\n */\n path: string;\n /** The dataset identity (not its name). 
A unique reference assigned automatically when uploaded\n */\n files: FilePathFile[];\n /** Sub-directories in the current path\n */\n paths: string[];\n}\n\nexport interface FilePostResponse {\n /** The Project File identity, assigned automatically when a Dataset is added to a Project\n */\n file_id: string;\n /** The name of the File that will appear in the Project\n */\n file_name: string;\n /** The path to the file in the Project, relative to the volume root (mount point). Files in the root of the project will have a path value of '/'\n */\n file_path: string;\n /** The File task identity. The task assigned to convert and attach the Dataset File to the Project\n */\n task_id: string;\n}\n\nexport interface InstancePostResponse {\n /** The application instance identity (not its name). Assigned automatically when created\n */\n instance_id: string;\n /** The instance task identity. The task assigned to process the instance\n */\n task_id: string;\n /** The instance expanded command. Applies only to Job instances.\n */\n command?: string;\n}\n\nexport type InstanceDeleteResponse = TaskIdentity;\n\nexport interface DatasetsGetResponse {\n datasets: DatasetSummary[];\n}\n\nexport type DatasetVersionsGetResponse = DatasetDetail;\n\nexport type DatasetVersionDeleteResponse = TaskIdentity;\n\n/**\n * The Metadata type (an object)\n\n */\nexport type DatasetSchemaGetResponseType = \"object\";\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const DatasetSchemaGetResponseType = {\n object: \"object\" as DatasetSchemaGetResponseType,\n};\n\nexport interface DatasetSchemaGetResponse {\n /** The Metadata title\n */\n title: string;\n /** The Metadata description\n */\n description: string;\n /** The Metadata type (an object)\n */\n type: DatasetSchemaGetResponseType;\n /** Required properties\n */\n required: string[];\n}\n\nexport interface DatasetMetaGetResponse {\n /** The Metadata title\n */\n dataset_name: string;\n /** The Metadata description\n */\n dataset_id: string;\n /** The Metadata type (an object)\n */\n description: string;\n /** The date and time of creation\n */\n created: string;\n /** The date and time it was last updated\n */\n last_updated: string;\n /** The user who created the Metadata\n */\n created_by: string;\n /** The Metadata version\n */\n metadata_version: string;\n /** The Metadata's annotations\n */\n annotations: unknown[];\n /** The Metadata's labels\n */\n labels: unknown[];\n}\n\nexport interface DatasetDigestGetResponse {\n /** The Dataset matching the digest\n */\n dataset_id: string;\n /** The Dataset version\n */\n dataset_version: number;\n}\n\nexport interface DatasetPutPostResponse {\n /** The dataset identity (not its name). Assigned automatically when uploaded\n */\n dataset_id: string;\n /** The dataset identity version. Assigned automatically when uploaded\n */\n dataset_version: number;\n /** The dataset task identity. The task assigned to process the dataset\n */\n task_id: string;\n}\n\nexport interface ApplicationsGetResponse {\n /** A list of installed appications, which are application-compliant Kubernetes \"operators\"\n */\n applications: ApplicationSummary[];\n}\n\nexport interface ApplicationGetResponse {\n /** The Application's unique ID\n */\n id: string;\n /** The application specification template. 
Used when creating application instances\n */\n template: string;\n /** A list of instances of the application\n */\n instances: string[];\n /** The application group\n */\n group: string;\n /** The name (kind) of the application\n */\n kind: string;\n /** The list of available versions\n */\n versions: string[];\n}\n\nexport type AdminUserPutResponse = UserAccountDetail;\n\nexport interface AdminJobManifestLoadPutResponse {\n /** True if successful, false otherwise\n */\n status: boolean;\n /** The number of Job Manifests inspected\n */\n manifests_inspected: number;\n /** The number of Job Definitions inspected\n */\n job_definitions_inspected: number;\n /** The number of Jobs inspected\n */\n jobs_inspected: number;\n}\n\nexport interface AccountServerGetNamespaceResponse {\n /** The configured Account Server namespace, which will be an empty string if one is not configured. The AS API is expected as the service `as-api` in this namespace.\n */\n namespace: string;\n /** The name of this Data Manager instance, used to distinguish itself on calls to the Account Server. Every Data Manager is deployed with a unique name, regardless of the Account Server that's being used.\n */\n data_manager_name: string;\n}\n\nexport interface DmError {\n /** Brief error text that can be presented to the user\n */\n error: string;\n}\n"],"mappings":";;;;;;;;AA6aO,IAAM,kCAAkC;AAAA,EAC7C,QAAQ;AAAA;AAkDH,IAAM,6BAA6B;AAAA,EACxC,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AAAA;AAgDD,IAAM,iBAAiB;AAAA,EAC5B,SAAS;AAAA,EACT,SAAS;AAAA,EACT,OAAO;AAAA,EACP,SAAS;AAAA,EACT,SAAS;AAAA;AA+BJ,IAAM,iBAAiB;AAAA,EAC5B,UAAU;AAAA,EACV,OAAO;AAAA,EACP,SAAS;AAAA,EACT,MAAM;AAAA,EACN,OAAO;AAAA;AAqBF,IAAM,8BAA8B;AAAA,EACzC,UAAU;AAAA,EACV,OAAO;AAAA,EACP,SAAS;AAAA;AAuGJ,IAAM,sBAAsB;AAAA,EACjC,QAAQ;AAAA,EACR,UAAU;AAAA;AAkDL,IAAM,8BAA8B;AAAA,EACzC,QAAQ;AAAA,EACR,UAAU;AAAA;AAmBL,IAAM,uBAAuB;AAAA,EAClC,WAAW;AAAA,EACX,oBAAoB;AAAA,EACpB,QAAQ;AAAA,EACR,oBAAoB;AAAA,EACpB,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AAAA;AAUJ,IAAM,iCAAiC;AAAA,EAC5C,aAAa;AAAA,EACb,KAAK;AAAA;AAwHA,IAAM,uCAAuC;AAAA,EAClD,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AAAA;AAyED,IAAM,sCAAsC;AAAA,EACjD,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,MAAM;AAAA;AAiGD,IAAM,qBAAqB;AAAA,EAChC,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,MAAM;AAAA,EACN,KAAK;AAAA;AAyEA,IAAM,yBAAyB;AAAA,EACpC,SAAS;AAAA,EACT,MAAM;AAAA,EACN,UAAU;AAAA,EACV,SAAS;AAAA;AAsEJ,IAAM,0BAA0B;AAAA,EACrC,QAAQ;AAAA,EACR,UAAU;AAAA;AA2DL,IAAM,sBAAsB;AAAA,EACjC,QAAQ;AAAA,EACR,QAAQ;AAAA;AA4BH,IAAM,2BAA2B;AAAA,EACtC,WAAW;AAAA,EACX,oBAAoB;AAAA,EACpB,QAAQ;AAAA,EACR,oBAAoB;AAAA,EACpB,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AAAA;AAUJ,IAAM,qCAAqC;AAAA,EAChD,aAAa;AAAA,EACb,KAAK;AAAA;AAyGA,IAAM,+BAA+B;AAAA,EAC1C,QAAQ;AAAA;","names":[]}
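The regenerated schemas above add the `InstanceSummaryPhase` and `InstanceGetResponsePhase` enums behind the new `phase` field on instance responses. A minimal sketch of how a consumer might test for a terminal phase (the root import and the `isSettled` helper name are assumptions for illustration; only the constant, type and field names appear in the generated schemas):

```ts
import { InstanceSummaryPhase } from "@squonk/data-manager-client"; // assumed root re-export
import type { InstanceSummary } from "@squonk/data-manager-client"; // assumed root re-export

// Treat an instance as settled once the Data Manager reports a terminal phase.
const isSettled = (instance: InstanceSummary): boolean =>
  [
    InstanceSummaryPhase.COMPLETED,
    InstanceSummaryPhase.SUCCEEDED,
    InstanceSummaryPhase.FAILED,
  ].includes(instance.phase);
```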
package/instance/instance.cjs
CHANGED
@@ -14,6 +14,12 @@ var createInstance = (instancePostBodyBody, options) => {
14  14      formData.append("application_version", instancePostBodyBody.application_version);
15  15      formData.append("project_id", instancePostBodyBody.project_id);
16  16      formData.append("as_name", instancePostBodyBody.as_name);
    17  +   if (instancePostBodyBody.callback_url !== void 0) {
    18  +     formData.append("callback_url", instancePostBodyBody.callback_url);
    19  +   }
    20  +   if (instancePostBodyBody.callback_context !== void 0) {
    21  +     formData.append("callback_context", instancePostBodyBody.callback_context);
    22  +   }
17  23      if (instancePostBodyBody.debug !== void 0) {
18  24        formData.append("debug", instancePostBodyBody.debug);
19  25      }
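This hunk forwards the new optional `callback_url` and `callback_context` body fields to the multipart form, appending each only when it is defined, in the same way `debug` and `specification` are handled. A hedged usage sketch (the subpath import and every ID, name and URL value below are placeholders; only `createInstance`, the `InstancePostBodyBody` fields and the response fields come from the package):

```ts
import { createInstance } from "@squonk/data-manager-client/instance"; // assumed subpath export
import type { InstancePostBodyBody } from "@squonk/data-manager-client"; // assumed root re-export

// Placeholder values throughout; real IDs come from the Data Manager API.
const body: InstancePostBodyBody = {
  application_id: "example-operator.example-group",
  application_version: "v1",
  project_id: "project-00000000-0000-0000-0000-000000000000",
  as_name: "example-job",
  // New in 0.7.1: appended to the FormData only when defined.
  callback_url: "https://example.org/dm-callback",
  callback_context: "run-42",
  specification: JSON.stringify({ collection: "im-test", job: "nop", version: "1.0.0" }),
};

// Assuming the default customInstance resolves to the POST /instance response.
createInstance(body).then(({ instance_id, task_id }) => {
  console.log("instance:", instance_id, "task:", task_id);
});
```

Launching with a callback means the Data Manager will PUT progress messages to `callback_url`, echoing `callback_context` in each payload, as described by the `InstancePostBodyBody` schema above.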
@@ -1 +1 @@
1       -
{"version":3,"sources":["../../src/instance/instance.ts"],"names":[],"mappings":";;;;;;AAUA;AAAA;AAAA;AAAA;AAgDO,IAAM,iBAAiB,CAC5B,sBACA,YACG;AACH,QAAM,WAAW,IAAI;AACrB,WAAS,OAAO,kBAAkB,qBAAqB;AACvD,WAAS,OACP,uBACA,qBAAqB;AAEvB,WAAS,OAAO,cAAc,qBAAqB;AACnD,WAAS,OAAO,WAAW,qBAAqB;AAChD,MAAI,qBAAqB,UAAU,QAAW;AAC5C,aAAS,OAAO,SAAS,qBAAqB;AAAA;AAEhD,MAAI,qBAAqB,kBAAkB,QAAW;AACpD,aAAS,OAAO,iBAAiB,qBAAqB;AAAA;AAGxD,SAAO,eACL,EAAE,KAAK,aAAa,QAAQ,QAAQ,MAAM,YAC1C;AAAA;AAIG,IAAM,oBAAoB,CAG/B,YAQI;AACJ,QAAM,EAAE,UAAU,iBAAiB,SAAS,mBAAmB,WAAW;AAE1E,QAAM,aAGF,CAAC,UAAU;AACb,UAAM,EAAE,SAAS,SAAS;AAE1B,WAAO,eAAe,MAAM;AAAA;AAG9B,SAAO,YAKL,YAAY;AAAA;AAOT,IAAM,eAAe,CAC1B,QACA,YACG;AACH,SAAO,eACL,EAAE,KAAK,aAAa,QAAQ,OAAO,UACnC;AAAA;AAIG,IAAM,0BAA0B,CAAC,WAAgC;AAAA,EACtE;AAAA,EACA,GAAI,SAAS,CAAC,UAAU;AAAA;AAGnB,IAAM,kBAAkB,CAI7B,QACA,YAQ2D;AAC3D,QAAM,EAAE,OAAO,cAAc,SAAS,mBAAmB,WAAW;AAEpE,QAAM,WAAW,8CAAc,aAAY,wBAAwB;AAEnE,QAAM,UAA+D,MACnE,aAAa,QAAQ;AAEvB,QAAM,QAAQ,SACZ,UACA,SACA;AAGF,SAAO;AAAA,IACL;AAAA,KACG;AAAA;AASA,IAAM,cAAc,CACzB,YACA,YACG;AACH,SAAO,eACL,EAAE,KAAK,aAAa,cAAc,QAAQ,SAC1C;AAAA;AAIG,IAAM,yBAAyB,CAAC,eAAuB;AAAA,EAC5D,aAAa;AAAA;AAGR,IAAM,iBAAiB,CAI5B,YACA,YAI2D;AAC3D,QAAM,EAAE,OAAO,cAAc,SAAS,mBAAmB,WAAW;AAEpE,QAAM,WAAW,8CAAc,aAAY,uBAAuB;AAElE,QAAM,UAA8D,MAClE,YAAY,YAAY;AAE1B,QAAM,QAAQ,SACZ,UACA,SACA,iBAAE,SAAS,CAAC,CAAC,cAAe;AAG9B,SAAO;AAAA,IACL;AAAA,KACG;AAAA;AAWA,IAAM,oBAAoB,CAC/B,YACA,YACG;AACH,SAAO,eACL,EAAE,KAAK,aAAa,cAAc,QAAQ,YAC1C;AAAA;AAIG,IAAM,uBAAuB,CAGlC,YAQI;AACJ,QAAM,EAAE,UAAU,iBAAiB,SAAS,mBAAmB,WAAW;AAE1E,QAAM,aAGF,CAAC,UAAU;AACb,UAAM,EAAE,eAAe,SAAS;AAEhC,WAAO,kBAAkB,YAAY;AAAA;AAGvC,SAAO,YAKL,YAAY;AAAA;AAST,IAAM,gBAAgB,CAC3B,YACA,QACA,YACG;AACH,SAAO,eACL,EAAE,KAAK,aAAa,cAAc,QAAQ,SAAS,UACnD;AAAA;AAIG,IAAM,mBAAmB,CAG9B,YAQI;AACJ,QAAM,EAAE,UAAU,iBAAiB,SAAS,mBAAmB,WAAW;AAE1E,QAAM,aAGF,CAAC,UAAU;AACb,UAAM,EAAE,YAAY,WAAW,SAAS;AAExC,WAAO,cAAc,YAAY,QAAQ;AAAA;AAG3C,SAAO,YAKL,YAAY;AAAA","sourcesContent":["/**\n * Generated by orval v6.6.0 🍺\n * Do not edit manually.\n * Dataset Manager API\n * The Dataset Manager API service.\n\nA service that allows *registered* users to make **Datasets** and associated **Metadata** available to **Applications** and **Jobs** using **Projects** and **Files**.\n\n * OpenAPI spec version: 0.7\n */\nimport {\n useQuery,\n useMutation,\n UseQueryOptions,\n UseMutationOptions,\n QueryFunction,\n MutationFunction,\n UseQueryResult,\n QueryKey,\n} from \"react-query\";\nimport type {\n InstancePostResponse,\n DmError,\n InstancePostBodyBody,\n InstancesGetResponse,\n GetInstancesParams,\n InstanceGetResponse,\n InstanceDeleteResponse,\n PatchInstanceParams,\n} from \"../data-manager-api.schemas\";\nimport { customInstance, ErrorType } from \".././custom-instance\";\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\ntype AsyncReturnType<T extends (...args: any) => Promise<any>> = T extends (\n ...args: any\n) => Promise<infer R>\n ? R\n : any;\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\ntype SecondParameter<T extends (...args: any) => any> = T extends (\n config: any,\n args: infer P\n) => any\n ? P\n : never;\n\n/**\n * Launches a new Application or Job instance, returning an Instance and Task ID. The Task ID should be used against the `/task` endpoint to determine the availability of the the running instance.\n\nInstance behaviour is controlled using the `specification`. You will need to consult individual applications to determine what can be placed in the specification. 
Applications typically provide a `template` describing its **options**.\n\nAn Application instance is not Ready for use until the corresponding **TaskState** is `STARTED`.\n\nA Job instance typically runs to completion, reahcing the **TaskState** `SUCCESS` when successful and `FAILURE` is unsuccessul.\n\n * @summary Creates a new application instance\n */\nexport const createInstance = (\n instancePostBodyBody: InstancePostBodyBody,\n options?: SecondParameter<typeof customInstance>\n) => {\n const formData = new FormData();\n formData.append(\"application_id\", instancePostBodyBody.application_id);\n formData.append(\n \"application_version\",\n instancePostBodyBody.application_version\n );\n formData.append(\"project_id\", instancePostBodyBody.project_id);\n formData.append(\"as_name\", instancePostBodyBody.as_name);\n if (instancePostBodyBody.debug !== undefined) {\n formData.append(\"debug\", instancePostBodyBody.debug);\n }\n if (instancePostBodyBody.specification !== undefined) {\n formData.append(\"specification\", instancePostBodyBody.specification);\n }\n\n return customInstance<InstancePostResponse>(\n { url: `/instance`, method: \"post\", data: formData },\n options\n );\n};\n\nexport const useCreateInstance = <\n TError = ErrorType<void | DmError>,\n TContext = unknown\n>(options?: {\n mutation?: UseMutationOptions<\n AsyncReturnType<typeof createInstance>,\n TError,\n { data: InstancePostBodyBody },\n TContext\n >;\n request?: SecondParameter<typeof customInstance>;\n}) => {\n const { mutation: mutationOptions, request: requestOptions } = options || {};\n\n const mutationFn: MutationFunction<\n AsyncReturnType<typeof createInstance>,\n { data: InstancePostBodyBody }\n > = (props) => {\n const { data } = props || {};\n\n return createInstance(data, requestOptions);\n };\n\n return useMutation<\n AsyncReturnType<typeof createInstance>,\n TError,\n { data: InstancePostBodyBody },\n TContext\n >(mutationFn, mutationOptions);\n};\n/**\n * Returns a summary of all running instances. Insatnces can be running as an Application or as a Job. The response will contain an `application_type` field that is either `job` or `application`\n\n * @summary Get summary information about all application instances\n */\nexport const getInstances = (\n params?: GetInstancesParams,\n options?: SecondParameter<typeof customInstance>\n) => {\n return customInstance<InstancesGetResponse>(\n { url: `/instance`, method: \"get\", params },\n options\n );\n};\n\nexport const getGetInstancesQueryKey = (params?: GetInstancesParams) => [\n `/instance`,\n ...(params ? [params] : []),\n];\n\nexport const useGetInstances = <\n TData = AsyncReturnType<typeof getInstances>,\n TError = ErrorType<void | DmError>\n>(\n params?: GetInstancesParams,\n options?: {\n query?: UseQueryOptions<\n AsyncReturnType<typeof getInstances>,\n TError,\n TData\n >;\n request?: SecondParameter<typeof customInstance>;\n }\n): UseQueryResult<TData, TError> & { queryKey: QueryKey } => {\n const { query: queryOptions, request: requestOptions } = options || {};\n\n const queryKey = queryOptions?.queryKey ?? 
getGetInstancesQueryKey(params);\n\n const queryFn: QueryFunction<AsyncReturnType<typeof getInstances>> = () =>\n getInstances(params, requestOptions);\n\n const query = useQuery<AsyncReturnType<typeof getInstances>, TError, TData>(\n queryKey,\n queryFn,\n queryOptions\n );\n\n return {\n queryKey,\n ...query,\n };\n};\n\n/**\n * The instance information will be returned if available.\n\n * @summary Get detailed information about a specific instance\n */\nexport const getInstance = (\n instanceid: string,\n options?: SecondParameter<typeof customInstance>\n) => {\n return customInstance<InstanceGetResponse>(\n { url: `/instance/${instanceid}`, method: \"get\" },\n options\n );\n};\n\nexport const getGetInstanceQueryKey = (instanceid: string) => [\n `/instance/${instanceid}`,\n];\n\nexport const useGetInstance = <\n TData = AsyncReturnType<typeof getInstance>,\n TError = ErrorType<void | DmError>\n>(\n instanceid: string,\n options?: {\n query?: UseQueryOptions<AsyncReturnType<typeof getInstance>, TError, TData>;\n request?: SecondParameter<typeof customInstance>;\n }\n): UseQueryResult<TData, TError> & { queryKey: QueryKey } => {\n const { query: queryOptions, request: requestOptions } = options || {};\n\n const queryKey = queryOptions?.queryKey ?? getGetInstanceQueryKey(instanceid);\n\n const queryFn: QueryFunction<AsyncReturnType<typeof getInstance>> = () =>\n getInstance(instanceid, requestOptions);\n\n const query = useQuery<AsyncReturnType<typeof getInstance>, TError, TData>(\n queryKey,\n queryFn,\n { enabled: !!instanceid, ...queryOptions }\n );\n\n return {\n queryKey,\n ...query,\n };\n};\n\n/**\n * The Application or Job instance is terminated.\n\nYou must be the `owner` or an `editor` of the instance to delete it\n\n * @summary Delete an application instance\n */\nexport const terminateInstance = (\n instanceid: string,\n options?: SecondParameter<typeof customInstance>\n) => {\n return customInstance<InstanceDeleteResponse>(\n { url: `/instance/${instanceid}`, method: \"delete\" },\n options\n );\n};\n\nexport const useTerminateInstance = <\n TError = ErrorType<void | DmError>,\n TContext = unknown\n>(options?: {\n mutation?: UseMutationOptions<\n AsyncReturnType<typeof terminateInstance>,\n TError,\n { instanceid: string },\n TContext\n >;\n request?: SecondParameter<typeof customInstance>;\n}) => {\n const { mutation: mutationOptions, request: requestOptions } = options || {};\n\n const mutationFn: MutationFunction<\n AsyncReturnType<typeof terminateInstance>,\n { instanceid: string }\n > = (props) => {\n const { instanceid } = props || {};\n\n return terminateInstance(instanceid, requestOptions);\n };\n\n return useMutation<\n AsyncReturnType<typeof terminateInstance>,\n TError,\n { instanceid: string },\n TContext\n >(mutationFn, mutationOptions);\n};\n/**\n * The Application or Job instance is updated according to the patch parameters.\n\nYou must be the `owner` or an `editor` of the instance to patch it\n\n * @summary Update an application instance\n */\nexport const patchInstance = (\n instanceid: string,\n params?: PatchInstanceParams,\n options?: SecondParameter<typeof customInstance>\n) => {\n return customInstance<void>(\n { url: `/instance/${instanceid}`, method: \"patch\", params },\n options\n );\n};\n\nexport const usePatchInstance = <\n TError = ErrorType<DmError>,\n TContext = unknown\n>(options?: {\n mutation?: UseMutationOptions<\n AsyncReturnType<typeof patchInstance>,\n TError,\n { instanceid: string; params?: PatchInstanceParams },\n TContext\n >;\n 
request?: SecondParameter<typeof customInstance>;\n}) => {\n const { mutation: mutationOptions, request: requestOptions } = options || {};\n\n const mutationFn: MutationFunction<\n AsyncReturnType<typeof patchInstance>,\n { instanceid: string; params?: PatchInstanceParams }\n > = (props) => {\n const { instanceid, params } = props || {};\n\n return patchInstance(instanceid, params, requestOptions);\n };\n\n return useMutation<\n AsyncReturnType<typeof patchInstance>,\n TError,\n { instanceid: string; params?: PatchInstanceParams },\n TContext\n >(mutationFn, mutationOptions);\n};\n"]}
|
|
1
|
+
{"version":3,"sources":["../../src/instance/instance.ts"],"names":[],"mappings":";;;;;;AAUA;AAAA;AAAA;AAAA;AAgDO,IAAM,iBAAiB,CAC5B,sBACA,YACG;AACH,QAAM,WAAW,IAAI;AACrB,WAAS,OAAO,kBAAkB,qBAAqB;AACvD,WAAS,OACP,uBACA,qBAAqB;AAEvB,WAAS,OAAO,cAAc,qBAAqB;AACnD,WAAS,OAAO,WAAW,qBAAqB;AAChD,MAAI,qBAAqB,iBAAiB,QAAW;AACnD,aAAS,OAAO,gBAAgB,qBAAqB;AAAA;AAEvD,MAAI,qBAAqB,qBAAqB,QAAW;AACvD,aAAS,OAAO,oBAAoB,qBAAqB;AAAA;AAE3D,MAAI,qBAAqB,UAAU,QAAW;AAC5C,aAAS,OAAO,SAAS,qBAAqB;AAAA;AAEhD,MAAI,qBAAqB,kBAAkB,QAAW;AACpD,aAAS,OAAO,iBAAiB,qBAAqB;AAAA;AAGxD,SAAO,eACL,EAAE,KAAK,aAAa,QAAQ,QAAQ,MAAM,YAC1C;AAAA;AAIG,IAAM,oBAAoB,CAG/B,YAQI;AACJ,QAAM,EAAE,UAAU,iBAAiB,SAAS,mBAAmB,WAAW;AAE1E,QAAM,aAGF,CAAC,UAAU;AACb,UAAM,EAAE,SAAS,SAAS;AAE1B,WAAO,eAAe,MAAM;AAAA;AAG9B,SAAO,YAKL,YAAY;AAAA;AAOT,IAAM,eAAe,CAC1B,QACA,YACG;AACH,SAAO,eACL,EAAE,KAAK,aAAa,QAAQ,OAAO,UACnC;AAAA;AAIG,IAAM,0BAA0B,CAAC,WAAgC;AAAA,EACtE;AAAA,EACA,GAAI,SAAS,CAAC,UAAU;AAAA;AAGnB,IAAM,kBAAkB,CAI7B,QACA,YAQ2D;AAC3D,QAAM,EAAE,OAAO,cAAc,SAAS,mBAAmB,WAAW;AAEpE,QAAM,WAAW,8CAAc,aAAY,wBAAwB;AAEnE,QAAM,UAA+D,MACnE,aAAa,QAAQ;AAEvB,QAAM,QAAQ,SACZ,UACA,SACA;AAGF,SAAO;AAAA,IACL;AAAA,KACG;AAAA;AASA,IAAM,cAAc,CACzB,YACA,YACG;AACH,SAAO,eACL,EAAE,KAAK,aAAa,cAAc,QAAQ,SAC1C;AAAA;AAIG,IAAM,yBAAyB,CAAC,eAAuB;AAAA,EAC5D,aAAa;AAAA;AAGR,IAAM,iBAAiB,CAI5B,YACA,YAI2D;AAC3D,QAAM,EAAE,OAAO,cAAc,SAAS,mBAAmB,WAAW;AAEpE,QAAM,WAAW,8CAAc,aAAY,uBAAuB;AAElE,QAAM,UAA8D,MAClE,YAAY,YAAY;AAE1B,QAAM,QAAQ,SACZ,UACA,SACA,iBAAE,SAAS,CAAC,CAAC,cAAe;AAG9B,SAAO;AAAA,IACL;AAAA,KACG;AAAA;AAWA,IAAM,oBAAoB,CAC/B,YACA,YACG;AACH,SAAO,eACL,EAAE,KAAK,aAAa,cAAc,QAAQ,YAC1C;AAAA;AAIG,IAAM,uBAAuB,CAGlC,YAQI;AACJ,QAAM,EAAE,UAAU,iBAAiB,SAAS,mBAAmB,WAAW;AAE1E,QAAM,aAGF,CAAC,UAAU;AACb,UAAM,EAAE,eAAe,SAAS;AAEhC,WAAO,kBAAkB,YAAY;AAAA;AAGvC,SAAO,YAKL,YAAY;AAAA;AAST,IAAM,gBAAgB,CAC3B,YACA,QACA,YACG;AACH,SAAO,eACL,EAAE,KAAK,aAAa,cAAc,QAAQ,SAAS,UACnD;AAAA;AAIG,IAAM,mBAAmB,CAG9B,YAQI;AACJ,QAAM,EAAE,UAAU,iBAAiB,SAAS,mBAAmB,WAAW;AAE1E,QAAM,aAGF,CAAC,UAAU;AACb,UAAM,EAAE,YAAY,WAAW,SAAS;AAExC,WAAO,cAAc,YAAY,QAAQ;AAAA;AAG3C,SAAO,YAKL,YAAY;AAAA","sourcesContent":["/**\n * Generated by orval v6.6.0 🍺\n * Do not edit manually.\n * Dataset Manager API\n * The Dataset Manager API service.\n\nA service that allows *registered* users to make **Datasets** and associated **Metadata** available to **Applications** and **Jobs** using **Projects** and **Files**.\n\n * OpenAPI spec version: 0.7\n */\nimport {\n useQuery,\n useMutation,\n UseQueryOptions,\n UseMutationOptions,\n QueryFunction,\n MutationFunction,\n UseQueryResult,\n QueryKey,\n} from \"react-query\";\nimport type {\n InstancePostResponse,\n DmError,\n InstancePostBodyBody,\n InstancesGetResponse,\n GetInstancesParams,\n InstanceGetResponse,\n InstanceDeleteResponse,\n PatchInstanceParams,\n} from \"../data-manager-api.schemas\";\nimport { customInstance, ErrorType } from \".././custom-instance\";\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\ntype AsyncReturnType<T extends (...args: any) => Promise<any>> = T extends (\n ...args: any\n) => Promise<infer R>\n ? R\n : any;\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\ntype SecondParameter<T extends (...args: any) => any> = T extends (\n config: any,\n args: infer P\n) => any\n ? P\n : never;\n\n/**\n * Launches a new Application or Job instance, returning an Instance and Task ID. The Task ID should be used against the `/task` endpoint to determine the availability of the the running instance.\n\nInstance behaviour is controlled using the `specification`. 
You will need to consult individual applications to determine what can be placed in the specification. Applications typically provide a `template` describing its **options**.\n\nAn Application instance is not Ready for use until the corresponding **TaskState** is `STARTED`.\n\nA Job instance typically runs to completion, reahcing the **TaskState** `SUCCESS` when successful and `FAILURE` is unsuccessul.\n\n * @summary Creates a new application instance\n */\nexport const createInstance = (\n instancePostBodyBody: InstancePostBodyBody,\n options?: SecondParameter<typeof customInstance>\n) => {\n const formData = new FormData();\n formData.append(\"application_id\", instancePostBodyBody.application_id);\n formData.append(\n \"application_version\",\n instancePostBodyBody.application_version\n );\n formData.append(\"project_id\", instancePostBodyBody.project_id);\n formData.append(\"as_name\", instancePostBodyBody.as_name);\n if (instancePostBodyBody.callback_url !== undefined) {\n formData.append(\"callback_url\", instancePostBodyBody.callback_url);\n }\n if (instancePostBodyBody.callback_context !== undefined) {\n formData.append(\"callback_context\", instancePostBodyBody.callback_context);\n }\n if (instancePostBodyBody.debug !== undefined) {\n formData.append(\"debug\", instancePostBodyBody.debug);\n }\n if (instancePostBodyBody.specification !== undefined) {\n formData.append(\"specification\", instancePostBodyBody.specification);\n }\n\n return customInstance<InstancePostResponse>(\n { url: `/instance`, method: \"post\", data: formData },\n options\n );\n};\n\nexport const useCreateInstance = <\n TError = ErrorType<void | DmError>,\n TContext = unknown\n>(options?: {\n mutation?: UseMutationOptions<\n AsyncReturnType<typeof createInstance>,\n TError,\n { data: InstancePostBodyBody },\n TContext\n >;\n request?: SecondParameter<typeof customInstance>;\n}) => {\n const { mutation: mutationOptions, request: requestOptions } = options || {};\n\n const mutationFn: MutationFunction<\n AsyncReturnType<typeof createInstance>,\n { data: InstancePostBodyBody }\n > = (props) => {\n const { data } = props || {};\n\n return createInstance(data, requestOptions);\n };\n\n return useMutation<\n AsyncReturnType<typeof createInstance>,\n TError,\n { data: InstancePostBodyBody },\n TContext\n >(mutationFn, mutationOptions);\n};\n/**\n * Returns a summary of all running instances. Insatnces can be running as an Application or as a Job. The response will contain an `application_type` field that is either `job` or `application`\n\n * @summary Get summary information about all application instances\n */\nexport const getInstances = (\n params?: GetInstancesParams,\n options?: SecondParameter<typeof customInstance>\n) => {\n return customInstance<InstancesGetResponse>(\n { url: `/instance`, method: \"get\", params },\n options\n );\n};\n\nexport const getGetInstancesQueryKey = (params?: GetInstancesParams) => [\n `/instance`,\n ...(params ? [params] : []),\n];\n\nexport const useGetInstances = <\n TData = AsyncReturnType<typeof getInstances>,\n TError = ErrorType<void | DmError>\n>(\n params?: GetInstancesParams,\n options?: {\n query?: UseQueryOptions<\n AsyncReturnType<typeof getInstances>,\n TError,\n TData\n >;\n request?: SecondParameter<typeof customInstance>;\n }\n): UseQueryResult<TData, TError> & { queryKey: QueryKey } => {\n const { query: queryOptions, request: requestOptions } = options || {};\n\n const queryKey = queryOptions?.queryKey ?? 
getGetInstancesQueryKey(params);\n\n const queryFn: QueryFunction<AsyncReturnType<typeof getInstances>> = () =>\n getInstances(params, requestOptions);\n\n const query = useQuery<AsyncReturnType<typeof getInstances>, TError, TData>(\n queryKey,\n queryFn,\n queryOptions\n );\n\n return {\n queryKey,\n ...query,\n };\n};\n\n/**\n * The instance information will be returned if available.\n\n * @summary Get detailed information about a specific instance\n */\nexport const getInstance = (\n instanceid: string,\n options?: SecondParameter<typeof customInstance>\n) => {\n return customInstance<InstanceGetResponse>(\n { url: `/instance/${instanceid}`, method: \"get\" },\n options\n );\n};\n\nexport const getGetInstanceQueryKey = (instanceid: string) => [\n `/instance/${instanceid}`,\n];\n\nexport const useGetInstance = <\n TData = AsyncReturnType<typeof getInstance>,\n TError = ErrorType<void | DmError>\n>(\n instanceid: string,\n options?: {\n query?: UseQueryOptions<AsyncReturnType<typeof getInstance>, TError, TData>;\n request?: SecondParameter<typeof customInstance>;\n }\n): UseQueryResult<TData, TError> & { queryKey: QueryKey } => {\n const { query: queryOptions, request: requestOptions } = options || {};\n\n const queryKey = queryOptions?.queryKey ?? getGetInstanceQueryKey(instanceid);\n\n const queryFn: QueryFunction<AsyncReturnType<typeof getInstance>> = () =>\n getInstance(instanceid, requestOptions);\n\n const query = useQuery<AsyncReturnType<typeof getInstance>, TError, TData>(\n queryKey,\n queryFn,\n { enabled: !!instanceid, ...queryOptions }\n );\n\n return {\n queryKey,\n ...query,\n };\n};\n\n/**\n * The Application or Job instance is terminated.\n\nYou must be the `owner` or an `editor` of the instance to delete it\n\n * @summary Delete an application instance\n */\nexport const terminateInstance = (\n instanceid: string,\n options?: SecondParameter<typeof customInstance>\n) => {\n return customInstance<InstanceDeleteResponse>(\n { url: `/instance/${instanceid}`, method: \"delete\" },\n options\n );\n};\n\nexport const useTerminateInstance = <\n TError = ErrorType<void | DmError>,\n TContext = unknown\n>(options?: {\n mutation?: UseMutationOptions<\n AsyncReturnType<typeof terminateInstance>,\n TError,\n { instanceid: string },\n TContext\n >;\n request?: SecondParameter<typeof customInstance>;\n}) => {\n const { mutation: mutationOptions, request: requestOptions } = options || {};\n\n const mutationFn: MutationFunction<\n AsyncReturnType<typeof terminateInstance>,\n { instanceid: string }\n > = (props) => {\n const { instanceid } = props || {};\n\n return terminateInstance(instanceid, requestOptions);\n };\n\n return useMutation<\n AsyncReturnType<typeof terminateInstance>,\n TError,\n { instanceid: string },\n TContext\n >(mutationFn, mutationOptions);\n};\n/**\n * The Application or Job instance is updated according to the patch parameters.\n\nYou must be the `owner` or an `editor` of the instance to patch it\n\n * @summary Update an application instance\n */\nexport const patchInstance = (\n instanceid: string,\n params?: PatchInstanceParams,\n options?: SecondParameter<typeof customInstance>\n) => {\n return customInstance<void>(\n { url: `/instance/${instanceid}`, method: \"patch\", params },\n options\n );\n};\n\nexport const usePatchInstance = <\n TError = ErrorType<DmError>,\n TContext = unknown\n>(options?: {\n mutation?: UseMutationOptions<\n AsyncReturnType<typeof patchInstance>,\n TError,\n { instanceid: string; params?: PatchInstanceParams },\n TContext\n >;\n 
request?: SecondParameter<typeof customInstance>;\n}) => {\n const { mutation: mutationOptions, request: requestOptions } = options || {};\n\n const mutationFn: MutationFunction<\n AsyncReturnType<typeof patchInstance>,\n { instanceid: string; params?: PatchInstanceParams }\n > = (props) => {\n const { instanceid, params } = props || {};\n\n return patchInstance(instanceid, params, requestOptions);\n };\n\n return useMutation<\n AsyncReturnType<typeof patchInstance>,\n TError,\n { instanceid: string; params?: PatchInstanceParams },\n TContext\n >(mutationFn, mutationOptions);\n};\n"]}
|
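The regenerated `instance.cjs.map` above shows that `createInstance` now accepts two optional form fields, `callback_url` and `callback_context`. A minimal sketch of a direct call follows; the `/instance` subpath import and every literal value are illustrative assumptions, not part of this diff.

```ts
// Sketch only: assumes an environment with a global FormData (browser or Node 18+)
// and that the package exposes an `instance` subpath entry point.
import { createInstance } from "@squonk/data-manager-client/instance";

export const launchWithCallback = async () => {
  // All literal values below are placeholders for illustration.
  const response = await createInstance({
    application_id: "application-id",
    application_version: "v1",
    project_id: "project-id",
    as_name: "my-instance",
    callback_url: "https://example.com/dm-callback", // new optional field in 0.7.1
    callback_context: "run-42",                      // new optional field in 0.7.1
    specification: JSON.stringify({ /* application-specific options */ }),
  });
  // The endpoint description says an instance and task id are returned;
  // see InstancePostResponse for the exact field names.
  return response;
};
```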
package/instance/instance.d.ts
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import {
|
|
1
|
+
import { L as InstancePostBodyBody, bb as customInstance, aU as InstancePostResponse, bc as ErrorType, b7 as DmError, a as GetInstancesParams, aO as InstancesGetResponse, aR as InstanceGetResponse, a2 as TaskIdentity, P as PatchInstanceParams } from '../custom-instance-eb1ebf45';
|
|
2
2
|
import * as react_query from 'react-query';
|
|
3
3
|
import { UseMutationOptions, UseQueryOptions, QueryKey, UseQueryResult } from 'react-query';
|
|
4
4
|
import 'axios';
|
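The declaration file now sources its types from the shared `custom-instance-eb1ebf45` chunk, but the generated hook signatures are unchanged for consumers. A hedged sketch of reading a single instance through `useGetInstance`; the subpath import is an assumption and the surrounding React/react-query provider wiring is omitted.

```ts
// Sketch only; the subpath import is assumed.
import { useGetInstance } from "@squonk/data-manager-client/instance";

// Derives a display string from the generated query hook.
export const useInstanceStatus = (instanceId: string): string => {
  // The generated hook disables the request while instanceId is empty.
  const { data, isLoading, isError } = useGetInstance(instanceId);

  if (isLoading) return "loading";
  if (isError) return "failed to load instance";

  // The response shape is InstanceGetResponse; stringified here for brevity.
  return JSON.stringify(data);
};
```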
package/instance/instance.js
CHANGED
|
@@ -14,6 +14,12 @@ var createInstance = (instancePostBodyBody, options) => {
|
|
|
14
14
|
formData.append("application_version", instancePostBodyBody.application_version);
|
|
15
15
|
formData.append("project_id", instancePostBodyBody.project_id);
|
|
16
16
|
formData.append("as_name", instancePostBodyBody.as_name);
|
|
17
|
+
if (instancePostBodyBody.callback_url !== void 0) {
|
|
18
|
+
formData.append("callback_url", instancePostBodyBody.callback_url);
|
|
19
|
+
}
|
|
20
|
+
if (instancePostBodyBody.callback_context !== void 0) {
|
|
21
|
+
formData.append("callback_context", instancePostBodyBody.callback_context);
|
|
22
|
+
}
|
|
17
23
|
if (instancePostBodyBody.debug !== void 0) {
|
|
18
24
|
formData.append("debug", instancePostBodyBody.debug);
|
|
19
25
|
}
|
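The compiled `instance.js` gains the same optional `callback_url`/`callback_context` handling. A hedged sketch of driving it through the generated `useCreateInstance` mutation; the subpath import and all literal values are placeholders.

```ts
// Sketch only; must run inside a React tree with a react-query QueryClientProvider.
import { useCreateInstance } from "@squonk/data-manager-client/instance";

export const useLaunchInstance = () => {
  const { mutate } = useCreateInstance({
    mutation: {
      onSuccess: (response) => {
        // The endpoint returns an instance and task id; see InstancePostResponse.
        console.log("instance created", response);
      },
    },
  });

  return () =>
    mutate({
      data: {
        application_id: "application-id",
        application_version: "v1",
        project_id: "project-id",
        as_name: "my-job",
        callback_url: "https://example.com/dm-callback", // new in 0.7.1
        callback_context: "run-42",                      // new in 0.7.1
        specification: JSON.stringify({ /* application-specific options */ }),
      },
    });
};
```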
package/instance/instance.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../../src/instance/instance.ts"],"sourcesContent":["/**\n * Generated by orval v6.6.0 🍺\n * Do not edit manually.\n * Dataset Manager API\n * The Dataset Manager API service.\n\nA service that allows *registered* users to make **Datasets** and associated **Metadata** available to **Applications** and **Jobs** using **Projects** and **Files**.\n\n * OpenAPI spec version: 0.7\n */\nimport {\n useQuery,\n useMutation,\n UseQueryOptions,\n UseMutationOptions,\n QueryFunction,\n MutationFunction,\n UseQueryResult,\n QueryKey,\n} from \"react-query\";\nimport type {\n InstancePostResponse,\n DmError,\n InstancePostBodyBody,\n InstancesGetResponse,\n GetInstancesParams,\n InstanceGetResponse,\n InstanceDeleteResponse,\n PatchInstanceParams,\n} from \"../data-manager-api.schemas\";\nimport { customInstance, ErrorType } from \".././custom-instance\";\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\ntype AsyncReturnType<T extends (...args: any) => Promise<any>> = T extends (\n ...args: any\n) => Promise<infer R>\n ? R\n : any;\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\ntype SecondParameter<T extends (...args: any) => any> = T extends (\n config: any,\n args: infer P\n) => any\n ? P\n : never;\n\n/**\n * Launches a new Application or Job instance, returning an Instance and Task ID. The Task ID should be used against the `/task` endpoint to determine the availability of the the running instance.\n\nInstance behaviour is controlled using the `specification`. You will need to consult individual applications to determine what can be placed in the specification. Applications typically provide a `template` describing its **options**.\n\nAn Application instance is not Ready for use until the corresponding **TaskState** is `STARTED`.\n\nA Job instance typically runs to completion, reahcing the **TaskState** `SUCCESS` when successful and `FAILURE` is unsuccessul.\n\n * @summary Creates a new application instance\n */\nexport const createInstance = (\n instancePostBodyBody: InstancePostBodyBody,\n options?: SecondParameter<typeof customInstance>\n) => {\n const formData = new FormData();\n formData.append(\"application_id\", instancePostBodyBody.application_id);\n formData.append(\n \"application_version\",\n instancePostBodyBody.application_version\n );\n formData.append(\"project_id\", instancePostBodyBody.project_id);\n formData.append(\"as_name\", instancePostBodyBody.as_name);\n if (instancePostBodyBody.debug !== undefined) {\n formData.append(\"debug\", instancePostBodyBody.debug);\n }\n if (instancePostBodyBody.specification !== undefined) {\n formData.append(\"specification\", instancePostBodyBody.specification);\n }\n\n return customInstance<InstancePostResponse>(\n { url: `/instance`, method: \"post\", data: formData },\n options\n );\n};\n\nexport const useCreateInstance = <\n TError = ErrorType<void | DmError>,\n TContext = unknown\n>(options?: {\n mutation?: UseMutationOptions<\n AsyncReturnType<typeof createInstance>,\n TError,\n { data: InstancePostBodyBody },\n TContext\n >;\n request?: SecondParameter<typeof customInstance>;\n}) => {\n const { mutation: mutationOptions, request: requestOptions } = options || {};\n\n const mutationFn: MutationFunction<\n AsyncReturnType<typeof createInstance>,\n { data: InstancePostBodyBody }\n > = (props) => {\n const { data } = props || {};\n\n return createInstance(data, requestOptions);\n };\n\n return useMutation<\n AsyncReturnType<typeof createInstance>,\n TError,\n { data: 
InstancePostBodyBody },\n TContext\n >(mutationFn, mutationOptions);\n};\n/**\n * Returns a summary of all running instances. Insatnces can be running as an Application or as a Job. The response will contain an `application_type` field that is either `job` or `application`\n\n * @summary Get summary information about all application instances\n */\nexport const getInstances = (\n params?: GetInstancesParams,\n options?: SecondParameter<typeof customInstance>\n) => {\n return customInstance<InstancesGetResponse>(\n { url: `/instance`, method: \"get\", params },\n options\n );\n};\n\nexport const getGetInstancesQueryKey = (params?: GetInstancesParams) => [\n `/instance`,\n ...(params ? [params] : []),\n];\n\nexport const useGetInstances = <\n TData = AsyncReturnType<typeof getInstances>,\n TError = ErrorType<void | DmError>\n>(\n params?: GetInstancesParams,\n options?: {\n query?: UseQueryOptions<\n AsyncReturnType<typeof getInstances>,\n TError,\n TData\n >;\n request?: SecondParameter<typeof customInstance>;\n }\n): UseQueryResult<TData, TError> & { queryKey: QueryKey } => {\n const { query: queryOptions, request: requestOptions } = options || {};\n\n const queryKey = queryOptions?.queryKey ?? getGetInstancesQueryKey(params);\n\n const queryFn: QueryFunction<AsyncReturnType<typeof getInstances>> = () =>\n getInstances(params, requestOptions);\n\n const query = useQuery<AsyncReturnType<typeof getInstances>, TError, TData>(\n queryKey,\n queryFn,\n queryOptions\n );\n\n return {\n queryKey,\n ...query,\n };\n};\n\n/**\n * The instance information will be returned if available.\n\n * @summary Get detailed information about a specific instance\n */\nexport const getInstance = (\n instanceid: string,\n options?: SecondParameter<typeof customInstance>\n) => {\n return customInstance<InstanceGetResponse>(\n { url: `/instance/${instanceid}`, method: \"get\" },\n options\n );\n};\n\nexport const getGetInstanceQueryKey = (instanceid: string) => [\n `/instance/${instanceid}`,\n];\n\nexport const useGetInstance = <\n TData = AsyncReturnType<typeof getInstance>,\n TError = ErrorType<void | DmError>\n>(\n instanceid: string,\n options?: {\n query?: UseQueryOptions<AsyncReturnType<typeof getInstance>, TError, TData>;\n request?: SecondParameter<typeof customInstance>;\n }\n): UseQueryResult<TData, TError> & { queryKey: QueryKey } => {\n const { query: queryOptions, request: requestOptions } = options || {};\n\n const queryKey = queryOptions?.queryKey ?? 
getGetInstanceQueryKey(instanceid);\n\n const queryFn: QueryFunction<AsyncReturnType<typeof getInstance>> = () =>\n getInstance(instanceid, requestOptions);\n\n const query = useQuery<AsyncReturnType<typeof getInstance>, TError, TData>(\n queryKey,\n queryFn,\n { enabled: !!instanceid, ...queryOptions }\n );\n\n return {\n queryKey,\n ...query,\n };\n};\n\n/**\n * The Application or Job instance is terminated.\n\nYou must be the `owner` or an `editor` of the instance to delete it\n\n * @summary Delete an application instance\n */\nexport const terminateInstance = (\n instanceid: string,\n options?: SecondParameter<typeof customInstance>\n) => {\n return customInstance<InstanceDeleteResponse>(\n { url: `/instance/${instanceid}`, method: \"delete\" },\n options\n );\n};\n\nexport const useTerminateInstance = <\n TError = ErrorType<void | DmError>,\n TContext = unknown\n>(options?: {\n mutation?: UseMutationOptions<\n AsyncReturnType<typeof terminateInstance>,\n TError,\n { instanceid: string },\n TContext\n >;\n request?: SecondParameter<typeof customInstance>;\n}) => {\n const { mutation: mutationOptions, request: requestOptions } = options || {};\n\n const mutationFn: MutationFunction<\n AsyncReturnType<typeof terminateInstance>,\n { instanceid: string }\n > = (props) => {\n const { instanceid } = props || {};\n\n return terminateInstance(instanceid, requestOptions);\n };\n\n return useMutation<\n AsyncReturnType<typeof terminateInstance>,\n TError,\n { instanceid: string },\n TContext\n >(mutationFn, mutationOptions);\n};\n/**\n * The Application or Job instance is updated according to the patch parameters.\n\nYou must be the `owner` or an `editor` of the instance to patch it\n\n * @summary Update an application instance\n */\nexport const patchInstance = (\n instanceid: string,\n params?: PatchInstanceParams,\n options?: SecondParameter<typeof customInstance>\n) => {\n return customInstance<void>(\n { url: `/instance/${instanceid}`, method: \"patch\", params },\n options\n );\n};\n\nexport const usePatchInstance = <\n TError = ErrorType<DmError>,\n TContext = unknown\n>(options?: {\n mutation?: UseMutationOptions<\n AsyncReturnType<typeof patchInstance>,\n TError,\n { instanceid: string; params?: PatchInstanceParams },\n TContext\n >;\n request?: SecondParameter<typeof customInstance>;\n}) => {\n const { mutation: mutationOptions, request: requestOptions } = options || {};\n\n const mutationFn: MutationFunction<\n AsyncReturnType<typeof patchInstance>,\n { instanceid: string; params?: PatchInstanceParams }\n > = (props) => {\n const { instanceid, params } = props || {};\n\n return patchInstance(instanceid, params, requestOptions);\n };\n\n return useMutation<\n AsyncReturnType<typeof patchInstance>,\n TError,\n { instanceid: string; params?: PatchInstanceParams },\n TContext\n >(mutationFn, 
mutationOptions);\n};\n"],"mappings":";;;;;;AAUA;AAAA;AAAA;AAAA;AAgDO,IAAM,iBAAiB,CAC5B,sBACA,YACG;AACH,QAAM,WAAW,IAAI;AACrB,WAAS,OAAO,kBAAkB,qBAAqB;AACvD,WAAS,OACP,uBACA,qBAAqB;AAEvB,WAAS,OAAO,cAAc,qBAAqB;AACnD,WAAS,OAAO,WAAW,qBAAqB;AAChD,MAAI,qBAAqB,UAAU,QAAW;AAC5C,aAAS,OAAO,SAAS,qBAAqB;AAAA;AAEhD,MAAI,qBAAqB,kBAAkB,QAAW;AACpD,aAAS,OAAO,iBAAiB,qBAAqB;AAAA;AAGxD,SAAO,eACL,EAAE,KAAK,aAAa,QAAQ,QAAQ,MAAM,YAC1C;AAAA;AAIG,IAAM,oBAAoB,CAG/B,YAQI;AACJ,QAAM,EAAE,UAAU,iBAAiB,SAAS,mBAAmB,WAAW;AAE1E,QAAM,aAGF,CAAC,UAAU;AACb,UAAM,EAAE,SAAS,SAAS;AAE1B,WAAO,eAAe,MAAM;AAAA;AAG9B,SAAO,YAKL,YAAY;AAAA;AAOT,IAAM,eAAe,CAC1B,QACA,YACG;AACH,SAAO,eACL,EAAE,KAAK,aAAa,QAAQ,OAAO,UACnC;AAAA;AAIG,IAAM,0BAA0B,CAAC,WAAgC;AAAA,EACtE;AAAA,EACA,GAAI,SAAS,CAAC,UAAU;AAAA;AAGnB,IAAM,kBAAkB,CAI7B,QACA,YAQ2D;AAC3D,QAAM,EAAE,OAAO,cAAc,SAAS,mBAAmB,WAAW;AAEpE,QAAM,WAAW,8CAAc,aAAY,wBAAwB;AAEnE,QAAM,UAA+D,MACnE,aAAa,QAAQ;AAEvB,QAAM,QAAQ,SACZ,UACA,SACA;AAGF,SAAO;AAAA,IACL;AAAA,KACG;AAAA;AASA,IAAM,cAAc,CACzB,YACA,YACG;AACH,SAAO,eACL,EAAE,KAAK,aAAa,cAAc,QAAQ,SAC1C;AAAA;AAIG,IAAM,yBAAyB,CAAC,eAAuB;AAAA,EAC5D,aAAa;AAAA;AAGR,IAAM,iBAAiB,CAI5B,YACA,YAI2D;AAC3D,QAAM,EAAE,OAAO,cAAc,SAAS,mBAAmB,WAAW;AAEpE,QAAM,WAAW,8CAAc,aAAY,uBAAuB;AAElE,QAAM,UAA8D,MAClE,YAAY,YAAY;AAE1B,QAAM,QAAQ,SACZ,UACA,SACA,iBAAE,SAAS,CAAC,CAAC,cAAe;AAG9B,SAAO;AAAA,IACL;AAAA,KACG;AAAA;AAWA,IAAM,oBAAoB,CAC/B,YACA,YACG;AACH,SAAO,eACL,EAAE,KAAK,aAAa,cAAc,QAAQ,YAC1C;AAAA;AAIG,IAAM,uBAAuB,CAGlC,YAQI;AACJ,QAAM,EAAE,UAAU,iBAAiB,SAAS,mBAAmB,WAAW;AAE1E,QAAM,aAGF,CAAC,UAAU;AACb,UAAM,EAAE,eAAe,SAAS;AAEhC,WAAO,kBAAkB,YAAY;AAAA;AAGvC,SAAO,YAKL,YAAY;AAAA;AAST,IAAM,gBAAgB,CAC3B,YACA,QACA,YACG;AACH,SAAO,eACL,EAAE,KAAK,aAAa,cAAc,QAAQ,SAAS,UACnD;AAAA;AAIG,IAAM,mBAAmB,CAG9B,YAQI;AACJ,QAAM,EAAE,UAAU,iBAAiB,SAAS,mBAAmB,WAAW;AAE1E,QAAM,aAGF,CAAC,UAAU;AACb,UAAM,EAAE,YAAY,WAAW,SAAS;AAExC,WAAO,cAAc,YAAY,QAAQ;AAAA;AAG3C,SAAO,YAKL,YAAY;AAAA;","names":[]}
|
|
1
|
+
{"version":3,"sources":["../../src/instance/instance.ts"],"sourcesContent":["/**\n * Generated by orval v6.6.0 🍺\n * Do not edit manually.\n * Dataset Manager API\n * The Dataset Manager API service.\n\nA service that allows *registered* users to make **Datasets** and associated **Metadata** available to **Applications** and **Jobs** using **Projects** and **Files**.\n\n * OpenAPI spec version: 0.7\n */\nimport {\n useQuery,\n useMutation,\n UseQueryOptions,\n UseMutationOptions,\n QueryFunction,\n MutationFunction,\n UseQueryResult,\n QueryKey,\n} from \"react-query\";\nimport type {\n InstancePostResponse,\n DmError,\n InstancePostBodyBody,\n InstancesGetResponse,\n GetInstancesParams,\n InstanceGetResponse,\n InstanceDeleteResponse,\n PatchInstanceParams,\n} from \"../data-manager-api.schemas\";\nimport { customInstance, ErrorType } from \".././custom-instance\";\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\ntype AsyncReturnType<T extends (...args: any) => Promise<any>> = T extends (\n ...args: any\n) => Promise<infer R>\n ? R\n : any;\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\ntype SecondParameter<T extends (...args: any) => any> = T extends (\n config: any,\n args: infer P\n) => any\n ? P\n : never;\n\n/**\n * Launches a new Application or Job instance, returning an Instance and Task ID. The Task ID should be used against the `/task` endpoint to determine the availability of the the running instance.\n\nInstance behaviour is controlled using the `specification`. You will need to consult individual applications to determine what can be placed in the specification. Applications typically provide a `template` describing its **options**.\n\nAn Application instance is not Ready for use until the corresponding **TaskState** is `STARTED`.\n\nA Job instance typically runs to completion, reahcing the **TaskState** `SUCCESS` when successful and `FAILURE` is unsuccessul.\n\n * @summary Creates a new application instance\n */\nexport const createInstance = (\n instancePostBodyBody: InstancePostBodyBody,\n options?: SecondParameter<typeof customInstance>\n) => {\n const formData = new FormData();\n formData.append(\"application_id\", instancePostBodyBody.application_id);\n formData.append(\n \"application_version\",\n instancePostBodyBody.application_version\n );\n formData.append(\"project_id\", instancePostBodyBody.project_id);\n formData.append(\"as_name\", instancePostBodyBody.as_name);\n if (instancePostBodyBody.callback_url !== undefined) {\n formData.append(\"callback_url\", instancePostBodyBody.callback_url);\n }\n if (instancePostBodyBody.callback_context !== undefined) {\n formData.append(\"callback_context\", instancePostBodyBody.callback_context);\n }\n if (instancePostBodyBody.debug !== undefined) {\n formData.append(\"debug\", instancePostBodyBody.debug);\n }\n if (instancePostBodyBody.specification !== undefined) {\n formData.append(\"specification\", instancePostBodyBody.specification);\n }\n\n return customInstance<InstancePostResponse>(\n { url: `/instance`, method: \"post\", data: formData },\n options\n );\n};\n\nexport const useCreateInstance = <\n TError = ErrorType<void | DmError>,\n TContext = unknown\n>(options?: {\n mutation?: UseMutationOptions<\n AsyncReturnType<typeof createInstance>,\n TError,\n { data: InstancePostBodyBody },\n TContext\n >;\n request?: SecondParameter<typeof customInstance>;\n}) => {\n const { mutation: mutationOptions, request: requestOptions } = options || {};\n\n const mutationFn: MutationFunction<\n 
AsyncReturnType<typeof createInstance>,\n { data: InstancePostBodyBody }\n > = (props) => {\n const { data } = props || {};\n\n return createInstance(data, requestOptions);\n };\n\n return useMutation<\n AsyncReturnType<typeof createInstance>,\n TError,\n { data: InstancePostBodyBody },\n TContext\n >(mutationFn, mutationOptions);\n};\n/**\n * Returns a summary of all running instances. Insatnces can be running as an Application or as a Job. The response will contain an `application_type` field that is either `job` or `application`\n\n * @summary Get summary information about all application instances\n */\nexport const getInstances = (\n params?: GetInstancesParams,\n options?: SecondParameter<typeof customInstance>\n) => {\n return customInstance<InstancesGetResponse>(\n { url: `/instance`, method: \"get\", params },\n options\n );\n};\n\nexport const getGetInstancesQueryKey = (params?: GetInstancesParams) => [\n `/instance`,\n ...(params ? [params] : []),\n];\n\nexport const useGetInstances = <\n TData = AsyncReturnType<typeof getInstances>,\n TError = ErrorType<void | DmError>\n>(\n params?: GetInstancesParams,\n options?: {\n query?: UseQueryOptions<\n AsyncReturnType<typeof getInstances>,\n TError,\n TData\n >;\n request?: SecondParameter<typeof customInstance>;\n }\n): UseQueryResult<TData, TError> & { queryKey: QueryKey } => {\n const { query: queryOptions, request: requestOptions } = options || {};\n\n const queryKey = queryOptions?.queryKey ?? getGetInstancesQueryKey(params);\n\n const queryFn: QueryFunction<AsyncReturnType<typeof getInstances>> = () =>\n getInstances(params, requestOptions);\n\n const query = useQuery<AsyncReturnType<typeof getInstances>, TError, TData>(\n queryKey,\n queryFn,\n queryOptions\n );\n\n return {\n queryKey,\n ...query,\n };\n};\n\n/**\n * The instance information will be returned if available.\n\n * @summary Get detailed information about a specific instance\n */\nexport const getInstance = (\n instanceid: string,\n options?: SecondParameter<typeof customInstance>\n) => {\n return customInstance<InstanceGetResponse>(\n { url: `/instance/${instanceid}`, method: \"get\" },\n options\n );\n};\n\nexport const getGetInstanceQueryKey = (instanceid: string) => [\n `/instance/${instanceid}`,\n];\n\nexport const useGetInstance = <\n TData = AsyncReturnType<typeof getInstance>,\n TError = ErrorType<void | DmError>\n>(\n instanceid: string,\n options?: {\n query?: UseQueryOptions<AsyncReturnType<typeof getInstance>, TError, TData>;\n request?: SecondParameter<typeof customInstance>;\n }\n): UseQueryResult<TData, TError> & { queryKey: QueryKey } => {\n const { query: queryOptions, request: requestOptions } = options || {};\n\n const queryKey = queryOptions?.queryKey ?? 
getGetInstanceQueryKey(instanceid);\n\n const queryFn: QueryFunction<AsyncReturnType<typeof getInstance>> = () =>\n getInstance(instanceid, requestOptions);\n\n const query = useQuery<AsyncReturnType<typeof getInstance>, TError, TData>(\n queryKey,\n queryFn,\n { enabled: !!instanceid, ...queryOptions }\n );\n\n return {\n queryKey,\n ...query,\n };\n};\n\n/**\n * The Application or Job instance is terminated.\n\nYou must be the `owner` or an `editor` of the instance to delete it\n\n * @summary Delete an application instance\n */\nexport const terminateInstance = (\n instanceid: string,\n options?: SecondParameter<typeof customInstance>\n) => {\n return customInstance<InstanceDeleteResponse>(\n { url: `/instance/${instanceid}`, method: \"delete\" },\n options\n );\n};\n\nexport const useTerminateInstance = <\n TError = ErrorType<void | DmError>,\n TContext = unknown\n>(options?: {\n mutation?: UseMutationOptions<\n AsyncReturnType<typeof terminateInstance>,\n TError,\n { instanceid: string },\n TContext\n >;\n request?: SecondParameter<typeof customInstance>;\n}) => {\n const { mutation: mutationOptions, request: requestOptions } = options || {};\n\n const mutationFn: MutationFunction<\n AsyncReturnType<typeof terminateInstance>,\n { instanceid: string }\n > = (props) => {\n const { instanceid } = props || {};\n\n return terminateInstance(instanceid, requestOptions);\n };\n\n return useMutation<\n AsyncReturnType<typeof terminateInstance>,\n TError,\n { instanceid: string },\n TContext\n >(mutationFn, mutationOptions);\n};\n/**\n * The Application or Job instance is updated according to the patch parameters.\n\nYou must be the `owner` or an `editor` of the instance to patch it\n\n * @summary Update an application instance\n */\nexport const patchInstance = (\n instanceid: string,\n params?: PatchInstanceParams,\n options?: SecondParameter<typeof customInstance>\n) => {\n return customInstance<void>(\n { url: `/instance/${instanceid}`, method: \"patch\", params },\n options\n );\n};\n\nexport const usePatchInstance = <\n TError = ErrorType<DmError>,\n TContext = unknown\n>(options?: {\n mutation?: UseMutationOptions<\n AsyncReturnType<typeof patchInstance>,\n TError,\n { instanceid: string; params?: PatchInstanceParams },\n TContext\n >;\n request?: SecondParameter<typeof customInstance>;\n}) => {\n const { mutation: mutationOptions, request: requestOptions } = options || {};\n\n const mutationFn: MutationFunction<\n AsyncReturnType<typeof patchInstance>,\n { instanceid: string; params?: PatchInstanceParams }\n > = (props) => {\n const { instanceid, params } = props || {};\n\n return patchInstance(instanceid, params, requestOptions);\n };\n\n return useMutation<\n AsyncReturnType<typeof patchInstance>,\n TError,\n { instanceid: string; params?: PatchInstanceParams },\n TContext\n >(mutationFn, 
mutationOptions);\n};\n"],"mappings":";;;;;;AAUA;AAAA;AAAA;AAAA;AAgDO,IAAM,iBAAiB,CAC5B,sBACA,YACG;AACH,QAAM,WAAW,IAAI;AACrB,WAAS,OAAO,kBAAkB,qBAAqB;AACvD,WAAS,OACP,uBACA,qBAAqB;AAEvB,WAAS,OAAO,cAAc,qBAAqB;AACnD,WAAS,OAAO,WAAW,qBAAqB;AAChD,MAAI,qBAAqB,iBAAiB,QAAW;AACnD,aAAS,OAAO,gBAAgB,qBAAqB;AAAA;AAEvD,MAAI,qBAAqB,qBAAqB,QAAW;AACvD,aAAS,OAAO,oBAAoB,qBAAqB;AAAA;AAE3D,MAAI,qBAAqB,UAAU,QAAW;AAC5C,aAAS,OAAO,SAAS,qBAAqB;AAAA;AAEhD,MAAI,qBAAqB,kBAAkB,QAAW;AACpD,aAAS,OAAO,iBAAiB,qBAAqB;AAAA;AAGxD,SAAO,eACL,EAAE,KAAK,aAAa,QAAQ,QAAQ,MAAM,YAC1C;AAAA;AAIG,IAAM,oBAAoB,CAG/B,YAQI;AACJ,QAAM,EAAE,UAAU,iBAAiB,SAAS,mBAAmB,WAAW;AAE1E,QAAM,aAGF,CAAC,UAAU;AACb,UAAM,EAAE,SAAS,SAAS;AAE1B,WAAO,eAAe,MAAM;AAAA;AAG9B,SAAO,YAKL,YAAY;AAAA;AAOT,IAAM,eAAe,CAC1B,QACA,YACG;AACH,SAAO,eACL,EAAE,KAAK,aAAa,QAAQ,OAAO,UACnC;AAAA;AAIG,IAAM,0BAA0B,CAAC,WAAgC;AAAA,EACtE;AAAA,EACA,GAAI,SAAS,CAAC,UAAU;AAAA;AAGnB,IAAM,kBAAkB,CAI7B,QACA,YAQ2D;AAC3D,QAAM,EAAE,OAAO,cAAc,SAAS,mBAAmB,WAAW;AAEpE,QAAM,WAAW,8CAAc,aAAY,wBAAwB;AAEnE,QAAM,UAA+D,MACnE,aAAa,QAAQ;AAEvB,QAAM,QAAQ,SACZ,UACA,SACA;AAGF,SAAO;AAAA,IACL;AAAA,KACG;AAAA;AASA,IAAM,cAAc,CACzB,YACA,YACG;AACH,SAAO,eACL,EAAE,KAAK,aAAa,cAAc,QAAQ,SAC1C;AAAA;AAIG,IAAM,yBAAyB,CAAC,eAAuB;AAAA,EAC5D,aAAa;AAAA;AAGR,IAAM,iBAAiB,CAI5B,YACA,YAI2D;AAC3D,QAAM,EAAE,OAAO,cAAc,SAAS,mBAAmB,WAAW;AAEpE,QAAM,WAAW,8CAAc,aAAY,uBAAuB;AAElE,QAAM,UAA8D,MAClE,YAAY,YAAY;AAE1B,QAAM,QAAQ,SACZ,UACA,SACA,iBAAE,SAAS,CAAC,CAAC,cAAe;AAG9B,SAAO;AAAA,IACL;AAAA,KACG;AAAA;AAWA,IAAM,oBAAoB,CAC/B,YACA,YACG;AACH,SAAO,eACL,EAAE,KAAK,aAAa,cAAc,QAAQ,YAC1C;AAAA;AAIG,IAAM,uBAAuB,CAGlC,YAQI;AACJ,QAAM,EAAE,UAAU,iBAAiB,SAAS,mBAAmB,WAAW;AAE1E,QAAM,aAGF,CAAC,UAAU;AACb,UAAM,EAAE,eAAe,SAAS;AAEhC,WAAO,kBAAkB,YAAY;AAAA;AAGvC,SAAO,YAKL,YAAY;AAAA;AAST,IAAM,gBAAgB,CAC3B,YACA,QACA,YACG;AACH,SAAO,eACL,EAAE,KAAK,aAAa,cAAc,QAAQ,SAAS,UACnD;AAAA;AAIG,IAAM,mBAAmB,CAG9B,YAQI;AACJ,QAAM,EAAE,UAAU,iBAAiB,SAAS,mBAAmB,WAAW;AAE1E,QAAM,aAGF,CAAC,UAAU;AACb,UAAM,EAAE,YAAY,WAAW,SAAS;AAExC,WAAO,cAAc,YAAY,QAAQ;AAAA;AAG3C,SAAO,YAKL,YAAY;AAAA;","names":[]}
|
package/job/job.d.ts
CHANGED
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import { UseQueryOptions, QueryKey, UseQueryResult } from 'react-query';
|
|
2
|
-
import {
|
|
2
|
+
import { bb as customInstance, aJ as JobsGetResponse, bc as ErrorType, b7 as DmError, aL as JobGetResponse } from '../custom-instance-eb1ebf45';
|
|
3
3
|
import 'axios';
|
|
4
4
|
|
|
5
5
|
/**
|
|
package/metadata/metadata.cjs
ADDED
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } }
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
var _chunk3DXYUDZHcjs = require('../chunk-3DXYUDZH.cjs');
|
|
5
|
+
|
|
6
|
+
// src/metadata/metadata.ts
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
var _reactquery = require('react-query');
|
|
11
|
+
var addMetadataVersion = (datasetid, datasetversion, datasetVersionMetaPostBodyBody, options) => {
|
|
12
|
+
const formData = new FormData();
|
|
13
|
+
if (datasetVersionMetaPostBodyBody.meta_properties !== void 0) {
|
|
14
|
+
formData.append("meta_properties", datasetVersionMetaPostBodyBody.meta_properties);
|
|
15
|
+
}
|
|
16
|
+
if (datasetVersionMetaPostBodyBody.annotations !== void 0) {
|
|
17
|
+
formData.append("annotations", datasetVersionMetaPostBodyBody.annotations);
|
|
18
|
+
}
|
|
19
|
+
return _chunk3DXYUDZHcjs.customInstance.call(void 0, {
|
|
20
|
+
url: `/dataset/${datasetid}/meta/${datasetversion}`,
|
|
21
|
+
method: "post",
|
|
22
|
+
data: formData
|
|
23
|
+
}, options);
|
|
24
|
+
};
|
|
25
|
+
var useAddMetadataVersion = (options) => {
|
|
26
|
+
const { mutation: mutationOptions, request: requestOptions } = options || {};
|
|
27
|
+
const mutationFn = (props) => {
|
|
28
|
+
const { datasetid, datasetversion, data } = props || {};
|
|
29
|
+
return addMetadataVersion(datasetid, datasetversion, data, requestOptions);
|
|
30
|
+
};
|
|
31
|
+
return _reactquery.useMutation.call(void 0, mutationFn, mutationOptions);
|
|
32
|
+
};
|
|
33
|
+
var getMetadataVersion = (datasetid, datasetversion, options) => {
|
|
34
|
+
return _chunk3DXYUDZHcjs.customInstance.call(void 0, { url: `/dataset/${datasetid}/meta/${datasetversion}`, method: "get" }, options);
|
|
35
|
+
};
|
|
36
|
+
var getGetMetadataVersionQueryKey = (datasetid, datasetversion) => [`/dataset/${datasetid}/meta/${datasetversion}`];
|
|
37
|
+
var useGetMetadataVersion = (datasetid, datasetversion, options) => {
|
|
38
|
+
const { query: queryOptions, request: requestOptions } = options || {};
|
|
39
|
+
const queryKey = _nullishCoalesce((queryOptions == null ? void 0 : queryOptions.queryKey), () => ( getGetMetadataVersionQueryKey(datasetid, datasetversion)));
|
|
40
|
+
const queryFn = () => getMetadataVersion(datasetid, datasetversion, requestOptions);
|
|
41
|
+
const query = _reactquery.useQuery.call(void 0, queryKey, queryFn, _chunk3DXYUDZHcjs.__spreadValues.call(void 0, {
|
|
42
|
+
enabled: !!(datasetid && datasetversion)
|
|
43
|
+
}, queryOptions));
|
|
44
|
+
return _chunk3DXYUDZHcjs.__spreadValues.call(void 0, {
|
|
45
|
+
queryKey
|
|
46
|
+
}, query);
|
|
47
|
+
};
|
|
48
|
+
var addMetadata = (datasetid, datasetMetaPostBodyBody, options) => {
|
|
49
|
+
const formData = new FormData();
|
|
50
|
+
if (datasetMetaPostBodyBody.meta_properties !== void 0) {
|
|
51
|
+
formData.append("meta_properties", datasetMetaPostBodyBody.meta_properties);
|
|
52
|
+
}
|
|
53
|
+
if (datasetMetaPostBodyBody.labels !== void 0) {
|
|
54
|
+
formData.append("labels", datasetMetaPostBodyBody.labels);
|
|
55
|
+
}
|
|
56
|
+
return _chunk3DXYUDZHcjs.customInstance.call(void 0, { url: `/dataset/${datasetid}/meta`, method: "post", data: formData }, options);
|
|
57
|
+
};
|
|
58
|
+
var useAddMetadata = (options) => {
|
|
59
|
+
const { mutation: mutationOptions, request: requestOptions } = options || {};
|
|
60
|
+
const mutationFn = (props) => {
|
|
61
|
+
const { datasetid, data } = props || {};
|
|
62
|
+
return addMetadata(datasetid, data, requestOptions);
|
|
63
|
+
};
|
|
64
|
+
return _reactquery.useMutation.call(void 0, mutationFn, mutationOptions);
|
|
65
|
+
};
|
|
66
|
+
var getMetadata = (datasetid, options) => {
|
|
67
|
+
return _chunk3DXYUDZHcjs.customInstance.call(void 0, { url: `/dataset/${datasetid}/meta`, method: "get" }, options);
|
|
68
|
+
};
|
|
69
|
+
var getGetMetadataQueryKey = (datasetid) => [
|
|
70
|
+
`/dataset/${datasetid}/meta`
|
|
71
|
+
];
|
|
72
|
+
var useGetMetadata = (datasetid, options) => {
|
|
73
|
+
const { query: queryOptions, request: requestOptions } = options || {};
|
|
74
|
+
const queryKey = _nullishCoalesce((queryOptions == null ? void 0 : queryOptions.queryKey), () => ( getGetMetadataQueryKey(datasetid)));
|
|
75
|
+
const queryFn = () => getMetadata(datasetid, requestOptions);
|
|
76
|
+
const query = _reactquery.useQuery.call(void 0, queryKey, queryFn, _chunk3DXYUDZHcjs.__spreadValues.call(void 0, { enabled: !!datasetid }, queryOptions));
|
|
77
|
+
return _chunk3DXYUDZHcjs.__spreadValues.call(void 0, {
|
|
78
|
+
queryKey
|
|
79
|
+
}, query);
|
|
80
|
+
};
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
exports.addMetadata = addMetadata; exports.addMetadataVersion = addMetadataVersion; exports.getGetMetadataQueryKey = getGetMetadataQueryKey; exports.getGetMetadataVersionQueryKey = getGetMetadataVersionQueryKey; exports.getMetadata = getMetadata; exports.getMetadataVersion = getMetadataVersion; exports.useAddMetadata = useAddMetadata; exports.useAddMetadataVersion = useAddMetadataVersion; exports.useGetMetadata = useGetMetadata; exports.useGetMetadataVersion = useGetMetadataVersion;
|
|
93
|
+
//# sourceMappingURL=metadata.cjs.map
|
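The new `metadata` module compiled above exposes dataset-level hooks. A hedged sketch of reading and updating dataset metadata; the subpath import, dataset id, and label payload are illustrative assumptions.

```ts
// Sketch only; must run inside a React tree with a react-query QueryClientProvider.
import { useAddMetadata, useGetMetadata } from "@squonk/data-manager-client/metadata";

export const useDatasetMetadata = (datasetId: string) => {
  // The query is disabled while datasetId is empty (enabled: !!datasetid in the hook).
  const metadata = useGetMetadata(datasetId);

  const { mutate } = useAddMetadata();

  const addLabels = () =>
    mutate({
      datasetid: datasetId,
      data: {
        // Labels are posted as a JSON string form field; see the
        // data-manager-metadata library for the supported label format.
        labels: JSON.stringify([]),
      },
    });

  return { metadata, addLabels };
};
```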
|
package/metadata/metadata.cjs.map
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/metadata/metadata.ts"],"names":[],"mappings":";;;;;;AAUA;AAAA;AAAA;AAAA;AAwCO,IAAM,qBAAqB,CAChC,WACA,gBACA,gCACA,YACG;AACH,QAAM,WAAW,IAAI;AACrB,MAAI,+BAA+B,oBAAoB,QAAW;AAChE,aAAS,OACP,mBACA,+BAA+B;AAAA;AAGnC,MAAI,+BAA+B,gBAAgB,QAAW;AAC5D,aAAS,OAAO,eAAe,+BAA+B;AAAA;AAGhE,SAAO,eACL;AAAA,IACE,KAAK,YAAY,kBAAkB;AAAA,IACnC,QAAQ;AAAA,IACR,MAAM;AAAA,KAER;AAAA;AAIG,IAAM,wBAAwB,CAGnC,YAYI;AACJ,QAAM,EAAE,UAAU,iBAAiB,SAAS,mBAAmB,WAAW;AAE1E,QAAM,aAOF,CAAC,UAAU;AACb,UAAM,EAAE,WAAW,gBAAgB,SAAS,SAAS;AAErD,WAAO,mBAAmB,WAAW,gBAAgB,MAAM;AAAA;AAG7D,SAAO,YASL,YAAY;AAAA;AAOT,IAAM,qBAAqB,CAChC,WACA,gBACA,YACG;AACH,SAAO,eACL,EAAE,KAAK,YAAY,kBAAkB,kBAAkB,QAAQ,SAC/D;AAAA;AAIG,IAAM,gCAAgC,CAC3C,WACA,mBACG,CAAC,YAAY,kBAAkB;AAE7B,IAAM,wBAAwB,CAInC,WACA,gBACA,YAQ2D;AAC3D,QAAM,EAAE,OAAO,cAAc,SAAS,mBAAmB,WAAW;AAEpE,QAAM,WACJ,8CAAc,aACd,8BAA8B,WAAW;AAE3C,QAAM,UAEF,MAAM,mBAAmB,WAAW,gBAAgB;AAExD,QAAM,QAAQ,SAIZ,UAAU,SAAS;AAAA,IACnB,SAAS,CAAC,CAAE,cAAa;AAAA,KACtB;AAGL,SAAO;AAAA,IACL;AAAA,KACG;AAAA;AAWA,IAAM,cAAc,CACzB,WACA,yBACA,YACG;AACH,QAAM,WAAW,IAAI;AACrB,MAAI,wBAAwB,oBAAoB,QAAW;AACzD,aAAS,OAAO,mBAAmB,wBAAwB;AAAA;AAE7D,MAAI,wBAAwB,WAAW,QAAW;AAChD,aAAS,OAAO,UAAU,wBAAwB;AAAA;AAGpD,SAAO,eACL,EAAE,KAAK,YAAY,kBAAkB,QAAQ,QAAQ,MAAM,YAC3D;AAAA;AAIG,IAAM,iBAAiB,CAG5B,YAQI;AACJ,QAAM,EAAE,UAAU,iBAAiB,SAAS,mBAAmB,WAAW;AAE1E,QAAM,aAGF,CAAC,UAAU;AACb,UAAM,EAAE,WAAW,SAAS,SAAS;AAErC,WAAO,YAAY,WAAW,MAAM;AAAA;AAGtC,SAAO,YAKL,YAAY;AAAA;AAOT,IAAM,cAAc,CACzB,WACA,YACG;AACH,SAAO,eACL,EAAE,KAAK,YAAY,kBAAkB,QAAQ,SAC7C;AAAA;AAIG,IAAM,yBAAyB,CAAC,cAAsB;AAAA,EAC3D,YAAY;AAAA;AAGP,IAAM,iBAAiB,CAI5B,WACA,YAI2D;AAC3D,QAAM,EAAE,OAAO,cAAc,SAAS,mBAAmB,WAAW;AAEpE,QAAM,WAAW,8CAAc,aAAY,uBAAuB;AAElE,QAAM,UAA8D,MAClE,YAAY,WAAW;AAEzB,QAAM,QAAQ,SACZ,UACA,SACA,iBAAE,SAAS,CAAC,CAAC,aAAc;AAG7B,SAAO;AAAA,IACL;AAAA,KACG;AAAA","sourcesContent":["/**\n * Generated by orval v6.6.0 🍺\n * Do not edit manually.\n * Dataset Manager API\n * The Dataset Manager API service.\n\nA service that allows *registered* users to make **Datasets** and associated **Metadata** available to **Applications** and **Jobs** using **Projects** and **Files**.\n\n * OpenAPI spec version: 0.7\n */\nimport {\n useQuery,\n useMutation,\n UseQueryOptions,\n UseMutationOptions,\n QueryFunction,\n MutationFunction,\n UseQueryResult,\n QueryKey,\n} from \"react-query\";\nimport type {\n DatasetMetaGetResponse,\n DmError,\n DatasetVersionMetaPostBodyBody,\n DatasetMetaPostBodyBody,\n} from \"../data-manager-api.schemas\";\nimport { customInstance, ErrorType } from \".././custom-instance\";\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\ntype AsyncReturnType<T extends (...args: any) => Promise<any>> = T extends (\n ...args: any\n) => Promise<infer R>\n ? R\n : any;\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\ntype SecondParameter<T extends (...args: any) => any> = T extends (\n config: any,\n args: infer P\n) => any\n ? P\n : never;\n\n/**\n * Update parameters or add new annotations of the specified type(s) and to the metadata for the version of the dataset.\nThe parameters are provided in a list in keyword/arguments\nThe annotations are provided in a list in JSON format. 
For details of the annotations that can be created, see the data-manager-metadata library,\n\n * @summary Update Metadata for the Dataset version\n */\nexport const addMetadataVersion = (\n datasetid: string,\n datasetversion: number,\n datasetVersionMetaPostBodyBody: DatasetVersionMetaPostBodyBody,\n options?: SecondParameter<typeof customInstance>\n) => {\n const formData = new FormData();\n if (datasetVersionMetaPostBodyBody.meta_properties !== undefined) {\n formData.append(\n \"meta_properties\",\n datasetVersionMetaPostBodyBody.meta_properties\n );\n }\n if (datasetVersionMetaPostBodyBody.annotations !== undefined) {\n formData.append(\"annotations\", datasetVersionMetaPostBodyBody.annotations);\n }\n\n return customInstance<DatasetMetaGetResponse>(\n {\n url: `/dataset/${datasetid}/meta/${datasetversion}`,\n method: \"post\",\n data: formData,\n },\n options\n );\n};\n\nexport const useAddMetadataVersion = <\n TError = ErrorType<DmError | void>,\n TContext = unknown\n>(options?: {\n mutation?: UseMutationOptions<\n AsyncReturnType<typeof addMetadataVersion>,\n TError,\n {\n datasetid: string;\n datasetversion: number;\n data: DatasetVersionMetaPostBodyBody;\n },\n TContext\n >;\n request?: SecondParameter<typeof customInstance>;\n}) => {\n const { mutation: mutationOptions, request: requestOptions } = options || {};\n\n const mutationFn: MutationFunction<\n AsyncReturnType<typeof addMetadataVersion>,\n {\n datasetid: string;\n datasetversion: number;\n data: DatasetVersionMetaPostBodyBody;\n }\n > = (props) => {\n const { datasetid, datasetversion, data } = props || {};\n\n return addMetadataVersion(datasetid, datasetversion, data, requestOptions);\n };\n\n return useMutation<\n AsyncReturnType<typeof addMetadataVersion>,\n TError,\n {\n datasetid: string;\n datasetversion: number;\n data: DatasetVersionMetaPostBodyBody;\n },\n TContext\n >(mutationFn, mutationOptions);\n};\n/**\n * Returns the metadata for a Dataset version in JSON format.\n\n * @summary Gets the metadata for a specific Dataset version\n */\nexport const getMetadataVersion = (\n datasetid: string,\n datasetversion: number,\n options?: SecondParameter<typeof customInstance>\n) => {\n return customInstance<DatasetMetaGetResponse>(\n { url: `/dataset/${datasetid}/meta/${datasetversion}`, method: \"get\" },\n options\n );\n};\n\nexport const getGetMetadataVersionQueryKey = (\n datasetid: string,\n datasetversion: number\n) => [`/dataset/${datasetid}/meta/${datasetversion}`];\n\nexport const useGetMetadataVersion = <\n TData = AsyncReturnType<typeof getMetadataVersion>,\n TError = ErrorType<void | DmError>\n>(\n datasetid: string,\n datasetversion: number,\n options?: {\n query?: UseQueryOptions<\n AsyncReturnType<typeof getMetadataVersion>,\n TError,\n TData\n >;\n request?: SecondParameter<typeof customInstance>;\n }\n): UseQueryResult<TData, TError> & { queryKey: QueryKey } => {\n const { query: queryOptions, request: requestOptions } = options || {};\n\n const queryKey =\n queryOptions?.queryKey ??\n getGetMetadataVersionQueryKey(datasetid, datasetversion);\n\n const queryFn: QueryFunction<\n AsyncReturnType<typeof getMetadataVersion>\n > = () => getMetadataVersion(datasetid, datasetversion, requestOptions);\n\n const query = useQuery<\n AsyncReturnType<typeof getMetadataVersion>,\n TError,\n TData\n >(queryKey, queryFn, {\n enabled: !!(datasetid && datasetversion),\n ...queryOptions,\n });\n\n return {\n queryKey,\n ...query,\n };\n};\n\n/**\n * Update default parameters or add new labels to the metadata for the 
dataset.\nThe parameters are provided in a list in keyword/arguments\nThe labels are provided in a list in JSON format. For details of the label format, see the data-manager-metadata library,\n\n * @summary Update Metadata for the Dataset\n */\nexport const addMetadata = (\n datasetid: string,\n datasetMetaPostBodyBody: DatasetMetaPostBodyBody,\n options?: SecondParameter<typeof customInstance>\n) => {\n const formData = new FormData();\n if (datasetMetaPostBodyBody.meta_properties !== undefined) {\n formData.append(\"meta_properties\", datasetMetaPostBodyBody.meta_properties);\n }\n if (datasetMetaPostBodyBody.labels !== undefined) {\n formData.append(\"labels\", datasetMetaPostBodyBody.labels);\n }\n\n return customInstance<DatasetMetaGetResponse>(\n { url: `/dataset/${datasetid}/meta`, method: \"post\", data: formData },\n options\n );\n};\n\nexport const useAddMetadata = <\n TError = ErrorType<DmError | void>,\n TContext = unknown\n>(options?: {\n mutation?: UseMutationOptions<\n AsyncReturnType<typeof addMetadata>,\n TError,\n { datasetid: string; data: DatasetMetaPostBodyBody },\n TContext\n >;\n request?: SecondParameter<typeof customInstance>;\n}) => {\n const { mutation: mutationOptions, request: requestOptions } = options || {};\n\n const mutationFn: MutationFunction<\n AsyncReturnType<typeof addMetadata>,\n { datasetid: string; data: DatasetMetaPostBodyBody }\n > = (props) => {\n const { datasetid, data } = props || {};\n\n return addMetadata(datasetid, data, requestOptions);\n };\n\n return useMutation<\n AsyncReturnType<typeof addMetadata>,\n TError,\n { datasetid: string; data: DatasetMetaPostBodyBody },\n TContext\n >(mutationFn, mutationOptions);\n};\n/**\n * Returns the metadata for a Dataset in JSON format.\n\n * @summary Gets the metadata for a specific Dataset\n */\nexport const getMetadata = (\n datasetid: string,\n options?: SecondParameter<typeof customInstance>\n) => {\n return customInstance<DatasetMetaGetResponse>(\n { url: `/dataset/${datasetid}/meta`, method: \"get\" },\n options\n );\n};\n\nexport const getGetMetadataQueryKey = (datasetid: string) => [\n `/dataset/${datasetid}/meta`,\n];\n\nexport const useGetMetadata = <\n TData = AsyncReturnType<typeof getMetadata>,\n TError = ErrorType<void | DmError>\n>(\n datasetid: string,\n options?: {\n query?: UseQueryOptions<AsyncReturnType<typeof getMetadata>, TError, TData>;\n request?: SecondParameter<typeof customInstance>;\n }\n): UseQueryResult<TData, TError> & { queryKey: QueryKey } => {\n const { query: queryOptions, request: requestOptions } = options || {};\n\n const queryKey = queryOptions?.queryKey ?? getGetMetadataQueryKey(datasetid);\n\n const queryFn: QueryFunction<AsyncReturnType<typeof getMetadata>> = () =>\n getMetadata(datasetid, requestOptions);\n\n const query = useQuery<AsyncReturnType<typeof getMetadata>, TError, TData>(\n queryKey,\n queryFn,\n { enabled: !!datasetid, ...queryOptions }\n );\n\n return {\n queryKey,\n ...query,\n };\n};\n"]}
|
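Per the source embedded in the map above, version-scoped metadata has its own pair of hooks. A hedged sketch, with placeholder identifiers and an empty annotations payload; the subpath import is an assumption.

```ts
// Sketch only; must run inside a React tree with a react-query QueryClientProvider.
import {
  useAddMetadataVersion,
  useGetMetadataVersion,
} from "@squonk/data-manager-client/metadata";

export const useVersionMetadata = (datasetId: string, version: number) => {
  // Disabled until both identifiers are truthy (enabled: !!(datasetid && datasetversion)).
  const metadata = useGetMetadataVersion(datasetId, version);

  const { mutate } = useAddMetadataVersion();

  const addAnnotations = () =>
    mutate({
      datasetid: datasetId,
      datasetversion: version,
      data: {
        // Annotations are posted as a JSON string; see the data-manager-metadata
        // library for the supported annotation types.
        annotations: JSON.stringify([]),
      },
    });

  return { metadata, addAnnotations };
};
```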