uzdu 1.0.14 → 1.0.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +21 -10
- package/lib/{chunk-KZ5VMI45.js → chunk-HPBHRAFV.js} +53 -10
- package/lib/chunk-NAE2ZQ34.js +364 -0
- package/lib/{chunk-2WNHNCUV.js → chunk-SO2QIMWR.js} +12 -5
- package/lib/uzdu-copy.js +1 -1
- package/lib/uzdu-download.js +2 -2
- package/lib/uzdu-metadata.js +1 -1
- package/lib/uzdu-unzip.js +1 -1
- package/lib/uzdu-upload.js +11 -327
- package/lib/uzdu-zip.js +1 -1
- package/lib/uzdu.d.ts +153 -3
- package/lib/uzdu.js +17 -4
- package/package.json +1 -1
package/README.md
CHANGED
@@ -1,9 +1,14 @@
 # UZDU – Universal Zip archiver, Downloader and Uploader for Node.js/Deno/Bun
 
 
-
+
+[](https://www.npmjs.com/package/uzdu)
 [](https://www.npmjs.com/package/uzdu)
+[](https://github.com/denis-kalinin/uzdu/actions/workflows/release.yml)
+
+
 
+## For users
 
 UZDU is a convenient tool to deploy files to a web server. If you're a DevOps enthusiast and prefer to stay in your familiar Node.js environment, you can deploy using this tool.
 
@@ -22,18 +27,24 @@ Bun:
 bunx uzdu -h
 ```
 
-
+### uploading
 
-- [Amazon S3](https://docs.aws.amazon.com/s3/) `npx uzdu
-- [Azure Blob Storage](https://azure.microsoft.com/en-us/products/storage/blobs) `npx uzdu
-- [Nexus](https://support.sonatype.com/hc/en-us/articles/115006744008-Repository-How-can-I-programmatically-upload-files-into-Nexus-3#DirectUploadusingHTTPPUTtotheRepositoryPath) `npx uzdu
-- SSH/SCP `npx uzdu
+- [Amazon S3](https://docs.aws.amazon.com/s3/) `npx uzdu upload aws -h`
+- [Azure Blob Storage](https://azure.microsoft.com/en-us/products/storage/blobs) `npx uzdu upload az -h`
+- [Nexus](https://support.sonatype.com/hc/en-us/articles/115006744008-Repository-How-can-I-programmatically-upload-files-into-Nexus-3#DirectUploadusingHTTPPUTtotheRepositoryPath) `npx uzdu upload http -h`
+- SSH/SCP `npx uzdu upload ssh -h`
 
-
+### downloading
 
-- http `npx uzdu
+- http `npx uzdu download http -h`
 
-
+### working with zip-archives
 
 - zip `npx uzdu zip -h`
-- unzip `npx uzdu unzip -h`
+- unzip `npx uzdu unzip -h`
+
+
+## For developers
+
+1. [Git flow guide](docs/git-flow.md)
+2. [Semantic release](docs/semantic-release.md)
package/lib/{chunk-KZ5VMI45.js → chunk-HPBHRAFV.js}
CHANGED
@@ -1,4 +1,26 @@
+var __defProp = Object.defineProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+
 // src/utils.ts
+var utils_exports = {};
+__export(utils_exports, {
+  addMetadata: () => addMetadata,
+  checkIsFile: () => checkIsFile,
+  doUnzip: () => doUnzip,
+  getEnvironment: () => getEnvironment,
+  initEnvironment: () => initEnvironment,
+  listBlobs: () => listBlobs,
+  listFiles: () => listFiles,
+  makeZip: () => makeZip,
+  outputConfiguration: () => outputConfiguration,
+  resolvePath: () => resolvePath,
+  safeIndex: () => safeIndex,
+  shouldBeDirectory: () => shouldBeDirectory,
+  shouldBeFile: () => shouldBeFile
+});
 import fs from "fs";
 import os from "os";
 import JSZip from "jszip";
@@ -71,22 +93,36 @@ async function listFiles(rootDir, _dir) {
   if (!_dir) {
     const lstat = fs.lstatSync(rootDir);
     if (lstat.isFile()) {
-      return [path.basename(rootDir)];
+      return { [path.basename(rootDir)]: rootDir };
     }
   }
   const cwd = _dir || rootDir;
   const files = fs.readdirSync(cwd, { withFileTypes: true });
   const fileList = await files.reduce(async (acc, file) => {
     const filePath = path.join(cwd, file.name);
+    let newAcc = {};
     if (file.isDirectory()) {
       const filesInDir = await listFiles(rootDir, filePath);
-
+      newAcc = { ...await acc, ...filesInDir };
+    } else if (file.isSymbolicLink()) {
+      const symLinkAbsoluteTargetPath = path.resolve(cwd, fs.readlinkSync(filePath));
+      const lstat = fs.lstatSync(symLinkAbsoluteTargetPath);
+      if (lstat.isDirectory()) {
+        const filesInDir = await listFiles(rootDir, filePath);
+        newAcc = { ...await acc, ...filesInDir };
+      } else if (lstat.isFile()) {
+        newAcc = { ...await acc, [filePath]: filePath.split(path.sep).join(path.posix.sep) };
+      } else if (lstat.isSymbolicLink()) {
+        const filesInDir = await listFiles(rootDir, filePath);
+        newAcc = { ...await acc, ...filesInDir };
+      }
     } else {
       const relativeFilePath = path.relative(rootDir, filePath).split(path.sep).join(path.posix.sep);
-      (
+      const absPath = path.resolve(rootDir, filePath).split(path.sep).join(path.posix.sep);
+      newAcc = { ...await acc, [relativeFilePath]: absPath };
     }
-    return
-  }, Promise.resolve(
+    return newAcc;
+  }, Promise.resolve({}));
   return fileList;
 }
 async function addMetadata(distributive, metadataFilename = ".metadata.json") {
@@ -111,10 +147,9 @@ async function getMetadata(dir, metadataFile) {
 async function makeZip(fromDir, zipFilePath) {
   const zip = new JSZip();
   const files = await listFiles(fromDir);
-  files.forEach((
-  const
-
-  zip.file(file, readStream, { binary: true });
+  Object.entries(files).forEach(([basePath, absPath]) => {
+    const readStream = fs.createReadStream(absPath);
+    zip.file(basePath, readStream, { binary: true });
   });
   return new Promise((resolve, reject) => {
     zip.generateNodeStream({ type: "nodebuffer", streamFiles: true }).pipe(fs.createWriteStream(zipFilePath)).on("finish", function() {
@@ -151,6 +186,12 @@ function checkIsFile(file) {
   const lstat = fs.lstatSync(absPath);
   return lstat.isFile();
 }
+function shouldBeFile(file) {
+  if (!checkIsFile(file)) {
+    const absPath = path.resolve(file);
+    throw new Error(`${file} is a directory, SHOULD be a file. Check [${absPath}]`);
+  }
+}
 function shouldBeDirectory(directory) {
   const lstat = fs.lstatSync(directory);
   if (!lstat.isDirectory) {
@@ -178,6 +219,7 @@ function resolvePath(filePath) {
 }
 
 export {
+  __export,
   getEnvironment,
   initEnvironment,
   listFiles,
@@ -188,5 +230,6 @@ export {
   shouldBeDirectory,
   outputConfiguration,
   safeIndex,
-  resolvePath
+  resolvePath,
+  utils_exports
 };
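What changed here, in short: `listFiles` used to resolve to an array of relative paths; in 1.0.15 it resolves to a map from POSIX-style relative path to absolute path, and it now follows symlinks. A minimal consumer sketch, assuming the `utils` namespace that `uzdu.d.ts` (below) re-exports from the package root:

```ts
import { utils } from "uzdu";

// 1.0.14 resolved to string[] of relative paths; 1.0.15 resolves to
// { [relativePath]: absolutePath } and descends into symlinked directories.
const files = await utils.listFiles("./dist");
for (const [relative, absolute] of Object.entries(files)) {
  console.log(`${relative} <- ${absolute}`); // e.g. "css/site.css <- /home/me/dist/css/site.css"
}
```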
package/lib/chunk-NAE2ZQ34.js
ADDED
@@ -0,0 +1,364 @@
+import {
+  __export,
+  getEnvironment,
+  initEnvironment,
+  listFiles,
+  resolvePath,
+  safeIndex
+} from "./chunk-HPBHRAFV.js";
+
+// src/azure.ts
+var azure_exports = {};
+__export(azure_exports, {
+  default: () => upload
+});
+import { BlobServiceClient } from "@azure/storage-blob";
+import path from "path";
+import fs from "fs";
+async function upload(dir, options, metadataFile = ".metadata.json") {
+  if (!options.connectionString) throw Error("Uploader needs connection string for Azure Blob Storage. Provide AZURE_STORAGE_CONNECTION_STRING environment variable!");
+  const opts = Object.assign({}, { container: "$web" }, options);
+  const blobServiceClient = BlobServiceClient.fromConnectionString(options.connectionString);
+  const isDebug = process.env.DEBUG && process.env.DEBUG.toLowerCase() === "true";
+  const containerClient = blobServiceClient.getContainerClient(opts.container);
+  let dist = path.resolve(process.cwd(), dir);
+  const files = await listFiles(dir);
+  let metadata;
+  try {
+    const metadataJson = fs.readFileSync(path.join(dir, metadataFile), { encoding: "utf-8" });
+    metadata = JSON.parse(metadataJson);
+  } catch (e) {
+  }
+  if (Object.keys(files).length == 1) {
+    const lstat = fs.lstatSync(dist);
+    if (lstat.isFile()) {
+      dist = path.dirname(dist);
+    }
+  }
+  await Promise.all(Object.entries(files).map(async ([file, absFile]) => {
+    let blobObj;
+    if (metadata) {
+      blobObj = metadata[file];
+    }
+    const blockBlobClient = containerClient.getBlockBlobClient(file);
+    const blobHTTPHeaders = {};
+    if (blobObj?.headers) {
+      const { CacheControl, ContentType } = blobObj.headers;
+      blobHTTPHeaders.blobCacheControl = CacheControl;
+      blobHTTPHeaders.blobContentType = ContentType;
+    }
+    const localFilePath = absFile;
+    await blockBlobClient.uploadFile(localFilePath, { blobHTTPHeaders });
+  }));
+}
+
+// src/s3.ts
+var s3_exports = {};
+__export(s3_exports, {
+  default: () => upload2
+});
+import { S3Client } from "@aws-sdk/client-s3";
+import { Upload } from "@aws-sdk/lib-storage";
+import fs2 from "fs";
+import path2 from "path";
+async function upload2(dir, s3config, metadataFile = ".metadata.json") {
+  if (!s3config.accessKeyId || !s3config.secretAccessKey) {
+    throw new Error("AWS credentials not found in environment variables AWS_KEY_ID and AWS_SECRET_KEY.");
+  }
+  if (!s3config.region) {
+    throw new Error('Neither "region" in the bucket address nor AWS_REGION environment variable was found.');
+  }
+  if (!s3config.bucket) {
+    throw new Error("Amazon S3 bucket name is required");
+  }
+  const { accessKeyId, secretAccessKey, region, endpoint } = s3config;
+  const client = new S3Client({
+    credentials: {
+      accessKeyId,
+      secretAccessKey
+    },
+    region,
+    endpoint
+  });
+  let dist = path2.resolve(process.cwd(), dir);
+  const files = await listFiles(dist);
+  let metadata;
+  try {
+    const metadataJson = fs2.readFileSync(path2.join(dir, metadataFile), { encoding: "utf-8" });
+    metadata = JSON.parse(metadataJson);
+  } catch (e) {
+  }
+  if (Object.keys(files).length == 1) {
+    const lstat = fs2.lstatSync(dist);
+    if (lstat.isFile()) {
+      dist = path2.dirname(dist);
+    }
+  }
+  await Promise.all(Object.entries(files).map(async ([file, absFile]) => {
+    const filePath = absFile;
+    const fileContent = fs2.readFileSync(filePath);
+    const params = {
+      Bucket: s3config.bucket,
+      Key: file,
+      Body: fileContent
+    };
+    if (metadata) {
+      const blobObj = metadata[file];
+      if (blobObj && blobObj.headers) {
+        const { CacheControl, ContentType } = blobObj.headers;
+        if (CacheControl) params.CacheControl = CacheControl;
+        if (ContentType) params.ContentType = ContentType;
+      }
+    }
+    return new Upload({
+      client,
+      params,
+      tags: [],
+      queueSize: 4,
+      // optional concurrency configuration
+      partSize: 1024 * 1024 * 5,
+      // optional size of each part, in bytes, at least 5MB
+      leavePartsOnError: false
+      // optional manually handle dropped parts
+    }).done();
+  }));
+}
+
+// src/ssh.ts
+var ssh_exports = {};
+__export(ssh_exports, {
+  getDirMap: () => getDirMap,
+  getMakeDirs: () => getMakeDirs,
+  getSshConfig: () => getSshConfig,
+  upload: () => upload3
+});
+import { Client } from "ssh2";
+import fs3 from "fs";
+import path3 from "path";
+import deepmerge from "deepmerge";
+async function upload3(source, sftpUrlPath, sshConfig) {
+  await new Promise((resolve, reject) => {
+    fs3.stat(source, async (err, stats) => {
+      if (stats.isSymbolicLink()) {
+        reject(new Error(`${source} is symlink`));
+      } else {
+        const sshConnection = await connect(sshConfig);
+        try {
+          const files = await listFiles(source);
+          const _destination = sftpUrlPath.replace(/\/+$/, "").replace(/^~/, ".");
+          const _source = source.replace(/\/+$/, "");
+          await mkdirs(sshConnection, _destination, files);
+          await uploadFiles(files, _source, _destination, sshConnection);
+          resolve();
+        } catch (e) {
+          console.error("SFTP error", e);
+          reject(e);
+        } finally {
+          sshConnection.destroy();
+        }
+      }
+    });
+  });
+}
+async function mkdirs(sshConnection, destination, sources) {
+  const fileMap = getDirMap(sources);
+  const makeDirs = getMakeDirs(fileMap, destination);
+  const commands = makeDirs ? makeDirs.map((dir) => `mkdir -p "${dir}"`) : [`mkdir -p "${destination}"`];
+  const commandLine = commands.length > 1 ? commands.join(";") : commands[0];
+  await new Promise((res, rej) => {
+    sshConnection.exec(commandLine, {}, (err, channel) => {
+      if (err) {
+        console.error("mkdir error", err);
+        rej(new Error(`failed: mkdir -p ... : ${err}`));
+      } else {
+        channel.on("exit", (code, signal) => {
+          if (code != 0) rej(new Error(`Exit code: ${code} for "mkdir -p ..."`));
+          else res();
+        });
+      }
+    });
+  });
+}
+function _uploadFile(source, destination, sftp) {
+  return new Promise((resolve, reject) => {
+    sftp.stat(destination, async (err, stats) => {
+      if (err) {
+        sftp.fastPut(source, destination, {}, (err2) => {
+          if (err2) reject(err2);
+          else resolve();
+        });
+      } else if (stats.isFile()) {
+        sftp.fastPut(source, destination, {}, (err2) => {
+          if (err2) reject(err2);
+          else resolve();
+        });
+      } else if (stats.isDirectory()) {
+        const f = path3.basename(source);
+        reject(new Error(`Overwriting directory ${destination} with the file ${f} is not allowed. Remove the directory manually.`));
+      } else {
+        reject(new Error("Remote path is symlink"));
+      }
+    });
+  });
+}
+function uploadFiles(sourceFiles, source, destination, sshConnection) {
+  return new Promise((resolve, reject) => {
+    sshConnection.sftp(async (err, sftp) => {
+      if (err) {
+        console.error("uploadFiles error");
+        reject(err);
+      } else {
+        if (Object.keys(sourceFiles).length == 1) {
+          const lstat2 = fs3.lstatSync(source);
+          if (lstat2.isFile()) {
+            const dest = path3.join(destination, sourceFiles[0]).replace(/\\/g, "/");
+            const src = source;
+            console.log(`Uploading file ${src} => ${dest}`);
+            await _uploadFile(src, dest, sftp).then(() => resolve()).catch((e) => {
+              console.error(src);
+              reject(e);
+            });
+            return;
+          }
+        }
+        let sourceDir = source;
+        console.info("sourcing directory", sourceDir);
+        const lstat = fs3.lstatSync(source);
+        if (lstat.isSymbolicLink()) {
+          sourceDir = fs3.readlinkSync(source);
+          console.info(`${source} ==> ${sourceDir}`);
+        }
+        const promises = [];
+        Object.entries(sourceFiles).map(([baseName, absPath]) => {
+          const dest = path3.join(destination, baseName).replace(/\\/g, "/");
+          console.log(`Uploading ${absPath} => ${dest}`);
+          const promise = new Promise((res, rej) => {
+            _uploadFile(absPath, dest, sftp).then(() => res()).catch((e) => {
+              console.error(absPath);
+              console.error(e);
+              rej(e);
+            });
+          });
+          promises.push(promise);
+        });
+        await Promise.all(promises);
+        resolve();
+      }
+    });
+  });
+}
+async function connect(sshConfig) {
+  const conn = new Client();
+  try {
+    return await new Promise((resolve, reject) => {
+      conn.on("error", (e) => {
+        reject(new Error(`Target host error: ${e}`));
+      }).on("ready", () => {
+        resolve(conn);
+      }).connect({
+        timeout: 99,
+        port: 22,
+        algorithms: {
+          cipher: [
+            "aes128-ctr",
+            "aes192-ctr",
+            "aes256-ctr",
+            "aes256-cbc",
+            "aes128-cbc"
+            //"aes128-gcm", ////"aes128-gcm@openssh.com", //"aes256-gcm", ////"aes256-gcm@openssh.com", ////"aes192-cbc",
+          ]
+        },
+        ...sshConfig
+      });
+    });
+  } catch (e) {
+    console.error("Connection failed", e);
+    conn.destroy();
+    throw e;
+  }
+}
+function getMakeDirs(fileMap, destination) {
+  const kv = Object.entries(fileMap);
+  const hasSubdirs = kv.some((keyVal) => !!keyVal[1]);
+  if (!hasSubdirs) return false;
+  const subdirs = kv.reduce((acc, curr) => {
+    if (curr[1]) {
+      const res = getMakeDirs(curr[1]);
+      const prefix = destination ? [destination, curr[0]].join("/") : curr[0];
+      if (res) {
+        const pathes = res.map((apath) => [prefix, apath].join("/"));
+        acc.push(...pathes);
+      } else acc.push(prefix);
+    }
+    return acc;
+  }, []);
+  return subdirs;
+}
+function getDirMap(files) {
+  let fileMap = {};
+  Object.keys(files).map((file) => {
+    const leaf = getFileMap(file);
+    fileMap = deepmerge(fileMap, leaf);
+  });
+  return fileMap;
+}
+function getFileMap(file) {
+  let theFile = file;
+  if (file.indexOf("/") == 0) theFile = file.substring(1);
+  const parts = theFile.split("/");
+  if (parts.length == 1) return { [parts[0]]: false };
+  else {
+    const aFile = path3.join(...parts.slice(1)).replace(/\\/g, "/");
+    const fileMapEntry = getFileMap(aFile);
+    return { [parts[0]]: fileMapEntry };
+  }
+}
+function getSshConfig(ssh_server, options) {
+  if (options.dotenv) {
+    const theEnv = getEnvironment(options.dotenv);
+    initEnvironment(theEnv);
+  }
+  const userAndSshUrl = ssh_server.split("@");
+  if (userAndSshUrl.length == 1) throw new Error("ssh_server address must specify username, e.g. root@example.com");
+  const username = userAndSshUrl[0];
+  const hostParts = userAndSshUrl[1].split(":");
+  const host = hostParts[0];
+  const sPort = safeIndex(hostParts, 1) || 22;
+  const port = Number(sPort);
+  const conConfig = { host, port, username };
+  const uzduKeyPath = options.targetKeyPath || process.env.UZDU_SSH_KEY_PATH;
+  const uzduKey = options.targetKey || process.env.UZDU_SSH_KEY;
+  const uzduPassword = options.targetPassword || process.env.UZDU_SSH_PASSWORD;
+  let password = void 0;
+  let privateKey = uzduKey;
+  if (!privateKey) {
+    if (uzduKeyPath) {
+      const resolvedKeyPath = resolvePath(options.targetKey);
+      try {
+        privateKey = fs3.readFileSync(resolvedKeyPath);
+      } catch (e) {
+        throw new Error(`Not found private Key file ${resolvedKeyPath}`);
+      }
+    } else {
+      if (!uzduPassword) throw new Error("Either --targetPassword, --targetKeyPath or --targetKey should be specified");
+      password = uzduPassword;
+    }
+  }
+  const authConfig = password ? {
+    password
+  } : {
+    privateKey
+  };
+  const sshConfig = { ...conConfig, ...authConfig };
+  return sshConfig;
+}
+
+export {
+  upload,
+  azure_exports,
+  upload2,
+  s3_exports,
+  upload3,
+  getSshConfig,
+  ssh_exports
+};
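The ssh helpers above build the remote directory layout before any `fastPut`: `getDirMap` folds the flat file map into a nested tree, and `getMakeDirs` reduces that tree to the `mkdir -p` targets. A worked sketch, assuming these are reachable through the `ssh` namespace declared in `uzdu.d.ts` below:

```ts
import { ssh } from "uzdu";

// Relative path -> absolute path, as produced by the new listFiles().
const files = {
  "index.html": "/abs/site/index.html",
  "css/site.css": "/abs/site/css/site.css",
  "css/fonts/a.woff": "/abs/site/css/fonts/a.woff"
};
const dirMap = ssh.getDirMap(files);
// => { "index.html": false, css: { "site.css": false, fonts: { "a.woff": false } } }
console.log(ssh.getMakeDirs(dirMap, "/var/www"));
// => ["/var/www/css/fonts"]  (one "mkdir -p" per deepest subdirectory)
```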
package/lib/{chunk-2WNHNCUV.js → chunk-SO2QIMWR.js}
CHANGED
@@ -1,8 +1,14 @@
 import {
+  __export,
   listFiles
-} from "./chunk-KZ5VMI45.js";
+} from "./chunk-HPBHRAFV.js";
 
 // src/http.ts
+var http_exports = {};
+__export(http_exports, {
+  download: () => download,
+  upload: () => upload
+});
 import fs from "fs";
 import path from "path";
 var isDebug = process.env.DEBUG?.toLowerCase() === "true";
@@ -12,7 +18,7 @@ async function upload(dirOrFile, url, headers) {
   const files = await listFiles(distr);
   if (isDebug) console.log(`files to upload: ${files.length}`);
   let singleUrl;
-  if (files.length == 1) {
+  if (Object.keys(files).length == 1) {
     const lstat = fs.lstatSync(distr);
     if (lstat.isFile()) {
       distr = path.dirname(distr);
@@ -20,9 +26,9 @@ async function upload(dirOrFile, url, headers) {
     }
   }
   const fixedBaseUrl = url.href.endsWith("/") ? url : new URL(`${url.href}/`);
-  await Promise.all(files.map(async (file) => {
+  await Promise.all(Object.entries(files).map(async ([file, absFile]) => {
     if (isDebug) console.log(`filename: ${file}`);
-    const localFilePath =
+    const localFilePath = absFile;
     const fileUrl = singleUrl || new URL(`${fixedBaseUrl}${file}`);
     if (isDebug) console.log(`uploading ${localFilePath} => ${fileUrl}`);
     await uploadFile(localFilePath, fileUrl, headers);
@@ -89,5 +95,6 @@ async function download(url, headers) {
 
 export {
   upload,
-  download
+  download,
+  http_exports
 };
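The uploader above derives each PUT target as the base URL plus the file's relative path, first normalizing the base to end with `/`. A small sketch of that URL arithmetic, with a hypothetical Nexus raw repository:

```ts
// Standard WHATWG URL API; the host and repository path are made up for illustration.
const base = new URL("https://nexus.example.com/repository/raw/app");
const fixedBaseUrl = base.href.endsWith("/") ? base : new URL(`${base.href}/`);
console.log(new URL(`${fixedBaseUrl}css/site.css`).href);
// => https://nexus.example.com/repository/raw/app/css/site.css
```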
package/lib/uzdu-copy.js
CHANGED
package/lib/uzdu-download.js
CHANGED
@@ -1,11 +1,11 @@
 import {
   download
-} from "./chunk-2WNHNCUV.js";
+} from "./chunk-SO2QIMWR.js";
 import {
   getEnvironment,
   initEnvironment,
   outputConfiguration
-} from "./chunk-KZ5VMI45.js";
+} from "./chunk-HPBHRAFV.js";
 
 // src/uzdu-download.ts
 import { Command, Option } from "commander";
package/lib/uzdu-metadata.js
CHANGED
package/lib/uzdu-unzip.js
CHANGED
package/lib/uzdu-upload.js
CHANGED
@@ -1,306 +1,22 @@
+import {
+  getSshConfig,
+  upload as upload2,
+  upload2 as upload3,
+  upload3 as upload4
+} from "./chunk-NAE2ZQ34.js";
 import {
   upload
-} from "./chunk-2WNHNCUV.js";
+} from "./chunk-SO2QIMWR.js";
 import {
   getEnvironment,
   initEnvironment,
-  listFiles,
   outputConfiguration,
   resolvePath,
-  safeIndex,
   shouldBeDirectory
-} from "./chunk-KZ5VMI45.js";
+} from "./chunk-HPBHRAFV.js";
 
 // src/uzdu-upload.ts
 import { Argument, Command, Option } from "commander";
-
-// src/azure.ts
-import { BlobServiceClient } from "@azure/storage-blob";
-import path from "path";
-import fs from "fs";
-async function upload2(dir, options, metadataFile = ".metadata.json") {
-  if (!options.connectionString) throw Error("Uploader needs connection string for Azure Blob Storage. Provide AZURE_STORAGE_CONNECTION_STRING environment variable!");
-  const opts = Object.assign({}, { container: "$web" }, options);
-  const blobServiceClient = BlobServiceClient.fromConnectionString(options.connectionString);
-  const isDebug = process.env.DEBUG && process.env.DEBUG.toLowerCase() === "true";
-  const containerClient = blobServiceClient.getContainerClient(opts.container);
-  let dist = path.resolve(process.cwd(), dir);
-  const files = await listFiles(dir);
-  let metadata;
-  try {
-    const metadataJson = fs.readFileSync(path.join(dir, metadataFile), { encoding: "utf-8" });
-    metadata = JSON.parse(metadataJson);
-  } catch (e) {
-  }
-  if (files.length == 1) {
-    const lstat = fs.lstatSync(dist);
-    if (lstat.isFile()) {
-      dist = path.dirname(dist);
-    }
-  }
-  await Promise.all(files.map(async (file) => {
-    let blobObj;
-    if (metadata) {
-      blobObj = metadata[file];
-    }
-    const blockBlobClient = containerClient.getBlockBlobClient(file);
-    const blobHTTPHeaders = {};
-    if (blobObj?.headers) {
-      const { CacheControl, ContentType } = blobObj.headers;
-      blobHTTPHeaders.blobCacheControl = CacheControl;
-      blobHTTPHeaders.blobContentType = ContentType;
-    }
-    const localFilePath = path.resolve(dist, file);
-    await blockBlobClient.uploadFile(localFilePath, { blobHTTPHeaders });
-  }));
-}
-
-// src/s3.ts
-import { S3Client } from "@aws-sdk/client-s3";
-import { Upload } from "@aws-sdk/lib-storage";
-import fs2 from "fs";
-import path2 from "path";
-async function upload3(dir, s3config, metadataFile = ".metadata.json") {
-  if (!s3config.accessKeyId || !s3config.secretAccessKey) {
-    throw new Error("AWS credentials not found in environment variables AWS_KEY_ID and AWS_SECRET_KEY.");
-  }
-  if (!s3config.region) {
-    throw new Error('Neither "region" in the bucket address nor AWS_REGION environment variable was found.');
-  }
-  if (!s3config.bucket) {
-    throw new Error("Amazon S3 bucket name is required");
-  }
-  const { accessKeyId, secretAccessKey, region, endpoint } = s3config;
-  const client = new S3Client({
-    credentials: {
-      accessKeyId,
-      secretAccessKey
-    },
-    region,
-    endpoint
-  });
-  let dist = path2.resolve(process.cwd(), dir);
-  const files = await listFiles(dist);
-  let metadata;
-  try {
-    const metadataJson = fs2.readFileSync(path2.join(dir, metadataFile), { encoding: "utf-8" });
-    metadata = JSON.parse(metadataJson);
-  } catch (e) {
-  }
-  if (files.length == 1) {
-    const lstat = fs2.lstatSync(dist);
-    if (lstat.isFile()) {
-      dist = path2.dirname(dist);
-    }
-  }
-  await Promise.all(files.map(async (file) => {
-    const filePath = path2.resolve(dist, file);
-    const fileContent = fs2.readFileSync(filePath);
-    const params = {
-      Bucket: s3config.bucket,
-      Key: file,
-      Body: fileContent
-    };
-    if (metadata) {
-      const blobObj = metadata[file];
-      if (blobObj && blobObj.headers) {
-        const { CacheControl, ContentType } = blobObj.headers;
-        if (CacheControl) params.CacheControl = CacheControl;
-        if (ContentType) params.ContentType = ContentType;
-      }
-    }
-    return new Upload({
-      client,
-      params,
-      tags: [],
-      queueSize: 4,
-      // optional concurrency configuration
-      partSize: 1024 * 1024 * 5,
-      // optional size of each part, in bytes, at least 5MB
-      leavePartsOnError: false
-      // optional manually handle dropped parts
-    }).done();
-  }));
-}
-
-// src/ssh.ts
-import { Client } from "ssh2";
-import fs3 from "fs";
-import path3 from "path";
-import deepmerge from "deepmerge";
-async function upload4(source, destination, sshConfig) {
-  await new Promise((resolve, reject) => {
-    fs3.stat(source, async (err, stats) => {
-      if (stats.isSymbolicLink()) {
-        reject(new Error(`${source} is symlink`));
-      } else {
-        const sshConnection = await connect(sshConfig);
-        try {
-          const files = await listFiles(source);
-          const _destination = destination.replace(/\/+$/, "");
-          const _source = source.replace(/\/+$/, "");
-          await mkdirs(sshConnection, _destination, files);
-          await uploadFiles(files, _source, _destination, sshConnection);
-          resolve();
-        } catch (e) {
-          console.error("SFTP error", e);
-          reject(e);
-        } finally {
-          sshConnection.destroy();
-        }
-      }
-    });
-  });
-}
-async function mkdirs(sshConnection, destination, sources) {
-  const fileMap = getDirMap(sources);
-  const makeDirs = getMakeDirs(fileMap, destination);
-  const commands = makeDirs ? makeDirs.map((dir) => `mkdir -p "${dir}"`) : [`mkdir -p "${destination}"`];
-  const commandLine = commands.length > 1 ? commands.join(";") : commands[0];
-  await new Promise((res, rej) => {
-    sshConnection.exec(commandLine, {}, (err, channel) => {
-      if (err) {
-        console.error("mkdir error", err);
-        rej(new Error(`failed: mkdir -p ... : ${err}`));
-      } else {
-        channel.on("exit", (code, signal) => {
-          if (code != 0) rej(new Error(`Exit code: ${code} for "mkdir -p ..."`));
-          else res();
-        });
-      }
-    });
-  });
-}
-function _uploadFile(source, destination, sftp) {
-  return new Promise((resolve, reject) => {
-    sftp.stat(destination, async (err, stats) => {
-      if (err) {
-        sftp.fastPut(source, destination, {}, (err2) => {
-          if (err2) reject(err2);
-          else resolve();
-        });
-      } else if (stats.isFile()) {
-        sftp.fastPut(source, destination, {}, (err2) => {
-          if (err2) reject(err2);
-          else resolve();
-        });
-      } else if (stats.isDirectory()) {
-        const f = path3.basename(source);
-        reject(new Error(`Overwriting directory ${destination} with the file ${f} is not allowed. Remove the directory manually.`));
-      } else {
-        reject(new Error("Remote path is symlink"));
-      }
-    });
-  });
-}
-function uploadFiles(sources, source, destination, sshConnection) {
-  return new Promise((resolve, reject) => {
-    sshConnection.sftp(async (err, sftp) => {
-      if (err) {
-        console.error("uploadFiles error");
-        reject(err);
-      } else {
-        if (sources.length == 1) {
-          const lstat = fs3.lstatSync(source);
-          if (lstat.isFile()) {
-            const dest = path3.join(destination, sources[0]).replace(/\\/g, "/");
-            const src = source;
-            console.log(`Uploading file ${src} => ${dest}`);
-            await _uploadFile(src, dest, sftp).then(() => resolve()).catch((e) => {
-              console.error(src);
-              reject(e);
-            });
-            return;
-          }
-        }
-        const promises = [];
-        sources.map((f) => {
-          const dest = path3.join(destination, f).replace(/\\/g, "/");
-          const src = path3.join(source, f).replace(/\\/g, "/");
-          console.log(`Uploading ${src} => ${dest}`);
-          const promise = new Promise((res, rej) => {
-            _uploadFile(src, dest, sftp).then(() => res()).catch((e) => {
-              console.error(src);
-              rej(e);
-            });
-          });
-          promises.push(promise);
-        });
-        await Promise.all(promises);
-        resolve();
-      }
-    });
-  });
-}
-async function connect(sshConfig) {
-  const conn = new Client();
-  try {
-    return await new Promise((resolve, reject) => {
-      conn.on("error", (e) => {
-        reject(new Error(`Target host error: ${e}`));
-      }).on("ready", () => {
-        resolve(conn);
-      }).connect({
-        timeout: 99,
-        port: 22,
-        algorithms: {
-          cipher: [
-            "aes128-ctr",
-            "aes192-ctr",
-            "aes256-ctr",
-            "aes256-cbc",
-            "aes128-cbc"
-            //"aes128-gcm", ////"aes128-gcm@openssh.com", //"aes256-gcm", ////"aes256-gcm@openssh.com", ////"aes192-cbc",
-          ]
-        },
-        ...sshConfig
-      });
-    });
-  } catch (e) {
-    console.error("Connection failed", e);
-    conn.destroy();
-    throw e;
-  }
-}
-function getMakeDirs(fileMap, destination) {
-  const kv = Object.entries(fileMap);
-  const hasSubdirs = kv.some((keyVal) => !!keyVal[1]);
-  if (!hasSubdirs) return false;
-  const subdirs = kv.reduce((acc, curr) => {
-    if (curr[1]) {
-      const res = getMakeDirs(curr[1]);
-      const prefix = destination ? [destination, curr[0]].join("/") : curr[0];
-      if (res) {
-        const pathes = res.map((apath) => [prefix, apath].join("/"));
-        acc.push(...pathes);
-      } else acc.push(prefix);
-    }
-    return acc;
-  }, []);
-  return subdirs;
-}
-function getDirMap(files) {
-  let fileMap = {};
-  files.map((file) => {
-    const leaf = getFileMap(file);
-    fileMap = deepmerge(fileMap, leaf);
-  });
-  return fileMap;
-}
-function getFileMap(file) {
-  let theFile = file;
-  if (file.indexOf("/") == 0) theFile = file.substring(1);
-  const parts = theFile.split("/");
-  if (parts.length == 1) return { [parts[0]]: false };
-  else {
-    const aFile = path3.join(...parts.slice(1)).replace(/\\/g, "/");
-    const fileMapEntry = getFileMap(aFile);
-    return { [parts[0]]: fileMapEntry };
-  }
-}
-
-// src/uzdu-upload.ts
-import fs4 from "fs";
 var command = new Command();
 command.description("Upload to Azure, AWS and HTTP server").name("uzdu upload");
 command.command("aws").description("upload to AWS S3").argument("<from>", "the directory to upload to the <bucket>").argument("<bucket>", 'the AWS S3 bucket[:region[:endpoint]], e.g. "mybucket", "mybucket:us-east-2" or "mybucket:my-region:https://my-s3-provider/endpoint". [:region] overrides S3_REGION environment variable. Expects environment variables S3_ACCESS_KEY_ID and S3_SECRET_ACCESS_KEY.').addOption(
@@ -368,43 +84,11 @@ command.command("azure").alias("az").description("upload to Azure Blob Storage")
       thisCommand.error(e.message || e, { exitCode: 43, code: "az.upload.error" });
     }
   });
-command.command("ssh").description("upload with SSH").argument("<source>", "source directory or file to upload <
+command.command("ssh").description("upload with SSH").argument("<source>", "source directory or file to upload into <ssh_server>").argument("<ssh_server>", "username and server hostname/ip-address and optional SSH-port - username@hostname[:port], e.g. root@10.100.0.1:22").argument("<destination>", "destination directory or file").addOption(
   new Option("-d|--dotenv [file]", 'load environment variables from a property file, i.e. a file with "key=value" lines.').preset(".env")
-).addOption(new Option("--
+).addOption(new Option("--targetKeyPath [targetKeyPath]", "Path to SSH private key, fallback is UZDU_SSH_KEY_PATH environment variable")).addOption(new Option("--targetKey [targetKey]", "SSH private key, fallback is UZDU_SSH_KEY environment variable")).addOption(new Option("--targetPassword [targetPassword]", "SSH password, fallback is UZDU_SSH_PASSWORD environment variable")).action(async (source, ssh_server, destination, options, thisCommand) => {
   try {
-
-      const theEnv = getEnvironment(options.dotenv);
-      initEnvironment(theEnv);
-    }
-    const hostParts = options.target.split(":");
-    const host = hostParts[0];
-    const sPort = safeIndex(hostParts, 1) || 22;
-    const port = Number(sPort);
-    const conConfig = {
-      host,
-      port,
-      username: options.targetUsername
-    };
-    const privKeyPath = options.targetKey;
-    let password = void 0;
-    let privateKey = void 0;
-    if (privKeyPath) {
-      const resolvedKeyPath = resolvePath(options.targetKey);
-      try {
-        privateKey = fs4.readFileSync(resolvedKeyPath);
-      } catch (e) {
-        throw new Error(`Not found private Key file ${resolvedKeyPath}`);
-      }
-    } else {
-      if (!options.targetPassword) throw new Error("Either --targetPassword or --targetKey should be specified");
-      password = options.targetPassword;
-    }
-    const authConfig = password ? {
-      password
-    } : {
-      privateKey
-    };
-    const sshConfig = { ...conConfig, ...authConfig };
+    const sshConfig = getSshConfig(ssh_server, options);
     await upload4(resolvePath(source), destination, sshConfig);
   } catch (e) {
     console.error(e);
package/lib/uzdu-zip.js
CHANGED
package/lib/uzdu.d.ts
CHANGED
@@ -1,6 +1,156 @@
 #! /usr/bin/env node
-import {
+import { OutputConfiguration } from 'commander';
+import { ConnectConfig } from 'ssh2';
 
-
+interface BlobObject {
+  /**
+   * blob key
+   */
+  key: string;
+  headers?: {
+    CacheControl?: string;
+    ContentType?: string;
+  };
+}
+/**
+ *
+ * @param rootDir a local directory with files to be uploaded
+ * @param blobDir a storage directory (e.g. "a", "a/b") where files from rootDir
+ * will be uploaded. Default is "";
+ * @returns a list of {@link BlobObject}
+ */
+declare function listBlobs(rootDir: string, blobDir?: string, _dir?: string): Promise<BlobObject[]>;
+declare function getEnvironment(file?: string): {
+  [key: string]: string;
+};
+/**
+ * @param env a parameter structure to be added to {@link process.env}
+ */
+declare function initEnvironment(env: {
+  [key: string]: string;
+}): void;
+/**
+ *
+ * @param rootDir
+ * @param _dir
+ * @returns map, where key is path relative to rootDir, value is absolute filepath
+ */
+declare function listFiles(rootDir: string, _dir?: string): Promise<{
+  [x: string]: string;
+}>;
+/**
+ * Add metadata about files in the `distributive` directory
+ * @param distributive path to the directory with files
+ * @param metadataFilename a name of the metadata file that will be added to the `distributive` directory, default is `.metadata.json`
+ * @returns the same `distributive` path as in the parameter
+ */
+declare function addMetadata(distributive: string, metadataFilename?: string): Promise<string>;
+/**
+ *
+ * @param fromDir
+ * @param zipFilePath
+ * @param metadata a json with metadata
+ * @returns
+ */
+declare function makeZip(fromDir: string, zipFilePath: string): Promise<string>;
+/**
+ *
+ * @param fromZip
+ * @param toDir
+ * @returns resolved toDir
+ */
+declare function doUnzip(fromZip: string, toDir: string): Promise<string>;
+declare function checkIsFile(file: string): boolean;
+declare function shouldBeFile(file: string): void;
+declare function shouldBeDirectory(directory: string): void;
+declare const outputConfiguration: OutputConfiguration;
+/**
+ * @param arr
+ * @param index
+ * @returns element[`index`] from array `arr` or undefined
+ */
+declare function safeIndex<T>(arr: T[], index: number): T | undefined;
+/**
+ * Resolves paths that start with a tilde to the user's home directory.
+ *
+ * @param filePath '~/GitHub/Repo/file.png'
+ * @return '/home/bob/GitHub/Repo/file.png'
+ */
+declare function resolvePath(filePath: string): string;
 
-
+type utils_BlobObject = BlobObject;
+declare const utils_addMetadata: typeof addMetadata;
+declare const utils_checkIsFile: typeof checkIsFile;
+declare const utils_doUnzip: typeof doUnzip;
+declare const utils_getEnvironment: typeof getEnvironment;
+declare const utils_initEnvironment: typeof initEnvironment;
+declare const utils_listBlobs: typeof listBlobs;
+declare const utils_listFiles: typeof listFiles;
+declare const utils_makeZip: typeof makeZip;
+declare const utils_outputConfiguration: typeof outputConfiguration;
+declare const utils_resolvePath: typeof resolvePath;
+declare const utils_safeIndex: typeof safeIndex;
+declare const utils_shouldBeDirectory: typeof shouldBeDirectory;
+declare const utils_shouldBeFile: typeof shouldBeFile;
+declare namespace utils {
+  export { type utils_BlobObject as BlobObject, utils_addMetadata as addMetadata, utils_checkIsFile as checkIsFile, utils_doUnzip as doUnzip, utils_getEnvironment as getEnvironment, utils_initEnvironment as initEnvironment, utils_listBlobs as listBlobs, utils_listFiles as listFiles, utils_makeZip as makeZip, utils_outputConfiguration as outputConfiguration, utils_resolvePath as resolvePath, utils_safeIndex as safeIndex, utils_shouldBeDirectory as shouldBeDirectory, utils_shouldBeFile as shouldBeFile };
+}
+
+declare function upload$3(dirOrFile: string, url: URL, headers?: string[]): Promise<void>;
+declare function download(url: URL, headers?: string[]): Promise<Response>;
+
+declare const http_download: typeof download;
+declare namespace http {
+  export { http_download as download, upload$3 as upload };
+}
+
+interface S3Config {
+  accessKeyId: string;
+  secretAccessKey: string;
+  region?: string;
+  endpoint?: string;
+  bucket: string;
+}
+declare function upload$2(dir: string, s3config: S3Config, metadataFile?: string): Promise<void>;
+
+type s3_S3Config = S3Config;
+declare namespace s3 {
+  export { type s3_S3Config as S3Config, upload$2 as default };
+}
+
+interface AzureStorageOptions {
+  connectionString: string;
+  container?: string;
+}
+declare function upload$1(dir: string, options: AzureStorageOptions, metadataFile?: string): Promise<void>;
+
+type azure_AzureStorageOptions = AzureStorageOptions;
+declare namespace azure {
+  export { type azure_AzureStorageOptions as AzureStorageOptions, upload$1 as default };
+}
+
+type SshConfig = ConnectConfig & {
+  password: string;
+  privateKey?: undefined;
+} | {
+  password?: undefined;
+  privateKey: Buffer | string;
+};
+declare function upload(source: string, sftpUrlPath: string, sshConfig: SshConfig): Promise<void>;
+type FileMapEntry = {
+  [key: string]: false | FileMapEntry;
+};
+declare function getMakeDirs(fileMap: FileMapEntry, destination?: string): false | string[];
+declare function getDirMap(files: Record<string, string>): FileMapEntry;
+declare function getSshConfig(ssh_server: string, options: any): SshConfig;
+
+type ssh_SshConfig = SshConfig;
+declare const ssh_getDirMap: typeof getDirMap;
+declare const ssh_getMakeDirs: typeof getMakeDirs;
+declare const ssh_getSshConfig: typeof getSshConfig;
+declare const ssh_upload: typeof upload;
+declare namespace ssh {
+  export { type ssh_SshConfig as SshConfig, ssh_getDirMap as getDirMap, ssh_getMakeDirs as getMakeDirs, ssh_getSshConfig as getSshConfig, ssh_upload as upload };
+}
+
+export { azure, http, s3, ssh, utils };
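`BlobObject` above is the shape the Azure and S3 uploaders look up per file in the optional `.metadata.json`, keyed by relative path (CacheControl/ContentType map onto blob HTTP headers for Azure and onto the S3 upload params). A hypothetical instance of that metadata:

```ts
import { utils } from "uzdu";

// Made-up .metadata.json content; keys are relative paths, values are BlobObjects.
const metadata: Record<string, utils.BlobObject> = {
  "index.html": { key: "index.html", headers: { CacheControl: "no-cache", ContentType: "text/html" } },
  "css/site.css": { key: "css/site.css", headers: { CacheControl: "max-age=31536000", ContentType: "text/css" } }
};
```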
package/lib/uzdu.js
CHANGED
@@ -1,14 +1,23 @@
 #! /usr/bin/env node
 import {
-
-
+  azure_exports,
+  s3_exports,
+  ssh_exports
+} from "./chunk-NAE2ZQ34.js";
+import {
+  http_exports
+} from "./chunk-SO2QIMWR.js";
+import {
+  outputConfiguration,
+  utils_exports
+} from "./chunk-HPBHRAFV.js";
 
 // src/uzdu.ts
 import { Command } from "commander";
 var version;
 var description;
 try {
-  version = "1.0.14";
+  version = "1.0.15";
   description = "UZDU - universal zipper, downloader and uploader. Move files to/from zip, clouds (AWS, Azure), to HTTP PUT (e.g. Nexus) and to SSH";
 } catch (e) {
   if (e instanceof ReferenceError) {
@@ -33,5 +42,9 @@ main().catch((e) => {
   process.exit(20);
 });
 export {
-
+  azure_exports as azure,
+  http_exports as http,
+  s3_exports as s3,
+  ssh_exports as ssh,
+  utils_exports as utils
 };
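With these re-exports, `lib/uzdu.js` now exposes the programmatic namespaces alongside the CLI. A minimal sketch, assuming the package root resolves to this module as declared in `uzdu.d.ts`:

```ts
import { http, utils } from "uzdu";

// Zip a directory, then PUT the archive over HTTP (URL is hypothetical).
const zipPath = await utils.makeZip("./dist", "./dist.zip");
await http.upload(zipPath, new URL("https://nexus.example.com/repository/raw/app/"));
```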
package/package.json
CHANGED