uzdu 1.0.14 → 1.0.16

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,9 +1,14 @@
1
1
  # UZDU – Universal Zip archiver, Downloader and Uploader for Node.js/Deno/Bun
2
2
 
3
3
 
4
- [![Release](https://github.com/denis-kalinin/uzdu/actions/workflows/release.yml/badge.svg?event=workflow_dispatch)](https://github.com/denis-kalinin/uzdu/actions/workflows/release.yml)
4
+
5
+ [![NPM Version](https://img.shields.io/npm/v/uzdu)](https://www.npmjs.com/package/uzdu)
5
6
  [![NPM Downloads](https://img.shields.io/npm/dm/uzdu)](https://www.npmjs.com/package/uzdu)
7
+ [![Release](https://github.com/denis-kalinin/uzdu/actions/workflows/release.yml/badge.svg?event=workflow_dispatch)](https://github.com/denis-kalinin/uzdu/actions/workflows/release.yml)
8
+ ![GitHub commit activity](https://img.shields.io/github/commit-activity/y/denis-kalinin/uzdu)
9
+
6
10
 
11
+ ## For users
7
12
 
8
13
  UZDU is a convenient tool to deploy files to a web server. If you're a DevOps enthusiast and prefer to stay in your familiar <span>Node.js</span> environment, you can deploy using this tool.
9
14
 
@@ -22,18 +27,24 @@ Bun:
22
27
  bunx uzdu -h
23
28
  ```
24
29
 
25
- ## uploading
30
+ ### uploading
26
31
 
27
- - [Amazon S3](https://docs.aws.amazon.com/s3/) `npx uzdu up aws -h`
28
- - [Azure Blob Storage](https://azure.microsoft.com/en-us/products/storage/blobs) `npx uzdu up az -h`
29
- - [Nexus](https://support.sonatype.com/hc/en-us/articles/115006744008-Repository-How-can-I-programmatically-upload-files-into-Nexus-3#DirectUploadusingHTTPPUTtotheRepositoryPath) `npx uzdu up http -h`
30
- - SSH/SCP `npx uzdu up ssh -h`
32
+ - [Amazon S3](https://docs.aws.amazon.com/s3/) `npx uzdu upload aws -h`
33
+ - [Azure Blob Storage](https://azure.microsoft.com/en-us/products/storage/blobs) `npx uzdu upload az -h`
34
+ - [Nexus](https://support.sonatype.com/hc/en-us/articles/115006744008-Repository-How-can-I-programmatically-upload-files-into-Nexus-3#DirectUploadusingHTTPPUTtotheRepositoryPath) `npx uzdu upload http -h`
35
+ - SSH/SCP `npx uzdu upload ssh -h`
31
36
 
32
- ## downloading
37
+ ### downloading
33
38
 
34
- - http `npx uzdu down http -h`
39
+ - http `npx uzdu download http -h`
35
40
 
36
- ## working with zip-archives
41
+ ### working with zip-archives
37
42
 
38
43
  - zip `npx uzdu zip -h`
39
- - unzip `npx uzdu unzip -h`
44
+ - unzip `npx uzdu unzip -h`
45
+
46
+
47
+ ## For developers
48
+
49
+ 1. [Git flow guide](docs/git-flow.md)
50
+ 2. [Semantic release](docs/semantic-release.md)
@@ -1,8 +1,14 @@
1
1
  import {
2
+ __export,
2
3
  listFiles
3
- } from "./chunk-KZ5VMI45.js";
4
+ } from "./chunk-OIXJ4D3Z.js";
4
5
 
5
6
  // src/http.ts
7
+ var http_exports = {};
8
+ __export(http_exports, {
9
+ download: () => download,
10
+ upload: () => upload
11
+ });
6
12
  import fs from "fs";
7
13
  import path from "path";
8
14
  var isDebug = process.env.DEBUG?.toLowerCase() === "true";
@@ -12,7 +18,7 @@ async function upload(dirOrFile, url, headers) {
12
18
  const files = await listFiles(distr);
13
19
  if (isDebug) console.log(`files to upload: ${files.length}`);
14
20
  let singleUrl;
15
- if (files.length == 1) {
21
+ if (Object.keys(files).length == 1) {
16
22
  const lstat = fs.lstatSync(distr);
17
23
  if (lstat.isFile()) {
18
24
  distr = path.dirname(distr);
@@ -20,9 +26,9 @@ async function upload(dirOrFile, url, headers) {
20
26
  }
21
27
  }
22
28
  const fixedBaseUrl = url.href.endsWith("/") ? url : new URL(`${url.href}/`);
23
- await Promise.all(files.map(async (file) => {
29
+ await Promise.all(Object.entries(files).map(async ([file, absFile]) => {
24
30
  if (isDebug) console.log(`filename: ${file}`);
25
- const localFilePath = path.resolve(distr, file);
31
+ const localFilePath = absFile;
26
32
  const fileUrl = singleUrl || new URL(`${fixedBaseUrl}${file}`);
27
33
  if (isDebug) console.log(`uploading ${localFilePath} => ${fileUrl}`);
28
34
  await uploadFile(localFilePath, fileUrl, headers);
@@ -89,5 +95,6 @@ async function download(url, headers) {
89
95
 
90
96
  export {
91
97
  upload,
92
- download
98
+ download,
99
+ http_exports
93
100
  };
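The notable change in this chunk is that `upload` now consumes the `Record<relativePath, absolutePath>` shape returned by the reworked `listFiles` (see the next hunk): each relative key is joined onto the slash-terminated base URL while the file is read from the absolute value. A minimal sketch of that mapping, with hypothetical file names and host:

```js
// Sketch of the new URL mapping in upload(dirOrFile, url, headers);
// file names and host are hypothetical.
const files = {
  "index.html": "/home/ci/dist/index.html",
  "assets/app.js": "/home/ci/dist/assets/app.js"
};
const base = new URL("https://nexus.example.com/repository/site/"); // trailing "/" as fixedBaseUrl guarantees
for (const [file, absFile] of Object.entries(files)) {
  const fileUrl = new URL(`${base}${file}`);
  console.log(`uploading ${absFile} => ${fileUrl}`);
  // e.g. uploading /home/ci/dist/index.html => https://nexus.example.com/repository/site/index.html
}
```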
@@ -1,4 +1,26 @@
1
+ var __defProp = Object.defineProperty;
2
+ var __export = (target, all) => {
3
+ for (var name in all)
4
+ __defProp(target, name, { get: all[name], enumerable: true });
5
+ };
6
+
1
7
  // src/utils.ts
8
+ var utils_exports = {};
9
+ __export(utils_exports, {
10
+ addMetadata: () => addMetadata,
11
+ checkIsFile: () => checkIsFile,
12
+ doUnzip: () => doUnzip,
13
+ getEnvironment: () => getEnvironment,
14
+ initEnvironment: () => initEnvironment,
15
+ listBlobs: () => listBlobs,
16
+ listFiles: () => listFiles,
17
+ makeZip: () => makeZip,
18
+ outputConfiguration: () => outputConfiguration,
19
+ resolvePath: () => resolvePath,
20
+ safeIndex: () => safeIndex,
21
+ shouldBeDirectory: () => shouldBeDirectory,
22
+ shouldBeFile: () => shouldBeFile
23
+ });
2
24
  import fs from "fs";
3
25
  import os from "os";
4
26
  import JSZip from "jszip";
@@ -55,14 +77,7 @@ async function createBlobObject(filePath, rootDir, blobDir = "") {
55
77
  function getEnvironment(file = ".env") {
56
78
  const envFilePath = path.resolve(process.cwd(), file);
57
79
  const txt = fs.readFileSync(envFilePath, { encoding: "utf-8" });
58
- const lines = txt.split(/\r?\n/);
59
- const isComment = /^\s*#/;
60
- return lines.reduce((acc, line) => {
61
- if (isComment.test(line)) return acc;
62
- const [key, ...rest] = line.split("=");
63
- if (rest.length > 0) acc[key.trim()] = rest.join("=").trim();
64
- return acc;
65
- }, {});
80
+ return parseEnvironment(txt);
66
81
  }
67
82
  function initEnvironment(env) {
68
83
  Object.keys(env).forEach((key) => process.env[key] = env[key]);
@@ -71,22 +86,36 @@ async function listFiles(rootDir, _dir) {
71
86
  if (!_dir) {
72
87
  const lstat = fs.lstatSync(rootDir);
73
88
  if (lstat.isFile()) {
74
- return [path.basename(rootDir)];
89
+ return { [path.basename(rootDir)]: rootDir };
75
90
  }
76
91
  }
77
92
  const cwd = _dir || rootDir;
78
93
  const files = fs.readdirSync(cwd, { withFileTypes: true });
79
94
  const fileList = await files.reduce(async (acc, file) => {
80
95
  const filePath = path.join(cwd, file.name);
96
+ let newAcc = {};
81
97
  if (file.isDirectory()) {
82
98
  const filesInDir = await listFiles(rootDir, filePath);
83
- (await acc).push(...filesInDir);
99
+ newAcc = { ...await acc, ...filesInDir };
100
+ } else if (file.isSymbolicLink()) {
101
+ const symLinkAbsoluteTargetPath = path.resolve(cwd, fs.readlinkSync(filePath));
102
+ const lstat = fs.lstatSync(symLinkAbsoluteTargetPath);
103
+ if (lstat.isDirectory()) {
104
+ const filesInDir = await listFiles(rootDir, filePath);
105
+ newAcc = { ...await acc, ...filesInDir };
106
+ } else if (lstat.isFile()) {
107
+ newAcc = { ...await acc, [filePath]: filePath.split(path.sep).join(path.posix.sep) };
108
+ } else if (lstat.isSymbolicLink()) {
109
+ const filesInDir = await listFiles(rootDir, filePath);
110
+ newAcc = { ...await acc, ...filesInDir };
111
+ }
84
112
  } else {
85
113
  const relativeFilePath = path.relative(rootDir, filePath).split(path.sep).join(path.posix.sep);
86
- (await acc).push(relativeFilePath);
114
+ const absPath = path.resolve(rootDir, filePath).split(path.sep).join(path.posix.sep);
115
+ newAcc = { ...await acc, [relativeFilePath]: absPath };
87
116
  }
88
- return await acc;
89
- }, Promise.resolve([]));
117
+ return newAcc;
118
+ }, Promise.resolve({}));
90
119
  return fileList;
91
120
  }
92
121
  async function addMetadata(distributive, metadataFilename = ".metadata.json") {
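`listFiles` now resolves to a map from rootDir-relative POSIX path to absolute path instead of a flat array, and additionally follows symlinked files and directories. A sketch of the new shape, assuming the package's entry point re-exports the `utils` namespace as `lib/uzdu.d.ts` declares, and using a hypothetical `./dist` tree:

```js
import { utils } from "uzdu";

// Hypothetical directory: dist/index.html and dist/assets/app.js
const files = await utils.listFiles("./dist");
// 1.0.14 returned: ["index.html", "assets/app.js"]
// 1.0.16 returns:
// {
//   "index.html": "/home/ci/project/dist/index.html",
//   "assets/app.js": "/home/ci/project/dist/assets/app.js"
// }
```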
@@ -111,10 +140,9 @@ async function getMetadata(dir, metadataFile) {
111
140
  async function makeZip(fromDir, zipFilePath) {
112
141
  const zip = new JSZip();
113
142
  const files = await listFiles(fromDir);
114
- files.forEach((file) => {
115
- const filePath = path.join(fromDir, file);
116
- const readStream = fs.createReadStream(filePath);
117
- zip.file(file, readStream, { binary: true });
143
+ Object.entries(files).forEach(([basePath, absPath]) => {
144
+ const readStream = fs.createReadStream(absPath);
145
+ zip.file(basePath, readStream, { binary: true });
118
146
  });
119
147
  return new Promise((resolve, reject) => {
120
148
  zip.generateNodeStream({ type: "nodebuffer", streamFiles: true }).pipe(fs.createWriteStream(zipFilePath)).on("finish", function() {
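`makeZip` follows the same change: it iterates `Object.entries(files)`, keying each archive entry by the relative path and streaming from the absolute one, so the archive mirrors the source layout. A usage sketch (paths hypothetical, entry point assumed as above):

```js
import { utils } from "uzdu";

// Zips every file under ./dist; entries are named by their dist-relative path.
const zipPath = await utils.makeZip("./dist", "./dist.zip"); // Promise<string> per the typings
console.log(`makeZip resolved with: ${zipPath}`);
```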
@@ -151,6 +179,12 @@ function checkIsFile(file) {
151
179
  const lstat = fs.lstatSync(absPath);
152
180
  return lstat.isFile();
153
181
  }
182
+ function shouldBeFile(file) {
183
+ if (!checkIsFile(file)) {
184
+ const absPath = path.resolve(file);
185
+ throw new Error(`${file} is a directory, SHOULD be a file. Check [${absPath}]`);
186
+ }
187
+ }
154
188
  function shouldBeDirectory(directory) {
155
189
  const lstat = fs.lstatSync(directory);
156
190
  if (!lstat.isDirectory) {
@@ -176,8 +210,29 @@ function resolvePath(filePath) {
176
210
  }
177
211
  return path.resolve(process.cwd(), filePath);
178
212
  }
213
+ var LINE = /(?:^|^)\s*(?:export\s+)?([\w.-]+)(?:\s*=\s*?|:\s+?)(\s*'(?:\\'|[^'])*'|\s*"(?:\\"|[^"])*"|\s*`(?:\\`|[^`])*`|[^#\r\n]+)?\s*(?:#.*)?(?:$|$)/mg;
214
+ function parseEnvironment(src) {
215
+ const obj = {};
216
+ let lines = src.toString();
217
+ lines = lines.replace(/\r\n?/mg, "\n");
218
+ let match;
219
+ while ((match = LINE.exec(lines)) != null) {
220
+ const key = match[1];
221
+ let value = match[2] || "";
222
+ value = value.trim();
223
+ const maybeQuote = value[0];
224
+ value = value.replace(/^(['"`])([\s\S]*)\1$/mg, "$2");
225
+ if (maybeQuote === '"') {
226
+ value = value.replace(/\\n/g, "\n");
227
+ value = value.replace(/\\r/g, "\r");
228
+ }
229
+ obj[key] = value;
230
+ }
231
+ return obj;
232
+ }
179
233
 
180
234
  export {
235
+ __export,
181
236
  getEnvironment,
182
237
  initEnvironment,
183
238
  listFiles,
@@ -187,6 +242,6 @@ export {
187
242
  checkIsFile,
188
243
  shouldBeDirectory,
189
244
  outputConfiguration,
190
- safeIndex,
191
- resolvePath
245
+ resolvePath,
246
+ utils_exports
192
247
  };
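The hand-rolled `KEY=value` splitter in `getEnvironment` is replaced by a dotenv-style parser (`parseEnvironment`) that tolerates comments, `export ` prefixes and quoted values, and unescapes `\n`/`\r` inside double quotes. A sketch of the observable behaviour (file content hypothetical; entry point assumed as above):

```js
import { utils } from "uzdu";

// Given a .env file containing:
//   # deployment credentials
//   export UZDU_SSH_PASSWORD="s3cr3t"
//   ENDPOINT=https://example.com   # inline comment
const env = utils.getEnvironment(".env");
// => { UZDU_SSH_PASSWORD: "s3cr3t", ENDPOINT: "https://example.com" }

utils.initEnvironment(env); // copies every key onto process.env
```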
package/lib/chunk-T7RDOCPF.js ADDED
@@ -0,0 +1,379 @@
1
+ import {
2
+ __export,
3
+ getEnvironment,
4
+ initEnvironment,
5
+ listFiles,
6
+ resolvePath
7
+ } from "./chunk-OIXJ4D3Z.js";
8
+
9
+ // src/azure.ts
10
+ var azure_exports = {};
11
+ __export(azure_exports, {
12
+ default: () => upload
13
+ });
14
+ import { BlobServiceClient } from "@azure/storage-blob";
15
+ import path from "path";
16
+ import fs from "fs";
17
+ async function upload(dir, options, metadataFile = ".metadata.json") {
18
+ if (!options.connectionString) throw Error("Uploader needs connection string for Azure Blob Storage. Provide AZURE_STORAGE_CONNECTION_STRING environment variable!");
19
+ const opts = Object.assign({}, { container: "$web" }, options);
20
+ const blobServiceClient = BlobServiceClient.fromConnectionString(options.connectionString);
21
+ const isDebug = process.env.DEBUG && process.env.DEBUG.toLowerCase() === "true";
22
+ const containerClient = blobServiceClient.getContainerClient(opts.container);
23
+ let dist = path.resolve(process.cwd(), dir);
24
+ const files = await listFiles(dir);
25
+ let metadata;
26
+ try {
27
+ const metadataJson = fs.readFileSync(path.join(dir, metadataFile), { encoding: "utf-8" });
28
+ metadata = JSON.parse(metadataJson);
29
+ } catch (e) {
30
+ }
31
+ if (Object.keys(files).length == 1) {
32
+ const lstat = fs.lstatSync(dist);
33
+ if (lstat.isFile()) {
34
+ dist = path.dirname(dist);
35
+ }
36
+ }
37
+ await Promise.all(Object.entries(files).map(async ([file, absFile]) => {
38
+ let blobObj;
39
+ if (metadata) {
40
+ blobObj = metadata[file];
41
+ }
42
+ const blockBlobClient = containerClient.getBlockBlobClient(file);
43
+ const blobHTTPHeaders = {};
44
+ if (blobObj?.headers) {
45
+ const { CacheControl, ContentType } = blobObj.headers;
46
+ blobHTTPHeaders.blobCacheControl = CacheControl;
47
+ blobHTTPHeaders.blobContentType = ContentType;
48
+ }
49
+ const localFilePath = absFile;
50
+ await blockBlobClient.uploadFile(localFilePath, { blobHTTPHeaders });
51
+ }));
52
+ }
53
+
54
+ // src/s3.ts
55
+ var s3_exports = {};
56
+ __export(s3_exports, {
57
+ default: () => upload2
58
+ });
59
+ import { S3Client } from "@aws-sdk/client-s3";
60
+ import { Upload } from "@aws-sdk/lib-storage";
61
+ import fs2 from "fs";
62
+ import path2 from "path";
63
+ async function upload2(dir, s3config, metadataFile = ".metadata.json") {
64
+ if (!s3config.accessKeyId || !s3config.secretAccessKey) {
65
+ throw new Error("AWS credentials not found in environment variables AWS_KEY_ID and AWS_SECRET_KEY.");
66
+ }
67
+ if (!s3config.region) {
68
+ throw new Error('Neither "region" in the bucket address nor AWS_REGION environment variable was found.');
69
+ }
70
+ if (!s3config.bucket) {
71
+ throw new Error("Amazon S3 bucket name is required");
72
+ }
73
+ const { accessKeyId, secretAccessKey, region, endpoint } = s3config;
74
+ const client = new S3Client({
75
+ credentials: {
76
+ accessKeyId,
77
+ secretAccessKey
78
+ },
79
+ region,
80
+ endpoint
81
+ });
82
+ let dist = path2.resolve(process.cwd(), dir);
83
+ const files = await listFiles(dist);
84
+ let metadata;
85
+ try {
86
+ const metadataJson = fs2.readFileSync(path2.join(dir, metadataFile), { encoding: "utf-8" });
87
+ metadata = JSON.parse(metadataJson);
88
+ } catch (e) {
89
+ }
90
+ if (Object.keys(files).length == 1) {
91
+ const lstat = fs2.lstatSync(dist);
92
+ if (lstat.isFile()) {
93
+ dist = path2.dirname(dist);
94
+ }
95
+ }
96
+ await Promise.all(Object.entries(files).map(async ([file, absFile]) => {
97
+ const filePath = absFile;
98
+ const fileContent = fs2.readFileSync(filePath);
99
+ const params = {
100
+ Bucket: s3config.bucket,
101
+ Key: file,
102
+ Body: fileContent
103
+ };
104
+ if (metadata) {
105
+ const blobObj = metadata[file];
106
+ if (blobObj && blobObj.headers) {
107
+ const { CacheControl, ContentType } = blobObj.headers;
108
+ if (CacheControl) params.CacheControl = CacheControl;
109
+ if (ContentType) params.ContentType = ContentType;
110
+ }
111
+ }
112
+ return new Upload({
113
+ client,
114
+ params,
115
+ tags: [],
116
+ queueSize: 4,
117
+ // optional concurrency configuration
118
+ partSize: 1024 * 1024 * 5,
119
+ // optional size of each part, in bytes, at least 5MB
120
+ leavePartsOnError: false
121
+ // optional manually handle dropped parts
122
+ }).done();
123
+ }));
124
+ }
125
+
126
+ // src/ssh.ts
127
+ var ssh_exports = {};
128
+ __export(ssh_exports, {
129
+ getConnectConfig: () => getConnectConfig,
130
+ getCredentials: () => getCredentials,
131
+ getDirMap: () => getDirMap,
132
+ getMakeDirs: () => getMakeDirs,
133
+ getRemoteDestination: () => getRemoteDestination,
134
+ upload: () => upload3
135
+ });
136
+ import { Client } from "ssh2";
137
+ import fs3 from "fs";
138
+ import path3 from "path";
139
+ import deepmerge from "deepmerge";
140
+ async function upload3(source, sftpUrl, sshCredentials) {
141
+ await new Promise((resolve, reject) => {
142
+ fs3.stat(source, async (err, stats) => {
143
+ if (stats.isSymbolicLink()) {
144
+ reject(new Error(`${source} is symlink`));
145
+ } else {
146
+ let sshConnection;
147
+ try {
148
+ const _connectConfig = getConnectConfig(sftpUrl);
149
+ const _sshCredentials = sshCredentials || getCredentials();
150
+ const connectConfig = { ..._connectConfig, ..._sshCredentials };
151
+ sshConnection = await connect(connectConfig);
152
+ const files = await listFiles(source);
153
+ const destination = getRemoteDestination(sftpUrl);
154
+ const _source = source.replace(/\/+$/, "");
155
+ await mkdirs(sshConnection, destination, files);
156
+ await uploadFiles(files, _source, destination, sshConnection);
157
+ resolve();
158
+ } catch (e) {
159
+ console.error("SFTP error", e);
160
+ reject(e);
161
+ } finally {
162
+ sshConnection?.destroy();
163
+ }
164
+ }
165
+ });
166
+ });
167
+ }
168
+ async function mkdirs(sshConnection, destination, sources) {
169
+ const fileMap = getDirMap(sources);
170
+ const makeDirs = getMakeDirs(fileMap, destination);
171
+ const commands = makeDirs ? makeDirs.map((dir) => `mkdir -p "${dir}"`) : [`mkdir -p "${destination}"`];
172
+ const commandLine = commands.length > 1 ? commands.join(";") : commands[0];
173
+ await new Promise((res, rej) => {
174
+ sshConnection.exec(commandLine, {}, (err, channel) => {
175
+ if (err) {
176
+ console.error("mkdir error", err);
177
+ rej(new Error(`failed: mkdir -p ... : ${err}`));
178
+ } else {
179
+ channel.on("exit", (code, signal) => {
180
+ if (code != 0) rej(new Error(`Exit code: ${code} for "mkdir -p ..."`));
181
+ else res();
182
+ });
183
+ }
184
+ });
185
+ });
186
+ }
187
+ function _uploadFile(source, destination, sftp) {
188
+ return new Promise((resolve, reject) => {
189
+ sftp.stat(destination, async (err, stats) => {
190
+ if (err) {
191
+ sftp.fastPut(source, destination, {}, (err2) => {
192
+ if (err2) reject(err2);
193
+ else resolve();
194
+ });
195
+ } else if (stats.isFile()) {
196
+ sftp.fastPut(source, destination, {}, (err2) => {
197
+ if (err2) reject(err2);
198
+ else resolve();
199
+ });
200
+ } else if (stats.isDirectory()) {
201
+ const f = path3.basename(source);
202
+ reject(new Error(`Overwriting directory ${destination} with the file ${f} is not allowed. Remove the directory manually.`));
203
+ } else {
204
+ reject(new Error("Remote path is symlink"));
205
+ }
206
+ });
207
+ });
208
+ }
209
+ function uploadFiles(sourceFiles, source, destination, sshConnection) {
210
+ return new Promise((resolve, reject) => {
211
+ sshConnection.sftp(async (err, sftp) => {
212
+ if (err) {
213
+ console.error("uploadFiles error");
214
+ reject(err);
215
+ } else {
216
+ if (Object.keys(sourceFiles).length == 1) {
217
+ const lstat2 = fs3.lstatSync(source);
218
+ if (lstat2.isFile()) {
219
+ const dest = path3.join(destination, sourceFiles[0]).replace(/\\/g, "/");
220
+ const src = source;
221
+ await _uploadFile(src, dest, sftp).then(() => resolve()).catch((e) => {
222
+ console.error(src);
223
+ reject(e);
224
+ });
225
+ return;
226
+ }
227
+ }
228
+ let sourceDir = source;
229
+ const lstat = fs3.lstatSync(source);
230
+ if (lstat.isSymbolicLink()) {
231
+ sourceDir = fs3.readlinkSync(source);
232
+ }
233
+ const promises = [];
234
+ Object.entries(sourceFiles).map(([baseName, absPath]) => {
235
+ const dest = path3.join(destination, baseName).replace(/\\/g, "/");
236
+ const promise = new Promise((res, rej) => {
237
+ _uploadFile(absPath, dest, sftp).then(() => res()).catch((e) => {
238
+ console.error(absPath);
239
+ console.error(e);
240
+ rej(e);
241
+ });
242
+ });
243
+ promises.push(promise);
244
+ });
245
+ await Promise.all(promises);
246
+ resolve();
247
+ }
248
+ });
249
+ });
250
+ }
251
+ async function connect(sshConfig) {
252
+ const conn = new Client();
253
+ try {
254
+ return await new Promise((resolve, reject) => {
255
+ conn.on("error", (e) => {
256
+ reject(new Error(`Target host error: ${e}`));
257
+ }).on("ready", () => {
258
+ resolve(conn);
259
+ }).connect({
260
+ timeout: 5,
261
+ port: 22,
262
+ algorithms: {
263
+ cipher: [
264
+ "aes128-ctr",
265
+ "aes192-ctr",
266
+ "aes256-ctr",
267
+ "aes256-cbc",
268
+ "aes128-cbc"
269
+ //"aes128-gcm", ////"aes128-gcm@openssh.com", //"aes256-gcm", ////"aes256-gcm@openssh.com", ////"aes192-cbc",
270
+ ]
271
+ },
272
+ ...sshConfig
273
+ });
274
+ });
275
+ } catch (e) {
276
+ console.error("Connection failed", e);
277
+ conn.destroy();
278
+ throw e;
279
+ }
280
+ }
281
+ function getMakeDirs(fileMap, destination) {
282
+ const kv = Object.entries(fileMap);
283
+ const hasSubdirs = kv.some((keyVal) => !!keyVal[1]);
284
+ if (!hasSubdirs) return false;
285
+ const subdirs = kv.reduce((acc, curr) => {
286
+ if (curr[1]) {
287
+ const res = getMakeDirs(curr[1]);
288
+ const prefix = destination ? [destination, curr[0]].join("/") : curr[0];
289
+ if (res) {
290
+ const pathes = res.map((apath) => [prefix, apath].join("/"));
291
+ acc.push(...pathes);
292
+ } else acc.push(prefix);
293
+ }
294
+ return acc;
295
+ }, []);
296
+ return subdirs;
297
+ }
298
+ function getDirMap(files) {
299
+ let fileMap = {};
300
+ Object.keys(files).map((file) => {
301
+ const leaf = getFileMap(file);
302
+ fileMap = deepmerge(fileMap, leaf);
303
+ });
304
+ return fileMap;
305
+ }
306
+ function getFileMap(file) {
307
+ let theFile = file;
308
+ if (file.indexOf("/") == 0) theFile = file.substring(1);
309
+ const parts = theFile.split("/");
310
+ if (parts.length == 1) return { [parts[0]]: false };
311
+ else {
312
+ const aFile = path3.join(...parts.slice(1)).replace(/\\/g, "/");
313
+ const fileMapEntry = getFileMap(aFile);
314
+ return { [parts[0]]: fileMapEntry };
315
+ }
316
+ }
317
+ function getCredentials(options) {
318
+ if (options?.dotenv) {
319
+ const theEnv = getEnvironment(options.dotenv);
320
+ initEnvironment(theEnv);
321
+ }
322
+ const uzduKey = process.env.UZDU_SSH_KEY;
323
+ let password = void 0;
324
+ let privateKey = uzduKey;
325
+ if (!privateKey) {
326
+ const uzduKeyPath = options?.privateKeyPath || process.env.UZDU_SSH_KEY_PATH;
327
+ if (uzduKeyPath) {
328
+ const resolvedKeyPath = resolvePath(uzduKeyPath);
329
+ try {
330
+ privateKey = fs3.readFileSync(resolvedKeyPath);
331
+ } catch (e) {
332
+ throw new Error(`Not found private Key file ${resolvedKeyPath}`);
333
+ }
334
+ } else {
335
+ const uzduPassword = process.env.UZDU_SSH_PASSWORD;
336
+ if (!uzduPassword) throw new Error("Specify either --privateKeyPath or password in SFTP URL. Otherwise consider using --dotenv and one of environment variables: UZDU_SSH_KEY_PATH, UZDU_SSH_KEY, UZDU_SSH_PASSWORD");
337
+ password = uzduPassword;
338
+ }
339
+ }
340
+ const authConfig = password ? {
341
+ password
342
+ } : {
343
+ privateKey
344
+ };
345
+ return authConfig;
346
+ }
347
+ var sftpUrlRegex = /^sftp:\/\/(?:(?<username>[\w\.\-]{1,32})(?::(?<password>.+))?@)?(?:(?<host>[\w\.\-]+)|\[(?<ipv6>[\d:]+)\])(?::(?<port>\d{1,5}))?\/(?<path>.*)$/g;
348
+ function getConnectConfig(sftpUrl) {
349
+ sftpUrlRegex.lastIndex = 0;
350
+ const execArray = sftpUrlRegex.exec(sftpUrl);
351
+ const { groups } = execArray ?? {};
352
+ const host = groups?.host || groups?.ipv6;
353
+ if (!host) throw new Error(`Wrong URL "${sftpUrl}": host or ivp6 is not specified`);
354
+ const username = groups?.username;
355
+ const password = groups?.password;
356
+ const _port = parseInt(groups.port);
357
+ const port = isNaN(_port) ? void 0 : _port;
358
+ const connectConfig = { username, password, host, port };
359
+ return connectConfig;
360
+ }
361
+ function getRemoteDestination(sftpUrl) {
362
+ sftpUrlRegex.lastIndex = 0;
363
+ const execArray = sftpUrlRegex.exec(sftpUrl);
364
+ if (!execArray) throw new Error("Wrong sftp URL");
365
+ if (!execArray.groups) throw new Error("Wrong URL: path is not specified");
366
+ const path4 = execArray.groups.path;
367
+ const destination = path4.replace(/\/+$/, "").replace(/^~/, ".");
368
+ return destination;
369
+ }
370
+
371
+ export {
372
+ upload,
373
+ azure_exports,
374
+ upload2,
375
+ s3_exports,
376
+ upload3,
377
+ getCredentials,
378
+ ssh_exports
379
+ };
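This new chunk moves the Azure, S3 and SSH uploaders out of `uzdu-upload.js` and gives the SSH path an `sftp://` URL interface. A sketch of the programmatic surface declared in `lib/uzdu.d.ts` (host, port and paths hypothetical; the bare `uzdu` import assumes the package entry point is `lib/uzdu.js`):

```js
import { ssh } from "uzdu";

const sftpUrl = "sftp://deploy@example.com:2222/var/www/site";

// Connection settings and the remote path both come from the URL:
ssh.getConnectConfig(sftpUrl);     // { username: "deploy", host: "example.com", port: 2222, ... }
ssh.getRemoteDestination(sftpUrl); // "var/www/site"

// With the third argument omitted, credentials are resolved from
// UZDU_SSH_KEY, UZDU_SSH_KEY_PATH or UZDU_SSH_PASSWORD.
await ssh.upload("./dist", sftpUrl);
```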
package/lib/uzdu-copy.js CHANGED
@@ -2,7 +2,7 @@
2
2
  import {
3
3
  checkIsFile,
4
4
  outputConfiguration
5
- } from "./chunk-KZ5VMI45.js";
5
+ } from "./chunk-OIXJ4D3Z.js";
6
6
 
7
7
  // src/uzdu-copy.ts
8
8
  import { Command } from "commander";
package/lib/uzdu-download.js CHANGED
@@ -1,11 +1,11 @@
1
1
  import {
2
2
  download
3
- } from "./chunk-2WNHNCUV.js";
3
+ } from "./chunk-7B56UNA6.js";
4
4
  import {
5
5
  getEnvironment,
6
6
  initEnvironment,
7
7
  outputConfiguration
8
- } from "./chunk-KZ5VMI45.js";
8
+ } from "./chunk-OIXJ4D3Z.js";
9
9
 
10
10
  // src/uzdu-download.ts
11
11
  import { Command, Option } from "commander";
package/lib/uzdu-metadata.js CHANGED
@@ -2,7 +2,7 @@ import {
2
2
  addMetadata,
3
3
  outputConfiguration,
4
4
  shouldBeDirectory
5
- } from "./chunk-KZ5VMI45.js";
5
+ } from "./chunk-OIXJ4D3Z.js";
6
6
 
7
7
  // src/uzdu-metadata.ts
8
8
  import { Argument, Command } from "commander";
package/lib/uzdu-unzip.js CHANGED
@@ -2,7 +2,7 @@
2
2
  import {
3
3
  doUnzip,
4
4
  outputConfiguration
5
- } from "./chunk-KZ5VMI45.js";
5
+ } from "./chunk-OIXJ4D3Z.js";
6
6
 
7
7
  // src/uzdu-unzip.ts
8
8
  import { Command } from "commander";
package/lib/uzdu-upload.js CHANGED
@@ -1,306 +1,22 @@
1
+ import {
2
+ getCredentials,
3
+ upload as upload2,
4
+ upload2 as upload3,
5
+ upload3 as upload4
6
+ } from "./chunk-T7RDOCPF.js";
1
7
  import {
2
8
  upload
3
- } from "./chunk-2WNHNCUV.js";
9
+ } from "./chunk-7B56UNA6.js";
4
10
  import {
5
11
  getEnvironment,
6
12
  initEnvironment,
7
- listFiles,
8
13
  outputConfiguration,
9
14
  resolvePath,
10
- safeIndex,
11
15
  shouldBeDirectory
12
- } from "./chunk-KZ5VMI45.js";
16
+ } from "./chunk-OIXJ4D3Z.js";
13
17
 
14
18
  // src/uzdu-upload.ts
15
19
  import { Argument, Command, Option } from "commander";
16
-
17
- // src/azure.ts
18
- import { BlobServiceClient } from "@azure/storage-blob";
19
- import path from "path";
20
- import fs from "fs";
21
- async function upload2(dir, options, metadataFile = ".metadata.json") {
22
- if (!options.connectionString) throw Error("Uploader needs connection string for Azure Blob Storage. Provide AZURE_STORAGE_CONNECTION_STRING environment variable!");
23
- const opts = Object.assign({}, { container: "$web" }, options);
24
- const blobServiceClient = BlobServiceClient.fromConnectionString(options.connectionString);
25
- const isDebug = process.env.DEBUG && process.env.DEBUG.toLowerCase() === "true";
26
- const containerClient = blobServiceClient.getContainerClient(opts.container);
27
- let dist = path.resolve(process.cwd(), dir);
28
- const files = await listFiles(dir);
29
- let metadata;
30
- try {
31
- const metadataJson = fs.readFileSync(path.join(dir, metadataFile), { encoding: "utf-8" });
32
- metadata = JSON.parse(metadataJson);
33
- } catch (e) {
34
- }
35
- if (files.length == 1) {
36
- const lstat = fs.lstatSync(dist);
37
- if (lstat.isFile()) {
38
- dist = path.dirname(dist);
39
- }
40
- }
41
- await Promise.all(files.map(async (file) => {
42
- let blobObj;
43
- if (metadata) {
44
- blobObj = metadata[file];
45
- }
46
- const blockBlobClient = containerClient.getBlockBlobClient(file);
47
- const blobHTTPHeaders = {};
48
- if (blobObj?.headers) {
49
- const { CacheControl, ContentType } = blobObj.headers;
50
- blobHTTPHeaders.blobCacheControl = CacheControl;
51
- blobHTTPHeaders.blobContentType = ContentType;
52
- }
53
- const localFilePath = path.resolve(dist, file);
54
- await blockBlobClient.uploadFile(localFilePath, { blobHTTPHeaders });
55
- }));
56
- }
57
-
58
- // src/s3.ts
59
- import { S3Client } from "@aws-sdk/client-s3";
60
- import { Upload } from "@aws-sdk/lib-storage";
61
- import fs2 from "fs";
62
- import path2 from "path";
63
- async function upload3(dir, s3config, metadataFile = ".metadata.json") {
64
- if (!s3config.accessKeyId || !s3config.secretAccessKey) {
65
- throw new Error("AWS credentials not found in environment variables AWS_KEY_ID and AWS_SECRET_KEY.");
66
- }
67
- if (!s3config.region) {
68
- throw new Error('Neither "region" in the bucket address nor AWS_REGION environment variable was found.');
69
- }
70
- if (!s3config.bucket) {
71
- throw new Error("Amazon S3 bucket name is required");
72
- }
73
- const { accessKeyId, secretAccessKey, region, endpoint } = s3config;
74
- const client = new S3Client({
75
- credentials: {
76
- accessKeyId,
77
- secretAccessKey
78
- },
79
- region,
80
- endpoint
81
- });
82
- let dist = path2.resolve(process.cwd(), dir);
83
- const files = await listFiles(dist);
84
- let metadata;
85
- try {
86
- const metadataJson = fs2.readFileSync(path2.join(dir, metadataFile), { encoding: "utf-8" });
87
- metadata = JSON.parse(metadataJson);
88
- } catch (e) {
89
- }
90
- if (files.length == 1) {
91
- const lstat = fs2.lstatSync(dist);
92
- if (lstat.isFile()) {
93
- dist = path2.dirname(dist);
94
- }
95
- }
96
- await Promise.all(files.map(async (file) => {
97
- const filePath = path2.resolve(dist, file);
98
- const fileContent = fs2.readFileSync(filePath);
99
- const params = {
100
- Bucket: s3config.bucket,
101
- Key: file,
102
- Body: fileContent
103
- };
104
- if (metadata) {
105
- const blobObj = metadata[file];
106
- if (blobObj && blobObj.headers) {
107
- const { CacheControl, ContentType } = blobObj.headers;
108
- if (CacheControl) params.CacheControl = CacheControl;
109
- if (ContentType) params.ContentType = ContentType;
110
- }
111
- }
112
- return new Upload({
113
- client,
114
- params,
115
- tags: [],
116
- queueSize: 4,
117
- // optional concurrency configuration
118
- partSize: 1024 * 1024 * 5,
119
- // optional size of each part, in bytes, at least 5MB
120
- leavePartsOnError: false
121
- // optional manually handle dropped parts
122
- }).done();
123
- }));
124
- }
125
-
126
- // src/ssh.ts
127
- import { Client } from "ssh2";
128
- import fs3 from "fs";
129
- import path3 from "path";
130
- import deepmerge from "deepmerge";
131
- async function upload4(source, destination, sshConfig) {
132
- await new Promise((resolve, reject) => {
133
- fs3.stat(source, async (err, stats) => {
134
- if (stats.isSymbolicLink()) {
135
- reject(new Error(`${source} is symlink`));
136
- } else {
137
- const sshConnection = await connect(sshConfig);
138
- try {
139
- const files = await listFiles(source);
140
- const _destination = destination.replace(/\/+$/, "");
141
- const _source = source.replace(/\/+$/, "");
142
- await mkdirs(sshConnection, _destination, files);
143
- await uploadFiles(files, _source, _destination, sshConnection);
144
- resolve();
145
- } catch (e) {
146
- console.error("SFTP error", e);
147
- reject(e);
148
- } finally {
149
- sshConnection.destroy();
150
- }
151
- }
152
- });
153
- });
154
- }
155
- async function mkdirs(sshConnection, destination, sources) {
156
- const fileMap = getDirMap(sources);
157
- const makeDirs = getMakeDirs(fileMap, destination);
158
- const commands = makeDirs ? makeDirs.map((dir) => `mkdir -p "${dir}"`) : [`mkdir -p "${destination}"`];
159
- const commandLine = commands.length > 1 ? commands.join(";") : commands[0];
160
- await new Promise((res, rej) => {
161
- sshConnection.exec(commandLine, {}, (err, channel) => {
162
- if (err) {
163
- console.error("mkdir error", err);
164
- rej(new Error(`failed: mkdir -p ... : ${err}`));
165
- } else {
166
- channel.on("exit", (code, signal) => {
167
- if (code != 0) rej(new Error(`Exit code: ${code} for "mkdir -p ..."`));
168
- else res();
169
- });
170
- }
171
- });
172
- });
173
- }
174
- function _uploadFile(source, destination, sftp) {
175
- return new Promise((resolve, reject) => {
176
- sftp.stat(destination, async (err, stats) => {
177
- if (err) {
178
- sftp.fastPut(source, destination, {}, (err2) => {
179
- if (err2) reject(err2);
180
- else resolve();
181
- });
182
- } else if (stats.isFile()) {
183
- sftp.fastPut(source, destination, {}, (err2) => {
184
- if (err2) reject(err2);
185
- else resolve();
186
- });
187
- } else if (stats.isDirectory()) {
188
- const f = path3.basename(source);
189
- reject(new Error(`Overwriting directory ${destination} with the file ${f} is not allowed. Remove the directory manually.`));
190
- } else {
191
- reject(new Error("Remote path is symlink"));
192
- }
193
- });
194
- });
195
- }
196
- function uploadFiles(sources, source, destination, sshConnection) {
197
- return new Promise((resolve, reject) => {
198
- sshConnection.sftp(async (err, sftp) => {
199
- if (err) {
200
- console.error("uploadFiles error");
201
- reject(err);
202
- } else {
203
- if (sources.length == 1) {
204
- const lstat = fs3.lstatSync(source);
205
- if (lstat.isFile()) {
206
- const dest = path3.join(destination, sources[0]).replace(/\\/g, "/");
207
- const src = source;
208
- console.log(`Uploading file ${src} => ${dest}`);
209
- await _uploadFile(src, dest, sftp).then(() => resolve()).catch((e) => {
210
- console.error(src);
211
- reject(e);
212
- });
213
- return;
214
- }
215
- }
216
- const promises = [];
217
- sources.map((f) => {
218
- const dest = path3.join(destination, f).replace(/\\/g, "/");
219
- const src = path3.join(source, f).replace(/\\/g, "/");
220
- console.log(`Uploading ${src} => ${dest}`);
221
- const promise = new Promise((res, rej) => {
222
- _uploadFile(src, dest, sftp).then(() => res()).catch((e) => {
223
- console.error(src);
224
- rej(e);
225
- });
226
- });
227
- promises.push(promise);
228
- });
229
- await Promise.all(promises);
230
- resolve();
231
- }
232
- });
233
- });
234
- }
235
- async function connect(sshConfig) {
236
- const conn = new Client();
237
- try {
238
- return await new Promise((resolve, reject) => {
239
- conn.on("error", (e) => {
240
- reject(new Error(`Target host error: ${e}`));
241
- }).on("ready", () => {
242
- resolve(conn);
243
- }).connect({
244
- timeout: 99,
245
- port: 22,
246
- algorithms: {
247
- cipher: [
248
- "aes128-ctr",
249
- "aes192-ctr",
250
- "aes256-ctr",
251
- "aes256-cbc",
252
- "aes128-cbc"
253
- //"aes128-gcm", ////"aes128-gcm@openssh.com", //"aes256-gcm", ////"aes256-gcm@openssh.com", ////"aes192-cbc",
254
- ]
255
- },
256
- ...sshConfig
257
- });
258
- });
259
- } catch (e) {
260
- console.error("Connection failed", e);
261
- conn.destroy();
262
- throw e;
263
- }
264
- }
265
- function getMakeDirs(fileMap, destination) {
266
- const kv = Object.entries(fileMap);
267
- const hasSubdirs = kv.some((keyVal) => !!keyVal[1]);
268
- if (!hasSubdirs) return false;
269
- const subdirs = kv.reduce((acc, curr) => {
270
- if (curr[1]) {
271
- const res = getMakeDirs(curr[1]);
272
- const prefix = destination ? [destination, curr[0]].join("/") : curr[0];
273
- if (res) {
274
- const pathes = res.map((apath) => [prefix, apath].join("/"));
275
- acc.push(...pathes);
276
- } else acc.push(prefix);
277
- }
278
- return acc;
279
- }, []);
280
- return subdirs;
281
- }
282
- function getDirMap(files) {
283
- let fileMap = {};
284
- files.map((file) => {
285
- const leaf = getFileMap(file);
286
- fileMap = deepmerge(fileMap, leaf);
287
- });
288
- return fileMap;
289
- }
290
- function getFileMap(file) {
291
- let theFile = file;
292
- if (file.indexOf("/") == 0) theFile = file.substring(1);
293
- const parts = theFile.split("/");
294
- if (parts.length == 1) return { [parts[0]]: false };
295
- else {
296
- const aFile = path3.join(...parts.slice(1)).replace(/\\/g, "/");
297
- const fileMapEntry = getFileMap(aFile);
298
- return { [parts[0]]: fileMapEntry };
299
- }
300
- }
301
-
302
- // src/uzdu-upload.ts
303
- import fs4 from "fs";
304
20
  var command = new Command();
305
21
  command.description("Upload to Azure, AWS and HTTP server").name("uzdu upload");
306
22
  command.command("aws").description("upload to AWS S3").argument("<from>", "the directory to upload to the <bucket>").argument("<bucket>", 'the AWS S3 bucket[:region[:endpoint]], e.g. "mybucket", "mybucket:us-east-2" or "mybucket:my-region:https://my-s3-provider/endpoint". [:region] overrides S3_REGION environment variable. Expects environment variables S3_ACCESS_KEY_ID and S3_SECRET_ACCESS_KEY.').addOption(
@@ -368,44 +84,12 @@ command.command("azure").alias("az").description("upload to Azure Blob Storage")
368
84
  thisCommand.error(e.message || e, { exitCode: 43, code: "az.upload.error" });
369
85
  }
370
86
  });
371
- command.command("ssh").description("upload with SSH").argument("<source>", "source directory or file to upload <to_server>").argument("<destination>", "desitnation directory or file").option("--target <target>", "server hostname/ip-address and optional SSH-port, e.g. 10.100.0.1:22").addOption(
87
+ command.command("ssh").description("upload via SFTP. In addition to sftpURL consider using environment variables UZDU_SSH_KEY_PATH, UZDU_SSH_KEY, UZDU_SSH_PASSWORD").argument("<source>", "source directory or file to upload into <ssh_server>").argument("<sftpUrl>", "the URL format: sftp://[user[:password]@]host[:port]/path/to/file").addOption(
372
88
  new Option("-d|--dotenv [file]", 'load environment variables from a property file, i.e. a file with "key=value" lines.').preset(".env")
373
- ).addOption(new Option("--targetUsername [targetUsername]", "SSH username for <target>").default("root")).addOption(new Option("--targetKey [targetKey]", "SSH private key")).addOption(new Option("--targetPassword [targetPassword]", "SSH password")).action(async (source, destination, options, thisCommand) => {
89
+ ).addOption(new Option("--privateKeyPath [path to file]", "Path to SSH private key, fallback is UZDU_SSH_KEY_PATH environment variable. Also consider using UZDU_SSH_KEY to provide SSH private key content or UZDU_SSH_PASSWORD.")).action(async (source, sftpUrl, options, thisCommand) => {
374
90
  try {
375
- if (options.dotenv) {
376
- const theEnv = getEnvironment(options.dotenv);
377
- initEnvironment(theEnv);
378
- }
379
- const hostParts = options.target.split(":");
380
- const host = hostParts[0];
381
- const sPort = safeIndex(hostParts, 1) || 22;
382
- const port = Number(sPort);
383
- const conConfig = {
384
- host,
385
- port,
386
- username: options.targetUsername
387
- };
388
- const privKeyPath = options.targetKey;
389
- let password = void 0;
390
- let privateKey = void 0;
391
- if (privKeyPath) {
392
- const resolvedKeyPath = resolvePath(options.targetKey);
393
- try {
394
- privateKey = fs4.readFileSync(resolvedKeyPath);
395
- } catch (e) {
396
- throw new Error(`Not found private Key file ${resolvedKeyPath}`);
397
- }
398
- } else {
399
- if (!options.targetPassword) throw new Error("Either --targetPassword or --targetKey should be specified");
400
- password = options.targetPassword;
401
- }
402
- const authConfig = password ? {
403
- password
404
- } : {
405
- privateKey
406
- };
407
- const sshConfig = { ...conConfig, ...authConfig };
408
- await upload4(resolvePath(source), destination, sshConfig);
91
+ const sshCredentials = getCredentials(options);
92
+ await upload4(resolvePath(source), sftpUrl, sshCredentials);
409
93
  } catch (e) {
410
94
  console.error(e);
411
95
  thisCommand.error(e.message || e, { exitCode: 127, code: "ssh.upload.error" });
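The `ssh` subcommand drops `--target`, `--targetUsername`, `--targetKey` and `--targetPassword` in favour of the `sftp://` URL plus `--privateKeyPath`/`--dotenv`, delegating credential lookup to `getCredentials`. A sketch of that resolution order (paths hypothetical, entry point assumed as above):

```js
import { ssh } from "uzdu";

// Precedence: UZDU_SSH_KEY (key content) takes priority, then
// privateKeyPath / UZDU_SSH_KEY_PATH (key file), then UZDU_SSH_PASSWORD.
const credentials = ssh.getCredentials({
  dotenv: ".env",                     // optional: load KEY=value lines into process.env first
  privateKeyPath: "~/.ssh/id_ed25519" // "~" is expanded by resolvePath
});
// => { privateKey: <Buffer ...> } or { password: "..." }
```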
package/lib/uzdu-zip.js CHANGED
@@ -2,7 +2,7 @@
2
2
  import {
3
3
  makeZip,
4
4
  outputConfiguration
5
- } from "./chunk-KZ5VMI45.js";
5
+ } from "./chunk-OIXJ4D3Z.js";
6
6
 
7
7
  // src/uzdu-zip.ts
8
8
  import { Command } from "commander";
package/lib/uzdu.d.ts CHANGED
@@ -1,6 +1,192 @@
1
1
  #! /usr/bin/env node
2
- import { Command } from 'commander';
2
+ import { OutputConfiguration } from 'commander';
3
+ import { ConnectConfig } from 'ssh2';
3
4
 
4
- declare const program: Command;
5
+ interface BlobObject {
6
+ /**
7
+ * blob key
8
+ */
9
+ key: string;
10
+ headers?: {
11
+ CacheControl?: string;
12
+ ContentType?: string;
13
+ };
14
+ }
15
+ /**
16
+ *
17
+ * @param rootDir a local directory with files to be uploaded
18
+ * @param blobDir a storage directory (e.g. "a", "a/b") where files from rootDir
19
+ * will be uploaded. Default is "";
20
+ * @returns a list of {@link BlobObject}
21
+ */
22
+ declare function listBlobs(rootDir: string, blobDir?: string, _dir?: string): Promise<BlobObject[]>;
23
+ declare function getEnvironment(file?: string): Record<string, string>;
24
+ /**
25
+ * @param env a parameter structure to be added to {@link process.env}
26
+ */
27
+ declare function initEnvironment(env: {
28
+ [key: string]: string;
29
+ }): void;
30
+ /**
31
+ *
32
+ * @param rootDir
33
+ * @param _dir
34
+ * @returns map, where key is path relative to rootDir, value is absolute filepath
35
+ */
36
+ declare function listFiles(rootDir: string, _dir?: string): Promise<Record<string, string>>;
37
+ /**
38
+ * Add metadata about files in the `distributive` directory
39
+ * @param distributive path to the directory with files
40
+ * @param metadataFilename a name of the metadata file that will be added to the `distributive` directory, default is `.metadata.json`
41
+ * @returns the same `distributive` path as in the parameter
42
+ */
43
+ declare function addMetadata(distributive: string, metadataFilename?: string): Promise<string>;
44
+ /**
45
+ *
46
+ * @param fromDir
47
+ * @param zipFilePath
48
+ * @param metadata a json with metadata
49
+ * @returns
50
+ */
51
+ declare function makeZip(fromDir: string, zipFilePath: string): Promise<string>;
52
+ /**
53
+ *
54
+ * @param fromZip
55
+ * @param toDir
56
+ * @returns resolved toDir
57
+ */
58
+ declare function doUnzip(fromZip: string, toDir: string): Promise<string>;
59
+ declare function checkIsFile(file: string): boolean;
60
+ declare function shouldBeFile(file: string): void;
61
+ declare function shouldBeDirectory(directory: string): void;
62
+ declare const outputConfiguration: OutputConfiguration;
63
+ /**
64
+ * @param arr
65
+ * @param index
66
+ * @returns element[`index`] from array `arr` or undefined
67
+ */
68
+ declare function safeIndex<T>(arr: T[], index: number): T | undefined;
69
+ /**
70
+ * Resolves paths that start with a tilde to the user's home directory.
71
+ *
72
+ * @param filePath '~/GitHub/Repo/file.png'
73
+ * @return '/home/bob/GitHub/Repo/file.png'
74
+ */
75
+ declare function resolvePath(filePath: string): string;
5
76
 
6
- export { program };
77
+ type utils_BlobObject = BlobObject;
78
+ declare const utils_addMetadata: typeof addMetadata;
79
+ declare const utils_checkIsFile: typeof checkIsFile;
80
+ declare const utils_doUnzip: typeof doUnzip;
81
+ declare const utils_getEnvironment: typeof getEnvironment;
82
+ declare const utils_initEnvironment: typeof initEnvironment;
83
+ declare const utils_listBlobs: typeof listBlobs;
84
+ declare const utils_listFiles: typeof listFiles;
85
+ declare const utils_makeZip: typeof makeZip;
86
+ declare const utils_outputConfiguration: typeof outputConfiguration;
87
+ declare const utils_resolvePath: typeof resolvePath;
88
+ declare const utils_safeIndex: typeof safeIndex;
89
+ declare const utils_shouldBeDirectory: typeof shouldBeDirectory;
90
+ declare const utils_shouldBeFile: typeof shouldBeFile;
91
+ declare namespace utils {
92
+ export { type utils_BlobObject as BlobObject, utils_addMetadata as addMetadata, utils_checkIsFile as checkIsFile, utils_doUnzip as doUnzip, utils_getEnvironment as getEnvironment, utils_initEnvironment as initEnvironment, utils_listBlobs as listBlobs, utils_listFiles as listFiles, utils_makeZip as makeZip, utils_outputConfiguration as outputConfiguration, utils_resolvePath as resolvePath, utils_safeIndex as safeIndex, utils_shouldBeDirectory as shouldBeDirectory, utils_shouldBeFile as shouldBeFile };
93
+ }
94
+
95
+ declare function upload$3(dirOrFile: string, url: URL, headers?: string[]): Promise<void>;
96
+ declare function download(url: URL, headers?: string[]): Promise<Response>;
97
+
98
+ declare const http_download: typeof download;
99
+ declare namespace http {
100
+ export { http_download as download, upload$3 as upload };
101
+ }
102
+
103
+ interface S3Config {
104
+ accessKeyId: string;
105
+ secretAccessKey: string;
106
+ region?: string;
107
+ endpoint?: string;
108
+ bucket: string;
109
+ }
110
+ declare function upload$2(dir: string, s3config: S3Config, metadataFile?: string): Promise<void>;
111
+
112
+ type s3_S3Config = S3Config;
113
+ declare namespace s3 {
114
+ export { type s3_S3Config as S3Config, upload$2 as default };
115
+ }
116
+
117
+ interface AzureStorageOptions {
118
+ connectionString: string;
119
+ container?: string;
120
+ }
121
+ declare function upload$1(dir: string, options: AzureStorageOptions, metadataFile?: string): Promise<void>;
122
+
123
+ type azure_AzureStorageOptions = AzureStorageOptions;
124
+ declare namespace azure {
125
+ export { type azure_AzureStorageOptions as AzureStorageOptions, upload$1 as default };
126
+ }
127
+
128
+ type SshCredentials = {
129
+ password: string;
130
+ privateKey?: undefined;
131
+ } | {
132
+ password?: undefined;
133
+ privateKey: Buffer | string;
134
+ };
135
+ declare function upload(source: string, sftpUrl: string, sshCredentials?: SshCredentials): Promise<void>;
136
+ type FileMapEntry = {
137
+ [key: string]: false | FileMapEntry;
138
+ };
139
+ declare function getMakeDirs(fileMap: FileMapEntry, destination?: string): false | string[];
140
+ declare function getDirMap(files: Record<string, string>): FileMapEntry;
141
+ /**
142
+ *
143
+ * @param options
144
+ * @returns
145
+ */
146
+ declare function getCredentials(options?: {
147
+ privateKeyPath?: string;
148
+ dotenv?: string;
149
+ }): {
150
+ password: string;
151
+ privateKey?: undefined;
152
+ } | {
153
+ privateKey: Buffer | string;
154
+ password?: undefined;
155
+ };
156
+ /**
157
+ * Get SSH ConnectionConfig from sftp URL.
158
+ * The general format for the URL is:
159
+ * `sftp://[user[:password]@]host[:port]/path/to/file`
160
+ *
161
+ * Examples:
162
+ * ```
163
+ * sftp://ubuntu:pa55w0rd@example.com/opt/file
164
+ * sftp://root@[2001:db8::5]:222/opt/file
165
+ * sftp://203.0.113.5/opt/file
166
+ * ```
167
+ * @param sftpUrl
168
+ *
169
+ * @throws Wrong URL: host or ivp6 is not specified
170
+ */
171
+ declare function getConnectConfig(sftpUrl: string): ConnectConfig;
172
+ /**
173
+ *
174
+ * @param sftpUrl
175
+ * @returns path on remote ssh server
176
+ * @throws Wrong sfpt URL
177
+ * @throws Wrong URL: path is not specified
178
+ */
179
+ declare function getRemoteDestination(sftpUrl: string): string;
180
+
181
+ type ssh_SshCredentials = SshCredentials;
182
+ declare const ssh_getConnectConfig: typeof getConnectConfig;
183
+ declare const ssh_getCredentials: typeof getCredentials;
184
+ declare const ssh_getDirMap: typeof getDirMap;
185
+ declare const ssh_getMakeDirs: typeof getMakeDirs;
186
+ declare const ssh_getRemoteDestination: typeof getRemoteDestination;
187
+ declare const ssh_upload: typeof upload;
188
+ declare namespace ssh {
189
+ export { type ssh_SshCredentials as SshCredentials, ssh_getConnectConfig as getConnectConfig, ssh_getCredentials as getCredentials, ssh_getDirMap as getDirMap, ssh_getMakeDirs as getMakeDirs, ssh_getRemoteDestination as getRemoteDestination, ssh_upload as upload };
190
+ }
191
+
192
+ export { azure, http, s3, ssh, utils };
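The rewritten typings also expose the Azure and S3 uploaders with their option interfaces. A sketch of programmatic use (bucket, container and credentials hypothetical; note both namespaces export the uploader as `default`):

```js
import { azure, s3 } from "uzdu";

await s3.default("./dist", {
  accessKeyId: process.env.AWS_KEY_ID,         // per the error message in src/s3.ts
  secretAccessKey: process.env.AWS_SECRET_KEY,
  region: "us-east-2",
  bucket: "my-bucket"
});

await azure.default("./dist", {
  connectionString: process.env.AZURE_STORAGE_CONNECTION_STRING, // required
  container: "$web"                                              // the default container
});
```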
package/lib/uzdu.js CHANGED
@@ -1,14 +1,23 @@
1
1
  #! /usr/bin/env node
2
2
  import {
3
- outputConfiguration
4
- } from "./chunk-KZ5VMI45.js";
3
+ azure_exports,
4
+ s3_exports,
5
+ ssh_exports
6
+ } from "./chunk-T7RDOCPF.js";
7
+ import {
8
+ http_exports
9
+ } from "./chunk-7B56UNA6.js";
10
+ import {
11
+ outputConfiguration,
12
+ utils_exports
13
+ } from "./chunk-OIXJ4D3Z.js";
5
14
 
6
15
  // src/uzdu.ts
7
16
  import { Command } from "commander";
8
17
  var version;
9
18
  var description;
10
19
  try {
11
- version = "1.0.14";
20
+ version = "1.0.16";
12
21
  description = "UZDU - universal zipper, downloader and uploader. Move files to/from zip, clouds (AWS, Azure), to HTTP PUT (e.g. Nexus) and to SSH";
13
22
  } catch (e) {
14
23
  if (e instanceof ReferenceError) {
@@ -33,5 +42,9 @@ main().catch((e) => {
33
42
  process.exit(20);
34
43
  });
35
44
  export {
36
- program
45
+ azure_exports as azure,
46
+ http_exports as http,
47
+ s3_exports as s3,
48
+ ssh_exports as ssh,
49
+ utils_exports as utils
37
50
  };
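Net effect on the public surface: `lib/uzdu.js` no longer exports the commander `program`; it re-exports the module namespaces instead (it remains the CLI entry declared in `bin`, and `main()` still runs on import). A sketch, assuming the package resolves to this file:

```js
// 1.0.14: import { program } from "uzdu";
// 1.0.16:
import { azure, http, s3, ssh, utils } from "uzdu";

console.log(Object.keys(utils)); // addMetadata, listFiles, makeZip, doUnzip, getEnvironment, ...
```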
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "uzdu",
3
- "version": "1.0.14",
3
+ "version": "1.0.16",
4
4
  "description": "UZDU - universal zipper, downloader and uploader. Move files to/from zip, clouds (AWS, Azure), to HTTP PUT (e.g. Nexus) and to SSH",
5
5
  "bin": {
6
6
  "uzdu": "lib/uzdu.js"