@ps-aux/nodebup 0.12.0 → 0.13.0

@@ -23,22 +23,22 @@ let DirBackupController = (_dec = (0, _inversify.injectable)(), _dec2 = Reflect.

  _defineProperty(this, "log", (0, _logging.classObjLog)(this));

- _defineProperty(this, "backup", inp => {
+ _defineProperty(this, "backup", async inp => {
  const storage = this.storageProvider.provide();

  const path = _Path.AbsPath.from(inp.path);

  this.log.info(`Backing up from ${path} to '${storage}'`);
- storage.store(path);
+ await storage.store(path);
  });

- _defineProperty(this, "restore", inp => {
+ _defineProperty(this, "restore", async inp => {
  const storage = this.storageProvider.provide();

  const path = _Path.AbsPath.from(inp.path);

  this.log.info(`Restoring from '${storage}' to ${path}`);
- storage.restore(path);
+ await storage.restore(path);
  });
  }
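
DirBackupController.backup and DirBackupController.restore are now async and await the storage calls, so callers receive a Promise that resolves only after the store/restore has actually finished. A minimal sketch of the resulting call pattern, assuming an inversify container wired as in lib/cli/app.js further below (the variable names are illustrative, not part of the package's public surface):

    // Hypothetical caller - mirrors the `c.get(...)` pattern used in app.js below
    const controller = container.get(DirBackupController)
    await controller.backup({ path: '/data/my-app' })   // resolves after storage.store() completes
    await controller.restore({ path: '/data/my-app' })  // resolves after storage.restore() completes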
 
@@ -19,34 +19,28 @@ var _logging = require("../../log/logging");

  var _Fs = require("../../fs/Fs");

- var _dec, _dec2, _dec3, _class, _class2, _temp;
+ var _Postgres = require("../../tools/pg/Postgres");
+
+ var _ContextSymbols = require("../../ctx/ContextSymbols");
+
+ var _config = require("../../config");
+
+ var _dec, _dec2, _dec3, _dec4, _class, _class2, _temp;

  function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }

- const parseConnectionUrl = url => {
- var _url$match;
-
- // TODO what if there are query params in the end?
- const regex = /postgres:\/\/(?<username>.*):(?<password>.*)@(?<host>.*):(?<port>\d*)$/;
- const match = (_url$match = url.match(regex)) === null || _url$match === void 0 ? void 0 : _url$match.groups;
- if (!match || !match.username || !match.password || !match.host || !match.port) throw new Error(`The Postgres connection URL does not match required regex: ${regex.toString()}`);
- return {
- username: match.username,
- password: match.password,
- host: match.host,
- port: parseInt(match.port, 10)
- };
- };
-
- let PgBackupController = (_dec = (0, _inversify.injectable)(), _dec2 = Reflect.metadata("design:type", Function), _dec3 = Reflect.metadata("design:paramtypes", [typeof _Fs.Fs === "undefined" ? Object : _Fs.Fs, typeof _BackupStorageProvider.BackupStorageProvider === "undefined" ? Object : _BackupStorageProvider.BackupStorageProvider, typeof _Shell.Shell === "undefined" ? Object : _Shell.Shell, typeof _Zipper.Zipper === "undefined" ? Object : _Zipper.Zipper]), _dec(_class = _dec2(_class = _dec3(_class = (_temp = _class2 = class PgBackupController {
+ let PgBackupController = (_dec = (0, _inversify.injectable)(), _dec2 = function (target, key) {
+ return (0, _inversify.inject)(_ContextSymbols.AppConfig_)(target, undefined, 3);
+ }, _dec3 = Reflect.metadata("design:type", Function), _dec4 = Reflect.metadata("design:paramtypes", [typeof _Fs.Fs === "undefined" ? Object : _Fs.Fs, typeof _BackupStorageProvider.BackupStorageProvider === "undefined" ? Object : _BackupStorageProvider.BackupStorageProvider, typeof _Shell.Shell === "undefined" ? Object : _Shell.Shell, typeof _config.Config === "undefined" ? Object : _config.Config, typeof _Zipper.Zipper === "undefined" ? Object : _Zipper.Zipper]), _dec(_class = _dec2(_class = _dec3(_class = _dec4(_class = (_temp = _class2 = class PgBackupController {
  constructor( // private gpg: Gpg,
  // private ssh: SshKeyManager,
  fs, // private fsSyncer: FsSyncer,
  storageBackendProvider, // @inject(AppConfig_) private cfg: Config,
- sh, zip) {
+ sh, cfg, zip) {
  this.fs = fs;
  this.storageBackendProvider = storageBackendProvider;
  this.sh = sh;
+ this.cfg = cfg;
  this.zip = zip;

  _defineProperty(this, "log", (0, _logging.classObjLog)(this));
@@ -57,43 +51,24 @@ let PgBackupController = (_dec = (0, _inversify.injectable)(), _dec2 = Reflect.m
  return fields.join('-');
  });

- _defineProperty(this, "getVersion", version => version || PgBackupController.defaultPgVersion);
-
  _defineProperty(this, "backup", async ({
  pgUrl,
  pgBinDir,
  backupName
  }) => {
  const storage = this.storageBackendProvider.provide();
- const connParams = parseConnectionUrl(pgUrl); // TODO validate url
-
- this.log.info(`Backing up Postgres database @${connParams.host}`);
-
- const dir = _Path.AbsPath.from(`/var/backup/bup/${backupName}/postgres`); // const dir = AbsPath.from('/var/foo')
+ const pg = new _Postgres.Postgres(this.sh, this.fs, {
+ binDir: pgBinDir
+ }); // TODO make the path configurable/overridable

+ const dir = _Path.AbsPath.from(`/tmp/backup/bup/${backupName}/postgres`);

  await this.fs.inNewDir(dir, async () => {
- // const outputDir = AbsPath.from(dir)
  this.log.info('Processing Postgres backup'); // Don't forget that this itself might be run in docker
  // therefore volume mounts are not usable (will apply to the location at host)

  const file = PgBackupController.dumpFileName;
- this.log.info(`Dumping data into ${file}`);
- let cmd = `pg_dumpall -d ${pgUrl} `;
-
- if (pgBinDir) {
- cmd = this.addPgBinDir(cmd, pgBinDir);
- }
-
- await this.sh.asyncExec(`${cmd} > ${file}`, {
- env: {
- PGPASSWORD: connParams.password
- },
- cwd: dir.str()
- }, {
- stdout: o => this.log.trace(o),
- stderr: o => this.log.error(o)
- });
+ await pg.dumpAllDbs(pgUrl, dir.resolve(file));
  this.log.info(`Compressing ${file}`);
  await this.sh.asyncExec(`gzip -v ${file}`, {
  cwd: dir.str()
@@ -102,80 +77,40 @@ let PgBackupController = (_dec = (0, _inversify.injectable)(), _dec2 = Reflect.m
  stderr: o => this.log.info('gzip: ' + o)
  });
  this.log.info('Uploading');
- storage.store(dir);
+ await storage.store(dir);
  });
  });

- _defineProperty(this, "dumpFromDockerCmd", (pass, pgUrl, version) => {
- const bashCmds = [`echo "*:*:*:*:${pass}" > ~/.pgpass`, `chmod 400 ~/.pgpass`, `pg_dumpall -d ${pgUrl}`]; // Restore docker command
- // this.sh.exec(
- // `gzip --decompress --stdout ${zipFile.str()} | docker run --network host -i ` +
- // `-e PGPASSWORD=${con.password} ` +
- // `postgres:${version} ` +
- // `psql ${connectionArgs} -v ON_ERROR_STOP=0`
- // )
- // TODO consider pulling the docker images first so the Docke daremon info messages about container pulling are not logged as errors
-
- return `docker run --rm --network host postgres:${version} ` + `bash -c '${bashCmds.join(' && ')}'`;
- });
-
  _defineProperty(this, "restore", async ({
  pgUrl,
  pgBinDir
  }) => {
- // const version = this.getVersion(pgVersion)
- const storage = this.storageBackendProvider.provide(); // To validate
-
- const con = parseConnectionUrl(pgUrl);
+ const storage = this.storageBackendProvider.provide();
+ const pg = new _Postgres.Postgres(this.sh, this.fs, {
+ binDir: pgBinDir
+ });
  this.log.info(`Restoring Postgres database`);
- await this.fs.inTmpDir('pg-restore', async dirStr => {
- const dir = _Path.AbsPath.from(dirStr);
-
- storage.restore(dir); // TODO check if the dir contains the with the expected name
+ await this.fs.inTmpDir('pg-restore', async dir => {
+ await storage.restore(dir); // TODO check if the dir contains the with the expected name
  // TODO add e2e test for docker

  this.log.info('Backup dir restored');
  const file = PgBackupController.dumpFileName;
- const zipFile = file + '.gz';
- const zipPath = this.fs.listFiles(dir).find(n => n.basename() === zipFile);
- if (!zipPath) throw new Error(`Expected to find file ${zipFile} in the restore data`);
+ const zipFileName = file + '.gz';
+ const zipPath = this.fs.listFiles(dir).find(n => n.basename() === zipFileName);
+ if (!zipPath) throw new Error(`Expected to find file ${zipFileName} in the restore data`);
+ const zipFile = zipPath.str();
  this.log.info(`Decompressing ${zipFile}`);
- await this.sh.asyncExec(`gzip --decompress -v ${zipFile}`, {
- cwd: dir.str()
- }, {
+ await this.sh.asyncExec(`gzip --decompress -v ${zipFile}`, {}, {
  stdout: o => this.log.trace('gzip: ' + o),
  stderr: o => this.log.info('gzip: ' + o)
- }); // Database is set to 'postgres' so that also users which don't have db created can use db-less URL
- // Restoring user needs to have the admin access anyway
-
- const connectionArgs = `-h ${con.host} -p ${con.port} -U ${con.username} -d postgres`; // Don't forget that this itself might be run in docker
- // therefore volume mounts are not usable (will apply to the location at host)
+ }); // Remove the '.gz' suffix

- let cmd = `psql ${connectionArgs} -v ON_ERROR_STOP=0 < ${file}`;
-
- if (pgBinDir) {
- cmd = this.addPgBinDir(cmd, pgBinDir);
- }
-
- await this.sh.asyncExec(cmd, {
- cwd: dir.str(),
- env: {
- PGPASSWORD: con.password
- }
- }, {
- stdout: o => this.log.trace(o),
- stderr: o => this.log.error(o)
- });
- this.log.info(`Data successfully inserted into database @${con.host}`);
+ const unzippedFile = zipFile.substring(0, zipFile.length - 3);
+ await pg.restoreAllDbs(_Path.AbsPath.from(unzippedFile), pgUrl);
  });
  });
-
- _defineProperty(this, "addPgBinDir", (cmd, pgBinDir) => {
- this.log.info(`Using PG bin dir ${pgBinDir}`);
- this.fs.ensureIsDir(_Path.AbsPath.from(pgBinDir));
- return pgBinDir + '/' + cmd;
- });
  }

- }, _defineProperty(_class2, "defaultPgVersion", '14.2'), _defineProperty(_class2, "dumpFileName", 'pg-dump.sql'), _temp)) || _class) || _class) || _class);
+ }, _defineProperty(_class2, "dumpFileName", 'pg-dump.sql'), _temp)) || _class) || _class) || _class) || _class);
  exports.PgBackupController = PgBackupController;
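
Taken together, the PgBackupController changes replace the inline pg_dumpall/psql handling with the new Postgres helper (added later in this diff under tools/pg), so the backup flow reduces to roughly the following. This is a condensed sketch using only calls visible in the hunks above, not a literal excerpt; dir and pgUrl stand for the values computed there, and the asyncExec log handlers are omitted:

    const pg = new Postgres(this.sh, this.fs, { binDir: pgBinDir })
    const file = PgBackupController.dumpFileName                    // 'pg-dump.sql'
    await pg.dumpAllDbs(pgUrl, dir.resolve(file))                   // pg_dumpall > pg-dump.sql
    await this.sh.asyncExec(`gzip -v ${file}`, { cwd: dir.str() })  // compress the dump
    await storage.store(dir)                                        // upload via the configured backend
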
package/lib/cli/app.js CHANGED
@@ -21,6 +21,12 @@ var _DirBackupController = require("../bup/dir/DirBackupController");

  var _PgBackupController = require("../bup/pg/PgBackupController");

+ var _Postgres = require("../tools/pg/Postgres");
+
+ var _Shell = require("../tools/shell/Shell");
+
+ var _Fs = require("../fs/Fs");
+
  const storageNameOpt = {
  name: 'storage-name',
  convertCase: true,
@@ -45,6 +51,7 @@ const singleStorageOptions = [storageNameOpt, {
  }];
  const backupTagOption = {
  name: 'backup-tag',
+ fromConfig: 'backup.tag',
  convertCase: true
  };
  const backupOptions = [backupTagOption];
@@ -139,38 +146,68 @@ const restic = (0, _nclif.cmdGroup)({
  })
  }
  });
+ const pgBinOption = {
+ name: 'pg-bin-dir',
+ description: 'A directory with Postgres binaries (if the ones on the path should not be used)',
+ convertCase: true,
+ fromConfig: 'pg.bin-dir'
+ };
  const pg = (0, _nclif.cmdGroup)({
- description: 'Postgres backup commands',
- options: [...singleStorageOptions, {
- name: 'pg-url',
- description: 'Postgres URL',
- convertCase: true,
- fromConfig: 'pg.url',
- required: true
- }, {
- name: 'pg-bin-dir',
- description: 'A directory with Postgres binaries (if the ones on the path should not be used)',
- convertCase: true,
- fromConfig: 'pg.bin-dir'
- } // {
- // name: 'pg-version',
- // convertCase: true,
- // fromConfig: 'pg.version',
- // description: `Postgres version - default is ${PgBackupController.defaultPgVersion}`
- // }
- ],
+ options: [pgBinOption],
  commands: {
- backup: (0, _nclif.cmd)({
- options: [...backupOptions, {
- name: 'backup-name',
+ backup: (0, _nclif.cmdGroup)({
+ description: 'Postgres backup commands',
+ options: [...singleStorageOptions, {
+ name: 'pg-url',
+ description: 'Postgres URL',
  convertCase: true,
+ fromConfig: 'pg.url',
  required: true
- }],
- run: (inp, c) => c.get(_PgBackupController.PgBackupController).backup(inp)
+ } // {
+ // name: 'pg-version',
+ // convertCase: true,
+ // fromConfig: 'pg.version',
+ // description: `Postgres version - default is ${PgBackupController.defaultPgVersion}`
+ // }
+ ],
+ commands: {
+ create: (0, _nclif.cmd)({
+ options: [...backupOptions, {
+ name: 'backup-name',
+ convertCase: true,
+ fromConfig: 'backup.name',
+ required: true
+ }],
+ run: (inp, c) => c.get(_PgBackupController.PgBackupController).backup(inp)
+ }),
+ restore: (0, _nclif.cmd)({
+ options: restoreOptions,
+ run: (inp, c) => {
+ try {
+ return c.get(_PgBackupController.PgBackupController).restore(inp);
+ } catch (e) {
+ console.error(e);
+ }
+ }
+ })
+ }
  }),
- restore: (0, _nclif.cmd)({
- options: restoreOptions,
- run: (inp, c) => c.get(_PgBackupController.PgBackupController).restore(inp)
+ copy: (0, _nclif.cmd)({
+ positionals: [{
+ name: 'src-pg-url',
+ description: 'Source Postgres instance',
+ convertCase: true
+ }, {
+ name: 'dst-pg-url',
+ description: 'Target Postgres instance',
+ convertCase: true
+ }],
+ options: [],
+ run: async (inp, c) => {
+ await new _Postgres.Postgres(c.get(_Shell.Shell), c.get(_Fs.Fs), {
+ binDir: inp.pgBinDir
+ }).copyAllDbs(inp.srcPgUrl, inp.dstPgUrl);
+ }
  })
  }
  });
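
For CLI users this restructures the Postgres command tree: the old pg backup/pg restore commands move under a pg backup group as create and restore, pg-url and the storage options now live on that group, and a new pg copy command streams one instance straight into another. Assuming the package's binary is invoked as nodebup (the bin name is not shown in this diff) and that nclif maps the kebab-case option names declared above to --flags, usage looks roughly like:

    # Illustrative invocations only - verify flag names against the published CLI help
    nodebup pg backup create --storage-name my-store --pg-url postgres://admin:secret@localhost:5432 --backup-name app-db
    nodebup pg backup restore --storage-name my-store --pg-url postgres://admin:secret@localhost:5432
    nodebup pg copy postgres://admin:secret@src-host:5432 postgres://admin:secret@dst-host:5432 --pg-bin-dir /usr/lib/postgresql/14/bin
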
package/lib/fs/Fs.js CHANGED
@@ -21,6 +21,8 @@ var _logging = require("../log/logging");

  var _Path = require("./path/Path");

+ var _prettyBytes = _interopRequireDefault(require("pretty-bytes"));
+
  var _dec, _dec2, _dec3, _class;

  function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
@@ -47,6 +49,11 @@ let Fs = (_dec = (0, _inversify.injectable)(), _dec2 = Reflect.metadata("design:

  _defineProperty(this, "isFile", path => !this.isDir(path));

+ _defineProperty(this, "getSize", async path => {
+ const res = await _promises.default.stat(path.str());
+ return (0, _prettyBytes.default)(res.size);
+ });
+
  _defineProperty(this, "ensureIsFile", p => {
  if (!this.isFile(p)) throw new Error(`${p} is not a file`);
  });
@@ -69,18 +76,26 @@ let Fs = (_dec = (0, _inversify.injectable)(), _dec2 = Reflect.metadata("design:
  return _fs.default.readdirSync(path.str()).map(f => path.resolve(f));
  });

- _defineProperty(this, "inTmpDir", async (name, withDir) => {
- const dir = this.mkTmpDir(name);
+ _defineProperty(this, "inTmpDir", async (prefix, withDir, opts = {
+ delete: true
+ }) => {
+ let created = false;
+ const dir = this.mkTmpDir(prefix);
+ created = true;

  try {
- await withDir(dir);
+ await withDir(_Path.AbsPath.from(dir));
  } finally {
- // TODO handle if not exists?
- this.rmDir(_Path.AbsPath.from(dir));
+ if (created && (opts === null || opts === void 0 ? void 0 : opts.delete) === true) {
+ // TODO handle if not exists?
+ this.rmDir(_Path.AbsPath.from(dir));
+ }
  }
  });

- _defineProperty(this, "inNewDir", async (path, withDir) => {
+ _defineProperty(this, "inNewDir", async (path, withDir, opts = {
+ delete: true
+ }) => {
  let created = false;

  try {
@@ -88,7 +103,9 @@ let Fs = (_dec = (0, _inversify.injectable)(), _dec2 = Reflect.metadata("design:
  created = true;
  await withDir();
  } finally {
- if (created) this.rmDir(path);
+ if (created && (opts === null || opts === void 0 ? void 0 : opts.delete) === true) {
+ this.rmDir(path);
+ }
  }
  });
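
Two behavioural points in the Fs changes above are easy to miss: inTmpDir now hands its callback an AbsPath (previously a plain string), and both inTmpDir and inNewDir accept an options object whose delete flag (default true) controls whether the directory is removed afterwards; the new getSize returns a human-readable size string via pretty-bytes. A small sketch against these new signatures, assuming fs is an Fs instance obtained from the container:

    await fs.inTmpDir('scratch', async dir => {
      // dir is an AbsPath now, not a plain string
      console.log(fs.listFiles(dir).map(p => p.str()))
    }, { delete: false })  // opt out of the automatic cleanup, e.g. to inspect the dir afterwards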
 
@@ -12,19 +12,19 @@ class BackupStorage {
  this.backend = backend;
  this.props = props;

- _defineProperty(this, "store", from => {
+ _defineProperty(this, "store", async from => {
  const {
  tag
  } = this.props;
  const tags = tag ? [tag] : undefined;
- this.backend.backup(from, tags);
+ await this.backend.backup(from, tags);
  });

- _defineProperty(this, "restore", to => {
+ _defineProperty(this, "restore", async to => {
  const {
  snapshotId
  } = this.props;
- if (snapshotId) this.backend.restoreSnapshot(to, snapshotId);else this.backend.restoreLatest(to);
+ if (snapshotId) await this.backend.restoreSnapshot(to, snapshotId);else await this.backend.restoreLatest(to);
  });
  }

@@ -17,7 +17,7 @@ class RcloneBackupBackend {

  _defineProperty(this, "log", (0, _logging.classObjLog)(this));

- _defineProperty(this, "backup", (from, tags) => {
+ _defineProperty(this, "backup", async (from, tags) => {
  if (tags) throw new _nclif.InvalidInputError(`Rclone does not support tags`);
  this.log.info('Performing backup', {
  from
@@ -25,14 +25,14 @@ class RcloneBackupBackend {
  this.rclone.backup(from);
  });

- _defineProperty(this, "restoreLatest", to => {
+ _defineProperty(this, "restoreLatest", async to => {
  this.log.info('Restoring', {
  to
  });
  this.rclone.restore(to);
  });

- _defineProperty(this, "restoreSnapshot", (to, snapshotId) => {
+ _defineProperty(this, "restoreSnapshot", async (to, snapshotId) => {
  throw new _nclif.InvalidInputError(`Rclone does not support snaphosts`);
  });
  }
@@ -17,18 +17,18 @@ class ResticBackupBackend {

  _defineProperty(this, "log", (0, _logging.classObjLog)(this));

- _defineProperty(this, "backup", (from, tags = []) => {
+ _defineProperty(this, "backup", async (from, tags = []) => {
  this.log.info(`Performing backup from=${from}, tags=${tags}`);
- this.restic.backup(from, _Path.RelativePath.from('.'), tags);
+ await this.restic.backup(from, _Path.RelativePath.from('.'), tags);
  this.restic.forget();
  });

- _defineProperty(this, "restoreLatest", to => {
+ _defineProperty(this, "restoreLatest", async to => {
  this.log.info(`Restoring latest snapshot to=${to}`);
  this.restic.restore(to);
  });

- _defineProperty(this, "restoreSnapshot", (to, snapshotId) => {
+ _defineProperty(this, "restoreSnapshot", async (to, snapshotId) => {
  this.log.info(`Restoring snapshot '${snapshotId}'`, {
  to
  });
@@ -46,26 +46,30 @@ class ResticClient {
  });
  });

- _defineProperty(this, "backup", (cwd, from, tags = []) => {
- this.log.debug('Running backup for repo=%s', this.url);
+ _defineProperty(this, "backup", async (cwd, from, tags = []) => {
+ this.log.debug('Running backup for repo=%s', this.url); // Use -H to set hostname - TODO consider setting explicitly ?
+
  let cmd = `restic backup ${from.str()}`;
  tags.forEach(t => {
  cmd += ` --tag=${t}`;
  });
- this.shell.exec(cmd, {
+ await this.shell.asyncExec(cmd, {
  cwd: cwd.str(),
  env: this.env()
+ }, {
+ stdout: o => this.log.trace(o),
+ stderr: o => this.log.error(o)
  });
  });

- _defineProperty(this, "restore", (to, snapshotId = 'latest') => {
+ _defineProperty(this, "restore", async (to, snapshotId = 'latest') => {
  this.log.debug('Running restore for repo=%s', this.url);
  this.shell.exec(`restic restore ${snapshotId} --target ${to.str()}`, {
  env: this.env()
  });
  });

- _defineProperty(this, "forget", () => {
+ _defineProperty(this, "forget", async () => {
  this.log.debug('Pruning repo=%s', this.url);
  const cfg = {
  hourly: 6,
@@ -32,17 +32,16 @@ describe('ResticClient', () => {
  });
  it('create repo', async () => {
  await sut.prepareRepo();
- console.log('repo created');
  });
- it('push data & forget', () => {
+ it('push data & forget', async () => {
  const from = _Path.AbsPath.from(backupDir);

  const to = _Path.RelativePath.from('.');

- sut.backup(from, to);
- sut.backup(from, to, ['foo', 'bar']);
- sut.forget();
- sut.backup(from, to);
+ await sut.backup(from, to);
+ await sut.backup(from, to, ['foo', 'bar']);
+ await sut.forget();
+ await sut.backup(from, to);
  const res = sut.snapshots();
  expect(res).toBeArrayOfSize(3);
  const tagged = res[1];
@@ -77,10 +76,10 @@ describe('ResticClient', () => {
  afterAll(() => {
  (0, _testHelper.cleanDir)(restoreDir);
  });
- it('push and restore data', () => {
+ it('push and restore data', async () => {
  // sut.prepareRepo()
- sut.backup(_Path.AbsPath.from(backupDir), _Path.RelativePath.from('.'));
- sut.restore(_Path.AbsPath.from(restoreDir));
+ await sut.backup(_Path.AbsPath.from(backupDir), _Path.RelativePath.from('.'));
+ await sut.restore(_Path.AbsPath.from(restoreDir));
  const res = (0, _dirCompare.compareSync)(backupDir, restoreDir);
  expect(res.same).toBeTrue();
  });
@@ -0,0 +1,115 @@
+ "use strict";
+
+ Object.defineProperty(exports, "__esModule", {
+ value: true
+ });
+ exports.parseConnectionUrl = exports.Postgres = void 0;
+
+ var _logging = require("../../log/logging");
+
+ function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
+
+ class Postgres {
+ constructor(sh, fs, opts) {
+ this.sh = sh;
+ this.fs = fs;
+ this.opts = opts;
+
+ _defineProperty(this, "log", (0, _logging.classObjLog)(this));
+
+ _defineProperty(this, "dumpAllDbs", async (srcPgUrl, dstFilePath) => {
+ const dstFile = dstFilePath.str();
+ this.log.info(`Dumping data into ${dstFile}`);
+ await this.runShellCmd(`${this.dumpAllDbCmd(srcPgUrl)} > ${dstFile}`);
+ const size = await this.fs.getSize(dstFilePath);
+ this.log.trace(`All data dumped into ${dstFile}, size=${size}`);
+ });
+
+ _defineProperty(this, "dumpAllDbCmd", pgUrl => {
+ const con = parseConnectionUrl(pgUrl);
+ const cmd = `PGPASSWORD=${con.password} ${this.bin('pg_dumpall')} -d ${pgUrl} `;
+ return cmd;
+ });
+
+ _defineProperty(this, "restoreAllDbs", async (srcFilePath, dstPgUrl) => {
+ const con = parseConnectionUrl(dstPgUrl);
+ this.log.info(`Restoring data from ${srcFilePath.str()}`);
+ const srcFile = srcFilePath.str();
+ await this.runShellCmd(`${this.restoreAllDbCmd(con)} < ${srcFile}`);
+ this.log.info(`Data successfully inserted into database @${con.host}`);
+ });
+
+ _defineProperty(this, "runShellCmd", async cmd => {
+ await this.sh.asyncExec(cmd, {}, {
+ stdout: o => this.log.trace(o),
+ stderr: o => this.log.error(o)
+ });
+ });
+
+ _defineProperty(this, "restoreAllDbCmd", con => {
+ // Database is set to 'postgres' so that also users which don't have db created can use db-less URL
+ // Restoring user needs to have the admin access anyway - TODO what if default db is not called postgres
+ const database = 'postgres';
+ const connectionArgs = `-h ${con.host} -p ${con.port} -U ${con.username} -d ${database}`;
+ const cmd = ` PGPASSWORD=${con.password} ${this.bin('psql')} ${connectionArgs} -v ON_ERROR_STOP=1`;
+ return cmd;
+ });
+
+ _defineProperty(this, "copyAllDbs", async (srcPgUrl, dstPgUrl) => {
+ await this.runShellCmd(`${this.dumpAllDbCmd(srcPgUrl)} | ${this.restoreAllDbCmd(parseConnectionUrl(dstPgUrl))}`); // Using intermediate file
+ // await this.fs.inTmpDir('foo', async dir => {
+ // const file = dir.resolve('dump.sql')
+ // await this.dumpAllDbs(srcPgUrl, file)
+ // await this.restoreAllDbs(file, dstPgUrl)
+ // })
+ });
+
+ _defineProperty(this, "bin", localBin => {
+ var _this$opts;
+
+ // TODO somehow incorporate this in transparent manner
+ // const version = '16.0'
+ // const dockerWrapper = `docker run --rm --network host postgres:${version} `
+ // TODO figure out how to make transparent execution in Docker image
+ const binDir = (_this$opts = this.opts) === null || _this$opts === void 0 ? void 0 : _this$opts.binDir;
+ if (!binDir) return localBin;
+ return binDir + '/' + localBin;
+ });
+
+ _defineProperty(this, "dumpFromDockerCmd", (pass, pgUrl, version) => {
+ const bashCmds = [`echo "*:*:*:*:${pass}" > ~/.pgpass`, `chmod 400 ~/.pgpass`, `pg_dumpall -d ${pgUrl}`]; // Restore docker command
+ // this.sh.exec(
+ // `gzip --decompress --stdout ${zipFile.str()} | docker run --network host -i ` +
+ // `-e PGPASSWORD=${con.password} ` +
+ // `postgres:${version} ` +
+ // `psql ${connectionArgs} -v ON_ERROR_STOP=0`
+ // )
+ // TODO consider pulling the docker images first so the Docke daremon info messages about container pulling are not logged as errors
+
+ return `docker run --rm --network host postgres:${version} ` + `bash -c '${bashCmds.join(' && ')}'`;
+ });
+ }
+
+ }
+
+ exports.Postgres = Postgres;
+
+ _defineProperty(Postgres, "defaultPgVersion", '14.2');
+
+ const parseConnectionUrl = url => {
+ var _url$match;
+
+ // TODO what if there are query params in the end?
+ const regex = /postgres:\/\/(?<username>.*):(?<password>.*)@(?<host>.*):(?<port>\d*)$/;
+ const match = (_url$match = url.match(regex)) === null || _url$match === void 0 ? void 0 : _url$match.groups;
+ if (!match || !match.username || !match.password || !match.host || !match.port) throw new Error(`The Postgres connection URL does not match required regex: ${regex.toString()}`);
+ return {
+ username: match.username,
+ password: match.password,
+ host: match.host,
+ port: parseInt(match.port, 10),
+ url
+ };
+ };
+
+ exports.parseConnectionUrl = parseConnectionUrl;
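
The new module (required elsewhere in this diff as ../tools/pg/Postgres, so presumably published at lib/tools/pg/Postgres.js) exports both the Postgres class and parseConnectionUrl. A minimal usage sketch, with the Shell and Fs collaborators assumed to come from the DI container as they do in app.js:

    // Illustrative only - the require path is inferred from the other hunks in this diff
    const { Postgres, parseConnectionUrl } = require('@ps-aux/nodebup/lib/tools/pg/Postgres')

    const con = parseConnectionUrl('postgres://admin:secret@localhost:5432')
    // -> { username: 'admin', password: 'secret', host: 'localhost', port: 5432, url: <the input url> }

    const pg = new Postgres(shell, fs, { binDir: '/usr/lib/postgresql/14/bin' })  // shell/fs: Shell and Fs instances
    await pg.dumpAllDbs('postgres://admin:secret@localhost:5432', dumpPath)       // dumpPath: an AbsPath for the dump file
    await pg.copyAllDbs('postgres://admin:secret@src:5432', 'postgres://admin:secret@dst:5432')
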
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@ps-aux/nodebup",
- "version": "0.12.0",
+ "version": "0.13.0",
  "description": "",
  "module": "lib/index.js",
  "main": "lib/index.js",
@@ -41,6 +41,7 @@
  "@babel/preset-env": "^7.16.7",
  "@babel/preset-typescript": "^7.16.7",
  "@ps-aux/cibs": "^0.6.4",
+ "@testcontainers/postgresql": "^10.7.2",
  "@types/jest": "^27.4.0",
  "@types/jest-when": "^2.7.4",
  "@types/node": "^17.0.6",
@@ -68,9 +69,11 @@
  "npm-check-updates": "^12.0.5",
  "pg": "^8.7.3",
  "prettier": "^2.5.1",
+ "testcontainers": "^10.7.2",
  "ts-jest": "^27.1.2",
  "ts-node": "^10.9.2",
- "typescript": "^4.5.4"
+ "typescript": "^4.5.4",
+ "vitest": "^1.3.1"
  },
  "lint-staged": {
  "./**/*.{js,ts,tsx}": [
@@ -90,6 +93,7 @@
  "jszip": "^3.10.1",
  "pino": "^7.11.0",
  "pino-pretty": "^9.1.1",
+ "pretty-bytes": "^5.6.0",
  "ramda": "^0.27.1",
  "reflect-metadata": "^0.1.13",
  "unzipper": "^0.10.11",