@alwaysmeticulous/downloading-helpers 2.40.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +15 -0
- package/README.md +3 -0
- package/dist/config/snippets.d.ts +4 -0
- package/dist/config/snippets.js +24 -0
- package/dist/file-downloads/download-file.d.ts +1 -0
- package/dist/file-downloads/download-file.js +21 -0
- package/dist/file-downloads/local-data.utils.d.ts +20 -0
- package/dist/file-downloads/local-data.utils.js +92 -0
- package/dist/file-downloads/sessions.d.ts +10 -0
- package/dist/file-downloads/sessions.js +41 -0
- package/dist/index.d.ts +6 -0
- package/dist/index.js +17 -0
- package/dist/scripts/replay-assets.d.ts +1 -0
- package/dist/scripts/replay-assets.js +59 -0
- package/dist/scripts/replays.d.ts +8 -0
- package/dist/scripts/replays.js +63 -0
- package/package.json +54 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
ISC License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2022, Meticulous Contributors
|
|
4
|
+
|
|
5
|
+
Permission to use, copy, modify, and/or distribute this software for any
|
|
6
|
+
purpose with or without fee is hereby granted, provided that the above
|
|
7
|
+
copyright notice and this permission notice appear in all copies.
|
|
8
|
+
|
|
9
|
+
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
|
|
10
|
+
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
|
|
11
|
+
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
|
|
12
|
+
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
|
|
13
|
+
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
|
|
14
|
+
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
|
|
15
|
+
PERFORMANCE OF THIS SOFTWARE.
|
package/dist/config/snippets.js
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.ConfigurationError = exports.getSnippetsBaseUrl = void 0;
|
|
4
|
+
const common_1 = require("@alwaysmeticulous/common");
|
|
5
|
+
const getSnippetsBaseUrl = () => {
|
|
6
|
+
const baseUrl = process.env["METICULOUS_SNIPPETS_BASE_URL"] || common_1.BASE_SNIPPETS_URL;
|
|
7
|
+
try {
|
|
8
|
+
return new URL(baseUrl).href;
|
|
9
|
+
}
|
|
10
|
+
catch (e) {
|
|
11
|
+
if (e instanceof TypeError) {
|
|
12
|
+
throw new ConfigurationError(`Invalid base snippets URL: ${baseUrl}`);
|
|
13
|
+
}
|
|
14
|
+
throw e;
|
|
15
|
+
}
|
|
16
|
+
};
|
|
17
|
+
exports.getSnippetsBaseUrl = getSnippetsBaseUrl;
|
|
18
|
+
class ConfigurationError extends Error {
|
|
19
|
+
constructor(message) {
|
|
20
|
+
super(message);
|
|
21
|
+
this.name = "ConfigurationError";
|
|
22
|
+
}
|
|
23
|
+
}
|
|
24
|
+
exports.ConfigurationError = ConfigurationError;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
/**
 * Downloads the file at `fileUrl` (HTTP GET) and writes it to `path`.
 * The returned promise resolves once the file has been fully written.
 */
export declare const downloadFile: (fileUrl: string, path: string) => Promise<void>;
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.downloadFile = void 0;
|
|
7
|
+
const fs_1 = require("fs");
|
|
8
|
+
const stream_1 = require("stream");
|
|
9
|
+
const util_1 = require("util");
|
|
10
|
+
const axios_1 = __importDefault(require("axios"));
|
|
11
|
+
const promisifiedFinished = (0, util_1.promisify)(stream_1.finished);
|
|
12
|
+
const downloadFile = async (fileUrl, path) => {
|
|
13
|
+
const writer = (0, fs_1.createWriteStream)(path);
|
|
14
|
+
return axios_1.default
|
|
15
|
+
.request({ method: "GET", url: fileUrl, responseType: "stream" })
|
|
16
|
+
.then(async (response) => {
|
|
17
|
+
response.data.pipe(writer);
|
|
18
|
+
return promisifiedFinished(writer);
|
|
19
|
+
});
|
|
20
|
+
};
|
|
21
|
+
exports.downloadFile = downloadFile;
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
/** Replaces every character outside [a-zA-Z0-9] with an underscore. */
export declare const sanitizeFilename: (filename: string) => string;
/** Releases a previously acquired lock. */
type ReleaseLock = () => Promise<void>;
export interface LoadOrDownloadJsonFileOptions<T> {
    filePath: string;
    downloadJson: () => Promise<T | null>;
    /**
     * For debug messages e.g. 'session' or 'session data'
     */
    dataDescription: string;
}
/**
 * Returns the JSON.parse'd contents of the file at the given path. If the file
 * doesn't exist yet then it downloads the object, writes it to the file, and returns it.
 *
 * Handles concurrent processes trying to download to the same file at the same time.
 */
export declare const getOrDownloadJsonFile: <T>({ filePath, downloadJson, dataDescription, }: LoadOrDownloadJsonFileOptions<T>) => Promise<T | null>;
/** Resolves to true when the file at the given path is accessible (exists). */
export declare const fileExists: (filePath: string) => Promise<boolean>;
/**
 * Acquires an advisory lock on the given directory, retrying/waiting while
 * another process holds it. Resolves to the function that releases the lock.
 */
export declare const waitToAcquireLockOnDirectory: (directoryPath: string) => Promise<ReleaseLock>;
export {};
|
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.waitToAcquireLockOnDirectory = exports.fileExists = exports.getOrDownloadJsonFile = exports.sanitizeFilename = void 0;
|
|
7
|
+
const promises_1 = require("fs/promises");
|
|
8
|
+
const path_1 = require("path");
|
|
9
|
+
const common_1 = require("@alwaysmeticulous/common");
|
|
10
|
+
const loglevel_1 = __importDefault(require("loglevel"));
|
|
11
|
+
const luxon_1 = require("luxon");
|
|
12
|
+
const proper_lockfile_1 = require("proper-lockfile");
|
|
13
|
+
const sanitizeFilename = (filename) => {
|
|
14
|
+
return filename.replace(/[^a-zA-Z0-9]/g, "_");
|
|
15
|
+
};
|
|
16
|
+
exports.sanitizeFilename = sanitizeFilename;
|
|
17
|
+
/**
|
|
18
|
+
* Returns the JSON.parse'd contents of the file at the given path. If the file
|
|
19
|
+
* doesn't exist yet then it downloads the object, writes it to the file, and returns it.
|
|
20
|
+
*
|
|
21
|
+
* Handles concurrent processes trying to download to the same file at the same time.
|
|
22
|
+
*/
|
|
23
|
+
const getOrDownloadJsonFile = async ({ filePath, downloadJson, dataDescription, }) => {
|
|
24
|
+
const logger = loglevel_1.default.getLogger(common_1.METICULOUS_LOGGER_NAME);
|
|
25
|
+
await (0, promises_1.mkdir)((0, path_1.dirname)(filePath), { recursive: true });
|
|
26
|
+
// We create a lock file so that if multiple processes try downloading at the same
|
|
27
|
+
// time they don't interfere with each other. The second process to run will
|
|
28
|
+
// wait for the first process to complete, and then return straight away because
|
|
29
|
+
// it'll notice the file already exists.
|
|
30
|
+
const releaseLock = await waitToAcquireLockOnFile(filePath);
|
|
31
|
+
try {
|
|
32
|
+
const existingData = await (0, promises_1.readFile)(filePath)
|
|
33
|
+
.then((data) => JSON.parse(data.toString("utf-8")))
|
|
34
|
+
.catch(() => null);
|
|
35
|
+
if (existingData) {
|
|
36
|
+
logger.debug(`Reading ${dataDescription} from local copy in ${filePath}`);
|
|
37
|
+
return existingData;
|
|
38
|
+
}
|
|
39
|
+
const downloadedData = await downloadJson();
|
|
40
|
+
if (downloadedData) {
|
|
41
|
+
await (0, promises_1.writeFile)(filePath, JSON.stringify(downloadedData, null, 2));
|
|
42
|
+
logger.debug(`Wrote ${dataDescription} to ${filePath}`);
|
|
43
|
+
}
|
|
44
|
+
return downloadedData;
|
|
45
|
+
}
|
|
46
|
+
finally {
|
|
47
|
+
await releaseLock();
|
|
48
|
+
}
|
|
49
|
+
};
|
|
50
|
+
exports.getOrDownloadJsonFile = getOrDownloadJsonFile;
|
|
51
|
+
const waitToAcquireLockOnFile = async (filePath) => {
|
|
52
|
+
// In many cases the file doesn't exist yet, and can't exist yet (need to download the data, and creating an
|
|
53
|
+
// empty file beforehand is risky if the process crashes, and a second process tries reading the empty file).
|
|
54
|
+
// However proper-lockfile requires us to pass a file or directory as the first arg. This path is just used
|
|
55
|
+
// to detect if the same process tries taking out multiple locks on the same file. It just needs to be calculated
|
|
56
|
+
// as something that's unique to the file, and gives the same path for a given file everytime. So we create our
|
|
57
|
+
// own lock-target directory for this purpose (directory not file since mkdir is guaranteed to be synchronous).
|
|
58
|
+
// The path needs to actually exist, since proper-lockfile resolves symlinks on it.
|
|
59
|
+
//
|
|
60
|
+
// Note: we don't delete the lock directory afterwards because doing so without creating race-conditions is tricky
|
|
61
|
+
const lockDirectory = `${filePath}.lock-target`;
|
|
62
|
+
await (0, promises_1.mkdir)(lockDirectory, { recursive: true });
|
|
63
|
+
const releaseLock = await (0, proper_lockfile_1.lock)(lockDirectory, {
|
|
64
|
+
retries: LOCK_RETRY_OPTIONS,
|
|
65
|
+
lockfilePath: `${filePath}.lock`,
|
|
66
|
+
});
|
|
67
|
+
return async () => {
|
|
68
|
+
await releaseLock();
|
|
69
|
+
};
|
|
70
|
+
};
|
|
71
|
+
const fileExists = (filePath) => (0, promises_1.access)(filePath)
|
|
72
|
+
.then(() => true)
|
|
73
|
+
.catch(() => false);
|
|
74
|
+
exports.fileExists = fileExists;
|
|
75
|
+
const waitToAcquireLockOnDirectory = (directoryPath) => {
|
|
76
|
+
return (0, proper_lockfile_1.lock)(directoryPath, {
|
|
77
|
+
retries: LOCK_RETRY_OPTIONS,
|
|
78
|
+
lockfilePath: (0, path_1.join)(directoryPath, "dir.lock"),
|
|
79
|
+
});
|
|
80
|
+
};
|
|
81
|
+
exports.waitToAcquireLockOnDirectory = waitToAcquireLockOnDirectory;
|
|
82
|
+
const LOCK_RETRY_OPTIONS = {
|
|
83
|
+
// We want to keep on retrying till we get the maxRetryTime, so we set retries, which is a maximum, to a high value
|
|
84
|
+
retries: 1000,
|
|
85
|
+
factor: 1.05,
|
|
86
|
+
minTimeout: 500,
|
|
87
|
+
maxTimeout: 2000,
|
|
88
|
+
// Wait a maximum of 120s for the other process to finish downloading and/or extracting
|
|
89
|
+
maxRetryTime: luxon_1.Duration.fromObject({ minutes: 2 }).as("milliseconds"),
|
|
90
|
+
// Randomize so processes are less likely to clash on their retries
|
|
91
|
+
randomize: true,
|
|
92
|
+
};
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import { SessionData } from "@alwaysmeticulous/api";
import { AxiosInstance } from "axios";
/**
 * Returns the recorded session with the given id, downloading it into the
 * local Meticulous data directory on first use. Exits the process when the
 * session cannot be retrieved.
 */
export declare const getOrFetchRecordedSession: (client: AxiosInstance, sessionId: string) => Promise<{
    fileName: string;
    data: any;
}>;
/**
 * Returns the recorded event data for the given session id, downloading and
 * caching it locally on first use. Exits the process when the data cannot be
 * retrieved.
 */
export declare const getOrFetchRecordedSessionData: (client: AxiosInstance, sessionId: string) => Promise<{
    fileName: string;
    data: SessionData;
}>;
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.getOrFetchRecordedSessionData = exports.getOrFetchRecordedSession = void 0;
|
|
7
|
+
const path_1 = require("path");
|
|
8
|
+
const client_1 = require("@alwaysmeticulous/client");
|
|
9
|
+
const common_1 = require("@alwaysmeticulous/common");
|
|
10
|
+
const loglevel_1 = __importDefault(require("loglevel"));
|
|
11
|
+
const local_data_utils_1 = require("./local-data.utils");
|
|
12
|
+
const getOrFetchRecordedSession = async (client, sessionId) => {
|
|
13
|
+
const logger = loglevel_1.default.getLogger(common_1.METICULOUS_LOGGER_NAME);
|
|
14
|
+
const sessionFile = (0, path_1.join)((0, common_1.getMeticulousLocalDataDir)(), "sessions", `${(0, local_data_utils_1.sanitizeFilename)(sessionId)}.json`);
|
|
15
|
+
const session = await (0, local_data_utils_1.getOrDownloadJsonFile)({
|
|
16
|
+
filePath: sessionFile,
|
|
17
|
+
dataDescription: "session",
|
|
18
|
+
downloadJson: () => (0, client_1.getRecordedSession)(client, sessionId),
|
|
19
|
+
});
|
|
20
|
+
if (!session) {
|
|
21
|
+
logger.error("Error: Could not retrieve session. Is the API token correct?");
|
|
22
|
+
process.exit(1);
|
|
23
|
+
}
|
|
24
|
+
return { fileName: sessionFile, data: session };
|
|
25
|
+
};
|
|
26
|
+
exports.getOrFetchRecordedSession = getOrFetchRecordedSession;
|
|
27
|
+
const getOrFetchRecordedSessionData = async (client, sessionId) => {
|
|
28
|
+
const logger = loglevel_1.default.getLogger(common_1.METICULOUS_LOGGER_NAME);
|
|
29
|
+
const sessionFile = (0, path_1.join)((0, common_1.getMeticulousLocalDataDir)(), "sessions", `${(0, local_data_utils_1.sanitizeFilename)(sessionId)}_data.json`);
|
|
30
|
+
const sessionData = await (0, local_data_utils_1.getOrDownloadJsonFile)({
|
|
31
|
+
filePath: sessionFile,
|
|
32
|
+
dataDescription: "session data",
|
|
33
|
+
downloadJson: () => (0, client_1.getRecordedSessionData)(client, sessionId),
|
|
34
|
+
});
|
|
35
|
+
if (!sessionData) {
|
|
36
|
+
logger.error("Error: Could not retrieve session data. This may be an invalid session");
|
|
37
|
+
process.exit(1);
|
|
38
|
+
}
|
|
39
|
+
return { fileName: sessionFile, data: sessionData };
|
|
40
|
+
};
|
|
41
|
+
exports.getOrFetchRecordedSessionData = getOrFetchRecordedSessionData;
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
export { sanitizeFilename } from "./file-downloads/local-data.utils";
|
|
2
|
+
export { getOrFetchReplay, getOrFetchReplayArchive } from "./scripts/replays";
|
|
3
|
+
export { getOrFetchRecordedSession, getOrFetchRecordedSessionData, } from "./file-downloads/sessions";
|
|
4
|
+
export { fetchAsset } from "./scripts/replay-assets";
|
|
5
|
+
export { downloadFile } from "./file-downloads/download-file";
|
|
6
|
+
export { getReplayDir } from "./scripts/replays";
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.getReplayDir = exports.downloadFile = exports.fetchAsset = exports.getOrFetchRecordedSessionData = exports.getOrFetchRecordedSession = exports.getOrFetchReplayArchive = exports.getOrFetchReplay = exports.sanitizeFilename = void 0;
|
|
4
|
+
var local_data_utils_1 = require("./file-downloads/local-data.utils");
|
|
5
|
+
Object.defineProperty(exports, "sanitizeFilename", { enumerable: true, get: function () { return local_data_utils_1.sanitizeFilename; } });
|
|
6
|
+
var replays_1 = require("./scripts/replays");
|
|
7
|
+
Object.defineProperty(exports, "getOrFetchReplay", { enumerable: true, get: function () { return replays_1.getOrFetchReplay; } });
|
|
8
|
+
Object.defineProperty(exports, "getOrFetchReplayArchive", { enumerable: true, get: function () { return replays_1.getOrFetchReplayArchive; } });
|
|
9
|
+
var sessions_1 = require("./file-downloads/sessions");
|
|
10
|
+
Object.defineProperty(exports, "getOrFetchRecordedSession", { enumerable: true, get: function () { return sessions_1.getOrFetchRecordedSession; } });
|
|
11
|
+
Object.defineProperty(exports, "getOrFetchRecordedSessionData", { enumerable: true, get: function () { return sessions_1.getOrFetchRecordedSessionData; } });
|
|
12
|
+
var replay_assets_1 = require("./scripts/replay-assets");
|
|
13
|
+
Object.defineProperty(exports, "fetchAsset", { enumerable: true, get: function () { return replay_assets_1.fetchAsset; } });
|
|
14
|
+
var download_file_1 = require("./file-downloads/download-file");
|
|
15
|
+
Object.defineProperty(exports, "downloadFile", { enumerable: true, get: function () { return download_file_1.downloadFile; } });
|
|
16
|
+
var replays_2 = require("./scripts/replays");
|
|
17
|
+
Object.defineProperty(exports, "getReplayDir", { enumerable: true, get: function () { return replays_2.getReplayDir; } });
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
/**
 * Fetches the snippet asset at the given path (resolved against the snippets
 * base URL), caching it in the local Meticulous assets directory, and returns
 * the cached file's path.
 */
export declare const fetchAsset: (path: string) => Promise<string>;
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.fetchAsset = void 0;
|
|
7
|
+
const promises_1 = require("fs/promises");
|
|
8
|
+
const path_1 = require("path");
|
|
9
|
+
const common_1 = require("@alwaysmeticulous/common");
|
|
10
|
+
const axios_1 = __importDefault(require("axios"));
|
|
11
|
+
const loglevel_1 = __importDefault(require("loglevel"));
|
|
12
|
+
const snippets_1 = require("../config/snippets");
|
|
13
|
+
const ASSETS_FOLDER_NAME = "assets";
|
|
14
|
+
const ASSET_METADATA_FILE_NAME = "assets.json";
|
|
15
|
+
const fetchAsset = async (path) => {
|
|
16
|
+
const logger = loglevel_1.default.getLogger(common_1.METICULOUS_LOGGER_NAME);
|
|
17
|
+
const fetchUrl = new URL(path, (0, snippets_1.getSnippetsBaseUrl)()).href;
|
|
18
|
+
const assetFileName = `${(0, path_1.basename)(new URL(fetchUrl).pathname, ".js")}.cjs`;
|
|
19
|
+
const assetMetadata = await loadAssetMetadata();
|
|
20
|
+
const etag = (await axios_1.default.head(fetchUrl)).headers["etag"] || "";
|
|
21
|
+
const entry = assetMetadata.assets.find((item) => item.fileName === assetFileName);
|
|
22
|
+
const filePath = (0, path_1.join)(await getOrCreateAssetsDir(), assetFileName);
|
|
23
|
+
if (entry && etag !== "" && etag === entry.etag) {
|
|
24
|
+
logger.debug(`${fetchUrl} already present`);
|
|
25
|
+
return filePath;
|
|
26
|
+
}
|
|
27
|
+
const contents = (await axios_1.default.get(fetchUrl)).data;
|
|
28
|
+
await (0, promises_1.writeFile)(filePath, contents);
|
|
29
|
+
if (entry) {
|
|
30
|
+
logger.debug(`${fetchUrl} updated`);
|
|
31
|
+
entry.etag = etag;
|
|
32
|
+
}
|
|
33
|
+
else {
|
|
34
|
+
logger.debug(`${fetchUrl} downloaded`);
|
|
35
|
+
assetMetadata.assets.push({ fileName: assetFileName, etag, fetchUrl });
|
|
36
|
+
}
|
|
37
|
+
await saveAssetMetadata(assetMetadata);
|
|
38
|
+
return filePath;
|
|
39
|
+
};
|
|
40
|
+
exports.fetchAsset = fetchAsset;
|
|
41
|
+
const getOrCreateAssetsDir = async () => {
|
|
42
|
+
const assetsDir = (0, path_1.join)((0, common_1.getMeticulousLocalDataDir)(), ASSETS_FOLDER_NAME);
|
|
43
|
+
await (0, promises_1.mkdir)(assetsDir, { recursive: true });
|
|
44
|
+
return assetsDir;
|
|
45
|
+
};
|
|
46
|
+
const loadAssetMetadata = async () => {
|
|
47
|
+
const assetsFile = (0, path_1.join)(await getOrCreateAssetsDir(), ASSET_METADATA_FILE_NAME);
|
|
48
|
+
const existingMetadata = await (0, promises_1.readFile)(assetsFile)
|
|
49
|
+
.then((data) => JSON.parse(data.toString("utf-8")))
|
|
50
|
+
.catch(() => null);
|
|
51
|
+
if (existingMetadata) {
|
|
52
|
+
return existingMetadata;
|
|
53
|
+
}
|
|
54
|
+
return { assets: [] };
|
|
55
|
+
};
|
|
56
|
+
const saveAssetMetadata = async (assetMetadata) => {
|
|
57
|
+
const assetsFile = (0, path_1.join)(await getOrCreateAssetsDir(), ASSET_METADATA_FILE_NAME);
|
|
58
|
+
await (0, promises_1.writeFile)(assetsFile, JSON.stringify(assetMetadata, null, 2));
|
|
59
|
+
};
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
import { AxiosInstance } from "axios";
/**
 * Returns the path of the local JSON file for the given replay, downloading it
 * on first use. Exits the process when the replay cannot be retrieved.
 */
export declare const getOrFetchReplay: (client: AxiosInstance, replayId: string) => Promise<{
    fileName: string;
}>;
/**
 * Downloads and extracts the replay's zip archive into the local replay
 * directory, returning that directory as `fileName`. Exits the process when
 * the archive URL cannot be retrieved.
 */
export declare const getOrFetchReplayArchive: (client: AxiosInstance, replayId: string) => Promise<{
    fileName: string;
}>;
/** Local directory holding all data for a given replay. */
export declare const getReplayDir: (replayId: string) => string;
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.getReplayDir = exports.getOrFetchReplayArchive = exports.getOrFetchReplay = void 0;
|
|
7
|
+
const promises_1 = require("fs/promises");
|
|
8
|
+
const path_1 = require("path");
|
|
9
|
+
const client_1 = require("@alwaysmeticulous/client");
|
|
10
|
+
const common_1 = require("@alwaysmeticulous/common");
|
|
11
|
+
const adm_zip_1 = __importDefault(require("adm-zip"));
|
|
12
|
+
const loglevel_1 = __importDefault(require("loglevel"));
|
|
13
|
+
const download_file_1 = require("../file-downloads/download-file");
|
|
14
|
+
const local_data_utils_1 = require("../file-downloads/local-data.utils");
|
|
15
|
+
const getOrFetchReplay = async (client, replayId) => {
|
|
16
|
+
const logger = loglevel_1.default.getLogger(common_1.METICULOUS_LOGGER_NAME);
|
|
17
|
+
const replayFile = (0, path_1.join)((0, exports.getReplayDir)(replayId), `${replayId}.json`);
|
|
18
|
+
const replay = await (0, local_data_utils_1.getOrDownloadJsonFile)({
|
|
19
|
+
filePath: replayFile,
|
|
20
|
+
dataDescription: "replay",
|
|
21
|
+
downloadJson: () => (0, client_1.getReplay)(client, replayId),
|
|
22
|
+
});
|
|
23
|
+
if (!replay) {
|
|
24
|
+
logger.error(`Error: Could not retrieve replay with id "${replayId}". Is the API token correct?`);
|
|
25
|
+
process.exit(1);
|
|
26
|
+
}
|
|
27
|
+
return { fileName: replayFile };
|
|
28
|
+
};
|
|
29
|
+
exports.getOrFetchReplay = getOrFetchReplay;
|
|
30
|
+
const getOrFetchReplayArchive = async (client, replayId) => {
|
|
31
|
+
const logger = loglevel_1.default.getLogger(common_1.METICULOUS_LOGGER_NAME);
|
|
32
|
+
const replayDir = (0, exports.getReplayDir)(replayId);
|
|
33
|
+
await (0, promises_1.mkdir)(replayDir, { recursive: true });
|
|
34
|
+
const releaseLock = await (0, local_data_utils_1.waitToAcquireLockOnDirectory)(replayDir);
|
|
35
|
+
try {
|
|
36
|
+
const replayArchiveFile = (0, path_1.join)(replayDir, `${replayId}.zip`);
|
|
37
|
+
const paramsFile = (0, path_1.join)(replayDir, "replayEventsParams.json");
|
|
38
|
+
// Check if "replayEventsParams.json" exists. If yes, we assume the replay
|
|
39
|
+
// zip archive has been downloaded and extracted.
|
|
40
|
+
if (await (0, local_data_utils_1.fileExists)(paramsFile)) {
|
|
41
|
+
logger.debug(`Replay archive already downloaded at ${replayDir}`);
|
|
42
|
+
return { fileName: replayDir };
|
|
43
|
+
}
|
|
44
|
+
const downloadUrlData = await (0, client_1.getReplayDownloadUrl)(client, replayId);
|
|
45
|
+
if (!downloadUrlData) {
|
|
46
|
+
logger.error("Error: Could not retrieve replay archive URL. This may be an invalid replay");
|
|
47
|
+
await releaseLock();
|
|
48
|
+
process.exit(1);
|
|
49
|
+
}
|
|
50
|
+
await (0, download_file_1.downloadFile)(downloadUrlData.dowloadUrl, replayArchiveFile);
|
|
51
|
+
const zipFile = new adm_zip_1.default(replayArchiveFile);
|
|
52
|
+
zipFile.extractAllTo(replayDir, /*overwrite=*/ true);
|
|
53
|
+
await (0, promises_1.rm)(replayArchiveFile);
|
|
54
|
+
logger.debug(`Extracted replay archive in ${replayDir}`);
|
|
55
|
+
return { fileName: replayDir };
|
|
56
|
+
}
|
|
57
|
+
finally {
|
|
58
|
+
await releaseLock();
|
|
59
|
+
}
|
|
60
|
+
};
|
|
61
|
+
exports.getOrFetchReplayArchive = getOrFetchReplayArchive;
|
|
62
|
+
const getReplayDir = (replayId) => (0, path_1.join)((0, common_1.getMeticulousLocalDataDir)(), "replays", replayId);
|
|
63
|
+
exports.getReplayDir = getReplayDir;
|
package/package.json
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@alwaysmeticulous/downloading-helpers",
|
|
3
|
+
"version": "2.40.4",
|
|
4
|
+
"description": "Helper utilities for downloading files & scripts required to execute replays",
|
|
5
|
+
"license": "ISC",
|
|
6
|
+
"main": "dist/index.js",
|
|
7
|
+
"types": "dist/index.d.ts",
|
|
8
|
+
"files": [
|
|
9
|
+
"dist"
|
|
10
|
+
],
|
|
11
|
+
"scripts": {
|
|
12
|
+
"clean": "rimraf dist tsconfig.tsbuildinfo",
|
|
13
|
+
"build": "tsc --build tsconfig.json",
|
|
14
|
+
"dev": "tsc --build tsconfig.json --watch",
|
|
15
|
+
"format": "prettier --write src",
|
|
16
|
+
"lint": "eslint src --ext=ts,tsx,js --cache",
|
|
17
|
+
"lint:commit": "eslint --cache $(git diff --relative --name-only --diff-filter=ACMRTUXB master | grep -E \"(.js$|.ts$|.tsx$)\")",
|
|
18
|
+
"lint:fix": "eslint src --ext=ts,tsx,js --cache --fix",
|
|
19
|
+
"depcheck": "depcheck --ignore-patterns=dist"
|
|
20
|
+
},
|
|
21
|
+
"dependencies": {
|
|
22
|
+
"@alwaysmeticulous/api": "^2.40.4",
|
|
23
|
+
"@alwaysmeticulous/client": "^2.40.4",
|
|
24
|
+
"@alwaysmeticulous/common": "^2.40.4",
|
|
25
|
+
"adm-zip": "^0.5.9",
|
|
26
|
+
"axios": "^1.2.6",
|
|
27
|
+
"loglevel": "^1.8.0",
|
|
28
|
+
"luxon": "^3.2.1",
|
|
29
|
+
"proper-lockfile": "^4.1.2"
|
|
30
|
+
},
|
|
31
|
+
"devDependencies": {
|
|
32
|
+
"@types/adm-zip": "^0.5.0",
|
|
33
|
+
"@types/luxon": "^3.2.0",
|
|
34
|
+
"@types/proper-lockfile": "^4.1.2"
|
|
35
|
+
},
|
|
36
|
+
"author": {
|
|
37
|
+
"name": "The Meticulous Team",
|
|
38
|
+
"email": "eng@meticulous.ai",
|
|
39
|
+
"url": "https://meticulous.ai"
|
|
40
|
+
},
|
|
41
|
+
"engines": {
|
|
42
|
+
"node": ">= 12"
|
|
43
|
+
},
|
|
44
|
+
"homepage": "https://github.com/alwaysmeticulous/meticulous-sdk",
|
|
45
|
+
"repository": {
|
|
46
|
+
"type": "git",
|
|
47
|
+
"url": "https://github.com/alwaysmeticulous/meticulous-sdk.git",
|
|
48
|
+
"directory": "packages/cli"
|
|
49
|
+
},
|
|
50
|
+
"bugs": {
|
|
51
|
+
"url": "https://github.com/alwaysmeticulous/meticulous-sdk/issues"
|
|
52
|
+
},
|
|
53
|
+
"gitHead": "11acce96c03e23cbb556779a121710a8136d24db"
|
|
54
|
+
}
|