podcast-dl 7.3.2 → 8.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -10,6 +10,12 @@ A CLI for downloading podcasts with a focus on archiving.
10
10
 
11
11
  `npx podcast-dl --url <PODCAST_RSS_URL>`
12
12
 
13
+ ### Binaries
14
+
15
+ [Visit the releases page](https://github.com/lightpohl/podcast-dl/releases) and download the latest binary for your system.
16
+
17
+ `podcast-dl --url <PODCAST_RSS_URL>`
18
+
13
19
  ### [More Examples](./docs/examples.md)
14
20
 
15
21
  ## Options
@@ -38,8 +44,8 @@ Type values surrounded in square brackets (`[]`) can be used as used as boolean
38
44
  | --reverse | | false | Reverse download direction and start at last RSS item. |
39
45
  | --info | | false | Print retrieved podcast info instead of downloading. |
40
46
  | --list | [String] | false | Print episode list instead of downloading. Defaults to "table" when used as a boolean option. "json" is also supported. |
41
- | --exec | String | false | Execute a command after each episode is downloaded. |
42
- | --filter-url-tacking | | false | Attempts to extract the direct download link of an episode if detected (**experimental**). |
47
+ | --exec | String | false | Execute a command after each episode is downloaded. See "Templating" for more details. |
48
+ | --parser-config | String | false | Path to JSON file that will be parsed and used to override the default config passed to [rss-parser](https://github.com/rbren/rss-parser#xml-options). |
43
49
  | --version | | false | Output the version number. |
44
50
  | --help | | false | Output usage information. |
45
51
 
@@ -50,7 +56,7 @@ Type values surrounded in square brackets (`[]`) can be used as used as boolean
50
56
 
51
57
  ## Templating
52
58
 
53
- Options that support templating allow users to specify a template for the generated filename(s). The provided template will replace all matched keywords with the related data described below. Each keyword must be wrapped in two braces like so:
59
+ Options that support templating allow users to specify a template for the generated filename(s) or option. The provided template will replace all matched keywords with the related data described below. Each keyword must be wrapped in two braces like so:
54
60
 
55
61
  `--out-dir "./{{podcast_title}}"`
56
62
 
@@ -70,12 +76,12 @@ Options that support templating allow users to specify a template for the genera
70
76
  - `podcast_title`: Title of the podcast feed.
71
77
  - `podcast_link`: `link` value provided for the podcast feed. Typically the homepage URL.
72
78
 
73
- ## Executing Process After Downloading Episode
74
-
75
- Option to execute command after downloading episode with `{}` being a placeholder for the downloaded episode and `{filenameBase}` for the filename without extension.
79
+ ### `--exec`
76
80
 
77
- - Example to convert all episodes to mp3 with 192k: `ffmpeg -i {} -b:a 192k -f mp3 {filenameBase}.mp3`
78
- - Example to move all episodes to folder: `mv {} /mnt/media_server/`
81
+ - `episode_path`: The path to the downloaded episode.
82
+ - `episode_path_base`: The path to the folder of the downloaded episode.
83
+ - `episode_filename`: The filename of the episode.
84
+ - `episode_filename_base`: The filename of the episode without its extension.
79
85
 
80
86
  ## Log Levels
81
87
 
package/bin/async.js CHANGED
@@ -21,7 +21,6 @@ import {
21
21
  runExec,
22
22
  writeItemMeta,
23
23
  writeToArchive,
24
- getUrlEmbed,
25
24
  getIsInArchive,
26
25
  } from "./util.js";
27
26
 
@@ -37,7 +36,6 @@ const download = async ({
37
36
  archive,
38
37
  override,
39
38
  onAfterDownload,
40
- filterUrlTracking,
41
39
  }) => {
42
40
  const logMessage = getLogMessageWithMarker(marker);
43
41
  if (!override && fs.existsSync(outputPath)) {
@@ -50,18 +48,7 @@ const download = async ({
50
48
  return;
51
49
  }
52
50
 
53
- let embeddedUrl = null;
54
- if (filterUrlTracking) {
55
- logMessage("Attempting to find embedded URL...");
56
- embeddedUrl = await getUrlEmbed(url);
57
-
58
- if (!embeddedUrl) {
59
- logMessage("Unable to find embedded URL. Defaulting to full address");
60
- }
61
- }
62
-
63
- const finalUrl = embeddedUrl || url;
64
- const headResponse = await got(finalUrl, {
51
+ const headResponse = await got(url, {
65
52
  timeout: 5000,
66
53
  method: "HEAD",
67
54
  responseType: "json",
@@ -108,7 +95,7 @@ const download = async ({
108
95
  });
109
96
 
110
97
  await pipeline(
111
- got.stream(finalUrl).on("downloadProgress", onDownloadProgress),
98
+ got.stream(url).on("downloadProgress", onDownloadProgress),
112
99
  fs.createWriteStream(tempOutputPath)
113
100
  );
114
101
  } catch (error) {
@@ -155,7 +142,6 @@ let downloadItemsAsync = async ({
155
142
  episodeTemplate,
156
143
  exec,
157
144
  feed,
158
- filterUrlTracking,
159
145
  includeEpisodeMeta,
160
146
  mono,
161
147
  override,
@@ -193,7 +179,6 @@ let downloadItemsAsync = async ({
193
179
  archive,
194
180
  override,
195
181
  marker,
196
- filterUrlTracking,
197
182
  key: getArchiveKey({
198
183
  prefix: archiveUrl,
199
184
  name: getArchiveFilename({
@@ -219,7 +204,12 @@ let downloadItemsAsync = async ({
219
204
 
220
205
  if (exec) {
221
206
  logMessage("Running exec...");
222
- await runExec({ exec, outputPodcastPath, episodeFilename });
207
+ await runExec({
208
+ exec,
209
+ basePath,
210
+ outputPodcastPath,
211
+ episodeFilename,
212
+ });
223
213
  }
224
214
 
225
215
  numEpisodesDownloaded += 1;
package/bin/bin.js CHANGED
@@ -3,9 +3,9 @@
3
3
  import fs from "fs";
4
4
  import _path from "path";
5
5
  import commander from "commander";
6
- import { createRequire } from "module";
7
6
  import pluralize from "pluralize";
8
7
 
8
+ import { setupCommander } from "./commander.js";
9
9
  import { download } from "./async.js";
10
10
  import {
11
11
  getArchiveKey,
@@ -16,9 +16,7 @@ import {
16
16
  logFeedInfo,
17
17
  logItemsList,
18
18
  writeFeedMeta,
19
- ITEM_LIST_FORMATS,
20
19
  } from "./util.js";
21
- import { createParseNumber, hasFfmpeg } from "./validate.js";
22
20
  import {
23
21
  ERROR_STATUSES,
24
22
  LOG_LEVELS,
@@ -29,100 +27,7 @@ import {
29
27
  import { getFolderName, getSafeName } from "./naming.js";
30
28
  import { downloadItemsAsync } from "./async.js";
31
29
 
32
- const require = createRequire(import.meta.url);
33
- const { version } = require("../package.json");
34
-
35
- commander
36
- .version(version)
37
- .option("--url <string>", "url to podcast rss feed")
38
- .option("--out-dir <path>", "specify output directory", "./{{podcast_title}}")
39
- .option(
40
- "--archive [path]",
41
- "download or write only items not listed in archive file"
42
- )
43
- .option(
44
- "--episode-template <string>",
45
- "template for generating episode related filenames",
46
- "{{release_date}}-{{title}}"
47
- )
48
- .option("--include-meta", "write out podcast metadata to json")
49
- .option(
50
- "--include-episode-meta",
51
- "write out individual episode metadata to json"
52
- )
53
- .option("--include-episode-images", "download found episode images")
54
- .option(
55
- "--offset <number>",
56
- "offset episode to start downloading from (most recent = 0)",
57
- createParseNumber({ min: 0, name: "--offset" }),
58
- 0
59
- )
60
- .option(
61
- "--limit <number>",
62
- "max amount of episodes to download",
63
- createParseNumber({ min: 1, name: "--limit", require: false })
64
- )
65
- .option(
66
- "--episode-regex <string>",
67
- "match episode title against regex before downloading"
68
- )
69
- .option(
70
- "--after <string>",
71
- "download episodes only after this date (inclusive)"
72
- )
73
- .option(
74
- "--before <string>",
75
- "download episodes only before this date (inclusive)"
76
- )
77
- .option(
78
- "--add-mp3-metadata",
79
- "attempts to add a base level of metadata to .mp3 files using ffmpeg",
80
- hasFfmpeg
81
- )
82
- .option(
83
- "--adjust-bitrate <string>",
84
- "attempts to adjust bitrate of .mp3 files using ffmpeg",
85
- hasFfmpeg
86
- )
87
- .option(
88
- "--mono",
89
- "attempts to force .mp3 files into mono using ffmpeg",
90
- hasFfmpeg
91
- )
92
- .option("--override", "override local files on collision")
93
- .option("--reverse", "download episodes in reverse order")
94
- .option("--info", "print retrieved podcast info instead of downloading")
95
- .option(
96
- "--list [table|json]",
97
- "print episode info instead of downloading",
98
- (value) => {
99
- if (
100
- value !== ITEM_LIST_FORMATS.table &&
101
- value !== ITEM_LIST_FORMATS.json
102
- ) {
103
- logErrorAndExit(
104
- `${value} is an invalid format for --list\nUse "table" or "json"`
105
- );
106
- }
107
-
108
- return value;
109
- }
110
- )
111
- .option(
112
- "--exec <string>",
113
- "Execute a command after each episode is downloaded"
114
- )
115
- .option(
116
- "--threads <number>",
117
- "the number of downloads that can happen concurrently",
118
- createParseNumber({ min: 1, max: 32, name: "threads" }),
119
- 1
120
- )
121
- .option(
122
- "--filter-url-tracking",
123
- "attempts to extract the direct download link of an episode if detected (experimental)"
124
- )
125
- .parse(process.argv);
30
+ setupCommander(commander, process.argv);
126
31
 
127
32
  const {
128
33
  url,
@@ -143,7 +48,7 @@ const {
143
48
  exec,
144
49
  mono,
145
50
  threads,
146
- filterUrlTracking,
51
+ parserConfig,
147
52
  addMp3Metadata: addMp3MetadataFlag,
148
53
  adjustBitrate: bitrate,
149
54
  } = commander;
@@ -157,13 +62,15 @@ const main = async () => {
157
62
 
158
63
  const { hostname, pathname } = new URL(url);
159
64
  const archiveUrl = `${hostname}${pathname}`;
160
- const feed = await getFeed(url);
65
+ const feed = await getFeed(url, parserConfig);
161
66
  const basePath = _path.resolve(
162
67
  process.cwd(),
163
68
  getFolderName({ feed, template: outDir })
164
69
  );
165
70
 
166
- logFeedInfo(feed);
71
+ if (info) {
72
+ logFeedInfo(feed);
73
+ }
167
74
 
168
75
  if (list) {
169
76
  if (feed.items && feed.items.length) {
@@ -187,6 +94,8 @@ const main = async () => {
187
94
  process.exit(0);
188
95
  }
189
96
 
97
+ logFeedInfo(feed);
98
+
190
99
  if (!fs.existsSync(basePath)) {
191
100
  logMessage(`${basePath} does not exist. Creating...`, LOG_LEVELS.important);
192
101
  fs.mkdirSync(basePath, { recursive: true });
@@ -290,7 +199,6 @@ const main = async () => {
290
199
  override,
291
200
  targetItems,
292
201
  threads,
293
- filterUrlTracking,
294
202
  });
295
203
 
296
204
  if (hasErrors && numEpisodesDownloaded !== targetItems.length) {
@@ -0,0 +1,100 @@
1
+ import { ITEM_LIST_FORMATS } from "./util.js";
2
+ import { createParseNumber, hasFfmpeg } from "./validate.js";
3
+ import { logErrorAndExit } from "./logger.js";
4
+
5
+ export const setupCommander = (commander, argv) => {
6
+ commander
7
+ .version("8.0.0")
8
+ .option("--url <string>", "url to podcast rss feed")
9
+ .option(
10
+ "--out-dir <path>",
11
+ "specify output directory",
12
+ "./{{podcast_title}}"
13
+ )
14
+ .option(
15
+ "--archive [path]",
16
+ "download or write only items not listed in archive file"
17
+ )
18
+ .option(
19
+ "--episode-template <string>",
20
+ "template for generating episode related filenames",
21
+ "{{release_date}}-{{title}}"
22
+ )
23
+ .option("--include-meta", "write out podcast metadata to json")
24
+ .option(
25
+ "--include-episode-meta",
26
+ "write out individual episode metadata to json"
27
+ )
28
+ .option("--include-episode-images", "download found episode images")
29
+ .option(
30
+ "--offset <number>",
31
+ "offset episode to start downloading from (most recent = 0)",
32
+ createParseNumber({ min: 0, name: "--offset" }),
33
+ 0
34
+ )
35
+ .option(
36
+ "--limit <number>",
37
+ "max amount of episodes to download",
38
+ createParseNumber({ min: 1, name: "--limit", require: false })
39
+ )
40
+ .option(
41
+ "--episode-regex <string>",
42
+ "match episode title against regex before downloading"
43
+ )
44
+ .option(
45
+ "--after <string>",
46
+ "download episodes only after this date (inclusive)"
47
+ )
48
+ .option(
49
+ "--before <string>",
50
+ "download episodes only before this date (inclusive)"
51
+ )
52
+ .option(
53
+ "--add-mp3-metadata",
54
+ "attempts to add a base level of metadata to .mp3 files using ffmpeg",
55
+ hasFfmpeg
56
+ )
57
+ .option(
58
+ "--adjust-bitrate <string>",
59
+ "attempts to adjust bitrate of .mp3 files using ffmpeg",
60
+ hasFfmpeg
61
+ )
62
+ .option(
63
+ "--mono",
64
+ "attempts to force .mp3 files into mono using ffmpeg",
65
+ hasFfmpeg
66
+ )
67
+ .option("--override", "override local files on collision")
68
+ .option("--reverse", "download episodes in reverse order")
69
+ .option("--info", "print retrieved podcast info instead of downloading")
70
+ .option(
71
+ "--list [table|json]",
72
+ "print episode info instead of downloading",
73
+ (value) => {
74
+ if (!ITEM_LIST_FORMATS.includes(value)) {
75
+ logErrorAndExit(
76
+ `${value} is an invalid format for --list\nUse one of the following: ${ITEM_LIST_FORMATS.join(
77
+ ", "
78
+ )}`
79
+ );
80
+ }
81
+
82
+ return value;
83
+ }
84
+ )
85
+ .option(
86
+ "--exec <string>",
87
+ "Execute a command after each episode is downloaded"
88
+ )
89
+ .option(
90
+ "--threads <number>",
91
+ "the number of downloads that can happen concurrently",
92
+ createParseNumber({ min: 1, max: 32, name: "threads" }),
93
+ 1
94
+ )
95
+ .option(
96
+ "--parser-config <string>",
97
+ "path to JSON config to override RSS parser"
98
+ )
99
+ .parse(argv);
100
+ };
package/bin/naming.js CHANGED
@@ -41,8 +41,8 @@ const getFilename = ({ item, ext, url, feed, template }) => {
41
41
 
42
42
  const getFolderName = ({ feed, template }) => {
43
43
  const templateReplacementsTuples = [
44
- ["podcast_title", feed.title ? getSafeName(feed.title) : ""],
45
- ["podcast_link", feed.link ? getSafeName(feed.link) : ""],
44
+ ["podcast_title", feed.title || ""],
45
+ ["podcast_link", feed.link || ""],
46
46
  ];
47
47
 
48
48
  let name = template;
@@ -55,7 +55,7 @@ const getFolderName = ({ feed, template }) => {
55
55
  : name.replace(replaceRegex, "");
56
56
  });
57
57
 
58
- return name;
58
+ return getSafeName(name);
59
59
  };
60
60
 
61
61
  const getArchiveFilename = ({ pubDate, name, ext }) => {
package/bin/util.js CHANGED
@@ -2,7 +2,6 @@ import rssParser from "rss-parser";
2
2
  import path from "path";
3
3
  import fs from "fs";
4
4
  import dayjs from "dayjs";
5
- import got from "got";
6
5
  import util from "util";
7
6
  import { exec } from "child_process";
8
7
 
@@ -11,10 +10,6 @@ import { getArchiveFilename, getFilename } from "./naming.js";
11
10
 
12
11
  const execWithPromise = util.promisify(exec);
13
12
 
14
- const parser = new rssParser({
15
- defaultRSS: 2.0,
16
- });
17
-
18
13
  const getTempPath = (path) => {
19
14
  return `${path}.tmp`;
20
15
  };
@@ -23,14 +18,30 @@ const getArchiveKey = ({ prefix, name }) => {
23
18
  return `${prefix}-${name}`;
24
19
  };
25
20
 
26
- const getArchive = (archive) => {
27
- const archivePath = path.resolve(process.cwd(), archive);
21
+ const getPublicObject = (object) => {
22
+ const output = {};
23
+ Object.keys(object).forEach((key) => {
24
+ if (!key.startsWith("_") && object[key]) {
25
+ output[key] = object[key];
26
+ }
27
+ });
28
+
29
+ return output;
30
+ };
31
+
32
+ const getJsonFile = (filePath) => {
33
+ const fullPath = path.resolve(process.cwd(), filePath);
28
34
 
29
- if (!fs.existsSync(archivePath)) {
30
- return [];
35
+ if (!fs.existsSync(fullPath)) {
36
+ return null;
31
37
  }
32
38
 
33
- return JSON.parse(fs.readFileSync(archivePath));
39
+ return JSON.parse(fs.readFileSync(fullPath));
40
+ };
41
+
42
+ const getArchive = (archive) => {
43
+ const archiveContent = getJsonFile(archive);
44
+ return archiveContent === null ? [] : archiveContent;
34
45
  };
35
46
 
36
47
  const writeToArchive = ({ key, archive }) => {
@@ -49,54 +60,6 @@ const getIsInArchive = ({ key, archive }) => {
49
60
  return archiveResult.includes(key);
50
61
  };
51
62
 
52
- const getPossibleUrlEmbeds = (url, maxAmount = 5) => {
53
- const fullUrl = new URL(url);
54
- const possibleStartIndexes = [];
55
-
56
- for (let i = 0; i < fullUrl.pathname.length; i++) {
57
- if (fullUrl.pathname[i] === "/") {
58
- possibleStartIndexes.push(i);
59
- }
60
- }
61
-
62
- const possibleEmbedChoices = possibleStartIndexes.map((startIndex) => {
63
- let possibleEmbed = fullUrl.pathname.slice(startIndex + 1);
64
-
65
- if (!possibleEmbed.startsWith("http")) {
66
- possibleEmbed = `https://${possibleEmbed}`;
67
- }
68
-
69
- return decodeURIComponent(possibleEmbed);
70
- });
71
-
72
- return possibleEmbedChoices
73
- .slice(Math.max(possibleEmbedChoices.length - maxAmount, 0))
74
- .reverse();
75
- };
76
-
77
- const getUrlEmbed = async (url) => {
78
- const possibleUrlEmbeds = getPossibleUrlEmbeds(url);
79
- for (const possibleUrl of possibleUrlEmbeds) {
80
- try {
81
- const embeddedUrl = new URL(possibleUrl);
82
- await got(embeddedUrl.href, {
83
- timeout: 3000,
84
- method: "HEAD",
85
- responseType: "json",
86
- headers: {
87
- accept: "*/*",
88
- },
89
- });
90
-
91
- return embeddedUrl;
92
- } catch (error) {
93
- // do nothing
94
- }
95
- }
96
-
97
- return null;
98
- };
99
-
100
63
  const getLoopControls = ({ offset, length, reverse }) => {
101
64
  if (reverse) {
102
65
  const startIndex = length - 1 - offset;
@@ -246,10 +209,7 @@ const logFeedInfo = (feed) => {
246
209
  logMessage();
247
210
  };
248
211
 
249
- const ITEM_LIST_FORMATS = {
250
- table: "table",
251
- json: "json",
252
- };
212
+ const ITEM_LIST_FORMATS = ["table", "json"];
253
213
 
254
214
  const logItemsList = ({
255
215
  type,
@@ -271,23 +231,28 @@ const logItemsList = ({
271
231
  episodeRegex,
272
232
  });
273
233
 
274
- const tableData = items.map((item) => {
275
- return {
234
+ if (!items.length) {
235
+ logErrorAndExit("No episodes found with provided criteria to list");
236
+ }
237
+
238
+ const isJson = type === "json";
239
+
240
+ const output = items.map((item) => {
241
+ const data = {
276
242
  episodeNum: feed.items.length - item._originalIndex,
277
243
  title: item.title,
278
244
  pubDate: item.pubDate,
279
245
  };
246
+
247
+ return data;
280
248
  });
281
249
 
282
- if (!tableData.length) {
283
- logErrorAndExit("No episodes found with provided criteria to list");
250
+ if (isJson) {
251
+ console.log(JSON.stringify(output));
252
+ return;
284
253
  }
285
254
 
286
- if (type === ITEM_LIST_FORMATS.json) {
287
- console.log(JSON.stringify(tableData));
288
- } else {
289
- console.table(tableData);
290
- }
255
+ console.table(output);
291
256
  };
292
257
 
293
258
  const writeFeedMeta = ({ outputPath, feed, key, archive, override }) => {
@@ -295,15 +260,7 @@ const writeFeedMeta = ({ outputPath, feed, key, archive, override }) => {
295
260
  logMessage("Feed metadata exists in archive. Skipping...");
296
261
  return;
297
262
  }
298
-
299
- const output = {};
300
- ["title", "description", "link", "feedUrl", "managingEditor"].forEach(
301
- (key) => {
302
- if (feed[key]) {
303
- output[key] = feed[key];
304
- }
305
- }
306
- );
263
+ const output = getPublicObject(feed);
307
264
 
308
265
  try {
309
266
  if (override || !fs.existsSync(outputPath)) {
@@ -339,12 +296,7 @@ const writeItemMeta = ({
339
296
  return;
340
297
  }
341
298
 
342
- const output = {};
343
- ["title", "contentSnippet", "pubDate", "creator"].forEach((key) => {
344
- if (item[key]) {
345
- output[key] = item[key];
346
- }
347
- });
299
+ const output = getPublicObject(item);
348
300
 
349
301
  try {
350
302
  if (override || !fs.existsSync(outputPath)) {
@@ -432,7 +384,19 @@ const getImageUrl = ({ image, itunes }) => {
432
384
  return null;
433
385
  };
434
386
 
435
- const getFeed = async (url) => {
387
+ const getFeed = async (url, parserConfig) => {
388
+ const defaultConfig = {
389
+ defaultRSS: 2.0,
390
+ };
391
+
392
+ const config = parserConfig ? getJsonFile(parserConfig) : defaultConfig;
393
+
394
+ if (parserConfig && !config) {
395
+ logErrorAndExit(`Unable to load parser config: ${parserConfig}`);
396
+ }
397
+
398
+ const parser = new rssParser(config);
399
+
436
400
  const { href } = new URL(url);
437
401
 
438
402
  let feed;
@@ -525,14 +489,22 @@ const runFfmpeg = async ({
525
489
  fs.renameSync(tmpMp3Path, outputPath);
526
490
  };
527
491
 
528
- const runExec = async ({ exec, outputPodcastPath, episodeFilename }) => {
529
- const filenameBase = episodeFilename.substring(
492
+ const runExec = async ({
493
+ exec,
494
+ basePath,
495
+ outputPodcastPath,
496
+ episodeFilename,
497
+ }) => {
498
+ const episodeFilenameBase = episodeFilename.substring(
530
499
  0,
531
500
  episodeFilename.lastIndexOf(".")
532
501
  );
502
+
533
503
  const execCmd = exec
534
- .replace(/{}/g, `"${outputPodcastPath}"`)
535
- .replace(/{filenameBase}/g, `"${filenameBase}"`);
504
+ .replace(/{{episode_path}}/g, `"${outputPodcastPath}"`)
505
+ .replace(/{{episode_path_base}}/g, `"${basePath}"`)
506
+ .replace(/{{episode_filename}}/g, `"${episodeFilename}"`)
507
+ .replace(/{{episode_filename_base}}/g, `"${episodeFilenameBase}"`);
536
508
 
537
509
  await execWithPromise(execCmd, { stdio: "ignore" });
538
510
  };
@@ -548,7 +520,6 @@ export {
548
520
  getItemsToDownload,
549
521
  getTempPath,
550
522
  getUrlExt,
551
- getUrlEmbed,
552
523
  logFeedInfo,
553
524
  ITEM_LIST_FORMATS,
554
525
  logItemsList,
package/package.json CHANGED
@@ -1,11 +1,13 @@
1
1
  {
2
2
  "name": "podcast-dl",
3
- "version": "7.3.2",
3
+ "version": "8.0.0",
4
4
  "description": "A CLI for downloading podcasts.",
5
5
  "type": "module",
6
6
  "bin": "./bin/bin.js",
7
7
  "scripts": {
8
+ "build": "rimraf ./binaries && npx webpack && npm run pkg",
8
9
  "lint": "eslint ./bin",
10
+ "pkg": "npx pkg ./dist/podcast-dl.js --targets node14-linux-x64,node14-win-x64,node14-macos-x64 --out-path ./binaries",
9
11
  "release": "standard-version"
10
12
  },
11
13
  "lint-staged": {
@@ -44,7 +46,8 @@
44
46
  "lint-staged": "^10.1.7",
45
47
  "prettier": "2.3.2",
46
48
  "rimraf": "^3.0.2",
47
- "standard-version": "^9.0.0"
49
+ "standard-version": "^9.0.0",
50
+ "webpack-cli": "^5.0.1"
48
51
  },
49
52
  "dependencies": {
50
53
  "command-exists": "^1.2.9",
@@ -54,7 +57,7 @@
54
57
  "got": "^11.0.2",
55
58
  "p-limit": "^4.0.0",
56
59
  "pluralize": "^8.0.0",
57
- "rss-parser": "^3.7.6",
60
+ "rss-parser": "^3.12.0",
58
61
  "throttle-debounce": "^3.0.1"
59
62
  }
60
63
  }