socialbuffer 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +146 -0
- package/bin/socialbuffer.js +895 -0
- package/package.json +34 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 socialbuffer contributors
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,146 @@
|
|
|
1
|
+
# socialbuffer
|
|
2
|
+
|
|
3
|
+
Minimal CLI for turning markdown files into queued Buffer posts for X and LinkedIn.
|
|
4
|
+
|
|
5
|
+
`socialbuffer` is the public name and primary command. `tweetx` remains available as a backward-compatible alias for now.
|
|
6
|
+
|
|
7
|
+
## Run from source
|
|
8
|
+
|
|
9
|
+
Clone the repo, create a local `.env`, and run the CLI directly from source:
|
|
10
|
+
|
|
11
|
+
```sh
|
|
12
|
+
nvm use
|
|
13
|
+
cp .env.example .env
|
|
14
|
+
node ./bin/socialbuffer.js --help
|
|
15
|
+
node ./bin/socialbuffer.js post --file ./example-post.md --dry-run
|
|
16
|
+
```
|
|
17
|
+
|
|
18
|
+
The root `.env` is loaded automatically, so source usage works without a global install.
|
|
19
|
+
|
|
20
|
+
Node 20 is the supported runtime for development and CI.
|
|
21
|
+
|
|
22
|
+
## Global install
|
|
23
|
+
|
|
24
|
+
Install the CLI globally so `socialbuffer` is available from any directory:
|
|
25
|
+
|
|
26
|
+
```sh
|
|
27
|
+
npm install -g .
|
|
28
|
+
```
|
|
29
|
+
|
|
30
|
+
When run outside this repo, `socialbuffer` looks for config in this order:
|
|
31
|
+
|
|
32
|
+
```text
|
|
33
|
+
$SOCIALBUFFER_ENV_FILE
|
|
34
|
+
$TWEETX_ENV_FILE
|
|
35
|
+
$XDG_CONFIG_HOME/socialbuffer/.env
|
|
36
|
+
~/.config/socialbuffer/.env
|
|
37
|
+
~/.socialbuffer/.env
|
|
38
|
+
~/.config/tweetx/.env
|
|
39
|
+
~/.tweetx/.env
|
|
40
|
+
./.env
|
|
41
|
+
```
|
|
42
|
+
|
|
43
|
+
Use `~/.config/socialbuffer/.env` for the public config path. The older `tweetx` config paths still work as legacy fallbacks.
|
|
44
|
+
|
|
45
|
+
## Discover your channel
|
|
46
|
+
|
|
47
|
+
List Buffer channels:
|
|
48
|
+
|
|
49
|
+
```sh
|
|
50
|
+
node ./bin/socialbuffer.js channels
|
|
51
|
+
```
|
|
52
|
+
|
|
53
|
+
List only X/Twitter channels:
|
|
54
|
+
|
|
55
|
+
```sh
|
|
56
|
+
node ./bin/socialbuffer.js channels --service twitter
|
|
57
|
+
```
|
|
58
|
+
|
|
59
|
+
List only LinkedIn channels:
|
|
60
|
+
|
|
61
|
+
```sh
|
|
62
|
+
node ./bin/socialbuffer.js channels --service linkedin
|
|
63
|
+
```
|
|
64
|
+
|
|
65
|
+
With a global install, the same commands become:
|
|
66
|
+
|
|
67
|
+
```sh
|
|
68
|
+
socialbuffer channels --service twitter
|
|
69
|
+
socialbuffer channels --service linkedin
|
|
70
|
+
```
|
|
71
|
+
|
|
72
|
+
## First command
|
|
73
|
+
|
|
74
|
+
```sh
|
|
75
|
+
node ./bin/socialbuffer.js post --file ./post.md --dry-run
|
|
76
|
+
```
|
|
77
|
+
|
|
78
|
+
## Setup
|
|
79
|
+
|
|
80
|
+
Set these environment variables in one of the supported env files:
|
|
81
|
+
|
|
82
|
+
```sh
|
|
83
|
+
BUFFER_API_KEY=your_buffer_api_key
|
|
84
|
+
BUFFER_X_CHANNEL_ID=your_buffer_x_channel_id
|
|
85
|
+
BUFFER_LINKEDIN_CHANNEL_ID=your_buffer_linkedin_channel_id
|
|
86
|
+
X_BEARER_TOKEN=your_x_bearer_token
|
|
87
|
+
```
|
|
88
|
+
|
|
89
|
+
`X_BEARER_TOKEN` is only needed for the read-only `analytics` command.
|
|
90
|
+
|
|
91
|
+
## Usage
|
|
92
|
+
|
|
93
|
+
Queue an X post in Buffer:
|
|
94
|
+
|
|
95
|
+
```sh
|
|
96
|
+
socialbuffer post --file ./post.md
|
|
97
|
+
```
|
|
98
|
+
|
|
99
|
+
Queue a LinkedIn post in Buffer:
|
|
100
|
+
|
|
101
|
+
```sh
|
|
102
|
+
socialbuffer post --platform linkedin --file ./post.md
|
|
103
|
+
```
|
|
104
|
+
|
|
105
|
+
Queue a post with one image:
|
|
106
|
+
|
|
107
|
+
```sh
|
|
108
|
+
socialbuffer post --file ./post.md --image ./shot.png
|
|
109
|
+
```
|
|
110
|
+
|
|
111
|
+
Queue a post with one remote image URL:
|
|
112
|
+
|
|
113
|
+
```sh
|
|
114
|
+
socialbuffer post --file ./post.md --image-url https://example.com/shot.png
|
|
115
|
+
```
|
|
116
|
+
|
|
117
|
+
Share immediately:
|
|
118
|
+
|
|
119
|
+
```sh
|
|
120
|
+
socialbuffer post --file ./post.md --mode shareNow
|
|
121
|
+
```
|
|
122
|
+
|
|
123
|
+
Preview the payload without sending anything:
|
|
124
|
+
|
|
125
|
+
```sh
|
|
126
|
+
socialbuffer post --file ./post.md --dry-run
|
|
127
|
+
```
|
|
128
|
+
|
|
129
|
+
Read X analytics:
|
|
130
|
+
|
|
131
|
+
```sh
|
|
132
|
+
socialbuffer analytics --username xdevelopers
|
|
133
|
+
```
|
|
134
|
+
|
|
135
|
+
## Notes
|
|
136
|
+
|
|
137
|
+
- The first version reads the markdown file as source text and flattens markdown formatting before publish.
|
|
138
|
+
- `channels` uses Buffer's GraphQL API so it works with the API key from Buffer's API settings page.
|
|
139
|
+
- The CLI auto-loads values from global config files and then lets the current directory's `.env` override them.
|
|
140
|
+
- `post` defaults to platform `x`. Use `--platform linkedin` to target the configured LinkedIn channel.
|
|
141
|
+
- `analytics` uses the X API directly and expects `X_BEARER_TOKEN` in `.env` or a supported global env file.
|
|
142
|
+
- `BUFFER_CHANNEL_ID` remains supported as a legacy fallback for X only.
|
|
143
|
+
- YAML frontmatter is stripped if present at the top of the file.
|
|
144
|
+
- The current image path supports one local image or one remote image URL.
|
|
145
|
+
|
|
146
|
+
Built alongside [DocsALot](https://docsalot.dev): convert your commits into documentation and social media posts on autopilot.
|
|
@@ -0,0 +1,895 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
import { readFile } from "node:fs/promises";
|
|
4
|
+
import { existsSync, readFileSync } from "node:fs";
|
|
5
|
+
import { request } from "node:https";
|
|
6
|
+
import { homedir } from "node:os";
|
|
7
|
+
import { basename, extname, join, resolve } from "node:path";
|
|
8
|
+
import process from "node:process";
|
|
9
|
+
|
|
10
|
+
// CLI identity: "socialbuffer" is the public command; "tweetx" is kept as a
// backward-compatible alias (see README).
const PRIMARY_CLI_NAME = "socialbuffer";
const LEGACY_CLI_NAME = "tweetx";
// API bases: Buffer's GraphQL endpoint (POSTed to directly) and the X v2 REST root.
const BUFFER_ENDPOINT = "https://api.buffer.com";
const X_API_ENDPOINT = "https://api.x.com/2";
// Accepted values for `post --mode` (Buffer scheduling modes).
const VALID_MODES = new Set(["addToQueue", "shareNow"]);
// Accepted values for `post --platform` ("twitter" is normalized to "x" in
// normalizePostPlatform before this set is consulted).
const VALID_POST_PLATFORMS = new Set(["x", "linkedin"]);
// Accepted values for `analytics --sort`.
const VALID_SORTS = new Set(["engagement", "recent", "likes", "reposts", "replies", "quotes"]);
|
|
17
|
+
|
|
18
|
+
// Print CLI usage, the environment-variable reference, and worked examples to
// stdout. This is the handler for the "help" command and for --help/-h.
function printHelp() {
  console.log(`${PRIMARY_CLI_NAME} (alias: ${LEGACY_CLI_NAME})

Usage:
  ${PRIMARY_CLI_NAME} channels [--service twitter|x|linkedin] [--api-key API_KEY]
  ${PRIMARY_CLI_NAME} post --file path/to/post.md [--platform x|linkedin] [--image path/to/file.png | --image-url https://...] [--alt "alt text"] [--mode addToQueue|shareNow] [--channel CHANNEL_ID] [--api-key API_KEY] [--dry-run]
  ${PRIMARY_CLI_NAME} analytics --username USERNAME [--limit N] [--sort engagement|recent|likes|reposts|replies|quotes] [--include-replies true|false] [--include-retweets true|false] [--x-token TOKEN]

Environment:
  BUFFER_API_KEY              Buffer API key
  BUFFER_X_CHANNEL_ID         Buffer X channel ID
  BUFFER_LINKEDIN_CHANNEL_ID  Buffer LinkedIn channel ID
  BUFFER_CHANNEL_ID           Legacy fallback for Buffer X channel ID
  X_BEARER_TOKEN              X app Bearer token for read-only analytics
  TWITTER_BEARER_TOKEN        Alias for X_BEARER_TOKEN
  SOCIALBUFFER_ENV_FILE       Optional path to an env file
  TWEETX_ENV_FILE             Legacy alias for SOCIALBUFFER_ENV_FILE

Examples:
  ${PRIMARY_CLI_NAME} channels
  ${PRIMARY_CLI_NAME} channels --service twitter
  ${PRIMARY_CLI_NAME} channels --service linkedin
  ${PRIMARY_CLI_NAME} post --file ./post.md
  ${PRIMARY_CLI_NAME} post --platform linkedin --file ./post.md
  ${PRIMARY_CLI_NAME} post --file ./post.md --image ./shot.png
  ${PRIMARY_CLI_NAME} post --file ./post.md --image-url https://example.com/shot.png
  ${PRIMARY_CLI_NAME} post --file ./post.md --mode shareNow
  ${PRIMARY_CLI_NAME} post --file ./post.md --dry-run
  ${PRIMARY_CLI_NAME} analytics --username xdevelopers
  ${PRIMARY_CLI_NAME} analytics --username xdevelopers --limit 20 --sort likes
`);
}
|
|
50
|
+
|
|
51
|
+
// Print an error to stderr and abort the process with exit code 1.
// Single error path used by all CLI validation and API-failure branches;
// callers rely on it never returning.
function fail(message) {
  console.error(`Error: ${message}`);
  process.exit(1);
}
|
|
55
|
+
|
|
56
|
+
// Read a dotenv-style file into a plain object of KEY -> value strings.
// A missing file yields an empty object. Blank lines, comment lines starting
// with "#", lines without an "=", and lines with an empty key are skipped.
// The value is everything after the first "=", trimmed.
function parseEnvFile(envPath) {
  if (!existsSync(envPath)) {
    return {};
  }

  const entries = {};
  const lines = readFileSync(envPath, "utf8").split(/\r?\n/);

  for (const rawLine of lines) {
    const line = rawLine.trim();
    if (line === "" || line.startsWith("#")) {
      continue;
    }

    const eq = line.indexOf("=");
    if (eq < 0) {
      continue;
    }

    const key = line.slice(0, eq).trim();
    if (key) {
      entries[key] = line.slice(eq + 1).trim();
    }
  }

  return entries;
}
|
|
86
|
+
|
|
87
|
+
// Build the ordered, de-duplicated list of env-file candidates. An explicit
// override file (SOCIALBUFFER_ENV_FILE, or legacy TWEETX_ENV_FILE) replaces
// the global config locations entirely; otherwise XDG/home config paths are
// tried (new socialbuffer names first, then legacy tweetx names). The working
// directory's ./.env is always appended last so it can override file values.
function resolveEnvPaths() {
  const override = process.env.SOCIALBUFFER_ENV_FILE || process.env.TWEETX_ENV_FILE;
  const candidates = [];

  if (override) {
    candidates.push(resolve(override));
  } else {
    const configRoot = process.env.XDG_CONFIG_HOME || join(homedir(), ".config");
    candidates.push(
      join(configRoot, PRIMARY_CLI_NAME, ".env"),
      join(homedir(), `.${PRIMARY_CLI_NAME}`, ".env"),
      join(configRoot, "tweetx", ".env"),
      join(homedir(), ".tweetx", ".env"),
    );
  }

  candidates.push(resolve(".env"));
  return [...new Set(candidates)];
}
|
|
104
|
+
|
|
105
|
+
// Merge every candidate env file into process.env. Keys that were already set
// in the real environment before loading are never overwritten, so explicit
// environment variables always win. Among files, later candidates in
// resolveEnvPaths() order can overwrite values set by earlier ones.
function loadEnvFiles() {
  const presetKeys = new Set(Object.keys(process.env));

  for (const envPath of resolveEnvPaths()) {
    for (const [key, value] of Object.entries(parseEnvFile(envPath))) {
      if (!presetKeys.has(key)) {
        process.env[key] = value;
      }
    }
  }
}
|
|
119
|
+
|
|
120
|
+
// Remove a leading YAML frontmatter block: a "---" fence on the very first
// line, closed by a matching "---" line. Returns the text after the closing
// fence, or the input unchanged when no complete fence pair exists.
// Fix: the closing fence may also be the final line of the file with no
// trailing newline ("---\ntitle: x\n---"); the previous implementation only
// matched "\n---\n" and left such frontmatter in the published post.
function stripFrontmatter(markdown) {
  if (!markdown.startsWith("---\n")) {
    return markdown;
  }

  const closingIndex = markdown.indexOf("\n---\n", 4);
  if (closingIndex !== -1) {
    return markdown.slice(closingIndex + 5);
  }

  // Closing fence at end-of-file without a trailing newline: everything is
  // frontmatter, so the body is empty.
  if (markdown.endsWith("\n---")) {
    return "";
  }

  return markdown;
}
|
|
132
|
+
|
|
133
|
+
// Normalize raw markdown for publishing: strip a UTF-8 BOM, convert CRLF line
// endings to LF, remove YAML frontmatter, and trim surrounding whitespace.
// Fix: line endings are now normalized BEFORE frontmatter stripping. The
// previous order ran stripFrontmatter first, so a CRLF file's "---\r\n" fence
// never matched the "---\n" pattern and the frontmatter leaked into the post.
function normalizeMarkdown(markdown) {
  const withoutBom = markdown.replace(/^\uFEFF/, "");
  const unixLineEndings = withoutBom.replace(/\r\n/g, "\n");
  return stripFrontmatter(unixLineEndings).trim();
}
|
|
138
|
+
|
|
139
|
+
// Flatten lightweight markdown into plain post text: drop ATX heading
// markers, normalize bullet markers to "- ", and unwrap inline code, bold,
// and italic spans. Other constructs (e.g. links) pass through unchanged.
function markdownToPlainText(markdown) {
  const substitutions = [
    [/^#{1,6}\s+/gm, ""], // heading markers
    [/^\s*[-*+]\s+/gm, "- "], // bullet markers
    [/`([^`]+)`/g, "$1"], // inline code
    [/\*\*([^*]+)\*\*/g, "$1"], // bold (must run before italic)
    [/\*([^*]+)\*/g, "$1"], // italic
  ];

  let text = normalizeMarkdown(markdown);
  for (const [pattern, replacement] of substitutions) {
    text = text.replace(pattern, replacement);
  }

  return text.trim();
}
|
|
148
|
+
|
|
149
|
+
// Parse CLI argv into { command, options }. An empty argv or a --help/-h flag
// anywhere yields the help command. "--dry-run" is the only boolean flag;
// every other "--key" token consumes the following token as its value.
// Positional arguments after the command and options with a missing value are
// fatal errors. Options default to mode "addToQueue" and dryRun false.
function parseArgs(argv) {
  const wantsHelp = argv.length === 0 || argv.includes("--help") || argv.includes("-h");
  if (wantsHelp) {
    return { command: "help" };
  }

  const [command, ...args] = argv;
  const options = { mode: "addToQueue", dryRun: false };

  let index = 0;
  while (index < args.length) {
    const token = args[index];

    if (token === "--dry-run") {
      options.dryRun = true;
      index += 1;
      continue;
    }

    if (!token.startsWith("--")) {
      fail(`unexpected argument: ${token}`);
    }

    const name = token.slice(2);
    const value = args[index + 1];
    if (value === undefined || value.startsWith("--")) {
      fail(`missing value for --${name}`);
    }

    options[name] = value;
    index += 2;
  }

  return { command, options };
}
|
|
184
|
+
|
|
185
|
+
// Read a markdown file and flatten it to plain post text.
// Exits with an error when the file is empty after flattening and trimming.
async function loadPostText(filePath) {
  const markdown = await readFile(filePath, "utf8");
  const plainText = markdownToPlainText(markdown);

  if (!plainText) {
    fail("post file is empty after trimming");
  }

  return plainText;
}
|
|
193
|
+
|
|
194
|
+
// Map an image file's extension (case-insensitive) to its MIME type.
// An unsupported or missing extension is a fatal error.
function getMimeType(filePath) {
  const mimeTypes = {
    ".png": "image/png",
    ".jpg": "image/jpeg",
    ".jpeg": "image/jpeg",
    ".webp": "image/webp",
    ".gif": "image/gif",
  };

  const extension = extname(filePath).toLowerCase();
  const mimeType = mimeTypes[extension];

  if (!mimeType) {
    fail(`unsupported image type: ${extension || "unknown"}`);
  }

  return mimeType;
}
|
|
210
|
+
|
|
211
|
+
// Derive human-readable alt text from an image path: the basename without its
// extension, with runs of "-"/"_" converted to single spaces. Falls back to
// "Attached image" when nothing readable remains.
function defaultAltText(filePath) {
  const stem = basename(filePath, extname(filePath));
  const humanized = stem.replace(/[_-]+/g, " ").trim();
  return humanized === "" ? "Attached image" : humanized;
}
|
|
215
|
+
|
|
216
|
+
// Read a local image and package it as a Buffer "images" asset: the file is
// inlined as a base64 data: URL, with alt text from the caller or derived
// from the filename. The file is read before the MIME-type check so a missing
// file surfaces as a filesystem error, matching the original behavior.
async function loadImageAsset(filePath, altText) {
  const contents = await readFile(filePath);
  const mimeType = getMimeType(filePath);
  const encoded = contents.toString("base64");

  const image = {
    url: `data:${mimeType};base64,${encoded}`,
    metadata: { altText: altText || defaultAltText(filePath) },
  };

  return { images: [image] };
}
|
|
231
|
+
|
|
232
|
+
// Package a remote image URL as a Buffer "images" asset. The URL is passed
// through untouched; alt text defaults to "Attached image".
function loadRemoteImageAsset(imageUrl, altText) {
  const metadata = { altText: altText || "Attached image" };
  return { images: [{ url: imageUrl, metadata }] };
}
|
|
244
|
+
|
|
245
|
+
// POST a JSON body to `url` with Bearer auth over node:https and resolve with
// { ok, status, payload }, where payload is the parsed JSON response body or
// null when the body is empty. Rejects on network errors and on a response
// body that is not valid JSON. Non-2xx statuses RESOLVE (with ok: false) so
// callers can inspect the error payload; only transport-level problems reject.
// NOTE(review): no request timeout is set — a hung connection waits forever;
// confirm whether that is acceptable for a CLI.
async function postJson(url, apiKey, body) {
  const target = new URL(url);

  return new Promise((resolvePromise, rejectPromise) => {
    const req = request(
      {
        method: "POST",
        protocol: target.protocol,
        hostname: target.hostname,
        port: target.port || undefined,
        path: `${target.pathname}${target.search}`,
        headers: {
          "content-type": "application/json",
          authorization: `Bearer ${apiKey}`,
        },
      },
      (response) => {
        let responseBody = "";

        response.setEncoding("utf8");
        response.on("data", (chunk) => {
          responseBody += chunk;
        });
        response.on("end", () => {
          let payload = null;

          if (responseBody) {
            try {
              payload = JSON.parse(responseBody);
            } catch (error) {
              // Malformed JSON is surfaced as a rejection, not a bad payload.
              rejectPromise(
                new Error(
                  `Buffer returned invalid JSON with status ${response.statusCode || "unknown"}`,
                ),
              );
              return;
            }
          }

          resolvePromise({
            // A missing status code is treated as a server error (500).
            ok: (response.statusCode || 500) >= 200 && (response.statusCode || 500) < 300,
            status: response.statusCode || 500,
            payload,
          });
        });
      },
    );

    req.on("error", (error) => {
      rejectPromise(error);
    });

    req.write(JSON.stringify(body));
    req.end();
  });
}
|
|
301
|
+
|
|
302
|
+
// GET `url` with Bearer auth over node:https and resolve with
// { ok, status, payload } — the read-side mirror of postJson (no request
// body, no content-type header). Payload is parsed JSON or null for an empty
// body; invalid JSON and transport errors reject, while non-2xx statuses
// resolve with ok: false so callers can report the error payload.
async function getJson(url, token) {
  const target = new URL(url);

  return new Promise((resolvePromise, rejectPromise) => {
    const req = request(
      {
        method: "GET",
        protocol: target.protocol,
        hostname: target.hostname,
        port: target.port || undefined,
        path: `${target.pathname}${target.search}`,
        headers: {
          authorization: `Bearer ${token}`,
        },
      },
      (response) => {
        let responseBody = "";

        response.setEncoding("utf8");
        response.on("data", (chunk) => {
          responseBody += chunk;
        });
        response.on("end", () => {
          let payload = null;

          if (responseBody) {
            try {
              payload = JSON.parse(responseBody);
            } catch (error) {
              // Malformed JSON is surfaced as a rejection, not a bad payload.
              rejectPromise(
                new Error(
                  `X returned invalid JSON with status ${response.statusCode || "unknown"}`,
                ),
              );
              return;
            }
          }

          resolvePromise({
            // A missing status code is treated as a server error (500).
            ok: (response.statusCode || 500) >= 200 && (response.statusCode || 500) < 300,
            status: response.statusCode || 500,
            payload,
          });
        });
      },
    );

    req.on("error", (error) => {
      rejectPromise(error);
    });

    req.end();
  });
}
|
|
356
|
+
|
|
357
|
+
// Create a post via Buffer's GraphQL CreatePost mutation and return the
// created post ({ id, text }). Every failure path is fatal (fail() exits):
// transport/HTTP errors, GraphQL-level errors, a MutationError result, or an
// unexpected result type. schedulingType is fixed to "automatic".
async function createBufferPost({ apiKey, channelId, text, mode, assets }) {
  const query = `
    mutation CreatePost($input: CreatePostInput!) {
      createPost(input: $input) {
        __typename
        ... on PostActionSuccess {
          post {
            id
            text
          }
        }
        ... on MutationError {
          message
        }
      }
    }
  `;

  const { ok, status, payload } = await postJson(BUFFER_ENDPOINT, apiKey, {
    query,
    variables: {
      input: {
        channelId,
        mode,
        schedulingType: "automatic",
        text,
        assets,
      },
    },
  });

  // HTTP-level failure (non-2xx): report status and whatever body we got.
  if (!ok) {
    const detail = payload ? JSON.stringify(payload) : "Request failed";
    fail(`Buffer request failed with ${status}: ${detail}`);
  }

  // GraphQL transport succeeded but the query itself reported errors.
  if (payload?.errors?.length) {
    fail(payload.errors.map((item) => item.message).join("; "));
  }

  const result = payload?.data?.createPost;
  if (!result) {
    fail("Buffer response did not include createPost");
  }

  // Union result: MutationError carries a message; anything other than
  // PostActionSuccess-with-post is unexpected.
  if (result.__typename === "MutationError") {
    fail(result.message || "Buffer mutation failed");
  }

  if (result.__typename !== "PostActionSuccess" || !result.post) {
    fail(`unexpected Buffer response type: ${result.__typename || "unknown"}`);
  }

  return result.post;
}
|
|
412
|
+
|
|
413
|
+
// Execute an arbitrary Buffer GraphQL query and return the response's "data"
// field. HTTP failures and GraphQL-level errors are fatal.
async function queryBufferGraphql({ apiKey, query, variables }) {
  const requestBody = { query, variables };
  const { ok, status, payload } = await postJson(BUFFER_ENDPOINT, apiKey, requestBody);

  if (!ok) {
    const detail = payload ? JSON.stringify(payload) : "Request failed";
    fail(`Buffer request failed with ${status}: ${detail}`);
  }

  if (payload?.errors?.length) {
    const combined = payload.errors.map((item) => item.message).join("; ");
    fail(combined);
  }

  return payload?.data;
}
|
|
430
|
+
|
|
431
|
+
// Interpret an optional CLI string as a boolean. undefined yields the
// fallback; only the exact strings "true" and "false" are accepted, and any
// other value is a fatal error.
function parseBooleanOption(value, fallback = false) {
  if (value === undefined) {
    return fallback;
  }

  switch (value) {
    case "true":
      return true;
    case "false":
      return false;
    default:
      fail(`expected boolean string 'true' or 'false', got: ${value}`);
  }
}
|
|
446
|
+
|
|
447
|
+
// Parse the --limit option into an integer between 5 and 100 inclusive;
// undefined yields the fallback, anything out of range is fatal.
// Fix: the value must now be digits only. The previous implementation relied
// on Number.parseInt alone, which silently accepts trailing garbage, so
// "--limit 10abc" was treated as 10 instead of being rejected.
function parseLimit(value, fallback = 10) {
  if (value === undefined) {
    return fallback;
  }

  const parsed = /^\d+$/.test(value) ? Number.parseInt(value, 10) : Number.NaN;
  if (!Number.isInteger(parsed) || parsed < 5 || parsed > 100) {
    fail("--limit must be an integer between 5 and 100");
  }

  return parsed;
}
|
|
459
|
+
|
|
460
|
+
// Map X "public_metrics" into a normalized metrics object, defaulting every
// counter to 0 when the input is missing or not an object. "reposts" accepts
// both the retweet_count and repost_count field names.
function normalizeXMetrics(metrics) {
  const source = metrics && typeof metrics === "object" ? metrics : {};

  return {
    likes: source.like_count || 0,
    reposts: source.retweet_count || source.repost_count || 0,
    replies: source.reply_count || 0,
    quotes: source.quote_count || 0,
    bookmarks: source.bookmark_count || 0,
    impressions: source.impression_count || 0,
  };
}
|
|
481
|
+
|
|
482
|
+
// Weighted engagement score: quotes count 3x, reposts and replies 2x, likes
// and bookmarks 1x. Impressions do not contribute to the score.
function computeEngagementScore(metrics) {
  const { likes, reposts, replies, quotes, bookmarks } = metrics;
  return likes + bookmarks + 2 * (reposts + replies) + 3 * quotes;
}
|
|
491
|
+
|
|
492
|
+
// Return a NEW array sorted descending by the requested criterion without
// mutating the input. "recent" orders by createdAt timestamp; "likes",
// "reposts", "replies", and "quotes" order by that metric; any other value
// (including "engagement") orders by the precomputed score.
function sortAnalytics(items, sort) {
  const metricKeys = new Set(["likes", "reposts", "replies", "quotes"]);

  const rank = (item) => {
    if (sort === "recent") {
      return new Date(item.createdAt).getTime();
    }
    if (metricKeys.has(sort)) {
      return item.metrics[sort];
    }
    return item.score;
  };

  return [...items].sort((left, right) => rank(right) - rank(left));
}
|
|
521
|
+
|
|
522
|
+
// Perform a GET against the X v2 API. Query parameters that are undefined,
// null, or empty strings are omitted. Transport failure, a non-2xx status,
// and API-level errors are all fatal; otherwise the parsed payload is
// returned.
async function xGet({ token, path, query }) {
  const url = new URL(`${X_API_ENDPOINT}${path}`);

  for (const [key, value] of Object.entries(query ?? {})) {
    if (value !== undefined && value !== null && value !== "") {
      url.searchParams.set(key, String(value));
    }
  }

  const { ok, status, payload } = await getJson(url.toString(), token);

  if (!ok) {
    const detail = payload ? JSON.stringify(payload) : "Request failed";
    fail(`X request failed with ${status}: ${detail}`);
  }

  if (payload?.errors?.length) {
    fail(payload.errors.map((item) => item.detail || item.message).join("; "));
  }

  return payload;
}
|
|
547
|
+
|
|
548
|
+
// Resolve an X username to its user object, requesting public_metrics.
// A missing user (no id in the response) is a fatal error.
async function lookupXUser({ token, username }) {
  const response = await xGet({
    token,
    path: `/users/by/username/${encodeURIComponent(username)}`,
    query: { "user.fields": "public_metrics" },
  });

  if (!response?.data?.id) {
    fail(`Could not find X user: ${username}`);
  }

  return response.data;
}
|
|
563
|
+
|
|
564
|
+
// Fetch a user's recent posts from the X v2 timeline endpoint. Replies and
// retweets are excluded unless explicitly requested via the flags. Returns an
// empty array when the response carries no data array.
async function listXPosts({ token, userId, limit, includeReplies, includeRetweets }) {
  const exclusions = [
    ...(includeReplies ? [] : ["replies"]),
    ...(includeRetweets ? [] : ["retweets"]),
  ];

  const response = await xGet({
    token,
    path: `/users/${encodeURIComponent(userId)}/tweets`,
    query: {
      max_results: limit,
      exclude: exclusions.join(","),
      "tweet.fields": "created_at,public_metrics,lang",
    },
  });

  return Array.isArray(response?.data) ? response.data : [];
}
|
|
585
|
+
|
|
586
|
+
// Normalize a --service filter: null when absent, lowercase otherwise, with
// "x" mapped to Buffer's internal service name "twitter".
function normalizeServiceFilter(service) {
  if (!service) {
    return null;
  }

  const lowered = service.toLowerCase();
  return lowered === "x" ? "twitter" : lowered;
}
|
|
599
|
+
|
|
600
|
+
// Normalize the --platform option. Missing/empty defaults to "x", and
// "twitter" is accepted as an alias for "x". Anything else must be in
// VALID_POST_PLATFORMS or it is a fatal error.
function normalizePostPlatform(platform) {
  if (!platform) {
    return "x";
  }

  const lowered = platform.toLowerCase();
  if (lowered === "twitter") {
    return "x";
  }

  if (!VALID_POST_PLATFORMS.has(lowered)) {
    fail(`--platform must be one of: ${Array.from(VALID_POST_PLATFORMS).join(", ")}`);
  }

  return lowered;
}
|
|
617
|
+
|
|
618
|
+
// Resolve the Buffer channel ID for a post: an explicit --channel value wins;
// otherwise the platform-specific environment variable is used. For X, the
// legacy BUFFER_CHANNEL_ID is honored as a fallback.
function resolveChannelId({ platform, channel }) {
  if (channel) {
    return channel;
  }

  const { env } = process;
  return platform === "linkedin"
    ? env.BUFFER_LINKEDIN_CHANNEL_ID
    : env.BUFFER_X_CHANNEL_ID || env.BUFFER_CHANNEL_ID;
}
|
|
629
|
+
|
|
630
|
+
// List Buffer channels ("profiles") across every organization on the account,
// optionally filtered by service name. Issues one GraphQL query for the
// account's organizations, then one Channels query per organization.
// NOTE(review): the per-organization queries run sequentially; presumably
// acceptable for the small org counts typical accounts have — confirm before
// parallelizing.
async function listBufferProfiles({ apiKey, service }) {
  // "x" is normalized to Buffer's service name "twitter"; null means no filter.
  const normalizedService = normalizeServiceFilter(service);

  const accountData = await queryBufferGraphql({
    apiKey,
    query: `
      query AccountOrganizations {
        account {
          id
          organizations {
            id
            name
          }
        }
      }
    `,
  });

  const organizations = accountData?.account?.organizations;
  if (!Array.isArray(organizations) || organizations.length === 0) {
    fail("No Buffer organizations found for this account");
  }

  const profiles = [];

  for (const organization of organizations) {
    const channelsData = await queryBufferGraphql({
      apiKey,
      query: `
        query Channels($input: ChannelsInput!) {
          channels(input: $input) {
            id
            name
            service
            displayName
            descriptor
            timezone
            organizationId
          }
        }
      `,
      variables: {
        input: {
          organizationId: organization.id,
        },
      },
    });

    const channels = channelsData?.channels;
    // An organization with no channels array is skipped rather than fatal.
    if (!Array.isArray(channels)) {
      continue;
    }

    for (const channel of channels) {
      if (normalizedService && channel?.service !== normalizedService) {
        continue;
      }

      // Flatten channel + owning-organization info into one record; optional
      // fields are normalized to null for stable JSON output.
      profiles.push({
        id: channel.id,
        organizationId: channel.organizationId,
        organizationName: organization.name,
        service: channel.service,
        name: channel.name,
        displayName: channel.displayName || null,
        descriptor: channel.descriptor || null,
        timezone: channel.timezone || null,
      });
    }
  }

  return profiles;
}
|
|
703
|
+
|
|
704
|
+
// "channels" command handler: list Buffer channels (optionally filtered by
// --service) and print them as pretty-printed JSON on stdout. A missing API
// key is a fatal error.
async function handleChannels(options) {
  const apiKey = options["api-key"] || process.env.BUFFER_API_KEY;
  if (!apiKey) {
    fail("BUFFER_API_KEY is required");
  }

  const profiles = await listBufferProfiles({ apiKey, service: options.service });
  console.log(JSON.stringify(profiles, null, 2));
}
|
|
717
|
+
|
|
718
|
+
// "post" command handler: read a markdown file, flatten it to plain text, and
// either preview the payload (--dry-run, no credentials needed) or create a
// Buffer post on the resolved channel. Accepts at most one image: a local
// file (--image, inlined as base64) or a remote URL (--image-url).
async function handlePost(options) {
  const filePath = options.file;
  if (!filePath) {
    fail("--file is required");
  }

  const apiKey = options["api-key"] || process.env.BUFFER_API_KEY;
  const platform = normalizePostPlatform(options.platform);
  const channelId = resolveChannelId({ platform, channel: options.channel });
  // parseArgs already defaults mode, but keep a fallback for direct callers.
  const mode = options.mode || "addToQueue";
  const imagePath = options.image;
  const imageUrl = options["image-url"];

  if (!VALID_MODES.has(mode)) {
    fail(`--mode must be one of: ${Array.from(VALID_MODES).join(", ")}`);
  }

  if (imagePath && imageUrl) {
    fail("use either --image or --image-url, not both");
  }

  // Dry runs never contact Buffer, so the API key is only required otherwise.
  if (!apiKey && !options.dryRun) {
    fail("BUFFER_API_KEY is required unless --dry-run is used");
  }

  if (!channelId) {
    if (platform === "linkedin") {
      fail("BUFFER_LINKEDIN_CHANNEL_ID or --channel is required for --platform linkedin");
    }

    fail("BUFFER_X_CHANNEL_ID, BUFFER_CHANNEL_ID, or --channel is required for --platform x");
  }

  const text = await loadPostText(filePath);
  // Local image loading is async (file read); remote URLs pass through sync.
  const assets = imagePath
    ? await loadImageAsset(imagePath, options.alt)
    : imageUrl
    ? loadRemoteImageAsset(imageUrl, options.alt)
    : undefined;

  if (options.dryRun) {
    // Preview exactly what would be sent, without calling Buffer.
    console.log(
      JSON.stringify(
        {
          channelId,
          filePath,
          imagePath: imagePath || null,
          imageUrl: imageUrl || null,
          mode,
          platform,
          text,
          hasAssets: Boolean(assets),
        },
        null,
        2,
      ),
    );
    return;
  }

  const post = await createBufferPost({
    apiKey,
    channelId,
    mode,
    text,
    assets,
  });

  // Echo the created post so scripts can capture the Buffer post ID.
  console.log(
    JSON.stringify(
      {
        ok: true,
        channelId,
        id: post.id,
        mode,
        platform,
        text: post.text,
      },
      null,
      2,
    ),
  );
}
|
|
801
|
+
|
|
802
|
+
/**
 * `analytics` command: fetch a user's recent X posts, score them, rank them
 * by the requested sort key, and print the result as pretty JSON.
 *
 * @param {Record<string, string|boolean>} options - parsed CLI flags; reads
 *   `username` (required), `x-token` (falls back to X_BEARER_TOKEN then
 *   TWITTER_BEARER_TOKEN), `limit`, `sort`, `include-replies`,
 *   `include-retweets`.
 */
async function handleAnalytics(options) {
  const username = options.username;
  if (!username) {
    fail("--username is required");
  }

  const token =
    options["x-token"] || process.env.X_BEARER_TOKEN || process.env.TWITTER_BEARER_TOKEN;
  if (!token) {
    fail("X_BEARER_TOKEN or --x-token is required");
  }

  const limit = parseLimit(options.limit, 10);
  const sort = options.sort || "engagement";
  if (!VALID_SORTS.has(sort)) {
    fail(`--sort must be one of: ${Array.from(VALID_SORTS).join(", ")}`);
  }

  const includeReplies = parseBooleanOption(options["include-replies"], false);
  const includeRetweets = parseBooleanOption(options["include-retweets"], false);

  const user = await lookupXUser({ token, username });
  const rawPosts = await listXPosts({
    token,
    userId: user.id,
    limit,
    includeReplies,
    includeRetweets,
  });

  // Attach normalized metrics, a canonical URL, and an engagement score to
  // each post before handing the whole set to the ranking helper.
  const scored = [];
  for (const post of rawPosts) {
    const metrics = normalizeXMetrics(post.public_metrics);
    scored.push({
      id: post.id,
      createdAt: post.created_at,
      text: post.text,
      url: `https://x.com/${username}/status/${post.id}`,
      metrics,
      score: computeEngagementScore(metrics),
    });
  }
  const ranked = sortAnalytics(scored, sort);

  const report = {
    ok: true,
    username,
    userId: user.id,
    sort,
    count: ranked.length,
    tweets: ranked,
  };
  console.log(JSON.stringify(report, null, 2));
}
|
|
862
|
+
|
|
863
|
+
/**
 * CLI entry point: load .env files, parse argv, and dispatch to the matching
 * command handler. Any unrecognized command terminates via fail().
 */
async function main() {
  loadEnvFiles();

  const { command, options } = parseArgs(process.argv.slice(2));

  switch (command) {
    case "help":
      printHelp();
      return;
    case "channels":
      await handleChannels(options);
      return;
    case "post":
      await handlePost(options);
      return;
    case "analytics":
      await handleAnalytics(options);
      return;
    default:
      // Fix: the old `if (command !== "post")` guard here was dead code —
      // "post" already returned above, so unknown commands must always fail.
      fail(`unsupported command: ${command}`);
  }
}
|
|
892
|
+
|
|
893
|
+
// Top-level runner: funnel any unhandled rejection through fail() so the
// process exits non-zero with a readable message instead of a stack dump.
main().catch((error) => {
  const message = error instanceof Error ? error.message : String(error);
  fail(message);
});
|
package/package.json
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "socialbuffer",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "Queue markdown-based social posts to Buffer for X and LinkedIn from the terminal.",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"bin": {
|
|
7
|
+
"socialbuffer": "./bin/socialbuffer.js",
|
|
8
|
+
"tweetx": "./bin/socialbuffer.js"
|
|
9
|
+
},
|
|
10
|
+
"files": [
|
|
11
|
+
"bin",
|
|
12
|
+
"README.md"
|
|
13
|
+
],
|
|
14
|
+
"scripts": {
|
|
15
|
+
"start": "node ./bin/socialbuffer.js",
|
|
16
|
+
"check": "node ./bin/socialbuffer.js --help",
|
|
17
|
+
"check:dry-run": "node ./bin/socialbuffer.js post --file ./example-post.md --dry-run",
|
|
18
|
+
"test": "npm run check && npm run check:dry-run"
|
|
19
|
+
},
|
|
20
|
+
"keywords": [
|
|
21
|
+
"buffer",
|
|
22
|
+
"social",
|
|
23
|
+
"social-media",
|
|
24
|
+
"x",
|
|
25
|
+
"twitter",
|
|
26
|
+
"linkedin",
|
|
27
|
+
"cli",
|
|
28
|
+
"markdown"
|
|
29
|
+
],
|
|
30
|
+
"license": "MIT",
|
|
31
|
+
"engines": {
|
|
32
|
+
"node": ">=20"
|
|
33
|
+
}
|
|
34
|
+
}
|