spora 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +87 -0
- package/bin/spora.js +2 -0
- package/dist/account-creator-PZW5JLHS.js +498 -0
- package/dist/account-creator-PZW5JLHS.js.map +1 -0
- package/dist/chunk-3JEDGXEM.js +32 -0
- package/dist/chunk-3JEDGXEM.js.map +1 -0
- package/dist/chunk-53YLFYJF.js +59 -0
- package/dist/chunk-53YLFYJF.js.map +1 -0
- package/dist/chunk-7CR4ID6P.js +614 -0
- package/dist/chunk-7CR4ID6P.js.map +1 -0
- package/dist/chunk-AHXZIGQE.js +156 -0
- package/dist/chunk-AHXZIGQE.js.map +1 -0
- package/dist/chunk-DJJWHOL3.js +162 -0
- package/dist/chunk-DJJWHOL3.js.map +1 -0
- package/dist/chunk-EBO4F5NU.js +105 -0
- package/dist/chunk-EBO4F5NU.js.map +1 -0
- package/dist/chunk-ERTBXYOP.js +81 -0
- package/dist/chunk-ERTBXYOP.js.map +1 -0
- package/dist/chunk-KELPENM3.js +47 -0
- package/dist/chunk-KELPENM3.js.map +1 -0
- package/dist/chunk-NFDZ47AG.js +57 -0
- package/dist/chunk-NFDZ47AG.js.map +1 -0
- package/dist/chunk-O23NWMYU.js +124 -0
- package/dist/chunk-O23NWMYU.js.map +1 -0
- package/dist/chunk-YEKHNTQO.js +80 -0
- package/dist/chunk-YEKHNTQO.js.map +1 -0
- package/dist/chunk-ZJZKH7N7.js +56 -0
- package/dist/chunk-ZJZKH7N7.js.map +1 -0
- package/dist/cli.js +675 -0
- package/dist/cli.js.map +1 -0
- package/dist/client-3AQCA4YE.js +401 -0
- package/dist/client-3AQCA4YE.js.map +1 -0
- package/dist/client-RBGZWS3Q.js +373 -0
- package/dist/client-RBGZWS3Q.js.map +1 -0
- package/dist/colony-J5KQIV6M.js +229 -0
- package/dist/colony-J5KQIV6M.js.map +1 -0
- package/dist/config-NZAFARS6.js +14 -0
- package/dist/config-NZAFARS6.js.map +1 -0
- package/dist/crypto-FHSQ72NU.js +14 -0
- package/dist/crypto-FHSQ72NU.js.map +1 -0
- package/dist/heartbeat-J4JLYH2B.js +358 -0
- package/dist/heartbeat-J4JLYH2B.js.map +1 -0
- package/dist/init-BG4Z4XQU.js +205 -0
- package/dist/init-BG4Z4XQU.js.map +1 -0
- package/dist/llm-RDNC5Y3G.js +16 -0
- package/dist/llm-RDNC5Y3G.js.map +1 -0
- package/dist/mcp-server.js +773 -0
- package/dist/mcp-server.js.map +1 -0
- package/dist/memory-7FBE26K3.js +26 -0
- package/dist/memory-7FBE26K3.js.map +1 -0
- package/dist/memory-O3AJIKBX.js +24 -0
- package/dist/memory-O3AJIKBX.js.map +1 -0
- package/dist/paths-5GFUUHCZ.js +13 -0
- package/dist/paths-5GFUUHCZ.js.map +1 -0
- package/dist/prompt-builder-WNMZ2QCN.js +17 -0
- package/dist/prompt-builder-WNMZ2QCN.js.map +1 -0
- package/dist/queue-ELK5ZX7J.js +14 -0
- package/dist/queue-ELK5ZX7J.js.map +1 -0
- package/dist/x-client-J4GE5A7P.js +12 -0
- package/dist/x-client-J4GE5A7P.js.map +1 -0
- package/package.json +57 -0
- package/templates/SKILL.md +335 -0
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
import {
|
|
2
|
+
logger
|
|
3
|
+
} from "./chunk-KELPENM3.js";
|
|
4
|
+
import {
|
|
5
|
+
loadConfig
|
|
6
|
+
} from "./chunk-YEKHNTQO.js";
|
|
7
|
+
import {
|
|
8
|
+
paths
|
|
9
|
+
} from "./chunk-53YLFYJF.js";
|
|
10
|
+
|
|
11
|
+
// src/runtime/llm.ts
|
|
12
|
+
import Anthropic from "@anthropic-ai/sdk";
|
|
13
|
+
import { readFileSync, existsSync } from "fs";
|
|
14
|
+
var client = null;
|
|
15
|
+
// Resolves the Anthropic API key. The ANTHROPIC_API_KEY environment
// variable takes precedence; otherwise the on-disk key file is read.
// Returns null when no key is configured anywhere.
function getLLMApiKey() {
  const envKey = process.env.ANTHROPIC_API_KEY;
  if (envKey) {
    return envKey;
  }
  if (existsSync(paths.llmKey)) {
    return readFileSync(paths.llmKey, "utf-8").trim();
  }
  return null;
}
|
|
24
|
+
// True when an Anthropic API key is available from any source.
function hasLLMKey() {
  const key = getLLMApiKey();
  return key !== null;
}
|
|
27
|
+
// Lazily constructs and caches the module-level Anthropic SDK client.
// Throws when no API key has been configured yet.
function getClient() {
  if (client) {
    return client;
  }
  const apiKey = getLLMApiKey();
  if (!apiKey) {
    throw new Error("No LLM API key configured. Run `spora set-llm-key` first.");
  }
  client = new Anthropic({ apiKey });
  return client;
}
|
|
36
|
+
// Sends a single-turn prompt to the configured Claude model and returns
// the text reply together with token usage counts.
// systemPrompt: system instruction string; userMessage: the user turn.
async function generateResponse(systemPrompt, userMessage) {
  const { llm } = loadConfig();
  const model = llm?.model ?? "claude-sonnet-4-20250514";
  logger.info(`Calling LLM (${model})...`);
  const response = await getClient().messages.create({
    model,
    max_tokens: 1024,
    system: systemPrompt,
    messages: [{ role: "user", content: userMessage }]
  });
  // First text block wins; non-text blocks are ignored.
  const textBlock = response.content.find((block) => block.type === "text");
  const content = textBlock ? textBlock.text : "";
  const { usage } = response;
  logger.info(`LLM response: ${usage.input_tokens} in, ${usage.output_tokens} out`);
  return {
    content,
    inputTokens: usage.input_tokens,
    outputTokens: usage.output_tokens
  };
}
|
|
56
|
+
// Multi-turn variant of generateResponse: sends a full message history
// (array of { role, content }) instead of a single user message.
async function chat(systemPrompt, messages) {
  const { llm } = loadConfig();
  const model = llm?.model ?? "claude-sonnet-4-20250514";
  const response = await getClient().messages.create({
    model,
    max_tokens: 1024,
    system: systemPrompt,
    messages
  });
  // First text block wins; non-text blocks are ignored.
  const textBlock = response.content.find((block) => block.type === "text");
  const { usage } = response;
  return {
    content: textBlock ? textBlock.text : "",
    inputTokens: usage.input_tokens,
    outputTokens: usage.output_tokens
  };
}
|
|
74
|
+
|
|
75
|
+
export {
|
|
76
|
+
getLLMApiKey,
|
|
77
|
+
hasLLMKey,
|
|
78
|
+
generateResponse,
|
|
79
|
+
chat
|
|
80
|
+
};
|
|
81
|
+
//# sourceMappingURL=chunk-ERTBXYOP.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/runtime/llm.ts"],"sourcesContent":["import Anthropic from \"@anthropic-ai/sdk\";\nimport { readFileSync, existsSync } from \"node:fs\";\nimport { paths } from \"../utils/paths.js\";\nimport { loadConfig } from \"../utils/config.js\";\nimport { logger } from \"../utils/logger.js\";\n\nlet client: Anthropic | null = null;\n\nexport function getLLMApiKey(): string | null {\n // Check env first, then file\n if (process.env.ANTHROPIC_API_KEY) {\n return process.env.ANTHROPIC_API_KEY;\n }\n if (existsSync(paths.llmKey)) {\n return readFileSync(paths.llmKey, \"utf-8\").trim();\n }\n return null;\n}\n\nexport function hasLLMKey(): boolean {\n return getLLMApiKey() !== null;\n}\n\nfunction getClient(): Anthropic {\n if (client) return client;\n const apiKey = getLLMApiKey();\n if (!apiKey) {\n throw new Error(\"No LLM API key configured. Run `spora set-llm-key` first.\");\n }\n client = new Anthropic({ apiKey });\n return client;\n}\n\nexport interface LLMResponse {\n content: string;\n inputTokens: number;\n outputTokens: number;\n}\n\nexport async function generateResponse(\n systemPrompt: string,\n userMessage: string,\n): Promise<LLMResponse> {\n const config = loadConfig();\n const model = config.llm?.model ?? \"claude-sonnet-4-20250514\";\n\n logger.info(`Calling LLM (${model})...`);\n\n const anthropic = getClient();\n const response = await anthropic.messages.create({\n model,\n max_tokens: 1024,\n system: systemPrompt,\n messages: [{ role: \"user\", content: userMessage }],\n });\n\n const textBlock = response.content.find((b) => b.type === \"text\");\n const content = textBlock ? 
textBlock.text : \"\";\n\n logger.info(`LLM response: ${response.usage.input_tokens} in, ${response.usage.output_tokens} out`);\n\n return {\n content,\n inputTokens: response.usage.input_tokens,\n outputTokens: response.usage.output_tokens,\n };\n}\n\nexport async function chat(\n systemPrompt: string,\n messages: Array<{ role: \"user\" | \"assistant\"; content: string }>,\n): Promise<LLMResponse> {\n const config = loadConfig();\n const model = config.llm?.model ?? \"claude-sonnet-4-20250514\";\n\n const anthropic = getClient();\n const response = await anthropic.messages.create({\n model,\n max_tokens: 1024,\n system: systemPrompt,\n messages,\n });\n\n const textBlock = response.content.find((b) => b.type === \"text\");\n const content = textBlock ? textBlock.text : \"\";\n\n return {\n content,\n inputTokens: response.usage.input_tokens,\n outputTokens: response.usage.output_tokens,\n };\n}\n"],"mappings":";;;;;;;;;;;AAAA,OAAO,eAAe;AACtB,SAAS,cAAc,kBAAkB;AAKzC,IAAI,SAA2B;AAExB,SAAS,eAA8B;AAE5C,MAAI,QAAQ,IAAI,mBAAmB;AACjC,WAAO,QAAQ,IAAI;AAAA,EACrB;AACA,MAAI,WAAW,MAAM,MAAM,GAAG;AAC5B,WAAO,aAAa,MAAM,QAAQ,OAAO,EAAE,KAAK;AAAA,EAClD;AACA,SAAO;AACT;AAEO,SAAS,YAAqB;AACnC,SAAO,aAAa,MAAM;AAC5B;AAEA,SAAS,YAAuB;AAC9B,MAAI,OAAQ,QAAO;AACnB,QAAM,SAAS,aAAa;AAC5B,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI,MAAM,2DAA2D;AAAA,EAC7E;AACA,WAAS,IAAI,UAAU,EAAE,OAAO,CAAC;AACjC,SAAO;AACT;AAQA,eAAsB,iBACpB,cACA,aACsB;AACtB,QAAM,SAAS,WAAW;AAC1B,QAAM,QAAQ,OAAO,KAAK,SAAS;AAEnC,SAAO,KAAK,gBAAgB,KAAK,MAAM;AAEvC,QAAM,YAAY,UAAU;AAC5B,QAAM,WAAW,MAAM,UAAU,SAAS,OAAO;AAAA,IAC/C;AAAA,IACA,YAAY;AAAA,IACZ,QAAQ;AAAA,IACR,UAAU,CAAC,EAAE,MAAM,QAAQ,SAAS,YAAY,CAAC;AAAA,EACnD,CAAC;AAED,QAAM,YAAY,SAAS,QAAQ,KAAK,CAAC,MAAM,EAAE,SAAS,MAAM;AAChE,QAAM,UAAU,YAAY,UAAU,OAAO;AAE7C,SAAO,KAAK,iBAAiB,SAAS,MAAM,YAAY,QAAQ,SAAS,MAAM,aAAa,MAAM;AAElG,SAAO;AAAA,IACL;AAAA,IACA,aAAa,SAAS,MAAM;AAAA,IAC5B,cAAc,SAAS,MAAM;AAAA,EAC/B;AACF;AAEA,eAAsB,KACpB,cACA,UACsB;AACtB,QAAM,SAAS,WAAW;AAC1B,QAAM,QAAQ,OAAO,KAAK,SAAS;AAEnC,QAAM,YAAY,UAAU;AAC5B,QA
AM,WAAW,MAAM,UAAU,SAAS,OAAO;AAAA,IAC/C;AAAA,IACA,YAAY;AAAA,IACZ,QAAQ;AAAA,IACR;AAAA,EACF,CAAC;AAED,QAAM,YAAY,SAAS,QAAQ,KAAK,CAAC,MAAM,EAAE,SAAS,MAAM;AAChE,QAAM,UAAU,YAAY,UAAU,OAAO;AAE7C,SAAO;AAAA,IACL;AAAA,IACA,aAAa,SAAS,MAAM;AAAA,IAC5B,cAAc,SAAS,MAAM;AAAA,EAC/B;AACF;","names":[]}
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
import {
|
|
2
|
+
ensureDirectories,
|
|
3
|
+
paths
|
|
4
|
+
} from "./chunk-53YLFYJF.js";
|
|
5
|
+
|
|
6
|
+
// src/utils/logger.ts
|
|
7
|
+
import { appendFileSync } from "fs";
|
|
8
|
+
// Numeric severity ranking used to filter messages against minLevel.
var LOG_LEVELS = {
  debug: 0,
  info: 1,
  warn: 2,
  error: 3
};
// Minimum severity actually emitted; adjustable via setLogLevel().
var minLevel = "info";
|
|
15
|
+
// Sets the minimum severity for subsequent log output.
function setLogLevel(level) {
  minLevel = level;
}
|
|
18
|
+
// Builds one log line: "[ISO timestamp] [LEVEL] message", with an
// optional JSON-serialized data payload appended after the message.
function formatMessage(level, message, data) {
  const stamp = new Date().toISOString();
  const head = `[${stamp}] [${level.toUpperCase()}] ${message}`;
  return data === void 0 ? head : `${head} ${JSON.stringify(data)}`;
}
|
|
26
|
+
// Emits one log line to stderr and best-effort appends the same line to
// the log file. Messages below the configured minimum level are dropped.
function log(level, message, data) {
  if (LOG_LEVELS[level] < LOG_LEVELS[minLevel]) return;
  const line = formatMessage(level, message, data) + "\n";
  // stderr is always safe to write (stdout may carry MCP stdio traffic).
  process.stderr.write(line);
  try {
    ensureDirectories();
    appendFileSync(paths.logFile, line);
  } catch {
    // File logging is best-effort; ignore write failures.
  }
}
|
|
36
|
+
// Level-bound convenience helpers; each delegates to log() with a fixed level.
var logger = {
  debug: (message, data) => log("debug", message, data),
  info: (message, data) => log("info", message, data),
  warn: (message, data) => log("warn", message, data),
  error: (message, data) => log("error", message, data)
};
|
|
42
|
+
|
|
43
|
+
export {
|
|
44
|
+
setLogLevel,
|
|
45
|
+
logger
|
|
46
|
+
};
|
|
47
|
+
//# sourceMappingURL=chunk-KELPENM3.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/utils/logger.ts"],"sourcesContent":["import { appendFileSync } from \"node:fs\";\nimport { paths, ensureDirectories } from \"./paths.js\";\n\nexport type LogLevel = \"debug\" | \"info\" | \"warn\" | \"error\";\n\nconst LOG_LEVELS: Record<LogLevel, number> = {\n debug: 0,\n info: 1,\n warn: 2,\n error: 3,\n};\n\nlet minLevel: LogLevel = \"info\";\n\nexport function setLogLevel(level: LogLevel): void {\n minLevel = level;\n}\n\nfunction formatMessage(level: LogLevel, message: string, data?: unknown): string {\n const timestamp = new Date().toISOString();\n const base = `[${timestamp}] [${level.toUpperCase()}] ${message}`;\n if (data !== undefined) {\n return `${base} ${JSON.stringify(data)}`;\n }\n return base;\n}\n\nfunction log(level: LogLevel, message: string, data?: unknown): void {\n if (LOG_LEVELS[level] < LOG_LEVELS[minLevel]) return;\n\n const formatted = formatMessage(level, message, data);\n\n // Always write to stderr (safe for MCP stdio servers)\n process.stderr.write(formatted + \"\\n\");\n\n // Also append to log file\n try {\n ensureDirectories();\n appendFileSync(paths.logFile, formatted + \"\\n\");\n } catch {\n // Silently ignore file write errors\n }\n}\n\nexport const logger = {\n debug: (message: string, data?: unknown) => log(\"debug\", message, data),\n info: (message: string, data?: unknown) => log(\"info\", message, data),\n warn: (message: string, data?: unknown) => log(\"warn\", message, data),\n error: (message: string, data?: unknown) => log(\"error\", message, 
data),\n};\n"],"mappings":";;;;;;AAAA,SAAS,sBAAsB;AAK/B,IAAM,aAAuC;AAAA,EAC3C,OAAO;AAAA,EACP,MAAM;AAAA,EACN,MAAM;AAAA,EACN,OAAO;AACT;AAEA,IAAI,WAAqB;AAElB,SAAS,YAAY,OAAuB;AACjD,aAAW;AACb;AAEA,SAAS,cAAc,OAAiB,SAAiB,MAAwB;AAC/E,QAAM,aAAY,oBAAI,KAAK,GAAE,YAAY;AACzC,QAAM,OAAO,IAAI,SAAS,MAAM,MAAM,YAAY,CAAC,KAAK,OAAO;AAC/D,MAAI,SAAS,QAAW;AACtB,WAAO,GAAG,IAAI,IAAI,KAAK,UAAU,IAAI,CAAC;AAAA,EACxC;AACA,SAAO;AACT;AAEA,SAAS,IAAI,OAAiB,SAAiB,MAAsB;AACnE,MAAI,WAAW,KAAK,IAAI,WAAW,QAAQ,EAAG;AAE9C,QAAM,YAAY,cAAc,OAAO,SAAS,IAAI;AAGpD,UAAQ,OAAO,MAAM,YAAY,IAAI;AAGrC,MAAI;AACF,sBAAkB;AAClB,mBAAe,MAAM,SAAS,YAAY,IAAI;AAAA,EAChD,QAAQ;AAAA,EAER;AACF;AAEO,IAAM,SAAS;AAAA,EACpB,OAAO,CAAC,SAAiB,SAAmB,IAAI,SAAS,SAAS,IAAI;AAAA,EACtE,MAAM,CAAC,SAAiB,SAAmB,IAAI,QAAQ,SAAS,IAAI;AAAA,EACpE,MAAM,CAAC,SAAiB,SAAmB,IAAI,QAAQ,SAAS,IAAI;AAAA,EACpE,OAAO,CAAC,SAAiB,SAAmB,IAAI,SAAS,SAAS,IAAI;AACxE;","names":[]}
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
import {
|
|
2
|
+
logger
|
|
3
|
+
} from "./chunk-KELPENM3.js";
|
|
4
|
+
import {
|
|
5
|
+
loadConfig,
|
|
6
|
+
saveConfig
|
|
7
|
+
} from "./chunk-YEKHNTQO.js";
|
|
8
|
+
|
|
9
|
+
// src/x-client/rate-limiter.ts
|
|
10
|
+
// Tracks the monthly X posting budget persisted in the config file and
// enforces API-tier requirements for certain actions.
var RateLimiter = class {
  // Rolls the usage counter over to a fresh month once the stored reset
  // date has passed, persisting the updated config.
  checkResetDate() {
    const config = loadConfig();
    const now = new Date();
    if (now >= new Date(config.credits.resetDate)) {
      const firstOfNextMonth = new Date(now.getFullYear(), now.getMonth() + 1, 1);
      config.credits.postsUsedThisMonth = 0;
      config.credits.resetDate = firstOfNextMonth.toISOString();
      saveConfig(config);
      logger.info("Monthly credits reset");
    }
  }
  // Whether at least one post credit remains this month.
  canPost() {
    this.checkResetDate();
    const { credits } = loadConfig();
    return credits.postsUsedThisMonth < credits.monthlyPostLimit;
  }
  // Number of post credits left this month.
  remaining() {
    this.checkResetDate();
    const { credits } = loadConfig();
    return credits.monthlyPostLimit - credits.postsUsedThisMonth;
  }
  // Records `count` posts against this month's budget and warns once the
  // remainder drops to 20% of the limit or below.
  consume(count = 1) {
    this.checkResetDate();
    const config = loadConfig();
    config.credits.postsUsedThisMonth += count;
    saveConfig(config);
    const left = config.credits.monthlyPostLimit - config.credits.postsUsedThisMonth;
    if (left <= Math.floor(config.credits.monthlyPostLimit * 0.2)) {
      logger.warn(`Low credits: ${left} posts remaining this month`);
    }
  }
  // Throws when the action needs the paid API tier but the config is set
  // to API mode without it. Browser mode is never restricted here.
  requireBasicTier(action) {
    const config = loadConfig();
    const onFreeApi = config.xMethod === "api" && config.xApiTier !== "basic";
    if (onFreeApi) {
      throw new Error(
        `${action} requires X API Basic tier ($200/mo). Current tier: ${config.xApiTier ?? "free"}. Switch to browser mode or upgrade your API tier.`
      );
    }
  }
};
|
|
52
|
+
var rateLimiter = new RateLimiter();
|
|
53
|
+
|
|
54
|
+
export {
|
|
55
|
+
rateLimiter
|
|
56
|
+
};
|
|
57
|
+
//# sourceMappingURL=chunk-NFDZ47AG.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/x-client/rate-limiter.ts"],"sourcesContent":["import { loadConfig, saveConfig } from \"../utils/config.js\";\nimport { logger } from \"../utils/logger.js\";\n\nexport class RateLimiter {\n private checkResetDate(): void {\n const config = loadConfig();\n const now = new Date();\n const resetDate = new Date(config.credits.resetDate);\n\n if (now >= resetDate) {\n config.credits.postsUsedThisMonth = 0;\n const nextReset = new Date(now.getFullYear(), now.getMonth() + 1, 1);\n config.credits.resetDate = nextReset.toISOString();\n saveConfig(config);\n logger.info(\"Monthly credits reset\");\n }\n }\n\n canPost(): boolean {\n this.checkResetDate();\n const config = loadConfig();\n return config.credits.postsUsedThisMonth < config.credits.monthlyPostLimit;\n }\n\n remaining(): number {\n this.checkResetDate();\n const config = loadConfig();\n return config.credits.monthlyPostLimit - config.credits.postsUsedThisMonth;\n }\n\n consume(count: number = 1): void {\n this.checkResetDate();\n const config = loadConfig();\n config.credits.postsUsedThisMonth += count;\n saveConfig(config);\n\n const remaining = config.credits.monthlyPostLimit - config.credits.postsUsedThisMonth;\n if (remaining <= Math.floor(config.credits.monthlyPostLimit * 0.2)) {\n logger.warn(`Low credits: ${remaining} posts remaining this month`);\n }\n }\n\n requireBasicTier(action: string): void {\n const config = loadConfig();\n if (config.xMethod === \"api\" && config.xApiTier !== \"basic\") {\n throw new Error(\n `${action} requires X API Basic tier ($200/mo). ` +\n `Current tier: ${config.xApiTier ?? \"free\"}. 
` +\n `Switch to browser mode or upgrade your API tier.`\n );\n }\n }\n}\n\nexport const rateLimiter = new RateLimiter();\n"],"mappings":";;;;;;;;;AAGO,IAAM,cAAN,MAAkB;AAAA,EACf,iBAAuB;AAC7B,UAAM,SAAS,WAAW;AAC1B,UAAM,MAAM,oBAAI,KAAK;AACrB,UAAM,YAAY,IAAI,KAAK,OAAO,QAAQ,SAAS;AAEnD,QAAI,OAAO,WAAW;AACpB,aAAO,QAAQ,qBAAqB;AACpC,YAAM,YAAY,IAAI,KAAK,IAAI,YAAY,GAAG,IAAI,SAAS,IAAI,GAAG,CAAC;AACnE,aAAO,QAAQ,YAAY,UAAU,YAAY;AACjD,iBAAW,MAAM;AACjB,aAAO,KAAK,uBAAuB;AAAA,IACrC;AAAA,EACF;AAAA,EAEA,UAAmB;AACjB,SAAK,eAAe;AACpB,UAAM,SAAS,WAAW;AAC1B,WAAO,OAAO,QAAQ,qBAAqB,OAAO,QAAQ;AAAA,EAC5D;AAAA,EAEA,YAAoB;AAClB,SAAK,eAAe;AACpB,UAAM,SAAS,WAAW;AAC1B,WAAO,OAAO,QAAQ,mBAAmB,OAAO,QAAQ;AAAA,EAC1D;AAAA,EAEA,QAAQ,QAAgB,GAAS;AAC/B,SAAK,eAAe;AACpB,UAAM,SAAS,WAAW;AAC1B,WAAO,QAAQ,sBAAsB;AACrC,eAAW,MAAM;AAEjB,UAAM,YAAY,OAAO,QAAQ,mBAAmB,OAAO,QAAQ;AACnE,QAAI,aAAa,KAAK,MAAM,OAAO,QAAQ,mBAAmB,GAAG,GAAG;AAClE,aAAO,KAAK,gBAAgB,SAAS,6BAA6B;AAAA,IACpE;AAAA,EACF;AAAA,EAEA,iBAAiB,QAAsB;AACrC,UAAM,SAAS,WAAW;AAC1B,QAAI,OAAO,YAAY,SAAS,OAAO,aAAa,SAAS;AAC3D,YAAM,IAAI;AAAA,QACR,GAAG,MAAM,uDACU,OAAO,YAAY,MAAM;AAAA,MAE9C;AAAA,IACF;AAAA,EACF;AACF;AAEO,IAAM,cAAc,IAAI,YAAY;","names":[]}
|
|
@@ -0,0 +1,124 @@
|
|
|
1
|
+
import {
|
|
2
|
+
logger
|
|
3
|
+
} from "./chunk-KELPENM3.js";
|
|
4
|
+
import {
|
|
5
|
+
loadConfig
|
|
6
|
+
} from "./chunk-YEKHNTQO.js";
|
|
7
|
+
import {
|
|
8
|
+
ensureDirectories,
|
|
9
|
+
paths
|
|
10
|
+
} from "./chunk-53YLFYJF.js";
|
|
11
|
+
|
|
12
|
+
// src/scheduler/queue.ts
|
|
13
|
+
import { readFileSync, writeFileSync, existsSync } from "fs";
|
|
14
|
+
// Reads the pending-posts queue from disk; returns an empty queue when
// the file does not exist yet.
function loadQueue() {
  if (existsSync(paths.pendingPosts)) {
    return JSON.parse(readFileSync(paths.pendingPosts, "utf-8"));
  }
  return { entries: [] };
}
|
|
20
|
+
// Persists the queue as pretty-printed JSON, ensuring the data
// directories exist first.
function saveQueue(data) {
  ensureDirectories();
  const serialized = JSON.stringify(data, null, 2);
  writeFileSync(paths.pendingPosts, serialized);
}
|
|
24
|
+
// Computes the ISO timestamp for the next queued post: spaces posts
// evenly across the configured active-hours window, continuing after the
// latest pending entry (or now), then clamps into the window.
// NOTE(review): the interval is deterministic (window length / postsPerDay);
// only the minute-of-hour on day rollover is randomized.
function nextScheduledTime() {
  const config = loadConfig();
  const now = /* @__PURE__ */ new Date();
  const queue = loadQueue();
  const pendingEntries = queue.entries.filter((e) => e.status === "pending");
  // Start from the latest already-scheduled pending post, or from now.
  let lastScheduled = now;
  if (pendingEntries.length > 0) {
    const latest = new Date(
      pendingEntries.reduce(
        (max, e) => new Date(e.scheduledFor) > new Date(max.scheduledFor) ? e : max
      ).scheduledFor
    );
    if (latest > lastScheduled) lastScheduled = latest;
  }
  // Even spacing: active window length in minutes divided by posts/day.
  const intervalMinutes = Math.floor(
    (config.schedule.activeHoursEnd - config.schedule.activeHoursStart) * 60 / config.schedule.postsPerDay
  );
  const next = new Date(lastScheduled.getTime() + intervalMinutes * 60 * 1e3);
  // Past the end of the window: push to next day's window start with a
  // random minute offset.
  if (next.getHours() >= config.schedule.activeHoursEnd) {
    next.setDate(next.getDate() + 1);
    next.setHours(config.schedule.activeHoursStart, Math.floor(Math.random() * 60), 0, 0);
  }
  // Before the window opens: move up to the window start the same day.
  if (next.getHours() < config.schedule.activeHoursStart) {
    next.setHours(config.schedule.activeHoursStart, Math.floor(Math.random() * 60), 0, 0);
  }
  return next.toISOString();
}
|
|
51
|
+
// Appends a new pending post to the queue and persists it. When no
// explicit time is supplied, the next auto-computed slot is used.
// Returns the created queue entry.
function addToQueue(content, scheduledFor) {
  const queue = loadQueue();
  const when = scheduledFor ?? nextScheduledTime();
  const entry = {
    // Timestamp + short random suffix keeps ids unique enough for a queue.
    id: `post-${Date.now()}-${Math.random().toString(36).slice(2, 6)}`,
    content,
    scheduledFor: when,
    status: "pending",
    createdAt: new Date().toISOString()
  };
  queue.entries.push(entry);
  saveQueue(queue);
  logger.info(`Post queued: ${entry.id} scheduled for ${entry.scheduledFor}`);
  return entry;
}
|
|
65
|
+
// Posts every due pending entry via the X client, marking each entry as
// posted or failed in place, then persists the queue once at the end.
// Returns counts of posted/failed plus how many pending entries remain.
async function flushQueue() {
  const queue = loadQueue();
  const now = /* @__PURE__ */ new Date();
  let posted = 0;
  let failed = 0;
  // Lazily load the X client only when a flush actually runs.
  const { getXClient } = await import("./x-client-J4GE5A7P.js");
  const client = await getXClient();
  for (const entry of queue.entries) {
    if (entry.status !== "pending") continue;
    // Skip entries that are not yet due.
    if (new Date(entry.scheduledFor) > now) continue;
    try {
      const result = await client.postTweet(entry.content);
      if (result.success) {
        entry.status = "posted";
        entry.postedAt = (/* @__PURE__ */ new Date()).toISOString();
        posted++;
        logger.info(`Posted: ${entry.id}`);
      } else {
        entry.status = "failed";
        entry.error = result.error;
        failed++;
        logger.warn(`Failed to post: ${entry.id} - ${result.error}`);
      }
    } catch (error) {
      entry.status = "failed";
      entry.error = error.message;
      failed++;
    }
    // Fixed 2s pause between posts to stay under rate limits.
    await new Promise((resolve) => setTimeout(resolve, 2e3));
  }
  saveQueue(queue);
  const remaining = queue.entries.filter((e) => e.status === "pending").length;
  return { posted, failed, remaining };
}
|
|
99
|
+
// Prints a human-readable listing of pending posts, sorted by scheduled
// time, to stdout. Prints a placeholder line when the queue is empty.
function showQueue() {
  const queue = loadQueue();
  const pending = queue.entries.filter((e) => e.status === "pending");
  if (pending.length === 0) {
    console.log("Queue is empty.");
    return;
  }
  console.log(`
${pending.length} posts queued:
`);
  for (const entry of pending.sort(
    (a, b) => new Date(a.scheduledFor).getTime() - new Date(b.scheduledFor).getTime()
  )) {
    const time = new Date(entry.scheduledFor).toLocaleString();
    // Truncate long content to a 60-character preview for display.
    const preview = entry.content.length > 60 ? entry.content.slice(0, 60) + "..." : entry.content;
    console.log(` [${time}] ${preview}`);
  }
  console.log();
}
|
|
118
|
+
|
|
119
|
+
export {
|
|
120
|
+
addToQueue,
|
|
121
|
+
flushQueue,
|
|
122
|
+
showQueue
|
|
123
|
+
};
|
|
124
|
+
//# sourceMappingURL=chunk-O23NWMYU.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/scheduler/queue.ts"],"sourcesContent":["import { readFileSync, writeFileSync, existsSync } from \"node:fs\";\nimport { paths, ensureDirectories } from \"../utils/paths.js\";\nimport { loadConfig, saveConfig } from \"../utils/config.js\";\nimport { logger } from \"../utils/logger.js\";\n\nexport interface QueueEntry {\n id: string;\n content: string;\n scheduledFor: string;\n status: \"pending\" | \"posted\" | \"failed\" | \"expired\";\n createdAt: string;\n postedAt?: string;\n error?: string;\n}\n\ninterface QueueData {\n entries: QueueEntry[];\n}\n\nfunction loadQueue(): QueueData {\n if (!existsSync(paths.pendingPosts)) {\n return { entries: [] };\n }\n return JSON.parse(readFileSync(paths.pendingPosts, \"utf-8\")) as QueueData;\n}\n\nfunction saveQueue(data: QueueData): void {\n ensureDirectories();\n writeFileSync(paths.pendingPosts, JSON.stringify(data, null, 2));\n}\n\nfunction nextScheduledTime(): string {\n const config = loadConfig();\n const now = new Date();\n const queue = loadQueue();\n\n // Find the latest scheduled time in the queue\n const pendingEntries = queue.entries.filter((e) => e.status === \"pending\");\n let lastScheduled = now;\n\n if (pendingEntries.length > 0) {\n const latest = new Date(\n pendingEntries.reduce((max, e) =>\n new Date(e.scheduledFor) > new Date(max.scheduledFor) ? 
e : max\n ).scheduledFor\n );\n if (latest > lastScheduled) lastScheduled = latest;\n }\n\n // Add a random interval within the active hours\n const intervalMinutes = Math.floor(\n ((config.schedule.activeHoursEnd - config.schedule.activeHoursStart) * 60) /\n config.schedule.postsPerDay\n );\n\n const next = new Date(lastScheduled.getTime() + intervalMinutes * 60 * 1000);\n\n // Clamp to active hours\n if (next.getHours() >= config.schedule.activeHoursEnd) {\n next.setDate(next.getDate() + 1);\n next.setHours(config.schedule.activeHoursStart, Math.floor(Math.random() * 60), 0, 0);\n }\n if (next.getHours() < config.schedule.activeHoursStart) {\n next.setHours(config.schedule.activeHoursStart, Math.floor(Math.random() * 60), 0, 0);\n }\n\n return next.toISOString();\n}\n\nexport function addToQueue(content: string, scheduledFor?: string): QueueEntry {\n const queue = loadQueue();\n\n const entry: QueueEntry = {\n id: `post-${Date.now()}-${Math.random().toString(36).slice(2, 6)}`,\n content,\n scheduledFor: scheduledFor ?? 
nextScheduledTime(),\n status: \"pending\",\n createdAt: new Date().toISOString(),\n };\n\n queue.entries.push(entry);\n saveQueue(queue);\n\n logger.info(`Post queued: ${entry.id} scheduled for ${entry.scheduledFor}`);\n return entry;\n}\n\nexport async function flushQueue(): Promise<{\n posted: number;\n failed: number;\n remaining: number;\n}> {\n const queue = loadQueue();\n const now = new Date();\n let posted = 0;\n let failed = 0;\n\n const { getXClient } = await import(\"../x-client/index.js\");\n const client = await getXClient();\n\n for (const entry of queue.entries) {\n if (entry.status !== \"pending\") continue;\n if (new Date(entry.scheduledFor) > now) continue;\n\n try {\n const result = await client.postTweet(entry.content);\n if (result.success) {\n entry.status = \"posted\";\n entry.postedAt = new Date().toISOString();\n posted++;\n logger.info(`Posted: ${entry.id}`);\n } else {\n entry.status = \"failed\";\n entry.error = result.error;\n failed++;\n logger.warn(`Failed to post: ${entry.id} - ${result.error}`);\n }\n } catch (error) {\n entry.status = \"failed\";\n entry.error = (error as Error).message;\n failed++;\n }\n\n // Small delay between posts to avoid rate limits\n await new Promise((resolve) => setTimeout(resolve, 2000));\n }\n\n saveQueue(queue);\n\n const remaining = queue.entries.filter((e) => e.status === \"pending\").length;\n return { posted, failed, remaining };\n}\n\nexport function showQueue(): void {\n const queue = loadQueue();\n const pending = queue.entries.filter((e) => e.status === \"pending\");\n\n if (pending.length === 0) {\n console.log(\"Queue is empty.\");\n return;\n }\n\n console.log(`\\n${pending.length} posts queued:\\n`);\n for (const entry of pending.sort(\n (a, b) => new Date(a.scheduledFor).getTime() - new Date(b.scheduledFor).getTime()\n )) {\n const time = new Date(entry.scheduledFor).toLocaleString();\n const preview = entry.content.length > 60 ? 
entry.content.slice(0, 60) + \"...\" : entry.content;\n console.log(` [${time}] ${preview}`);\n }\n console.log();\n}\n"],"mappings":";;;;;;;;;;;;AAAA,SAAS,cAAc,eAAe,kBAAkB;AAmBxD,SAAS,YAAuB;AAC9B,MAAI,CAAC,WAAW,MAAM,YAAY,GAAG;AACnC,WAAO,EAAE,SAAS,CAAC,EAAE;AAAA,EACvB;AACA,SAAO,KAAK,MAAM,aAAa,MAAM,cAAc,OAAO,CAAC;AAC7D;AAEA,SAAS,UAAU,MAAuB;AACxC,oBAAkB;AAClB,gBAAc,MAAM,cAAc,KAAK,UAAU,MAAM,MAAM,CAAC,CAAC;AACjE;AAEA,SAAS,oBAA4B;AACnC,QAAM,SAAS,WAAW;AAC1B,QAAM,MAAM,oBAAI,KAAK;AACrB,QAAM,QAAQ,UAAU;AAGxB,QAAM,iBAAiB,MAAM,QAAQ,OAAO,CAAC,MAAM,EAAE,WAAW,SAAS;AACzE,MAAI,gBAAgB;AAEpB,MAAI,eAAe,SAAS,GAAG;AAC7B,UAAM,SAAS,IAAI;AAAA,MACjB,eAAe;AAAA,QAAO,CAAC,KAAK,MAC1B,IAAI,KAAK,EAAE,YAAY,IAAI,IAAI,KAAK,IAAI,YAAY,IAAI,IAAI;AAAA,MAC9D,EAAE;AAAA,IACJ;AACA,QAAI,SAAS,cAAe,iBAAgB;AAAA,EAC9C;AAGA,QAAM,kBAAkB,KAAK;AAAA,KACzB,OAAO,SAAS,iBAAiB,OAAO,SAAS,oBAAoB,KACrE,OAAO,SAAS;AAAA,EACpB;AAEA,QAAM,OAAO,IAAI,KAAK,cAAc,QAAQ,IAAI,kBAAkB,KAAK,GAAI;AAG3E,MAAI,KAAK,SAAS,KAAK,OAAO,SAAS,gBAAgB;AACrD,SAAK,QAAQ,KAAK,QAAQ,IAAI,CAAC;AAC/B,SAAK,SAAS,OAAO,SAAS,kBAAkB,KAAK,MAAM,KAAK,OAAO,IAAI,EAAE,GAAG,GAAG,CAAC;AAAA,EACtF;AACA,MAAI,KAAK,SAAS,IAAI,OAAO,SAAS,kBAAkB;AACtD,SAAK,SAAS,OAAO,SAAS,kBAAkB,KAAK,MAAM,KAAK,OAAO,IAAI,EAAE,GAAG,GAAG,CAAC;AAAA,EACtF;AAEA,SAAO,KAAK,YAAY;AAC1B;AAEO,SAAS,WAAW,SAAiB,cAAmC;AAC7E,QAAM,QAAQ,UAAU;AAExB,QAAM,QAAoB;AAAA,IACxB,IAAI,QAAQ,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,MAAM,GAAG,CAAC,CAAC;AAAA,IAChE;AAAA,IACA,cAAc,gBAAgB,kBAAkB;AAAA,IAChD,QAAQ;AAAA,IACR,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,EACpC;AAEA,QAAM,QAAQ,KAAK,KAAK;AACxB,YAAU,KAAK;AAEf,SAAO,KAAK,gBAAgB,MAAM,EAAE,kBAAkB,MAAM,YAAY,EAAE;AAC1E,SAAO;AACT;AAEA,eAAsB,aAInB;AACD,QAAM,QAAQ,UAAU;AACxB,QAAM,MAAM,oBAAI,KAAK;AACrB,MAAI,SAAS;AACb,MAAI,SAAS;AAEb,QAAM,EAAE,WAAW,IAAI,MAAM,OAAO,wBAAsB;AAC1D,QAAM,SAAS,MAAM,WAAW;AAEhC,aAAW,SAAS,MAAM,SAAS;AACjC,QAAI,MAAM,WAAW,UAAW;AAChC,QAAI,IAAI,KAAK,MAAM,YAAY,IAAI,IAAK;AAExC,QAAI;AACF,YAAM,SAAS,MAAM,OAAO,UAAU,MAAM,OAAO;AACnD,UAAI,OAAO,SAAS;AAClB,cAAM,SAAS;AACf,cAAM,YAAW,oBAAI,KAAK,GA
AE,YAAY;AACxC;AACA,eAAO,KAAK,WAAW,MAAM,EAAE,EAAE;AAAA,MACnC,OAAO;AACL,cAAM,SAAS;AACf,cAAM,QAAQ,OAAO;AACrB;AACA,eAAO,KAAK,mBAAmB,MAAM,EAAE,MAAM,OAAO,KAAK,EAAE;AAAA,MAC7D;AAAA,IACF,SAAS,OAAO;AACd,YAAM,SAAS;AACf,YAAM,QAAS,MAAgB;AAC/B;AAAA,IACF;AAGA,UAAM,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,GAAI,CAAC;AAAA,EAC1D;AAEA,YAAU,KAAK;AAEf,QAAM,YAAY,MAAM,QAAQ,OAAO,CAAC,MAAM,EAAE,WAAW,SAAS,EAAE;AACtE,SAAO,EAAE,QAAQ,QAAQ,UAAU;AACrC;AAEO,SAAS,YAAkB;AAChC,QAAM,QAAQ,UAAU;AACxB,QAAM,UAAU,MAAM,QAAQ,OAAO,CAAC,MAAM,EAAE,WAAW,SAAS;AAElE,MAAI,QAAQ,WAAW,GAAG;AACxB,YAAQ,IAAI,iBAAiB;AAC7B;AAAA,EACF;AAEA,UAAQ,IAAI;AAAA,EAAK,QAAQ,MAAM;AAAA,CAAkB;AACjD,aAAW,SAAS,QAAQ;AAAA,IAC1B,CAAC,GAAG,MAAM,IAAI,KAAK,EAAE,YAAY,EAAE,QAAQ,IAAI,IAAI,KAAK,EAAE,YAAY,EAAE,QAAQ;AAAA,EAClF,GAAG;AACD,UAAM,OAAO,IAAI,KAAK,MAAM,YAAY,EAAE,eAAe;AACzD,UAAM,UAAU,MAAM,QAAQ,SAAS,KAAK,MAAM,QAAQ,MAAM,GAAG,EAAE,IAAI,QAAQ,MAAM;AACvF,YAAQ,IAAI,MAAM,IAAI,KAAK,OAAO,EAAE;AAAA,EACtC;AACA,UAAQ,IAAI;AACd;","names":[]}
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
import {
|
|
2
|
+
ensureDirectories,
|
|
3
|
+
paths
|
|
4
|
+
} from "./chunk-53YLFYJF.js";
|
|
5
|
+
|
|
6
|
+
// src/utils/config.ts
|
|
7
|
+
import { readFileSync, writeFileSync, existsSync } from "fs";
|
|
8
|
+
import { z } from "zod";
|
|
9
|
+
// Zod schema for the on-disk Spora config file.
// Sub-sections are declared as named schemas and composed below; the
// validated shape is identical to a single inline z.object definition.
var CreditsSchema = z.object({
  monthlyPostLimit: z.number(),
  postsUsedThisMonth: z.number(),
  resetDate: z.string()
});
var ScheduleSchema = z.object({
  postsPerDay: z.number(),
  activeHoursStart: z.number().min(0).max(23),
  activeHoursEnd: z.number().min(0).max(23),
  timezone: z.string()
});
var LlmSchema = z.object({
  provider: z.enum(["anthropic", "openai"]).default("anthropic"),
  model: z.string().default("claude-sonnet-4-20250514")
});
var RuntimeSchema = z.object({
  heartbeatIntervalMs: z.number().default(3e5),
  actionsPerHeartbeat: z.number().default(3),
  enabled: z.boolean().default(false)
});
var ConnectionSchema = z.object({
  token: z.string().optional(),
  apiEndpoint: z.string().default("https://spora.dev/api/v1"),
  lastSync: z.string().optional(),
  configVersion: z.number().default(0)
});
var ConfigSchema = z.object({
  version: z.literal(1),
  xMethod: z.enum(["api", "browser"]),
  xApiTier: z.enum(["free", "basic"]).optional(),
  credits: CreditsSchema,
  schedule: ScheduleSchema,
  llm: LlmSchema.optional(),
  runtime: RuntimeSchema.optional(),
  connection: ConnectionSchema.optional()
});
|
|
40
|
+
// Read the Spora config from disk and validate it against ConfigSchema.
// Throws if `spora init` has not been run yet, or if the file content
// fails JSON parsing or schema validation.
function loadConfig() {
  if (!existsSync(paths.config)) {
    throw new Error("Spora not initialized. Run `spora init` first.");
  }
  const fileText = readFileSync(paths.config, "utf-8");
  const parsed = JSON.parse(fileText);
  return ConfigSchema.parse(parsed);
}
|
|
47
|
+
// Validate `config` against the schema, then persist it to the config
// path as pretty-printed (2-space) JSON. Creates the data directories
// first so the write cannot fail on a missing parent.
function saveConfig(config) {
  ensureDirectories();
  ConfigSchema.parse(config);
  const serialized = JSON.stringify(config, null, 2);
  writeFileSync(paths.config, serialized);
}
|
|
52
|
+
// Build a fresh default config for `spora init`.
// The "basic" API tier gets 10000 posts/month, anything else gets 500;
// the daily budget spreads the monthly limit over 30 days. Credits
// reset on the first of the next month (constructed in local time,
// then serialized as an ISO string). The timezone falls back to the
// one detected from the host environment when not supplied.
function createDefaultConfig(overrides) {
  const isBasicTier = overrides.xApiTier === "basic";
  const monthlyLimit = isBasicTier ? 1e4 : 500;
  const today = new Date();
  const firstOfNextMonth = new Date(today.getFullYear(), today.getMonth() + 1, 1);
  const detectedZone = Intl.DateTimeFormat().resolvedOptions().timeZone;
  return {
    version: 1,
    xMethod: overrides.xMethod,
    xApiTier: overrides.xApiTier,
    credits: {
      monthlyPostLimit: monthlyLimit,
      postsUsedThisMonth: 0,
      resetDate: firstOfNextMonth.toISOString()
    },
    schedule: {
      postsPerDay: Math.floor(monthlyLimit / 30),
      activeHoursStart: 8,
      activeHoursEnd: 22,
      timezone: overrides.timezone ?? detectedZone
    }
  };
}
|
|
73
|
+
|
|
74
|
+
export {
|
|
75
|
+
ConfigSchema,
|
|
76
|
+
loadConfig,
|
|
77
|
+
saveConfig,
|
|
78
|
+
createDefaultConfig
|
|
79
|
+
};
|
|
80
|
+
//# sourceMappingURL=chunk-YEKHNTQO.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/utils/config.ts"],"sourcesContent":["import { readFileSync, writeFileSync, existsSync } from \"node:fs\";\nimport { z } from \"zod\";\nimport { paths, ensureDirectories } from \"./paths.js\";\n\nexport const ConfigSchema = z.object({\n version: z.literal(1),\n xMethod: z.enum([\"api\", \"browser\"]),\n xApiTier: z.enum([\"free\", \"basic\"]).optional(),\n\n credits: z.object({\n monthlyPostLimit: z.number(),\n postsUsedThisMonth: z.number(),\n resetDate: z.string(),\n }),\n\n schedule: z.object({\n postsPerDay: z.number(),\n activeHoursStart: z.number().min(0).max(23),\n activeHoursEnd: z.number().min(0).max(23),\n timezone: z.string(),\n }),\n\n llm: z.object({\n provider: z.enum([\"anthropic\", \"openai\"]).default(\"anthropic\"),\n model: z.string().default(\"claude-sonnet-4-20250514\"),\n }).optional(),\n\n runtime: z.object({\n heartbeatIntervalMs: z.number().default(300_000),\n actionsPerHeartbeat: z.number().default(3),\n enabled: z.boolean().default(false),\n }).optional(),\n\n connection: z.object({\n token: z.string().optional(),\n apiEndpoint: z.string().default(\"https://spora.dev/api/v1\"),\n lastSync: z.string().optional(),\n configVersion: z.number().default(0),\n }).optional(),\n});\n\nexport type Config = z.infer<typeof ConfigSchema>;\n\nexport function loadConfig(): Config {\n if (!existsSync(paths.config)) {\n throw new Error(\"Spora not initialized. Run `spora init` first.\");\n }\n const raw = readFileSync(paths.config, \"utf-8\");\n return ConfigSchema.parse(JSON.parse(raw));\n}\n\nexport function saveConfig(config: Config): void {\n ensureDirectories();\n ConfigSchema.parse(config);\n writeFileSync(paths.config, JSON.stringify(config, null, 2));\n}\n\nexport function createDefaultConfig(overrides: {\n xMethod: \"api\" | \"browser\";\n xApiTier?: \"free\" | \"basic\";\n timezone?: string;\n}): Config {\n const monthlyLimit = overrides.xApiTier === \"basic\" ? 
10000 : 500;\n const now = new Date();\n const resetDate = new Date(now.getFullYear(), now.getMonth() + 1, 1).toISOString();\n\n return {\n version: 1,\n xMethod: overrides.xMethod,\n xApiTier: overrides.xApiTier,\n credits: {\n monthlyPostLimit: monthlyLimit,\n postsUsedThisMonth: 0,\n resetDate,\n },\n schedule: {\n postsPerDay: Math.floor(monthlyLimit / 30),\n activeHoursStart: 8,\n activeHoursEnd: 22,\n timezone: overrides.timezone ?? Intl.DateTimeFormat().resolvedOptions().timeZone,\n },\n };\n}\n"],"mappings":";;;;;;AAAA,SAAS,cAAc,eAAe,kBAAkB;AACxD,SAAS,SAAS;AAGX,IAAM,eAAe,EAAE,OAAO;AAAA,EACnC,SAAS,EAAE,QAAQ,CAAC;AAAA,EACpB,SAAS,EAAE,KAAK,CAAC,OAAO,SAAS,CAAC;AAAA,EAClC,UAAU,EAAE,KAAK,CAAC,QAAQ,OAAO,CAAC,EAAE,SAAS;AAAA,EAE7C,SAAS,EAAE,OAAO;AAAA,IAChB,kBAAkB,EAAE,OAAO;AAAA,IAC3B,oBAAoB,EAAE,OAAO;AAAA,IAC7B,WAAW,EAAE,OAAO;AAAA,EACtB,CAAC;AAAA,EAED,UAAU,EAAE,OAAO;AAAA,IACjB,aAAa,EAAE,OAAO;AAAA,IACtB,kBAAkB,EAAE,OAAO,EAAE,IAAI,CAAC,EAAE,IAAI,EAAE;AAAA,IAC1C,gBAAgB,EAAE,OAAO,EAAE,IAAI,CAAC,EAAE,IAAI,EAAE;AAAA,IACxC,UAAU,EAAE,OAAO;AAAA,EACrB,CAAC;AAAA,EAED,KAAK,EAAE,OAAO;AAAA,IACZ,UAAU,EAAE,KAAK,CAAC,aAAa,QAAQ,CAAC,EAAE,QAAQ,WAAW;AAAA,IAC7D,OAAO,EAAE,OAAO,EAAE,QAAQ,0BAA0B;AAAA,EACtD,CAAC,EAAE,SAAS;AAAA,EAEZ,SAAS,EAAE,OAAO;AAAA,IAChB,qBAAqB,EAAE,OAAO,EAAE,QAAQ,GAAO;AAAA,IAC/C,qBAAqB,EAAE,OAAO,EAAE,QAAQ,CAAC;AAAA,IACzC,SAAS,EAAE,QAAQ,EAAE,QAAQ,KAAK;AAAA,EACpC,CAAC,EAAE,SAAS;AAAA,EAEZ,YAAY,EAAE,OAAO;AAAA,IACnB,OAAO,EAAE,OAAO,EAAE,SAAS;AAAA,IAC3B,aAAa,EAAE,OAAO,EAAE,QAAQ,0BAA0B;AAAA,IAC1D,UAAU,EAAE,OAAO,EAAE,SAAS;AAAA,IAC9B,eAAe,EAAE,OAAO,EAAE,QAAQ,CAAC;AAAA,EACrC,CAAC,EAAE,SAAS;AACd,CAAC;AAIM,SAAS,aAAqB;AACnC,MAAI,CAAC,WAAW,MAAM,MAAM,GAAG;AAC7B,UAAM,IAAI,MAAM,gDAAgD;AAAA,EAClE;AACA,QAAM,MAAM,aAAa,MAAM,QAAQ,OAAO;AAC9C,SAAO,aAAa,MAAM,KAAK,MAAM,GAAG,CAAC;AAC3C;AAEO,SAAS,WAAW,QAAsB;AAC/C,oBAAkB;AAClB,eAAa,MAAM,MAAM;AACzB,gBAAc,MAAM,QAAQ,KAAK,UAAU,QAAQ,MAAM,CAAC,CAAC;AAC7D;AAEO,SAAS,oBAAoB,WAIzB;AACT,QAAM,eAAe,UAAU,aAAa,UAAU,MAAQ;AAC9D,QAAM,MAAM,oBAAI,KAAK;AACrB,QAAM,YAAY,IA
AI,KAAK,IAAI,YAAY,GAAG,IAAI,SAAS,IAAI,GAAG,CAAC,EAAE,YAAY;AAEjF,SAAO;AAAA,IACL,SAAS;AAAA,IACT,SAAS,UAAU;AAAA,IACnB,UAAU,UAAU;AAAA,IACpB,SAAS;AAAA,MACP,kBAAkB;AAAA,MAClB,oBAAoB;AAAA,MACpB;AAAA,IACF;AAAA,IACA,UAAU;AAAA,MACR,aAAa,KAAK,MAAM,eAAe,EAAE;AAAA,MACzC,kBAAkB;AAAA,MAClB,gBAAgB;AAAA,MAChB,UAAU,UAAU,YAAY,KAAK,eAAe,EAAE,gBAAgB,EAAE;AAAA,IAC1E;AAAA,EACF;AACF;","names":[]}
|
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
import {
|
|
2
|
+
ensureDirectories,
|
|
3
|
+
paths
|
|
4
|
+
} from "./chunk-53YLFYJF.js";
|
|
5
|
+
|
|
6
|
+
// src/utils/crypto.ts
|
|
7
|
+
import { createCipheriv, createDecipheriv, randomBytes, createHash } from "crypto";
|
|
8
|
+
import { readFileSync, writeFileSync, existsSync } from "fs";
|
|
9
|
+
import { hostname } from "os";
|
|
10
|
+
var ALGORITHM = "aes-256-gcm";
// Derive a 32-byte AES key from machine identity (hostname + $USER).
// NOTE(review): the key is fully predictable from machine info, so the
// credential file is obfuscated rather than cryptographically protected
// from an attacker who can read it — confirm this is intentional.
function deriveKey() {
  const machineId = `spora-${hostname()}-${process.env.USER ?? "default"}`;
  return createHash("sha256").update(machineId).digest();
}
// Seal a UTF-8 string with AES-256-GCM under the machine-derived key.
// Returns a JSON envelope of hex-encoded { iv, encrypted, authTag }.
function encrypt(data) {
  const iv = randomBytes(16);
  const cipher = createCipheriv(ALGORITHM, deriveKey(), iv);
  const cipherHex = cipher.update(data, "utf-8", "hex") + cipher.final("hex");
  return JSON.stringify({
    iv: iv.toString("hex"),
    encrypted: cipherHex,
    authTag: cipher.getAuthTag().toString("hex")
  });
}
// Open an envelope produced by encrypt(); throws if the GCM auth tag
// does not verify (tampered or produced on a different machine/user).
function decrypt(payload) {
  const envelope = JSON.parse(payload);
  const decipher = createDecipheriv(ALGORITHM, deriveKey(), Buffer.from(envelope.iv, "hex"));
  decipher.setAuthTag(Buffer.from(envelope.authTag, "hex"));
  return decipher.update(envelope.encrypted, "hex", "utf-8") + decipher.final("utf-8");
}
|
|
37
|
+
// Serialize the credential object, encrypt it, and write it to the
// credentials path. Ensures the data directories exist first.
function saveCredentials(credentials) {
  ensureDirectories();
  const sealed = encrypt(JSON.stringify(credentials));
  writeFileSync(paths.credentials, sealed);
}
|
|
42
|
+
// Read the encrypted credentials file, decrypt it, and parse the JSON.
// Throws if `spora init` has not stored credentials yet.
function loadCredentials() {
  if (!existsSync(paths.credentials)) {
    throw new Error("No credentials found. Run `spora init` first.");
  }
  const sealed = readFileSync(paths.credentials, "utf-8");
  return JSON.parse(decrypt(sealed));
}
|
|
49
|
+
|
|
50
|
+
export {
|
|
51
|
+
encrypt,
|
|
52
|
+
decrypt,
|
|
53
|
+
saveCredentials,
|
|
54
|
+
loadCredentials
|
|
55
|
+
};
|
|
56
|
+
//# sourceMappingURL=chunk-ZJZKH7N7.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/utils/crypto.ts"],"sourcesContent":["import { createCipheriv, createDecipheriv, randomBytes, createHash } from \"node:crypto\";\nimport { readFileSync, writeFileSync, existsSync } from \"node:fs\";\nimport { hostname } from \"node:os\";\nimport { paths, ensureDirectories } from \"./paths.js\";\n\nconst ALGORITHM = \"aes-256-gcm\";\n\nfunction deriveKey(): Buffer {\n const machineId = `spora-${hostname()}-${process.env.USER ?? \"default\"}`;\n return createHash(\"sha256\").update(machineId).digest();\n}\n\nexport function encrypt(data: string): string {\n const key = deriveKey();\n const iv = randomBytes(16);\n const cipher = createCipheriv(ALGORITHM, key, iv);\n\n let encrypted = cipher.update(data, \"utf-8\", \"hex\");\n encrypted += cipher.final(\"hex\");\n const authTag = cipher.getAuthTag().toString(\"hex\");\n\n return JSON.stringify({\n iv: iv.toString(\"hex\"),\n encrypted,\n authTag,\n });\n}\n\nexport function decrypt(payload: string): string {\n const key = deriveKey();\n const { iv, encrypted, authTag } = JSON.parse(payload);\n\n const decipher = createDecipheriv(ALGORITHM, key, Buffer.from(iv, \"hex\"));\n decipher.setAuthTag(Buffer.from(authTag, \"hex\"));\n\n let decrypted = decipher.update(encrypted, \"hex\", \"utf-8\");\n decrypted += decipher.final(\"utf-8\");\n\n return decrypted;\n}\n\nexport interface XCredentials {\n method: \"api\" | \"browser\";\n // API mode\n apiKey?: string;\n apiSecret?: string;\n accessToken?: string;\n accessTokenSecret?: string;\n bearerToken?: string;\n // Browser mode\n username?: string;\n password?: string;\n email?: string;\n}\n\nexport function saveCredentials(credentials: XCredentials): void {\n ensureDirectories();\n const encrypted = encrypt(JSON.stringify(credentials));\n writeFileSync(paths.credentials, encrypted);\n}\n\nexport function loadCredentials(): XCredentials {\n if (!existsSync(paths.credentials)) {\n throw new Error(\"No credentials found. 
Run `spora init` first.\");\n }\n const payload = readFileSync(paths.credentials, \"utf-8\");\n return JSON.parse(decrypt(payload)) as XCredentials;\n}\n"],"mappings":";;;;;;AAAA,SAAS,gBAAgB,kBAAkB,aAAa,kBAAkB;AAC1E,SAAS,cAAc,eAAe,kBAAkB;AACxD,SAAS,gBAAgB;AAGzB,IAAM,YAAY;AAElB,SAAS,YAAoB;AAC3B,QAAM,YAAY,SAAS,SAAS,CAAC,IAAI,QAAQ,IAAI,QAAQ,SAAS;AACtE,SAAO,WAAW,QAAQ,EAAE,OAAO,SAAS,EAAE,OAAO;AACvD;AAEO,SAAS,QAAQ,MAAsB;AAC5C,QAAM,MAAM,UAAU;AACtB,QAAM,KAAK,YAAY,EAAE;AACzB,QAAM,SAAS,eAAe,WAAW,KAAK,EAAE;AAEhD,MAAI,YAAY,OAAO,OAAO,MAAM,SAAS,KAAK;AAClD,eAAa,OAAO,MAAM,KAAK;AAC/B,QAAM,UAAU,OAAO,WAAW,EAAE,SAAS,KAAK;AAElD,SAAO,KAAK,UAAU;AAAA,IACpB,IAAI,GAAG,SAAS,KAAK;AAAA,IACrB;AAAA,IACA;AAAA,EACF,CAAC;AACH;AAEO,SAAS,QAAQ,SAAyB;AAC/C,QAAM,MAAM,UAAU;AACtB,QAAM,EAAE,IAAI,WAAW,QAAQ,IAAI,KAAK,MAAM,OAAO;AAErD,QAAM,WAAW,iBAAiB,WAAW,KAAK,OAAO,KAAK,IAAI,KAAK,CAAC;AACxE,WAAS,WAAW,OAAO,KAAK,SAAS,KAAK,CAAC;AAE/C,MAAI,YAAY,SAAS,OAAO,WAAW,OAAO,OAAO;AACzD,eAAa,SAAS,MAAM,OAAO;AAEnC,SAAO;AACT;AAgBO,SAAS,gBAAgB,aAAiC;AAC/D,oBAAkB;AAClB,QAAM,YAAY,QAAQ,KAAK,UAAU,WAAW,CAAC;AACrD,gBAAc,MAAM,aAAa,SAAS;AAC5C;AAEO,SAAS,kBAAgC;AAC9C,MAAI,CAAC,WAAW,MAAM,WAAW,GAAG;AAClC,UAAM,IAAI,MAAM,+CAA+C;AAAA,EACjE;AACA,QAAM,UAAU,aAAa,MAAM,aAAa,OAAO;AACvD,SAAO,KAAK,MAAM,QAAQ,OAAO,CAAC;AACpC;","names":[]}
|