optimal-cli 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. package/README.md +175 -0
  2. package/dist/bin/optimal.d.ts +2 -0
  3. package/dist/bin/optimal.js +995 -0
  4. package/dist/lib/budget/projections.d.ts +115 -0
  5. package/dist/lib/budget/projections.js +384 -0
  6. package/dist/lib/budget/scenarios.d.ts +93 -0
  7. package/dist/lib/budget/scenarios.js +214 -0
  8. package/dist/lib/cms/publish-blog.d.ts +62 -0
  9. package/dist/lib/cms/publish-blog.js +74 -0
  10. package/dist/lib/cms/strapi-client.d.ts +123 -0
  11. package/dist/lib/cms/strapi-client.js +213 -0
  12. package/dist/lib/config.d.ts +55 -0
  13. package/dist/lib/config.js +206 -0
  14. package/dist/lib/infra/deploy.d.ts +29 -0
  15. package/dist/lib/infra/deploy.js +58 -0
  16. package/dist/lib/infra/migrate.d.ts +34 -0
  17. package/dist/lib/infra/migrate.js +103 -0
  18. package/dist/lib/kanban.d.ts +46 -0
  19. package/dist/lib/kanban.js +118 -0
  20. package/dist/lib/newsletter/distribute.d.ts +52 -0
  21. package/dist/lib/newsletter/distribute.js +193 -0
  22. package/dist/lib/newsletter/generate-insurance.d.ts +42 -0
  23. package/dist/lib/newsletter/generate-insurance.js +36 -0
  24. package/dist/lib/newsletter/generate.d.ts +104 -0
  25. package/dist/lib/newsletter/generate.js +571 -0
  26. package/dist/lib/returnpro/anomalies.d.ts +64 -0
  27. package/dist/lib/returnpro/anomalies.js +166 -0
  28. package/dist/lib/returnpro/audit.d.ts +32 -0
  29. package/dist/lib/returnpro/audit.js +147 -0
  30. package/dist/lib/returnpro/diagnose.d.ts +52 -0
  31. package/dist/lib/returnpro/diagnose.js +281 -0
  32. package/dist/lib/returnpro/kpis.d.ts +32 -0
  33. package/dist/lib/returnpro/kpis.js +192 -0
  34. package/dist/lib/returnpro/templates.d.ts +48 -0
  35. package/dist/lib/returnpro/templates.js +229 -0
  36. package/dist/lib/returnpro/upload-income.d.ts +25 -0
  37. package/dist/lib/returnpro/upload-income.js +235 -0
  38. package/dist/lib/returnpro/upload-netsuite.d.ts +37 -0
  39. package/dist/lib/returnpro/upload-netsuite.js +566 -0
  40. package/dist/lib/returnpro/upload-r1.d.ts +48 -0
  41. package/dist/lib/returnpro/upload-r1.js +398 -0
  42. package/dist/lib/social/post-generator.d.ts +83 -0
  43. package/dist/lib/social/post-generator.js +333 -0
  44. package/dist/lib/social/publish.d.ts +66 -0
  45. package/dist/lib/social/publish.js +226 -0
  46. package/dist/lib/social/scraper.d.ts +67 -0
  47. package/dist/lib/social/scraper.js +361 -0
  48. package/dist/lib/supabase.d.ts +4 -0
  49. package/dist/lib/supabase.js +20 -0
  50. package/dist/lib/transactions/delete-batch.d.ts +60 -0
  51. package/dist/lib/transactions/delete-batch.js +203 -0
  52. package/dist/lib/transactions/ingest.d.ts +43 -0
  53. package/dist/lib/transactions/ingest.js +555 -0
  54. package/dist/lib/transactions/stamp.d.ts +51 -0
  55. package/dist/lib/transactions/stamp.js +524 -0
  56. package/package.json +50 -0
@@ -0,0 +1,398 @@
1
+ import * as fs from 'fs';
2
+ import * as path from 'path';
3
+ import ExcelJS from 'exceljs';
4
+ import { getSupabase } from '../supabase.js';
5
// ---------------------------------------------------------------------------
// Constants
// ---------------------------------------------------------------------------
/**
 * Account code / ID for Checked-In Qty, which is the primary volume type
 * produced by a standard R1 upload. This matches the dashboard's volume-configs.ts.
 *
 *   account_code: "Checked-In Qty"
 *   account_id:   130
 */
const CHECKED_IN_ACCOUNT_CODE = 'Checked-In Qty';
const CHECKED_IN_ACCOUNT_ID = 130;
// Rows per POST batch when inserting into stg_financials_raw.
const CHUNK_SIZE = 500;
// Page size for offset/limit pagination when reading dim tables via PostgREST.
const PAGE_SIZE = 1000;
19
+ // ---------------------------------------------------------------------------
20
+ // Helpers
21
+ // ---------------------------------------------------------------------------
22
/**
 * Derive a location code from a ProgramName / program code string.
 *
 * Rules (mirrors dashboard-returnpro/lib/r1-monthly/processing.ts extractLocation()):
 *   - empty / whitespace-only input  -> "UNKNOWN"
 *   - values beginning with "DS-"    -> "DS"
 *   - anything else                  -> first 5 characters, uppercased
 */
function extractLocation(programName) {
    const code = programName.trim();
    if (code === '') {
        return 'UNKNOWN';
    }
    return code.startsWith('DS-') ? 'DS' : code.slice(0, 5).toUpperCase();
}
36
/**
 * Partition `arr` into consecutive slices of at most `size` elements.
 * The final slice may be shorter; an empty input yields an empty list.
 */
function chunk(arr, size) {
    const batches = [];
    let start = 0;
    while (start < arr.length) {
        batches.push(arr.slice(start, start + size));
        start += size;
    }
    return batches;
}
46
+ // ---------------------------------------------------------------------------
47
+ // Supabase dim table lookups
48
+ // ---------------------------------------------------------------------------
49
/**
 * Build a lookup from program_code -> program_id_key using dim_program_id.
 *
 * Pages through PostgREST with offset/limit so every row is seen even past
 * the 1000-row Supabase response cap. When the same program_code appears in
 * more than one row, the first occurrence wins.
 */
async function buildProgramIdKeyMap(supabaseUrl, supabaseKey) {
    const byCode = new Map();
    const authHeaders = { apikey: supabaseKey, Authorization: `Bearer ${supabaseKey}` };
    for (let offset = 0; ; offset += PAGE_SIZE) {
        const url = `${supabaseUrl}/rest/v1/dim_program_id` +
            `?select=program_id_key,program_code` +
            `&order=program_id_key` +
            `&offset=${offset}&limit=${PAGE_SIZE}`;
        const res = await fetch(url, { headers: authHeaders });
        if (!res.ok) {
            const text = await res.text();
            throw new Error(`Failed to fetch dim_program_id: ${text}`);
        }
        const page = (await res.json());
        for (const { program_code, program_id_key } of page) {
            if (!byCode.has(program_code)) {
                byCode.set(program_code, program_id_key);
            }
        }
        // A short page means we have consumed the last page.
        if (page.length < PAGE_SIZE) {
            return byCode;
        }
    }
}
81
/**
 * Build a lookup from master_name -> dim_master_program row
 * ({ master_program_id, master_name, client_id }).
 *
 * Pages through PostgREST with offset/limit so every row is seen even past
 * the 1000-row Supabase response cap. When the same master_name appears in
 * more than one row, the first occurrence wins.
 */
async function buildMasterProgramMap(supabaseUrl, supabaseKey) {
    const byName = new Map();
    const authHeaders = { apikey: supabaseKey, Authorization: `Bearer ${supabaseKey}` };
    for (let offset = 0; ; offset += PAGE_SIZE) {
        const url = `${supabaseUrl}/rest/v1/dim_master_program` +
            `?select=master_program_id,master_name,client_id` +
            `&order=master_program_id` +
            `&offset=${offset}&limit=${PAGE_SIZE}`;
        const res = await fetch(url, { headers: authHeaders });
        if (!res.ok) {
            const text = await res.text();
            throw new Error(`Failed to fetch dim_master_program: ${text}`);
        }
        const page = (await res.json());
        for (const row of page) {
            if (!byName.has(row.master_name)) {
                byName.set(row.master_name, row);
            }
        }
        // A short page means we have consumed the last page.
        if (page.length < PAGE_SIZE) {
            return byName;
        }
    }
}
112
+ // ---------------------------------------------------------------------------
113
+ // XLSX parsing
114
+ // ---------------------------------------------------------------------------
115
/**
 * Parse the first sheet of an R1 XLSX file into raw row records.
 *
 * Required headers (exact, case-sensitive — matching the Rust WASM parser):
 *   ProgramName, "Master Program Name", TRGID
 * Optional headers:
 *   LocationID
 *   MR_LMR_UPC_AverageCategoryRetail (fallbacks: "RetailPrice", "Retail Price")
 *
 * Data rows missing any of the three required values are counted in `skipped`.
 *
 * @returns { rows, totalRead, skipped, warnings }
 */
async function parseR1Xlsx(filePath) {
    const warnings = [];
    const workbook = new ExcelJS.Workbook();
    await workbook.xlsx.readFile(filePath);
    const sheet = workbook.worksheets[0];
    if (!sheet) {
        throw new Error(`R1 XLSX has no worksheets: ${filePath}`);
    }
    // Map header text -> 1-based column index, read from row 1.
    const headerIndex = new Map();
    sheet.getRow(1).eachCell({ includeEmpty: false }, (cell, colNum) => {
        const label = String(cell.value ?? '').trim();
        if (label) {
            headerIndex.set(label, colNum);
        }
    });
    // Required columns
    const programCol = headerIndex.get('ProgramName');
    const masterCol = headerIndex.get('Master Program Name');
    const trgidCol = headerIndex.get('TRGID');
    if (programCol === undefined || masterCol === undefined || trgidCol === undefined) {
        throw new Error(`R1 XLSX missing required columns. ` +
            `Expected: ProgramName, Master Program Name, TRGID. ` +
            `Found: ${[...headerIndex.keys()].join(', ')}`);
    }
    // Optional columns
    const locationCol = headerIndex.get('LocationID');
    const retailCol = headerIndex.get('MR_LMR_UPC_AverageCategoryRetail') ??
        headerIndex.get('RetailPrice') ??
        headerIndex.get('Retail Price');
    const rows = [];
    let totalRead = 0;
    let skipped = 0;
    // Read a cell as trimmed text ('' for empty / null cells).
    const cellText = (row, col) => String(row.getCell(col).value ?? '').trim();
    sheet.eachRow({ includeEmpty: false }, (row, rowNum) => {
        if (rowNum === 1) {
            return; // header row
        }
        totalRead++;
        const programCode = cellText(row, programCol);
        const masterProgram = cellText(row, masterCol);
        const trgid = cellText(row, trgidCol);
        // Rows missing any required value are skipped, not fatal.
        if (!programCode || !masterProgram || !trgid) {
            skipped++;
            return;
        }
        const locationId = locationCol ? cellText(row, locationCol) : '';
        let avgRetail = null;
        if (retailCol !== undefined) {
            const raw = row.getCell(retailCol).value;
            if (raw !== null && raw !== undefined && raw !== '') {
                const num = typeof raw === 'number' ? raw : parseFloat(String(raw));
                // Only positive, finite prices are kept.
                if (!isNaN(num) && num > 0) {
                    avgRetail = num;
                }
            }
        }
        rows.push({ programCode, masterProgram, trgid, locationId, avgRetail });
    });
    if (rows.length === 0 && totalRead > 0) {
        warnings.push(`All ${totalRead} rows were skipped (missing ProgramName, Master Program Name, or TRGID). ` +
            `Check that the first sheet contains data with the expected column headers.`);
    }
    return { rows, totalRead, skipped, warnings };
}
194
+ // ---------------------------------------------------------------------------
195
+ // Aggregation
196
+ // ---------------------------------------------------------------------------
197
+ /**
198
+ * Aggregate raw R1 rows into per-(masterProgram, programCode, location) groups.
199
+ * For each group we track distinct TRGIDs and distinct LocationIDs.
200
+ * This mirrors the Rust WASM aggregation in wasm/r1-parser/src/lib.rs.
201
+ *
202
+ * The count stored in `amount` uses distinct TRGID count (Checked-In Qty
203
+ * for most programs). The LocationID set is retained for callers that need it.
204
+ */
205
+ function aggregateRows(rows) {
206
+ const groups = new Map();
207
+ for (const row of rows) {
208
+ const location = extractLocation(row.programCode);
209
+ const key = `${row.masterProgram}|||${row.programCode}|||${location}`;
210
+ let group = groups.get(key);
211
+ if (!group) {
212
+ group = {
213
+ masterProgram: row.masterProgram,
214
+ masterProgramId: null,
215
+ programCode: row.programCode,
216
+ programIdKey: null,
217
+ clientId: null,
218
+ location,
219
+ trgidSet: new Set(),
220
+ locationIdSet: new Set(),
221
+ };
222
+ groups.set(key, group);
223
+ }
224
+ group.trgidSet.add(row.trgid);
225
+ if (row.locationId)
226
+ group.locationIdSet.add(row.locationId);
227
+ }
228
+ return groups;
229
+ }
230
+ // ---------------------------------------------------------------------------
231
+ // Insertion helpers
232
+ // ---------------------------------------------------------------------------
233
/**
 * POST one batch of rows into stg_financials_raw via PostgREST.
 *
 * On a non-2xx response the PostgREST error payload is unpacked (message,
 * hint, details) into the thrown Error message; if the body is not JSON the
 * raw text (or status text) is used. Returns the number of rows sent on
 * success.
 */
async function insertBatch(supabaseUrl, supabaseKey, rows) {
    const res = await fetch(`${supabaseUrl}/rest/v1/stg_financials_raw`, {
        method: 'POST',
        headers: {
            apikey: supabaseKey,
            Authorization: `Bearer ${supabaseKey}`,
            'Content-Type': 'application/json',
            Prefer: 'return=minimal',
        },
        body: JSON.stringify(rows),
    });
    if (res.ok) {
        return rows.length;
    }
    const text = await res.text();
    let message = text || res.statusText;
    try {
        const payload = JSON.parse(text);
        message = payload.message ?? message;
        if (payload.hint) {
            message += ` (Hint: ${payload.hint})`;
        }
        if (payload.details) {
            message += ` (Details: ${payload.details})`;
        }
    }
    catch {
        // Body was not JSON — keep the raw text.
    }
    throw new Error(`Insert batch failed: ${message}`);
}
266
+ // ---------------------------------------------------------------------------
267
+ // Main export
268
+ // ---------------------------------------------------------------------------
269
/**
 * Parse an R1 XLSX file, aggregate per-program volume, and load it into the
 * ReturnPro `stg_financials_raw` staging table.
 *
 * Flow:
 *   1. Parse the first sheet of the XLSX (required columns: ProgramName,
 *      Master Program Name, TRGID).
 *   2. Group rows by (masterProgram, programCode, location), counting
 *      distinct TRGIDs per group.
 *   3. Resolve FK columns against dim_master_program and dim_program_id.
 *   4. Insert into stg_financials_raw in batches of CHUNK_SIZE.
 *
 * @param filePath  Absolute path to the R1 XLSX file on disk.
 * @param userId    The user_id stamped on each inserted row.
 * @param monthYear Target month in "YYYY-MM" format (e.g. "2025-10");
 *                  stored in the `date` column as "YYYY-MM-01".
 */
export async function processR1Upload(filePath, userId, monthYear) {
    if (!fs.existsSync(filePath)) {
        throw new Error(`File not found: ${filePath}`);
    }
    // Validate monthYear format up front.
    if (!/^\d{4}-\d{2}$/.test(monthYear)) {
        throw new Error(`monthYear must be in YYYY-MM format (e.g. "2025-10"), got: "${monthYear}"`);
    }
    const sourceFileName = path.basename(filePath);
    const dateStr = `${monthYear}-01`;
    const loadedAt = new Date().toISOString();
    const warnings = [];
    // --- 1. Parse XLSX -------------------------------------------------------
    const parsed = await parseR1Xlsx(filePath);
    warnings.push(...parsed.warnings);
    if (parsed.rows.length === 0) {
        // Nothing usable — report parse stats without touching the database.
        return {
            sourceFileName,
            date: dateStr,
            totalRowsRead: parsed.totalRead,
            rowsSkipped: parsed.skipped,
            programGroupsFound: 0,
            rowsInserted: 0,
            warnings,
        };
    }
    // --- 2. Aggregate rows into groups ---------------------------------------
    const groups = aggregateRows(parsed.rows);
    // --- 3. Fetch dim tables for FK resolution -------------------------------
    const sb = getSupabase('returnpro');
    // Connection details come from the same env vars that supabase.ts reads;
    // the dim lookups below use raw fetch for pagination control.
    const supabaseUrl = process.env['RETURNPRO_SUPABASE_URL'];
    const supabaseKey = process.env['RETURNPRO_SUPABASE_SERVICE_KEY'];
    if (!supabaseUrl || !supabaseKey) {
        throw new Error('Missing env vars: RETURNPRO_SUPABASE_URL, RETURNPRO_SUPABASE_SERVICE_KEY');
    }
    const [programIdKeyMap, masterProgramMap] = await Promise.all([
        buildProgramIdKeyMap(supabaseUrl, supabaseKey),
        buildMasterProgramMap(supabaseUrl, supabaseKey),
    ]);
    // --- 4. Build insert rows ------------------------------------------------
    // Master programs missing from dim_master_program are collected so the
    // caller gets one consolidated warning.
    const unknownMasterPrograms = new Set();
    const insertRows = [];
    for (const group of groups.values()) {
        const masterDim = masterProgramMap.get(group.masterProgram);
        if (!masterDim) {
            unknownMasterPrograms.add(group.masterProgram);
        }
        const trgidCount = group.trgidSet.size;
        if (trgidCount === 0) {
            continue;
        }
        insertRows.push({
            source_file_name: sourceFileName,
            loaded_at: loadedAt,
            user_id: userId,
            location: group.location,
            master_program: group.masterProgram,
            program_code: group.programCode,
            program_id_key: programIdKeyMap.get(group.programCode) ?? null,
            date: dateStr,
            account_code: CHECKED_IN_ACCOUNT_CODE,
            account_id: CHECKED_IN_ACCOUNT_ID,
            // amount is TEXT in stg_financials_raw — store as string
            amount: String(trgidCount),
            mode: 'actual',
            master_program_id: masterDim?.master_program_id ?? null,
            client_id: masterDim?.client_id ?? null,
        });
    }
    if (unknownMasterPrograms.size > 0) {
        warnings.push(`${unknownMasterPrograms.size} master program(s) not found in dim_master_program ` +
            `(master_program_id and client_id will be NULL): ` +
            [...unknownMasterPrograms].sort().join(', '));
    }
    // --- 5. Insert in batches of CHUNK_SIZE ----------------------------------
    let totalInserted = 0;
    const batches = chunk(insertRows, CHUNK_SIZE);
    for (const [i, batch] of batches.entries()) {
        try {
            totalInserted += await insertBatch(supabaseUrl, supabaseKey, batch);
        }
        catch (err) {
            const message = err instanceof Error ? err.message : String(err);
            throw new Error(`Batch ${i + 1}/${batches.length} insert failed after ${totalInserted} rows inserted: ${message}`);
        }
    }
    // sb is a valid Supabase client kept as a reference for future use
    // (e.g. RPC calls); the dim lookups above intentionally use raw fetch
    // for pagination control (no .range() on a PostgREST URL).
    void sb;
    return {
        sourceFileName,
        date: dateStr,
        totalRowsRead: parsed.totalRead,
        rowsSkipped: parsed.skipped,
        programGroupsFound: groups.size,
        rowsInserted: totalInserted,
        warnings,
    };
}
@@ -0,0 +1,83 @@
1
+ /**
2
+ * Social Post Generation Pipeline
3
+ *
4
+ * Ported from Python: ~/projects/newsletter-automation social post pipeline
5
+ *
6
+ * Pipeline: Groq AI generates post ideas -> Unsplash image search -> Strapi push
7
+ *
8
+ * Functions:
9
+ * callGroq() — call Groq API (OpenAI-compatible) for AI content
10
+ * searchUnsplashImage() — search Unsplash NAPI for a stock photo URL
11
+ * generateSocialPosts() — orchestrator: generate posts and push to Strapi
12
+ */
13
+ import 'dotenv/config';
14
/** Options accepted by generateSocialPosts(). */
export interface GeneratePostsOptions {
    /** Brand identifier to generate posts for (e.g. 'LIFEINSUR'). */
    brand: string;
    /** Number of posts to generate (default: 9) */
    count?: number;
    /** Week start date in YYYY-MM-DD format */
    weekOf?: string;
    /** Platforms to target (default: ['instagram', 'facebook']) */
    platforms?: string[];
    /** Generate but do not push to Strapi */
    dryRun?: boolean;
}
25
/**
 * Payload for a single social post as pushed to Strapi
 * (POST /api/social-posts).
 */
export interface SocialPostData {
    headline: string;
    body: string;
    /** Call-to-action button/link text. */
    cta_text: string;
    /** Call-to-action target URL. */
    cta_url: string;
    /** Unsplash image URL, or null when no suitable image was found. */
    image_url: string | null;
    /** One of four preset overlay variants applied to the image. */
    overlay_style: 'dark-bottom' | 'brand-bottom' | 'brand-full' | 'dark-full';
    template: string;
    platform: string;
    brand: string;
    scheduled_date: string;
    /** Always 'pending' at creation time. */
    delivery_status: 'pending';
}
38
/** Summary returned by generateSocialPosts(). */
export interface GeneratePostsResult {
    brand: string;
    /** Number of posts created. */
    postsCreated: number;
    /** Key fields of each created post, including the Strapi documentId. */
    posts: Array<{
        documentId: string;
        headline: string;
        platform: string;
        scheduled_date: string;
    }>;
    /** Errors collected while generating or pushing posts. */
    errors: string[];
}
49
/**
 * Call the Groq API (OpenAI-compatible) and return the assistant's response text.
 *
 * @param systemPrompt System-role message framing the assistant's behavior.
 * @param userPrompt   User-role message with the actual request.
 * @returns The assistant's reply as plain text.
 *
 * @example
 * const response = await callGroq('You are a copywriter.', 'Write a tagline.')
 */
export declare function callGroq(systemPrompt: string, userPrompt: string): Promise<string>;
56
/**
 * Search Unsplash NAPI for a themed stock photo URL.
 * Returns the `.results[0].urls.regular` URL, or null if not found.
 *
 * Note: Uses the public NAPI endpoint — no auth required but may be rate-limited.
 *
 * @param query Free-text search terms describing the desired image.
 * @returns URL of the first matching image, or null when there are no results.
 *
 * @example
 * const url = await searchUnsplashImage('life insurance family protection')
 */
export declare function searchUnsplashImage(query: string): Promise<string | null>;
66
/**
 * Main orchestrator: generate AI-powered social media posts and push to Strapi.
 *
 * Steps:
 *   1. Call Groq to generate post ideas as JSON
 *   2. For each post, search Unsplash for a themed image
 *   3. Build SocialPostData and push to Strapi via POST /api/social-posts
 *   4. Return summary of created posts and any errors
 *
 * @param opts Generation options (brand is required; see GeneratePostsOptions).
 * @returns Summary of created posts and any errors encountered.
 *
 * @example
 * const result = await generateSocialPosts({
 *   brand: 'LIFEINSUR',
 *   count: 9,
 *   weekOf: '2026-03-02',
 * })
 * console.log(`Created ${result.postsCreated} posts`)
 */
export declare function generateSocialPosts(opts: GeneratePostsOptions): Promise<GeneratePostsResult>;