microfox 1.0.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (91) hide show
  1. package/CHANGELOG.md +72 -0
  2. package/README.md +114 -0
  3. package/dist/agent-template.txt +486 -0
  4. package/dist/background-agent/.gitignore.txt +22 -0
  5. package/dist/background-agent/eslint.config.js.txt +48 -0
  6. package/dist/background-agent/microfox.json.txt +4 -0
  7. package/dist/background-agent/openapi.md.txt +1 -0
  8. package/dist/background-agent/package.json.txt +47 -0
  9. package/dist/background-agent/serverless.yml.txt +123 -0
  10. package/dist/background-agent/src/functions/cron-paginate.ts.txt +135 -0
  11. package/dist/background-agent/src/functions/cron-populate.ts.txt +33 -0
  12. package/dist/background-agent/src/functions/route-trigger-index.ts.txt +157 -0
  13. package/dist/background-agent/src/functions/route-trigger-populate.ts.txt +0 -0
  14. package/dist/background-agent/src/functions/sqs-index.ts.txt +147 -0
  15. package/dist/background-agent/src/helpers/ragRedis.ts.txt +78 -0
  16. package/dist/background-agent/src/index.ts.txt +69 -0
  17. package/dist/background-agent/tsconfig.json.txt +33 -0
  18. package/dist/chunk-4HNHBA2H.mjs +104 -0
  19. package/dist/chunk-4HNHBA2H.mjs.map +1 -0
  20. package/dist/chunk-ARAHSYJI.mjs +263 -0
  21. package/dist/chunk-ARAHSYJI.mjs.map +1 -0
  22. package/dist/chunk-JGAX4PD6.mjs +290 -0
  23. package/dist/chunk-JGAX4PD6.mjs.map +1 -0
  24. package/dist/chunk-KPJJOO76.mjs +12 -0
  25. package/dist/chunk-KPJJOO76.mjs.map +1 -0
  26. package/dist/chunk-TZQZMKHP.mjs +11 -0
  27. package/dist/chunk-TZQZMKHP.mjs.map +1 -0
  28. package/dist/chunk-UFRGJMF4.mjs +154 -0
  29. package/dist/chunk-UFRGJMF4.mjs.map +1 -0
  30. package/dist/chunk-UHWJTQKW.mjs +139 -0
  31. package/dist/chunk-UHWJTQKW.mjs.map +1 -0
  32. package/dist/chunk-UYROVW53.mjs +89 -0
  33. package/dist/chunk-UYROVW53.mjs.map +1 -0
  34. package/dist/chunk-XGFSFWK3.mjs +113 -0
  35. package/dist/chunk-XGFSFWK3.mjs.map +1 -0
  36. package/dist/cli.d.mts +2 -0
  37. package/dist/cli.d.ts +2 -0
  38. package/dist/cli.js +1154 -0
  39. package/dist/cli.js.map +1 -0
  40. package/dist/cli.mjs +52 -0
  41. package/dist/cli.mjs.map +1 -0
  42. package/dist/commands/add.d.mts +5 -0
  43. package/dist/commands/add.d.ts +5 -0
  44. package/dist/commands/add.js +126 -0
  45. package/dist/commands/add.js.map +1 -0
  46. package/dist/commands/add.mjs +10 -0
  47. package/dist/commands/add.mjs.map +1 -0
  48. package/dist/commands/code.d.mts +5 -0
  49. package/dist/commands/code.d.ts +5 -0
  50. package/dist/commands/code.js +187 -0
  51. package/dist/commands/code.js.map +1 -0
  52. package/dist/commands/code.mjs +9 -0
  53. package/dist/commands/code.mjs.map +1 -0
  54. package/dist/commands/install.d.mts +5 -0
  55. package/dist/commands/install.d.ts +5 -0
  56. package/dist/commands/install.js +296 -0
  57. package/dist/commands/install.js.map +1 -0
  58. package/dist/commands/install.mjs +9 -0
  59. package/dist/commands/install.mjs.map +1 -0
  60. package/dist/commands/kickstart.d.mts +5 -0
  61. package/dist/commands/kickstart.d.ts +5 -0
  62. package/dist/commands/kickstart.js +322 -0
  63. package/dist/commands/kickstart.js.map +1 -0
  64. package/dist/commands/kickstart.mjs +10 -0
  65. package/dist/commands/kickstart.mjs.map +1 -0
  66. package/dist/commands/push.d.mts +5 -0
  67. package/dist/commands/push.d.ts +5 -0
  68. package/dist/commands/push.js +137 -0
  69. package/dist/commands/push.js.map +1 -0
  70. package/dist/commands/push.mjs +9 -0
  71. package/dist/commands/push.mjs.map +1 -0
  72. package/dist/commands/status.d.mts +7 -0
  73. package/dist/commands/status.d.ts +7 -0
  74. package/dist/commands/status.js +148 -0
  75. package/dist/commands/status.js.map +1 -0
  76. package/dist/commands/status.mjs +13 -0
  77. package/dist/commands/status.mjs.map +1 -0
  78. package/dist/commands/update.d.mts +5 -0
  79. package/dist/commands/update.d.ts +5 -0
  80. package/dist/commands/update.js +172 -0
  81. package/dist/commands/update.js.map +1 -0
  82. package/dist/commands/update.mjs +9 -0
  83. package/dist/commands/update.mjs.map +1 -0
  84. package/dist/index.d.mts +20 -0
  85. package/dist/index.d.ts +20 -0
  86. package/dist/index.js +406 -0
  87. package/dist/index.js.map +1 -0
  88. package/dist/index.mjs +14 -0
  89. package/dist/index.mjs.map +1 -0
  90. package/dist/package-template.txt +494 -0
  91. package/package.json +68 -0
@@ -0,0 +1,48 @@
import js from '@eslint/js';
import tseslint from '@typescript-eslint/eslint-plugin';
import tsparser from '@typescript-eslint/parser';

// Flat ESLint config: base recommended JS rules, a TypeScript override block,
// and global ignore patterns for build artifacts and tests.
export default [
  js.configs.recommended,
  {
    files: ['**/*.ts', '**/*.tsx'],
    languageOptions: {
      parser: tsparser,
      parserOptions: {
        ecmaVersion: 2020,
        sourceType: 'module',
        ecmaFeatures: {
          jsx: true,
        },
      },
      // Minimal Node globals actually referenced by the lambda handlers.
      globals: {
        console: 'readonly',
        process: 'readonly',
      },
    },
    plugins: {
      '@typescript-eslint': tseslint,
    },
    rules: {
      ...tseslint.configs.recommended.rules,
      // Defer unused-variable checking to the TS-aware rule below.
      'no-unused-vars': 'off',
      '@typescript-eslint/no-unused-vars': ['warn'],
      // Handlers log intentionally; keep as a warning rather than an error.
      'no-console': 'warn',
      // NOTE(review): removed 'react/prop-types': 'off' — eslint-plugin-react
      // is neither installed nor registered in `plugins`, and flat config
      // errors on rules whose plugin cannot be resolved.
      'no-case-declarations': 'off',
      '@typescript-eslint/no-explicit-any': 'off',
      '@typescript-eslint/ban-ts-comment': 'off',
      '@typescript-eslint/no-empty-function': 'off',
    },
  },
  {
    ignores: [
      'dist/**',
      'node_modules/**',
      '.turbo/**',
      'coverage/**',
      '**/*.js',
      '**/*.test.ts',
    ],
  },
];
@@ -0,0 +1,4 @@
1
+ {
2
+ "stage": "prod",
3
+ "ignored": [".build/**", "package-lock.json"]
4
+ }
@@ -0,0 +1 @@
1
+ This is a simple API for a Reddit RAG (Retrieval-Augmented Generation) agent. It allows you to index subreddits and perform searches on the indexed content to get AI-generated reports and insights.
@@ -0,0 +1,47 @@
1
+ {
2
+ "name": "<%= agentName %>",
3
+ "version": "1.0.0",
4
+ "description": "A Microfox agent.",
5
+ "main": "dist/index.js",
6
+ "type": "module",
7
+ "scripts": {
8
+ "build": "tsc",
9
+ "deploy": "npm run build && serverless deploy --stage dev",
10
+ "deploy:preview": "npm run build && serverless deploy --stage preview",
11
+ "deploy:prod": "npm run build && serverless deploy --stage prod",
12
+ "remove": "serverless remove",
13
+ "remove:preview": "serverless remove --stage preview",
14
+ "remove:prod": "serverless remove --stage prod",
15
+ "start": "serverless offline start",
16
+ "dev": "npm run build && serverless offline start",
17
+ "lint": "eslint .",
18
+ "lint:fix": "eslint . --fix"
19
+ },
20
+ "dependencies": {
21
+ "@aws-sdk/client-sqs": "^3.504.0",
22
+ "@microfox/ai": "^1.2.0",
23
+ "@microfox/ai-provider-anthropic": "^1.3.0",
24
+ "@microfox/db-upstash": "^1.1.4",
25
+ "@microfox/rag-upstash": "^1.1.1",
26
+ "@microfox/tool-core": "^1.0.6",
27
+ "@upstash/redis": "^1.28.4",
28
+ "ai": "latest",
29
+ "aws-lambda": "*",
30
+ "dotenv": "^16.4.5",
31
+ "uuid": "^9.0.1"
32
+ },
33
+ "devDependencies": {
34
+ "@eslint/js": "latest",
35
+ "@types/aws-lambda": "^8.10.138",
36
+ "@types/node": "^20.11.19",
37
+ "@types/uuid": "^9.0.8",
38
+ "@typescript-eslint/eslint-plugin": "latest",
39
+ "@typescript-eslint/parser": "latest",
40
+ "eslint": "^9.1.0",
41
+ "serverless": "^3.38.0",
42
+ "serverless-offline": "^13.3.3",
43
+ "serverless-plugin-typescript": "^2.1.5",
44
+ "serverless-step-functions": "^3.23.2",
45
+ "typescript": "^5.3.3"
46
+ }
47
+ }
@@ -0,0 +1,123 @@
service: <%= agentName %>

package:
  excludeDevDependencies: true
  exclude:
    - venv/**
    - .idea/**
    - .vscode/**
    - src/**
    - node_modules/serverless-offline/**

custom:
  stage: ${env:ENVIRONMENT, 'dev'}
  serverless-offline:
    httpPort: 4000
    lambdaPort: 4002
    useChildProcesses: true
    useWorkerThreads: true
    noCookieValidation: true
    allowCache: true
    hideStackTraces: false
    disableCookieValidation: true
    noTimeout: true
  environment: ${file(env.json)}

provider:
  name: aws
  runtime: nodejs20.x
  region: us-east-1
  stage: ${env:ENVIRONMENT, 'dev'}
  environment: ${file(env.json)}
  iam:
    role:
      statements:
        - Effect: "Allow"
          Action:
            - "sqs:SendMessage"
          Resource:
            - Fn::GetAtt: [IndexQueue, Arn]
            - Fn::GetAtt: [PaginateQueue, Arn]

plugins:
  - serverless-plugin-typescript
  - serverless-offline
  - serverless-step-functions

functions:
  # Cron job to populate the index queue
  cronPopulate:
    handler: dist/functions/cron-populate.handler
    environment:
      # NOTE(review): the handlers read process.env.INDEX_SUBREDDIT_QUEUE_URL
      # (see cron-paginate.ts / route-trigger-index.ts); the previous key
      # INDEX_QUEUE_URL was never read, leaving the queue URL undefined at
      # runtime.
      INDEX_SUBREDDIT_QUEUE_URL:
        Ref: IndexQueue
    events:
      - schedule: rate(1 day)

  # Paginator worker. Despite the "cron" name this is triggered by messages
  # on PaginateQueue, not by a schedule — confirm intent with the authors.
  cronPaginate:
    handler: dist/functions/cron-paginate.handler
    environment:
      # cron-paginate.ts sends follow-up indexing work to the index queue via
      # INDEX_SUBREDDIT_QUEUE_URL, so it needs the IndexQueue URL here.
      INDEX_SUBREDDIT_QUEUE_URL:
        Ref: IndexQueue
    events:
      - sqs:
          arn:
            Fn::GetAtt:
              - PaginateQueue
              - Arn

  # HTTP trigger to add a subreddit to the index queue
  triggerIndex:
    handler: dist/functions/route-trigger-index.handler
    environment:
      INDEX_SUBREDDIT_QUEUE_URL:
        Ref: IndexQueue
    events:
      - http:
          path: /index
          method: POST
          cors: true

  # SQS consumer to index a subreddit
  sqsIndex:
    handler: dist/functions/sqs-index.handler
    events:
      - sqs:
          arn:
            Fn::GetAtt:
              - IndexQueue
              - Arn

  # Documentation
  getDocs:
    handler: dist/index.getDocs
    events:
      - http:
          path: /docs.json
          method: get
          cors: true

resources:
  Resources:
    IndexQueue:
      Type: AWS::SQS::Queue
      Properties:
        QueueName: ${self:service}-index-queue-${opt:stage,env:ENVIRONMENT, 'dev'}
        VisibilityTimeout: 300
    PaginateQueue:
      Type: AWS::SQS::Queue
      Properties:
        QueueName: ${self:service}-paginate-queue-${opt:stage,env:ENVIRONMENT, 'dev'}
        VisibilityTimeout: 300
  Outputs:
    ApiEndpoints:
      Description: "API Endpoints"
      Value:
        Fn::Join:
          - ""
          - - "API: https://"
            - Ref: "ApiGatewayRestApi"
            - ".execute-api."
            - Ref: "AWS::Region"
            - ".amazonaws.com/${env:ENVIRONMENT, 'dev'}"
@@ -0,0 +1,135 @@
import { SQSClient, SendMessageCommand } from "@aws-sdk/client-sqs";
import {
  subredditStore,
  ragSubredditPaginator,
  subredditListPaginator,
} from "../helpers/ragRedis.js";
import { handler as indexSubredditHandler } from "./sqs-index.js";
import { createMockSQSEvent, ProcessTask } from "@microfox/tool-core";

const sqsClient = new SQSClient({});
// serverless-offline sets IS_OFFLINE=true; used below to bypass SQS locally.
const isOffline = process.env.IS_OFFLINE === "true";
const STALE_AFTER_SECONDS = 60 * 60; // 1 hour
const ITEMS_PER_PAGE = 100;

/**
 * Entry point that indexes the stored subreddit list progressively, one page
 * (ITEMS_PER_PAGE items) per invocation, re-queuing any subreddit whose index
 * is older than STALE_AFTER_SECONDS.
 *
 * Flow per run:
 *  1. Load (or create) the list-paginator cursor to find the current page.
 *  2. Slice that page out of subredditStore.list().
 *  3. For each stale subreddit, create a task record and either invoke the
 *     SQS consumer directly (offline) or send a message to the index queue.
 *  4. Advance the cursor, wrapping back to page 1 after the last page.
 *
 * Never rejects: failures are caught and logged, which also means a partial
 * failure leaves the cursor un-advanced so the same page is retried next run.
 * @returns void — results are communicated via SQS/Redis side effects.
 */
export const handler = async (): Promise<void> => {
  console.log("handler: cron-index-subreddit");

  const taskHandler = new ProcessTask({
    url: process.env.TASK_UPSTASH_REDIS_REST_URL,
    token: process.env.TASK_UPSTASH_REDIS_REST_TOKEN,
  });

  try {
    // Resume from the stored cursor, or start a fresh run at page 1.
    let paginatorState = await subredditListPaginator.getCurrentStatus();
    if (!paginatorState) {
      paginatorState = await subredditListPaginator.startNewIndexing({
        page: 1,
        itemsPerPage: ITEMS_PER_PAGE,
        totalCount: 0,
      });
    }
    if (!paginatorState.progress) {
      console.error("handler: paginator state is not valid", {
        paginatorState,
      });
      return;
    }
    const page = paginatorState.progress.page;
    console.log("handler: processing page", { page });

    const allSubreddits = await subredditStore.list();
    const totalCount = allSubreddits.length;
    const totalPages = Math.ceil(totalCount / ITEMS_PER_PAGE);
    console.log("handler: subreddits stats", { totalCount, totalPages });

    if (totalCount === 0) {
      console.log("handler: no subreddits to process");
      return;
    }

    // The list can shrink between runs; if the cursor points past the end,
    // reset it and let the next run start over from page 1.
    const startIndex = (page - 1) * ITEMS_PER_PAGE;
    if (startIndex >= totalCount && totalCount > 0) {
      console.log("handler: page out of bounds, resetting to page 1");
      await subredditListPaginator.updateIndexingStatus({
        page: 1,
        totalCount,
      });
      // We could either process page 1 now or wait for the next cron run.
      // Let's wait for the next run to keep logic simple.
      return;
    }

    const subredditsToProcess = allSubreddits.slice(
      startIndex,
      startIndex + ITEMS_PER_PAGE
    );
    console.log("handler: processing subreddits", {
      count: subredditsToProcess.length,
    });

    for (const subreddit of subredditsToProcess) {
      const subredditName = subreddit.name;
      console.log("handler: checking subreddit", { subredditName });

      // Only re-index when the last indexing run is older than the TTL.
      const isStale =
        await ragSubredditPaginator(subredditName).isStale(STALE_AFTER_SECONDS);

      if (isStale) {
        console.log("handler: subreddit is stale, triggering indexing", {
          subredditName,
        });

        // The task record lets callers poll indexing progress by task_id.
        const task = await taskHandler.createTask({
          subreddit: subredditName,
        });

        const sqsMessageBody = {
          subreddit: subredditName,
          task_id: task.id,
        };

        if (isOffline) {
          console.log(
            "handler: running in offline mode, invoking handler directly"
          );
          const sqsEvent = createMockSQSEvent(sqsMessageBody);
          await indexSubredditHandler(sqsEvent);
          console.log("handler: invoked handler directly");
        } else {
          if (!process.env.INDEX_SUBREDDIT_QUEUE_URL) {
            console.error("INDEX_SUBREDDIT_QUEUE_URL is not set");
            continue; // Move to the next subreddit
          }
          console.log("handler: sending message to SQS", {
            queueUrl: process.env.INDEX_SUBREDDIT_QUEUE_URL,
          });
          const command = new SendMessageCommand({
            QueueUrl: process.env.INDEX_SUBREDDIT_QUEUE_URL,
            MessageBody: JSON.stringify(sqsMessageBody),
          });

          await sqsClient.send(command);
          console.log("handler: sent message to SQS");
        }
      } else {
        console.log("handler: subreddit is not stale, skipping", {
          subredditName,
        });
      }
    }

    // Wrap around after the last page so the scan repeats indefinitely.
    const nextPage = page >= totalPages && totalPages > 0 ? 1 : page + 1;
    console.log("handler: updating paginator for next run", { nextPage });
    await subredditListPaginator.updateIndexingStatus({
      page: nextPage,
      totalCount,
    });
  } catch (error) {
    console.error("handler: failed to process cron-index-subreddit", error);
  }
};
@@ -0,0 +1,33 @@
import { ToolParse } from "@microfox/tool-core";
import { subredditStore, SubredditInfo } from "../helpers/ragRedis.js";

const toolHandler = new ToolParse({});

/**
 * Scheduled entry point intended to discover and save subreddits.
 *
 * Currently a stub: it only ensures Reddit and Anthropic credentials are
 * present in the environment (fetching them from the Microfox env store when
 * missing); the actual population step is the TODO below.
 *
 * Never rejects — all failures are caught and logged.
 */
export const handler = async (): Promise<void> => {
  console.log("cron-save-handler: cron-save-subreddit");

  try {
    // Lazily hydrate Reddit credentials when not already injected.
    if (!process.env.REDDIT_ACCESS_TOKEN) {
      console.log("cron-save-handler: fetching env vars for reddit");
      await toolHandler.fetchEnvVars({
        stage: "staging",
        packageName: "@microfox/reddit",
        templateType: "testing",
      });
      console.log("cron-save-handler: fetched env vars for reddit");
    }
    // Same for the Anthropic API key used by downstream report generation.
    if (!process.env.ANTHROPIC_API_KEY) {
      console.log("cron-save-handler: fetching env vars for anthropic");
      await toolHandler.fetchEnvVars({
        stage: "staging",
        packageName: "@microfox/ai-provider-anthropic",
        templateType: "testing",
      });
      console.log("cron-save-handler: fetched env vars for anthropic");
    }
    // TODO: Populate the content
    // Placeholder accumulator for the population/dedupe step above; unused
    // until that logic lands (@typescript-eslint/no-unused-vars is only a
    // warning in this project's ESLint config).
    const savedSubreddits = new Set<string>();
  } catch (error) {
    console.error("Failed to run cron to save subreddits", error);
  }
};
@@ -0,0 +1,157 @@
1
+ import {
2
+ APIGatewayProxyEvent,
3
+ APIGatewayProxyResult,
4
+ SQSEvent,
5
+ } from "aws-lambda";
6
+ import { SQSClient, SendMessageCommand } from "@aws-sdk/client-sqs";
7
+ import { handler as indexSubredditHandler } from "./sqs-index.js";
8
+ import { createMockSQSEvent, ProcessTask } from "@microfox/tool-core";
9
+ import { ragSubredditPaginator } from "../helpers/ragRedis.js";
10
+
11
+ const sqsClient = new SQSClient({});
12
+ const isOffline = process.env.IS_OFFLINE === "true";
13
+
14
+ export const handler = async (
15
+ event: APIGatewayProxyEvent
16
+ ): Promise<APIGatewayProxyResult> => {
17
+ console.log("handler: sqs-trigger-subreddit", {
18
+ isOffline,
19
+ body: event.body,
20
+ });
21
+
22
+ const taskHandler = new ProcessTask({
23
+ url: process.env.TASK_UPSTASH_REDIS_REST_URL,
24
+ token: process.env.TASK_UPSTASH_REDIS_REST_TOKEN,
25
+ });
26
+
27
+ if (!event.body) {
28
+ console.log("handler: missing request body");
29
+ return {
30
+ statusCode: 400,
31
+ body: JSON.stringify({ message: "Missing request body" }),
32
+ };
33
+ }
34
+
35
+ try {
36
+ console.log("handler: parsing request body");
37
+ const { subreddit } = JSON.parse(event.body);
38
+ console.log("handler: parsed request body", { subreddit });
39
+ if (!subreddit) {
40
+ console.log("handler: missing subreddit in request body");
41
+ return {
42
+ statusCode: 400,
43
+ body: JSON.stringify({ message: "Missing subreddit in request body" }),
44
+ };
45
+ }
46
+
47
+ if (!process.env.INDEX_SUBREDDIT_QUEUE_URL && !isOffline) {
48
+ console.error("INDEX_SUBREDDIT_QUEUE_URL is not set");
49
+ return {
50
+ statusCode: 500,
51
+ body: JSON.stringify({
52
+ message: "Internal server error: Queue not configured",
53
+ }),
54
+ };
55
+ }
56
+
57
+ // TODO: get the subreddit info
58
+ // TODO: store/update it in db
59
+
60
+ const task = await taskHandler.createTask({
61
+ subreddit,
62
+ });
63
+
64
+ console.log("handler: preparing SQS message");
65
+ const authHeader = event.headers["x-auth-secrets"];
66
+ const sqsMessageBody = { subreddit, xAuthSecrets: authHeader, task_id: task.id};
67
+ console.log("handler: prepared SQS message", { sqsMessageBody });
68
+
69
+ console.log("handler: checking if subreddit is stale", { subreddit });
70
+ const isStale = await ragSubredditPaginator(subreddit).isStale(60 * 60);
71
+ console.log("handler: checked if subreddit is stale", { isStale });
72
+
73
+ if (!isStale) {
74
+ console.log("handler: subreddit already indexed in the last hour");
75
+ return {
76
+ statusCode: 202,
77
+ body: JSON.stringify({
78
+ message: "Subreddit already indexed in the last hour",
79
+ }),
80
+ };
81
+ }
82
+
83
+ if (isOffline) {
84
+ console.log(
85
+ "handler: running in offline mode, invoking handler directly"
86
+ );
87
+ const sqsEvent = createMockSQSEvent(sqsMessageBody);
88
+ indexSubredditHandler(sqsEvent);
89
+ console.log("handler: invoked handler directly");
90
+ } else {
91
+ console.log("handler: sending message to SQS", {
92
+ queueUrl: process.env.INDEX_SUBREDDIT_QUEUE_URL,
93
+ });
94
+ const command = new SendMessageCommand({
95
+ QueueUrl: process.env.INDEX_SUBREDDIT_QUEUE_URL,
96
+ MessageBody: JSON.stringify(sqsMessageBody),
97
+ });
98
+
99
+ await sqsClient.send(command);
100
+ console.log("handler: sent message to SQS");
101
+ }
102
+
103
+ console.log("handler: finished successfully");
104
+ return {
105
+ statusCode: 202,
106
+ body: JSON.stringify({
107
+ message: "Subreddit indexing triggered successfully",
108
+ }),
109
+ };
110
+ } catch (error) {
111
+ console.error(error);
112
+ if (error instanceof SyntaxError) {
113
+ console.log("handler: invalid JSON in request body");
114
+ return {
115
+ statusCode: 400,
116
+ body: JSON.stringify({ message: "Invalid JSON in request body" }),
117
+ };
118
+ }
119
+ console.log("handler: internal server error");
120
+ return {
121
+ statusCode: 500,
122
+ body: JSON.stringify({ message: "Internal server error" }),
123
+ };
124
+ }
125
+ };
126
+
/**
 * OpenAPI-style operation metadata for the POST /index route handled above;
 * consumed by the documentation endpoint exposed at /docs.json.
 */
export const handleApiRequestDocs = {
  summary: "Trigger subreddit indexing",
  description:
    "Triggers the indexing of a specific subreddit by sending a message to the SQS queue.",
  tags: ["Subreddit"],
  requestBody: {
    required: true,
    content: {
      "application/json": {
        schema: {
          type: "object",
          properties: {
            subreddit: {
              type: "string",
              description: "The name of the subreddit to index.",
            },
          },
          required: ["subreddit"],
        },
      },
    },
  },
  responses: {
    // 202 covers both "queued" and "skipped: indexed within the last hour".
    "202": {
      description:
        "Accepted: The indexing request has been successfully queued.",
    },
    "400": { description: "Bad Request: Missing or invalid request body." },
    "500": { description: "Internal server error." },
  },
};