koishi-plugin-sy-bot 0.0.7 → 0.0.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/lib/index.js CHANGED
@@ -162,28 +162,28 @@ ${hljsCss}
   </body>
   </html>
   `;
+  const page = await ctx.puppeteer.page();
   try {
-    console.log("开始创建页面...");
-    const page = await ctx.puppeteer.page();
-    console.log("页面创建成功!");
-    console.log("开始设置视口...");
+    const logger = ctx.logger("sy-bot");
+    logger.info("开始创建页面...");
+    logger.info("页面创建成功!");
+    logger.info("开始设置视口...");
     await page.setViewport({
       width: 1e3,
       height: 800,
       deviceScaleFactor: 2
     });
-    console.log("视口设置成功");
-    console.log("开始加载内容...");
+    logger.info("视口设置成功");
+    logger.info("开始加载内容...");
     await page.setContent(html, {
-      waitUntil: "domcontentloaded",
-      timeout: 3e4
+      waitUntil: "networkidle0"
     });
-    console.log("内容加载成功");
-    console.log("开始设置body...");
+    logger.info("内容加载成功");
+    logger.info("开始设置body...");
     const body = await page.$("body");
-    console.log("设置box...");
+    logger.info("设置box...");
     const box = await body.boundingBox();
-    console.log("获取page截图缓冲区...");
+    logger.info("获取page截图缓冲区...");
     const buffer = await page.screenshot({
       type: "png",
       clip: {
@@ -193,13 +193,15 @@ ${hljsCss}
         height: Math.ceil(box.height)
       }
     });
-    console.log("正在关闭页面...");
-    await page.close();
-    console.log("成功关闭页面...");
+    logger.info("正在关闭页面...");
+    logger.info("成功关闭页面...");
     return buffer;
   } catch (err) {
-    console.log(err);
+    const logger = ctx.logger("sy-bot");
+    logger.error(err);
     throw "pptr 建立报错:" + (err instanceof Error ? err.message : String(err));
+  } finally {
+    await page.close();
   }
 }
 __name(markdownToImage, "markdownToImage");
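
Note: the net effect of these two hunks is a safer Puppeteer lifecycle. The page is now acquired before the try block and closed in a finally, so an error during rendering can no longer leak browser pages, and setContent waits for "networkidle0" instead of "domcontentloaded" so stylesheets and fonts have settled before the screenshot. A minimal standalone sketch of the pattern, assuming koishi-plugin-puppeteer's ctx.puppeteer.page() as used in the diff (renderToPng is a hypothetical name for illustration):

    async function renderToPng(ctx, html) {
      const page = await ctx.puppeteer.page();
      try {
        await page.setViewport({ width: 1000, height: 800, deviceScaleFactor: 2 });
        // networkidle0 resolves once pending requests settle, so web fonts
        // and highlight.js assets are loaded before the screenshot is taken
        await page.setContent(html, { waitUntil: "networkidle0" });
        const body = await page.$("body");
        const box = await body.boundingBox();
        return await page.screenshot({
          type: "png",
          clip: { x: box.x, y: box.y, width: Math.ceil(box.width), height: Math.ceil(box.height) }
        });
      } finally {
        await page.close(); // runs on success and on error alike
      }
    }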
@@ -233,8 +235,9 @@ async function sendGroupMessageReaction(session, face_id, ifSet = true) {
   );
 }
 __name(sendGroupMessageReaction, "sendGroupMessageReaction");
-async function getAiChatResult({ config, user_query }) {
+async function getAiChatResult({ config, user_query, ctx }) {
   try {
+    const logger = ctx.logger("sy-bot");
     const { model: MODEL, api: API_KEY, prompt: system_prompt } = config;
     const { data } = await import_axios.default.post(
       `https://api.qingyuntop.top/v1beta/models/${MODEL}?key=${API_KEY}`,
@@ -266,12 +269,15 @@ async function getAiChatResult({ config, user_query }) {
         }
       }
     );
-    console.log("Gemini调用结果:", data);
+    console.log("Gemini调用结果:\n", data);
     const { content } = data.candidates[0];
-    const result = content.parts[1].text;
+    logger.info("提取AI请求结果content:\n", content);
+    logger.info("过滤后的parts回复列表:\n", data.candidates[0].content.parts.filter((part) => part.text));
+    const result = content.parts.filter((part) => part.text).map((part) => part.text).join("");
+    logger.info("AI回复的文字:", result);
     return result;
   } catch (error) {
-    console.error("<getAiChatResult()<function> 调用报错!\n", error);
+    console.error("getAiChatResult() 调用报错!\n", error);
   }
 }
 __name(getAiChatResult, "getAiChatResult");
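
Note: the substantive fix here is the reply extraction. 0.0.7 hard-coded content.parts[1].text, which breaks whenever the response carries a different number of parts (a single text part, or non-text parts alongside the answer). 0.0.9 keeps every part that actually carries text and joins them. A minimal sketch of that extraction, using only the response shape visible in the diff (extractText is a hypothetical helper name):

    function extractText(data) {
      const parts = data?.candidates?.[0]?.content?.parts ?? [];
      return parts
        .filter((part) => part.text)  // skip parts without a text payload
        .map((part) => part.text)
        .join("");
    }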
@@ -289,6 +295,7 @@ async function getAiImgUnderstandMultiple({
   url_list,
   query
 }) {
+  const logger = ctx.logger("sy-bot");
   try {
     const { model: MODEL, api: API_KEY } = config;
     let base64_list = [];
@@ -319,10 +326,13 @@ async function getAiImgUnderstandMultiple({
       `https://api.qingyuntop.top/v1beta/models/${MODEL}?key=${API_KEY}`,
       BODY
     );
-    const { text } = data.candidates[0].content.parts[1];
+    logger.info("axios 接口返回的数据 data: \n", data);
+    logger.info("过滤后的parts回复列表:\n", data.candidates[0].content.parts.filter((part) => part.text));
+    const text = data.candidates[0].content.parts.filter((part) => part.text).map((p) => p.text).join("");
+    logger.info("解析出的text:", text);
     return text;
   } catch (error) {
-    console.error("<getAiChatResult()<function> 调用报错!\n", error);
+    logger.error("getAiChatResult() 调用报错!\n", error);
   }
 }
 __name(getAiImgUnderstandMultiple, "getAiImgUnderstandMultiple");
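
Note a scoping detail visible in the diff: here the logger is declared before the try, so the catch block's logger.error works; in getAiChatResult above it is declared inside the try, which is why that catch still falls back to console.error. A sketch of the safer arrangement this function uses:

    // Declaring the namespaced logger outside try keeps it in scope
    // for catch (and finally) as well.
    const logger = ctx.logger("sy-bot");
    try {
      // ... request and parse ...
    } catch (error) {
      logger.error("request failed:\n", error);
    }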
@@ -344,16 +354,19 @@ __name(getElementsImages, "getElementsImages");
 async function sendOnlyText({ session, ctx, config, query }) {
   const quote = session?.event?.message?.quote?.content;
   const question = quote ? query + quote : query;
+  const logger = ctx.logger("sy-bot");
   const raw_answer = await getAiChatResult({
     config,
-    user_query: question
+    user_query: question,
+    ctx
   });
-  console.log("AI的回复:", raw_answer);
   if (!raw_answer) {
     return "AI没有回复";
   }
   try {
+    logger.info("开始渲染markdown...");
     const h_ans_img = await textToImage(ctx, raw_answer);
+    logger.info("markdown渲染完毕!");
     session.send(
       /* @__PURE__ */ (0, import_jsx_runtime.jsx)("quote", { id: session.event.message.id }) + /* @__PURE__ */ (0, import_jsx_runtime.jsx)("at", { id: session.event.user.id }) + h_ans_img + "\n"
     );
@@ -377,14 +390,18 @@ async function sendNotOnlyText({
   url_list,
   query
 }) {
+  const logger = ctx.logger("sy-bot");
+  logger.info("开始执行 getAiImgUnderstandMultiple()");
   const raw_answer = await getAiImgUnderstandMultiple({
     ctx,
     config,
     url_list,
     query
   });
-  console.log("AI的回答:", raw_answer);
+  logger.info("getAiImgUnderstandMultiple执行完毕,得到AI的回答:", raw_answer);
+  logger.info("开始渲染markdown,执行textToImage");
   const h_ans_img = await textToImage(ctx, raw_answer);
+  logger.info("渲染完毕,返回图片结果!");
   session.send(
     /* @__PURE__ */ (0, import_jsx_runtime.jsx)("quote", { id: session.event.message.id }) + /* @__PURE__ */ (0, import_jsx_runtime.jsx)("at", { id: session.event.user.id }) + h_ans_img + "\n"
   );
@@ -395,6 +412,7 @@ __name(sendNotOnlyText, "sendNotOnlyText");
 function registerAsk(ctx, config) {
   ctx.command("ask", "哈基米?哈基米~").action(async ({ session }) => {
     try {
+      const logger = ctx.logger("sy-bot");
       const messageElements = session.event.message.elements;
       const quoteElements = session.event.message.quote?.elements;
       const quoteRealText = getElementsText(quoteElements);
@@ -403,28 +421,33 @@ function registerAsk(ctx, config) {
       const messageImages = getElementsImages(messageElements);
       const finalText = quoteRealText + messageRealText;
       const finalImages = [...quoteImages, ...messageImages];
-      console.log("用户输入的图像:", finalImages);
-      console.log("用户输入的文字:", finalText);
+      logger.info("用户输入的图像:", finalImages);
+      logger.info("用户输入的文字:", finalText);
       await startEmoji(session);
-      try {
-        if (finalImages.length > 0) {
-          await sendNotOnlyText({
-            ctx,
-            session,
-            config,
-            url_list: finalImages,
-            query: finalText
-          });
-        } else {
-          await sendOnlyText({ session, ctx, config, query: finalText });
-        }
-      } finally {
-        await finishEmoji(session);
+      logger.info("发送表情响应");
+      if (finalImages.length > 0) {
+        logger.info("用户发送了图片,调用图片理解api...");
+        logger.info("开始执行sendNotOnlyText...");
+        await sendNotOnlyText({
+          ctx,
+          session,
+          config,
+          url_list: finalImages,
+          query: finalText
+        });
+        logger.info("sendNotOnlyText执行结束");
+      } else {
+        logger.info("用户发送了纯文本,调用文字对话api...");
+        logger.info("开始执行sendOnlyText...");
+        await sendOnlyText({ session, ctx, config, query: finalText });
+        logger.info("sendOnlyText执行结束");
       }
     } catch (err) {
-      console.error("ask 命令执行错误:", err);
+      const logger = ctx.logger("sy-bot");
+      logger.error("ask 命令执行错误:", err);
+      return "oops!出了点叉子!";
+    } finally {
       await finishEmoji(session);
-      return "调用ask发生错误:" + (err instanceof Error ? err.message : String(err));
     }
   });
 }
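
Note: structurally this flattens the old nested try into a single try/catch/finally. finishEmoji is now guaranteed to run on every path, and a failure returns a friendly fallback reply instead of the raw error text. A minimal sketch of the handler shape, with the dispatch details elided (startEmoji and finishEmoji are the plugin's own reaction helpers, as seen in the diff):

    ctx.command("ask", "...").action(async ({ session }) => {
      try {
        await startEmoji(session);
        // ...dispatch to sendNotOnlyText or sendOnlyText...
      } catch (err) {
        ctx.logger("sy-bot").error("ask 命令执行错误:", err);
        return "oops!出了点叉子!";  // user-facing fallback
      } finally {
        await finishEmoji(session);  // reaction cleanup on every path
      }
    });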
package/lib/index.d.ts CHANGED
@@ -7,17 +7,12 @@ import { Context, h, Session } from "koishi";
 export declare function getKaoyanCountdownText(targetTime: any): string;
 export declare function getPrompt(): void;
 export declare function sendGroupMessageReaction(session: any, face_id: any, ifSet?: boolean): Promise<void>;
-export declare function getAiChatResult({ config, user_query }: {
+export declare function getAiChatResult({ config, user_query, ctx }: {
     config: any;
     user_query: any;
-}): Promise<any>;
-export declare function imageUrlToBase64(ctx: Context, url: string): Promise<string>;
-export declare function getAiImgUnderstand({ ctx, config, url, query }: {
     ctx: any;
-    config: any;
-    url: any;
-    query: any;
 }): Promise<any>;
+export declare function imageUrlToBase64(ctx: Context, url: string): Promise<string>;
 export declare function getAiImgUnderstandMultiple({ ctx, config, url_list, query, }: {
     ctx: any;
     config: any;
package/package.json CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "koishi-plugin-sy-bot",
   "description": "考研群答疑Bot,接入Gemini。",
-  "version": "0.0.7",
+  "version": "0.0.9",
   "main": "lib/index.js",
   "typings": "lib/index.d.ts",
   "files": [
@@ -12,7 +12,7 @@
   "scripts": {
     "dev": "cd ../.. && npm run dev",
     "build": "cd ../.. && npm run build",
-    "bump": "cd ../.. && npm run bump",
+    "v": "cd ../.. && npm run build && npm run bump",
     "pub": "cd ../.. && npm run pub"
   },
   "keywords": [