@sesamespace/sesame 0.2.6 → 0.2.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +33 -7
  2. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -13,7 +13,7 @@
13
13
  * heartbeats, reconnection, and message routing automatically.
14
14
  */
15
15
  // Plugin version (injected from package.json at build time, fallback to static)
16
- const PLUGIN_VERSION = "0.2.6";
16
+ const PLUGIN_VERSION = "0.2.7";
17
17
  /** Standard headers for Sesame API calls */
18
18
  function sesameHeaders(apiKey, json = true) {
19
19
  const h = {
@@ -46,6 +46,18 @@ function guessContentType(fileName) {
46
46
  };
47
47
  return map[ext] ?? "application/octet-stream";
48
48
  }
49
+ /** Detect gateway-generated error messages that shouldn't appear as normal chat */
50
+ const ERROR_PATTERNS = [
51
+ /^The AI service is temporarily overloaded/,
52
+ /^The AI service is temporarily unavailable/,
53
+ /^The AI service returned an error/,
54
+ /^⚠️.*error/i,
55
+ /^Error:/i,
56
+ ];
57
+ function isGatewayErrorMessage(text) {
58
+ const trimmed = text.trim();
59
+ return ERROR_PATTERNS.some((p) => p.test(trimmed));
60
+ }
49
61
  const sesameChannelPlugin = {
50
62
  id: "sesame",
51
63
  meta: {
@@ -182,10 +194,12 @@ const sesameChannelPlugin = {
182
194
  chatId: channelId,
183
195
  };
184
196
  }
197
+ // Detect error messages from the gateway and tag them appropriately
198
+ const intent = isGatewayErrorMessage(text) ? "error" : "chat";
185
199
  const response = await fetch(`${account.apiUrl}/api/v1/channels/${channelId}/messages`, {
186
200
  method: "POST",
187
201
  headers: sesameHeaders(account.apiKey),
188
- body: JSON.stringify({ content: text, kind: "text", intent: "chat" }),
202
+ body: JSON.stringify({ content: text, kind: "text", intent }),
189
203
  });
190
204
  if (!response.ok) {
191
205
  const error = (await response.json().catch(() => ({})));
@@ -753,7 +767,11 @@ async function handleMessage(message, account, state, ctx) {
753
767
  ctx: inboundCtx,
754
768
  });
755
769
  // ── Streaming reply: send initial message, then edit in place ──
756
- const sesameStreamMode = cfg.channels?.sesame?.streamMode ?? "buffer";
770
+ // Only stream for human senders in DM channels; agents get buffer mode
771
+ const configStreamMode = cfg.channels?.sesame?.streamMode ?? "buffer";
772
+ const senderKind = meta.senderKind ?? message.sender?.kind;
773
+ const isHumanDm = channelKind === "dm" && senderKind === "human";
774
+ const sesameStreamMode = isHumanDm ? configStreamMode : "buffer";
757
775
  const replyBuffer = [];
758
776
  let streamMessageId = null;
759
777
  let streamSending = false; // lock to prevent concurrent first-message sends
@@ -779,10 +797,11 @@ async function handleMessage(message, account, state, ctx) {
779
797
  log.error?.(`[sesame] Circuit breaker: suppressing outbound message`);
780
798
  return null;
781
799
  }
800
+ const intent = isGatewayErrorMessage(text) ? "error" : "chat";
782
801
  const res = await fetch(`${account.apiUrl}/api/v1/channels/${channelId}/messages`, {
783
802
  method: "POST",
784
803
  headers: sesameHeaders(account.apiKey),
785
- body: JSON.stringify({ content: text, kind: "text", intent: "chat" }),
804
+ body: JSON.stringify({ content: text, kind: "text", intent }),
786
805
  });
787
806
  if (!res.ok) {
788
807
  const err = await res.text().catch(() => "");
@@ -839,9 +858,16 @@ async function handleMessage(message, account, state, ctx) {
839
858
  log.info?.(`[sesame] Final stream edit (${fullReply.length} chars), marked as delivered`);
840
859
  }
841
860
  else {
842
- // No streaming happened — don't send here, let outbound.sendText handle it
843
- // (avoids double-send when OpenClaw calls both deliver + outbound.sendText)
844
- log.info?.(`[sesame] Buffer mode: deferring to outbound.sendText (${fullReply.length} chars)`);
861
+ // No streaming happened — send the buffered reply directly
862
+ // (outbound.sendText is not reliably called by OC core in all configurations)
863
+ const sentId = await sendNewMessage(fullReply);
864
+ if (sentId) {
865
+ streamDelivered.set(channelId, { messageId: sentId, text: fullReply });
866
+ log.info?.(`[sesame] Buffer mode: sent directly (${fullReply.length} chars), msgId=${sentId}`);
867
+ }
868
+ else {
869
+ log.error?.(`[sesame] Buffer mode: direct send failed (${fullReply.length} chars)`);
870
+ }
845
871
  }
846
872
  };
847
873
  const { dispatcher, replyOptions, markDispatchIdle } = core.channel.reply.createReplyDispatcherWithTyping({
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@sesamespace/sesame",
3
- "version": "0.2.6",
3
+ "version": "0.2.7",
4
4
  "description": "Sesame channel plugin for OpenClaw — connect your AI agent to the Sesame messaging platform",
5
5
  "type": "module",
6
6
  "main": "dist/index.js",