json-object-editor 0.10.657 → 0.10.660

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1474,9 +1474,15 @@ this.executeJOEAiPrompt = async function(data, req, res) {
     system: system,
     messages: [],
     source: body.source || "widget",
+    // Optional scope for object-scoped widget chats
+    scope_itemtype: body.scope_itemtype || null,
+    scope_id: body.scope_id || null,
     created: new Date().toISOString(),
     joeUpdated: new Date().toISOString()
   };
+  if (body.name && !convo.name) {
+    convo.name = String(body.name);
+  }
 
   const saved = await new Promise(function (resolve, reject) {
     // Widget conversations are lightweight and do not need full history diffs.
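For context, a minimal sketch of a request body that exercises the new optional fields. Only the body fields (source, scope_itemtype, scope_id, name) come from the hunk above; the endpoint path, fetch wiring, and all values are hypothetical:

    // Hypothetical client-side call; the route and all values are illustrative only.
    const body = {
      source: "object_chat",        // widget chat launched from a record
      scope_itemtype: "client",     // hypothetical itemtype
      scope_id: "client_12345",     // hypothetical object _id
      name: "Chat about Acme Corp"  // optional conversation name, picked up by the new guard
    };
    fetch("/joe/ai/widget", {       // hypothetical route
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify(body)
    });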
@@ -1598,7 +1604,76 @@ this.executeJOEAiPrompt = async function(data, req, res) {
     return { success: false, error: "Conversation not found" };
   }
 
+  // Best-effort: if this is an object-scoped conversation and we have
+  // not yet attached any files, walk the scoped object for uploader
+  // style files that have OpenAI ids and cache them on the convo.
+  try{
+    if ((!convo.attached_openai_file_ids || !convo.attached_openai_file_ids.length) &&
+        convo.scope_itemtype && convo.scope_id) {
+      var scopedObj = null;
+      try{
+        scopedObj = $J.get(convo.scope_id, convo.scope_itemtype) || $J.get(convo.scope_id);
+      }catch(_e){}
+      if (scopedObj && typeof scopedObj === 'object') {
+        var ids = [];
+        var meta = [];
+        Object.keys(scopedObj).forEach(function(field){
+          var val = scopedObj[field];
+          if (!Array.isArray(val)) { return; }
+          val.forEach(function(f){
+            if (f && f.openai_file_id) {
+              ids.push(f.openai_file_id);
+              meta.push({
+                itemtype: scopedObj.itemtype || convo.scope_itemtype,
+                field: field,
+                name: f.filename || '',
+                role: f.file_role || null,
+                openai_file_id: f.openai_file_id
+              });
+            }
+          });
+        });
+        if (ids.length) {
+          convo.attached_openai_file_ids = ids;
+          convo.attached_files_meta = meta;
+        }
+      }
+    }
+  }catch(_e){ /* non-fatal */ }
+
   convo.messages = normalizeMessages(convo.messages);
+
+  // On the very first turn of an object-scoped widget conversation,
+  // pre-load a slimmed understandObject snapshot so the assistant
+  // immediately knows which record "this client/task/..." refers to
+  // without having to remember to call MCP. We keep this snapshot
+  // concise via slimUnderstandObjectResult and only inject it once.
+  try{
+    var isObjectChat = (convo.source === 'object_chat') && convo.scope_id;
+    var hasMessages = Array.isArray(convo.messages) && convo.messages.length > 0;
+    if (isObjectChat && !hasMessages){
+      const uo = await callMCPTool('understandObject', {
+        _id: convo.scope_id,
+        itemtype: convo.scope_itemtype || undefined,
+        depth: 1,
+        slim: true
+      }, { req });
+      const slimmed = slimUnderstandObjectResult(uo);
+      if (slimmed) {
+        convo.messages = convo.messages || [];
+        convo.messages.push({
+          role: 'system',
+          content: JSON.stringify({
+            tool: 'understandObject',
+            scope_object: slimmed
+          })
+        });
+      }
+    }
+  }catch(_e){
+    console.warn('[chatgpt] widgetMessage understandObject preload failed', _e && _e.message || _e);
+  }
+
   const nowIso = new Date().toISOString();
 
   // Append user message
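The preload above pushes a single system message whose content is a JSON string. A sketch of its shape, assuming slimUnderstandObjectResult returns a compact object (the inner keys shown are illustrative, not guaranteed by that helper):

    // Shape of the injected first-turn system message; values are hypothetical.
    const injected = {
      role: 'system',
      content: JSON.stringify({
        tool: 'understandObject',
        scope_object: {
          _id: 'client_12345',   // hypothetical
          itemtype: 'client',    // hypothetical
          name: 'Acme Corp'      // hypothetical
        }
      })
    };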
@@ -1645,22 +1720,79 @@ this.executeJOEAiPrompt = async function(data, req, res) {
   const model = (assistantObj && assistantObj.ai_model) || convo.model || body.model || "gpt-5.1";
 
   // Prefer explicit system text on the conversation, then assistant instructions.
-  const systemText = (convo.system && String(convo.system)) ||
+  const baseSystemText = (convo.system && String(convo.system)) ||
     (assistantObj && assistantObj.instructions) ||
     "";
+
+  // When this conversation was launched from an object ("Start Chat"
+  // on a record), include a small scope hint so the assistant knows
+  // which object id/itemtype to use with MCP tools like
+  // understandObject/search. We keep this concise to avoid
+  // unnecessary tokens but still make the scope unambiguous.
+  let systemText = baseSystemText;
+  try{
+    if (convo.source === 'object_chat' && convo.scope_id) {
+      const scopeLine = '\n\n---\nJOE scope_object:\n'
+        + '- itemtype: ' + String(convo.scope_itemtype || 'unknown') + '\n'
+        + '- _id: ' + String(convo.scope_id) + '\n'
+        + 'When you need this object\'s details, call the MCP tool "understandObject" '
+        + 'with these identifiers, or search for related records using the MCP search tools.\n';
+      systemText = (baseSystemText || '') + scopeLine;
+    }
+  }catch(_e){ /* non-fatal */ }
+
+  // Build the messages array for the model. We deliberately separate
+  // the stored `convo.messages` from the model-facing payload so we
+  // can annotate the latest user turn with uploaded_files metadata
+  // without altering the persisted history.
   const messagesForModel = convo.messages.map(function (m) {
     return { role: m.role, content: m.content };
   });
+  // If we have attached file metadata, wrap the latest user turn in a
+  // small JSON envelope so the model can see which files exist and how
+  // they are labeled (role, name, origin field) while still receiving
+  // the raw user input as `input`.
+  try{
+    if (convo.attached_files_meta && convo.attached_files_meta.length && messagesForModel.length) {
+      var lastMsg = messagesForModel[messagesForModel.length - 1];
+      if (lastMsg && lastMsg.role === 'user' && typeof lastMsg.content === 'string') {
+        lastMsg.content = JSON.stringify({
+          uploaded_files: convo.attached_files_meta,
+          input: lastMsg.content
+        }, null, 2);
+      }
+    }
+  }catch(_e){ /* non-fatal */ }
+
+  // Collect OpenAI file ids from scoped object attachments and any
+  // assistant-level files so they are available to the model via the
+  // shared attachFilesToResponsesPayload helper inside runWithTools.
+  var openaiFileIds = [];
+  if (Array.isArray(convo.attached_openai_file_ids) && convo.attached_openai_file_ids.length){
+    openaiFileIds = openaiFileIds.concat(convo.attached_openai_file_ids);
+  }
+  try{
+    if (assistantObj && Array.isArray(assistantObj.assistant_files)) {
+      assistantObj.assistant_files.forEach(function(f){
+        if (f && f.openai_file_id) {
+          openaiFileIds.push(f.openai_file_id);
+        }
+      });
+    }
+  }catch(_e){}
 
   // Use runWithTools so that, when an assistant has tools configured,
-  // we let the model call those tools via MCP before generating a
-  // final response.
+  // we let the model call those tools via MCP / function tools before
+  // generating a final response. Attach any discovered OpenAI files
+  // so the model can read from them as needed.
   const runResult = await runWithTools({
     openai: openai,
     model: model,
     systemText: systemText,
     messages: messagesForModel,
     assistant: assistantObj,
+    attachments_mode: (body.attachments_mode || 'direct'),
+    openai_file_ids: openaiFileIds.length ? openaiFileIds : null,
     req: req
   });
 
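Putting the envelope logic together: when attached_files_meta is non-empty and the latest turn is a user message, the content sent to the model becomes a JSON string like the following (structure per the code above; all values are illustrative):

    {
      "uploaded_files": [
        {
          "itemtype": "client",
          "field": "contracts",
          "name": "msa.pdf",
          "role": "reference",
          "openai_file_id": "file-abc123"
        }
      ],
      "input": "Summarize the attached contract."
    }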
@@ -3,7 +3,7 @@
 
 var schema = {
   title: "Ai Assistant | ${name}",
-  display: "Ai Assist",
+  display: "Ai Assistant",
   info: "An AI Assistant configuration linked to OpenAI, managed within Joe.",
   // Curated summary for agents and tools
   summary:{
@@ -329,6 +329,20 @@ var schema = {
       display: "File IDs (auto populated on sync)",
       comment: "Auto-managed list of OpenAI file_ids attached to this Assistant."
     },
+    {
+      name:"assistant_files",
+      type:"uploader",
+      allowmultiple:true,
+      height:"220px",
+      display:"Assistant Files (uploaded)",
+      comment:"Drag files here to upload. Files will be mirrored to OpenAI (when configured) and attached whenever this assistant is used.",
+      onConfirm:_joe.SERVER.Plugins.awsConnect,
+      file_roles:[
+        { value:"reference",    label:"Reference" },
+        { value:"instructions", label:"Instructions" },
+        { value:"example",      label:"Example", default:true }
+      ]
+    },
     { section_end: "files" },
     {section_start:'categorization',collapsed:true},
     'datasets',
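Given how the server code reads assistant_files (filename, file_role, openai_file_id), a stored entry presumably looks something like this after upload and OpenAI mirroring (field names inferred from usage, values hypothetical):

    {
      "filename": "style-guide.md",
      "file_role": "instructions",
      "openai_file_id": "file-xyz789"
    }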
@@ -31,6 +31,12 @@ var schema = {
     { name:'messages', type:'objectList' },
     { name:'last_message_at', type:'string', format:'date-time' },
     { name:'source', type:'string' },
+    // Optional scope for object-scoped chats (Start Chat on an object)
+    { name:'scope_itemtype', type:'string' },
+    { name:'scope_id', type:'string' },
+    // Optional attached OpenAI Files (ids + meta) for this conversation
+    { name:'attached_openai_file_ids', type:'string', isArray:true },
+    { name:'attached_files_meta', type:'objectList' },
     { name:'user', type:'string', isReference:true, targetSchema:'user' },
     { name:'user_name', type:'string' },
     { name:'user_color', type:'string' },
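A conversation document using the new optional fields might look like this (a sketch only; every value is hypothetical, and attached_files_meta entries follow the envelope shape shown earlier):

    {
      "source": "object_chat",
      "scope_itemtype": "task",
      "scope_id": "task_98765",
      "attached_openai_file_ids": ["file-abc123"],
      "attached_files_meta": [ /* entries with itemtype, field, name, role, openai_file_id */ ]
    }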
@@ -176,6 +182,31 @@ var schema = {
 
     { section_start: "meta", collapsed: true },
     "source",
+    {
+      name:"scope_itemtype",
+      type:"text",
+      display:"Scope Itemtype",
+      comment:"When set, this conversation is scoped to a specific object type (e.g., client, task)."
+    },
+    {
+      name:"scope_id",
+      type:"text",
+      display:"Scope ID",
+      comment:"When set, this conversation is scoped to a specific object _id."
+    },
+    {
+      name:"attached_openai_file_ids",
+      type:"text",
+      display:"Attached OpenAI File IDs",
+      locked:true
+    },
+    {
+      name:"attached_files_meta",
+      type:"code",
+      display:"Attached Files Meta (JSON)",
+      height:"160px",
+      comment:"Read-only meta for files attached to this conversation (itemtype, field, name, role, openai_file_id)."
+    },
     "tags",
     { section_end: "meta" },