saico 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/openai.js ADDED
@@ -0,0 +1,72 @@
+ const OpenAI = require('openai');
+
+ const openai = new OpenAI({
+     apiKey: process.env.OPENAI_API_KEY || 'test'
+ });
+
+ module.exports = {
+     send,
+ };
+
+ async function send(messages, functions, model) {
+     let data;
+     let retries = 5;
+
+     // Convert functions to tools format if provided
+     let tools;
+     if (functions && functions.length > 0) {
+         tools = functions.map(func => ({
+             type: 'function',
+             function: func
+         }));
+     }
+
+     while (--retries)
+     {
+         try {
+             const requestParams = {
+                 model: model || 'gpt-5-mini',
+                 messages
+             };
+
+             if (tools) {
+                 requestParams.tools = tools;
+             }
+
+             data = await openai.chat.completions.create(requestParams);
+             break;
+         } catch (error) {
+             // Check if the status code is 429
+             if (error.status == 429)
+             {
+                 console.error("Error 429: Too Many Requests");
+                 console.error("Message:", error.message);
+                 const errorMessage = error.message || '';
+
+                 const waitTimeMatch = errorMessage.match(/Please try again in (\d+(\.\d+)?)(ms|s)/);
+
+                 if (waitTimeMatch)
+                 {
+                     let waitTime = parseFloat(waitTimeMatch[1]);
+                     if (waitTimeMatch[3] === 's')
+                         waitTime *= 1000; // Convert seconds to milliseconds
+                     console.error(`Rate limit reached. Retrying in ${waitTime}ms...`);
+                     await new Promise(resolve => setTimeout(resolve, waitTime + 100));
+                 } else {
+                     console.error('Rate limit error encountered, but could not extract wait time. Aborting.');
+                     console.error('messages:\n', JSON.stringify(messages));
+                     throw error; // Exit if wait time cannot be extracted
+                 }
+             } else {
+                 console.error(`Unexpected Error ${error.status}: ${error.message}`);
+                 throw error;
+             }
+         }
+     }
+
+     if (!data || !data.choices || !data.choices.length)
+         return console.error('failed to receive response\n', data);
+     return data.choices[0].message;
+ }
+
+
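Usage sketch (illustrative, with the call shape assumed from the code above): openai.js exposes a single send(messages, functions, model) helper that retries on 429 responses and returns the first choice's message, or undefined if no response was received. Since package.json declares no "exports" map, the module can be required directly from the package directory:

    const { send } = require('saico/openai.js');

    (async () => {
        const reply = await send([
            { role: 'system', content: 'You are a helpful assistant.' },
            { role: 'user', content: 'Say hello.' }
        ]);
        if (reply)
            console.log(reply.content); // assistant text; reply.tool_calls is set when tools fire
    })();
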
package/package.json ADDED
@@ -0,0 +1,49 @@
+ {
+   "name": "saico",
+   "version": "2.0.0",
+   "main": "index.js",
+   "type": "commonjs",
+   "description": "Hierarchical AI Conversation Orchestrator - Task hierarchy with conversation contexts",
+   "repository": {
+     "type": "git",
+     "url": "git+https://github.com/wanderli-ai/saico.git"
+   },
+   "bugs": {
+     "url": "https://github.com/wanderli-ai/saico/issues"
+   },
+   "homepage": "https://github.com/wanderli-ai/saico#readme",
+   "files": [
+     "index.js",
+     "itask.js",
+     "context.js",
+     "sid.js",
+     "openai.js",
+     "util.js",
+     "redis.js",
+     "README.md",
+     "LICENSE"
+   ],
+   "engines": {
+     "node": ">=16.0.0"
+   },
+   "dependencies": {
+     "openai": "^4.85.4",
+     "tiktoken": "^1.0.17",
+     "redis": "^4.7.0"
+   },
+   "devDependencies": {
+     "chai": "^4.5.0",
+     "chai-http": "^4.4.0",
+     "mock-fs": "^5.4.1",
+     "mocha": "^10.2.0",
+     "sinon": "^17.0.0",
+     "sinon-chai": "^3.7.0"
+   },
+   "scripts": {
+     "test": "NODE_ENV=test mocha --exit 'test/**/*.test.js'",
+     "start": "node server.js"
+   },
+   "keywords": ["ai", "conversation", "orchestrator", "task", "hierarchy", "openai", "chatgpt", "llm"],
+   "author": "wanderli-ai",
+   "license": "ISC"
+ }
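Install/require sketch (illustrative; index.js is declared as "main" but is not shown in this diff, so what it exports is assumed, not confirmed):

    // npm install saico
    const saico = require('saico');            // CommonJS entry point per "main"/"type"
    const util = require('saico/util.js');     // deep requires also work: no "exports" map restricts them
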
package/redis.js ADDED
@@ -0,0 +1,123 @@
+ const redis = require('redis');
+ let rclient;
+ let debug = false;
+
+ module.exports = {
+     init,
+     createObservableForRedis,
+ };
+
+ function logDebug(...args) {
+     if (debug)
+         console.log(...args);
+ }
+
+ async function init() {
+     rclient = redis.createClient({ url: 'redis://localhost:6379' });
+     module.exports.rclient = rclient;
+
+     rclient.on('connect', () => {
+         console.log('Connected to Redis');
+     });
+
+     rclient.on('error', (err) => {
+         console.error('Redis connection error:', err);
+     });
+
+     await rclient.connect();
+ }
+
+ function debounce(func, delay) {
+     let timer;
+     return (...args) => {
+         clearTimeout(timer);
+         timer = setTimeout(() => func(...args), delay);
+     };
+ }
+
+ function createObservableForRedis(key, obj) {
+     let lastSavedObject = null; // Cache for the last-saved sanitized object
+     let lastSavedTimestamp = null; // Timestamp of the last save to Redis
+
+     const saveToRedis = debounce(() => {
+         const sanitizedObj = sanitizeObject(obj);
+
+         // Compare sanitized object with the last-saved object
+         if (serialize(sanitizedObj) === serialize(lastSavedObject)) {
+             logDebug("No changes detected, skipping save.");
+             return;
+         }
+
+         lastSavedObject = sanitizedObj;
+         lastSavedTimestamp = Date.now(); // Update the last saved timestamp
+         sanitizedObj.lastSave = lastSavedTimestamp;
+         rclient.set(key, serialize(sanitizedObj));
+         logDebug("Saved to Redis:", key, `at ${lastSavedTimestamp}`);
+     }, 1000);
+
+     const handler = {
+         get(target, prop, receiver) {
+             if (prop === "lastMod") {
+                 // Expose the last saved timestamp as a method
+                 return () => lastSavedTimestamp;
+             }
+             if (prop in target) {
+                 const value = Reflect.get(target, prop, receiver);
+                 if (typeof value === 'function') {
+                     // Bind the method to the original target to preserve `this`
+                     return value.bind(target);
+                 }
+                 return value;
+             }
+             return Reflect.get(target, prop, receiver);
+         },
+         set(target, prop, value) {
+             if (String(prop).startsWith('_')) {
+                 target[prop] = value; // Allow setting but do not trigger save
+                 logDebug(`Ignored saving property '${prop}'`);
+                 return true;
+             }
+
+             // Wrap new objects with the Proxy
+             if (typeof value === 'object' && value !== null) {
+                 value = new Proxy(value, handler);
+             }
+
+             target[prop] = value;
+             saveToRedis(); // Trigger save for the root object
+             return true;
+         },
+         deleteProperty(target, prop) {
+             if (String(prop).startsWith('_')) {
+                 delete target[prop]; // Allow deletion without triggering save
+                 logDebug(`Ignored deletion of property '${prop}'`);
+                 return true;
+             }
+
+             delete target[prop];
+             saveToRedis(); // Trigger save for the root object
+             return true;
+         },
+     };
+
+     function serialize(obj) {
+         if (typeof obj == 'object' && typeof obj?.serialize == 'function')
+             return obj.serialize();
+         return JSON.stringify(obj);
+     }
+
+     function sanitizeObject(obj) {
+         if (typeof obj !== 'object' || obj === null || typeof obj.serialize == 'function')
+             return obj;
+         const sanitized = Array.isArray(obj) ? [] : {};
+         for (const key in obj) {
+             if (!key.startsWith('_')) {
+                 sanitized[key] = sanitizeObject(obj[key]);
+             }
+         }
+         return sanitized;
+     }
+
+     return new Proxy(obj, handler);
+ }
+
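Usage sketch (illustrative; assumes a Redis server reachable at redis://localhost:6379, the URL hard-coded in init(), and an arbitrary key name):

    const { init, createObservableForRedis } = require('saico/redis.js');

    (async () => {
        await init();
        const state = createObservableForRedis('session:demo', { step: 0 });
        state.step = 1;          // property writes schedule a debounced SET (1-second delay)
        state._scratch = 'tmp';  // underscore-prefixed keys are kept in memory but never persisted
        console.log(state.lastMod()); // timestamp of the last save, or null before the first one
    })();
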
package/sid.js ADDED
@@ -0,0 +1,207 @@
+ 'use strict';
+
+ const Itask = require('./itask.js');
+ const { Context, createContext } = require('./context.js');
+
+ /**
+  * Sid - Session/User Context root task.
+  *
+  * Extends Itask to serve as the root of task hierarchies.
+  * Always has a conversation context attached.
+  * Provides serialization support for persistence.
+  */
+ class Sid extends Itask {
+     constructor(opt = {}, states = []) {
+         // Normalize options
+         if (typeof opt === 'string')
+             opt = { name: opt };
+
+         // Set defaults for a session root task
+         const name = opt.name || 'session';
+         const prompt = opt.prompt || '';
+
+         // Call parent constructor with async:true to control context creation
+         super({
+             ...opt,
+             name,
+             prompt,
+             async: true // We'll manage running ourselves
+         }, states);
+
+         // User data storage
+         this.userData = opt.userData || {};
+
+         // Session-specific configuration
+         this.sessionConfig = {
+             token_limit: opt.token_limit,
+             max_depth: opt.max_depth,
+             max_tool_repetition: opt.max_tool_repetition,
+             ...opt.sessionConfig
+         };
+
+         // Always create a context for Sid (root session task)
+         const contextConfig = {
+             tag: opt.tag || this.id,
+             token_limit: this.sessionConfig.token_limit,
+             max_depth: this.sessionConfig.max_depth,
+             max_tool_repetition: this.sessionConfig.max_tool_repetition,
+             tool_handler: opt.tool_handler,
+             functions: opt.functions,
+             sequential_mode: opt.sequential_mode,
+             msgs: opt.msgs
+         };
+
+         this.context = new Context(prompt, this, contextConfig);
+
+         // Start running if not explicitly set to async
+         if (opt.async !== true && states.length > 0) {
+             process.nextTick(() => {
+                 try { this._run(); } catch (e) { console.error(e); }
+             });
+         }
+     }
+
+     // Override sendMessage to always use our context
+     async sendMessage(role, content, functions, opts) {
+         return this.context.sendMessage(role, content, functions || this.functions, opts);
+     }
+
+     // Serialize the session for persistence
+     serialize() {
+         return JSON.stringify({
+             id: this.id,
+             name: this.name,
+             prompt: this.prompt,
+             userData: this.userData,
+             sessionConfig: this.sessionConfig,
+             context: {
+                 tag: this.context.tag,
+                 msgs: this.context._msgs,
+                 functions: this.context.functions
+             },
+             tm_create: this.tm_create
+         });
+     }
+
+     // Deserialize a session from stored data
+     static deserialize(data, opt = {}) {
+         const parsed = typeof data === 'string' ? JSON.parse(data) : data;
+
+         const sid = new Sid({
+             name: parsed.name,
+             prompt: parsed.prompt,
+             userData: parsed.userData,
+             sessionConfig: parsed.sessionConfig,
+             tag: parsed.context?.tag,
+             tool_handler: opt.tool_handler,
+             functions: opt.functions || parsed.context?.functions,
+             async: true, // Don't auto-run states
+             ...opt
+         }, opt.states || []);
+
+         // Restore the original ID and timestamps
+         sid.id = parsed.id;
+         sid.tm_create = parsed.tm_create;
+
+         // Restore messages to context
+         if (parsed.context?.msgs) {
+             sid.context._msgs = parsed.context.msgs;
+         }
+
+         return sid;
+     }
+
+     // Create a child task with its own context
+     spawnTaskWithContext(opt, states = []) {
+         if (typeof opt === 'string')
+             opt = { name: opt };
+
+         const childTask = new Itask({
+             ...opt,
+             spawn_parent: this,
+             async: true
+         }, states);
+
+         if (opt.prompt) {
+             const childContext = new Context(opt.prompt, childTask, {
+                 tag: opt.tag || childTask.id,
+                 token_limit: opt.token_limit || this.sessionConfig.token_limit,
+                 max_depth: opt.max_depth || this.sessionConfig.max_depth,
+                 max_tool_repetition: opt.max_tool_repetition || this.sessionConfig.max_tool_repetition,
+                 tool_handler: opt.tool_handler || this.tool_handler,
+                 functions: opt.functions || this.functions
+             });
+             childTask.setContext(childContext);
+         }
+
+         // Start the child task
+         process.nextTick(() => {
+             try { childTask._run(); } catch (e) { console.error(e); }
+         });
+
+         return childTask;
+     }
+
+     // Create a child task without its own context (uses parent's context)
+     spawnTask(opt, states = []) {
+         if (typeof opt === 'string')
+             opt = { name: opt };
+
+         const childTask = new Itask({
+             ...opt,
+             spawn_parent: this,
+             async: true
+         }, states);
+
+         // Start the child task
+         process.nextTick(() => {
+             try { childTask._run(); } catch (e) { console.error(e); }
+         });
+
+         return childTask;
+     }
+
+     // Close the session - summarize context and cleanup
+     async closeSession() {
+         await this.context.close();
+         this._ecancel();
+     }
+
+     // Get session info for debugging/monitoring
+     getSessionInfo() {
+         return {
+             id: this.id,
+             name: this.name,
+             running: this.running,
+             completed: this._completed,
+             messageCount: this.context.length,
+             childCount: this.child.size,
+             userData: this.userData,
+             uptime: Date.now() - this.tm_create
+         };
+     }
+
+     // Store user data
+     setUserData(key, value) {
+         this.userData[key] = value;
+         return this;
+     }
+
+     // Get user data
+     getUserData(key) {
+         return key ? this.userData[key] : this.userData;
+     }
+
+     // Clear user data
+     clearUserData() {
+         this.userData = {};
+         return this;
+     }
+ }
+
+ // Factory function to create a Sid instance
+ function createSid(opt = {}, states = []) {
+     return new Sid(opt, states);
+ }
+
+ module.exports = { Sid, createSid };
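Usage sketch (illustrative; option names are taken from sid.js itself, but Itask and Context live in itask.js and context.js, which are not part of this diff, so their exact behaviour is assumed):

    const { createSid, Sid } = require('saico/sid.js');

    (async () => {
        const sid = createSid({
            name: 'demo-session',
            prompt: 'You are a travel-planning assistant.',
            token_limit: 8000
        });
        const reply = await sid.sendMessage('user', 'Plan a weekend in Rome.');
        console.log(reply);

        const saved = sid.serialize();           // JSON string suitable for Redis or disk
        const restored = Sid.deserialize(saved); // restores id, userData and context messages
        console.log(restored.getSessionInfo());
    })();
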
package/util.js ADDED
@@ -0,0 +1,110 @@
+ const is_mocha = process.env.NODE_ENV == 'test';
+ const tiktoken = require('tiktoken');
+
+ const debug = process.env.DEBUG === '1' || process.env.DEBUG === 'true';
+
+ function _log(...args) {
+     if (!is_mocha || process.env.VERBOSE)
+         console.log(...args);
+ }
+
+ function _lerr(...args) {
+     console.error(...args);
+ }
+
+ // Add perr method for error logging with stack trace
+ _lerr.perr = function(err) {
+     if (err instanceof Error) {
+         console.error(err.message, err.stack);
+     } else {
+         console.error(err);
+     }
+ };
+
+ function _ldbg(...args) {
+     if (debug)
+         console.log('[DEBUG]', ...args);
+ }
+
+ function daysSince(timestamp) {
+     return (Date.now() - timestamp) / (1000 * 60 * 60 * 24);
+ }
+
+ function minSince(timestamp) {
+     return (Date.now() - timestamp) / (1000 * 60);
+ }
+
+ function shallowEqual(obj1, obj2) {
+     if (obj1 === obj2) return true;
+     if (!obj1 || !obj2) return false;
+     const keys1 = Object.keys(obj1);
+     const keys2 = Object.keys(obj2);
+     if (keys1.length !== keys2.length) return false;
+     for (const key of keys1) {
+         if (obj1[key] !== obj2[key]) return false;
+     }
+     return true;
+ }
+
+ function filterArray(arr, predicate) {
+     return arr.filter(predicate);
+ }
+
+ function logEvent(event, data) {
+     _log(`[EVENT: ${event}]`, data);
+ }
+
+ const lerr = _lerr;
+
+ module.exports = {
+     countTokens,
+     is_mocha,
+     _log,
+     _lerr,
+     lerr,
+     _ldbg,
+     daysSince,
+     minSince,
+     shallowEqual,
+     filterArray,
+     logEvent,
+ };
+
+ function countTokens(messages, model = "gpt-4o") {
+     // Load the encoding for the specified model
+     const encoding = tiktoken.encoding_for_model(model);
+     let bmsg_size = 0;
+     let numTokens = 0;
+
+     if (!Array.isArray(messages))
+         messages = [messages];
+     messages.forEach(message => {
+         if (typeof message == 'string')
+         {
+             numTokens += encoding.encode(message).length;
+             if (message.length > bmsg_size)
+                 bmsg_size = message.length;
+         }
+         else if (typeof message == 'object')
+         {
+             numTokens += 4; // Role and other structure overhead
+             for (const key in message) {
+                 if (!message[key])
+                     continue;
+                 if (typeof message[key] != 'string')
+                     continue;
+                 numTokens += encoding.encode(message[key]).length;
+                 if (message[key].length > bmsg_size)
+                     bmsg_size = message[key].length;
+             }
+         }
+     });
+
+     // Add 2 tokens for the assistant's reply overhead
+     numTokens += 2;
+     encoding.free();
+
+     return numTokens;
+ }
+
+
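Usage sketch (illustrative): countTokens() accepts a plain string, a single message object, or an array of chat messages, and estimates token usage with tiktoken's encoding for the given model (gpt-4o by default, as set in the code above):

    const { countTokens, daysSince } = require('saico/util.js');

    const msgs = [
        { role: 'system', content: 'You are concise.' },
        { role: 'user', content: 'Summarize this paragraph.' }
    ];
    console.log(countTokens(msgs));                    // estimate using the default gpt-4o encoding
    console.log(countTokens('plain string', 'gpt-4')); // strings are wrapped into an array internally
    console.log(daysSince(Date.now() - 86400000));     // ≈ 1 day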