interintel 1.0.20 → 1.0.22

This diff reflects the publicly released contents of these package versions as they appear in their respective public registries, and is provided for informational purposes only.
package/functions/chat-functions.js CHANGED
@@ -6,6 +6,5 @@ async function askQuestion(rl, prompt) {
  });
  }

- module.exports = {
- askQuestion,
- }
+ export { askQuestion };
+
package/functions/file-functions.js CHANGED
@@ -1,52 +1,50 @@
- const fs = require('fs');
- const path = require('path');
- const { aiVersion } = require('../interintel.config');
+ import fs from 'fs';
+ import path from 'path';

  // READING FOR INITAL REFERENCE
- function readSpecificFiles(configFilePath) {
-
+ async function readSpecificFiles(configFilePath) {
  try {
- // Read the content of the config file
- const configContent = fs.readFileSync(configFilePath, 'utf8');
- // Parse the config file content as JavaScript
- const config = eval(configContent);
- // Extract the file paths from the config object
- const filePaths = config.filePaths;
- const configDir = path.dirname(configFilePath);
+ // Dynamically import the config file
+ const absoluteConfigPath = path.resolve(configFilePath);
+ const configModule = await import('file://' + absoluteConfigPath);
+ const config = configModule.default;

+ const filePaths = config.filePaths;
  let allContent = 'I am sharing information from my file system for reference in our chat.\n';
-
- filePaths.forEach((filePath) => {
+
+ for (const filePath of filePaths) {
  try {
  // Construct the absolute path
- const absolutePath = path.resolve(configDir, filePath);
+ const absolutePath = path.resolve(process.cwd(), filePath);
  const fileContent = fs.readFileSync(absolutePath, 'utf8');

-
-
  // Read the file content and add it to allContent
  allContent += `\nStart File Name: ${filePath}\n File Content:\n${fileContent}\n End File Name: ${filePath}`;
  } catch (error) {
- console.error(`Error reading file ${filePath}: ${error.message}`.bgRed);
+ console.error(`Error reading file ${filePath}: ${error.message}`);
  }
- });
+ }

  // Add console.log statements to communicate to the user
- console.log(`${aiVersion} sent reference files:`.yellow, `${logFileNames(filePaths)}`.yellow);
- return allContent;
+ console.log(
+ `${config.aiVersion} sent reference files:`.yellow,
+ `${logFileNames(filePaths)}`.yellow
+ );
+ return allContent;
  } catch (error) {
- console.error(`Error reading config file: ${error.message}`.bgRed);
+ console.error(`Error reading config file: ${error.message}`);
  return '';
  }
  }

  function writeFileFromPrompt(promptFileName, contentToWrite, baseDir) {
  try {
-
  if (!promptFileName.includes('.')) {
- throw new Error("Invalid file name. Please include a file name with an extension (e.g., 'output.txt').");
+ throw new Error(
+ "Invalid file name. Please include a file name with an extension (e.g., 'output.txt')."
+ );
  }
-
+
  const projectRoot = process.cwd();
  const fullPath = path.join(projectRoot, `interintel/session-samples/${promptFileName}`);
  const directoryPath = path.dirname(fullPath);
@@ -58,7 +56,6 @@ function writeFileFromPrompt(promptFileName, contentToWrite, baseDir) {
  fs.writeFileSync(fullPath, contentToWrite + '\n');
  console.log(`Content written to ${fullPath}`.yellow);
  return true;
-
  } catch (error) {
  console.error(`Error writing file: ${error.message}`.bgRed);
  return false;
@@ -69,7 +66,7 @@ function writeFileFromPrompt(promptFileName, contentToWrite, baseDir) {
  function logFileNames(filePaths) {
  let fileNames = [];

- console.log("")
+ console.log('');
  console.log(`System message`.bgYellow + `: `.yellow);
  filePaths.forEach((filePath) => {
  const fileName = path.basename(filePath);
@@ -84,6 +81,4 @@ function appendToFile(filePath, data) {
  }

  // Export the function and the array
- module.exports = {
- readSpecificFiles, writeFileFromPrompt
- };
+ export { readSpecificFiles, writeFileFromPrompt };
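
The headline change in this file is swapping eval of the raw config text for a native ESM dynamic import. A minimal standalone sketch of that pattern (paths here are illustrative; pathToFileURL is used in place of the diff's manual 'file://' string concatenation, which assumes POSIX paths):

    import path from 'path';
    import { pathToFileURL } from 'url';

    // Resolve the config to an absolute path, then load it as an ES module.
    // import() returns the module namespace object; the config object itself
    // lives on the default export.
    const configPath = path.resolve(process.cwd(), 'interintel.config.js');
    const { default: config } = await import(pathToFileURL(configPath).href);

    console.log(config.filePaths);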
package/functions/handleWriteFile.js CHANGED
@@ -1,10 +1,9 @@
- const path = require('path');
- const { aiChatCompletion } = require('./openai-functions.js');
- const { writeFileFromPrompt } = require('./file-functions.js');
- const configPath = path.join(process.cwd(), 'interintel.config.js');
- const config = require(configPath);
+ import path from 'path';
+ import { fileURLToPath } from 'url';
+ import { chatCompletion } from '../serviceInterface.js';
+ import { writeFileFromPrompt } from './file-functions.js';

- async function handleWriteFile(openai, config, messages, currentState, userInput, promptFileName) {
+ async function handleWriteFile(config, messages, currentState, userInput, promptFileName) {
  let contentToWrite = '';

  if (currentState === null) {
@@ -17,28 +16,39 @@ async function handleWriteFile(openai, config, messages, currentState, userInput
  };
  } else if (currentState === 'awaitingFileName') {
  promptFileName = userInput;
- currentState = 'awaitingGPTPrompt';
+ currentState = 'awaitingAIprompt';
  return {
  currentState,
  messages,
  promptFileName,
  response: `Please provide a prompt for ${config.aiVersion}:`,
  };
- } else if (currentState === 'awaitingGPTPrompt') {
- const promptForGPT = userInput;
+ } else if (currentState === 'awaitingAIprompt') {
+ const promptForAI = userInput;
+
+ let updatedMessages = [...messages, { role: 'user', content: promptForAI }];
+
  try {
- let gptResponse = await aiChatCompletion(
- openai,
- [{ role: 'user', content: promptForGPT }],
+ let completionResponse = await chatCompletion(
+ config.aiService,
+ updatedMessages,
  config.aiVersion
  );
- contentToWrite = gptResponse.choices[0].message.content;
+
+ // Extract the response content
+ let contentToWrite =
+ config.aiService === 'openai'
+ ? completionResponse.choices[0].message.content
+ : completionResponse;
+
+ const __dirname = path.dirname(fileURLToPath(import.meta.url));
+
  await writeFileFromPrompt(promptFileName, contentToWrite, __dirname); // Assuming this function handles file writing

  currentState = null; // Reset state after completing the operation
  return {
  currentState,
- messages,
+ messages: updatedMessages,
  promptFileName,
  contentToWrite,
  response: `Content written to ${promptFileName}`.yellow,
@@ -47,7 +57,7 @@ async function handleWriteFile(openai, config, messages, currentState, userInput
  console.error('Error in handleWriteFile:', error);
  return {
  currentState,
- messages,
+ messages: updatedMessages, // Return the updated messages array
  promptFileName,
  contentToWrite,
  response: 'An error occurred while writing the file.',
@@ -59,4 +69,4 @@ async function handleWriteFile(openai, config, messages, currentState, userInput
  return { currentState, messages, promptFileName, contentToWrite, response: '' };
  }

- module.exports = { handleWriteFile };
+ export { handleWriteFile };
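
For orientation, handleWriteFile implements a small state machine that spans three calls from the main loop. A schematic sketch of the transitions (hypothetical helper, not the package's code):

    // //writefile flow, one user input per step:
    // null -> 'awaitingFileName' -> 'awaitingAIprompt' -> null
    function nextState(currentState) {
      if (currentState === null) return 'awaitingFileName'; // ask for a file name
      if (currentState === 'awaitingFileName') return 'awaitingAIprompt'; // ask for a prompt
      return null; // AI response has been written to the file; reset
    }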
package/functions/openai-functions.js CHANGED
@@ -3,6 +3,7 @@ async function aiChatCompletion(openai, messages, model) {
  const response = await openai.chat.completions.create({
  messages: messages,
  model: model,
+ stream: false
  });
  return response;
  } catch (error) {
package/index.js CHANGED
@@ -1,20 +1,16 @@
- const path = require('path')
- const OpenAI = require('openai');
- const readline = require('readline');
+ import path from 'path';
+ import readline from 'readline';
+ import dotenv from 'dotenv';
+ import colors from 'colors';
  const configPath = path.join(process.cwd(), 'interintel.config.js');
- const config = require(configPath);
- require('dotenv').config();
- require('colors');

- const { readSpecificFiles } = require('./functions/file-functions.js');
- const { askQuestion } = require('./functions/chat-functions.js');
- const { aiChatCompletion } = require('./functions/openai-functions.js');
- const { handleWriteFile } = require('./functions/handleWriteFile.js');
+ import config from './interintel.config.js';
+ import { readSpecificFiles } from './functions/file-functions.js';
+ import { askQuestion } from './functions/chat-functions.js';
+ import { handleWriteFile } from './functions/handleWriteFile.js';
+ import { chatCompletion } from './serviceInterface.js';

- const openai = new OpenAI({
- apiKey: config.apiKey,
- model: config.aiVersion,
- });
+ dotenv.config();

  const rl = readline.createInterface({
  input: process.stdin,
@@ -22,7 +18,7 @@ const rl = readline.createInterface({
  });

  async function main() {
- let initialContent = readSpecificFiles(configPath);
+ let initialContent = await readSpecificFiles(configPath);
  let messages = [{ role: 'system', content: initialContent }];

  let currentState = null;
@@ -30,7 +26,7 @@ async function main() {

  while (true) {
  const userMessage = await askQuestion(rl, 'You: '.blue.bold);
- let response = ''; // Add a variable to capture the response message
+ let response = '';

  // Exit condition
  if (userMessage.toLowerCase() === 'exit') {
@@ -40,17 +36,11 @@ async function main() {
  }

  if (userMessage.toLowerCase().startsWith('//writefile') && currentState === null) {
- ({ currentState, messages, promptFileName, response } = await handleWriteFile(
- openai,
- config,
- messages,
- currentState,
- ''
- ));
+ let result = await handleWriteFile(config, messages, currentState, '');
+ ({ currentState, messages, promptFileName, response } = result); // Update messages array
  console.log(response.yellow);
  } else if (currentState === 'awaitingFileName') {
  ({ currentState, messages, promptFileName, response } = await handleWriteFile(
- openai,
  config,
  messages,
  currentState,
@@ -58,9 +48,8 @@ async function main() {
  promptFileName
  ));
  console.log(response.yellow);
- } else if (currentState === 'awaitingGPTPrompt') {
+ } else if (currentState === 'awaitingAIprompt') {
  ({ currentState, messages, promptFileName, response } = await handleWriteFile(
- openai,
  config,
  messages,
  currentState,
@@ -75,27 +64,36 @@ async function main() {
  let content = readSpecificFiles(configPath);
  messages.push({
  role: 'user',
- content: `please just acknowledge you have read the name and the content of the files I have provided ${content}`,
+ content: `please just acknowledge you have read the name and the content of the files I have provided. once you have done this a single time you do not need to do it again. ${content}`,
  });
- const completion = await aiChatCompletion(openai, messages, config.aiVersion);
+ const completion = await chatCompletion(config.aiService, messages, config.aiVersion);

- const botMessage = completion.choices[0].message.content;
- console.log(`${config.aiVersion}`.bgGreen, botMessage);
- console.log('----------------'.bgGreen);
+ let botMessage = '';
+
+ if (config.aiService === 'openai' || config.aiService === 'mistral') {
+ botMessage = completion.choices[0].message.content;
+ } else if (config.aiService === 'ollama') {
+ // Adjust this line based on how Ollama's response is structured
+ botMessage = completion;
+ }
  } else {
  // Regular message processing and interaction with GPT model
  messages.push({ role: 'user', content: userMessage });

- const completion = await aiChatCompletion(openai, messages, config.aiVersion);
+ const completion = await chatCompletion(config.aiService, messages, config.aiVersion);
+
+ let botMessage;
+ if (config.aiService === 'openai' || config.aiService === 'mistral') {
+ botMessage = completion.choices[0].message.content;
+ } else if (config.aiService === 'ollama') {
+ // Adjust based on Ollama's response format
+ botMessage = completion; // Example - replace with actual response structure for Ollama
+ }

- const botMessage = completion.choices[0].message.content;
- console.log(`${config.aiVersion}`.bgGreen, botMessage);
+ console.log(`${config.aiVersion}`.bgGreen, botMessage.green);
  console.log('----------------'.bgGreen);
  }
  }
  }

- exports.main = function() {
- main()
- }
-
+ export { main };
package/interintel.config.js CHANGED
@@ -1,10 +1,11 @@
- require('dotenv').config();
+ import dotenv from 'dotenv';
+ dotenv.config();

  const config = {
- apiKey: `${process.env.OPENAI_API_KEY}`,
- hosted: 'public',
- aiVersion: `gpt-3.5-turbo`,
- filePaths: ['./interintel/session-samples/updatedReadSpecificFiles.js', './functions/file-functions.js'],
+ apiKey: `${process.env.MISTRAL_API_KEY}`,
+ aiService: 'mistral',
+ aiVersion: `mistral-tiny`,
+ filePaths: ['serviceInterface.js'],
  };

- module.exports = config;
+ export default config;
package/mistral.js ADDED
@@ -0,0 +1,14 @@
+ import MistralClient from '@mistralai/mistralai'
+ import dotenv from 'dotenv';
+ dotenv.config();
+
+ const apiKey = process.env.MISTRAL_API_KEY;
+
+ const client = new MistralClient(apiKey);
+
+ const completion = await client.chat({
+ model: 'mistral-medium',
+ messages: [{role: 'user', content: 'When were you last fine tuned? Please keep to under 25 words'}],
+ });
+
+ console.log('Chat:', completion.choices[0].message.content);
package/package.json CHANGED
@@ -1,12 +1,14 @@
  {
  "dependencies": {
+ "@mistralai/mistralai": "^0.0.8",
  "colors": "^1.4.0",
  "dotenv": "^16.3.1",
  "openai": "^4.24.0"
  },
  "name": "interintel",
  "description": "The application `Interintel` is a command line interface (CLI) application implemented in Node.js. It essentially is an interactive communication tool between the user and an AI model, only openai models for now.",
- "version": "1.0.20",
+ "version": "1.0.22",
+ "type": "module",
  "main": "index.js",
  "directories": {
  "doc": "docs"
@@ -1,9 +1,12 @@
  require('dotenv').config();

  const config = {
+ // this is service dependent
  apiKey: `${process.env.OPENAI_API_KEY}`,
- // only open ai models for now
- aiVersion: `ONLY_USE_OPENAI_MODEL`,
+ // openai || ollama
+ aiService: 'ONE_OF_THE_ABOVE',
+ // only open ai or ollama models for now
+ aiVersion: `ONLY_USE_OPENAI_OR_OLLAMA_MODELS`,
  // These filepaths are relative to where your config is created
  filePaths: ['interintel/interintelReadMe.md'],
  };
package/serviceInterface.js ADDED
@@ -0,0 +1,72 @@
+ import path from 'path';
+ import fetch from 'node-fetch';
+ import OpenAI from 'openai';
+ import MistralClient from '@mistralai/mistralai';
+
+ const configPath = path.join(process.cwd(), 'interintel.config.js');
+
+ let config;
+ try {
+ const importedModule = await import(configPath);
+ config = importedModule.default;
+ } catch (error) {
+ console.error('Failed to import config:', error);
+ }
+
+ const mistralClient = new MistralClient(config.apiKey);
+
+ const openai = new OpenAI({
+ apiKey: config.apiKey,
+ model: config.aiVersion,
+ });
+
+ async function chatCompletion(aiService, messages, model) {
+ try {
+ let response;
+
+ if (aiService === 'openai') {
+ response = await openai.chat.completions.create({
+ messages: messages,
+ model: model,
+ stream: false,
+ });
+
+ return response;
+
+ } else if (aiService === 'mistral') {
+ let chatResponse;
+
+ chatResponse = await mistralClient.chat({
+ model: model, // or a specific model you wish to use
+ messages: messages,
+ });
+
+ return chatResponse;
+ } else if (aiService === 'ollama') {
+ // Ollama specific code
+ let data = {
+ messages,
+ model,
+ stream: false,
+ };
+ const fetchResponse = await fetch('http://localhost:11434/api/chat', {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ body: JSON.stringify(data),
+ });
+
+ // Properly resolve the response
+ response = await fetchResponse.json();
+ return response.message.content;
+ } else {
+ throw new Error('Invalid AI service');
+ }
+ } catch (error) {
+ console.error('Error:', error);
+ return null;
+ }
+ }
+
+ export { chatCompletion };
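
A minimal usage sketch of the new chatCompletion dispatcher (the message text and model name are illustrative). Per the branches above, openai and mistral return a completion object while ollama returns the reply string directly, so callers normalize the shape themselves:

    import { chatCompletion } from './serviceInterface.js';

    const messages = [{ role: 'user', content: 'Say hello in five words.' }];
    const completion = await chatCompletion('mistral', messages, 'mistral-tiny');

    // openai/mistral: object with choices; ollama: plain string.
    // (chatCompletion returns null on error, which is not handled here.)
    const text = typeof completion === 'string'
      ? completion
      : completion.choices[0].message.content;
    console.log(text);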
@@ -1,32 +0,0 @@
- // role: 'system',
- // content: `I am sharing information from my file system for reference in our chat.\n
- // File Name: ${fileMsg.fileName}\nContent:\n${fileMsg.content}
- // \n Content for File Name: ${fileMsg.fileName}`
-
-
- function readSpecificFiles(configFilePath) {
- try {
- const configContent = fs.readFileSync(configFilePath, 'utf8');
- const config = eval(configContent);
- const filePaths = config.filePaths;
- const configDir = path.dirname(configFilePath);
-
- let allContent = 'I am sharing information from my file system for reference in our chat.\n';
-
- filePaths.forEach((filePath) => {
- try {
- const absolutePath = path.resolve(configDir, filePath);
- const fileContent = fs.readFileSync(absolutePath, 'utf8');
-
- allContent += `\nFile Name: ${filePath}\nContent:\n${fileContent}\n`;
- } catch (error) {
- console.error(`Error reading file ${filePath}: ${error.message}`.bgRed);
- }
- });
-
- return allContent;
- } catch (error) {
- console.error(`Error reading config file: ${error.message}`.bgRed);
- return '';
- }
- }
package/ollama.js DELETED
@@ -1,41 +0,0 @@
- const fetch = require('node-fetch');
-
- let ai = 'ollama';
- let messages = [
- {
- role: 'assistant',
- content: 'please use a respectful tone',
- },
- {
- role: 'assistant',
- content: 'when asked for a code reference, please provide only the code with no commentary or explanation just the code. No commentary or explanation. NO COMMENTARY OR EXPLANATION',
- },
- {
- role: 'user',
- content: 'how can I most effectively persist chat history with you? Is every conversation then dependent on a finding a way to persist history by sending along the ongoing chat for you to continually reference context?',
- },
- ];
- let model = 'mistral';
-
- async function ollamaChatCompletion(ai, messages, model) {
- if (ai === 'ollama') {
- let data = {
- messages,
- model,
- stream: false,
- };
-
- fetch('http://localhost:11434/api/chat', {
- method: 'POST',
- headers: {
- 'Content-Type': 'application/json',
- },
- body: JSON.stringify(data),
- })
- .then((response) => response.json())
- .then((data) => console.log(data, 'data baby')) // or process the data as needed
- .catch((error) => console.error('Error:', error));
- }
- }
-
- ollamaChatCompletion(ai, messages, model);