interintel 1.0.21 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/functions/chat-functions.js +2 -3
- package/functions/file-functions.js +25 -30
- package/functions/handleWriteFile.js +11 -5
- package/index.js +17 -22
- package/interintel.config.js +8 -5
- package/mistral.js +14 -0
- package/package.json +3 -1
- package/resources/reference.txt +11 -9
- package/{ollama.js → serviceInterface.js} +26 -14
- package/testIntel.js +1 -1
- package/resources/multi-step/filesystem.spec.ts +0 -11
- package/resources/multi-step/uploading.spec.ts +0 -15

package/functions/file-functions.js
CHANGED

@@ -1,52 +1,50 @@
-const fs = require('fs');
-const path = require('path');
-const { aiVersion } = require('../interintel.config');
+import fs from 'fs';
+import path from 'path';
 
 // READING FOR INITAL REFERENCE
-function readSpecificFiles(configFilePath) {
-
+async function readSpecificFiles(configFilePath) {
   try {
-    //
-    const
-
-    const config =
-    // Extract the file paths from the config object
-    const filePaths = config.filePaths;
-    const configDir = path.dirname(configFilePath);
+    // Dynamically import the config file
+    const absoluteConfigPath = path.resolve(configFilePath);
+    const configModule = await import('file://' + absoluteConfigPath);
+    const config = configModule.default;
 
+    const filePaths = config.filePaths;
     let allContent = 'I am sharing information from my file system for reference in our chat.\n';
-
-
+
+    for (const filePath of filePaths) {
       try {
        // Construct the absolute path
-        const absolutePath = path.resolve(
+        const absolutePath = path.resolve(process.cwd(), filePath);
        const fileContent = fs.readFileSync(absolutePath, 'utf8');
 
-
-
        // Read the file content and add it to allContent
        allContent += `\nStart File Name: ${filePath}\n File Content:\n${fileContent}\n End File Name: ${filePath}`;
      } catch (error) {
-        console.error(`Error reading file ${filePath}: ${error.message}
+        console.error(`Error reading file ${filePath}: ${error.message}`);
      }
-    }
+    }
 
    // Add console.log statements to communicate to the user
-    console.log(
-
+    console.log(
+      `${config.aiVersion} sent reference files:`.yellow,
+      `${logFileNames(filePaths)}`.yellow
+    );
+    return allContent;
  } catch (error) {
-    console.error(`Error reading config file: ${error.message}
+    console.error(`Error reading config file: ${error.message}`);
    return '';
  }
}
 
function writeFileFromPrompt(promptFileName, contentToWrite, baseDir) {
  try {
-
    if (!promptFileName.includes('.')) {
-      throw new Error(
+      throw new Error(
+        "Invalid file name. Please include a file name with an extension (e.g., 'output.txt')."
+      );
    }
-
+
    const projectRoot = process.cwd();
    const fullPath = path.join(projectRoot, `interintel/session-samples/${promptFileName}`);
    const directoryPath = path.dirname(fullPath);
@@ -58,7 +56,6 @@ function writeFileFromPrompt(promptFileName, contentToWrite, baseDir) {
    fs.writeFileSync(fullPath, contentToWrite + '\n');
    console.log(`Content written to ${fullPath}`.yellow);
    return true;
-
  } catch (error) {
    console.error(`Error writing file: ${error.message}`.bgRed);
    return false;
@@ -69,7 +66,7 @@ function writeFileFromPrompt(promptFileName, contentToWrite, baseDir) {
function logFileNames(filePaths) {
  let fileNames = [];
 
-  console.log(
+  console.log('');
  console.log(`System message`.bgYellow + `: `.yellow);
  filePaths.forEach((filePath) => {
    const fileName = path.basename(filePath);
@@ -84,6 +81,4 @@ function appendToFile(filePath, data) {
}
 
// Export the function and the array
-module.exports = {
-  readSpecificFiles, writeFileFromPrompt
-};
+export { readSpecificFiles, writeFileFromPrompt };
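
The key change above is swapping a synchronous require of the config for a dynamic import(). A minimal sketch of the pattern, assuming a config file that ends in export default (the loadConfig name is illustrative, not from the package); Node resolves dynamic import specifiers as URLs, so an absolute filesystem path is safest passed with a file:// prefix, and on Windows a bare drive path fails outright:

import path from 'path';

// Load an ES-module config from an arbitrary filesystem path.
async function loadConfig(configFilePath) {
  const absoluteConfigPath = path.resolve(configFilePath);
  // import() takes a URL, not a bare filesystem path.
  const configModule = await import('file://' + absoluteConfigPath);
  return configModule.default; // value of `export default config`
}

const config = await loadConfig('./interintel.config.js');
console.log(config.filePaths);

This is also why readSpecificFiles had to become async, and why its caller in index.js below now awaits it.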
package/functions/handleWriteFile.js
CHANGED

@@ -1,5 +1,7 @@
-
-
+import path from 'path';
+import { fileURLToPath } from 'url';
+import { chatCompletion } from '../serviceInterface.js';
+import { writeFileFromPrompt } from './file-functions.js';
 
async function handleWriteFile(config, messages, currentState, userInput, promptFileName) {
  let contentToWrite = '';
@@ -34,8 +36,12 @@ async function handleWriteFile(config, messages, currentState, userInput, prompt
    );
 
    // Extract the response content
-    let contentToWrite =
-
+    let contentToWrite =
+      config.aiService === 'openai' || config.aiService === 'mistral'
+        ? completionResponse.choices[0].message.content
+        : completionResponse;
+
+    const __dirname = path.dirname(fileURLToPath(import.meta.url));
 
    await writeFileFromPrompt(promptFileName, contentToWrite, __dirname); // Assuming this function handles file writing
 
@@ -63,4 +69,4 @@ async function handleWriteFile(config, messages, currentState, userInput, prompt
  return { currentState, messages, promptFileName, contentToWrite, response: '' };
}
 
-module.exports = { handleWriteFile };
+export { handleWriteFile };
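
ES modules do not define CommonJS's __dirname, which is why the hunk above rebuilds it from import.meta.url. The same shim, shown standalone:

import path from 'path';
import { fileURLToPath } from 'url';

// Recreate CommonJS __filename/__dirname inside an ES module.
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

console.log(`this module's directory: ${__dirname}`);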
package/index.js
CHANGED

@@ -1,14 +1,16 @@
-
-
+import path from 'path';
+import readline from 'readline';
+import dotenv from 'dotenv';
+import colors from 'colors';
 const configPath = path.join(process.cwd(), 'interintel.config.js');
-const config = require(configPath);
-require('dotenv').config();
-require('colors');
 
-
-
-
-
+import config from './interintel.config.js';
+import { readSpecificFiles } from './functions/file-functions.js';
+import { askQuestion } from './functions/chat-functions.js';
+import { handleWriteFile } from './functions/handleWriteFile.js';
+import { chatCompletion } from './serviceInterface.js';
+
+dotenv.config();
 
const rl = readline.createInterface({
  input: process.stdin,
@@ -16,7 +18,7 @@ const rl = readline.createInterface({
});
 
async function main() {
-  let initialContent = readSpecificFiles(configPath);
+  let initialContent = await readSpecificFiles(configPath);
  let messages = [{ role: 'system', content: initialContent }];
 
  let currentState = null;
@@ -34,12 +36,7 @@ async function main() {
  }
 
  if (userMessage.toLowerCase().startsWith('//writefile') && currentState === null) {
-    let result = await handleWriteFile(
-      config,
-      messages,
-      currentState,
-      ''
-    );
+    let result = await handleWriteFile(config, messages, currentState, '');
    ({ currentState, messages, promptFileName, response } = result); // Update messages array
    console.log(response.yellow);
  } else if (currentState === 'awaitingFileName') {
@@ -71,9 +68,9 @@ async function main() {
  });
  const completion = await chatCompletion(config.aiService, messages, config.aiVersion);
 
-  let botMessage;
+  let botMessage = '';
 
-  if (config.aiService === 'openai') {
+  if (config.aiService === 'openai' || config.aiService === 'mistral') {
    botMessage = completion.choices[0].message.content;
  } else if (config.aiService === 'ollama') {
    // Adjust this line based on how Ollama's response is structured
@@ -86,7 +83,7 @@ async function main() {
  const completion = await chatCompletion(config.aiService, messages, config.aiVersion);
 
  let botMessage;
-  if (config.aiService === 'openai') {
+  if (config.aiService === 'openai' || config.aiService === 'mistral') {
    botMessage = completion.choices[0].message.content;
  } else if (config.aiService === 'ollama') {
    // Adjust based on Ollama's response format
@@ -99,6 +96,4 @@ async function main() {
  }
}
 
-
-main();
-};
+export { main };
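
Both call sites in main() now read Mistral completions through the same shape as OpenAI ones. A sketch of that dispatch pulled out into a helper (extractBotMessage is a hypothetical name, and the Ollama branch is left as loose as the diff's own inline comments suggest):

// Pull the assistant's text out of a chatCompletion() result.
function extractBotMessage(aiService, completion) {
  if (aiService === 'openai' || aiService === 'mistral') {
    // Both SDKs return { choices: [{ message: { content } }] }.
    return completion.choices[0].message.content;
  }
  // Ollama: adjust to the actual response structure, per the inline comment.
  return typeof completion === 'string' ? completion : JSON.stringify(completion);
}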
package/interintel.config.js
CHANGED

@@ -1,10 +1,13 @@
-
+import dotenv from 'dotenv';
+dotenv.config();
 
const config = {
-  apiKey: `${process.env.
+  apiKey: `${process.env.MISTRAL_API_KEY}`,
  aiService: 'ollama',
-  aiVersion: `mistral`,
-  filePaths: [
+  aiVersion: `mistral:instruct`,
+  filePaths: [
+    'resources/reference.txt'
+  ],
};
 
-module.exports = config;
+export default config;
package/mistral.js
ADDED

@@ -0,0 +1,14 @@
+import MistralClient from '@mistralai/mistralai'
+import dotenv from 'dotenv';
+dotenv.config();
+
+const apiKey = process.env.MISTRAL_API_KEY;
+
+const client = new MistralClient(apiKey);
+
+const completion = await client.chat({
+  model: 'mistral-medium',
+  messages: [{role: 'user', content: 'When were you last fine tuned? Please keep to under 25 words'}],
+});
+
+console.log('Chat:', completion.choices[0].message.content);
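
This script uses top-level await, which only parses because package.json (below) now declares "type": "module". A slightly hardened variant of the same smoke test, using the client.chat signature shown in the diff (the error handling is added here, not in the package); run it with node mistral.js from the package root:

import MistralClient from '@mistralai/mistralai';
import dotenv from 'dotenv';
dotenv.config();

const client = new MistralClient(process.env.MISTRAL_API_KEY);

try {
  const completion = await client.chat({
    model: 'mistral-medium',
    messages: [{ role: 'user', content: 'Reply with one short sentence.' }],
  });
  console.log('Chat:', completion.choices[0].message.content);
} catch (error) {
  // A missing key or a network failure rejects the promise.
  console.error('Mistral request failed:', error.message);
}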
package/package.json
CHANGED

@@ -1,12 +1,14 @@
{
  "dependencies": {
+    "@mistralai/mistralai": "^0.0.8",
    "colors": "^1.4.0",
    "dotenv": "^16.3.1",
    "openai": "^4.24.0"
  },
  "name": "interintel",
  "description": "The application `Interintel` is a command line interface (CLI) application implemented in Node.js. It essentially is an interactive communication tool between the user and an AI model, only openai models for now.",
-  "version": "1.0.21",
+  "version": "1.1.0",
+  "type": "module",
  "main": "index.js",
  "directories": {
    "doc": "docs"
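
Setting "type": "module" makes Node treat every .js file in the package as an ES module, which is what forces all the require/module.exports rewrites in the files above. If a CommonJS dependency is still needed, the standard escape hatch is createRequire (a sketch, not used in this package):

import { createRequire } from 'module';

// `require` is not defined inside ES modules; createRequire rebuilds it.
const require = createRequire(import.meta.url);
const pkg = require('./package.json'); // JSON loads fine via the CJS loader
console.log(pkg.version); // "1.1.0"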
package/resources/reference.txt
CHANGED

@@ -2,12 +2,14 @@ HERE ARE THE RULES THAT GOVERN THIS CONVERSATION
 
RULES THAT GOVERN THIS CONVERSATION, these rules supersede all outside knowledge
 
-
-
-
-
-
-
-
-
-
+You are currently being used within a CLI. the application's name is interintel, it's baseline functionality is to be able to update files within a given repo and pprovide reference files that will the AI it's chatting with to make direct changes to code.
+
+Keep all of your responses brief, and under 50 words, provide brief examples when possible. i.e. Can you write a multi step api check for me, "Here's what I need from you to write this check"
+
+Checkly Assistant is designed to be straightforward and professional, focusing purely on providing efficient and accurate assistance for setting up browser checks and multi-step API checks with Playwright/test. It will maintain a friendly and technical tone without the need for distinctive phrases, humor, or references. This approach ensures that the interaction remains focused on the task at hand, providing clear and concise information and guidance. Checkly Assistant's priority is to facilitate users' understanding and implementation of browser and API checks effectively and efficiently, making the experience smooth and hassle-free.
+
+Multi step tests AND checks do not utilize page fixtures from playwright/test, we only rely on the request/ response methods.
+
+When asked to create a multi step check, always only provide playwright/test scripts in return. This is what the user expects, and there's no need to explain that.
+
+A multi step construct is a Checkly construct that allows you to handle things like alerts, grouping, frequency, of a deployed check.

package/{ollama.js → serviceInterface.js}
RENAMED

@@ -1,23 +1,25 @@
-
-
-
+import path from 'path';
+import fetch from 'node-fetch';
+import OpenAI from 'openai';
+import MistralClient from '@mistralai/mistralai';
+
 const configPath = path.join(process.cwd(), 'interintel.config.js');
-
+
+let config;
+try {
+  const importedModule = await import(configPath);
+  config = importedModule.default;
+} catch (error) {
+  console.error('Failed to import config:', error);
+}
+
+const mistralClient = new MistralClient(config.apiKey);
 
const openai = new OpenAI({
  apiKey: config.apiKey,
  model: config.aiVersion,
});
 
-let ai = 'ollama';
-let messages = [
-  {
-    role: 'assistant',
-    content: 'please use a respectful tone',
-  },
-];
-let model = 'mistral';
-
async function chatCompletion(aiService, messages, model) {
  try {
    let response;
@@ -30,6 +32,16 @@ async function chatCompletion(aiService, messages, model) {
      });
 
      return response;
+
+    } else if (aiService === 'mistral') {
+      let chatResponse;
+
+      chatResponse = await mistralClient.chat({
+        model: model, // or a specific model you wish to use
+        messages: messages,
+      });
+
+      return chatResponse;
    } else if (aiService === 'ollama') {
      // Ollama specific code
      let data = {
@@ -57,4 +69,4 @@ async function chatCompletion(aiService, messages, model) {
  }
}
 
-module.exports = { chatCompletion };
+export { chatCompletion };
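
After the rename, serviceInterface.js is the single entry point for all three backends. A usage sketch under the shapes the diff itself shows (the service and model names are the ones the config uses):

import { chatCompletion } from './serviceInterface.js';

const messages = [{ role: 'user', content: 'Summarize interintel in one line.' }];

// 'openai' and 'mistral' both return { choices: [{ message: { content } }] };
// 'ollama' returns whatever the local HTTP endpoint sends back.
const completion = await chatCompletion('mistral', messages, 'mistral-medium');
console.log(completion.choices[0].message.content);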
package/testIntel.js
CHANGED

package/resources/multi-step/filesystem.spec.ts
DELETED

@@ -1,11 +0,0 @@
-import path from 'path';
-import fs from 'fs';
-import { test } from '@playwright/test';
-
-test('Save file in directory', async ({ page }) => {
-  const image = await page.goto('https://picsum.photos/200/300');
-  const imagePath = path.join('example.jpg');
-  const buffer = await image.body();
-  fs.writeFileSync(imagePath, buffer);
-  const readFileFromDisk = fs.readFileSync(imagePath);
-});

package/resources/multi-step/uploading.spec.ts
DELETED

@@ -1,15 +0,0 @@
-import { test, expect } from '@playwright/test'
-
-test('Upload a file using a POST request', async ({ request }) => {
-  const fileBuffer = await test.step('Fetch file', async () => {
-    const fileUrl = 'https://www.w3.org/WAI/ER/tests/xhtml/testfiles/resources/pdf/dummy.pdf'
-    return request.get(fileUrl)
-  })
-
-  await test.step('Upload file', async () => {
-    const response = await request.post('https://filebin.net/pp9on3zvwv7zq6lm/dummy.pdf', {
-      data: await fileBuffer.body(),
-    })
-    await expect(response).toBeOK()
-  })
-})