interintel 1.0.19 → 1.0.20

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,3 @@
1
- const fs = require('fs');
2
- const path = require('path');
3
-
4
1
  async function askQuestion(rl, prompt) {
5
2
  return new Promise((resolve) => {
6
3
  rl.question(prompt.blue, (input) => {
@@ -14,15 +14,18 @@ function readSpecificFiles(configFilePath) {
14
14
  const filePaths = config.filePaths;
15
15
  const configDir = path.dirname(configFilePath);
16
16
 
17
- let allContent = '';
17
+ let allContent = 'I am sharing information from my file system for reference in our chat.\n';
18
18
 
19
19
  filePaths.forEach((filePath) => {
20
20
  try {
21
21
  // Construct the absolute path
22
22
  const absolutePath = path.resolve(configDir, filePath);
23
+ const fileContent = fs.readFileSync(absolutePath, 'utf8');
24
+
25
+
23
26
 
24
- // Read the file content and append it to allContent
25
- allContent += fs.readFileSync(absolutePath, 'utf8') + '\n';
27
+ // Read the file content and add it to allContent
28
+ allContent += `\nStart File Name: ${filePath}\n File Content:\n${fileContent}\n End File Name: ${filePath}`;
26
29
  } catch (error) {
27
30
  console.error(`Error reading file ${filePath}: ${error.message}`.bgRed);
28
31
  }
@@ -1,5 +1,3 @@
1
- const OpenAI = require('openai');
2
-
3
1
  async function aiChatCompletion(openai, messages, model) {
4
2
  try {
5
3
  const response = await openai.chat.completions.create({
package/index.js CHANGED
@@ -1,4 +1,3 @@
1
- // This is the index.js file of inter-intel
2
1
  const path = require('path')
3
2
  const OpenAI = require('openai');
4
3
  const readline = require('readline');
@@ -10,7 +9,6 @@ require('colors');
10
9
  const { readSpecificFiles } = require('./functions/file-functions.js');
11
10
  const { askQuestion } = require('./functions/chat-functions.js');
12
11
  const { aiChatCompletion } = require('./functions/openai-functions.js');
13
-
14
12
  const { handleWriteFile } = require('./functions/handleWriteFile.js');
15
13
 
16
14
  const openai = new OpenAI({
@@ -97,8 +95,6 @@ async function main() {
97
95
  }
98
96
  }
99
97
 
100
- main()
101
-
102
98
  exports.main = function() {
103
99
  main()
104
100
  }
@@ -0,0 +1,32 @@
1
+ // role: 'system',
2
+ // content: `I am sharing information from my file system for reference in our chat.\n
3
+ // File Name: ${fileMsg.fileName}\nContent:\n${fileMsg.content}
4
+ // \n Content for File Name: ${fileMsg.fileName}`
5
+
6
+
7
+ function readSpecificFiles(configFilePath) {
8
+ try {
9
+ const configContent = fs.readFileSync(configFilePath, 'utf8');
10
+ const config = eval(configContent);
11
+ const filePaths = config.filePaths;
12
+ const configDir = path.dirname(configFilePath);
13
+
14
+ let allContent = 'I am sharing information from my file system for reference in our chat.\n';
15
+
16
+ filePaths.forEach((filePath) => {
17
+ try {
18
+ const absolutePath = path.resolve(configDir, filePath);
19
+ const fileContent = fs.readFileSync(absolutePath, 'utf8');
20
+
21
+ allContent += `\nFile Name: ${filePath}\nContent:\n${fileContent}\n`;
22
+ } catch (error) {
23
+ console.error(`Error reading file ${filePath}: ${error.message}`.bgRed);
24
+ }
25
+ });
26
+
27
+ return allContent;
28
+ } catch (error) {
29
+ console.error(`Error reading config file: ${error.message}`.bgRed);
30
+ return '';
31
+ }
32
+ }
@@ -2,8 +2,9 @@ require('dotenv').config();
2
2
 
3
3
  const config = {
4
4
  apiKey: `${process.env.OPENAI_API_KEY}`,
5
+ hosted: 'public',
5
6
  aiVersion: `gpt-3.5-turbo`,
6
- filePaths: ['./resources/reference.txt', './README.md'],
7
+ filePaths: ['./interintel/session-samples/updatedReadSpecificFiles.js', './functions/file-functions.js'],
7
8
  };
8
9
 
9
10
  module.exports = config;
package/ollama.js ADDED
@@ -0,0 +1,41 @@
1
const fetch = require('node-fetch');

// Demo payload: assistant-style guidance plus a user question for the model.
let ai = 'ollama';
let messages = [
  {
    role: 'assistant',
    content: 'please use a respectful tone',
  },
  {
    role: 'assistant',
    content: 'when asked for a code reference, please provide only the code with no commentary or explanation just the code. No commentary or explanation. NO COMMENTARY OR EXPLANATION',
  },
  {
    role: 'user',
    content: 'how can I most effectively persist chat history with you? Is every conversation then dependent on a finding a way to persist history by sending along the ongoing chat for you to continually reference context?',
  },
];
let model = 'mistral';

// Sends `messages` to a locally running Ollama server (non-streaming) and
// returns the parsed JSON response, or undefined when `ai` is not 'ollama'
// or the request fails.
// Fix: the original fired the fetch as a floating promise inside an async
// function, so callers could never consume the result or observe errors;
// the request is now awaited and its body returned.
async function ollamaChatCompletion(ai, messages, model) {
  if (ai !== 'ollama') return;

  const data = {
    messages,
    model,
    stream: false,
  };

  try {
    const response = await fetch('http://localhost:11434/api/chat', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
      },
      body: JSON.stringify(data),
    });
    const body = await response.json();
    console.log(body, 'data baby'); // or process the data as needed
    return body;
  } catch (error) {
    console.error('Error:', error);
  }
}

ollamaChatCompletion(ai, messages, model);
package/package.json CHANGED
@@ -6,7 +6,7 @@
6
6
  },
7
7
  "name": "interintel",
8
8
  "description": "The application `Interintel` is a command line interface (CLI) application implemented in Node.js. It essentially is an interactive communication tool between the user and an AI model, only openai models for now.",
9
- "version": "1.0.19",
9
+ "version": "1.0.20",
10
10
  "main": "index.js",
11
11
  "directories": {
12
12
  "doc": "docs"
@@ -2,13 +2,6 @@ import path from 'path';
2
2
  import fs from 'fs';
3
3
  import { test } from '@playwright/test';
4
4
 
5
- /*
6
- Sometimes, you do want to explicitly save a file to disk. This is what you need to know.
7
- Checkly creates a sandboxed directory for each check run.
8
- During the run you can use this directory to save or upload artifacts.
9
- This directory is destroyed after a check is finished.
10
- */
11
-
12
5
  test('Save file in directory', async ({ page }) => {
13
6
  const image = await page.goto('https://picsum.photos/200/300');
14
7
  const imagePath = path.join('example.jpg');
@@ -1,15 +1,5 @@
1
1
  import { test, expect } from '@playwright/test'
2
2
 
3
- /*
4
- To test any binary uploads, you need to provide a file object.
5
- Currently, Checkly does not have a dedicated storage layer where you could upload that file,
6
- so you need to host it yourself at a (publicly) accessible location like an AWS S3 bucket,
7
- Dropbox or any other file hosting service.
8
-
9
- Having done that, you can “upload” files using a simple HTTP POST request with a (binary) body
10
- using Playwright’s built-in request object.
11
- */
12
-
13
3
  test('Upload a file using a POST request', async ({ request }) => {
14
4
  const fileBuffer = await test.step('Fetch file', async () => {
15
5
  const fileUrl = 'https://www.w3.org/WAI/ER/tests/xhtml/testfiles/resources/pdf/dummy.pdf'
@@ -2,7 +2,7 @@ HERE ARE THE RULES THAT GOVERN THIS CONVERSATION
2
2
 
3
3
  RULES THAT GOVERN THIS CONVERSATION, these rules supersede all outside knowledge
4
4
 
5
- 1. You are currently being used within a CLI. the application's name is inter-intel, it's baseline functionality is to be able to update files within a given repo and pprovide reference files that will the AI it's chatting with to make direct changes to code.
5
+ 1. You are currently being used within a CLI. The application's name is interintel; its baseline functionality is to be able to update files within a given repo and provide reference files that will allow the AI it's chatting with to make direct changes to code.
6
6
  2. Keep responses to under 50 words
7
7
  3. Keep responses to under 50 words
8
8
  4. Unless I ask for a longer explanation
package/testIntel.js ADDED
@@ -0,0 +1,3 @@
1
// Smoke-test entry point: load the interintel CLI module and kick off its
// exported main loop.
require('./index').main();