@notebook-intelligence/notebook-intelligence 1.2.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +674 -0
- package/README.md +151 -0
- package/lib/api.d.ts +47 -0
- package/lib/api.js +246 -0
- package/lib/chat-sidebar.d.ts +80 -0
- package/lib/chat-sidebar.js +1277 -0
- package/lib/handler.d.ts +8 -0
- package/lib/handler.js +36 -0
- package/lib/index.d.ts +7 -0
- package/lib/index.js +1078 -0
- package/lib/markdown-renderer.d.ts +10 -0
- package/lib/markdown-renderer.js +60 -0
- package/lib/tokens.d.ts +93 -0
- package/lib/tokens.js +51 -0
- package/lib/utils.d.ts +17 -0
- package/lib/utils.js +163 -0
- package/package.json +219 -0
- package/schema/plugin.json +42 -0
- package/src/api.ts +333 -0
- package/src/chat-sidebar.tsx +2171 -0
- package/src/handler.ts +51 -0
- package/src/index.ts +1398 -0
- package/src/markdown-renderer.tsx +140 -0
- package/src/svg.d.ts +4 -0
- package/src/tokens.ts +114 -0
- package/src/utils.ts +204 -0
- package/style/base.css +590 -0
- package/style/icons/copilot-warning.svg +1 -0
- package/style/icons/copilot.svg +1 -0
- package/style/icons/copy.svg +1 -0
- package/style/icons/sparkles.svg +1 -0
- package/style/index.css +1 -0
- package/style/index.js +1 -0
package/README.md
ADDED
@@ -0,0 +1,151 @@
+# Notebook Intelligence
+
+Notebook Intelligence (NBI) is an AI coding assistant and extensible AI framework for JupyterLab. It can use GitHub Copilot or AI models from any other LLM Provider, including local models from [Ollama](https://ollama.com/). NBI greatly boosts the productivity of JupyterLab users with AI assistance.
+
+See the blog posts below for features and usage.
+
+- [Introducing Notebook Intelligence!](https://notebook-intelligence.github.io/notebook-intelligence/blog/2025/01/08/introducing-notebook-intelligence.html)
+- [Building AI Extensions for JupyterLab](https://notebook-intelligence.github.io/notebook-intelligence/blog/2025/02/05/building-ai-extensions-for-jupyterlab.html)
+- [Building AI Agents for JupyterLab](https://notebook-intelligence.github.io/notebook-intelligence/blog/2025/02/09/building-ai-agents-for-jupyterlab.html)
+- [Notebook Intelligence now supports any LLM Provider and AI Model!](https://notebook-intelligence.github.io/notebook-intelligence/blog/2025/03/05/support-for-any-llm-provider.html)
+
+### Code generation with inline chat
+
+
+
+### Auto-complete
+
+<img src="media/inline-completion.gif" alt="Auto-complete" width=700 />
+
+### Chat interface
+
+<img src="media/copilot-chat.gif" alt="Chat interface" width=600 />
+
+[](https://github.com/notebook-intelligence/notebook-intelligence/actions/workflows/build.yml)
+
+## Configuring LLM Provider and models
+
+You can configure the model provider and model options using the Notebook Intelligence Settings dialog. You can access this dialog from the JupyterLab Settings menu -> Notebook Intelligence Settings, by using the `/settings` command in Copilot Chat, or from the command palette. For more details, see the [blog post](https://notebook-intelligence.github.io/notebook-intelligence/blog/2025/03/05/support-for-any-llm-provider.html).
+
+<img src="media/provider-list.png" alt="Settings dialog" width=500 />
+
+Notebook Intelligence extension for JupyterLab
+
+This extension is composed of a Python package named `notebook_intelligence`
+for the server extension and an NPM package named `@notebook-intelligence/notebook-intelligence`
+for the frontend extension.
+
+## Requirements
+
+- JupyterLab >= 4.0.0
+
+## Install
+
+To install the extension, execute:
+
+```bash
+pip install notebook_intelligence
+```
+
+## Uninstall
+
+To remove the extension, execute:
+
+```bash
+pip uninstall notebook_intelligence
+```
+
+## Configuration options
+
+### Remembering GitHub Copilot login
+
+Notebook Intelligence uses the system keyring to store GitHub access tokens. If your stored access token fails to log in (due to expiration or other reasons), you will be prompted to log in again in the UI. If you run into issues with this feature, check the Jupyter server logs and the [keyring package](https://github.com/jaraco/keyring) documentation.
+
+To let Notebook Intelligence remember your GitHub access token after you log in:
+
+```bash
+jupyter lab --NotebookIntelligence.github_access_token=remember
+```
+
+Once you set it to remember, it will continue to do so even if you omit `--NotebookIntelligence.github_access_token` on subsequent launches. To forget the stored GitHub access token:
+
+```bash
+jupyter lab --NotebookIntelligence.github_access_token=forget
+```
+
+## Troubleshoot
+
+If you are seeing the frontend extension, but it is not working, check
+that the server extension is enabled:
+
+```bash
+jupyter server extension list
+```
+
+If the server extension is installed and enabled, but you are not seeing
+the frontend extension, check that the frontend extension is installed:
+
+```bash
+jupyter labextension list
+```
+
+## Contributing
+
+### Development install
+
+Note: You will need NodeJS to build the extension package.
+
+The `jlpm` command is JupyterLab's pinned version of
+[yarn](https://yarnpkg.com/) that is installed with JupyterLab. You may use
+`yarn` or `npm` in lieu of `jlpm` below.
+
+```bash
+# Clone the repo to your local environment
+# Change directory to the notebook_intelligence directory
+# Install package in development mode
+pip install -e "."
+# Link your development version of the extension with JupyterLab
+jupyter labextension develop . --overwrite
+# Server extension must be manually installed in develop mode
+jupyter server extension enable notebook_intelligence
+# Rebuild extension Typescript source after making changes
+jlpm build
+```
+
+You can watch the source directory and run JupyterLab at the same time in different terminals to watch for changes in the extension's source and automatically rebuild the extension.
+
+```bash
+# Watch the source directory in one terminal, automatically rebuilding when needed
+jlpm watch
+# Run JupyterLab in another terminal
+jupyter lab
+```
+
+With the watch command running, every saved change will immediately be built locally and available in your running JupyterLab. Refresh JupyterLab to load the change in your browser (you may need to wait several seconds for the extension to be rebuilt).
+
+By default, the `jlpm build` command generates the source maps for this extension to make it easier to debug using the browser dev tools. To also generate source maps for the JupyterLab core extensions, you can run the following command:
+
+```bash
+jupyter lab build --minimize=False
+```
+
+### Development uninstall
+
+```bash
+# Server extension must be manually disabled in develop mode
+jupyter server extension disable notebook_intelligence
+pip uninstall notebook_intelligence
+```
+
+In development mode, you will also need to remove the symlink created by `jupyter labextension develop`
+command. To find its location, you can run `jupyter labextension list` to figure out where the `labextensions`
+folder is located. Then you can remove the symlink named `@notebook-intelligence/notebook-intelligence` within that folder.
+
+### Packaging the extension
+
+See [RELEASE](RELEASE.md)
+
+### Resources I used as reference
+
+- [Copilot Internals blog post](https://thakkarparth007.github.io/copilot-explorer/posts/copilot-internals.html)
+- [B00TK1D/copilot-api for GitHub auth and inline completions](https://github.com/B00TK1D/copilot-api)
package/lib/api.d.ts
ADDED
@@ -0,0 +1,47 @@
+import { Signal } from '@lumino/signaling';
+import { IChatCompletionResponseEmitter, IChatParticipant, IContextItem, ITelemetryEvent, RequestDataType } from './tokens';
+export declare enum GitHubCopilotLoginStatus {
+    NotLoggedIn = "NOT_LOGGED_IN",
+    ActivatingDevice = "ACTIVATING_DEVICE",
+    LoggingIn = "LOGGING_IN",
+    LoggedIn = "LOGGED_IN"
+}
+export interface IDeviceVerificationInfo {
+    verificationURI: string;
+    userCode: string;
+}
+export declare class NBIConfig {
+    get llmProviders(): [any];
+    get chatModels(): [any];
+    get inlineCompletionModels(): [any];
+    get chatModel(): any;
+    get inlineCompletionModel(): any;
+    get usingGitHubCopilotModel(): boolean;
+    capabilities: any;
+    chatParticipants: IChatParticipant[];
+    changed: Signal<this, void>;
+}
+export declare class NBIAPI {
+    static _loginStatus: GitHubCopilotLoginStatus;
+    static _deviceVerificationInfo: IDeviceVerificationInfo;
+    static _webSocket: WebSocket;
+    static _messageReceived: Signal<unknown, any>;
+    static config: NBIConfig;
+    static configChanged: Signal<NBIConfig, void>;
+    static initialize(): Promise<void>;
+    static initializeWebsocket(): Promise<void>;
+    static getLoginStatus(): GitHubCopilotLoginStatus;
+    static getDeviceVerificationInfo(): IDeviceVerificationInfo;
+    static loginToGitHub(): Promise<unknown>;
+    static logoutFromGitHub(): Promise<unknown>;
+    static updateGitHubLoginStatus(): Promise<void>;
+    static fetchCapabilities(): Promise<void>;
+    static setConfig(config: any): Promise<void>;
+    static updateOllamaModelList(): Promise<void>;
+    static chatRequest(messageId: string, chatId: string, prompt: string, language: string, filename: string, additionalContext: IContextItem[], responseEmitter: IChatCompletionResponseEmitter): Promise<void>;
+    static generateCode(chatId: string, prompt: string, prefix: string, suffix: string, existingCode: string, language: string, filename: string, responseEmitter: IChatCompletionResponseEmitter): Promise<void>;
+    static sendChatUserInput(messageId: string, data: any): Promise<void>;
+    static sendWebSocketMessage(messageId: string, messageType: RequestDataType, data: any): Promise<void>;
+    static inlineCompletionsRequest(chatId: string, messageId: string, prefix: string, suffix: string, language: string, filename: string, responseEmitter: IChatCompletionResponseEmitter): Promise<void>;
+    static emitTelemetryEvent(event: ITelemetryEvent): Promise<void>;
+}
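For reference, here is a minimal sketch (not shipped with the package) of how the static `NBIAPI` surface declared above might be driven from extension code. It assumes `./api` is the module shown above, and that `IChatCompletionResponseEmitter` exposes the `emit` method that `api.js` calls on it; the emitter here only logs responses, and the language/filename values are example placeholders.

```typescript
// Hypothetical usage sketch for NBIAPI, based on the declarations above.
import { UUID } from '@lumino/coreutils';
import { NBIAPI } from './api';

async function askNBI(prompt: string): Promise<void> {
  // Fetches capabilities, starts login-status polling and opens the websocket.
  await NBIAPI.initialize();

  const chatId = UUID.uuid4();
  const messageId = UUID.uuid4();

  // Stream chat responses; this emitter just logs each message that arrives
  // with a matching message id. The real emitter type lives in tokens.ts.
  await NBIAPI.chatRequest(
    messageId,
    chatId,
    prompt,
    'python',         // language of the active document (example value)
    'Untitled.ipynb', // filename of the active document (example value)
    [],               // no additional context items
    { emit: (msg: any) => console.log('chat response', msg) } as any
  );
}
```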
package/lib/api.js
ADDED
@@ -0,0 +1,246 @@
+// Copyright (c) Mehmet Bektas <mbektasgh@outlook.com>
+var _a;
+import { ServerConnection } from '@jupyterlab/services';
+import { requestAPI } from './handler';
+import { URLExt } from '@jupyterlab/coreutils';
+import { UUID } from '@lumino/coreutils';
+import { Signal } from '@lumino/signaling';
+import { GITHUB_COPILOT_PROVIDER_ID, RequestDataType } from './tokens';
+const LOGIN_STATUS_UPDATE_INTERVAL = 2000;
+export var GitHubCopilotLoginStatus;
+(function (GitHubCopilotLoginStatus) {
+    GitHubCopilotLoginStatus["NotLoggedIn"] = "NOT_LOGGED_IN";
+    GitHubCopilotLoginStatus["ActivatingDevice"] = "ACTIVATING_DEVICE";
+    GitHubCopilotLoginStatus["LoggingIn"] = "LOGGING_IN";
+    GitHubCopilotLoginStatus["LoggedIn"] = "LOGGED_IN";
+})(GitHubCopilotLoginStatus || (GitHubCopilotLoginStatus = {}));
+export class NBIConfig {
+    constructor() {
+        this.capabilities = {};
+        this.chatParticipants = [];
+        this.changed = new Signal(this);
+    }
+    get llmProviders() {
+        return this.capabilities.llm_providers;
+    }
+    get chatModels() {
+        return this.capabilities.chat_models;
+    }
+    get inlineCompletionModels() {
+        return this.capabilities.inline_completion_models;
+    }
+    get chatModel() {
+        return this.capabilities.chat_model;
+    }
+    get inlineCompletionModel() {
+        return this.capabilities.inline_completion_model;
+    }
+    get usingGitHubCopilotModel() {
+        return (this.chatModel.provider === GITHUB_COPILOT_PROVIDER_ID ||
+            this.inlineCompletionModel.provider === GITHUB_COPILOT_PROVIDER_ID);
+    }
+}
+class NBIAPI {
+    static async initialize() {
+        await this.fetchCapabilities();
+        this.updateGitHubLoginStatus();
+        setInterval(() => {
+            this.updateGitHubLoginStatus();
+        }, LOGIN_STATUS_UPDATE_INTERVAL);
+        NBIAPI.initializeWebsocket();
+    }
+    static async initializeWebsocket() {
+        const serverSettings = ServerConnection.makeSettings();
+        const wsUrl = URLExt.join(serverSettings.wsUrl, 'notebook-intelligence', 'copilot');
+        this._webSocket = new WebSocket(wsUrl);
+        this._webSocket.onmessage = msg => {
+            this._messageReceived.emit(msg.data);
+        };
+    }
+    static getLoginStatus() {
+        return this._loginStatus;
+    }
+    static getDeviceVerificationInfo() {
+        return this._deviceVerificationInfo;
+    }
+    static async loginToGitHub() {
+        this._loginStatus = GitHubCopilotLoginStatus.ActivatingDevice;
+        return new Promise((resolve, reject) => {
+            requestAPI('gh-login', { method: 'POST' })
+                .then(data => {
+                    resolve({
+                        verificationURI: data.verification_uri,
+                        userCode: data.user_code
+                    });
+                    this.updateGitHubLoginStatus();
+                })
+                .catch(reason => {
+                    console.error(`Failed to login to GitHub Copilot.\n${reason}`);
+                    reject(reason);
+                });
+        });
+    }
+    static async logoutFromGitHub() {
+        this._loginStatus = GitHubCopilotLoginStatus.ActivatingDevice;
+        return new Promise((resolve, reject) => {
+            requestAPI('gh-logout', { method: 'GET' })
+                .then(data => {
+                    this.updateGitHubLoginStatus().then(() => {
+                        resolve(data);
+                    });
+                })
+                .catch(reason => {
+                    console.error(`Failed to logout from GitHub Copilot.\n${reason}`);
+                    reject(reason);
+                });
+        });
+    }
+    static async updateGitHubLoginStatus() {
+        return new Promise((resolve, reject) => {
+            requestAPI('gh-login-status')
+                .then(response => {
+                    this._loginStatus = response.status;
+                    this._deviceVerificationInfo.verificationURI =
+                        response.verification_uri || '';
+                    this._deviceVerificationInfo.userCode = response.user_code || '';
+                    resolve();
+                })
+                .catch(reason => {
+                    console.error(`Failed to fetch GitHub Copilot login status.\n${reason}`);
+                    reject(reason);
+                });
+        });
+    }
+    static async fetchCapabilities() {
+        return new Promise((resolve, reject) => {
+            requestAPI('capabilities', { method: 'GET' })
+                .then(data => {
+                    this.config.capabilities = structuredClone(data);
+                    this.config.chatParticipants = structuredClone(data.chat_participants);
+                    this.configChanged.emit();
+                    resolve();
+                })
+                .catch(reason => {
+                    console.error(`Failed to get extension capabilities.\n${reason}`);
+                    reject(reason);
+                });
+        });
+    }
+    static async setConfig(config) {
+        requestAPI('config', {
+            method: 'POST',
+            body: JSON.stringify(config)
+        })
+            .then(data => {
+                NBIAPI.fetchCapabilities();
+            })
+            .catch(reason => {
+                console.error(`Failed to set NBI config.\n${reason}`);
+            });
+    }
+    static async updateOllamaModelList() {
+        return new Promise((resolve, reject) => {
+            requestAPI('update-provider-models', {
+                method: 'POST',
+                body: JSON.stringify({ provider: 'ollama' })
+            })
+                .then(async (data) => {
+                    await NBIAPI.fetchCapabilities();
+                    resolve();
+                })
+                .catch(reason => {
+                    console.error(`Failed to update ollama model list.\n${reason}`);
+                    reject(reason);
+                });
+        });
+    }
+    static async chatRequest(messageId, chatId, prompt, language, filename, additionalContext, responseEmitter) {
+        this._messageReceived.connect((_, msg) => {
+            msg = JSON.parse(msg);
+            if (msg.id === messageId) {
+                responseEmitter.emit(msg);
+            }
+        });
+        this._webSocket.send(JSON.stringify({
+            id: messageId,
+            type: RequestDataType.ChatRequest,
+            data: { chatId, prompt, language, filename, additionalContext }
+        }));
+    }
+    static async generateCode(chatId, prompt, prefix, suffix, existingCode, language, filename, responseEmitter) {
+        const messageId = UUID.uuid4();
+        this._messageReceived.connect((_, msg) => {
+            msg = JSON.parse(msg);
+            if (msg.id === messageId) {
+                responseEmitter.emit(msg);
+            }
+        });
+        this._webSocket.send(JSON.stringify({
+            id: messageId,
+            type: RequestDataType.GenerateCode,
+            data: {
+                chatId,
+                prompt,
+                prefix,
+                suffix,
+                existingCode,
+                language,
+                filename
+            }
+        }));
+    }
+    static async sendChatUserInput(messageId, data) {
+        this._webSocket.send(JSON.stringify({
+            id: messageId,
+            type: RequestDataType.ChatUserInput,
+            data
+        }));
+    }
+    static async sendWebSocketMessage(messageId, messageType, data) {
+        this._webSocket.send(JSON.stringify({ id: messageId, type: messageType, data }));
+    }
+    static async inlineCompletionsRequest(chatId, messageId, prefix, suffix, language, filename, responseEmitter) {
+        this._messageReceived.connect((_, msg) => {
+            msg = JSON.parse(msg);
+            if (msg.id === messageId) {
+                responseEmitter.emit(msg);
+            }
+        });
+        this._webSocket.send(JSON.stringify({
+            id: messageId,
+            type: RequestDataType.InlineCompletionRequest,
+            data: {
+                chatId,
+                prefix,
+                suffix,
+                language,
+                filename
+            }
+        }));
+    }
+    static async emitTelemetryEvent(event) {
+        return new Promise((resolve, reject) => {
+            requestAPI('emit-telemetry-event', {
+                method: 'POST',
+                body: JSON.stringify(event)
+            })
+                .then(async (data) => {
+                    resolve();
+                })
+                .catch(reason => {
+                    console.error(`Failed to emit telemetry event.\n${reason}`);
+                    reject(reason);
+                });
+        });
+    }
+}
+_a = NBIAPI;
+NBIAPI._loginStatus = GitHubCopilotLoginStatus.NotLoggedIn;
+NBIAPI._deviceVerificationInfo = {
+    verificationURI: '',
+    userCode: ''
+};
+NBIAPI._messageReceived = new Signal(_a);
+NBIAPI.config = new NBIConfig();
+NBIAPI.configChanged = _a.config.changed;
+export { NBIAPI };
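The chat, code-generation and inline-completion requests above all share the same envelope when they are written to the `notebook-intelligence/copilot` websocket. Below is a sketch of that shape inferred from the `send` calls in this file; the concrete `type` strings come from `RequestDataType` in `tokens.ts` (not shown here), so the string used in the example is a placeholder.

```typescript
// Envelope written by chatRequest/generateCode/inlineCompletionsRequest above.
// Field names come from this file; the example `type` value is a placeholder
// for the corresponding RequestDataType constant defined in tokens.ts.
interface IRequestEnvelope {
  id: string;    // message id; responses carrying the same id are routed back to the emitter
  type: string;  // a RequestDataType value, e.g. RequestDataType.ChatRequest
  data: unknown; // request-specific payload
}

const chatRequestExample: IRequestEnvelope = {
  id: 'example-message-uuid',
  type: 'chat-request', // placeholder for RequestDataType.ChatRequest
  data: {
    chatId: 'example-chat-uuid',
    prompt: 'Explain this cell',
    language: 'python',
    filename: 'Untitled.ipynb',
    additionalContext: []
  }
};
```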
package/lib/chat-sidebar.d.ts
ADDED
@@ -0,0 +1,80 @@
+/// <reference types="react" />
+import { ReactWidget } from '@jupyterlab/apputils';
+import { IActiveDocumentInfo, ICellContents, IContextItem, ITelemetryEmitter } from './tokens';
+import { JupyterFrontEnd } from '@jupyterlab/application';
+export declare enum RunChatCompletionType {
+    Chat = 0,
+    ExplainThis = 1,
+    FixThis = 2,
+    GenerateCode = 3,
+    ExplainThisOutput = 4,
+    TroubleshootThisOutput = 5
+}
+export interface IRunChatCompletionRequest {
+    messageId: string;
+    chatId: string;
+    type: RunChatCompletionType;
+    content: string;
+    language?: string;
+    filename?: string;
+    prefix?: string;
+    suffix?: string;
+    existingCode?: string;
+    additionalContext?: IContextItem[];
+}
+export interface IChatSidebarOptions {
+    getActiveDocumentInfo: () => IActiveDocumentInfo;
+    getActiveSelectionContent: () => string;
+    getCurrentCellContents: () => ICellContents;
+    openFile: (path: string) => void;
+    getApp: () => JupyterFrontEnd;
+    getTelemetryEmitter: () => ITelemetryEmitter;
+}
+export declare class ChatSidebar extends ReactWidget {
+    constructor(options: IChatSidebarOptions);
+    render(): JSX.Element;
+    private _options;
+}
+export interface IInlinePromptWidgetOptions {
+    prompt: string;
+    existingCode: string;
+    prefix: string;
+    suffix: string;
+    onRequestSubmitted: (prompt: string) => void;
+    onRequestCancelled: () => void;
+    onContentStream: (content: string) => void;
+    onContentStreamEnd: () => void;
+    onUpdatedCodeChange: (content: string) => void;
+    onUpdatedCodeAccepted: () => void;
+    telemetryEmitter: ITelemetryEmitter;
+}
+export declare class InlinePromptWidget extends ReactWidget {
+    constructor(rect: DOMRect, options: IInlinePromptWidgetOptions);
+    updatePosition(rect: DOMRect): void;
+    _onResponse(response: any): void;
+    _onRequestSubmitted(prompt: string): void;
+    render(): JSX.Element;
+    private _options;
+    private _requestTime;
+}
+export declare class GitHubCopilotStatusBarItem extends ReactWidget {
+    constructor(options: {
+        getApp: () => JupyterFrontEnd;
+    });
+    render(): JSX.Element;
+    private _getApp;
+}
+export declare class GitHubCopilotLoginDialogBody extends ReactWidget {
+    constructor(options: {
+        onLoggedIn: () => void;
+    });
+    render(): JSX.Element;
+    private _onLoggedIn;
+}
+export declare class ConfigurationDialogBody extends ReactWidget {
+    constructor(options: {
+        onSave: () => void;
+    });
+    render(): JSX.Element;
+    private _onSave;
+}