@sweetoburrito/backstage-plugin-ai-assistant-backend-module-model-provider-azure-ai 0.4.1 → 0.4.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +54 -2
- package/package.json +2 -2
package/README.md
CHANGED
@@ -1,5 +1,57 @@
  # @sweetoburrito/backstage-plugin-ai-assistant-backend-module-model-provider-azure-ai

-
+ This module provides an Azure AI (Azure OpenAI / Azure Foundry) model provider implementation for the
+ [backstage-plugin-ai-assistant](https://github.com/SweetOBurritO/backstage-plugin-ai-assistant) backend. It lets the AI Assistant backend call Azure-hosted models (chat or completion)
+ using a configuration-driven provider so the rest of the plugin can remain model-agnostic.

-
+ ## Features
+
+ - Connects Backstage AI Assistant to Azure-hosted LLM models (Azure OpenAI / Azure Foundry).
+ - Configuration via Backstage `app-config.yaml` and environment variables.
+
+ ## When to use
+
+ Use this module when you want the AI Assistant backend to use models hosted in Azure (for example: GPT-family models
+ deployed in Azure OpenAI or Azure AI Foundry deployments) in the backstage ai assistant.
+
+ ## Configuration
+
+ Add the provider configuration in your `app-config.local`.
+
+ ```yaml
+ aiAssistant:
+   models:
+     azureAi:
+       apiKey: ${AZURE-AI-API-KEY}
+       models:
+         - endpoint: https://eastus.api.cognitive.microsoft.com/openai/v1/ # Replace with your deployment endpoint
+           modelName: 'gpt-5-mini'
+         - endpoint: https://eastus.api.cognitive.microsoft.com/openai/v1/
+           modelName: 'DeepSeek-R1'
+ ```
+
+ ## Install
+
+ Install the plugin into your backstage backend with the following command
+
+ ```sh
+ yarn workspace backend add @sweetoburrito/backstage-plugin-ai-assistant-backend-module-model-provider-azure-ai
+ ```
+
+ Add it to your backend
+
+ ```diff
+ // packages/backend/src/index.ts
+
+ backend.add(import('@backstage/plugin-events-backend'));
+ backend.add(import('@backstage/plugin-signals-backend'));
+
+ backend.add(import('@sweetoburrito/backstage-plugin-ai-assistant-backend'));
+
+ ++backend.add(
+ ++  import(
+ ++    '@sweetoburrito/backstage-plugin-ai-assistant-backend-module-model-provider-azure-ai'
+ ++  ),
+ ++);
+
+ ```
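The `apiKey` placeholder in the configuration above is resolved from the environment by Backstage's config substitution. Below is a minimal local-run sketch, assuming a standard Backstage monorepo where `yarn workspace backend start` launches the backend and assuming Backstage accepts the hyphenated variable name exactly as written in the README; adjust both to your setup. Since most shells reject hyphens in `export` syntax, the variable is passed through `env` here.

```sh
# Hypothetical local run: supply the key that the ${AZURE-AI-API-KEY} placeholder resolves to.
# `export AZURE-AI-API-KEY=...` fails in POSIX shells because of the hyphens, so use env(1).
env 'AZURE-AI-API-KEY=<your-azure-api-key>' yarn workspace backend start
```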
package/package.json
CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@sweetoburrito/backstage-plugin-ai-assistant-backend-module-model-provider-azure-ai",
-   "version": "0.4.1",
+   "version": "0.4.3",
    "license": "Apache-2.0",
    "description": "The model-provider-azure-ai backend module for the ai-assistant plugin.",
    "main": "dist/index.cjs.js",

@@ -34,7 +34,7 @@
    "@backstage/backend-plugin-api": "backstage:^",
    "@langchain/core": "^0.3.72",
    "@langchain/openai": "^0.6.13",
-   "@sweetoburrito/backstage-plugin-ai-assistant-node": "^0.
+   "@sweetoburrito/backstage-plugin-ai-assistant-node": "^0.6.0"
  },
  "devDependencies": {
    "@backstage/backend-test-utils": "backstage:^",
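For orientation only: the `package.json` changes above show the module depends on `@langchain/openai`, so a single configured entry (endpoint plus `modelName`) roughly corresponds to a LangChain chat model pointed at an Azure OpenAI v1-compatible endpoint. The sketch below illustrates that idea and is not the module's actual implementation; it only reuses the endpoint, model name, and API-key placeholder from the README diff above.

```ts
import { ChatOpenAI } from '@langchain/openai';

// Illustrative sketch: what one `models` entry from the config amounts to at runtime.
// These are public ChatOpenAI options, not this module's internal provider code.
const model = new ChatOpenAI({
  model: 'gpt-5-mini', // modelName from the README config
  apiKey: process.env['AZURE-AI-API-KEY'], // the key ${AZURE-AI-API-KEY} resolves to
  configuration: {
    // endpoint from the README config (Azure OpenAI v1-compatible base URL)
    baseURL: 'https://eastus.api.cognitive.microsoft.com/openai/v1/',
  },
});

async function main() {
  // Example call; the real plugin wires such models into its assistant pipeline instead.
  const reply = await model.invoke('Say hello from the AI Assistant backend.');
  console.log(reply.content);
}

main().catch(console.error);
```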