@azure/mcp-linux-arm64 2.0.0-beta.9 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/NOTICE.txt +5683 -4849
- package/README.md +148 -27
- package/dist/Azure.Mcp.Tools.AzureMigrate.xml +1060 -0
- package/dist/Instrumentation/Resources/api-reference/dotnet/ActivityProcessors.md +119 -0
- package/dist/Instrumentation/Resources/api-reference/dotnet/AddApplicationInsightsTelemetry.md +129 -0
- package/dist/Instrumentation/Resources/api-reference/dotnet/AddApplicationInsightsTelemetryWorkerService.md +115 -0
- package/dist/Instrumentation/Resources/api-reference/dotnet/AddOpenTelemetry.md +153 -0
- package/dist/Instrumentation/Resources/api-reference/dotnet/ApplicationInsightsWeb.md +103 -0
- package/dist/Instrumentation/Resources/api-reference/dotnet/AzureMonitorExporter.md +137 -0
- package/dist/Instrumentation/Resources/api-reference/dotnet/ConfigureOpenTelemetryProvider.md +218 -0
- package/dist/Instrumentation/Resources/api-reference/dotnet/ConfigureResource.md +119 -0
- package/dist/Instrumentation/Resources/api-reference/dotnet/ConsoleExporter.md +47 -0
- package/dist/Instrumentation/Resources/api-reference/dotnet/EntityFrameworkInstrumentation.md +56 -0
- package/dist/Instrumentation/Resources/api-reference/dotnet/HttpInstrumentation.md +109 -0
- package/dist/Instrumentation/Resources/api-reference/dotnet/LogProcessors.md +101 -0
- package/dist/Instrumentation/Resources/api-reference/dotnet/OpenTelemetrySdkCreate.md +146 -0
- package/dist/Instrumentation/Resources/api-reference/dotnet/OtlpExporter.md +88 -0
- package/dist/Instrumentation/Resources/api-reference/dotnet/RedisInstrumentation.md +63 -0
- package/dist/Instrumentation/Resources/api-reference/dotnet/Sampling.md +86 -0
- package/dist/Instrumentation/Resources/api-reference/dotnet/SdkCreateTracerProviderBuilder.md +127 -0
- package/dist/Instrumentation/Resources/api-reference/dotnet/SqlClientInstrumentation.md +53 -0
- package/dist/Instrumentation/Resources/api-reference/dotnet/TelemetryClient.md +122 -0
- package/dist/Instrumentation/Resources/api-reference/dotnet/TelemetryConfigurationBuilder.md +173 -0
- package/dist/Instrumentation/Resources/api-reference/dotnet/UseAzureMonitor.md +96 -0
- package/dist/Instrumentation/Resources/api-reference/dotnet/UseAzureMonitorExporter.md +146 -0
- package/dist/Instrumentation/Resources/api-reference/dotnet/WithLogging.md +109 -0
- package/dist/Instrumentation/Resources/api-reference/dotnet/WithMetrics.md +105 -0
- package/dist/Instrumentation/Resources/api-reference/dotnet/WithTracing.md +91 -0
- package/dist/Instrumentation/Resources/concepts/dotnet/appinsights-aspnetcore.md +113 -0
- package/dist/Instrumentation/Resources/concepts/dotnet/aspnet-classic-appinsights.md +95 -0
- package/dist/Instrumentation/Resources/concepts/dotnet/azure-monitor-distro.md +102 -0
- package/dist/Instrumentation/Resources/concepts/dotnet/opentelemetry-pipeline.md +57 -0
- package/dist/Instrumentation/Resources/concepts/nodejs/azure-monitor-overview.md +106 -0
- package/dist/Instrumentation/Resources/concepts/nodejs/opentelemetry-pipeline.md +201 -0
- package/dist/Instrumentation/Resources/concepts/python/azure-monitor-overview.md +122 -0
- package/dist/Instrumentation/Resources/concepts/python/opentelemetry-pipeline.md +154 -0
- package/dist/Instrumentation/Resources/examples/dotnet/aspnet-classic-setup.md +80 -0
- package/dist/Instrumentation/Resources/examples/dotnet/aspnetcore-distro-setup.md +156 -0
- package/dist/Instrumentation/Resources/examples/dotnet/aspnetcore-setup.md +160 -0
- package/dist/Instrumentation/Resources/examples/dotnet/workerservice-setup.md +154 -0
- package/dist/Instrumentation/Resources/examples/nodejs/bunyan-setup.md +301 -0
- package/dist/Instrumentation/Resources/examples/nodejs/console-setup.md +284 -0
- package/dist/Instrumentation/Resources/examples/nodejs/express-setup.md +169 -0
- package/dist/Instrumentation/Resources/examples/nodejs/fastify-setup.md +237 -0
- package/dist/Instrumentation/Resources/examples/nodejs/langchain-js-setup.md +310 -0
- package/dist/Instrumentation/Resources/examples/nodejs/mongodb-setup.md +185 -0
- package/dist/Instrumentation/Resources/examples/nodejs/mysql-setup.md +231 -0
- package/dist/Instrumentation/Resources/examples/nodejs/nestjs-setup.md +184 -0
- package/dist/Instrumentation/Resources/examples/nodejs/nextjs-setup.md +320 -0
- package/dist/Instrumentation/Resources/examples/nodejs/postgres-setup.md +147 -0
- package/dist/Instrumentation/Resources/examples/nodejs/redis-setup.md +198 -0
- package/dist/Instrumentation/Resources/examples/nodejs/winston-setup.md +260 -0
- package/dist/Instrumentation/Resources/examples/python/console-setup.md +392 -0
- package/dist/Instrumentation/Resources/examples/python/django-setup.md +269 -0
- package/dist/Instrumentation/Resources/examples/python/fastapi-setup.md +256 -0
- package/dist/Instrumentation/Resources/examples/python/flask-setup.md +218 -0
- package/dist/Instrumentation/Resources/examples/python/genai-setup.md +214 -0
- package/dist/Instrumentation/Resources/examples/python/generic-setup.md +164 -0
- package/dist/Instrumentation/Resources/migration/dotnet/aad-authentication-migration.md +150 -0
- package/dist/Instrumentation/Resources/migration/dotnet/appinsights-2x-to-3x-code-migration.md +164 -0
- package/dist/Instrumentation/Resources/migration/dotnet/appinsights-2x-to-3x-no-code-change.md +92 -0
- package/dist/Instrumentation/Resources/migration/dotnet/aspnet-classic-2x-to-3x-code-migration.md +190 -0
- package/dist/Instrumentation/Resources/migration/dotnet/console-2x-to-3x-code-migration.md +106 -0
- package/dist/Instrumentation/Resources/migration/dotnet/ilogger-migration.md +54 -0
- package/dist/Instrumentation/Resources/migration/dotnet/workerservice-2x-to-3x-code-migration.md +126 -0
- package/dist/Instrumentation/Resources/migration/dotnet/workerservice-2x-to-3x-no-code-change.md +102 -0
- package/dist/azmcp +0 -0
- package/package.json +1 -1
|
@@ -0,0 +1,169 @@
|
|
|
1
|
+
# Basic Azure Monitor Setup for Express.js
|
|
2
|
+
|
|
3
|
+
This guide shows how to add Azure Monitor OpenTelemetry to an Express.js application.
|
|
4
|
+
|
|
5
|
+
## Prerequisites
|
|
6
|
+
|
|
7
|
+
- Node.js 18.x or higher
|
|
8
|
+
- npm or yarn
|
|
9
|
+
- Express.js application
|
|
10
|
+
- Azure Application Insights resource
|
|
11
|
+
|
|
12
|
+
## Step 1: Install Package
|
|
13
|
+
|
|
14
|
+
```bash
|
|
15
|
+
npm install @azure/monitor-opentelemetry
|
|
16
|
+
```
|
|
17
|
+
|
|
18
|
+
## Step 2: Initialize at Startup
|
|
19
|
+
|
|
20
|
+
Create or update your main entry point (typically `index.js` or `server.js`):
|
|
21
|
+
|
|
22
|
+
```javascript
|
|
23
|
+
// IMPORTANT: This must be the first line, before any other imports
|
|
24
|
+
const { useAzureMonitor } = require('@azure/monitor-opentelemetry');
|
|
25
|
+
|
|
26
|
+
// Initialize Azure Monitor
|
|
27
|
+
useAzureMonitor({
|
|
28
|
+
azureMonitorExporterOptions: {
|
|
29
|
+
connectionString: process.env.APPLICATIONINSIGHTS_CONNECTION_STRING
|
|
30
|
+
}
|
|
31
|
+
});
|
|
32
|
+
|
|
33
|
+
// Now load your application code
|
|
34
|
+
const express = require('express');
|
|
35
|
+
const app = express();
|
|
36
|
+
const port = process.env.PORT || 3000;
|
|
37
|
+
|
|
38
|
+
// Your middleware and routes
|
|
39
|
+
app.use(express.json());
|
|
40
|
+
|
|
41
|
+
app.get('/', (req, res) => {
|
|
42
|
+
res.json({ message: 'Hello World!' });
|
|
43
|
+
});
|
|
44
|
+
|
|
45
|
+
app.get('/api/users', (req, res) => {
|
|
46
|
+
// This request will be automatically tracked
|
|
47
|
+
res.json([{ id: 1, name: 'Alice' }, { id: 2, name: 'Bob' }]);
|
|
48
|
+
});
|
|
49
|
+
|
|
50
|
+
app.listen(port, () => {
|
|
51
|
+
console.log(`Server listening on port ${port}`);
|
|
52
|
+
});
|
|
53
|
+
```
|
|
54
|
+
|
|
55
|
+
## Step 3: Configure Connection String
|
|
56
|
+
|
|
57
|
+
Create a `.env` file in your project root:
|
|
58
|
+
|
|
59
|
+
```env
|
|
60
|
+
APPLICATIONINSIGHTS_CONNECTION_STRING=InstrumentationKey=00000000-0000-0000-0000-000000000000;IngestionEndpoint=https://...
|
|
61
|
+
PORT=3000
|
|
62
|
+
```
|
|
63
|
+
|
|
64
|
+
Install `dotenv` to load environment variables:
|
|
65
|
+
|
|
66
|
+
```bash
|
|
67
|
+
npm install dotenv
|
|
68
|
+
```
|
|
69
|
+
|
|
70
|
+
Load it at the very top of your entry file:
|
|
71
|
+
|
|
72
|
+
```javascript
|
|
73
|
+
require('dotenv').config();
|
|
74
|
+
const { useAzureMonitor } = require('@azure/monitor-opentelemetry');
|
|
75
|
+
// ... rest of code
|
|
76
|
+
```
|
|
77
|
+
|
|
78
|
+
## Step 4: Add Custom Telemetry (Optional)
|
|
79
|
+
|
|
80
|
+
```javascript
|
|
81
|
+
const { trace } = require('@opentelemetry/api');
|
|
82
|
+
|
|
83
|
+
app.get('/api/process', async (req, res) => {
|
|
84
|
+
const span = trace.getActiveSpan();
|
|
85
|
+
|
|
86
|
+
// Add custom attributes
|
|
87
|
+
span?.setAttribute('user.id', req.headers['user-id']);
|
|
88
|
+
span?.setAttribute('operation.type', 'data-processing');
|
|
89
|
+
|
|
90
|
+
try {
|
|
91
|
+
// Your business logic
|
|
92
|
+
const result = await processData();
|
|
93
|
+
res.json(result);
|
|
94
|
+
} catch (error) {
|
|
95
|
+
// Exceptions are automatically tracked
|
|
96
|
+
span?.recordException(error);
|
|
97
|
+
res.status(500).json({ error: 'Processing failed' });
|
|
98
|
+
}
|
|
99
|
+
});
|
|
100
|
+
```
|
|
101
|
+
|
|
102
|
+
## What Gets Tracked Automatically
|
|
103
|
+
|
|
104
|
+
✅ **HTTP Requests**: All incoming requests with duration, status, URL
|
|
105
|
+
✅ **Dependencies**: Outgoing HTTP calls, database queries
|
|
106
|
+
✅ **Exceptions**: Unhandled errors and exceptions
|
|
107
|
+
✅ **Performance**: Response times and request counts
|
|
108
|
+
✅ **Custom Logs**: logs emitted through supported logging libraries (e.g. winston, bunyan) are captured as traces
|
|
109
|
+
|
|
110
|
+
## Verify It Works
|
|
111
|
+
|
|
112
|
+
1. Start your application:
|
|
113
|
+
```bash
|
|
114
|
+
npm start
|
|
115
|
+
```
|
|
116
|
+
|
|
117
|
+
2. Make some HTTP requests:
|
|
118
|
+
```bash
|
|
119
|
+
curl http://localhost:3000/
|
|
120
|
+
curl http://localhost:3000/api/users
|
|
121
|
+
```
|
|
122
|
+
|
|
123
|
+
3. Check Azure Portal:
|
|
124
|
+
- Navigate to your Application Insights resource
|
|
125
|
+
- Go to "Transaction search" or "Live Metrics"
|
|
126
|
+
- You should see requests appearing within 1-2 minutes
|
|
127
|
+
|
|
128
|
+
## Complete package.json Example
|
|
129
|
+
|
|
130
|
+
```json
|
|
131
|
+
{
|
|
132
|
+
"name": "express-azure-monitor-demo",
|
|
133
|
+
"version": "1.0.0",
|
|
134
|
+
"description": "Express app with Azure Monitor",
|
|
135
|
+
"main": "index.js",
|
|
136
|
+
"scripts": {
|
|
137
|
+
"start": "node index.js",
|
|
138
|
+
"dev": "nodemon index.js"
|
|
139
|
+
},
|
|
140
|
+
"dependencies": {
|
|
141
|
+
"@azure/monitor-opentelemetry": "^1.0.0",
|
|
142
|
+
"express": "^4.18.0",
|
|
143
|
+
"dotenv": "^16.0.0"
|
|
144
|
+
},
|
|
145
|
+
"devDependencies": {
|
|
146
|
+
"nodemon": "^3.0.0"
|
|
147
|
+
}
|
|
148
|
+
}
|
|
149
|
+
```
|
|
150
|
+
|
|
151
|
+
## Troubleshooting
|
|
152
|
+
|
|
153
|
+
**No telemetry appearing?**
|
|
154
|
+
- Verify connection string is correct
|
|
155
|
+
- Ensure `useAzureMonitor()` is called BEFORE loading Express
|
|
156
|
+
- Check console for error messages
|
|
157
|
+
- Wait 2-3 minutes for initial data to appear
|
|
158
|
+
|
|
159
|
+
**Performance impact?**
|
|
160
|
+
- Azure Monitor has minimal overhead (<5% in most cases)
|
|
161
|
+
- Use sampling for high-traffic applications
|
|
162
|
+
- Disable in development if needed
|
|
163
|
+
|
|
164
|
+
## Next Steps
|
|
165
|
+
|
|
166
|
+
- Configure custom dimensions and metrics
|
|
167
|
+
- Set up alerts and dashboards in Azure Portal
|
|
168
|
+
- Enable profiler for performance analysis
|
|
169
|
+
- Add distributed tracing across microservices
|
|
@@ -0,0 +1,237 @@
|
|
|
1
|
+
# Basic Azure Monitor Setup for Fastify
|
|
2
|
+
|
|
3
|
+
This guide shows how to add Azure Monitor OpenTelemetry to a Fastify application.
|
|
4
|
+
|
|
5
|
+
## Prerequisites
|
|
6
|
+
|
|
7
|
+
- Node.js 18.x or higher
|
|
8
|
+
- npm or yarn
|
|
9
|
+
- Fastify application
|
|
10
|
+
- Azure Application Insights resource
|
|
11
|
+
|
|
12
|
+
## Step 1: Install Package
|
|
13
|
+
|
|
14
|
+
```bash
|
|
15
|
+
npm install @azure/monitor-opentelemetry
|
|
16
|
+
```
|
|
17
|
+
|
|
18
|
+
## Step 2: Initialize at Startup
|
|
19
|
+
|
|
20
|
+
Update your main entry point (typically `index.js`, `server.js`, or `app.js`):
|
|
21
|
+
|
|
22
|
+
```javascript
|
|
23
|
+
// IMPORTANT: This must be the first line, before any other imports
|
|
24
|
+
const { useAzureMonitor } = require('@azure/monitor-opentelemetry');
|
|
25
|
+
|
|
26
|
+
// Enable Azure Monitor integration - must be called before other requires
|
|
27
|
+
useAzureMonitor({
|
|
28
|
+
azureMonitorExporterOptions: {
|
|
29
|
+
connectionString: process.env.APPLICATIONINSIGHTS_CONNECTION_STRING
|
|
30
|
+
}
|
|
31
|
+
});
|
|
32
|
+
|
|
33
|
+
// Now load your application code
|
|
34
|
+
const fastify = require('fastify')({ logger: true });
|
|
35
|
+
|
|
36
|
+
// Register routes
|
|
37
|
+
fastify.get('/', async (request, reply) => {
|
|
38
|
+
return { message: 'Hello World!' };
|
|
39
|
+
});
|
|
40
|
+
|
|
41
|
+
fastify.get('/api/users', async (request, reply) => {
|
|
42
|
+
// This request will be automatically tracked
|
|
43
|
+
return [
|
|
44
|
+
{ id: 1, name: 'Alice' },
|
|
45
|
+
{ id: 2, name: 'Bob' }
|
|
46
|
+
];
|
|
47
|
+
});
|
|
48
|
+
|
|
49
|
+
// Start server
|
|
50
|
+
const start = async () => {
|
|
51
|
+
try {
|
|
52
|
+
await fastify.listen({ port: process.env.PORT || 3000 });
|
|
53
|
+
} catch (err) {
|
|
54
|
+
fastify.log.error(err);
|
|
55
|
+
process.exit(1);
|
|
56
|
+
}
|
|
57
|
+
};
|
|
58
|
+
|
|
59
|
+
start();
|
|
60
|
+
```
|
|
61
|
+
|
|
62
|
+
> **Important**: `useAzureMonitor()` must be called before requiring Fastify or any other modules to ensure proper instrumentation.
|
|
63
|
+
|
|
64
|
+
## Step 3: Configure Connection String
|
|
65
|
+
|
|
66
|
+
Create a `.env` file in your project root:
|
|
67
|
+
|
|
68
|
+
```env
|
|
69
|
+
APPLICATIONINSIGHTS_CONNECTION_STRING=InstrumentationKey=00000000-0000-0000-0000-000000000000;IngestionEndpoint=https://...
|
|
70
|
+
PORT=3000
|
|
71
|
+
```
|
|
72
|
+
|
|
73
|
+
Install `dotenv` to load environment variables:
|
|
74
|
+
|
|
75
|
+
```bash
|
|
76
|
+
npm install dotenv
|
|
77
|
+
```
|
|
78
|
+
|
|
79
|
+
Load it at the very top of your entry file:
|
|
80
|
+
|
|
81
|
+
```javascript
|
|
82
|
+
require('dotenv').config();
|
|
83
|
+
const { useAzureMonitor } = require('@azure/monitor-opentelemetry');
|
|
84
|
+
// ... rest of code
|
|
85
|
+
```
|
|
86
|
+
|
|
87
|
+
## Step 4: Add Custom Telemetry (Optional)
|
|
88
|
+
|
|
89
|
+
```javascript
|
|
90
|
+
const { trace } = require('@opentelemetry/api');
|
|
91
|
+
|
|
92
|
+
fastify.get('/api/process/:id', async (request, reply) => {
|
|
93
|
+
const span = trace.getActiveSpan();
|
|
94
|
+
|
|
95
|
+
// Add custom attributes to the current span
|
|
96
|
+
span?.setAttribute('process.id', request.params.id);
|
|
97
|
+
span?.setAttribute('operation.type', 'data-processing');
|
|
98
|
+
|
|
99
|
+
try {
|
|
100
|
+
// Your business logic
|
|
101
|
+
const result = await processData(request.params.id);
|
|
102
|
+
return result;
|
|
103
|
+
} catch (error) {
|
|
104
|
+
// Exceptions are automatically tracked
|
|
105
|
+
span?.recordException(error);
|
|
106
|
+
reply.status(500).send({ error: 'Processing failed' });
|
|
107
|
+
}
|
|
108
|
+
});
|
|
109
|
+
```
|
|
110
|
+
|
|
111
|
+
## Using with TypeScript
|
|
112
|
+
|
|
113
|
+
For TypeScript projects, create your entry file:
|
|
114
|
+
|
|
115
|
+
```typescript
|
|
116
|
+
// IMPORTANT: This must be the first import
|
|
117
|
+
import { useAzureMonitor } from '@azure/monitor-opentelemetry';
|
|
118
|
+
|
|
119
|
+
useAzureMonitor({
|
|
120
|
+
azureMonitorExporterOptions: {
|
|
121
|
+
connectionString: process.env.APPLICATIONINSIGHTS_CONNECTION_STRING
|
|
122
|
+
}
|
|
123
|
+
});
|
|
124
|
+
|
|
125
|
+
import Fastify from 'fastify';
|
|
126
|
+
|
|
127
|
+
const fastify = Fastify({ logger: true });
|
|
128
|
+
|
|
129
|
+
fastify.get('/', async () => {
|
|
130
|
+
return { message: 'Hello World!' };
|
|
131
|
+
});
|
|
132
|
+
|
|
133
|
+
fastify.listen({ port: 3000 });
|
|
134
|
+
```
|
|
135
|
+
|
|
136
|
+
## What Gets Tracked Automatically
|
|
137
|
+
|
|
138
|
+
✅ **HTTP Requests**: All incoming requests with duration, status, URL
|
|
139
|
+
✅ **Dependencies**: Outgoing HTTP calls, database queries
|
|
140
|
+
✅ **Exceptions**: Unhandled errors and exceptions
|
|
141
|
+
✅ **Performance**: Response times and request counts
|
|
142
|
+
✅ **Custom Logs**: logs emitted through supported logging libraries are captured as traces (Fastify's built-in logger writes to stdout and is not collected automatically)
|
|
143
|
+
|
|
144
|
+
## Using with Fastify Plugins
|
|
145
|
+
|
|
146
|
+
Azure Monitor works seamlessly with Fastify plugins:
|
|
147
|
+
|
|
148
|
+
```javascript
|
|
149
|
+
const { useAzureMonitor } = require('@azure/monitor-opentelemetry');
|
|
150
|
+
useAzureMonitor({
|
|
151
|
+
azureMonitorExporterOptions: {
|
|
152
|
+
connectionString: process.env.APPLICATIONINSIGHTS_CONNECTION_STRING
|
|
153
|
+
}
|
|
154
|
+
});
|
|
155
|
+
|
|
156
|
+
const fastify = require('fastify')({ logger: true });
|
|
157
|
+
|
|
158
|
+
// Register plugins - they will be automatically instrumented
|
|
159
|
+
fastify.register(require('@fastify/postgres'), {
|
|
160
|
+
connectionString: process.env.DATABASE_URL
|
|
161
|
+
});
|
|
162
|
+
|
|
163
|
+
fastify.register(require('@fastify/redis'), {
|
|
164
|
+
host: process.env.REDIS_HOST
|
|
165
|
+
});
|
|
166
|
+
|
|
167
|
+
// Routes using the plugins
|
|
168
|
+
fastify.get('/users', async (request, reply) => {
|
|
169
|
+
const { rows } = await fastify.pg.query('SELECT * FROM users');
|
|
170
|
+
return rows;
|
|
171
|
+
});
|
|
172
|
+
```
|
|
173
|
+
|
|
174
|
+
## Verify It Works
|
|
175
|
+
|
|
176
|
+
1. Start your application:
|
|
177
|
+
```bash
|
|
178
|
+
npm start
|
|
179
|
+
```
|
|
180
|
+
|
|
181
|
+
2. Make some HTTP requests:
|
|
182
|
+
```bash
|
|
183
|
+
curl http://localhost:3000/
|
|
184
|
+
curl http://localhost:3000/api/users
|
|
185
|
+
```
|
|
186
|
+
|
|
187
|
+
3. Check Azure Portal:
|
|
188
|
+
- Navigate to your Application Insights resource
|
|
189
|
+
- Go to "Transaction search" or "Live Metrics"
|
|
190
|
+
- You should see requests appearing within 1-2 minutes
|
|
191
|
+
|
|
192
|
+
## Complete package.json Example
|
|
193
|
+
|
|
194
|
+
```json
|
|
195
|
+
{
|
|
196
|
+
"name": "fastify-azure-monitor-demo",
|
|
197
|
+
"version": "1.0.0",
|
|
198
|
+
"description": "Fastify app with Azure Monitor",
|
|
199
|
+
"main": "index.js",
|
|
200
|
+
"scripts": {
|
|
201
|
+
"start": "node index.js",
|
|
202
|
+
"dev": "nodemon index.js"
|
|
203
|
+
},
|
|
204
|
+
"dependencies": {
|
|
205
|
+
"@azure/monitor-opentelemetry": "^1.0.0",
|
|
206
|
+
"fastify": "^4.24.0",
|
|
207
|
+
"dotenv": "^16.0.0"
|
|
208
|
+
},
|
|
209
|
+
"devDependencies": {
|
|
210
|
+
"nodemon": "^3.0.0"
|
|
211
|
+
}
|
|
212
|
+
}
|
|
213
|
+
```
|
|
214
|
+
|
|
215
|
+
## Troubleshooting
|
|
216
|
+
|
|
217
|
+
**No telemetry appearing?**
|
|
218
|
+
- Verify connection string is correct
|
|
219
|
+
- Ensure `useAzureMonitor()` is called BEFORE requiring Fastify
|
|
220
|
+
- Check console for error messages
|
|
221
|
+
- Wait 2-3 minutes for initial data to appear
|
|
222
|
+
|
|
223
|
+
**Fastify logger not working with telemetry?**
|
|
224
|
+
- Both work independently; Fastify logs go to stdout, telemetry goes to Azure
|
|
225
|
+
- Use `@opentelemetry/api` for custom spans within telemetry
|
|
226
|
+
|
|
227
|
+
**Performance impact?**
|
|
228
|
+
- Azure Monitor has minimal overhead (<5% in most cases)
|
|
229
|
+
- Use sampling for high-traffic applications
|
|
230
|
+
- Disable in development if needed
|
|
231
|
+
|
|
232
|
+
## Next Steps
|
|
233
|
+
|
|
234
|
+
- Configure custom dimensions and metrics
|
|
235
|
+
- Set up alerts and dashboards in Azure Portal
|
|
236
|
+
- Enable profiler for performance analysis
|
|
237
|
+
- Add distributed tracing across microservices
|
|
@@ -0,0 +1,310 @@
|
|
|
1
|
+
# Basic Azure Monitor Setup for LangChain.js
|
|
2
|
+
|
|
3
|
+
This guide shows how to add Azure Monitor OpenTelemetry to a LangChain.js application for observability into LLM calls, chains, and agents.
|
|
4
|
+
|
|
5
|
+
## Prerequisites
|
|
6
|
+
|
|
7
|
+
- Node.js 18.x or higher
|
|
8
|
+
- npm or yarn
|
|
9
|
+
- LangChain.js application
|
|
10
|
+
- Azure Application Insights resource
|
|
11
|
+
|
|
12
|
+
## Step 1: Install Package
|
|
13
|
+
|
|
14
|
+
```bash
|
|
15
|
+
npm install @azure/monitor-opentelemetry
|
|
16
|
+
```
|
|
17
|
+
|
|
18
|
+
## Step 2: Create Tracing File
|
|
19
|
+
|
|
20
|
+
Create a separate tracing file to ensure OpenTelemetry initializes before LangChain imports. This is critical for proper instrumentation.
|
|
21
|
+
|
|
22
|
+
**For CommonJS projects** - create `tracing.js`:
|
|
23
|
+
|
|
24
|
+
```javascript
|
|
25
|
+
const { useAzureMonitor } = require('@azure/monitor-opentelemetry');
|
|
26
|
+
|
|
27
|
+
// Enable Azure Monitor integration
|
|
28
|
+
// This must be called before any other imports to ensure proper instrumentation
|
|
29
|
+
useAzureMonitor({
|
|
30
|
+
azureMonitorExporterOptions: {
|
|
31
|
+
connectionString: process.env.APPLICATIONINSIGHTS_CONNECTION_STRING
|
|
32
|
+
}
|
|
33
|
+
});
|
|
34
|
+
```
|
|
35
|
+
|
|
36
|
+
**For ES Module projects** - create `tracing.mjs`:
|
|
37
|
+
|
|
38
|
+
```javascript
|
|
39
|
+
import { useAzureMonitor } from '@azure/monitor-opentelemetry';
|
|
40
|
+
|
|
41
|
+
// Enable Azure Monitor integration
|
|
42
|
+
// This must be called before any other imports to ensure proper instrumentation
|
|
43
|
+
useAzureMonitor({
|
|
44
|
+
azureMonitorExporterOptions: {
|
|
45
|
+
connectionString: process.env.APPLICATIONINSIGHTS_CONNECTION_STRING
|
|
46
|
+
}
|
|
47
|
+
});
|
|
48
|
+
```
|
|
49
|
+
|
|
50
|
+
## Step 3: Import Tracing First
|
|
51
|
+
|
|
52
|
+
Update your main entry point to import tracing **as the very first line**:
|
|
53
|
+
|
|
54
|
+
**For CommonJS** (`index.js`):
|
|
55
|
+
|
|
56
|
+
```javascript
|
|
57
|
+
require('./tracing'); // MUST be the first import
|
|
58
|
+
|
|
59
|
+
const { ChatOpenAI } = require('@langchain/openai');
|
|
60
|
+
const { PromptTemplate } = require('@langchain/core/prompts');
|
|
61
|
+
const { StringOutputParser } = require('@langchain/core/output_parsers');
|
|
62
|
+
|
|
63
|
+
// Your LangChain application code
|
|
64
|
+
async function main() {
|
|
65
|
+
const model = new ChatOpenAI({
|
|
66
|
+
modelName: 'gpt-4',
|
|
67
|
+
temperature: 0.7
|
|
68
|
+
});
|
|
69
|
+
|
|
70
|
+
const prompt = PromptTemplate.fromTemplate(
|
|
71
|
+
'Tell me a short joke about {topic}'
|
|
72
|
+
);
|
|
73
|
+
|
|
74
|
+
const chain = prompt.pipe(model).pipe(new StringOutputParser());
|
|
75
|
+
|
|
76
|
+
const result = await chain.invoke({ topic: 'programming' });
|
|
77
|
+
console.log(result);
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
main().catch(console.error);
|
|
81
|
+
```
|
|
82
|
+
|
|
83
|
+
**For ES Modules** (`index.mjs`):
|
|
84
|
+
|
|
85
|
+
```javascript
|
|
86
|
+
import './tracing.mjs'; // MUST be the first import
|
|
87
|
+
|
|
88
|
+
import { ChatOpenAI } from '@langchain/openai';
|
|
89
|
+
import { PromptTemplate } from '@langchain/core/prompts';
|
|
90
|
+
import { StringOutputParser } from '@langchain/core/output_parsers';
|
|
91
|
+
|
|
92
|
+
// Your LangChain application code
|
|
93
|
+
async function main() {
|
|
94
|
+
const model = new ChatOpenAI({
|
|
95
|
+
modelName: 'gpt-4',
|
|
96
|
+
temperature: 0.7
|
|
97
|
+
});
|
|
98
|
+
|
|
99
|
+
const prompt = PromptTemplate.fromTemplate(
|
|
100
|
+
'Tell me a short joke about {topic}'
|
|
101
|
+
);
|
|
102
|
+
|
|
103
|
+
const chain = prompt.pipe(model).pipe(new StringOutputParser());
|
|
104
|
+
|
|
105
|
+
const result = await chain.invoke({ topic: 'programming' });
|
|
106
|
+
console.log(result);
|
|
107
|
+
}
|
|
108
|
+
|
|
109
|
+
main().catch(console.error);
|
|
110
|
+
```
|
|
111
|
+
|
|
112
|
+
## Step 4: Configure Connection String
|
|
113
|
+
|
|
114
|
+
Create a `.env` file in your project root:
|
|
115
|
+
|
|
116
|
+
```env
|
|
117
|
+
APPLICATIONINSIGHTS_CONNECTION_STRING=InstrumentationKey=00000000-0000-0000-0000-000000000000;IngestionEndpoint=https://...
|
|
118
|
+
OPENAI_API_KEY=your-openai-key
|
|
119
|
+
```
|
|
120
|
+
|
|
121
|
+
Install `dotenv` to load environment variables:
|
|
122
|
+
|
|
123
|
+
```bash
|
|
124
|
+
npm install dotenv
|
|
125
|
+
```
|
|
126
|
+
|
|
127
|
+
Update your tracing file to load environment variables first:
|
|
128
|
+
|
|
129
|
+
```javascript
|
|
130
|
+
require('dotenv').config();
|
|
131
|
+
const { useAzureMonitor } = require('@azure/monitor-opentelemetry');
|
|
132
|
+
// ... rest of tracing setup
|
|
133
|
+
```
|
|
134
|
+
|
|
135
|
+
## Step 5: Add Custom Telemetry (Optional)
|
|
136
|
+
|
|
137
|
+
Track custom attributes for LLM operations:
|
|
138
|
+
|
|
139
|
+
```javascript
|
|
140
|
+
const { trace } = require('@opentelemetry/api');
|
|
141
|
+
|
|
142
|
+
async function processWithTelemetry(userQuery) {
|
|
143
|
+
const span = trace.getActiveSpan();
|
|
144
|
+
|
|
145
|
+
// Add custom attributes
|
|
146
|
+
span?.setAttribute('llm.query.length', userQuery.length);
|
|
147
|
+
span?.setAttribute('llm.model', 'gpt-4');
|
|
148
|
+
span?.setAttribute('operation.type', 'chat-completion');
|
|
149
|
+
|
|
150
|
+
try {
|
|
151
|
+
const result = await chain.invoke({ query: userQuery });
|
|
152
|
+
|
|
153
|
+
// Track response metrics
|
|
154
|
+
span?.setAttribute('llm.response.length', result.length);
|
|
155
|
+
span?.setAttribute('llm.success', true);
|
|
156
|
+
|
|
157
|
+
return result;
|
|
158
|
+
} catch (error) {
|
|
159
|
+
span?.recordException(error);
|
|
160
|
+
span?.setAttribute('llm.success', false);
|
|
161
|
+
throw error;
|
|
162
|
+
}
|
|
163
|
+
}
|
|
164
|
+
```
|
|
165
|
+
|
|
166
|
+
## What Gets Tracked Automatically
|
|
167
|
+
|
|
168
|
+
✅ **HTTP Requests**: Outgoing calls to LLM APIs (OpenAI, Azure OpenAI, etc.)
|
|
169
|
+
✅ **Dependencies**: External service calls and database queries
|
|
170
|
+
✅ **Exceptions**: Errors from LLM providers, rate limits, timeouts
|
|
171
|
+
✅ **Performance**: Latency of LLM calls and chains
|
|
172
|
+
✅ **Token Usage**: When using supported providers
|
|
173
|
+
|
|
174
|
+
## Using with Different LLM Providers
|
|
175
|
+
|
|
176
|
+
### Azure OpenAI
|
|
177
|
+
|
|
178
|
+
```javascript
|
|
179
|
+
const { AzureChatOpenAI } = require('@langchain/openai');
|
|
180
|
+
|
|
181
|
+
const model = new AzureChatOpenAI({
|
|
182
|
+
azureOpenAIApiDeploymentName: process.env.AZURE_OPENAI_DEPLOYMENT,
|
|
183
|
+
azureOpenAIApiVersion: '2024-02-15-preview',
|
|
184
|
+
});
|
|
185
|
+
```
|
|
186
|
+
|
|
187
|
+
### Anthropic Claude
|
|
188
|
+
|
|
189
|
+
```javascript
|
|
190
|
+
const { ChatAnthropic } = require('@langchain/anthropic');
|
|
191
|
+
|
|
192
|
+
const model = new ChatAnthropic({
|
|
193
|
+
modelName: 'claude-3-opus-20240229',
|
|
194
|
+
});
|
|
195
|
+
```
|
|
196
|
+
|
|
197
|
+
## Using with Agents
|
|
198
|
+
|
|
199
|
+
```javascript
|
|
200
|
+
require('./tracing');
|
|
201
|
+
|
|
202
|
+
const { ChatOpenAI } = require('@langchain/openai');
|
|
203
|
+
const { AgentExecutor, createOpenAIToolsAgent } = require('langchain/agents');
|
|
204
|
+
const { TavilySearchResults } = require('@langchain/community/tools/tavily_search');
|
|
205
|
+
const { trace } = require('@opentelemetry/api');
|
|
206
|
+
|
|
207
|
+
async function runAgent(input) {
|
|
208
|
+
const span = trace.getActiveSpan();
|
|
209
|
+
span?.setAttribute('agent.input', input);
|
|
210
|
+
|
|
211
|
+
const tools = [new TavilySearchResults()];
|
|
212
|
+
const model = new ChatOpenAI({ modelName: 'gpt-4' });
|
|
213
|
+
|
|
214
|
+
const agent = await createOpenAIToolsAgent({ llm: model, tools, prompt });
|
|
215
|
+
const executor = new AgentExecutor({ agent, tools });
|
|
216
|
+
|
|
217
|
+
const result = await executor.invoke({ input });
|
|
218
|
+
|
|
219
|
+
span?.setAttribute('agent.steps', result.intermediateSteps?.length || 0);
|
|
220
|
+
return result;
|
|
221
|
+
}
|
|
222
|
+
```
|
|
223
|
+
|
|
224
|
+
## Verify It Works
|
|
225
|
+
|
|
226
|
+
1. Start your application:
|
|
227
|
+
```bash
|
|
228
|
+
node index.js
|
|
229
|
+
```
|
|
230
|
+
|
|
231
|
+
2. Run some LLM operations and check Azure Portal:
|
|
232
|
+
- Navigate to your Application Insights resource
|
|
233
|
+
- Go to "Transaction search" or "Application map"
|
|
234
|
+
- You should see outgoing requests to LLM APIs
|
|
235
|
+
- Check "Dependencies" to see LLM call latencies
|
|
236
|
+
|
|
237
|
+
## Complete package.json Example
|
|
238
|
+
|
|
239
|
+
**CommonJS:**
|
|
240
|
+
```json
|
|
241
|
+
{
|
|
242
|
+
"name": "langchain-azure-monitor-demo",
|
|
243
|
+
"version": "1.0.0",
|
|
244
|
+
"main": "index.js",
|
|
245
|
+
"scripts": {
|
|
246
|
+
"start": "node index.js"
|
|
247
|
+
},
|
|
248
|
+
"dependencies": {
|
|
249
|
+
"@azure/monitor-opentelemetry": "^1.0.0",
|
|
250
|
+
"@langchain/core": "^0.2.0",
|
|
251
|
+
"@langchain/openai": "^0.2.0",
|
|
252
|
+
"dotenv": "^16.0.0"
|
|
253
|
+
}
|
|
254
|
+
}
|
|
255
|
+
```
|
|
256
|
+
|
|
257
|
+
**ES Modules:**
|
|
258
|
+
```json
|
|
259
|
+
{
|
|
260
|
+
"name": "langchain-azure-monitor-demo",
|
|
261
|
+
"version": "1.0.0",
|
|
262
|
+
"type": "module",
|
|
263
|
+
"main": "index.mjs",
|
|
264
|
+
"scripts": {
|
|
265
|
+
"start": "node index.mjs"
|
|
266
|
+
},
|
|
267
|
+
"dependencies": {
|
|
268
|
+
"@azure/monitor-opentelemetry": "^1.0.0",
|
|
269
|
+
"@langchain/core": "^0.2.0",
|
|
270
|
+
"@langchain/openai": "^0.2.0",
|
|
271
|
+
"dotenv": "^16.0.0"
|
|
272
|
+
}
|
|
273
|
+
}
|
|
274
|
+
```
|
|
275
|
+
|
|
276
|
+
## Project Structure
|
|
277
|
+
|
|
278
|
+
```
|
|
279
|
+
my-langchain-app/
|
|
280
|
+
├── tracing.js ← Azure Monitor setup (load first)
|
|
281
|
+
├── index.js ← Main entry point
|
|
282
|
+
├── chains/
|
|
283
|
+
│ └── qa-chain.js ← Your LangChain chains
|
|
284
|
+
├── .env ← Connection strings and API keys
|
|
285
|
+
└── package.json
|
|
286
|
+
```
|
|
287
|
+
|
|
288
|
+
## Troubleshooting
|
|
289
|
+
|
|
290
|
+
**No telemetry appearing?**
|
|
291
|
+
- Verify the tracing import is the FIRST line in your entry file
|
|
292
|
+
- Check that connection string is correct
|
|
293
|
+
- Ensure `dotenv.config()` is called in tracing file before `useAzureMonitor()`
|
|
294
|
+
|
|
295
|
+
**LLM calls not tracked?**
|
|
296
|
+
- Make sure OpenTelemetry initializes BEFORE importing LangChain
|
|
297
|
+
- HTTP instrumentation should capture LLM API calls automatically
|
|
298
|
+
|
|
299
|
+
**ES Module vs CommonJS issues?**
|
|
300
|
+
- Check your `package.json` for `"type": "module"`
|
|
301
|
+
- Use `.mjs` extension for ES modules or `.cjs` for CommonJS
|
|
302
|
+
- Match import/require syntax to your module system
|
|
303
|
+
|
|
304
|
+
## Next Steps
|
|
305
|
+
|
|
306
|
+
- Add custom metrics for token usage and costs
|
|
307
|
+
- Set up alerts for LLM error rates or latency
|
|
308
|
+
- Create dashboards for LLM operation insights
|
|
309
|
+
- Enable distributed tracing for multi-service architectures
|
|
310
|
+
- Track RAG pipeline performance with custom spans
|