a24z 1.0.17 → 1.0.18

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,19 @@
+ declare class HookLogger {
+     private config;
+     constructor(config: {
+         apiUrl: string;
+         accessToken?: string;
+         cliVersion: string;
+     });
+     sendHookData(logEntry: any): Promise<void>;
+     log(obj: any, msg?: string): void;
+     info(obj: any, msg?: string): void;
+     error(obj: any, msg?: string): void;
+     warn(obj: any, msg?: string): void;
+ }
+ declare const createLogger: (config: {
+     apiUrl: string;
+     accessToken?: string;
+     cliVersion: string;
+ }) => HookLogger;
+ export { createLogger };
@@ -0,0 +1 @@
+ {"version":3,"file":"","sourceRoot":"","sources":["file:///Users/brandonin/Projects/a24z-observability/apps/a24z-cli/src/logger.ts"],"names":[],"mappings":"AACA,cAAM,UAAU;IACd,OAAO,CAAC,MAAM,CAA+D;gBAEjE,MAAM,EAAE;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,WAAW,CAAC,EAAE,MAAM,CAAC;QAAC,UAAU,EAAE,MAAM,CAAA;KAAE;IAK1E,YAAY,CAAC,QAAQ,EAAE,GAAG,GAAG,OAAO,CAAC,IAAI,CAAC;IAwBhD,GAAG,CAAC,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,EAAE,MAAM,GAAG,IAAI;IAYjC,IAAI,CAAC,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,EAAE,MAAM,GAAG,IAAI;IAIlC,KAAK,CAAC,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,EAAE,MAAM,GAAG,IAAI;IAInC,IAAI,CAAC,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,EAAE,MAAM,GAAG,IAAI;CAGnC;AAED,QAAA,MAAM,YAAY,GAAI,QAAQ;IAAE,MAAM,EAAE,MAAM,CAAC;IAAC,WAAW,CAAC,EAAE,MAAM,CAAC;IAAC,UAAU,EAAE,MAAM,CAAA;CAAE,eAEzF,CAAC;AAEF,OAAO,EAAE,YAAY,EAAE,CAAC"}
package/dist/logger.js ADDED
@@ -0,0 +1,53 @@
+ // Simple logger for sending hook data to backend
+ class HookLogger {
+     constructor(config) {
+         this.config = config;
+     }
+     // Send hook data to backend
+     async sendHookData(logEntry) {
+         const { level, time, pid, hostname, ...cleanObj } = logEntry;
+         const tool = cleanObj.tool_name || 'claude-code';
+         try {
+             const response = await fetch(`${this.config.apiUrl}/api/v1/${tool}/hooks`, {
+                 method: 'POST',
+                 headers: {
+                     'Content-Type': 'application/json',
+                     'User-Agent': `a24z-cli/${this.config.cliVersion}`,
+                     ...(this.config.accessToken && { 'Authorization': `Bearer ${this.config.accessToken}` }),
+                 },
+                 body: JSON.stringify(cleanObj),
+             });
+             if (!response.ok) {
+                 throw new Error(`HTTP ${response.status}: ${response.statusText}`);
+             }
+         }
+         catch (error) {
+             throw error;
+         }
+     }
+     // Log with optional HTTP transport for hook events
+     log(obj, msg) {
+         // Pretty print to console
+         console.log(JSON.stringify(obj, null, 2));
+         // Send hook events to backend
+         if (obj.hook_event_name) {
+             this.sendHookData(obj).catch((error) => {
+                 console.error('Failed to send hook data:', error instanceof Error ? error.message : String(error));
+             });
+         }
+     }
+     info(obj, msg) {
+         this.log(obj, msg);
+     }
+     error(obj, msg) {
+         this.log(obj, msg);
+     }
+     warn(obj, msg) {
+         this.log(obj, msg);
+     }
+ }
+ const createLogger = (config) => {
+     return new HookLogger(config);
+ };
+ export { createLogger };
+ //# sourceMappingURL=logger.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"logger.js","sourceRoot":"","sources":["../src/logger.ts"],"names":[],"mappings":"AAAA,iDAAiD;AACjD,MAAM,UAAU;IAGd,YAAY,MAAoE;QAC9E,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;IACvB,CAAC;IAED,4BAA4B;IAC5B,KAAK,CAAC,YAAY,CAAC,QAAa;QAC9B,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,GAAG,EAAE,QAAQ,EAAE,GAAG,QAAQ,EAAE,GAAG,QAAQ,CAAC;QAC7D,MAAM,IAAI,GAAG,QAAQ,CAAC,SAAS,IAAI,aAAa,CAAC;QAEjD,IAAI,CAAC;YACH,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,WAAW,IAAI,QAAQ,EAAE;gBACzE,MAAM,EAAE,MAAM;gBACd,OAAO,EAAE;oBACP,cAAc,EAAE,kBAAkB;oBAClC,YAAY,EAAE,YAAY,IAAI,CAAC,MAAM,CAAC,UAAU,EAAE;oBAClD,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,WAAW,IAAI,EAAE,eAAe,EAAE,UAAU,IAAI,CAAC,MAAM,CAAC,WAAW,EAAE,EAAE,CAAC;iBACzF;gBACD,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC;aAC/B,CAAC,CAAC;YAEH,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE,CAAC;gBACjB,MAAM,IAAI,KAAK,CAAC,QAAQ,QAAQ,CAAC,MAAM,KAAK,QAAQ,CAAC,UAAU,EAAE,CAAC,CAAC;YACrE,CAAC;QACH,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,MAAM,KAAK,CAAC;QACd,CAAC;IACH,CAAC;IAED,mDAAmD;IACnD,GAAG,CAAC,GAAQ,EAAE,GAAY;QACxB,0BAA0B;QAC1B,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC,CAAC;QAE1C,8BAA8B;QAC9B,IAAI,GAAG,CAAC,eAAe,EAAE,CAAC;YACxB,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,KAAK,EAAE,EAAE;gBACrC,OAAO,CAAC,KAAK,CAAC,2BAA2B,EAAE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;YACrG,CAAC,CAAC,CAAC;QACL,CAAC;IACH,CAAC;IAED,IAAI,CAAC,GAAQ,EAAE,GAAY;QACzB,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC;IACrB,CAAC;IAED,KAAK,CAAC,GAAQ,EAAE,GAAY;QAC1B,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC;IACrB,CAAC;IAED,IAAI,CAAC,GAAQ,EAAE,GAAY;QACzB,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC;IACrB,CAAC;CACF;AAED,MAAM,YAAY,GAAG,CAAC,MAAoE,EAAE,EAAE;IAC5F,OAAO,IAAI,UAAU,CAAC,MAAM,CAAC,CAAC;AAChC,CAAC,CAAC;AAEF,OAAO,EAAE,YAAY,EAAE,CAAC"}
@@ -0,0 +1,38 @@
+ 'use strict'
+
+ const EE = require('events')
+ const loadTransportStreamBuilder = require('./transport-stream')
+ const { pipeline, PassThrough } = require('stream')
+
+ // This file is not checked by the code coverage tool,
+ // as it is not reliable.
+
+ /* istanbul ignore file */
+
+ module.exports = async function ({ targets }) {
+   const streams = await Promise.all(targets.map(async (t) => {
+     const fn = await loadTransportStreamBuilder(t.target)
+     const stream = await fn(t.options)
+     return stream
+   }))
+   const ee = new EE()
+
+   const stream = new PassThrough({
+     autoDestroy: true,
+     destroy (_, cb) {
+       ee.on('error', cb)
+       ee.on('closed', cb)
+     }
+   })
+
+   pipeline(stream, ...streams, function (err) {
+     if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') {
+       ee.emit('error', err)
+       return
+     }
+
+     ee.emit('closed')
+   })
+
+   return stream
+ }
package/dist/worker.js ADDED
@@ -0,0 +1,54 @@
+ 'use strict'
+
+ const pino = require('../pino.js')
+ const build = require('pino-abstract-transport')
+ const loadTransportStreamBuilder = require('./transport-stream')
+
+ // This file is not checked by the code coverage tool,
+ // as it is not reliable.
+
+ /* istanbul ignore file */
+
+ module.exports = async function ({ targets, levels, dedupe }) {
+   targets = await Promise.all(targets.map(async (t) => {
+     const fn = await loadTransportStreamBuilder(t.target)
+     const stream = await fn(t.options)
+     return {
+       level: t.level,
+       stream
+     }
+   }))
+   return build(process, {
+     parse: 'lines',
+     metadata: true,
+     close (err, cb) {
+       let expected = 0
+       for (const transport of targets) {
+         expected++
+         transport.stream.on('close', closeCb)
+         transport.stream.end()
+       }
+
+       function closeCb () {
+         if (--expected === 0) {
+           cb(err)
+         }
+       }
+     }
+   })
+
+   function process (stream) {
+     const multi = pino.multistream(targets, { levels, dedupe })
+     // TODO manage backpressure
+     stream.on('data', function (chunk) {
+       const { lastTime, lastMsg, lastObj, lastLevel } = this
+       multi.lastLevel = lastLevel
+       multi.lastTime = lastTime
+       multi.lastMsg = lastMsg
+       multi.lastObj = lastObj
+
+       // TODO handle backpressure
+       multi.write(chunk + '\n')
+     })
+   }
+ }
@@ -0,0 +1,171 @@
+ 'use strict'
+
+ const { realImport, realRequire } = require('real-require')
+ const { workerData, parentPort } = require('worker_threads')
+ const { WRITE_INDEX, READ_INDEX } = require('./indexes')
+ const { waitDiff } = require('./wait')
+
+ const {
+   dataBuf,
+   filename,
+   stateBuf
+ } = workerData
+
+ let destination
+
+ const state = new Int32Array(stateBuf)
+ const data = Buffer.from(dataBuf)
+
+ async function start () {
+   let worker
+   try {
+     if (filename.endsWith('.ts') || filename.endsWith('.cts')) {
+       // TODO: add support for the TSM modules loader ( https://github.com/lukeed/tsm ).
+       if (!process[Symbol.for('ts-node.register.instance')]) {
+         realRequire('ts-node/register')
+       } else if (process.env.TS_NODE_DEV) {
+         realRequire('ts-node-dev')
+       }
+       // TODO: Support ES imports once tsc, tap & ts-node provide better compatibility guarantees.
+       // Remove extra forwardslash on Windows
+       worker = realRequire(decodeURIComponent(filename.replace(process.platform === 'win32' ? 'file:///' : 'file://', '')))
+     } else {
+       worker = (await realImport(filename))
+     }
+   } catch (error) {
+     // A yarn user that tries to start a ThreadStream for an external module
+     // provides a filename pointing to a zip file.
+     // eg. require.resolve('pino-elasticsearch') // returns /foo/pino-elasticsearch-npm-6.1.0-0c03079478-6915435172.zip/bar.js
+     // The `import` will fail to try to load it.
+     // This catch block executes the `require` fallback to load the module correctly.
+     // In fact, yarn modifies the `require` function to manage the zipped path.
+     // More details at https://github.com/pinojs/pino/pull/1113
+     // The error codes may change based on the node.js version (ENOTDIR > 12, ERR_MODULE_NOT_FOUND <= 12 )
+     if ((error.code === 'ENOTDIR' || error.code === 'ERR_MODULE_NOT_FOUND') &&
+       filename.startsWith('file://')) {
+       worker = realRequire(decodeURIComponent(filename.replace('file://', '')))
+     } else if (error.code === undefined || error.code === 'ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING') {
+       // When bundled with pkg, an undefined error is thrown when called with realImport
+       // When bundled with pkg and using node v20, an ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING error is thrown when called with realImport
+       // More info at: https://github.com/pinojs/thread-stream/issues/143
+       worker = realRequire(decodeURIComponent(filename.replace(process.platform === 'win32' ? 'file:///' : 'file://', '')))
+     } else {
+       throw error
+     }
+   }
+
+   // Depending on how the default export is performed, and on how the code is
+   // transpiled, we may find cases of two nested "default" objects.
+   // See https://github.com/pinojs/pino/issues/1243#issuecomment-982774762
+   if (typeof worker === 'object') worker = worker.default
+   if (typeof worker === 'object') worker = worker.default
+
+   destination = await worker(workerData.workerData)
+
+   destination.on('error', function (err) {
+     Atomics.store(state, WRITE_INDEX, -2)
+     Atomics.notify(state, WRITE_INDEX)
+
+     Atomics.store(state, READ_INDEX, -2)
+     Atomics.notify(state, READ_INDEX)
+
+     parentPort.postMessage({
+       code: 'ERROR',
+       err
+     })
+   })
+
+   destination.on('close', function () {
+     // process._rawDebug('worker close emitted')
+     const end = Atomics.load(state, WRITE_INDEX)
+     Atomics.store(state, READ_INDEX, end)
+     Atomics.notify(state, READ_INDEX)
+     setImmediate(() => {
+       process.exit(0)
+     })
+   })
+ }
+
+ // No .catch() handler,
+ // in case there is an error it goes
+ // to unhandledRejection
+ start().then(function () {
+   parentPort.postMessage({
+     code: 'READY'
+   })
+
+   process.nextTick(run)
+ })
+
+ function run () {
+   const current = Atomics.load(state, READ_INDEX)
+   const end = Atomics.load(state, WRITE_INDEX)
+
+   // process._rawDebug(`pre state ${current} ${end}`)
+
+   if (end === current) {
+     if (end === data.length) {
+       waitDiff(state, READ_INDEX, end, Infinity, run)
+     } else {
+       waitDiff(state, WRITE_INDEX, end, Infinity, run)
+     }
+     return
+   }
+
+   // process._rawDebug(`post state ${current} ${end}`)
+
+   if (end === -1) {
+     // process._rawDebug('end')
+     destination.end()
+     return
+   }
+
+   const toWrite = data.toString('utf8', current, end)
+   // process._rawDebug('worker writing: ' + toWrite)
+
+   const res = destination.write(toWrite)
+
+   if (res) {
+     Atomics.store(state, READ_INDEX, end)
+     Atomics.notify(state, READ_INDEX)
+     setImmediate(run)
+   } else {
+     destination.once('drain', function () {
+       Atomics.store(state, READ_INDEX, end)
+       Atomics.notify(state, READ_INDEX)
+       run()
+     })
+   }
+ }
+
+ process.on('unhandledRejection', function (err) {
+   parentPort.postMessage({
+     code: 'ERROR',
+     err
+   })
+   process.exit(1)
+ })
+
+ process.on('uncaughtException', function (err) {
+   parentPort.postMessage({
+     code: 'ERROR',
+     err
+   })
+   process.exit(1)
+ })
+
+ process.once('exit', exitCode => {
+   if (exitCode !== 0) {
+     process.exit(exitCode)
+     return
+   }
+
+   if (destination?.writableNeedDrain && !destination?.writableEnded) {
+     parentPort.postMessage({
+       code: 'WARNING',
+       err: new Error('ThreadStream: process exited before destination stream was drained. this may indicate that the destination stream try to write to a another missing stream')
+     })
+   }
+
+   process.exit(0)
+ })
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "a24z",
-   "version": "1.0.17",
+   "version": "1.0.18",
    "description": "AI Agent Observability CLI - Monitor and analyze your AI tool performance",
    "main": "dist/index.js",
    "bin": {
@@ -25,7 +25,6 @@
    "author": "a24z",
    "homepage": "https://a24z.ai",
    "dependencies": {
-     "axios": "^1.6.2",
      "chalk": "^5.3.0",
      "commander": "^11.2.0",
      "inquirer": "^9.2.12",