effect-gpt 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +50 -0
- package/data/chat_training_data.json +55 -0
- package/data/pretraining_data.json +27 -0
- package/package.json +25 -0
- package/src/cli/errors.ts +51 -0
- package/src/cli/main.ts +163 -0
- package/src/config.ts +3 -0
- package/src/data/Dataset.ts +168 -0
- package/src/errors.ts +73 -0
- package/src/index.ts +88 -0
- package/src/model/Embeddings.ts +108 -0
- package/src/model/FeedForward.ts +121 -0
- package/src/model/LLM.ts +124 -0
- package/src/model/LayerNorm.ts +138 -0
- package/src/model/ModelLayer.ts +10 -0
- package/src/model/OutputProjection.ts +76 -0
- package/src/model/SelfAttention.ts +169 -0
- package/src/model/TransformerBlock.ts +53 -0
- package/src/services/Logger.ts +124 -0
- package/src/services/Metrics.ts +260 -0
- package/src/services/Random.ts +98 -0
- package/src/services/SeedLayer.ts +39 -0
- package/src/services/index.ts +32 -0
- package/src/tensor/Tensor2D.ts +42 -0
- package/src/tensor/ops.ts +371 -0
- package/src/tensor/random.ts +32 -0
- package/src/tokenize/split.ts +27 -0
- package/src/tokenize/tokenize.ts +28 -0
- package/src/training/Adam.ts +61 -0
- package/src/training/clip.ts +16 -0
- package/src/training/loss.ts +35 -0
- package/src/training/train.ts +203 -0
- package/src/vocab/Vocab.ts +79 -0
- package/tests/fixtures/csv_bad.csv +2 -0
- package/tests/fixtures/csv_good.csv +3 -0
- package/tests/ts/cli_error_format.test.ts +26 -0
- package/tests/ts/dataset.test.ts +35 -0
- package/tests/ts/embeddings.test.ts +81 -0
- package/tests/ts/errors.test.ts +36 -0
- package/tests/ts/feed_forward.test.ts +74 -0
- package/tests/ts/initNormal.test.ts +41 -0
- package/tests/ts/layer_norm.test.ts +96 -0
- package/tests/ts/llm_parameters.test.ts +96 -0
- package/tests/ts/llm_predict.test.ts +98 -0
- package/tests/ts/llm_tokenize.test.ts +69 -0
- package/tests/ts/output_projection.test.ts +78 -0
- package/tests/ts/random.test.ts +44 -0
- package/tests/ts/self_attention.test.ts +63 -0
- package/tests/ts/support/factories.ts +126 -0
- package/tests/ts/support/runEffect.ts +29 -0
- package/tests/ts/support/seed.ts +12 -0
- package/tests/ts/support/stubs.ts +58 -0
- package/tests/ts/support/tensorMatchers.ts +96 -0
- package/tests/ts/support.test.ts +165 -0
- package/tests/ts/train_loop.test.ts +229 -0
- package/tests/ts/transformer_block.test.ts +72 -0
- package/tsconfig.json +20 -0
- package/tsconfig.test.json +8 -0
package/src/model/SelfAttention.ts

```diff
@@ -0,0 +1,169 @@
+import * as Effect from "effect/Effect"
+import * as FiberId from "effect/FiberId"
+import type { Tensor2D } from "../tensor/Tensor2D"
+import * as T from "../tensor/Tensor2D"
+import * as Ops from "../tensor/ops"
+import type { ShapeError } from "../tensor/ops"
+import type { ModelLayer } from "./ModelLayer"
+import { EMBEDDING_DIM } from "../config"
+import { Adam } from "../training/Adam"
+import type { Rng } from "../tensor/random"
+
+export class SelfAttention implements ModelLayer {
+  readonly _tag = "SelfAttention"
+  readonly embeddingDim: number
+  wQ: Tensor2D
+  wK: Tensor2D
+  wV: Tensor2D
+
+  private cache = new Map<number | string, Tensor2D>()
+  private lastCache: Tensor2D | null = null
+  optimizerWQ: Adam
+  optimizerWK: Adam
+  optimizerWV: Adam
+
+  constructor(embeddingDim: number = EMBEDDING_DIM, rng: Rng) {
+    this.embeddingDim = embeddingDim
+    const std = Math.sqrt(2.0 / embeddingDim)
+    this.wQ = Ops.initNormal(embeddingDim, embeddingDim, 0, std, rng)
+    this.wK = Ops.initNormal(embeddingDim, embeddingDim, 0, std, rng)
+    this.wV = Ops.initNormal(embeddingDim, embeddingDim, 0, std, rng)
+    this.optimizerWQ = Adam.make(embeddingDim, embeddingDim)
+    this.optimizerWK = Adam.make(embeddingDim, embeddingDim)
+    this.optimizerWV = Adam.make(embeddingDim, embeddingDim)
+  }
+
+  private fiberKey(fiberId: FiberId.FiberId): number | string {
+    return FiberId.isRuntime(fiberId) ? fiberId.id : JSON.stringify(fiberId)
+  }
+
+  get parametersCount(): number {
+    return this.wQ.data.length + this.wK.data.length + this.wV.data.length
+  }
+
+  private computeQKV(input: Tensor2D): Effect.Effect<{ q: Tensor2D; k: Tensor2D; v: Tensor2D }, ShapeError> {
+    return Effect.gen(this, function* () {
+      const q: Tensor2D = yield* Ops.matMul(input, this.wQ)
+      const k: Tensor2D = yield* Ops.matMul(input, this.wK)
+      const v: Tensor2D = yield* Ops.matMul(input, this.wV)
+      return { q, k, v }
+    })
+  }
+
+  private attention(q: Tensor2D, k: Tensor2D, v: Tensor2D): Effect.Effect<Tensor2D, ShapeError> {
+    return Effect.gen(this, function* () {
+      const dk = Math.sqrt(this.embeddingDim)
+      const kT = Ops.transpose(k)
+      const scores = yield* Ops.matMul(q, kT)
+      const scaledScores = Ops.mulScalar(scores, 1 / dk)
+
+      const seqLen = scaledScores.rows
+      for (let i = 0; i < seqLen; i++) {
+        for (let j = i + 1; j < seqLen; j++) {
+          T.set(scaledScores, i, j, -Infinity)
+        }
+      }
+
+      const weights = Ops.softmaxRows(scaledScores)
+      const attended = yield* Ops.matMul(weights, v)
+      return attended
+    })
+  }
+
+  forward(input: Tensor2D): Effect.Effect<Tensor2D, ShapeError> {
+    return Effect.gen(this, function* () {
+      const fiberId = yield* Effect.fiberId
+      const key = this.fiberKey(fiberId)
+      const cloned = T.clone(input)
+      this.cache.set(key, cloned)
+      this.lastCache = cloned
+      const { q, k, v } = yield* this.computeQKV(input)
+      const attended = yield* this.attention(q, k, v)
+      const output = yield* Ops.add(attended, input)
+      return output
+    })
+  }
+
+  private static softmaxBackward(softmaxOutput: Tensor2D, gradOutput: Tensor2D): Tensor2D {
+    const gradInput = T.zeros(softmaxOutput.rows, softmaxOutput.cols)
+    for (let i = 0; i < softmaxOutput.rows; i++) {
+      let dot = 0
+      const rowOffset = i * softmaxOutput.cols
+      for (let j = 0; j < softmaxOutput.cols; j++) {
+        dot += softmaxOutput.data[rowOffset + j] * gradOutput.data[rowOffset + j]
+      }
+      for (let j = 0; j < softmaxOutput.cols; j++) {
+        const y = softmaxOutput.data[rowOffset + j]
+        const dy = gradOutput.data[rowOffset + j]
+        gradInput.data[rowOffset + j] = y * (dy - dot)
+      }
+    }
+    return gradInput
+  }
+
+  backward(dOut: Tensor2D, lr: number): Effect.Effect<Tensor2D, ShapeError> {
+    return Effect.gen(this, function* () {
+      const fiberId = yield* Effect.fiberId
+      const key = this.fiberKey(fiberId)
+      const cachedInput = this.cache.get(key) ?? this.lastCache
+      if (!cachedInput) {
+        return yield* Effect.fail(new Ops.ShapeError("SelfAttention.backward called before forward"))
+      }
+      this.cache.delete(key)
+      this.lastCache = null
+
+      const input = cachedInput
+      const q = yield* Ops.matMul(input, this.wQ)
+      const k = yield* Ops.matMul(input, this.wK)
+      const v = yield* Ops.matMul(input, this.wV)
+      const scale = Math.sqrt(this.wQ.cols)
+
+      const kT = Ops.transpose(k)
+      const scores = yield* Ops.matMul(q, kT)
+      const scaledScores = Ops.mulScalar(scores, 1 / scale)
+
+      const seqLen = scaledScores.rows
+      for (let i = 0; i < seqLen; i++) {
+        for (let j = i + 1; j < seqLen; j++) {
+          T.set(scaledScores, i, j, -Infinity)
+        }
+      }
+
+      const attnWeights = Ops.softmaxRows(scaledScores)
+
+      const vT = Ops.transpose(v)
+      const gradAttnWeights = yield* Ops.matMul(dOut, vT)
+      const attnWeightsT = Ops.transpose(attnWeights)
+      const gradV = yield* Ops.matMul(attnWeightsT, dOut)
+
+      const gradScores = SelfAttention.softmaxBackward(attnWeights, gradAttnWeights)
+      const gradScoresScaled = Ops.mulScalar(gradScores, 1 / scale)
+
+      const gradQ = yield* Ops.matMul(gradScoresScaled, k)
+      const gradScoresScaledT = Ops.transpose(gradScoresScaled)
+      const gradK = yield* Ops.matMul(gradScoresScaledT, q)
+
+      const inputT = Ops.transpose(input)
+      const gradWQ = yield* Ops.matMul(inputT, gradQ)
+      const gradWK = yield* Ops.matMul(inputT, gradK)
+      const gradWV = yield* Ops.matMul(inputT, gradV)
+
+      const wQT = Ops.transpose(this.wQ)
+      const wKT = Ops.transpose(this.wK)
+      const wVT = Ops.transpose(this.wV)
+      const gradInputQ = yield* Ops.matMul(gradQ, wQT)
+      const gradInputK = yield* Ops.matMul(gradK, wKT)
+      const gradInputV = yield* Ops.matMul(gradV, wVT)
+
+      const gradInputAttention = yield* Ops.add(gradInputQ, gradInputK)
+      const gradInputAttention2 = yield* Ops.add(gradInputAttention, gradInputV)
+      const gradInput = yield* Ops.add(gradInputAttention2, dOut)
+
+      this.optimizerWQ.step(this.wQ, gradWQ, lr)
+      this.optimizerWK.step(this.wK, gradWK, lr)
+      this.optimizerWV.step(this.wV, gradWV, lr)
+
+      return gradInput
+    })
+  }
+}
```
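For reference, a minimal usage sketch of the layer above (not part of the package). The diff does not include `src/tensor/random.ts`, so the `Rng` value is left abstract rather than inventing a factory for it; the relative import paths assume the sketch sits next to `SelfAttention.ts`.

```ts
// Illustrative sketch only. Assumes an Rng obtained from the package's
// tensor/random module, whose factory is not shown in this diff.
import * as Effect from "effect/Effect"
import * as T from "../tensor/Tensor2D"
import type { Rng } from "../tensor/random"
import { SelfAttention } from "./SelfAttention"

declare const rng: Rng // placeholder: construct via the package's Rng factory

const layer = new SelfAttention(8, rng)              // embeddingDim = 8
const input = T.zeros(4, 8)                          // seqLen = 4, embeddingDim = 8
const output = Effect.runSync(layer.forward(input))  // causal attention + residual
console.log(layer.parametersCount)                   // wQ + wK + wV = 3 * 8 * 8 = 192
```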
package/src/model/TransformerBlock.ts

```diff
@@ -0,0 +1,53 @@
+import * as Effect from "effect/Effect"
+import type { Tensor2D } from "../tensor/Tensor2D"
+import type { ShapeError } from "../tensor/ops"
+import type { ModelLayer } from "./ModelLayer"
+import { SelfAttention } from "./SelfAttention"
+import { FeedForward } from "./FeedForward"
+import { LayerNorm } from "./LayerNorm"
+import { EMBEDDING_DIM, HIDDEN_DIM } from "../config"
+import type { Rng } from "../tensor/random"
+
+export class TransformerBlock implements ModelLayer {
+  readonly _tag = "TransformerBlock"
+  attention: SelfAttention
+  feedForward: FeedForward
+  norm1: LayerNorm
+  norm2: LayerNorm
+
+  constructor(embeddingDim: number = EMBEDDING_DIM, hiddenDim: number = HIDDEN_DIM, rng: Rng) {
+    this.attention = new SelfAttention(embeddingDim, rng)
+    this.feedForward = new FeedForward(embeddingDim, hiddenDim, rng)
+    this.norm1 = new LayerNorm(embeddingDim)
+    this.norm2 = new LayerNorm(embeddingDim)
+  }
+
+  get parametersCount(): number {
+    return (
+      this.attention.parametersCount +
+      this.feedForward.parametersCount +
+      this.norm1.parametersCount +
+      this.norm2.parametersCount
+    )
+  }
+
+  forward(input: Tensor2D): Effect.Effect<Tensor2D, ShapeError> {
+    return Effect.gen(this, function* () {
+      const attentionOut: Tensor2D = yield* this.attention.forward(input)
+      const norm1Out: Tensor2D = yield* this.norm1.forward(attentionOut)
+      const ffnOut: Tensor2D = yield* this.feedForward.forward(norm1Out)
+      const norm2Out: Tensor2D = yield* this.norm2.forward(ffnOut)
+      return norm2Out
+    })
+  }
+
+  backward(dOut: Tensor2D, lr: number): Effect.Effect<Tensor2D, ShapeError> {
+    return Effect.gen(this, function* () {
+      let grad: Tensor2D = yield* this.norm2.backward(dOut, lr)
+      grad = yield* this.feedForward.backward(grad, lr)
+      grad = yield* this.norm1.backward(grad, lr)
+      grad = yield* this.attention.backward(grad, lr)
+      return grad
+    })
+  }
+}
```
package/src/services/Logger.ts

```diff
@@ -0,0 +1,124 @@
+import * as Effect from "effect/Effect"
+import * as Context from "effect/Context"
+import * as Layer from "effect/Layer"
+import { Terminal } from "@effect/platform"
+
+export type LogLevel = "debug" | "info" | "warn" | "error"
+
+const LOG_LEVEL_PRIORITY: Record<LogLevel, number> = {
+  debug: 0,
+  info: 1,
+  warn: 2,
+  error: 3
+}
+
+export interface LoggerService {
+  readonly log: (level: LogLevel, message: string, data?: Record<string, unknown>) => Effect.Effect<void>
+  readonly debug: (message: string, data?: Record<string, unknown>) => Effect.Effect<void>
+  readonly info: (message: string, data?: Record<string, unknown>) => Effect.Effect<void>
+  readonly warn: (message: string, data?: Record<string, unknown>) => Effect.Effect<void>
+  readonly error: (message: string, data?: Record<string, unknown>) => Effect.Effect<void>
+}
+
+export interface LoggerServiceId {
+  readonly LoggerService: unique symbol
+}
+
+export const Logger = Context.GenericTag<LoggerServiceId, LoggerService>("LoggerService")
+
+type Formatter = (level: LogLevel, message: string, data?: Record<string, unknown>) => string
+
+const formatStructured: Formatter = (level, message, data) => {
+  const timestamp = new Date().toISOString()
+  const dataStr = data ? ` ${JSON.stringify(data)}` : ""
+  return `[${timestamp}] [${level.toUpperCase()}] ${message}${dataStr}\n`
+}
+
+const formatPretty: Formatter = (level, message, data) => {
+  const prefix: Record<LogLevel, string> = { debug: "🔍", info: "ℹ️ ", warn: "⚠️ ", error: "❌" }
+  const dataStr = data ? ` ${JSON.stringify(data)}` : ""
+  return `${prefix[level]} ${message}${dataStr}\n`
+}
+
+const makeLogger = (
+  minLevel: LogLevel,
+  write: (msg: string) => Effect.Effect<void>,
+  format: Formatter = formatStructured
+): LoggerService => ({
+  log: (level, message, data) =>
+    LOG_LEVEL_PRIORITY[level] >= LOG_LEVEL_PRIORITY[minLevel]
+      ? write(format(level, message, data))
+      : Effect.void,
+  debug: (message, data) =>
+    LOG_LEVEL_PRIORITY.debug >= LOG_LEVEL_PRIORITY[minLevel]
+      ? write(format("debug", message, data))
+      : Effect.void,
+  info: (message, data) =>
+    LOG_LEVEL_PRIORITY.info >= LOG_LEVEL_PRIORITY[minLevel]
+      ? write(format("info", message, data))
+      : Effect.void,
+  warn: (message, data) =>
+    LOG_LEVEL_PRIORITY.warn >= LOG_LEVEL_PRIORITY[minLevel]
+      ? write(format("warn", message, data))
+      : Effect.void,
+  error: (message, data) =>
+    LOG_LEVEL_PRIORITY.error >= LOG_LEVEL_PRIORITY[minLevel]
+      ? write(format("error", message, data))
+      : Effect.void
+})
+
+export const ConsoleLoggerLive = (minLevel: LogLevel = "info"): Layer.Layer<LoggerServiceId> =>
+  Layer.succeed(
+    Logger,
+    makeLogger(minLevel, (msg) => Effect.sync(() => process.stdout.write(msg)))
+  )
+
+export const TerminalLoggerLive = (
+  minLevel: LogLevel = "info"
+): Layer.Layer<LoggerServiceId, never, Terminal.Terminal> =>
+  Layer.effect(
+    Logger,
+    Effect.gen(function* () {
+      const terminal = yield* Terminal.Terminal
+      return makeLogger(minLevel, (msg) =>
+        terminal.display(msg).pipe(
+          Effect.catchAll(() => Effect.void),
+          Effect.asVoid
+        )
+      )
+    })
+  )
+
+export const NullLoggerLive: Layer.Layer<LoggerServiceId> = Layer.succeed(
+  Logger,
+  makeLogger("error", () => Effect.void)
+)
+
+export const SilentLoggerLive: Layer.Layer<LoggerServiceId> = Layer.succeed(Logger, {
+  log: () => Effect.void,
+  debug: () => Effect.void,
+  info: () => Effect.void,
+  warn: () => Effect.void,
+  error: () => Effect.void
+})
+
+export const PrettyLoggerLive = (minLevel: LogLevel = "info"): Layer.Layer<LoggerServiceId> =>
+  Layer.succeed(
+    Logger,
+    makeLogger(minLevel, (msg) => Effect.sync(() => process.stdout.write(msg)), formatPretty)
+  )
+
+export const log = (level: LogLevel, message: string, data?: Record<string, unknown>) =>
+  Effect.flatMap(Logger, (logger) => logger.log(level, message, data))
+
+export const debug = (message: string, data?: Record<string, unknown>) =>
+  Effect.flatMap(Logger, (logger) => logger.debug(message, data))
+
+export const info = (message: string, data?: Record<string, unknown>) =>
+  Effect.flatMap(Logger, (logger) => logger.info(message, data))
+
+export const warn = (message: string, data?: Record<string, unknown>) =>
+  Effect.flatMap(Logger, (logger) => logger.warn(message, data))
+
+export const error = (message: string, data?: Record<string, unknown>) =>
+  Effect.flatMap(Logger, (logger) => logger.error(message, data))
```
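For reference, a minimal usage sketch of the logger service above (not part of the package). It assumes the sketch sits next to `Logger.ts` and uses `Effect.provide` and `Effect.runPromise` from the `effect` runtime.

```ts
// Illustrative sketch only: run a small program against ConsoleLoggerLive.
import * as Effect from "effect/Effect"
import { ConsoleLoggerLive, info, warn } from "./Logger"

const program = Effect.gen(function* () {
  yield* info("training started", { epochs: 3 })
  yield* warn("loss is not decreasing")
})

// "debug" lowers the minimum level; messages below minLevel become Effect.void.
Effect.runPromise(program.pipe(Effect.provide(ConsoleLoggerLive("debug"))))
```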
package/src/services/Metrics.ts

```diff
@@ -0,0 +1,260 @@
+import * as Effect from "effect/Effect"
+import * as Context from "effect/Context"
+import * as Layer from "effect/Layer"
+import * as Clock from "effect/Clock"
+import * as Ref from "effect/Ref"
+import * as HashMap from "effect/HashMap"
+
+export interface Counter {
+  readonly inc: (n?: number) => Effect.Effect<void>
+  readonly get: () => Effect.Effect<number>
+}
+
+export interface Gauge {
+  readonly set: (value: number) => Effect.Effect<void>
+  readonly get: () => Effect.Effect<number>
+}
+
+export interface Histogram {
+  readonly observe: (value: number) => Effect.Effect<void>
+  readonly getStats: () => Effect.Effect<{ count: number; sum: number; min: number; max: number; mean: number }>
+}
+
+export interface TimingResult<A> {
+  readonly value: A
+  readonly durationMs: number
+}
+
+export interface MetricsService {
+  readonly counter: (name: string) => Effect.Effect<Counter>
+  readonly gauge: (name: string) => Effect.Effect<Gauge>
+  readonly histogram: (name: string) => Effect.Effect<Histogram>
+  readonly timed: <A, E, R>(label: string, effect: Effect.Effect<A, E, R>) => Effect.Effect<TimingResult<A>, E, R>
+  readonly snapshot: () => Effect.Effect<MetricsSnapshot>
+}
+
+export interface MetricsSnapshot {
+  readonly counters: ReadonlyArray<{ name: string; value: number }>
+  readonly gauges: ReadonlyArray<{ name: string; value: number }>
+  readonly histograms: ReadonlyArray<{
+    name: string
+    count: number
+    sum: number
+    min: number
+    max: number
+    mean: number
+  }>
+  readonly timings: ReadonlyArray<{ label: string; durationMs: number }>
+}
+
+export interface MetricsServiceId {
+  readonly MetricsService: unique symbol
+}
+
+export const Metrics = Context.GenericTag<MetricsServiceId, MetricsService>("MetricsService")
+
+interface InMemoryState {
+  counters: HashMap.HashMap<string, Ref.Ref<number>>
+  gauges: HashMap.HashMap<string, Ref.Ref<number>>
+  histograms: HashMap.HashMap<string, Ref.Ref<Array<number>>>
+  timings: Ref.Ref<Array<{ label: string; durationMs: number }>>
+}
+
+const makeInMemoryMetrics = (): Effect.Effect<MetricsService> =>
+  Effect.gen(function* () {
+    const stateRef = yield* Ref.make<InMemoryState>({
+      counters: HashMap.empty(),
+      gauges: HashMap.empty(),
+      histograms: HashMap.empty(),
+      timings: yield* Ref.make<Array<{ label: string; durationMs: number }>>([])
+    })
+
+    const getOrCreateCounter = (name: string): Effect.Effect<Ref.Ref<number>> =>
+      Effect.gen(function* () {
+        const state = yield* Ref.get(stateRef)
+        const existing = HashMap.get(state.counters, name)
+        if (existing._tag === "Some") {
+          return existing.value
+        }
+        const newRef = yield* Ref.make(0)
+        yield* Ref.update(stateRef, (s) => ({
+          ...s,
+          counters: HashMap.set(s.counters, name, newRef)
+        }))
+        return newRef
+      })
+
+    const getOrCreateGauge = (name: string): Effect.Effect<Ref.Ref<number>> =>
+      Effect.gen(function* () {
+        const state = yield* Ref.get(stateRef)
+        const existing = HashMap.get(state.gauges, name)
+        if (existing._tag === "Some") {
+          return existing.value
+        }
+        const newRef = yield* Ref.make(0)
+        yield* Ref.update(stateRef, (s) => ({
+          ...s,
+          gauges: HashMap.set(s.gauges, name, newRef)
+        }))
+        return newRef
+      })
+
+    const getOrCreateHistogram = (name: string): Effect.Effect<Ref.Ref<Array<number>>> =>
+      Effect.gen(function* () {
+        const state = yield* Ref.get(stateRef)
+        const existing = HashMap.get(state.histograms, name)
+        if (existing._tag === "Some") {
+          return existing.value
+        }
+        const newRef = yield* Ref.make<Array<number>>([])
+        yield* Ref.update(stateRef, (s) => ({
+          ...s,
+          histograms: HashMap.set(s.histograms, name, newRef)
+        }))
+        return newRef
+      })
+
+    const service: MetricsService = {
+      counter: (name) =>
+        Effect.gen(function* () {
+          const ref = yield* getOrCreateCounter(name)
+          return {
+            inc: (n = 1) => Ref.update(ref, (v) => v + n),
+            get: () => Ref.get(ref)
+          }
+        }),
+
+      gauge: (name) =>
+        Effect.gen(function* () {
+          const ref = yield* getOrCreateGauge(name)
+          return {
+            set: (value) => Ref.set(ref, value),
+            get: () => Ref.get(ref)
+          }
+        }),
+
+      histogram: (name) =>
+        Effect.gen(function* () {
+          const ref = yield* getOrCreateHistogram(name)
+          return {
+            observe: (value) => Ref.update(ref, (arr) => [...arr, value]),
+            getStats: () =>
+              Effect.gen(function* () {
+                const values = yield* Ref.get(ref)
+                if (values.length === 0) {
+                  return { count: 0, sum: 0, min: 0, max: 0, mean: 0 }
+                }
+                const sum = values.reduce((a, b) => a + b, 0)
+                return {
+                  count: values.length,
+                  sum,
+                  min: Math.min(...values),
+                  max: Math.max(...values),
+                  mean: sum / values.length
+                }
+              })
+          }
+        }),
+
+      timed: (label, effect) =>
+        Effect.gen(function* () {
+          const start = yield* Clock.currentTimeMillis
+          const value = yield* effect
+          const end = yield* Clock.currentTimeMillis
+          const durationMs = Number(end - start)
+          const state = yield* Ref.get(stateRef)
+          yield* Ref.update(state.timings, (arr) => [...arr, { label, durationMs }])
+          return { value, durationMs }
+        }),
+
+      snapshot: () =>
+        Effect.gen(function* () {
+          const state = yield* Ref.get(stateRef)
+
+          const counters: Array<{ name: string; value: number }> = []
+          for (const [name, ref] of HashMap.entries(state.counters)) {
+            const value = yield* Ref.get(ref)
+            counters.push({ name, value })
+          }
+
+          const gauges: Array<{ name: string; value: number }> = []
+          for (const [name, ref] of HashMap.entries(state.gauges)) {
+            const value = yield* Ref.get(ref)
+            gauges.push({ name, value })
+          }
+
+          const histograms: Array<{
+            name: string
+            count: number
+            sum: number
+            min: number
+            max: number
+            mean: number
+          }> = []
+          for (const [name, ref] of HashMap.entries(state.histograms)) {
+            const values = yield* Ref.get(ref)
+            if (values.length > 0) {
+              const sum = values.reduce((a, b) => a + b, 0)
+              histograms.push({
+                name,
+                count: values.length,
+                sum,
+                min: Math.min(...values),
+                max: Math.max(...values),
+                mean: sum / values.length
+              })
+            }
+          }
+
+          const timings = yield* Ref.get(state.timings)
+
+          return { counters, gauges, histograms, timings }
+        })
+    }
+
+    return service
+  })
+
+export const InMemoryMetricsLive: Layer.Layer<MetricsServiceId> = Layer.effect(Metrics, makeInMemoryMetrics())
+
+const noOpCounter: Counter = {
+  inc: () => Effect.void,
+  get: () => Effect.succeed(0)
+}
+
+const noOpGauge: Gauge = {
+  set: () => Effect.void,
+  get: () => Effect.succeed(0)
+}
+
+const noOpHistogram: Histogram = {
+  observe: () => Effect.void,
+  getStats: () => Effect.succeed({ count: 0, sum: 0, min: 0, max: 0, mean: 0 })
+}
+
+const noOpMetrics: MetricsService = {
+  counter: () => Effect.succeed(noOpCounter),
+  gauge: () => Effect.succeed(noOpGauge),
+  histogram: () => Effect.succeed(noOpHistogram),
+  timed: (_, effect) => Effect.map(effect, (value) => ({ value, durationMs: 0 })),
+  snapshot: () =>
+    Effect.succeed({
+      counters: [],
+      gauges: [],
+      histograms: [],
+      timings: []
+    })
+}
+
+export const NoOpMetricsLive: Layer.Layer<MetricsServiceId> = Layer.succeed(Metrics, noOpMetrics)
+
+export const counter = (name: string) => Effect.flatMap(Metrics, (metrics) => metrics.counter(name))
+
+export const gauge = (name: string) => Effect.flatMap(Metrics, (metrics) => metrics.gauge(name))
+
+export const histogram = (name: string) => Effect.flatMap(Metrics, (metrics) => metrics.histogram(name))
+
+export const timed = <A, E, R>(label: string, effect: Effect.Effect<A, E, R>) =>
+  Effect.flatMap(Metrics, (metrics) => metrics.timed(label, effect))
+
+export const snapshot = () => Effect.flatMap(Metrics, (metrics) => metrics.snapshot())
```
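For reference, a minimal usage sketch of the metrics service above (not part of the package). It assumes the sketch sits next to `Metrics.ts`; `Effect.provide`, `Effect.runPromise`, and `Effect.sleep` come from the `effect` runtime.

```ts
// Illustrative sketch only: record a counter and a timing against the
// in-memory backend, then print the snapshot.
import * as Effect from "effect/Effect"
import { InMemoryMetricsLive, counter, timed, snapshot } from "./Metrics"

const program = Effect.gen(function* () {
  const steps = yield* counter("train.steps")
  yield* steps.inc()
  yield* steps.inc(4)
  yield* timed("epoch", Effect.sleep("10 millis"))
  return yield* snapshot()
})

Effect.runPromise(program.pipe(Effect.provide(InMemoryMetricsLive))).then(console.log)
```

Swapping `InMemoryMetricsLive` for `NoOpMetricsLive` leaves the program unchanged but discards all measurements, which is what the layer-based design is for.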
package/src/services/Random.ts

```diff
@@ -0,0 +1,98 @@
+import * as Effect from "effect/Effect"
+import * as Context from "effect/Context"
+import * as Layer from "effect/Layer"
+
+export interface RandomService {
+  readonly next: () => Effect.Effect<number>
+  readonly nextGaussian: (mean: number, std: number) => Effect.Effect<number>
+  readonly nextInt: (min: number, max: number) => Effect.Effect<number>
+  readonly fork: () => Effect.Effect<RandomService>
+}
+
+export interface RandomServiceId {
+  readonly RandomService: unique symbol
+}
+
+export const Random = Context.GenericTag<RandomServiceId, RandomService>("RandomService")
+
+interface RngState {
+  state: number
+}
+
+const mulberry32Next = (rng: RngState): number => {
+  rng.state = (rng.state + 0x6d2b79f5) >>> 0
+  let t = rng.state
+  t = Math.imul(t ^ (t >>> 15), t | 1)
+  t ^= t + Math.imul(t ^ (t >>> 7), t | 61)
+  return ((t ^ (t >>> 14)) >>> 0) / 4294967296
+}
+
+const boxMullerGaussian = (rng: RngState, mean: number, std: number): number => {
+  const u1 = mulberry32Next(rng)
+  const u2 = mulberry32Next(rng)
+  const z0 = Math.sqrt(-2.0 * Math.log(u1)) * Math.cos(2.0 * Math.PI * u2)
+  return z0 * std + mean
+}
+
+const makeSeededRandom = (seed: number): RandomService => {
+  const rng: RngState = { state: seed >>> 0 }
+
+  const service: RandomService = {
+    next: () => Effect.sync(() => mulberry32Next(rng)),
+
+    nextGaussian: (mean, std) => Effect.sync(() => boxMullerGaussian(rng, mean, std)),
+
+    nextInt: (min, max) =>
+      Effect.sync(() => {
+        const range = max - min
+        return Math.floor(mulberry32Next(rng) * range) + min
+      }),
+
+    fork: () =>
+      Effect.sync(() => {
+        const forkSeed = Math.floor(mulberry32Next(rng) * 0xffffffff)
+        return makeSeededRandom(forkSeed)
+      })
+  }
+
+  return service
+}
+
+const makeSystemRandom = (): RandomService => {
+  const service: RandomService = {
+    next: () => Effect.sync(() => Math.random()),
+
+    nextGaussian: (mean, std) =>
+      Effect.sync(() => {
+        const u1 = Math.random()
+        const u2 = Math.random()
+        const z0 = Math.sqrt(-2.0 * Math.log(u1)) * Math.cos(2.0 * Math.PI * u2)
+        return z0 * std + mean
+      }),
+
+    nextInt: (min, max) =>
+      Effect.sync(() => {
+        const range = max - min
+        return Math.floor(Math.random() * range) + min
+      }),
+
+    fork: () => Effect.succeed(makeSystemRandom())
+  }
+
+  return service
+}
+
+export const SeededRandomLive = (seed: number): Layer.Layer<RandomServiceId> =>
+  Layer.succeed(Random, makeSeededRandom(seed))
+
+export const SystemRandomLive: Layer.Layer<RandomServiceId> = Layer.succeed(Random, makeSystemRandom())
+
+export const next = () => Effect.flatMap(Random, (random) => random.next())
+
+export const nextGaussian = (mean: number, std: number) =>
+  Effect.flatMap(Random, (random) => random.nextGaussian(mean, std))
+
+export const nextInt = (min: number, max: number) =>
+  Effect.flatMap(Random, (random) => random.nextInt(min, max))
+
+export const fork = () => Effect.flatMap(Random, (random) => random.fork())
```
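For reference, a minimal usage sketch of the random service above (not part of the package). It assumes the sketch sits next to `Random.ts`; with `SeededRandomLive`, the same seed always produces the same sequence.

```ts
// Illustrative sketch only: draw a few values from the seeded generator.
import * as Effect from "effect/Effect"
import { SeededRandomLive, next, nextGaussian, nextInt } from "./Random"

const program = Effect.gen(function* () {
  const u = yield* next()             // uniform in [0, 1) via mulberry32
  const g = yield* nextGaussian(0, 1) // standard normal via Box-Muller
  const i = yield* nextInt(0, 10)     // integer in [0, 10)
  return { u, g, i }
})

Effect.runPromise(program.pipe(Effect.provide(SeededRandomLive(42)))).then(console.log)
```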