@effect/sql-clickhouse 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/ClickhouseClient/package.json +6 -0
- package/ClickhouseMigrator/package.json +6 -0
- package/LICENSE +21 -0
- package/README.md +5 -0
- package/dist/cjs/ClickhouseClient.js +258 -0
- package/dist/cjs/ClickhouseClient.js.map +1 -0
- package/dist/cjs/ClickhouseMigrator.js +61 -0
- package/dist/cjs/ClickhouseMigrator.js.map +1 -0
- package/dist/cjs/index.js +13 -0
- package/dist/cjs/index.js.map +1 -0
- package/dist/dts/ClickhouseClient.d.ts +96 -0
- package/dist/dts/ClickhouseClient.d.ts.map +1 -0
- package/dist/dts/ClickhouseMigrator.d.ts +27 -0
- package/dist/dts/ClickhouseMigrator.d.ts.map +1 -0
- package/dist/dts/index.d.ts +9 -0
- package/dist/dts/index.d.ts.map +1 -0
- package/dist/esm/ClickhouseClient.js +246 -0
- package/dist/esm/ClickhouseClient.js.map +1 -0
- package/dist/esm/ClickhouseMigrator.js +24 -0
- package/dist/esm/ClickhouseMigrator.js.map +1 -0
- package/dist/esm/index.js +9 -0
- package/dist/esm/index.js.map +1 -0
- package/dist/esm/package.json +4 -0
- package/package.json +56 -0
- package/src/ClickhouseClient.ts +361 -0
- package/src/ClickhouseMigrator.ts +42 -0
- package/src/index.ts +9 -0
|
@@ -0,0 +1,361 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @since 1.0.0
|
|
3
|
+
*/
|
|
4
|
+
import * as Clickhouse from "@clickhouse/client"
|
|
5
|
+
import * as NodeStream from "@effect/platform-node/NodeStream"
|
|
6
|
+
import * as Client from "@effect/sql/SqlClient"
|
|
7
|
+
import type { Connection } from "@effect/sql/SqlConnection"
|
|
8
|
+
import { SqlError } from "@effect/sql/SqlError"
|
|
9
|
+
import type { Primitive } from "@effect/sql/Statement"
|
|
10
|
+
import * as Statement from "@effect/sql/Statement"
|
|
11
|
+
import * as Otel from "@opentelemetry/semantic-conventions"
|
|
12
|
+
import * as Chunk from "effect/Chunk"
|
|
13
|
+
import * as Config from "effect/Config"
|
|
14
|
+
import type { ConfigError } from "effect/ConfigError"
|
|
15
|
+
import * as Context from "effect/Context"
|
|
16
|
+
import * as Effect from "effect/Effect"
|
|
17
|
+
import * as FiberRef from "effect/FiberRef"
|
|
18
|
+
import { dual, identity } from "effect/Function"
|
|
19
|
+
import { globalValue } from "effect/GlobalValue"
|
|
20
|
+
import * as Layer from "effect/Layer"
|
|
21
|
+
import type * as Scope from "effect/Scope"
|
|
22
|
+
import * as Stream from "effect/Stream"
|
|
23
|
+
import * as Crypto from "node:crypto"
|
|
24
|
+
import type { Readable } from "node:stream"
|
|
25
|
+
|
|
26
|
+
/**
 * Unique runtime brand identifying a `ClickhouseClient` service.
 *
 * @category type ids
 * @since 1.0.0
 */
export const TypeId: unique symbol = Symbol.for("@effect/sql-clickhouse/ClickhouseClient")

/**
 * @category type ids
 * @since 1.0.0
 */
export type TypeId = typeof TypeId
|
|
37
|
+
|
|
38
|
+
/**
 * A `SqlClient` specialised for ClickHouse, extended with ClickHouse-only
 * operations: typed parameters, command-mode execution, bulk inserts and
 * explicit query ids.
 *
 * @category models
 * @since 1.0.0
 */
export interface ClickhouseClient extends Client.SqlClient {
  readonly [TypeId]: TypeId
  // the configuration this client was constructed from
  readonly config: ClickhouseClientConfig
  // build a typed parameter fragment, e.g. `param("Int64", 1n)`
  readonly param: (dataType: string, value: Statement.Primitive) => Statement.Fragment
  // run statements inside `effect` via the ClickHouse "command" API
  // (no result rows) instead of "query"
  readonly asCommand: <A, E, R>(effect: Effect.Effect<A, E, R>) => Effect.Effect<A, E, R>
  // bulk insert values (or a Readable stream) into `table`;
  // `format` defaults to "JSONEachRow" in the implementation
  readonly insertQuery: <T = unknown>(options: {
    readonly table: string
    readonly values: Clickhouse.InsertValues<Readable, T>
    readonly format?: Clickhouse.DataFormat
  }) => Effect.Effect<Clickhouse.InsertResult, SqlError>
  // pin the ClickHouse `query_id` used for statements within `effect`
  readonly withQueryId: {
    (queryId: string): <A, E, R>(effect: Effect.Effect<A, E, R>) => Effect.Effect<A, E, R>
    <A, E, R>(effect: Effect.Effect<A, E, R>, queryId: string): Effect.Effect<A, E, R>
  }
}
|
|
57
|
+
|
|
58
|
+
/**
 * Service tag used to provide / resolve the {@link ClickhouseClient}.
 *
 * @category tags
 * @since 1.0.0
 */
export const ClickhouseClient = Context.GenericTag<ClickhouseClient>("@effect/sql-clickhouse/ClickhouseClient")
|
|
63
|
+
|
|
64
|
+
/**
 * Options accepted by `make` / `layer`. Extends the official ClickHouse
 * client configuration with name transforms and extra tracing attributes.
 *
 * @category constructors
 * @since 1.0.0
 */
export interface ClickhouseClientConfig extends Clickhouse.ClickHouseClientConfigOptions {
  // additional attributes attached to every tracing span
  readonly spanAttributes?: Record<string, unknown> | undefined
  // transform applied to column names in result rows
  readonly transformResultNames?: ((str: string) => string) | undefined
  // transform applied to identifiers in generated queries
  readonly transformQueryNames?: ((str: string) => string) | undefined
}
|
|
73
|
+
|
|
74
|
+
/**
 * Creates a scoped `ClickhouseClient`. Connectivity is verified eagerly
 * with a `SELECT 1`, and the underlying ClickHouse client is closed when
 * the enclosing `Scope` ends.
 *
 * @category constructors
 * @since 1.0.0
 */
export const make = (
  options: ClickhouseClientConfig
): Effect.Effect<ClickhouseClient, SqlError, Scope.Scope> =>
  Effect.gen(function*(_) {
    const compiler = makeCompiler(options.transformQueryNames)
    // optional post-processing of result-row keys; identity when disabled
    const transformRows = options.transformResultNames
      ? Statement.defaultTransforms(options.transformResultNames).array
      : identity

    const client = Clickhouse.createClient(options)

    // fail fast if the server is unreachable; close the client on release
    yield* Effect.acquireRelease(
      Effect.tryPromise({
        try: () => client.exec({ query: "SELECT 1" }),
        catch: (cause) => new SqlError({ cause, message: "ClickhouseClient: Failed to connect" })
      }),
      () => Effect.promise(() => client.close())
    )

    class ConnectionImpl implements Connection {
      constructor(private readonly conn: Clickhouse.ClickHouseClient) {}

      // Executes `sql` with positional params bound as named ClickHouse
      // params p1..pN — matching the `{pN: Type}` placeholders produced by
      // the compiler. Dispatches to `command` or `query` based on the
      // `currentClientMethod` fiber-ref. On interruption the in-flight
      // request is aborted and a KILL QUERY is issued for its query_id.
      private runRaw(sql: string, params: ReadonlyArray<Primitive>, format: Clickhouse.DataFormat = "JSON") {
        const paramsObj: Record<string, unknown> = {}
        for (let i = 0; i < params.length; i++) {
          paramsObj[`p${i + 1}`] = params[i]
        }
        return Effect.withFiberRuntime<Clickhouse.ResultSet<"JSON"> | Clickhouse.CommandResult, SqlError>((fiber) => {
          const method = fiber.getFiberRef(currentClientMethod)
          return Effect.async<Clickhouse.ResultSet<"JSON"> | Clickhouse.CommandResult, SqlError>((resume) => {
            // honor a user-pinned query id (withQueryId), else generate one
            const queryId = fiber.getFiberRef(currentQueryId) ?? Crypto.randomUUID()
            const controller = new AbortController()
            if (method === "command") {
              this.conn.command({
                query: sql,
                query_params: paramsObj,
                abort_signal: controller.signal,
                query_id: queryId
              }).then(
                (result) => resume(Effect.succeed(result)),
                (cause) => resume(Effect.fail(new SqlError({ cause, message: "Failed to execute statement" })))
              )
            } else {
              this.conn.query({
                query: sql,
                query_params: paramsObj,
                abort_signal: controller.signal,
                query_id: queryId,
                format
              }).then(
                (result) => resume(Effect.succeed(result)),
                (cause) => resume(Effect.fail(new SqlError({ cause, message: "Failed to execute statement" })))
              )
            }
            // interruption handler: abort locally, then kill server-side.
            // NOTE(review): queryId is interpolated into the KILL statement;
            // when supplied via withQueryId it is caller-controlled — confirm
            // it cannot contain quotes (UUIDs generated here are safe).
            return Effect.suspend(() => {
              controller.abort()
              return Effect.promise(() => this.conn.command({ query: `KILL QUERY WHERE query_id = '${queryId}'` }))
            })
          })
        })
      }

      // Run and decode: JSON result sets are unwrapped to their `data`
      // rows; parse failures and command results yield an empty array.
      private run(sql: string, params: ReadonlyArray<Primitive>, format?: Clickhouse.DataFormat) {
        return this.runRaw(sql, params, format).pipe(
          Effect.flatMap((result) => {
            if ("json" in result) {
              return Effect.promise(() =>
                result.json().then(
                  (result) => "data" in result ? result.data : result as any,
                  // swallow decode errors (e.g. empty body) as no rows
                  () => []
                )
              )
            }
            return Effect.succeed([])
          })
        )
      }

      // `run` plus the optional result-name transform
      private runTransform(sql: string, params: ReadonlyArray<Primitive>) {
        return options.transformResultNames
          ? Effect.map(this.run(sql, params), transformRows)
          : this.run(sql, params)
      }

      execute(sql: string, params: ReadonlyArray<Primitive>) {
        return this.runTransform(sql, params)
      }
      executeRaw(sql: string, params: ReadonlyArray<Primitive>) {
        return this.runRaw(sql, params)
      }
      executeWithoutTransform(sql: string, params: ReadonlyArray<Primitive>) {
        return this.run(sql, params)
      }
      executeValues(sql: string, params: ReadonlyArray<Primitive>) {
        // JSONCompact returns rows as positional arrays rather than objects
        return this.run(sql, params, "JSONCompact")
      }
      executeUnprepared(sql: string, params?: ReadonlyArray<Primitive>) {
        return this.runTransform(sql, params ?? [])
      }
      // Streams rows via JSONEachRow: each chunk's rows are parsed in
      // parallel, transformed, and re-flattened into the output stream.
      executeStream(sql: string, params: ReadonlyArray<Primitive>) {
        return this.runRaw(sql, params, "JSONEachRow").pipe(
          Effect.map((result) => {
            // command results carry no stream — emit nothing
            if (!("stream" in result)) {
              return Stream.empty
            }
            return NodeStream.fromReadable<SqlError, ReadonlyArray<Clickhouse.Row<any, "JSONEachRow">>>(
              () => result.stream() as any,
              (cause) => new SqlError({ cause, message: "Failed to execute stream" })
            )
          }),
          Stream.unwrap,
          Stream.chunks,
          Stream.mapEffect((chunk) => {
            const promises: Array<Promise<any>> = []
            for (const rows of chunk) {
              for (const row of rows) {
                promises.push(row.json())
              }
            }
            return Effect.tryPromise({
              try: () => Promise.all(promises).then((rows) => Chunk.unsafeFromArray(transformRows(rows))),
              catch: (cause) => new SqlError({ cause, message: "Failed to parse row" })
            })
          }),
          Stream.flattenChunks
        )
      }
    }

    const connection = new ConnectionImpl(client)

    // Base SqlClient behavior + the ClickHouse-specific surface
    return Object.assign(
      Client.make({
        acquirer: Effect.succeed(connection),
        compiler,
        spanAttributes: [
          ...(options.spanAttributes ? Object.entries(options.spanAttributes) : []),
          [Otel.SEMATTRS_DB_SYSTEM, "clickhouse"],
          [Otel.SEMATTRS_DB_NAME, options.database ?? "default"]
        ],
        beginTransaction: "BEGIN TRANSACTION"
      }),
      {
        [TypeId]: TypeId as TypeId,
        config: options,
        param(dataType: string, value: Statement.Primitive) {
          return clickhouseParam(dataType, value)
        },
        asCommand<A, E, R>(effect: Effect.Effect<A, E, R>) {
          return Effect.locally(effect, currentClientMethod, "command")
        },
        insertQuery<T = unknown>(options: {
          readonly table: string
          readonly values: Clickhouse.InsertValues<Readable, T>
          readonly format?: Clickhouse.DataFormat
        }) {
          return FiberRef.getWith(currentQueryId, (queryId_) =>
            Effect.async<Clickhouse.InsertResult, SqlError>((resume) => {
              const queryId = queryId_ ?? Crypto.randomUUID()
              const controller = new AbortController()
              client.insert({
                // default format; overridable by the caller via `options`
                format: "JSONEachRow",
                ...options,
                abort_signal: controller.signal,
                query_id: queryId
              }).then(
                (result) =>
                  resume(Effect.succeed(result)),
                (cause) => resume(Effect.fail(new SqlError({ cause, message: "Failed to insert data" })))
              )
              // same interruption protocol as runRaw: abort + KILL QUERY
              return Effect.suspend(() => {
                controller.abort()
                return Effect.promise(() => client.command({ query: `KILL QUERY WHERE query_id = '${queryId}'` }))
              })
            }))
        },
        // data-last / data-first via `dual`
        withQueryId: dual(2, <A, E, R>(effect: Effect.Effect<A, E, R>, queryId: string) =>
          Effect.locally(effect, currentQueryId, queryId))
      }
    )
  })
|
|
259
|
+
|
|
260
|
+
/**
 * Fiber-ref selecting which ClickHouse client API statements are executed
 * with; defaults to "query". Set to "command" via `asCommand`.
 *
 * @category fiber refs
 * @since 1.0.0
 */
export const currentClientMethod: FiberRef.FiberRef<"query" | "command" | "insert"> = globalValue(
  "@effect/sql-clickhouse/ClickhouseClient/currentClientMethod",
  () => FiberRef.unsafeMake<"query" | "command" | "insert">("query")
)
|
|
268
|
+
|
|
269
|
+
/**
 * Fiber-ref holding an explicit ClickHouse `query_id` for statements in
 * the current fiber; when `undefined` a random UUID is generated per call.
 * Set via `withQueryId`.
 *
 * @category fiber refs
 * @since 1.0.0
 */
export const currentQueryId: FiberRef.FiberRef<string | undefined> = globalValue(
  "@effect/sql-clickhouse/ClickhouseClient/currentQueryId",
  () => FiberRef.unsafeMake<string | undefined>(undefined)
)
|
|
277
|
+
|
|
278
|
+
/**
|
|
279
|
+
* @category constructor
|
|
280
|
+
* @since 1.0.0
|
|
281
|
+
*/
|
|
282
|
+
export const layer = (
|
|
283
|
+
config: Config.Config.Wrap<ClickhouseClientConfig>
|
|
284
|
+
): Layer.Layer<ClickhouseClient | Client.SqlClient, ConfigError | SqlError> =>
|
|
285
|
+
Layer.scopedContext(
|
|
286
|
+
Config.unwrap(config).pipe(
|
|
287
|
+
Effect.flatMap(make),
|
|
288
|
+
Effect.map((client) =>
|
|
289
|
+
Context.make(ClickhouseClient, client).pipe(
|
|
290
|
+
Context.add(Client.SqlClient, client)
|
|
291
|
+
)
|
|
292
|
+
)
|
|
293
|
+
)
|
|
294
|
+
)
|
|
295
|
+
|
|
296
|
+
const typeFromUnknown = (value: unknown): string => {
|
|
297
|
+
if (Statement.isFragment(value)) {
|
|
298
|
+
return typeFromUnknown(value.segments[0])
|
|
299
|
+
} else if (isClickhouseParam(value)) {
|
|
300
|
+
return value.i0
|
|
301
|
+
} else if (Array.isArray(value)) {
|
|
302
|
+
return `Array(${typeFromUnknown(value[0])})`
|
|
303
|
+
}
|
|
304
|
+
switch (typeof value) {
|
|
305
|
+
case "number":
|
|
306
|
+
return "Decimal"
|
|
307
|
+
case "bigint":
|
|
308
|
+
return "Int64"
|
|
309
|
+
case "boolean":
|
|
310
|
+
return "Bool"
|
|
311
|
+
case "object":
|
|
312
|
+
if (value instanceof Date) {
|
|
313
|
+
return "DateTime()"
|
|
314
|
+
}
|
|
315
|
+
return "String"
|
|
316
|
+
default:
|
|
317
|
+
return "String"
|
|
318
|
+
}
|
|
319
|
+
}
|
|
320
|
+
|
|
321
|
+
/**
|
|
322
|
+
* @category compiler
|
|
323
|
+
* @since 1.0.0
|
|
324
|
+
*/
|
|
325
|
+
export const makeCompiler = (transform?: (_: string) => string) =>
|
|
326
|
+
Statement.makeCompiler<ClickhouseCustom>({
|
|
327
|
+
dialect: "sqlite",
|
|
328
|
+
placeholder(i, u) {
|
|
329
|
+
return `{p${i}: ${typeFromUnknown(u)}}`
|
|
330
|
+
},
|
|
331
|
+
onIdentifier: transform ?
|
|
332
|
+
function(value, withoutTransform) {
|
|
333
|
+
return withoutTransform ? escape(value) : escape(transform(value))
|
|
334
|
+
} :
|
|
335
|
+
escape,
|
|
336
|
+
onRecordUpdate() {
|
|
337
|
+
return ["", []]
|
|
338
|
+
},
|
|
339
|
+
onCustom(type, placeholder) {
|
|
340
|
+
return [placeholder(type), [type.i1]]
|
|
341
|
+
}
|
|
342
|
+
})
|
|
343
|
+
|
|
344
|
+
// compiler helpers

// escape identifiers by wrapping them in double quotes
const escape = Statement.defaultEscape("\"")
|
|
347
|
+
|
|
348
|
+
/**
 * Union of custom statement fragment kinds supported by this dialect.
 *
 * @category custom types
 * @since 1.0.0
 */
export type ClickhouseCustom = ClickhouseParam

/**
 * A typed parameter fragment: `i0` is the ClickHouse data-type name,
 * `i1` the bound value.
 *
 * @category custom types
 * @since 1.0.0
 */
interface ClickhouseParam extends Statement.Custom<"ClickhouseParam", string, Statement.Primitive> {}

// constructor and type guard for ClickhouseParam fragments
const clickhouseParam = Statement.custom<ClickhouseParam>("ClickhouseParam")
const isClickhouseParam = Statement.isCustom<ClickhouseParam>("ClickhouseParam")
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @since 1.0.0
|
|
3
|
+
*/
|
|
4
|
+
import * as Migrator from "@effect/sql/Migrator"
|
|
5
|
+
import type * as Client from "@effect/sql/SqlClient"
|
|
6
|
+
import type { SqlError } from "@effect/sql/SqlError"
|
|
7
|
+
import type * as Effect from "effect/Effect"
|
|
8
|
+
import * as Layer from "effect/Layer"
|
|
9
|
+
|
|
10
|
+
/**
 * Re-export of the generic migrator API.
 *
 * @since 1.0.0
 */
export * from "@effect/sql/Migrator"

/**
 * Re-export of the file-system based migration loader.
 *
 * @since 1.0.0
 */
export * from "@effect/sql/Migrator/FileSystem"
|
|
19
|
+
|
|
20
|
+
/**
 * Runs migrations using the generic `Migrator.make` with no
 * dialect-specific hooks, yielding the `[id, name]` pairs of the
 * migrations that were applied.
 *
 * @category constructor
 * @since 1.0.0
 */
export const run: <R2 = never>(
  { loader, schemaDirectory, table }: Migrator.MigratorOptions<R2>
) => Effect.Effect<
  ReadonlyArray<readonly [id: number, name: string]>,
  Migrator.MigrationError | SqlError,
  Client.SqlClient | R2
> = Migrator.make({})
|
|
31
|
+
|
|
32
|
+
/**
 * A layer that runs the migrator once during construction and discards
 * its result; layer construction fails if migration fails.
 *
 * @category layers
 * @since 1.0.0
 */
export const layer = <R>(
  options: Migrator.MigratorOptions<R>
): Layer.Layer<
  never,
  Migrator.MigrationError | SqlError,
  Client.SqlClient | R
> => Layer.effectDiscard(run(options))