@bayoudhi/moose-lib-serverless 0.1.0

@@ -0,0 +1,383 @@
1
+ export { JWT, Key } from './browserCompatible.js';
2
+ import { Readable } from 'node:stream';
3
+ import { IsTuple } from 'typia/lib/typings/IsTuple';
4
+ export { y as Aggregated, z as Api, B as ApiConfig, A as ApiUtil, a as ClickHouseByteSize, b as ClickHouseCodec, c as ClickHouseDecimal, d as ClickHouseDefault, E as ClickHouseEngines, e as ClickHouseFixedStringSize, f as ClickHouseFloat, g as ClickHouseInt, h as ClickHouseJson, i as ClickHouseMaterialized, j as ClickHouseNamedTuple, k as ClickHousePrecision, l as ClickHouseTTL, G as ConsumptionApi, C as ConsumptionUtil, D as DateTime, m as DateTime64, n as DateTime64String, o as DateTimeString, H as DeadLetter, J as DeadLetterModel, K as DeadLetterQueue, p as Decimal, N as ETLPipeline, O as ETLPipelineConfig, M as EgressConfig, F as FixedString, q as Float32, r as Float64, P as FrameworkApp, ar as IdentifierBrandedString, a8 as IngestApi, a9 as IngestConfig, aa as IngestPipeline, s as Int16, t as Int32, u as Int64, I as Int8, ab as LifeCycle, L as LowCardinality, ac as MaterializedView, as as NonIdentifierBrandedString, ad as OlapConfig, ae as OlapTable, au as RawValue, af as S3QueueTableSettings, ag as SimpleAggregated, ax as Sql, ah as SqlResource, av as SqlTemplateTag, ai as Stream, aj as StreamConfig, ak as Task, v as UInt16, w as UInt32, x as UInt64, U as UInt8, at as Value, al as View, am as WebApp, an as WebAppConfig, ao as WebAppHandler, W as WithDefault, ap as Workflow, aC as createClickhouseParameter, Q as getApi, R as getApis, S as getIngestApi, T as getIngestApis, V as getMaterializedView, X as getMaterializedViews, Y as getSqlResource, Z as getSqlResources, _ as getStream, $ as getStreams, a0 as getTable, a1 as getTables, aB as getValueFromParameter, a2 as getView, a3 as getViews, a4 as getWebApp, a5 as getWebApps, a6 as getWorkflow, a7 as getWorkflows, aD as mapToClickHouseType, aq as quoteIdentifier, aw as sql, az as toQuery, aA as toQueryPreview, ay as toStaticQuery } from './helpers-C3Yr4RzN.js';
5
+ import 'typia';
6
+ import '@clickhouse/client';
7
+ import '@temporalio/client';
8
+ import 'jose';
9
+ import 'http';
10
+ import 'typia/src/schemas/json/IJsonSchemaCollection';
11
+ import 'typia/lib/tags';
12
+
13
+ /**
14
+ * @fileoverview Pure TypeScript types, interfaces, constants, and utility functions
15
+ * extracted from commons.ts. This module has NO native dependencies (no Kafka,
16
+ * no ClickHouse client, no fs) and is safe to import in serverless/Lambda environments.
17
+ *
18
+ * The full commons.ts re-exports everything from this module for backward compatibility.
19
+ *
20
+ * @module commons-types
21
+ */
22
+ /**
23
+ * Utility function for compiler-related logging that can be disabled via an environment variable.
24
+ * Set MOOSE_DISABLE_COMPILER_LOGS=true to suppress these logs (useful for testing environments).
25
+ */
26
+ declare const compilerLog: (message: string) => void;
27
+ declare const antiCachePath: (path: string) => string;
28
+ declare const getFileName: (filePath: string) => string;
29
+ type CliLogData = {
30
+ message_type?: "Info" | "Success" | "Warning" | "Error" | "Highlight";
31
+ action: string;
32
+ message: string;
33
+ };
34
+ declare const cliLog: (log: CliLogData) => void;
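A minimal usage sketch for the two logging helpers above, based only on the signatures shown here; the import path assumes these symbols are exposed from the package root.

```typescript
import { cliLog, compilerLog, type CliLogData } from "@bayoudhi/moose-lib-serverless";

// Plain-string compiler log; silenced when MOOSE_DISABLE_COMPILER_LOGS=true.
compilerLog("compiling streaming functions...");

// Structured CLI log; message_type is optional, so we set it explicitly here.
const entry: CliLogData = {
  message_type: "Info",
  action: "migrate",
  message: "applied 3 table changes",
};
cliLog(entry);
```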
35
+ /**
36
+ * Converts .ts, .cts, and .mts file extensions to .js, .cjs, and .mjs.
37
+ * This is needed because 'import' does not support .ts, .cts, and .mts
38
+ */
39
+ declare function mapTstoJs(filePath: string): string;
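A hypothetical sketch of the extension mapping described above, for illustration only; the real mapTstoJs may handle edge cases differently, and the sketch name is ours.

```typescript
// Sketch of the documented behavior: rewrite TypeScript extensions to their
// emitted JavaScript counterparts. Not the library's actual implementation.
function mapTstoJsSketch(filePath: string): string {
  return filePath
    .replace(/\.ts$/, ".js")
    .replace(/\.cts$/, ".cjs")
    .replace(/\.mts$/, ".mjs");
}

// mapTstoJsSketch("functions/transform.mts") -> "functions/transform.mjs"
```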
40
+ declare const MAX_RETRIES = 150;
41
+ declare const MAX_RETRY_TIME_MS = 1000;
42
+ declare const RETRY_INITIAL_TIME_MS = 100;
43
+ declare const MAX_RETRIES_PRODUCER = 150;
44
+ declare const RETRY_FACTOR_PRODUCER = 0.2;
45
+ declare const ACKs = -1;
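The constants above suggest a capped exponential backoff; the sketch below shows one way they could be combined. It is not the library's internal retry loop, and the import path is an assumption.

```typescript
import {
  MAX_RETRIES,
  MAX_RETRY_TIME_MS,
  RETRY_INITIAL_TIME_MS,
} from "@bayoudhi/moose-lib-serverless";

// Retry an async operation, doubling the delay each attempt and capping it
// at MAX_RETRY_TIME_MS. Illustrative only.
async function withRetries<T>(op: () => Promise<T>): Promise<T> {
  let lastError: unknown;
  for (let attempt = 0; attempt < MAX_RETRIES; attempt++) {
    try {
      return await op();
    } catch (e) {
      lastError = e;
      const delayMs = Math.min(RETRY_INITIAL_TIME_MS * 2 ** attempt, MAX_RETRY_TIME_MS);
      await new Promise((resolve) => setTimeout(resolve, delayMs));
    }
  }
  throw lastError;
}
```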
46
+ /**
47
+ * Interface for logging functionality
48
+ */
49
+ interface Logger {
50
+ logPrefix: string;
51
+ log: (message: string) => void;
52
+ error: (message: string) => void;
53
+ warn: (message: string) => void;
54
+ }
55
+ declare const logError: (logger: Logger, e: Error) => void;
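A console-backed Logger that satisfies the interface above, passed to logError; the prefix is arbitrary and the import path is an assumption.

```typescript
import { logError, type Logger } from "@bayoudhi/moose-lib-serverless";

const logger: Logger = {
  logPrefix: "[sync]",
  log: (message) => console.log(`[sync] ${message}`),
  error: (message) => console.error(`[sync] ${message}`),
  warn: (message) => console.warn(`[sync] ${message}`),
};

try {
  throw new Error("connection refused");
} catch (e) {
  // logError reports the Error through the provided Logger.
  logError(logger, e as Error);
}
```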
56
+ type KafkaClientConfig = {
57
+ clientId: string;
58
+ broker: string;
59
+ securityProtocol?: string;
60
+ saslUsername?: string;
61
+ saslPassword?: string;
62
+ saslMechanism?: string;
63
+ };
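An example value for KafkaClientConfig with placeholder settings; the accepted protocol and mechanism strings are not documented in this file, so the values shown are assumptions, and the SASL fields are only relevant for authenticated brokers.

```typescript
import type { KafkaClientConfig } from "@bayoudhi/moose-lib-serverless";

// Placeholder broker address and credentials for illustration.
const kafkaConfig: KafkaClientConfig = {
  clientId: "my-moose-app",
  broker: "localhost:9092",
  securityProtocol: "SASL_SSL",
  saslMechanism: "SCRAM-SHA-256",
  saslUsername: process.env.KAFKA_USERNAME ?? "",
  saslPassword: process.env.KAFKA_PASSWORD ?? "",
};
```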
64
+
65
+ /**
66
+ * @module secrets
67
+ * Utilities for runtime environment variable resolution.
68
+ *
69
+ * This module provides functionality to mark values that should be resolved
70
+ * from environment variables at runtime by the Moose CLI, rather than being
71
+ * embedded at build time.
72
+ *
73
+ * @example
74
+ * ```typescript
75
+ * import { S3QueueEngine, mooseRuntimeEnv } from 'moose-lib';
76
+ *
77
+ * const table = OlapTable<MyData>(
78
+ * "MyTable",
79
+ * OlapConfig({
80
+ * engine: S3QueueEngine({
81
+ * s3_path: "s3://bucket/data/*.json",
82
+ * format: "JSONEachRow",
83
+ * awsAccessKeyId: mooseRuntimeEnv.get("AWS_ACCESS_KEY_ID"),
84
+ * awsSecretAccessKey: mooseRuntimeEnv.get("AWS_SECRET_ACCESS_KEY")
85
+ * })
86
+ * })
87
+ * );
88
+ * ```
89
+ */
90
+ /**
91
+ * Prefix used to mark values for runtime environment variable resolution.
92
+ * @internal
93
+ */
94
+ declare const MOOSE_RUNTIME_ENV_PREFIX = "__MOOSE_RUNTIME_ENV__:";
95
+ /**
96
+ * Utilities for marking values to be resolved from environment variables at runtime.
97
+ *
98
+ * When you use `mooseRuntimeEnv.get()`, the behavior depends on the context:
99
+ * - During infrastructure map loading: Returns a marker string for later resolution
100
+ * - During function/workflow execution: Returns the actual environment variable value
101
+ *
102
+ * This is useful for:
103
+ * - Credentials that should never be embedded in Docker images
104
+ * - Configuration that can be rotated without rebuilding
105
+ * - Different values for different environments (dev, staging, prod)
106
+ * - Any runtime configuration in infrastructure elements (Tables, Topics, etc.)
107
+ */
108
+ declare const mooseRuntimeEnv: {
109
+ /**
110
+ * Gets a value from an environment variable, with behavior depending on context.
111
+ *
112
+ * When IS_LOADING_INFRA_MAP=true (infrastructure loading):
113
+ * Returns a marker string that Moose CLI will resolve later
114
+ *
115
+ * When IS_LOADING_INFRA_MAP is unset (function/workflow runtime):
116
+ * Returns the actual value from the environment variable
117
+ *
118
+ * @param envVarName - Name of the environment variable to resolve
119
+ * @returns Either a marker string or the actual environment variable value
120
+ * @throws {Error} If the environment variable name is empty
121
+ * @throws {Error} If the environment variable is not set (runtime mode only)
122
+ *
123
+ * @example
124
+ * ```typescript
125
+ * // Instead of this (evaluated at build time):
126
+ * awsAccessKeyId: process.env.AWS_ACCESS_KEY_ID
127
+ *
128
+ * // Use this (evaluated at runtime):
129
+ * awsAccessKeyId: mooseRuntimeEnv.get("AWS_ACCESS_KEY_ID")
130
+ * ```
131
+ */
132
+ get(envVarName: string): string;
133
+ };
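A sketch of how the two documented modes can be distinguished by a caller. The marker-string shape mentioned in the comment is an assumption based on the exported MOOSE_RUNTIME_ENV_PREFIX; the actual resolution is performed by the Moose CLI, and the import path is an assumption.

```typescript
import {
  MOOSE_RUNTIME_ENV_PREFIX,
  mooseRuntimeEnv,
} from "@bayoudhi/moose-lib-serverless";

// During infra-map loading (IS_LOADING_INFRA_MAP=true) the docs say this
// returns a marker for later resolution, plausibly something like
// `${MOOSE_RUNTIME_ENV_PREFIX}AWS_ACCESS_KEY_ID`; at function/workflow
// runtime it returns the real value or throws if the variable is unset.
const keyId = mooseRuntimeEnv.get("AWS_ACCESS_KEY_ID");

if (keyId.startsWith(MOOSE_RUNTIME_ENV_PREFIX)) {
  console.log("infra-map load: value will be resolved later by the CLI");
} else {
  console.log("runtime: resolved from the environment");
}
```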
134
+ /** @deprecated Use mooseRuntimeEnv instead */
135
+ declare const mooseEnvSecrets: {
136
+ /**
137
+ * Gets a value from an environment variable, with behavior depending on context.
138
+ *
139
+ * When IS_LOADING_INFRA_MAP=true (infrastructure loading):
140
+ * Returns a marker string that Moose CLI will resolve later
141
+ *
142
+ * When IS_LOADING_INFRA_MAP is unset (function/workflow runtime):
143
+ * Returns the actual value from the environment variable
144
+ *
145
+ * @param envVarName - Name of the environment variable to resolve
146
+ * @returns Either a marker string or the actual environment variable value
147
+ * @throws {Error} If the environment variable name is empty
148
+ * @throws {Error} If the environment variable is not set (runtime mode only)
149
+ *
150
+ * @example
151
+ * ```typescript
152
+ * // Instead of this (evaluated at build time):
153
+ * awsAccessKeyId: process.env.AWS_ACCESS_KEY_ID
154
+ *
155
+ * // Use this (evaluated at runtime):
156
+ * awsAccessKeyId: mooseRuntimeEnv.get("AWS_ACCESS_KEY_ID")
157
+ * ```
158
+ */
159
+ get(envVarName: string): string;
160
+ };
161
+
162
+ /**
163
+ * Configuration for CSV parsing options
164
+ */
165
+ interface CSVParsingConfig {
166
+ /** CSV delimiter character */
167
+ delimiter: string;
168
+ /** Whether to treat the first row as headers */
169
+ columns?: boolean;
170
+ /** Whether to skip empty lines */
171
+ skipEmptyLines?: boolean;
172
+ /** Whether to trim whitespace from values */
173
+ trim?: boolean;
174
+ }
175
+ /**
176
+ * Configuration for JSON parsing options
177
+ */
178
+ interface JSONParsingConfig {
179
+ /** Custom reviver function for JSON.parse */
180
+ reviver?: (key: string, value: any) => any;
181
+ }
182
+ /**
183
+ * Parses CSV content into an array of objects
184
+ *
185
+ * @param content - The CSV content as a string
186
+ * @param config - CSV parsing configuration
187
+ * @returns Promise resolving to an array of parsed objects
188
+ */
189
+ declare function parseCSV<T = Record<string, any>>(content: string, config: CSVParsingConfig): Promise<T[]>;
190
+ /**
191
+ * Parses JSON content into an array of objects
192
+ *
193
+ * @param content - The JSON content as a string
194
+ * @param config - JSON parsing configuration
195
+ * @returns Array of parsed objects
196
+ */
197
+ declare function parseJSON<T = any>(content: string, config?: JSONParsingConfig): T[];
198
+ /**
199
+ * Parses JSON content with automatic date revival
200
+ *
201
+ * @param content - The JSON content as a string
202
+ * @returns Array of parsed objects, with ISO 8601 date strings revived as Date objects
203
+ */
204
+ declare function parseJSONWithDates<T = any>(content: string): T[];
205
+ /**
206
+ * Checks whether a string is a valid CSV delimiter
207
+ */
208
+ declare function isValidCSVDelimiter(delimiter: string): boolean;
209
+ /**
210
+ * Common CSV delimiters
211
+ */
212
+ declare const CSV_DELIMITERS: {
213
+ readonly COMMA: ",";
214
+ readonly TAB: "\t";
215
+ readonly SEMICOLON: ";";
216
+ readonly PIPE: "|";
217
+ };
218
+ /**
219
+ * Default CSV parsing configuration
220
+ */
221
+ declare const DEFAULT_CSV_CONFIG: CSVParsingConfig;
222
+ /**
223
+ * Default JSON parsing configuration with date revival
224
+ */
225
+ declare const DEFAULT_JSON_CONFIG: JSONParsingConfig;
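A usage sketch for the parsing helpers, assuming the default CSV config treats the first row as headers (the exact defaults are not shown in this file); the row shapes and import path are hypothetical.

```typescript
import {
  DEFAULT_CSV_CONFIG,
  parseCSV,
  parseJSONWithDates,
} from "@bayoudhi/moose-lib-serverless";

async function demo() {
  // Hypothetical CSV payload; parsed values stay strings unless converted later.
  const csv = "userId,path\nu1,/home\nu2,/docs";
  const rows = await parseCSV<{ userId: string; path: string }>(csv, DEFAULT_CSV_CONFIG);

  // ISO 8601 strings are revived into Date objects per the doc comment above.
  const json = '[{"userId":"u1","viewedAt":"2024-01-01T00:00:00.000Z"}]';
  const events = parseJSONWithDates<{ userId: string; viewedAt: Date }>(json);

  return { rows, events };
}
```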
226
+
227
+ type HasFunctionField<T> = T extends object ? {
228
+ [K in keyof T]: T[K] extends Function ? true : false;
229
+ }[keyof T] extends false | undefined ? false : true : false;
230
+ type OptionalToUndefinedable<T> = {
231
+ [K in {} & keyof T]: T[K];
232
+ };
233
+ type StripInterfaceFields<T> = {
234
+ [K in keyof T]: StripDateIntersection<T[K]>;
235
+ };
236
+ /**
237
+ * `Date & ...` is considered a "nonsensible intersection" by typia,
238
+ * causing JSON schema to fail.
239
+ * This helper type recursively cleans up the intersection type tagging.
240
+ */
241
+ type StripDateIntersection<T> = T extends Date ? Date extends T ? Date : T : T extends ReadonlyArray<unknown> ? IsTuple<T> extends true ? StripDateFromTuple<T> : T extends ReadonlyArray<infer U> ? ReadonlyArray<U> extends T ? ReadonlyArray<StripDateIntersection<U>> : Array<StripDateIntersection<U>> : T extends Array<infer U> ? Array<StripDateIntersection<U>> : T : true extends HasFunctionField<T> ? T : T extends object ? StripInterfaceFields<OptionalToUndefinedable<T>> : T;
242
+ type StripDateFromTuple<T extends readonly any[]> = T extends ([
243
+ infer T1,
244
+ infer T2,
245
+ infer T3,
246
+ infer T4,
247
+ infer T5,
248
+ infer T6,
249
+ infer T7,
250
+ infer T8,
251
+ infer T9,
252
+ infer T10
253
+ ]) ? [
254
+ StripDateIntersection<T1>,
255
+ StripDateIntersection<T2>,
256
+ StripDateIntersection<T3>,
257
+ StripDateIntersection<T4>,
258
+ StripDateIntersection<T5>,
259
+ StripDateIntersection<T6>,
260
+ StripDateIntersection<T7>,
261
+ StripDateIntersection<T8>,
262
+ StripDateIntersection<T9>,
263
+ StripDateIntersection<T10>
264
+ ] : T extends ([
265
+ infer T1,
266
+ infer T2,
267
+ infer T3,
268
+ infer T4,
269
+ infer T5,
270
+ infer T6,
271
+ infer T7,
272
+ infer T8,
273
+ infer T9
274
+ ]) ? [
275
+ StripDateIntersection<T1>,
276
+ StripDateIntersection<T2>,
277
+ StripDateIntersection<T3>,
278
+ StripDateIntersection<T4>,
279
+ StripDateIntersection<T5>,
280
+ StripDateIntersection<T6>,
281
+ StripDateIntersection<T7>,
282
+ StripDateIntersection<T8>,
283
+ StripDateIntersection<T9>
284
+ ] : T extends ([
285
+ infer T1,
286
+ infer T2,
287
+ infer T3,
288
+ infer T4,
289
+ infer T5,
290
+ infer T6,
291
+ infer T7,
292
+ infer T8
293
+ ]) ? [
294
+ StripDateIntersection<T1>,
295
+ StripDateIntersection<T2>,
296
+ StripDateIntersection<T3>,
297
+ StripDateIntersection<T4>,
298
+ StripDateIntersection<T5>,
299
+ StripDateIntersection<T6>,
300
+ StripDateIntersection<T7>,
301
+ StripDateIntersection<T8>
302
+ ] : T extends ([
303
+ infer T1,
304
+ infer T2,
305
+ infer T3,
306
+ infer T4,
307
+ infer T5,
308
+ infer T6,
309
+ infer T7
310
+ ]) ? [
311
+ StripDateIntersection<T1>,
312
+ StripDateIntersection<T2>,
313
+ StripDateIntersection<T3>,
314
+ StripDateIntersection<T4>,
315
+ StripDateIntersection<T5>,
316
+ StripDateIntersection<T6>,
317
+ StripDateIntersection<T7>
318
+ ] : T extends [infer T1, infer T2, infer T3, infer T4, infer T5, infer T6] ? [
319
+ StripDateIntersection<T1>,
320
+ StripDateIntersection<T2>,
321
+ StripDateIntersection<T3>,
322
+ StripDateIntersection<T4>,
323
+ StripDateIntersection<T5>,
324
+ StripDateIntersection<T6>
325
+ ] : T extends [infer T1, infer T2, infer T3, infer T4, infer T5] ? [
326
+ StripDateIntersection<T1>,
327
+ StripDateIntersection<T2>,
328
+ StripDateIntersection<T3>,
329
+ StripDateIntersection<T4>,
330
+ StripDateIntersection<T5>
331
+ ] : T extends [infer T1, infer T2, infer T3, infer T4] ? [
332
+ StripDateIntersection<T1>,
333
+ StripDateIntersection<T2>,
334
+ StripDateIntersection<T3>,
335
+ StripDateIntersection<T4>
336
+ ] : T extends [infer T1, infer T2, infer T3] ? [
337
+ StripDateIntersection<T1>,
338
+ StripDateIntersection<T2>,
339
+ StripDateIntersection<T3>
340
+ ] : T extends [infer T1, infer T2] ? [
341
+ StripDateIntersection<T1>,
342
+ StripDateIntersection<T2>
343
+ ] : T extends [infer T1] ? [StripDateIntersection<T1>] : [];
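A type-level illustration of what the helper above is documented to do; SomeTag is a stand-in for whatever intersection typia attaches, not an actual library type, and the import path is an assumption.

```typescript
import type { StripDateIntersection } from "@bayoudhi/moose-lib-serverless";

// Stand-in for a typia-style tag; real tags come from typia/moose type helpers.
type SomeTag = { kind?: "datetime-tag" };

interface RawEvent {
  id: string;
  createdAt: Date & SomeTag;
  nested: { updatedAt: Date & SomeTag }[];
}

// Per the doc comment, `Date & ...` intersections are cleaned back toward
// plain `Date` recursively so typia's JSON schema generation does not fail;
// the exact result depends on the tag's shape.
type CleanEvent = StripDateIntersection<RawEvent>;
```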
344
+
345
+ /**
346
+ * Configuration for a data source
347
+ */
348
+ interface DataSourceConfig {
349
+ name: string;
350
+ supportsIncremental?: boolean;
351
+ }
352
+ /**
353
+ * DataSource is an abstract class that defines the interface for all data sources.
354
+ * It is used to extract data from a source and test the connection to the source.
355
+ */
356
+ declare abstract class DataSource<T = any, ItemType = any> {
357
+ protected name: string;
358
+ protected supportsIncremental: boolean;
359
+ constructor(config: DataSourceConfig);
360
+ /**
361
+ * Extract data from the source
362
+ * Returns either ItemType (for single requests) or Readable (for paginated requests)
363
+ */
364
+ abstract extract(): Promise<ItemType | Readable>;
365
+ /**
366
+ * Test connection to the source
367
+ */
368
+ abstract testConnection(): Promise<{
369
+ success: boolean;
370
+ message?: string;
371
+ }>;
372
+ }
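A hypothetical concrete source implementing the abstract class above; the endpoint, type names, and use of fetch are illustrative, not part of the library.

```typescript
import { Readable } from "node:stream";
import { DataSource } from "@bayoudhi/moose-lib-serverless";

interface Order {
  id: string;
  total: number;
}

// Hypothetical REST-backed source; a paginated variant could return a
// Readable stream of Order items instead of a single array.
class OrdersApiSource extends DataSource<Order, Order[]> {
  constructor(private baseUrl: string) {
    super({ name: "orders-api", supportsIncremental: false });
  }

  async extract(): Promise<Order[] | Readable> {
    const res = await fetch(`${this.baseUrl}/orders`);
    return (await res.json()) as Order[];
  }

  async testConnection(): Promise<{ success: boolean; message?: string }> {
    try {
      const res = await fetch(`${this.baseUrl}/health`);
      return { success: res.ok, message: res.ok ? undefined : `HTTP ${res.status}` };
    } catch (e) {
      return { success: false, message: (e as Error).message };
    }
  }
}
```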
373
+ /**
374
+ * Result returned from extraction
375
+ * For single requests: data is of type T
376
+ * For paginated requests: data is a Readable stream yielding items of type T
377
+ */
378
+ interface ExtractionResult<T = any> {
379
+ data: T | Readable;
380
+ metadata: Record<string, any>;
381
+ }
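Since `data` can be either a single payload or a Readable stream, consumers need to narrow it; a small helper sketch follows (the function name is ours, not the library's, and the import path is an assumption).

```typescript
import { Readable } from "node:stream";
import type { ExtractionResult } from "@bayoudhi/moose-lib-serverless";

// Collects an extraction result into an array, handling both shapes.
async function collect<T>(result: ExtractionResult<T>): Promise<T[]> {
  if (result.data instanceof Readable) {
    const items: T[] = [];
    for await (const item of result.data) {
      items.push(item as T);
    }
    return items;
  }
  return [result.data as T];
}
```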
382
+
383
+ export { ACKs, type CSVParsingConfig, CSV_DELIMITERS, type CliLogData, DEFAULT_CSV_CONFIG, DEFAULT_JSON_CONFIG, DataSource, type DataSourceConfig, type ExtractionResult, type JSONParsingConfig, type KafkaClientConfig, type Logger, MAX_RETRIES, MAX_RETRIES_PRODUCER, MAX_RETRY_TIME_MS, MOOSE_RUNTIME_ENV_PREFIX, RETRY_FACTOR_PRODUCER, RETRY_INITIAL_TIME_MS, type StripDateIntersection, antiCachePath, cliLog, compilerLog, getFileName, isValidCSVDelimiter, logError, mapTstoJs, mooseEnvSecrets, mooseRuntimeEnv, parseCSV, parseJSON, parseJSONWithDates };